Merge development into master

pull/2382/head
github-actions[bot] 3 months ago committed by GitHub
commit 38094e6323
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -48,22 +48,23 @@ If you need something that is not already part of Bazarr, feel free to create a
## Supported subtitles providers:
- Addic7ed
- Argenteam
- Assrt
- BetaSeries
- BSplayer
- Embedded Subtitles
- Gestdown.info
- GreekSubs
- GreekSubtitles
- HDBits.org
- Hosszupuska
- LegendasDivx
- Karagarga.in
- Ktuvit (Get `hashed_password` using method described [here](https://github.com/XBMCil/service.subtitles.ktuvit))
- LegendasDivx
- Napiprojekt
- Napisy24
- Nekur
- OpenSubtitles.org
- OpenSubtitles.com
- OpenSubtitles.org (VIP users only)
- Podnapisi
- RegieLive
- Sous-Titres.eu
@ -79,7 +80,6 @@ If you need something that is not already part of Bazarr, feel free to create a
- Subtitrari-noi.ro
- subtitri.id.lv
- Subtitulamos.tv
- Sucha
- Supersubtitles
- Titlovi
- Titrari.ro
@ -100,4 +100,4 @@ If you need something that is not already part of Bazarr, feel free to create a
### License
- [GNU GPL v3](http://www.gnu.org/licenses/gpl.html)
- Copyright 2010-2019
- Copyright 2010-2024

@ -13,7 +13,7 @@ from bazarr.app.get_args import args
def check_python_version():
python_version = platform.python_version_tuple()
minimum_py3_tuple = (3, 7, 0)
minimum_py3_tuple = (3, 8, 0)
minimum_py3_str = ".".join(str(i) for i in minimum_py3_tuple)
if int(python_version[0]) < minimum_py3_tuple[0]:
@ -52,22 +52,27 @@ dir_name = os.path.dirname(__file__)
def end_child_process(ep):
    """Ask child process *ep* to shut down gracefully (Ctrl-C semantics).

    On POSIX, send SIGINT so the child can run its cleanup handlers; on
    Windows, emit a console Ctrl-C event to the child's process group.
    Best-effort: any failure is swallowed because the caller is already
    exiting.
    """
    try:
        if os.name != 'nt':
            try:
                ep.send_signal(signal.SIGINT)
            except ProcessLookupError:
                # Child already gone; nothing to signal.
                pass
        else:
            import win32api
            import win32con
            try:
                win32api.GenerateConsoleCtrlEvent(win32con.CTRL_C_EVENT, ep.pid)
            except KeyboardInterrupt:
                # The Ctrl-C event can bounce back to this process; ignore it.
                pass
    except Exception:
        # Best-effort shutdown: never let cleanup errors mask the real exit.
        pass
def terminate_child_process(ep):
    """Forcefully terminate child process *ep*, ignoring any failure.

    Used as the SIGTERM path where a graceful shutdown isn't required.
    """
    try:
        ep.terminate()
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate; a dead/invalid child is simply ignored.
        pass
def start_bazarr():
    # Launch the main Bazarr process unbuffered (-u), forwarding our own CLI
    # arguments; stdin is closed so the child never blocks on console input.
    script = [get_python_path(), "-u", os.path.normcase(os.path.join(dir_name, 'bazarr', 'main.py'))] + sys.argv[1:]
    ep = subprocess.Popen(script, stdout=None, stderr=None, stdin=subprocess.DEVNULL)
    # Ensure the child is stopped when this wrapper process exits normally.
    atexit.register(end_child_process, ep=ep)
    signal.signal(signal.SIGTERM, lambda signal_no, frame: terminate_child_process(ep))
    # NOTE(review): this second registration overwrites the handler installed
    # just above — looks like a merge artifact; confirm which SIGTERM handler
    # (terminate vs. graceful end) is actually intended.
    signal.signal(signal.SIGTERM, lambda signal_no, frame: end_child_process(ep))
def check_status():

@ -129,8 +129,8 @@ class EpisodesBlacklist(Resource):
delete_request_parser = reqparse.RequestParser()
delete_request_parser.add_argument('all', type=str, required=False, help='Empty episodes subtitles blacklist')
delete_request_parser.add_argument('provider', type=str, required=True, help='Provider name')
delete_request_parser.add_argument('subs_id', type=str, required=True, help='Subtitles ID')
delete_request_parser.add_argument('provider', type=str, required=False, help='Provider name')
delete_request_parser.add_argument('subs_id', type=str, required=False, help='Subtitles ID')
@authenticate
@api_ns_episodes_blacklist.doc(parser=delete_request_parser)

@ -122,8 +122,8 @@ class MoviesBlacklist(Resource):
delete_request_parser = reqparse.RequestParser()
delete_request_parser.add_argument('all', type=str, required=False, help='Empty movies subtitles blacklist')
delete_request_parser.add_argument('provider', type=str, required=True, help='Provider name')
delete_request_parser.add_argument('subs_id', type=str, required=True, help='Subtitles ID')
delete_request_parser.add_argument('provider', type=str, required=False, help='Provider name')
delete_request_parser.add_argument('subs_id', type=str, required=False, help='Subtitles ID')
@authenticate
@api_ns_movies_blacklist.doc(parser=delete_request_parser)

@ -4,17 +4,18 @@ import os
import sys
import gc
from flask_restx import Resource, Namespace, reqparse
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableEpisodes, TableMovies, database, select
from languages.get_languages import alpha3_from_alpha2
from utilities.path_mappings import path_mappings
from utilities.video_analyzer import subtitles_sync_references
from subtitles.tools.subsyncer import SubSyncer
from subtitles.tools.translate import translate_subtitles_file
from subtitles.tools.mods import subtitles_apply_mods
from subtitles.indexer.series import store_subtitles
from subtitles.indexer.movies import store_subtitles_movie
from app.config import settings
from app.config import settings, empty_values
from app.event_handler import event_stream
from ..utils import authenticate
@ -25,6 +26,56 @@ api_ns_subtitles = Namespace('Subtitles', description='Apply mods/tools on exter
@api_ns_subtitles.route('subtitles')
class Subtitles(Resource):
get_request_parser = reqparse.RequestParser()
get_request_parser.add_argument('subtitlesPath', type=str, required=True, help='External subtitles file path')
get_request_parser.add_argument('sonarrEpisodeId', type=int, required=False, help='Sonarr Episode ID')
get_request_parser.add_argument('radarrMovieId', type=int, required=False, help='Radarr Movie ID')
audio_tracks_data_model = api_ns_subtitles.model('audio_tracks_data_model', {
'stream': fields.String(),
'name': fields.String(),
'language': fields.String(),
})
embedded_subtitles_data_model = api_ns_subtitles.model('embedded_subtitles_data_model', {
'stream': fields.String(),
'name': fields.String(),
'language': fields.String(),
'forced': fields.Boolean(),
'hearing_impaired': fields.Boolean(),
})
external_subtitles_data_model = api_ns_subtitles.model('external_subtitles_data_model', {
'name': fields.String(),
'path': fields.String(),
'language': fields.String(),
'forced': fields.Boolean(),
'hearing_impaired': fields.Boolean(),
})
get_response_model = api_ns_subtitles.model('SubtitlesGetResponse', {
'audio_tracks': fields.Nested(audio_tracks_data_model),
'embedded_subtitles_tracks': fields.Nested(embedded_subtitles_data_model),
'external_subtitles_tracks': fields.Nested(external_subtitles_data_model),
})
@authenticate
@api_ns_subtitles.response(200, 'Success')
@api_ns_subtitles.response(401, 'Not Authenticated')
@api_ns_subtitles.doc(parser=get_request_parser)
def get(self):
    """Return available audio and embedded subtitles tracks with external subtitles. Used for manual subsync
    modal"""
    req = self.get_request_parser.parse_args()
    references = subtitles_sync_references(
        subtitles_path=req.get('subtitlesPath'),
        sonarr_episode_id=req.get('sonarrEpisodeId', None),
        radarr_movie_id=req.get('radarrMovieId', None))
    return marshal(references, self.get_response_model, envelope='data')
patch_request_parser = reqparse.RequestParser()
patch_request_parser.add_argument('action', type=str, required=True,
help='Action from ["sync", "translate" or mods name]')
@ -32,10 +83,20 @@ class Subtitles(Resource):
patch_request_parser.add_argument('path', type=str, required=True, help='Subtitles file path')
patch_request_parser.add_argument('type', type=str, required=True, help='Media type from ["episode", "movie"]')
patch_request_parser.add_argument('id', type=int, required=True, help='Media ID (episodeId, radarrId)')
patch_request_parser.add_argument('forced', type=str, required=False, help='Forced subtitles from ["True", "False"]')
patch_request_parser.add_argument('forced', type=str, required=False,
help='Forced subtitles from ["True", "False"]')
patch_request_parser.add_argument('hi', type=str, required=False, help='HI subtitles from ["True", "False"]')
patch_request_parser.add_argument('original_format', type=str, required=False,
help='Use original subtitles format from ["True", "False"]')
patch_request_parser.add_argument('reference', type=str, required=False,
help='Reference to use for sync from video file track number (a:0) or some '
'subtitles file path')
patch_request_parser.add_argument('max_offset_seconds', type=str, required=False,
help='Maximum offset seconds to allow')
patch_request_parser.add_argument('no_fix_framerate', type=str, required=False,
help='Don\'t try to fix framerate from ["True", "False"]')
patch_request_parser.add_argument('gss', type=str, required=False,
help='Use Golden-Section Search from ["True", "False"]')
@authenticate
@api_ns_subtitles.doc(parser=patch_request_parser)
@ -79,19 +140,30 @@ class Subtitles(Resource):
video_path = path_mappings.path_replace_movie(metadata.path)
if action == 'sync':
sync_kwargs = {
'video_path': video_path,
'srt_path': subtitles_path,
'srt_lang': language,
'reference': args.get('reference') if args.get('reference') not in empty_values else video_path,
'max_offset_seconds': args.get('max_offset_seconds') if args.get('max_offset_seconds') not in
empty_values else str(settings.subsync.max_offset_seconds),
'no_fix_framerate': args.get('no_fix_framerate') == 'True',
'gss': args.get('gss') == 'True',
}
subsync = SubSyncer()
if media_type == 'episode':
subsync.sync(video_path=video_path, srt_path=subtitles_path,
srt_lang=language, media_type='series', sonarr_series_id=metadata.sonarrSeriesId,
sonarr_episode_id=id)
else:
try:
subsync.sync(video_path=video_path, srt_path=subtitles_path,
srt_lang=language, media_type='movies', radarr_id=id)
except OSError:
return 'Unable to edit subtitles file. Check logs.', 409
del subsync
gc.collect()
try:
if media_type == 'episode':
sync_kwargs['sonarr_series_id'] = metadata.sonarrSeriesId
sync_kwargs['sonarr_episode_id'] = id
else:
sync_kwargs['radarr_id'] = id
subsync.sync(**sync_kwargs)
except OSError:
return 'Unable to edit subtitles file. Check logs.', 409
finally:
del subsync
gc.collect()
elif action == 'translate':
from_language = subtitles_lang_from_filename(subtitles_path)
dest_language = language

@ -35,7 +35,7 @@ def create_app():
app.config["DEBUG"] = False
socketio.init_app(app, path=f'{base_url.rstrip("/")}/api/socket.io', cors_allowed_origins='*',
async_mode='threading', allow_upgrades=False, transports='polling')
async_mode='threading', allow_upgrades=False, transports='polling', engineio_logger=False)
@app.errorhandler(404)
def page_not_found(_):

@ -34,6 +34,9 @@ def validate_ip_address(ip_string):
return False
ONE_HUNDRED_YEARS_IN_MINUTES = 52560000
ONE_HUNDRED_YEARS_IN_HOURS = 876000
class Validator(OriginalValidator):
# Give the ability to personalize messages sent by the original dynaconf Validator class.
default_messages = MappingProxyType(
@ -99,14 +102,15 @@ validators = [
Validator('general.subfolder', must_exist=True, default='current', is_type_of=str),
Validator('general.subfolder_custom', must_exist=True, default='', is_type_of=str),
Validator('general.upgrade_subs', must_exist=True, default=True, is_type_of=bool),
Validator('general.upgrade_frequency', must_exist=True, default=12, is_type_of=int, is_in=[6, 12, 24]),
Validator('general.upgrade_frequency', must_exist=True, default=12, is_type_of=int,
is_in=[6, 12, 24, ONE_HUNDRED_YEARS_IN_HOURS]),
Validator('general.days_to_upgrade_subs', must_exist=True, default=7, is_type_of=int, gte=0, lte=30),
Validator('general.upgrade_manual', must_exist=True, default=True, is_type_of=bool),
Validator('general.anti_captcha_provider', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'anti-captcha', 'death-by-captcha']),
Validator('general.wanted_search_frequency', must_exist=True, default=6, is_type_of=int, is_in=[6, 12, 24]),
Validator('general.wanted_search_frequency', must_exist=True, default=6, is_type_of=int, is_in=[6, 12, 24, ONE_HUNDRED_YEARS_IN_HOURS]),
Validator('general.wanted_search_frequency_movie', must_exist=True, default=6, is_type_of=int,
is_in=[6, 12, 24]),
is_in=[6, 12, 24, ONE_HUNDRED_YEARS_IN_HOURS]),
Validator('general.subzero_mods', must_exist=True, default='', is_type_of=str),
Validator('general.dont_notify_manual_actions', must_exist=True, default=False, is_type_of=bool),
Validator('general.hi_extension', must_exist=True, default='hi', is_type_of=str, is_in=['hi', 'cc', 'sdh']),
@ -122,8 +126,8 @@ validators = [
Validator('auth.apikey', must_exist=True, default=hexlify(os.urandom(16)).decode(), is_type_of=str),
Validator('auth.type', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'basic', 'form']),
Validator('auth.username', must_exist=True, default='', is_type_of=str),
Validator('auth.password', must_exist=True, default='', is_type_of=str),
Validator('auth.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('auth.password', must_exist=True, default='', is_type_of=str, cast=str),
# cors section
Validator('cors.enabled', must_exist=True, default=False, is_type_of=bool),
@ -151,14 +155,14 @@ validators = [
Validator('sonarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('sonarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.series_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440]),
Validator('sonarr.episodes_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440]),
is_in=[15, 60, 180, 360, 720, 1440, ONE_HUNDRED_YEARS_IN_MINUTES]),
Validator('sonarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
Validator('sonarr.excluded_series_types', must_exist=True, default=[], is_type_of=list),
Validator('sonarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
Validator('sonarr.exclude_season_zero', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.sync_only_monitored_series', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.sync_only_monitored_episodes', must_exist=True, default=False, is_type_of=bool),
# radarr section
Validator('radarr.ip', must_exist=True, default='127.0.0.1', is_type_of=str),
@ -174,23 +178,24 @@ validators = [
Validator('radarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('radarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('radarr.movies_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440]),
is_in=[15, 60, 180, 360, 720, 1440, ONE_HUNDRED_YEARS_IN_MINUTES]),
Validator('radarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
Validator('radarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
Validator('radarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
Validator('radarr.sync_only_monitored_movies', must_exist=True, default=False, is_type_of=bool),
# proxy section
Validator('proxy.type', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'socks5', 'http']),
Validator('proxy.url', must_exist=True, default='', is_type_of=str),
Validator('proxy.port', must_exist=True, default='', is_type_of=(str, int)),
Validator('proxy.username', must_exist=True, default='', is_type_of=str),
Validator('proxy.password', must_exist=True, default='', is_type_of=str),
Validator('proxy.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('proxy.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('proxy.exclude', must_exist=True, default=["localhost", "127.0.0.1"], is_type_of=list),
# opensubtitles.org section
Validator('opensubtitles.username', must_exist=True, default='', is_type_of=str),
Validator('opensubtitles.password', must_exist=True, default='', is_type_of=str),
Validator('opensubtitles.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('opensubtitles.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('opensubtitles.use_tag_search', must_exist=True, default=False, is_type_of=bool),
Validator('opensubtitles.vip', must_exist=True, default=False, is_type_of=bool),
Validator('opensubtitles.ssl', must_exist=True, default=False, is_type_of=bool),
@ -198,13 +203,14 @@ validators = [
Validator('opensubtitles.skip_wrong_fps', must_exist=True, default=False, is_type_of=bool),
# opensubtitles.com section
Validator('opensubtitlescom.username', must_exist=True, default='', is_type_of=str),
Validator('opensubtitlescom.password', must_exist=True, default='', is_type_of=str),
Validator('opensubtitlescom.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('opensubtitlescom.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('opensubtitlescom.use_hash', must_exist=True, default=True, is_type_of=bool),
Validator('opensubtitlescom.include_ai_translated', must_exist=True, default=False, is_type_of=bool),
# addic7ed section
Validator('addic7ed.username', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.password', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('addic7ed.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('addic7ed.cookies', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.user_agent', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.vip', must_exist=True, default=False, is_type_of=bool),
@ -217,57 +223,60 @@ validators = [
Validator('subf2m.user_agent', must_exist=True, default='', is_type_of=str),
# hdbits section
Validator('hdbits.username', must_exist=True, default='', is_type_of=str),
Validator('hdbits.passkey', must_exist=True, default='', is_type_of=str),
Validator('hdbits.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('hdbits.passkey', must_exist=True, default='', is_type_of=str, cast=str),
# whisperai section
Validator('whisperai.endpoint', must_exist=True, default='http://127.0.0.1:9000', is_type_of=str),
Validator('whisperai.response', must_exist=True, default=5, is_type_of=int, gte=1),
Validator('whisperai.timeout', must_exist=True, default=3600, is_type_of=int, gte=1),
Validator('whisperai.loglevel', must_exist=True, default='INFO', is_type_of=str,
is_in=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']),
# legendasdivx section
Validator('legendasdivx.username', must_exist=True, default='', is_type_of=str),
Validator('legendasdivx.password', must_exist=True, default='', is_type_of=str),
Validator('legendasdivx.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('legendasdivx.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('legendasdivx.skip_wrong_fps', must_exist=True, default=False, is_type_of=bool),
# ktuvit section
Validator('ktuvit.email', must_exist=True, default='', is_type_of=str),
Validator('ktuvit.hashed_password', must_exist=True, default='', is_type_of=str),
Validator('ktuvit.hashed_password', must_exist=True, default='', is_type_of=str, cast=str),
# xsubs section
Validator('xsubs.username', must_exist=True, default='', is_type_of=str),
Validator('xsubs.password', must_exist=True, default='', is_type_of=str),
Validator('xsubs.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('xsubs.password', must_exist=True, default='', is_type_of=str, cast=str),
# assrt section
Validator('assrt.token', must_exist=True, default='', is_type_of=str),
Validator('assrt.token', must_exist=True, default='', is_type_of=str, cast=str),
# anticaptcha section
Validator('anticaptcha.anti_captcha_key', must_exist=True, default='', is_type_of=str),
# deathbycaptcha section
Validator('deathbycaptcha.username', must_exist=True, default='', is_type_of=str),
Validator('deathbycaptcha.password', must_exist=True, default='', is_type_of=str),
Validator('deathbycaptcha.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('deathbycaptcha.password', must_exist=True, default='', is_type_of=str, cast=str),
# napisy24 section
Validator('napisy24.username', must_exist=True, default='', is_type_of=str),
Validator('napisy24.password', must_exist=True, default='', is_type_of=str),
Validator('napisy24.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('napisy24.password', must_exist=True, default='', is_type_of=str, cast=str),
# subscene section
Validator('subscene.username', must_exist=True, default='', is_type_of=str),
Validator('subscene.password', must_exist=True, default='', is_type_of=str),
Validator('subscene.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('subscene.password', must_exist=True, default='', is_type_of=str, cast=str),
# betaseries section
Validator('betaseries.token', must_exist=True, default='', is_type_of=str),
Validator('betaseries.token', must_exist=True, default='', is_type_of=str, cast=str),
# analytics section
Validator('analytics.enabled', must_exist=True, default=True, is_type_of=bool),
# titlovi section
Validator('titlovi.username', must_exist=True, default='', is_type_of=str),
Validator('titlovi.password', must_exist=True, default='', is_type_of=str),
Validator('titlovi.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('titlovi.password', must_exist=True, default='', is_type_of=str, cast=str),
# titulky section
Validator('titulky.username', must_exist=True, default='', is_type_of=str),
Validator('titulky.password', must_exist=True, default='', is_type_of=str),
Validator('titulky.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('titulky.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('titulky.approved_only', must_exist=True, default=False, is_type_of=bool),
# embeddedsubtitles section
@ -277,10 +286,10 @@ validators = [
Validator('embeddedsubtitles.unknown_as_english', must_exist=True, default=False, is_type_of=bool),
# karagarga section
Validator('karagarga.username', must_exist=True, default='', is_type_of=str),
Validator('karagarga.password', must_exist=True, default='', is_type_of=str),
Validator('karagarga.f_username', must_exist=True, default='', is_type_of=str),
Validator('karagarga.f_password', must_exist=True, default='', is_type_of=str),
Validator('karagarga.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('karagarga.password', must_exist=True, default='', is_type_of=str, cast=str),
Validator('karagarga.f_username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('karagarga.f_password', must_exist=True, default='', is_type_of=str, cast=str),
# subsync section
Validator('subsync.use_subsync', must_exist=True, default=False, is_type_of=bool),
@ -290,6 +299,13 @@ validators = [
Validator('subsync.subsync_movie_threshold', must_exist=True, default=70, is_type_of=int, gte=0, lte=100),
Validator('subsync.debug', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.force_audio', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.checker', must_exist=True, default={}, is_type_of=dict),
Validator('subsync.checker.blacklisted_providers', must_exist=True, default=[], is_type_of=list),
Validator('subsync.checker.blacklisted_languages', must_exist=True, default=[], is_type_of=list),
Validator('subsync.no_fix_framerate', must_exist=True, default=True, is_type_of=bool),
Validator('subsync.gss', must_exist=True, default=True, is_type_of=bool),
Validator('subsync.max_offset_seconds', must_exist=True, default=60, is_type_of=int,
is_in=[60, 120, 300, 600]),
# series_scores section
Validator('series_scores.hash', must_exist=True, default=359, is_type_of=int),
@ -323,8 +339,8 @@ validators = [
Validator('postgresql.host', must_exist=True, default='localhost', is_type_of=str),
Validator('postgresql.port', must_exist=True, default=5432, is_type_of=int, gte=1, lte=65535),
Validator('postgresql.database', must_exist=True, default='', is_type_of=str),
Validator('postgresql.username', must_exist=True, default='', is_type_of=str),
Validator('postgresql.password', must_exist=True, default='', is_type_of=str),
Validator('postgresql.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('postgresql.password', must_exist=True, default='', is_type_of=str, cast=str),
]
@ -399,7 +415,9 @@ array_keys = ['excluded_tags',
'enabled_providers',
'path_mappings',
'path_mappings_movie',
'language_equals']
'language_equals',
'blacklisted_languages',
'blacklisted_providers']
empty_values = ['', 'None', 'null', 'undefined', None, []]
@ -408,8 +426,6 @@ str_keys = ['chmod']
# Increase Sonarr and Radarr sync interval since we now use SignalR feed to update in real time
if settings.sonarr.series_sync < 15:
settings.sonarr.series_sync = 60
if settings.sonarr.episodes_sync < 15:
settings.sonarr.episodes_sync = 60
if settings.radarr.movies_sync < 15:
settings.radarr.movies_sync = 60
@ -519,7 +535,7 @@ def save_settings(settings_items):
if key == 'settings-auth-password':
if value != settings.auth.password and value is not None:
value = hashlib.md5(value.encode('utf-8')).hexdigest()
value = hashlib.md5(f"{value}".encode('utf-8')).hexdigest()
if key == 'settings-general-debug':
configure_debug = True
@ -533,7 +549,7 @@ def save_settings(settings_items):
if key in ['update_schedule', 'settings-general-use_sonarr', 'settings-general-use_radarr',
'settings-general-auto_update', 'settings-general-upgrade_subs',
'settings-sonarr-series_sync', 'settings-sonarr-episodes_sync', 'settings-radarr-movies_sync',
'settings-sonarr-series_sync', 'settings-radarr-movies_sync',
'settings-sonarr-full_update', 'settings-sonarr-full_update_day', 'settings-sonarr-full_update_hour',
'settings-radarr-full_update', 'settings-radarr-full_update_day', 'settings-radarr-full_update_hour',
'settings-general-wanted_search_frequency', 'settings-general-wanted_search_frequency_movie',
@ -627,7 +643,10 @@ def save_settings(settings_items):
reset_throttled_providers(only_auth_or_conf_error=True)
if settings_keys[0] == 'settings':
settings[settings_keys[1]][settings_keys[2]] = value
if len(settings_keys) == 3:
settings[settings_keys[1]][settings_keys[2]] = value
elif len(settings_keys) == 4:
settings[settings_keys[1]][settings_keys[2]][settings_keys[3]] = value
if settings_keys[0] == 'subzero':
mod = settings_keys[1]
@ -774,3 +793,31 @@ def configure_proxy_func():
def get_scores():
settings = get_settings()
return {"movie": settings["movie_scores"], "episode": settings["series_scores"]}
def sync_checker(subtitle):
    """Decide whether *subtitle* should be auto-synced.

    Takes a single Subtitle argument and consults the subsync checker
    settings; returns True when every inner check passes.
    """
    logging.debug("Checker data [%s] for %s", settings.subsync.checker, subtitle)

    bl_providers = settings.subsync.checker.blacklisted_providers

    # TODO
    # bl_languages = settings.subsync.checker.blacklisted_languages

    verdicts = set()

    # You can add more inner checkers. The following is a very basic one for
    # providers, but you can make your own functions, etc to handle more
    # complex stuff. You have subtitle data to compare.
    verdicts.add(subtitle.provider_name not in bl_providers)

    if all(verdicts):
        logging.debug("BAZARR Sync checker passed.")
        return True
    else:
        logging.debug("BAZARR Sync checker not passed. Won't sync.")
        return False

@ -5,6 +5,7 @@ import json
import logging
import os
import flask_migrate
import signal
from dogpile.cache import make_region
from datetime import datetime
@ -12,7 +13,7 @@ from datetime import datetime
from sqlalchemy import create_engine, inspect, DateTime, ForeignKey, Integer, LargeBinary, Text, func, text, BigInteger
# importing here to be indirectly imported in other modules later
from sqlalchemy import update, delete, select, func # noqa W0611
from sqlalchemy.orm import scoped_session, sessionmaker, mapped_column
from sqlalchemy.orm import scoped_session, sessionmaker, mapped_column, close_all_sessions
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import NullPool
@ -74,11 +75,18 @@ session_factory = sessionmaker(bind=engine)
database = scoped_session(session_factory)
def close_database():
    # Close every active SQLAlchemy session, then dispose of the engine's
    # connection pool so no database handles survive process shutdown.
    close_all_sessions()
    engine.dispose()
@atexit.register
def _stop_worker_threads():
    # At interpreter exit, drop the scoped-session registry for this thread.
    database.remove()


# On SIGTERM, tear down all sessions and dispose of the engine before dying.
signal.signal(signal.SIGTERM, lambda signal_no, frame: close_database())
Base = declarative_base()
metadata = Base.metadata

@ -98,6 +98,9 @@ def provider_throttle_map():
TooManyRequests: (datetime.timedelta(minutes=5), "5 minutes"),
IPAddressBlocked: (datetime.timedelta(hours=1), "1 hours"),
},
"titlovi": {
TooManyRequests: (datetime.timedelta(minutes=5), "5 minutes"),
},
"titulky": {
DownloadLimitExceeded: (
titulky_limit_reset_timedelta(),
@ -122,7 +125,7 @@ def provider_throttle_map():
PROVIDERS_FORCED_OFF = ["addic7ed", "tvsubtitles", "legendasdivx", "napiprojekt", "shooter",
"hosszupuska", "supersubtitles", "titlovi", "argenteam", "assrt", "subscene"]
"hosszupuska", "supersubtitles", "titlovi", "assrt", "subscene"]
throttle_count = {}
@ -240,6 +243,7 @@ def get_providers_auth():
'opensubtitlescom': {'username': settings.opensubtitlescom.username,
'password': settings.opensubtitlescom.password,
'use_hash': settings.opensubtitlescom.use_hash,
'include_ai_translated': settings.opensubtitlescom.include_ai_translated,
'api_key': 's38zmzVlW7IlYruWi7mHwDYl2SfMQoC1'
},
'podnapisi': {
@ -307,8 +311,10 @@ def get_providers_auth():
},
'whisperai': {
'endpoint': settings.whisperai.endpoint,
'response': settings.whisperai.response,
'timeout': settings.whisperai.timeout,
'ffmpeg_path': _FFMPEG_BINARY,
'loglevel': settings.whisperai.loglevel,
}
}

@ -55,6 +55,36 @@ class NoExceptionFormatter(logging.Formatter):
def formatException(self, record):
return ''
class UnwantedWaitressMessageFilter(logging.Filter):
    """Suppress noisy waitress log records about socket.io session drops."""

    # Pairs of (record message, matching exception string representations).
    # The three entries cover the different quoting styles the exception text
    # has been observed with.
    _UNWANTED = [
        ("Exception while serving /api/socket.io/",
         ['Session is disconnected', 'Session not found']),
        ("Exception while serving /api/socket.io/",
         ["'Session is disconnected'", "'Session not found'"]),
        ("Exception while serving /api/socket.io/",
         ['"Session is disconnected"', '"Session not found"']),
    ]

    def filter(self, record):
        """Return False (drop the record) only for known-noise exceptions."""
        if settings.general.debug:
            # no filtering in debug mode
            return True

        for message, exception_strings in self._UNWANTED:
            if record.msg == message:
                exception_tuple = record.exc_info
                if exception_tuple is not None and str(exception_tuple[1]) in exception_strings:
                    return False
        return True
def configure_logging(debug=False):
warnings.simplefilter('ignore', category=ResourceWarning)
@ -88,7 +118,7 @@ def configure_logging(debug=False):
fh = TimedRotatingFileHandler(os.path.join(args.config_dir, 'log/bazarr.log'), when="midnight", interval=1,
backupCount=7, delay=True, encoding='utf-8')
f = FileHandlerFormatter('%(asctime)s|%(levelname)-8s|%(name)-32s|%(message)s|',
'%d/%m/%Y %H:%M:%S')
'%Y-%m-%d %H:%M:%S')
fh.setFormatter(f)
fh.setLevel(log_level)
logger.addHandler(fh)
@ -129,6 +159,7 @@ def configure_logging(debug=False):
logging.getLogger("ga4mp.ga4mp").setLevel(logging.ERROR)
logging.getLogger("waitress").setLevel(logging.ERROR)
logging.getLogger("waitress").addFilter(UnwantedWaitressMessageFilter())
logging.getLogger("knowit").setLevel(logging.CRITICAL)
logging.getLogger("enzyme").setLevel(logging.CRITICAL)
logging.getLogger("guessit").setLevel(logging.WARNING)

@ -36,6 +36,20 @@ if not args.no_update:
else:
from .check_update import check_releases
from dateutil.relativedelta import relativedelta
# Sentinel strings shown in the UI for "effectively disabled" schedules.
NO_INTERVAL = "None"
NEVER_DATE = "Never"
ONE_YEAR_IN_SECONDS = 60 * 60 * 24 * 365


def a_long_time_from_now(job):
    """Return True when *job*'s next run is more than a year away.

    Such jobs are treated as effectively never running.
    """
    remaining = job.next_run_time - datetime.now(job.next_run_time.tzinfo)
    return remaining.total_seconds() > ONE_YEAR_IN_SECONDS


def in_a_century():
    """Return the calendar year one hundred years from today."""
    return (datetime.now() + relativedelta(years=100)).year
class Scheduler:
@ -106,7 +120,9 @@ class Scheduler:
('minute', 60),
('second', 1)
]
if seconds > ONE_YEAR_IN_SECONDS:
# more than a year is None
return NO_INTERVAL
strings = []
for period_name, period_seconds in periods:
if seconds > period_seconds:
@ -118,14 +134,11 @@ class Scheduler:
def get_time_from_cron(cron):
year = str(cron[0])
if year == "2100":
return "Never"
day = str(cron[4])
hour = str(cron[5])
if day == "*":
text = "everyday"
text = "every day"
else:
text = f"every {day_name[int(day)]}"
@ -136,12 +149,20 @@ class Scheduler:
task_list = []
for job in self.aps_scheduler.get_jobs():
next_run = 'Never'
next_run = NEVER_DATE
if job.next_run_time:
next_run = pretty.date(job.next_run_time.replace(tzinfo=None))
if isinstance(job.trigger, CronTrigger):
if job.next_run_time and str(job.trigger.__getstate__()['fields'][0]) != "2100":
if a_long_time_from_now(job):
# Never for IntervalTrigger jobs
next_run = NEVER_DATE
else:
next_run = pretty.date(job.next_run_time.replace(tzinfo=None))
if isinstance(job.trigger, CronTrigger):
if a_long_time_from_now(job):
# Never for CronTrigger jobs
next_run = NEVER_DATE
else:
if job.next_run_time:
next_run = pretty.date(job.next_run_time.replace(tzinfo=None))
if job.id in self.__running_tasks:
running = True
@ -149,13 +170,21 @@ class Scheduler:
running = False
if isinstance(job.trigger, IntervalTrigger):
interval = f"every {get_time_from_interval(job.trigger.__getstate__()['interval'])}"
interval = get_time_from_interval(job.trigger.__getstate__()['interval'])
if interval != NO_INTERVAL:
interval = f"every {interval}"
# else:
# interval = "100 Year Interval"
task_list.append({'name': job.name, 'interval': interval, 'next_run_in': next_run,
'next_run_time': next_run, 'job_id': job.id, 'job_running': running})
elif isinstance(job.trigger, CronTrigger):
task_list.append({'name': job.name, 'interval': get_time_from_cron(job.trigger.fields),
'next_run_in': next_run, 'next_run_time': next_run, 'job_id': job.id,
'job_running': running})
if a_long_time_from_now(job):
interval = NO_INTERVAL
else:
interval = get_time_from_cron(job.trigger.fields)
task_list.append({'name': job.name, 'interval': interval,
'next_run_in': next_run, 'next_run_time': next_run, 'job_id': job.id,
'job_running': running})
return task_list
@ -175,29 +204,23 @@ class Scheduler:
def __cache_cleanup_task(self):
self.aps_scheduler.add_job(cache_maintenance, IntervalTrigger(hours=24), max_instances=1, coalesce=True,
misfire_grace_time=15, id='cache_cleanup', name='Cache maintenance')
misfire_grace_time=15, id='cache_cleanup', name='Cache Maintenance')
def __check_health_task(self):
self.aps_scheduler.add_job(check_health, IntervalTrigger(hours=6), max_instances=1, coalesce=True,
misfire_grace_time=15, id='check_health', name='Check health')
misfire_grace_time=15, id='check_health', name='Check Health')
def __automatic_backup(self):
backup = settings.backup.frequency
if backup == "Daily":
self.aps_scheduler.add_job(
backup_to_zip, CronTrigger(hour=settings.backup.hour), max_instances=1, coalesce=True,
misfire_grace_time=15, id='backup', name='Backup database and configuration file',
replace_existing=True)
trigger = CronTrigger(hour=settings.backup.hour)
elif backup == "Weekly":
self.aps_scheduler.add_job(
backup_to_zip, CronTrigger(day_of_week=settings.backup.day, hour=settings.backup.hour),
max_instances=1, coalesce=True, misfire_grace_time=15, id='backup',
name='Backup database and configuration file', replace_existing=True)
trigger = CronTrigger(day_of_week=settings.backup.day, hour=settings.backup.hour)
elif backup == "Manually":
try:
self.aps_scheduler.remove_job(job_id='backup')
except JobLookupError:
pass
trigger = CronTrigger(year=in_a_century())
self.aps_scheduler.add_job(backup_to_zip, trigger,
max_instances=1, coalesce=True, misfire_grace_time=15, id='backup',
name='Backup Database and Configuration File', replace_existing=True)
def __sonarr_full_update_task(self):
if settings.general.use_sonarr:
@ -206,18 +229,18 @@ class Scheduler:
self.aps_scheduler.add_job(
update_all_episodes, CronTrigger(hour=settings.sonarr.full_update_hour), max_instances=1,
coalesce=True, misfire_grace_time=15, id='update_all_episodes',
name='Index all Episode Subtitles from disk', replace_existing=True)
name='Index All Episode Subtitles from Disk', replace_existing=True)
elif full_update == "Weekly":
self.aps_scheduler.add_job(
update_all_episodes,
CronTrigger(day_of_week=settings.sonarr.full_update_day, hour=settings.sonarr.full_update_hour),
max_instances=1, coalesce=True, misfire_grace_time=15, id='update_all_episodes',
name='Index all Episode Subtitles from disk', replace_existing=True)
name='Index All Episode Subtitles from Disk', replace_existing=True)
elif full_update == "Manually":
self.aps_scheduler.add_job(
update_all_episodes, CronTrigger(year='2100'), max_instances=1, coalesce=True,
update_all_episodes, CronTrigger(year=in_a_century()), max_instances=1, coalesce=True,
misfire_grace_time=15, id='update_all_episodes',
name='Index all Episode Subtitles from disk', replace_existing=True)
name='Index All Episode Subtitles from Disk', replace_existing=True)
def __radarr_full_update_task(self):
if settings.general.use_radarr:
@ -226,17 +249,17 @@ class Scheduler:
self.aps_scheduler.add_job(
update_all_movies, CronTrigger(hour=settings.radarr.full_update_hour), max_instances=1,
coalesce=True, misfire_grace_time=15,
id='update_all_movies', name='Index all Movie Subtitles from disk', replace_existing=True)
id='update_all_movies', name='Index All Movie Subtitles from Disk', replace_existing=True)
elif full_update == "Weekly":
self.aps_scheduler.add_job(
update_all_movies,
CronTrigger(day_of_week=settings.radarr.full_update_day, hour=settings.radarr.full_update_hour),
max_instances=1, coalesce=True, misfire_grace_time=15, id='update_all_movies',
name='Index all Movie Subtitles from disk', replace_existing=True)
name='Index All Movie Subtitles from Disk', replace_existing=True)
elif full_update == "Manually":
self.aps_scheduler.add_job(
update_all_movies, CronTrigger(year='2100'), max_instances=1, coalesce=True, misfire_grace_time=15,
id='update_all_movies', name='Index all Movie Subtitles from disk', replace_existing=True)
update_all_movies, CronTrigger(year=in_a_century()), max_instances=1, coalesce=True, misfire_grace_time=15,
id='update_all_movies', name='Index All Movie Subtitles from Disk', replace_existing=True)
def __update_bazarr_task(self):
if not args.no_update and os.environ["BAZARR_VERSION"] != '':
@ -248,7 +271,7 @@ class Scheduler:
misfire_grace_time=15, id='update_bazarr', name=task_name, replace_existing=True)
else:
self.aps_scheduler.add_job(
check_if_new_update, CronTrigger(year='2100'), hour=4, id='update_bazarr', name=task_name,
check_if_new_update, CronTrigger(year=in_a_century()), hour=4, id='update_bazarr', name=task_name,
replace_existing=True)
self.aps_scheduler.add_job(
check_releases, IntervalTrigger(hours=3), max_instances=1, coalesce=True, misfire_grace_time=15,
@ -269,13 +292,13 @@ class Scheduler:
wanted_search_missing_subtitles_series,
IntervalTrigger(hours=int(settings.general.wanted_search_frequency)), max_instances=1, coalesce=True,
misfire_grace_time=15, id='wanted_search_missing_subtitles_series', replace_existing=True,
name='Search for wanted Series Subtitles')
name='Search for Missing Series Subtitles')
if settings.general.use_radarr:
self.aps_scheduler.add_job(
wanted_search_missing_subtitles_movies,
IntervalTrigger(hours=int(settings.general.wanted_search_frequency_movie)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='wanted_search_missing_subtitles_movies',
name='Search for wanted Movies Subtitles', replace_existing=True)
name='Search for Missing Movies Subtitles', replace_existing=True)
def __upgrade_subtitles_task(self):
if settings.general.upgrade_subs and \
@ -283,11 +306,19 @@ class Scheduler:
self.aps_scheduler.add_job(
upgrade_subtitles, IntervalTrigger(hours=int(settings.general.upgrade_frequency)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='upgrade_subtitles',
name='Upgrade previously downloaded Subtitles', replace_existing=True)
name='Upgrade Previously Downloaded Subtitles', replace_existing=True)
else:
try:
self.aps_scheduler.remove_job(job_id='upgrade_subtitles')
except JobLookupError:
pass
def __randomize_interval_task(self):
for job in self.aps_scheduler.get_jobs():
if isinstance(job.trigger, IntervalTrigger):
# do not randomize the Never jobs
if job.trigger.interval.total_seconds() > ONE_YEAR_IN_SECONDS:
continue
self.aps_scheduler.modify_job(job.id,
next_run_time=datetime.now(tz=self.timezone) +
timedelta(seconds=randrange(

@ -13,7 +13,7 @@ from api import api_bp
from .ui import ui_bp
from .get_args import args
from .config import settings, base_url
from .database import database
from .database import close_database
from .app import create_app
app = create_app()
@ -63,49 +63,40 @@ class Server:
self.shutdown()
def start(self):
logging.info(f'BAZARR is started and waiting for request on http://{self.server.effective_host}:'
f'{self.server.effective_port}')
try:
logging.info(f'BAZARR is started and waiting for request on http://{self.server.effective_host}:'
f'{self.server.effective_port}')
try:
self.server.run()
except Exception:
pass
except KeyboardInterrupt:
self.server.run()
except (KeyboardInterrupt, SystemExit):
self.shutdown()
except Exception:
pass
def shutdown(self):
try:
self.server.close()
stop_file = io.open(os.path.join(args.config_dir, "bazarr.stop"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot stop Waitress: {repr(e)}')
logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
else:
database.close()
try:
stop_file = io.open(os.path.join(args.config_dir, "bazarr.stop"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
else:
logging.info('Bazarr is being shutdown...')
stop_file.write(str(''))
stop_file.close()
os._exit(0)
logging.info('Bazarr is being shutdown...')
stop_file.write(str(''))
stop_file.close()
close_database()
self.server.close()
os._exit(0)
def restart(self):
try:
self.server.close()
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot stop Waitress: {repr(e)}')
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
else:
database.close()
try:
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
else:
logging.info('Bazarr is being restarted...')
restart_file.write(str(''))
restart_file.close()
os._exit(0)
logging.info('Bazarr is being restarted...')
restart_file.write(str(''))
restart_file.close()
close_database()
self.server.close()
os._exit(0)
webserver = Server()

@ -340,14 +340,20 @@ def consume_queue(queue):
data = queue.popleft()
except IndexError:
pass
except (KeyboardInterrupt, SystemExit):
break
else:
dispatcher(data)
sleep(0.1)
# start both queue consuming threads
threading.Thread(target=consume_queue, args=(sonarr_queue,)).start()
threading.Thread(target=consume_queue, args=(radarr_queue,)).start()
sonarr_queue_thread = threading.Thread(target=consume_queue, args=(sonarr_queue,))
sonarr_queue_thread.daemon = True
sonarr_queue_thread.start()
radarr_queue_thread = threading.Thread(target=consume_queue, args=(radarr_queue,))
radarr_queue_thread.daemon = True
radarr_queue_thread.start()
# instantiate proper SignalR client
sonarr_signalr_client = SonarrSignalrClientLegacy() if get_sonarr_info.version().startswith(('0.', '2.', '3.')) else \

@ -8,3 +8,6 @@ headers = {"User-Agent": os.environ["SZ_USER_AGENT"]}
# hearing-impaired detection regex:
# matches text wrapped in music/marker symbols (* ¶ ♫ ♪), or a bracketed span
# of 3+ chars such as "[DOOR SLAMS]"; the lookbehind excludes spans that end
# with an ASS alignment tag like "{\an8}"
hi_regex = re.compile(r'[*¶♫♪].{3,}[*¶♫♪]|[\[\(\{].{3,}[\]\)\}](?<!{\\an\d})')
# minimum file size for Bazarr to consider it a video
MINIMUM_VIDEO_SIZE = 20480  # bytes (20 KiB)

@ -77,6 +77,8 @@ def is_virtualenv():
# deploy requirements.txt
if not args.no_update:
try:
if os.name == 'nt':
import win32api, win32con # noqa E401
import lxml, numpy, webrtcvad, setuptools, PIL # noqa E401
except ImportError:
try:
@ -194,16 +196,28 @@ def init_binaries():
exe = get_binary("unar")
rarfile.UNAR_TOOL = exe
rarfile.UNRAR_TOOL = None
rarfile.tool_setup(unrar=False, unar=True, bsdtar=False, force=True)
rarfile.SEVENZIP_TOOL = None
rarfile.tool_setup(unrar=False, unar=True, bsdtar=False, sevenzip=False, force=True)
except (BinaryNotFound, rarfile.RarCannotExec):
try:
exe = get_binary("unrar")
rarfile.UNRAR_TOOL = exe
rarfile.UNAR_TOOL = None
rarfile.tool_setup(unrar=True, unar=False, bsdtar=False, force=True)
rarfile.SEVENZIP_TOOL = None
rarfile.tool_setup(unrar=True, unar=False, bsdtar=False, sevenzip=False, force=True)
except (BinaryNotFound, rarfile.RarCannotExec):
logging.exception("BAZARR requires a rar archive extraction utilities (unrar, unar) and it can't be found.")
raise BinaryNotFound
try:
exe = get_binary("7z")
rarfile.UNRAR_TOOL = None
rarfile.UNAR_TOOL = None
rarfile.SEVENZIP_TOOL = "7z"
rarfile.tool_setup(unrar=False, unar=False, bsdtar=False, sevenzip=True, force=True)
except (BinaryNotFound, rarfile.RarCannotExec):
logging.exception("BAZARR requires a rar archive extraction utilities (unrar, unar, 7zip) and it can't be found.")
raise BinaryNotFound
else:
logging.debug("Using 7zip from: %s", exe)
return exe
else:
logging.debug("Using UnRAR from: %s", exe)
return exe

@ -24,7 +24,9 @@ class CustomLanguage:
_possible_matches = ("pt-br", "pob", "pb", "brazilian", "brasil", "brazil")
_extensions = (".pt-br", ".pob", ".pb")
_extensions_forced = (".pt-br.forced", ".pob.forced", ".pb.forced")
_extensions_hi = (".pt-br.hi", ".pob.hi", ".pb.hi")
_extensions_hi = (".pt-br.hi", ".pob.hi", ".pb.hi",
".pt-br.cc", ".pob.cc", ".pb.cc",
".pt-br.sdh", ".pob.sdh", ".pb.sdh")
def subzero_language(self):
return Language(self.official_alpha3, self.iso)
@ -45,7 +47,7 @@ class CustomLanguage:
@classmethod
def register(cls, table):
"Register the custom language subclasses in the database."
"""Register the custom language subclasses in the database."""
for sub in cls.__subclasses__():
database.execute(
@ -107,6 +109,23 @@ class BrazilianPortuguese(CustomLanguage):
pass
class Portuguese(CustomLanguage):
alpha2 = "pt"
alpha3 = "por"
language = "pt-PT"
official_alpha2 = "pt"
official_alpha3 = "por"
name = "Portuguese"
iso = "PT"
_scripts = []
_possible_matches = ("pt-pt", "por", "pt")
_extensions = (".pt-pt", ".por", ".pt")
_extensions_forced = (".pt-pt.forced", ".por.forced", ".pt.forced")
_extensions_hi = (".pt-pt.hi", ".por.hi", ".pt.hi",
".pt-pt.cc", ".por.cc", ".pt.cc",
".pt-pt.sdh", ".por.sdh", ".pt.sdh")
class ChineseTraditional(CustomLanguage):
alpha2 = "zt"
alpha3 = "zht"
@ -119,67 +138,27 @@ class ChineseTraditional(CustomLanguage):
# We'll use literals for now
_scripts = ("Hant",)
_extensions = (
".cht",
".tc",
".zh-tw",
".zht",
".zh-hant",
".zhhant",
".zh_hant",
".hant",
".big5",
".traditional",
".cht", ".tc", ".zh-tw", ".zht", ".zh-hant", ".zhhant", ".zh_hant", ".hant", ".big5", ".traditional",
)
_extensions_forced = (
".cht.forced",
".tc.forced",
".zht.forced",
"hant.forced",
".big5.forced",
"繁體中文.forced",
"雙語.forced",
".cht.forced", ".tc.forced", ".zht.forced", "hant.forced", ".big5.forced", "繁體中文.forced", "雙語.forced",
".zh-tw.forced",
)
_extensions_hi = (
".cht.hi",
".tc.hi",
".zht.hi",
"hant.hi",
".big5.hi",
"繁體中文.hi",
"雙語.hi",
".zh-tw.hi",
".cht.hi", ".tc.hi", ".zht.hi", "hant.hi", ".big5.hi", "繁體中文.hi", "雙語.hi", ".zh-tw.hi",
)
_extensions_fuzzy = ("", "雙語")
_extensions_disamb_fuzzy = ("", "双语")
_extensions_disamb = (
".chs",
".sc",
".zhs",
".zh-hans",
".hans",
".zh_hans",
".zhhans",
".gb",
".simplified",
".chs", ".sc", ".zhs", ".zh-hans", ".hans", ".zh_hans", ".zhhans", ".gb", ".simplified",
)
_extensions_disamb_forced = (
".chs.forced",
".sc.forced",
".zhs.forced",
"hans.forced",
".gb.forced",
"简体中文.forced",
"双语.forced",
".chs.forced", ".sc.forced", ".zhs.forced", "hans.forced", ".gb.forced", "简体中文.forced", "双语.forced",
)
_extensions_disamb_hi = (
".chs.hi",
".sc.hi",
".zhs.hi",
"hans.hi",
".gb.hi",
"简体中文.hi",
"双语.hi",
".chs.hi", ".sc.hi", ".zhs.hi", "hans.hi", ".gb.hi", "简体中文.hi", "双语.hi",
".chs.cc", ".sc.cc", ".zhs.cc", "hans.cc", ".gb.cc", "简体中文.cc", "双语.cc",
".chs.sdh", ".sc.sdh", ".zhs.sdh", "hans.sdh", ".gb.sdh", "简体中文.sdh", "双语.sdh",
)
@classmethod
@ -231,31 +210,14 @@ class LatinAmericanSpanish(CustomLanguage):
iso = "MX" # Not fair, but ok
_scripts = ("419",)
_possible_matches = (
"es-la",
"spa-la",
"spl",
"mx",
"latin",
"mexic",
"argent",
"latam",
"es-la", "spa-la", "spl", "mx", "latin", "mexic", "argent", "latam",
)
_extensions = (".es-la", ".spl", ".spa-la", ".ea", ".es-mx", ".lat", ".es.ar")
_extensions_forced = (
".es-la.forced",
".spl.forced",
".spa-la.forced",
".ea.forced",
".es-mx.forced",
".lat.forced",
".es.ar.forced",
".es-la.forced", ".spl.forced", ".spa-la.forced", ".ea.forced", ".es-mx.forced", ".lat.forced", ".es.ar.forced",
)
_extensions_hi = (
".es-la.hi",
".spl.hi",
".spa-la.hi",
".ea.hi",
".es-mx.hi",
".lat.hi",
".es.ar.hi",
".es-la.hi", ".spl.hi", ".spa-la.hi", ".ea.hi", ".es-mx.hi", ".lat.hi", ".es.ar.hi",
".es-la.cc", ".spl.cc", ".spa-la.cc", ".ea.cc", ".es-mx.cc", ".lat.cc", ".es.ar.cc",
".es-la.sdh", ".spl.sdh", ".spa-la.sdh", ".ea.sdh", ".es-mx.sdh", ".lat.sdh", ".es.ar.sdh",
)

@ -1,8 +1,6 @@
# coding=utf-8
import os
import io
import logging
from threading import Thread
@ -75,9 +73,15 @@ update_notifier()
if not args.no_signalr:
if settings.general.use_sonarr:
Thread(target=sonarr_signalr_client.start).start()
sonarr_signalr_thread = Thread(target=sonarr_signalr_client.start)
sonarr_signalr_thread.daemon = True
sonarr_signalr_thread.start()
sonarr_signalr_thread.join()
if settings.general.use_radarr:
Thread(target=radarr_signalr_client.start).start()
radarr_signalr_thread = Thread(target=radarr_signalr_client.start)
radarr_signalr_thread.daemon = True
radarr_signalr_thread.start()
radarr_signalr_thread.join()
if __name__ == "__main__":

@ -29,7 +29,7 @@ def blacklist_log_movie(radarr_id, provider, subs_id, language):
def blacklist_delete_movie(provider, subs_id):
database.execute(
delete(TableBlacklistMovie)
.where((TableBlacklistMovie.provider == provider) and (TableBlacklistMovie.subs_id == subs_id)))
.where((TableBlacklistMovie.provider == provider) & (TableBlacklistMovie.subs_id == subs_id)))
event_stream(type='movie-blacklist', action='delete')

@ -2,6 +2,7 @@
import os
import logging
from constants import MINIMUM_VIDEO_SIZE
from sqlalchemy.exc import IntegrityError
@ -16,6 +17,13 @@ from app.event_handler import event_stream, show_progress, hide_progress
from .utils import get_profile_list, get_tags, get_movies_from_radarr_api
from .parser import movieParser
# map between booleans and strings in DB
bool_map = {"True": True, "False": False}

FEATURE_PREFIX = "SYNC_MOVIES "


def trace(message):
    """Emit a movie-sync debug line, but only when debug logging is on."""
    if settings.general.debug:
        logging.debug(f"{FEATURE_PREFIX}{message}")
def update_all_movies():
movies_full_scan_subtitles()
@ -45,6 +53,16 @@ def update_movie(updated_movie, send_event):
event_stream(type='movie', action='update', payload=updated_movie['radarrId'])
def get_movie_monitored_status(movie_id):
    """Return the monitored flag stored in our DB for a given tmdbId.

    Movies not present in the DB yet are reported as monitored (True) so
    that newly seen movies are not skipped by the monitored-only sync.
    """
    row = database.execute(
        select(TableMovies.monitored)
        .where(TableMovies.tmdbId == movie_id)).first()
    if row is None:
        # not synced into our DB yet; treat as monitored
        return True
    # DB stores the flag as the strings "True"/"False"
    return bool_map[row[0]]
# Insert new movies in DB
def add_movie(added_movie, send_event):
try:
@ -104,12 +122,12 @@ def update_movies(send_event=True):
current_movies_radarr = [str(movie['tmdbId']) for movie in movies if movie['hasFile'] and
'movieFile' in movie and
(movie['movieFile']['size'] > 20480 or
get_movie_file_size_from_db(movie['movieFile']['path']) > 20480)]
(movie['movieFile']['size'] > MINIMUM_VIDEO_SIZE or
get_movie_file_size_from_db(movie['movieFile']['path']) > MINIMUM_VIDEO_SIZE)]
# Remove old movies from DB
# Remove movies from DB that either no longer exist in Radarr or exist and Radarr says do not have a movie file
movies_to_delete = list(set(current_movies_id_db) - set(current_movies_radarr))
movies_deleted = []
if len(movies_to_delete):
try:
database.execute(delete(TableMovies).where(TableMovies.tmdbId.in_(movies_to_delete)))
@ -117,11 +135,19 @@ def update_movies(send_event=True):
logging.error(f"BAZARR cannot delete movies because of {e}")
else:
for removed_movie in movies_to_delete:
movies_deleted.append(removed_movie)
if send_event:
event_stream(type='movie', action='delete', payload=removed_movie)
# Build new and updated movies
# Add new movies and update movies that Radarr says have media files
# Any new movies added to Radarr that don't have media files yet will not be added to DB
movies_count = len(movies)
sync_monitored = settings.radarr.sync_only_monitored_movies
if sync_monitored:
skipped_count = 0
files_missing = 0
movies_added = []
movies_updated = []
for i, movie in enumerate(movies):
if send_event:
show_progress(id='movies_progress',
@ -129,12 +155,22 @@ def update_movies(send_event=True):
name=movie['title'],
value=i,
count=movies_count)
# Only movies that Radarr says have files downloaded will be kept up to date in the DB
if movie['hasFile'] is True:
if 'movieFile' in movie:
if (movie['movieFile']['size'] > 20480 or
get_movie_file_size_from_db(movie['movieFile']['path']) > 20480):
# Add movies in radarr to current movies list
if sync_monitored:
if get_movie_monitored_status(movie['tmdbId']) != movie['monitored']:
# monitored status is not the same as our DB
trace(f"{i}: (Monitor Status Mismatch) {movie['title']}")
elif not movie['monitored']:
trace(f"{i}: (Skipped Unmonitored) {movie['title']}")
skipped_count += 1
continue
if (movie['movieFile']['size'] > MINIMUM_VIDEO_SIZE or
get_movie_file_size_from_db(movie['movieFile']['path']) > MINIMUM_VIDEO_SIZE):
# Add/update movies from Radarr that have a movie file to current movies list
trace(f"{i}: (Processing) {movie['title']}")
if str(movie['tmdbId']) in current_movies_id_db:
parsed_movie = movieParser(movie, action='update',
tags_dict=tagsDict,
@ -142,16 +178,29 @@ def update_movies(send_event=True):
audio_profiles=audio_profiles)
if not any([parsed_movie.items() <= x for x in current_movies_db_kv]):
update_movie(parsed_movie, send_event)
movies_updated.append(parsed_movie['title'])
else:
parsed_movie = movieParser(movie, action='insert',
tags_dict=tagsDict,
movie_default_profile=movie_default_profile,
audio_profiles=audio_profiles)
add_movie(parsed_movie, send_event)
movies_added.append(parsed_movie['title'])
else:
trace(f"{i}: (Skipped File Missing) {movie['title']}")
files_missing += 1
if send_event:
hide_progress(id='movies_progress')
trace(f"Skipped {files_missing} file missing movies out of {i}")
if sync_monitored:
trace(f"Skipped {skipped_count} unmonitored movies out of {i}")
trace(f"Processed {i - files_missing - skipped_count} movies out of {i} " +
f"with {len(movies_added)} added, {len(movies_updated)} updated and {len(movies_deleted)} deleted")
else:
trace(f"Processed {i - files_missing} movies out of {i} with {len(movies_added)} added and {len(movies_updated)} updated")
logging.debug('BAZARR All movies synced from Radarr into database.')

@ -13,12 +13,6 @@ from .converter import RadarrFormatAudioCodec, RadarrFormatVideoCodec
def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles):
if 'movieFile' in movie:
# Detect file separator
if movie['path'][0] == "/":
separator = "/"
else:
separator = "\\"
try:
overview = str(movie['overview'])
except Exception:
@ -120,10 +114,9 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
tags = [d['label'] for d in tags_dict if d['id'] in movie['tags']]
if action == 'update':
return {'radarrId': int(movie["id"]),
parsed_movie = {'radarrId': int(movie["id"]),
'title': movie["title"],
'path': movie["path"] + separator + movie['movieFile']['relativePath'],
'path': os.path.join(movie["path"], movie['movieFile']['relativePath']),
'tmdbId': str(movie["tmdbId"]),
'poster': poster,
'fanart': fanart,
@ -142,30 +135,12 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
'movie_file_id': int(movie['movieFile']['id']),
'tags': str(tags),
'file_size': movie['movieFile']['size']}
else:
return {'radarrId': int(movie["id"]),
'title': movie["title"],
'path': movie["path"] + separator + movie['movieFile']['relativePath'],
'tmdbId': str(movie["tmdbId"]),
'subtitles': '[]',
'overview': overview,
'poster': poster,
'fanart': fanart,
'audio_language': str(audio_language),
'sceneName': sceneName,
'monitored': str(bool(movie['monitored'])),
'sortTitle': movie['sortTitle'],
'year': str(movie['year']),
'alternativeTitles': alternativeTitles,
'format': format,
'resolution': resolution,
'video_codec': videoCodec,
'audio_codec': audioCodec,
'imdbId': imdbId,
'movie_file_id': int(movie['movieFile']['id']),
'tags': str(tags),
'profileId': movie_default_profile,
'file_size': movie['movieFile']['size']}
if action == 'insert':
parsed_movie['subtitles'] = '[]'
parsed_movie['profileId'] = movie_default_profile
return parsed_movie
def profile_id_to_language(id, profiles):

@ -30,7 +30,7 @@ def blacklist_log(sonarr_series_id, sonarr_episode_id, provider, subs_id, langua
def blacklist_delete(provider, subs_id):
database.execute(
delete(TableBlacklist)
.where((TableBlacklist.provider == provider) and (TableBlacklist.subs_id == subs_id)))
.where((TableBlacklist.provider == provider) & (TableBlacklist.subs_id == subs_id)))
event_stream(type='episode-blacklist', action='delete')

@ -2,10 +2,11 @@
import os
import logging
from constants import MINIMUM_VIDEO_SIZE
from sqlalchemy.exc import IntegrityError
from app.database import database, TableEpisodes, delete, update, insert, select
from app.database import database, TableShows, TableEpisodes, delete, update, insert, select
from app.config import settings
from utilities.path_mappings import path_mappings
from subtitles.indexer.series import store_subtitles, series_full_scan_subtitles
@ -16,14 +17,29 @@ from sonarr.info import get_sonarr_info, url_sonarr
from .parser import episodeParser
from .utils import get_episodes_from_sonarr_api, get_episodesFiles_from_sonarr_api
# map between booleans and strings in DB
bool_map = {"True": True, "False": False}

FEATURE_PREFIX = "SYNC_EPISODES "


def trace(message):
    """Emit an episode-sync debug line, but only when debug logging is on."""
    if settings.general.debug:
        logging.debug(f"{FEATURE_PREFIX}{message}")
def get_episodes_monitored_table(series_id):
    """Return a mapping of episode_file_id -> monitored string ("True"/"False")
    for every episode of the given series currently in our DB."""
    rows = database.execute(
        select(TableEpisodes.episode_file_id, TableEpisodes.monitored)
        .where(TableEpisodes.sonarrSeriesId == series_id)).all()
    return {file_id: monitored for file_id, monitored in rows}
def update_all_episodes():
series_full_scan_subtitles()
logging.info('BAZARR All existing episode subtitles indexed from disk.')
def sync_episodes(series_id, send_event=True):
logging.debug('BAZARR Starting episodes sync from Sonarr.')
logging.debug(f'BAZARR Starting episodes sync from Sonarr for series ID {series_id}.')
apikey_sonarr = settings.sonarr.apikey
# Get current episodes id in DB
@ -58,16 +74,42 @@ def sync_episodes(series_id, send_event=True):
if item:
episode['episodeFile'] = item[0]
sync_monitored = settings.sonarr.sync_only_monitored_series and settings.sonarr.sync_only_monitored_episodes
if sync_monitored:
episodes_monitored = get_episodes_monitored_table(series_id)
skipped_count = 0
for episode in episodes:
if 'hasFile' in episode:
if episode['hasFile'] is True:
if 'episodeFile' in episode:
# monitored_status_db = get_episodes_monitored_status(episode['episodeFileId'])
if sync_monitored:
try:
monitored_status_db = bool_map[episodes_monitored[episode['episodeFileId']]]
except KeyError:
monitored_status_db = None
if monitored_status_db is None:
# not in db, might need to add, if we have a file on disk
pass
elif monitored_status_db != episode['monitored']:
# monitored status changed and we don't know about it until now
trace(f"(Monitor Status Mismatch) {episode['title']}")
# pass
elif not episode['monitored']:
# Add unmonitored episode in sonarr to current episode list, otherwise it will be deleted from db
current_episodes_sonarr.append(episode['id'])
skipped_count += 1
continue
try:
bazarr_file_size = \
os.path.getsize(path_mappings.path_replace(episode['episodeFile']['path']))
except OSError:
bazarr_file_size = 0
if episode['episodeFile']['size'] > 20480 or bazarr_file_size > 20480:
if episode['episodeFile']['size'] > MINIMUM_VIDEO_SIZE or bazarr_file_size > MINIMUM_VIDEO_SIZE:
# Add episodes in sonarr to current episode list
current_episodes_sonarr.append(episode['id'])
@ -80,6 +122,12 @@ def sync_episodes(series_id, send_event=True):
episodes_to_add.append(episodeParser(episode))
else:
return
if sync_monitored:
# try to avoid unnecessary database calls
if settings.general.debug:
series_title = database.execute(select(TableShows.title).where(TableShows.sonarrSeriesId == series_id)).first()[0]
trace(f"Skipped {skipped_count} unmonitored episodes out of {len(episodes)} for {series_title}")
# Remove old episodes from DB
episodes_to_delete = list(set(current_episodes_id_db_list) - set(current_episodes_sonarr))

@ -4,6 +4,7 @@ import os
from app.config import settings
from app.database import TableShows, database, select
from constants import MINIMUM_VIDEO_SIZE
from utilities.path_mappings import path_mappings
from utilities.video_analyzer import embedded_audio_reader
from sonarr.info import get_sonarr_info
@ -92,7 +93,7 @@ def episodeParser(episode):
bazarr_file_size = os.path.getsize(path_mappings.path_replace(episode['episodeFile']['path']))
except OSError:
bazarr_file_size = 0
if episode['episodeFile']['size'] > 20480 or bazarr_file_size > 20480:
if episode['episodeFile']['size'] > MINIMUM_VIDEO_SIZE or bazarr_file_size > MINIMUM_VIDEO_SIZE:
if 'sceneName' in episode['episodeFile']:
sceneName = episode['episodeFile']['sceneName']
else:

@ -16,6 +16,20 @@ from .episodes import sync_episodes
from .parser import seriesParser
from .utils import get_profile_list, get_tags, get_series_from_sonarr_api
# map between booleans and strings in DB
bool_map = {"True": True, "False": False}

FEATURE_PREFIX = "SYNC_SERIES "


def trace(message):
    """Emit a series-sync debug line, but only when debug logging is on."""
    if settings.general.debug:
        logging.debug(f"{FEATURE_PREFIX}{message}")
def get_series_monitored_table():
    """Return a mapping of tvdbId -> monitored string ("True"/"False")
    for every show currently in our DB."""
    rows = database.execute(
        select(TableShows.tvdbId, TableShows.monitored)).all()
    return {tvdb_id: monitored for tvdb_id, monitored in rows}
def update_series(send_event=True):
check_sonarr_rootfolder()
@ -55,6 +69,12 @@ def update_series(send_event=True):
current_shows_sonarr = []
series_count = len(series)
sync_monitored = settings.sonarr.sync_only_monitored_series
if sync_monitored:
series_monitored = get_series_monitored_table()
skipped_count = 0
trace(f"Starting sync for {series_count} shows")
for i, show in enumerate(series):
if send_event:
show_progress(id='series_progress',
@ -63,6 +83,26 @@ def update_series(send_event=True):
value=i,
count=series_count)
if sync_monitored:
try:
monitored_status_db = bool_map[series_monitored[show['tvdbId']]]
except KeyError:
monitored_status_db = None
if monitored_status_db is None:
# not in db, need to add
pass
elif monitored_status_db != show['monitored']:
# monitored status changed and we don't know about it until now
trace(f"{i}: (Monitor Status Mismatch) {show['title']}")
# pass
elif not show['monitored']:
# Add unmonitored series in sonarr to current series list, otherwise it will be deleted from db
trace(f"{i}: (Skipped Unmonitored) {show['title']}")
current_shows_sonarr.append(show['id'])
skipped_count += 1
continue
trace(f"{i}: (Processing) {show['title']}")
# Add shows in Sonarr to current shows list
current_shows_sonarr.append(show['id'])
@ -76,6 +116,7 @@ def update_series(send_event=True):
.filter_by(**updated_series))\
.first():
try:
trace(f"Updating {show['title']}")
database.execute(
update(TableShows)
.values(updated_series)
@ -92,6 +133,7 @@ def update_series(send_event=True):
audio_profiles=audio_profiles)
try:
trace(f"Inserting {show['title']}")
database.execute(
insert(TableShows)
.values(added_series))
@ -110,6 +152,10 @@ def update_series(send_event=True):
removed_series = list(set(current_shows_db) - set(current_shows_sonarr))
for series in removed_series:
# try to avoid unnecessary database calls
if settings.general.debug:
series_title = database.execute(select(TableShows.title).where(TableShows.sonarrSeriesId == series)).first()[0]
trace(f"Deleting {series_title}")
database.execute(
delete(TableShows)
.where(TableShows.sonarrSeriesId == series))
@ -120,6 +166,8 @@ def update_series(send_event=True):
if send_event:
hide_progress(id='series_progress')
if sync_monitored:
trace(f"skipped {skipped_count} unmonitored series out of {i}")
logging.debug('BAZARR All series synced from Sonarr into database.')

@ -3,7 +3,7 @@
import logging
from app.config import settings
from app.config import settings, sync_checker as _defaul_sync_checker
from utilities.path_mappings import path_mappings
from utilities.post_processing import pp_replace, set_chmod
from languages.get_languages import alpha2_from_alpha3, alpha2_from_language, alpha3_from_language, language_from_alpha3
@ -43,6 +43,8 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
postprocessing_cmd = settings.general.postprocessing_cmd
downloaded_provider = subtitle.provider_name
uploader = subtitle.uploader
release_info = subtitle.release_info
downloaded_language_code3 = _get_download_code3(subtitle)
downloaded_language = language_from_alpha3(downloaded_language_code3)
@ -69,6 +71,9 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
message = (f"{downloaded_language}{modifier_string} subtitles {action} from {downloaded_provider} with a score of "
f"{percent_score}%.")
sync_checker = _defaul_sync_checker
logging.debug("Sync checker: %s", sync_checker)
if media_type == 'series':
episode_metadata = database.execute(
select(TableEpisodes.sonarrSeriesId, TableEpisodes.sonarrEpisodeId)
@ -79,13 +84,14 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
series_id = episode_metadata.sonarrSeriesId
episode_id = episode_metadata.sonarrEpisodeId
from .sync import sync_subtitles
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=percent_score,
sonarr_series_id=episode_metadata.sonarrSeriesId,
sonarr_episode_id=episode_metadata.sonarrEpisodeId)
if sync_checker(subtitle) is True:
from .sync import sync_subtitles
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2,
percent_score=percent_score,
sonarr_series_id=episode_metadata.sonarrSeriesId,
sonarr_episode_id=episode_metadata.sonarrEpisodeId)
else:
movie_metadata = database.execute(
select(TableMovies.radarrId)
@ -96,17 +102,18 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
series_id = ""
episode_id = movie_metadata.radarrId
from .sync import sync_subtitles
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=percent_score,
radarr_id=movie_metadata.radarrId)
if sync_checker(subtitle) is True:
from .sync import sync_subtitles
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2,
percent_score=percent_score,
radarr_id=movie_metadata.radarrId)
if use_postprocessing is True:
command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language, downloaded_language_code2,
downloaded_language_code3, audio_language, audio_language_code2, audio_language_code3,
percent_score, subtitle_id, downloaded_provider, series_id, episode_id)
percent_score, subtitle_id, downloaded_provider, uploader, release_info, series_id, episode_id)
if media_type == 'series':
use_pp_threshold = settings.general.use_postprocessing_threshold

@ -8,7 +8,7 @@ from app.config import settings
from subtitles.tools.subsyncer import SubSyncer
def sync_subtitles(video_path, srt_path, srt_lang, forced, media_type, percent_score, sonarr_series_id=None,
def sync_subtitles(video_path, srt_path, srt_lang, forced, percent_score, sonarr_series_id=None,
sonarr_episode_id=None, radarr_id=None):
if forced:
logging.debug('BAZARR cannot sync forced subtitles. Skipping sync routine.')
@ -17,7 +17,7 @@ def sync_subtitles(video_path, srt_path, srt_lang, forced, media_type, percent_s
else:
logging.debug(f'BAZARR automatic syncing is enabled in settings. We\'ll try to sync this '
f'subtitles: {srt_path}.')
if media_type == 'series':
if sonarr_episode_id:
use_subsync_threshold = settings.subsync.use_subsync_threshold
subsync_threshold = settings.subsync.subsync_threshold
else:
@ -26,7 +26,7 @@ def sync_subtitles(video_path, srt_path, srt_lang, forced, media_type, percent_s
if not use_subsync_threshold or (use_subsync_threshold and percent_score < float(subsync_threshold)):
subsync = SubSyncer()
subsync.sync(video_path=video_path, srt_path=srt_path, srt_lang=srt_lang, media_type=media_type,
subsync.sync(video_path=video_path, srt_path=srt_path, srt_lang=srt_lang,
sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id, radarr_id=radarr_id)
del subsync
gc.collect()

@ -30,8 +30,9 @@ class SubSyncer:
self.vad = 'subs_then_webrtc'
self.log_dir_path = os.path.join(args.config_dir, 'log')
def sync(self, video_path, srt_path, srt_lang, media_type, sonarr_series_id=None, sonarr_episode_id=None,
radarr_id=None):
def sync(self, video_path, srt_path, srt_lang, sonarr_series_id=None, sonarr_episode_id=None, radarr_id=None,
reference=None, max_offset_seconds=str(settings.subsync.max_offset_seconds),
no_fix_framerate=settings.subsync.no_fix_framerate, gss=settings.subsync.gss):
self.reference = video_path
self.srtin = srt_path
self.srtout = f'{os.path.splitext(self.srtin)[0]}.synced.srt'
@ -52,20 +53,41 @@ class SubSyncer:
logging.debug('BAZARR FFmpeg used is %s', ffmpeg_exe)
self.ffmpeg_path = os.path.dirname(ffmpeg_exe)
unparsed_args = [self.reference, '-i', self.srtin, '-o', self.srtout, '--ffmpegpath', self.ffmpeg_path, '--vad',
self.vad, '--log-dir-path', self.log_dir_path]
if settings.subsync.force_audio:
unparsed_args.append('--no-fix-framerate')
unparsed_args.append('--reference-stream')
unparsed_args.append('a:0')
if settings.subsync.debug:
unparsed_args.append('--make-test-case')
parser = make_parser()
self.args = parser.parse_args(args=unparsed_args)
if os.path.isfile(self.srtout):
os.remove(self.srtout)
logging.debug('BAZARR deleted the previous subtitles synchronization attempt file.')
try:
unparsed_args = [self.reference, '-i', self.srtin, '-o', self.srtout, '--ffmpegpath', self.ffmpeg_path,
'--vad', self.vad, '--log-dir-path', self.log_dir_path, '--max-offset-seconds',
max_offset_seconds, '--output-encoding', 'same']
if not settings.general.utf8_encode:
unparsed_args.append('--output-encoding')
unparsed_args.append('same')
if no_fix_framerate:
unparsed_args.append('--no-fix-framerate')
if gss:
unparsed_args.append('--gss')
if reference and reference != video_path and os.path.isfile(reference):
# subtitles path provided
self.reference = reference
elif reference and isinstance(reference, str) and len(reference) == 3 and reference[:2] in ['a:', 's:']:
# audio or subtitles track id provided
unparsed_args.append('--reference-stream')
unparsed_args.append(reference)
elif settings.subsync.force_audio:
# nothing else match and force audio settings is enabled
unparsed_args.append('--reference-stream')
unparsed_args.append('a:0')
if settings.subsync.debug:
unparsed_args.append('--make-test-case')
parser = make_parser()
self.args = parser.parse_args(args=unparsed_args)
if os.path.isfile(self.srtout):
os.remove(self.srtout)
logging.debug('BAZARR deleted the previous subtitles synchronization attempt file.')
result = run(self.args)
except Exception:
logging.exception(
@ -95,7 +117,7 @@ class SubSyncer:
reversed_subtitles_path=srt_path,
hearing_impaired=None)
if media_type == 'series':
if sonarr_episode_id:
history_log(action=5, sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id,
result=result)
else:

@ -137,21 +137,21 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
return
series_id = episode_metadata.sonarrSeriesId
episode_id = episode_metadata.sonarrEpisodeId
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, media_type=media_type,
percent_score=100, sonarr_series_id=episode_metadata.sonarrSeriesId, forced=forced,
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, percent_score=100,
sonarr_series_id=episode_metadata.sonarrSeriesId, forced=forced,
sonarr_episode_id=episode_metadata.sonarrEpisodeId)
else:
if not movie_metadata:
return
series_id = ""
episode_id = movie_metadata.radarrId
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, media_type=media_type,
percent_score=100, radarr_id=movie_metadata.radarrId, forced=forced)
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, percent_score=100,
radarr_id=movie_metadata.radarrId, forced=forced)
if use_postprocessing:
command = pp_replace(postprocessing_cmd, path, subtitle_path, uploaded_language, uploaded_language_code2,
uploaded_language_code3, audio_language['name'], audio_language['code2'],
audio_language['code3'], 100, "1", "manual", series_id, episode_id)
audio_language['code3'], 100, "1", "manual", "user", "unknown", series_id, episode_id)
postprocessing(command, path)
set_chmod(subtitles_path=subtitle_path)

@ -151,6 +151,8 @@ def restore_from_backup():
try:
os.remove(restore_config_path)
except FileNotFoundError:
pass
except OSError:
logging.exception(f'Unable to delete {dest_config_path}')

@ -14,7 +14,7 @@ def check_credentials(user, pw, request, log_success=True):
ip_addr = request.environ.get('HTTP_X_FORWARDED_FOR', request.remote_addr)
username = settings.auth.username
password = settings.auth.password
if hashlib.md5(pw.encode('utf-8')).hexdigest() == password and user == username:
if hashlib.md5(f"{pw}".encode('utf-8')).hexdigest() == password and user == username:
if log_success:
logging.info(f'Successful authentication from {ip_addr} for user {user}')
return True

@ -16,7 +16,8 @@ def _escape(in_str):
def pp_replace(pp_command, episode, subtitles, language, language_code2, language_code3, episode_language,
episode_language_code2, episode_language_code3, score, subtitle_id, provider, series_id, episode_id):
episode_language_code2, episode_language_code3, score, subtitle_id, provider, uploader,
release_info, series_id, episode_id):
pp_command = re.sub(r'[\'"]?{{directory}}[\'"]?', _escape(os.path.dirname(episode)), pp_command)
pp_command = re.sub(r'[\'"]?{{episode}}[\'"]?', _escape(episode), pp_command)
pp_command = re.sub(r'[\'"]?{{episode_name}}[\'"]?', _escape(os.path.splitext(os.path.basename(episode))[0]),
@ -35,6 +36,8 @@ def pp_replace(pp_command, episode, subtitles, language, language_code2, languag
pp_command = re.sub(r'[\'"]?{{score}}[\'"]?', _escape(str(score)), pp_command)
pp_command = re.sub(r'[\'"]?{{subtitle_id}}[\'"]?', _escape(str(subtitle_id)), pp_command)
pp_command = re.sub(r'[\'"]?{{provider}}[\'"]?', _escape(str(provider)), pp_command)
pp_command = re.sub(r'[\'"]?{{uploader}}[\'"]?', _escape(str(uploader)), pp_command)
pp_command = re.sub(r'[\'"]?{{release_info}}[\'"]?', _escape(str(release_info)), pp_command)
pp_command = re.sub(r'[\'"]?{{series_id}}[\'"]?', _escape(str(series_id)), pp_command)
pp_command = re.sub(r'[\'"]?{{episode_id}}[\'"]?', _escape(str(episode_id)), pp_command)
return pp_command

@ -1,15 +1,16 @@
# coding=utf-8
import ast
import logging
import os
import pickle
from knowit.api import know, KnowitException
from languages.custom_lang import CustomLanguage
from languages.get_languages import language_from_alpha3, alpha3_from_alpha2
from app.config import settings
from app.database import TableEpisodes, TableMovies, database, update, select
from languages.custom_lang import CustomLanguage
from languages.get_languages import language_from_alpha2, language_from_alpha3, alpha3_from_alpha2
from utilities.path_mappings import path_mappings
from app.config import settings
from knowit.api import know, KnowitException
def _handle_alpha3(detected_language: dict):
@ -107,6 +108,110 @@ def embedded_audio_reader(file, file_size, episode_file_id=None, movie_file_id=N
return audio_list
def subtitles_sync_references(subtitles_path, sonarr_episode_id=None, radarr_movie_id=None):
    """Build the list of candidate synchronization references for a subtitles file.

    Returns a dict with three lists:
      - 'audio_tracks': embedded audio streams (stream id 'a:<n>')
      - 'embedded_subtitles_tracks': embedded text subtitles streams ('s:<n>');
        bitmap-based tracks (dvd/pgs) are excluded since they can't serve as
        a text reference
      - 'external_subtitles_tracks': the other external subtitles files known
        for this media (the file being synced is excluded)

    :param subtitles_path: mapped path of the subtitles file to be synced
    :param sonarr_episode_id: Sonarr episode ID (use this OR radarr_movie_id)
    :param radarr_movie_id: Radarr movie ID
    :return: dict of reference candidates; all lists empty when the media or
             its cached ffprobe/mediainfo metadata can't be found
    """
    references_dict = {'audio_tracks': [], 'embedded_subtitles_tracks': [], 'external_subtitles_tracks': []}
    data = None

    if sonarr_episode_id:
        media_data = database.execute(
            select(TableEpisodes.path, TableEpisodes.file_size, TableEpisodes.episode_file_id, TableEpisodes.subtitles)
            .where(TableEpisodes.sonarrEpisodeId == sonarr_episode_id)) \
            .first()

        if not media_data:
            return references_dict

        data = parse_video_metadata(media_data.path, media_data.file_size, media_data.episode_file_id, None,
                                    use_cache=True)
    elif radarr_movie_id:
        media_data = database.execute(
            select(TableMovies.path, TableMovies.file_size, TableMovies.movie_file_id, TableMovies.subtitles)
            .where(TableMovies.radarrId == radarr_movie_id)) \
            .first()

        if not media_data:
            return references_dict

        data = parse_video_metadata(media_data.path, media_data.file_size, None, media_data.movie_file_id,
                                    use_cache=True)

    if not data:
        return references_dict

    # Prefer ffprobe metadata when available, fall back to mediainfo.
    cache_provider = None
    if "ffprobe" in data and data["ffprobe"]:
        cache_provider = 'ffprobe'
    elif 'mediainfo' in data and data["mediainfo"]:
        cache_provider = 'mediainfo'

    if cache_provider:
        if 'audio' in data[cache_provider]:
            track_id = 0
            for detected_language in data[cache_provider]["audio"]:
                # parentheses are stripped because stream ids are displayed as "name (a:0)"
                name = detected_language.get("name", "").replace("(", "").replace(")", "")

                if "language" not in detected_language:
                    language = 'Undefined'
                else:
                    alpha3 = _handle_alpha3(detected_language)
                    language = language_from_alpha3(alpha3)

                references_dict['audio_tracks'].append({'stream': f'a:{track_id}', 'name': name, 'language': language})
                track_id += 1

        if 'subtitle' in data[cache_provider]:
            track_id = 0
            bitmap_subs = ['dvd', 'pgs']
            for detected_language in data[cache_provider]["subtitle"]:
                if any(x in detected_language.get("name", "").lower() for x in bitmap_subs):
                    # skipping bitmap based subtitles
                    track_id += 1
                    continue

                name = detected_language.get("name", "").replace("(", "").replace(")", "")

                if "language" not in detected_language:
                    language = 'Undefined'
                else:
                    alpha3 = _handle_alpha3(detected_language)
                    language = language_from_alpha3(alpha3)

                forced = detected_language.get("forced", False)
                hearing_impaired = detected_language.get("hearing_impaired", False)

                references_dict['embedded_subtitles_tracks'].append(
                    {'stream': f's:{track_id}', 'name': name, 'language': language, 'forced': forced,
                     'hearing_impaired': hearing_impaired}
                )
                track_id += 1

        try:
            # subtitles column holds a Python-literal list of [language, path] pairs
            parsed_subtitles = ast.literal_eval(media_data.subtitles)
        except ValueError:
            pass
        else:
            for subtitles in parsed_subtitles:
                reversed_subtitles_path = path_mappings.path_replace_reverse(subtitles_path) if sonarr_episode_id else (
                    path_mappings.path_replace_reverse_movie(subtitles_path))

                if subtitles[1] and subtitles[1] != reversed_subtitles_path:
                    language_dict = languages_from_colon_seperated_string(subtitles[0])
                    references_dict['external_subtitles_tracks'].append({
                        'name': os.path.basename(subtitles[1]),
                        # DB stores reverse-mapped paths, so map them forward for both
                        # media types (the movie branch previously re-applied the
                        # reverse mapping via path_replace_reverse_movie, yielding a
                        # wrong path)
                        'path': path_mappings.path_replace(subtitles[1]) if sonarr_episode_id else
                        path_mappings.path_replace_movie(subtitles[1]),
                        'language': language_dict['language'],
                        'forced': language_dict['forced'],
                        'hearing_impaired': language_dict['hi'],
                    })
                else:
                    # excluding subtitles that is going to be synced from the external subtitles list
                    continue

    return references_dict
def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=None, use_cache=True):
# Define default data keys value
data = {
@ -161,6 +266,11 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
elif embedded_subs_parser == 'mediainfo':
mediainfo_path = get_binary("mediainfo")
# see if file exists (perhaps offline)
if not os.path.exists(file):
logging.error(f'Video file "{file}" cannot be found for analysis')
return None
# if we have ffprobe available
if ffprobe_path:
try:
@ -195,3 +305,15 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
.values(ffprobe_cache=pickle.dumps(data, pickle.HIGHEST_PROTOCOL))
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(file)))
return data
def languages_from_colon_seperated_string(lang):
    """Split a colon-separated language tag (e.g. 'en', 'pt:forced', 'he:hi')
    into its components.

    :param lang: alpha2 language code optionally followed by ':forced' or ':hi'
    :return: dict with 'language' (resolved language name), plus boolean
             'forced' and 'hi' flags
    """
    parts = lang.split(':')
    modifier = parts[1] if len(parts) > 1 else ''
    return {
        'language': language_from_alpha2(parts[0]),
        'forced': modifier == 'forced',
        'hi': modifier == 'hi',
    }

@ -59,7 +59,7 @@ In the project directory, you can run:
### `npm start`
Runs the app in the development mode.
Open `http://localhost:3000` to view it in the browser.
Open `http://localhost:5173` to view it in the browser.
The page will reload if you make edits.
You will also see any lint errors in the console.

@ -125,3 +125,27 @@ export function useSubtitleInfos(names: string[]) {
api.subtitles.info(names)
);
}
export function useRefTracksByEpisodeId(
subtitlesPath: string,
sonarrEpisodeId: number,
isEpisode: boolean
) {
return useQuery(
[QueryKeys.Episodes, sonarrEpisodeId, QueryKeys.Subtitles, subtitlesPath],
() => api.subtitles.getRefTracksByEpisodeId(subtitlesPath, sonarrEpisodeId),
{ enabled: isEpisode }
);
}
export function useRefTracksByMovieId(
subtitlesPath: string,
radarrMovieId: number,
isMovie: boolean
) {
return useQuery(
[QueryKeys.Movies, radarrMovieId, QueryKeys.Subtitles, subtitlesPath],
() => api.subtitles.getRefTracksByMovieId(subtitlesPath, radarrMovieId),
{ enabled: isMovie }
);
}

@ -5,6 +5,28 @@ class SubtitlesApi extends BaseApi {
super("/subtitles");
}
async getRefTracksByEpisodeId(
subtitlesPath: string,
sonarrEpisodeId: number
) {
const response = await this.get<DataWrapper<Item.RefTracks>>("", {
subtitlesPath,
sonarrEpisodeId,
});
return response.data;
}
async getRefTracksByMovieId(
subtitlesPath: string,
radarrMovieId?: number | undefined
) {
const response = await this.get<DataWrapper<Item.RefTracks>>("", {
subtitlesPath,
radarrMovieId,
});
return response.data;
}
async info(names: string[]) {
const response = await this.get<DataWrapper<SubtitleInfo[]>>(`/info`, {
filenames: names,

@ -25,6 +25,7 @@ import {
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { Divider, List, Menu, MenuProps, ScrollArea } from "@mantine/core";
import { FunctionComponent, ReactElement, useCallback, useMemo } from "react";
import { SyncSubtitleModal } from "./forms/SyncSubtitleForm";
export interface ToolOptions {
key: string;
@ -41,7 +42,8 @@ export function useTools() {
{
key: "sync",
icon: faPlay,
name: "Sync",
name: "Sync...",
modal: SyncSubtitleModal,
},
{
key: "remove_HI",

@ -0,0 +1,183 @@
/* eslint-disable camelcase */
import {
useRefTracksByEpisodeId,
useRefTracksByMovieId,
useSubtitleAction,
} from "@/apis/hooks";
import { useModals, withModal } from "@/modules/modals";
import { task } from "@/modules/task";
import { syncMaxOffsetSecondsOptions } from "@/pages/Settings/Subtitles/options";
import { toPython } from "@/utilities";
import { faInfoCircle } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { Alert, Button, Checkbox, Divider, Stack, Text } from "@mantine/core";
import { useForm } from "@mantine/form";
import { FunctionComponent } from "react";
import { Selector, SelectorOption } from "../inputs";
const TaskName = "Syncing Subtitle";
// Collect the selector options for a sync reference: embedded audio tracks,
// embedded subtitles tracks and the other external subtitles files known for
// the media.
function useReferencedSubtitles(
  mediaType: "episode" | "movie",
  mediaId: number,
  subtitlesPath: string
) {
  // Hooks must run unconditionally, so both hooks are always called and the
  // "enabled" flag ensures only the relevant API request is actually issued.
  const episodeData = useRefTracksByEpisodeId(
    subtitlesPath,
    mediaId,
    mediaType === "episode"
  );
  const movieData = useRefTracksByMovieId(
    subtitlesPath,
    mediaId,
    mediaType === "movie"
  );

  const tracks = mediaType === "episode" ? episodeData.data : movieData.data;

  if (!tracks) {
    return [];
  }

  const subtitles: { group: string; value: string; label: string }[] = [];

  for (const item of tracks.audio_tracks) {
    subtitles.push({
      group: "Embedded audio tracks",
      value: item.stream,
      label: `${item.name || item.language} (${item.stream})`,
    });
  }

  for (const item of tracks.embedded_subtitles_tracks) {
    subtitles.push({
      group: "Embedded subtitles tracks",
      value: item.stream,
      label: `${item.name || item.language} (${item.stream})`,
    });
  }

  for (const item of tracks.external_subtitles_tracks) {
    if (item) {
      subtitles.push({
        group: "External Subtitles files",
        value: item.path,
        label: item.name,
      });
    }
  }

  return subtitles;
}
interface Props {
selections: FormType.ModifySubtitle[];
onSubmit?: VoidFunction;
}
interface FormValues {
reference?: string;
maxOffsetSeconds?: string;
noFixFramerate: boolean;
gss: boolean;
}
// Form behind the subtitles "Sync..." tool: lets the user pick a sync
// reference (audio / embedded / external track), a maximum offset, and the
// no-fix-framerate / golden-section-search flags, then queues one sync task
// per selected subtitles file.
const SyncSubtitleForm: FunctionComponent<Props> = ({
  selections,
  onSubmit,
}) => {
  // NOTE(review): this throw sits before the hooks below; it guards a caller
  // bug, but conditional logic ahead of hooks is fragile — confirm selections
  // can never switch between empty and non-empty across re-renders.
  if (selections.length === 0) {
    throw new Error("You need to select at least 1 media to sync");
  }
  const { mutateAsync } = useSubtitleAction();
  const modals = useModals();
  // Reference options are fetched for a single media item only, so they are
  // derived from the first selection (the Reference selector is disabled when
  // more than one item is selected).
  const mediaType = selections[0].type;
  const mediaId = selections[0].id;
  const subtitlesPath = selections[0].path;
  const subtitles: SelectorOption<string>[] = useReferencedSubtitles(
    mediaType,
    mediaId,
    subtitlesPath
  );
  const form = useForm<FormValues>({
    initialValues: {
      noFixFramerate: false,
      gss: false,
    },
  });
  return (
    <form
      onSubmit={form.onSubmit((parameters) => {
        // Queue one background sync task per selected subtitles file.
        selections.forEach((s) => {
          // NOTE(review): this local `form` shadows the outer useForm `form`;
          // consider renaming (e.g. `mods`) to avoid confusion.
          const form: FormType.ModifySubtitle = {
            ...s,
            reference: parameters.reference,
            max_offset_seconds: parameters.maxOffsetSeconds,
            no_fix_framerate: toPython(parameters.noFixFramerate),
            gss: toPython(parameters.gss),
          };
          task.create(s.path, TaskName, mutateAsync, { action: "sync", form });
        });
        onSubmit?.();
        modals.closeSelf();
      })}
    >
      <Stack>
        <Alert
          title="Subtitles"
          color="gray"
          icon={<FontAwesomeIcon icon={faInfoCircle}></FontAwesomeIcon>}
        >
          <Text size="sm">{selections.length} subtitles selected</Text>
        </Alert>
        {/* A specific reference track is only meaningful for one media item */}
        <Selector
          clearable
          disabled={subtitles.length === 0 || selections.length !== 1}
          label="Reference"
          placeholder="Default: choose automatically within video file"
          options={subtitles}
          {...form.getInputProps("reference")}
        ></Selector>
        <Selector
          clearable
          label="Max Offset Seconds"
          options={syncMaxOffsetSecondsOptions}
          placeholder="Select..."
          {...form.getInputProps("maxOffsetSeconds")}
        ></Selector>
        <Checkbox
          label="No Fix Framerate"
          {...form.getInputProps("noFixFramerate")}
        ></Checkbox>
        <Checkbox
          label="Golden-Section Search"
          {...form.getInputProps("gss")}
        ></Checkbox>
        <Divider></Divider>
        <Button type="submit">Sync</Button>
      </Stack>
    </form>
  );
};
// Modal wrapper so the form can be opened from the subtitles tools menu.
export const SyncSubtitleModal = withModal(SyncSubtitleForm, "sync-subtitle", {
  title: "Sync Subtitle Options",
  size: "lg",
});
export default SyncSubtitleForm;

@ -20,7 +20,15 @@ import {
useRef,
useState,
} from "react";
import { Card, Check, Chips, Message, Password, Text } from "../components";
import {
Card,
Check,
Chips,
Selector as GlobalSelector,
Message,
Password,
Text,
} from "../components";
import {
FormContext,
FormValues,
@ -206,6 +214,7 @@ const ProviderTool: FunctionComponent<ProviderToolProps> = ({
info.inputs?.forEach((value) => {
const key = value.key;
const label = value.name ?? capitalize(value.key);
const options = value.options ?? [];
switch (value.type) {
case "text":
@ -236,6 +245,16 @@ const ProviderTool: FunctionComponent<ProviderToolProps> = ({
></Check>
);
return;
case "select":
elements.push(
<GlobalSelector
key={key}
label={label}
settingKey={`settings-${itemKey}-${key}`}
options={options}
></GlobalSelector>
);
return;
case "chips":
elements.push(
<Chips
@ -295,4 +314,5 @@ const ProviderTool: FunctionComponent<ProviderToolProps> = ({
const ProviderModal = withModal(ProviderTool, "provider-tool", {
title: "Provider",
size: "calc(50vw)",
});

@ -1,5 +1,15 @@
import { antiCaptchaOption } from "@/pages/Settings/Providers/options";
import { Anchor } from "@mantine/core";
import { FunctionComponent } from "react";
import { Layout, Section } from "../components";
import {
CollapseBox,
Layout,
Message,
Password,
Section,
Selector,
Text,
} from "../components";
import { ProviderView } from "./components";
const SettingsProvidersView: FunctionComponent = () => {
@ -8,6 +18,47 @@ const SettingsProvidersView: FunctionComponent = () => {
<Section header="Providers">
<ProviderView></ProviderView>
</Section>
<Section header="Anti-Captcha Options">
<Selector
clearable
label={"Choose the anti-captcha provider you want to use"}
placeholder="Select a provider"
settingKey="settings-general-anti_captcha_provider"
settingOptions={{ onSubmit: (v) => (v === undefined ? "None" : v) }}
options={antiCaptchaOption}
></Selector>
<Message></Message>
<CollapseBox
settingKey="settings-general-anti_captcha_provider"
on={(value) => value === "anti-captcha"}
>
<Text
label="Account Key"
settingKey="settings-anticaptcha-anti_captcha_key"
></Text>
<Anchor href="http://getcaptchasolution.com/eixxo1rsnw">
Anti-Captcha.com
</Anchor>
<Message>Link to subscribe</Message>
</CollapseBox>
<CollapseBox
settingKey="settings-general-anti_captcha_provider"
on={(value) => value === "death-by-captcha"}
>
<Text
label="Username"
settingKey="settings-deathbycaptcha-username"
></Text>
<Password
label="Password"
settingKey="settings-deathbycaptcha-password"
></Password>
<Anchor href="https://www.deathbycaptcha.com">
DeathByCaptcha.com
</Anchor>
<Message>Link to subscribe</Message>
</CollapseBox>
</Section>
</Layout>
);
};

@ -1,3 +1,4 @@
import { SelectorOption } from "@/components";
import { ReactText } from "react";
type Input<T, N> = {
@ -6,12 +7,14 @@ type Input<T, N> = {
defaultValue?: T;
name?: string;
description?: string;
options?: SelectorOption<string>[];
};
type AvailableInput =
| Input<ReactText, "text">
| Input<string, "password">
| Input<boolean, "switch">
| Input<string, "select">
| Input<ReactText[], "chips">;
export interface ProviderInfo {
@ -22,6 +25,14 @@ export interface ProviderInfo {
inputs?: AvailableInput[];
}
export const logLevelOptions: SelectorOption<string>[] = [
{ label: "DEBUG", value: "DEBUG" },
{ label: "INFO", value: "INFO" },
{ label: "WARNING", value: "WARNING" },
{ label: "ERROR", value: "ERROR" },
{ label: "CRITICAL", value: "CRITICAL" },
];
export const ProviderList: Readonly<ProviderInfo[]> = [
{
key: "addic7ed",
@ -52,7 +63,11 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
},
],
},
{ key: "argenteam", description: "LATAM Spanish Subtitles Provider" },
{
key: "argenteam_dump",
name: "Argenteam Dump",
description: "Subtitles dump of the now extinct Argenteam",
},
{
key: "assrt",
description: "Chinese Subtitles Provider",
@ -79,7 +94,7 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
key: "bsplayer",
name: "BSplayer",
description:
"Provider removed from Bazarr because it was causing too much issues so it will always return no subtitles",
"Provider removed from Bazarr because it was causing too many issues.\nIt will always return no subtitles.",
},
{
key: "embeddedsubtitles",
@ -146,22 +161,6 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
],
},
{ key: "hosszupuska", description: "Hungarian Subtitles Provider" },
{
key: "legendasdivx",
name: "LegendasDivx",
description: "Brazilian / Portuguese Subtitles Provider",
inputs: [
{
type: "text",
key: "username",
},
{
type: "password",
key: "password",
},
{ type: "switch", key: "skip_wrong_fps", name: "Skip Wrong FPS" },
],
},
{
key: "karagarga",
name: "Karagarga.in",
@ -203,26 +202,23 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
},
],
},
{ key: "napiprojekt", description: "Polish Subtitles Provider" },
{
key: "whisperai",
name: "Whisper",
description: "AI Generated Subtitles powered by Whisper",
key: "legendasdivx",
name: "LegendasDivx",
description: "Brazilian / Portuguese Subtitles Provider",
inputs: [
{
type: "text",
key: "endpoint",
defaultValue: "http://127.0.0.1:9000",
name: "Whisper ASR Docker Endpoint",
key: "username",
},
{
type: "text",
key: "timeout",
defaultValue: 3600,
name: "Transcription/translation timeout in seconds",
type: "password",
key: "password",
},
{ type: "switch", key: "skip_wrong_fps", name: "Skip Wrong FPS" },
],
},
{ key: "napiprojekt", description: "Polish Subtitles Provider" },
{
key: "napisy24",
description: "Polish Subtitles Provider",
@ -243,6 +239,7 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
{
key: "opensubtitles",
name: "OpenSubtitles.org",
description: "Only works if you have VIP status",
inputs: [
{
type: "text",
@ -286,6 +283,11 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
key: "use_hash",
name: "Use Hash",
},
{
type: "switch",
key: "include_ai_translated",
name: "Include AI translated subtitles in search results",
},
],
},
{
@ -303,7 +305,7 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
{
key: "regielive",
name: "RegieLive",
description: "Romanian Subtitles Provider.",
description: "Romanian Subtitles Provider",
},
{
key: "soustitreseu",
@ -311,11 +313,6 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
description: "Mostly French Subtitles Provider",
},
{ key: "subdivx", description: "LATAM Spanish / Spanish Subtitles Provider" },
{
key: "subssabbz",
name: "Subs.sab.bz",
description: "Bulgarian Subtitles Provider",
},
{
key: "subf2m",
name: "subf2m.co",
@ -335,16 +332,21 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
],
message: "Make sure to use a unique and credible user agent.",
},
{
key: "subssabbz",
name: "Subs.sab.bz",
description: "Bulgarian Subtitles Provider",
},
{
key: "subs4free",
name: "Subs4Free",
description: "Greek Subtitles Provider. Broken, may not works for some.",
description: "Greek Subtitles Provider. Broken, may not work for some.",
},
{
key: "subs4series",
name: "Subs4Series",
description:
"Greek Subtitles Provider. Requires anti-captcha provider to solve captchas for each download.",
"Greek Subtitles Provider.\nRequires anti-captcha provider to solve captchas for each download.",
},
{
key: "subscene",
@ -358,9 +360,9 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
key: "password",
},
],
description: "Broken, may not works for some. Use subf2m instead.",
description: "Broken, may not work for some. Use subf2m instead.",
},
{ key: "subscenter" },
{ key: "subscenter", description: "Hebrew Subtitles Provider" },
{
key: "subsunacs",
name: "Subsunacs.net",
@ -401,17 +403,10 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
name: "Titrari.ro",
description: "Mostly Romanian Subtitles Provider",
},
{
key: "tusubtitulo",
name: "Tusubtitulo.com",
description:
"Provider requested to be removed from Bazarr so it will always return no subtitles. Could potentially come back in the future with an upcoming premium account.",
// "LATAM Spanish / Spanish / English Subtitles Provider for TV Shows",
},
{
key: "titulky",
name: "Titulky.com",
description: "CZ/SK Subtitles Provider. Available only with VIP",
description: "CZ/SK Subtitles Provider. Available only with VIP.",
inputs: [
{
type: "text",
@ -428,8 +423,46 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
},
],
},
{
key: "tusubtitulo",
name: "Tusubtitulo.com",
description:
"Provider requested to be removed from Bazarr, so it will always return no subtitles.\nCould potentially come back in the future with an upcoming premium account.",
// "LATAM Spanish / Spanish / English Subtitles Provider for TV Shows",
},
{ key: "tvsubtitles", name: "TVSubtitles" },
{ key: "wizdom", description: "Wizdom.xyz Subtitles Provider." },
{
key: "whisperai",
name: "Whisper",
description: "AI Generated Subtitles powered by Whisper",
inputs: [
{
type: "text",
key: "endpoint",
defaultValue: "http://127.0.0.1:9000",
name: "Whisper ASR Docker Endpoint",
},
{
type: "text",
key: "response",
defaultValue: 5,
name: "Connection/response timeout in seconds",
},
{
type: "text",
key: "timeout",
defaultValue: 3600,
name: "Transcription/translation timeout in seconds",
},
{
type: "select",
key: "loglevel",
name: "Logging level",
options: logLevelOptions,
},
],
},
{ key: "wizdom", description: "Wizdom.xyz Subtitles Provider" },
{
key: "xsubs",
name: "XSubs",
@ -454,6 +487,6 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
{
key: "zimuku",
name: "Zimuku",
description: "Chinese Subtitles Provider. Anti-captcha required",
description: "Chinese Subtitles Provider. Anti-captcha required.",
},
];

@ -0,0 +1,12 @@
import { SelectorOption } from "@/components";

// Captcha-solving services the user can choose between in the settings UI.
export const antiCaptchaOption: SelectorOption<string>[] = [
  { value: "anti-captcha", label: "Anti-Captcha" },
  { value: "death-by-captcha", label: "Death by Captcha" },
];

@ -12,7 +12,6 @@ import {
backupOptions,
dayOptions,
diskUpdateOptions,
episodesSyncOptions,
moviesSyncOptions,
seriesSyncOptions,
upgradeOptions,
@ -32,26 +31,63 @@ const SettingsSchedulerView: FunctionComponent = () => {
<Layout name="Scheduler">
<Section header="Sonarr/Radarr Sync">
<Selector
label="Update Series List from Sonarr"
label="Sync with Sonarr"
options={seriesSyncOptions}
settingKey="settings-sonarr-series_sync"
></Selector>
<Selector
label="Update Episodes List from Sonarr"
options={episodesSyncOptions}
settingKey="settings-sonarr-episodes_sync"
></Selector>
<Check
label="Sync Only Monitored Series"
settingKey={"settings-sonarr-sync_only_monitored_series"}
></Check>
<CollapseBox settingKey={"settings-sonarr-sync_only_monitored_series"}>
<Message>
If enabled, only series with a monitored status in Sonarr will be
synced. If you make changes to a specific unmonitored Sonarr series
and you want Bazarr to know about those changes, simply toggle the
monitored status back on in Sonarr and Bazarr will sync any changes.
</Message>
</CollapseBox>
<CollapseBox settingKey={"settings-sonarr-sync_only_monitored_series"}>
<Check
label="Sync Only Monitored Episodes"
settingKey={"settings-sonarr-sync_only_monitored_episodes"}
></Check>
<CollapseBox
settingKey={"settings-sonarr-sync_only_monitored_episodes"}
>
<Message>
If enabled, only episodes with a monitored status in Sonarr will
be synced. If you make changes to a specific unmonitored Sonarr
episode (or season) and you want Bazarr to know about those
changes, simply toggle the monitored status back on in Sonarr and
Bazarr will sync any changes. This setting is especially helpful
for long running TV series with many seasons and many episodes,
but that are still actively producing new episodes (e.g. Saturday
Night Live).
</Message>
</CollapseBox>
</CollapseBox>
<Selector
label="Update Movies List from Radarr"
label="Sync with Radarr"
options={moviesSyncOptions}
settingKey="settings-radarr-movies_sync"
></Selector>
<Check
label="Sync Only Monitored Movies"
settingKey={"settings-radarr-sync_only_monitored_movies"}
></Check>
<CollapseBox settingKey={"settings-radarr-sync_only_monitored_movies"}>
<Message>
If enabled, only movies with a monitored status in Radarr will be
synced. If you make changes to a specific unmonitored Radarr movie
and you want Bazarr to know about those changes, simply toggle the
monitored status back on in Radarr and Bazarr will sync any changes.
</Message>
</CollapseBox>
</Section>
<Section header="Disk Indexing">
<Selector
label="Update all Episode Subtitles from Disk"
label="Update All Episode Subtitles from Disk"
settingKey="settings-sonarr-full_update"
options={diskUpdateOptions}
></Selector>
@ -88,7 +124,7 @@ const SettingsSchedulerView: FunctionComponent = () => {
</Message>
<Selector
label="Update all Movie Subtitles from Disk"
label="Update All Movie Subtitles from Disk"
settingKey="settings-radarr-full_update"
options={diskUpdateOptions}
></Selector>
@ -144,7 +180,7 @@ const SettingsSchedulerView: FunctionComponent = () => {
</Section>
<Section header="Backup">
<Selector
label="Backup config and database"
label="Backup Database and Configuration File"
settingKey="settings-backup-frequency"
options={backupOptions}
></Selector>

@ -1,6 +1,7 @@
import { SelectorOption } from "@/components";
export const seriesSyncOptions: SelectorOption<number>[] = [
{ label: "Manually", value: 52560000 },
{ label: "15 Minutes", value: 15 },
{ label: "1 Hour", value: 60 },
{ label: "3 Hours", value: 180 },
@ -9,8 +10,6 @@ export const seriesSyncOptions: SelectorOption<number>[] = [
{ label: "24 Hours", value: 1440 },
];
export const episodesSyncOptions = seriesSyncOptions;
export const moviesSyncOptions = seriesSyncOptions;
export const diskUpdateOptions: SelectorOption<string>[] = [
@ -32,6 +31,7 @@ export const dayOptions: SelectorOption<number>[] = [
];
export const upgradeOptions: SelectorOption<number>[] = [
{ label: "Manually", value: 876000 },
{ label: "6 Hours", value: 6 },
{ label: "12 Hours", value: 12 },
{ label: "24 Hours", value: 24 },

@ -1,11 +1,11 @@
import { Anchor, Code, Table } from "@mantine/core";
import { Code, Table } from "@mantine/core";
import { FunctionComponent } from "react";
import {
Check,
CollapseBox,
Layout,
Message,
Password,
MultiSelector,
Section,
Selector,
Slider,
@ -18,11 +18,12 @@ import {
import {
adaptiveSearchingDelayOption,
adaptiveSearchingDeltaOption,
antiCaptchaOption,
colorOptions,
embeddedSubtitlesParserOption,
folderOptions,
hiExtensionOptions,
providerOptions,
syncMaxOffsetSecondsOptions,
} from "./options";
interface CommandOption {
@ -96,6 +97,14 @@ const commandOptions: CommandOption[] = [
option: "provider",
description: "Provider of the subtitle file",
},
{
option: "uploader",
description: "Uploader of the subtitle file",
},
{
option: "release_info",
description: "Release info for the subtitle file",
},
{
option: "series_id",
description: "Sonarr series ID (Empty if movie)",
@ -118,7 +127,7 @@ const commandOptionElements: JSX.Element[] = commandOptions.map((op, idx) => (
const SettingsSubtitlesView: FunctionComponent = () => {
return (
<Layout name="Subtitles">
<Section header="Subtitles Options">
<Section header="Basic Options">
<Selector
label="Subtitle Folder"
options={folderOptions}
@ -136,6 +145,65 @@ const SettingsSubtitlesView: FunctionComponent = () => {
settingKey="settings-general-subfolder_custom"
></Text>
</CollapseBox>
<Selector
label="Hearing-impaired subtitles extension"
options={hiExtensionOptions}
settingKey="settings-general-hi_extension"
></Selector>
<Message>
What file extension to use when saving hearing-impaired subtitles to
disk (e.g., video.en.sdh.srt).
</Message>
</Section>
<Section header="Embedded Subtitles">
<Check
label="Use Embedded Subtitles"
settingKey="settings-general-use_embedded_subs"
></Check>
<Message>
Use embedded subtitles in media files when determining missing ones.
</Message>
<CollapseBox indent settingKey="settings-general-use_embedded_subs">
<Selector
settingKey="settings-general-embedded_subtitles_parser"
settingOptions={{
onSaved: (v) => (v === undefined ? "ffprobe" : v),
}}
options={embeddedSubtitlesParserOption}
></Selector>
<Message>Embedded subtitles video parser</Message>
<Check
label="Ignore Embedded PGS Subtitles"
settingKey="settings-general-ignore_pgs_subs"
></Check>
<Message>
Ignores PGS Subtitles in Embedded Subtitles detection.
</Message>
<Check
label="Ignore Embedded VobSub Subtitles"
settingKey="settings-general-ignore_vobsub_subs"
></Check>
<Message>
Ignores VobSub Subtitles in Embedded Subtitles detection.
</Message>
<Check
label="Ignore Embedded ASS Subtitles"
settingKey="settings-general-ignore_ass_subs"
></Check>
<Message>
Ignores ASS Subtitles in Embedded Subtitles detection.
</Message>
<Check
label="Show Only Desired Languages"
settingKey="settings-general-embedded_subs_show_desired"
></Check>
<Message>
Hide embedded subtitles for languages that are not currently
desired.
</Message>
</CollapseBox>
</Section>
<Section header="Upgrading Subtitles">
<Check
label="Upgrade Previously Downloaded Subtitles"
settingKey="settings-general-upgrade_subs"
@ -161,52 +229,25 @@ const SettingsSubtitlesView: FunctionComponent = () => {
subtitles.
</Message>
</CollapseBox>
<Selector
label="Hearing-impaired subtitles extension"
options={hiExtensionOptions}
settingKey="settings-general-hi_extension"
></Selector>
</Section>
<Section header="Encoding">
<Check
label="Encode Subtitles To UTF8"
settingKey="settings-general-utf8_encode"
></Check>
<Message>
What file extension to use when saving hearing-impaired subtitles to
disk (e.g., video.en.sdh.srt).
Re-encode downloaded Subtitles to UTF8. Should be left enabled in most
case.
</Message>
</Section>
<Section header="Anti-Captcha Options">
<Selector
clearable
placeholder="Select a provider"
settingKey="settings-general-anti_captcha_provider"
settingOptions={{ onSubmit: (v) => (v === undefined ? "None" : v) }}
options={antiCaptchaOption}
></Selector>
<Message>Choose the anti-captcha provider you want to use</Message>
<CollapseBox
settingKey="settings-general-anti_captcha_provider"
on={(value) => value === "anti-captcha"}
>
<Anchor href="http://getcaptchasolution.com/eixxo1rsnw">
Anti-Captcha.com
</Anchor>
<Text
label="Account Key"
settingKey="settings-anticaptcha-anti_captcha_key"
></Text>
</CollapseBox>
<CollapseBox
settingKey="settings-general-anti_captcha_provider"
on={(value) => value === "death-by-captcha"}
>
<Anchor href="https://www.deathbycaptcha.com">
DeathByCaptcha.com
</Anchor>
<Text
label="Username"
settingKey="settings-deathbycaptcha-username"
></Text>
<Password
label="Password"
settingKey="settings-deathbycaptcha-password"
></Password>
<Section header="Permissions">
<Check
label="Change file permission (chmod)"
settingKey="settings-general-chmod_enabled"
></Check>
<CollapseBox indent settingKey="settings-general-chmod_enabled">
<Text placeholder="0777" settingKey="settings-general-chmod"></Text>
<Message>Must be 4 digit octal</Message>
</CollapseBox>
</Section>
<Section header="Performance / Optimization">
@ -248,52 +289,6 @@ const SettingsSubtitlesView: FunctionComponent = () => {
Search multiple providers at once (Don't choose this on low powered
devices)
</Message>
<Check
label="Use Embedded Subtitles"
settingKey="settings-general-use_embedded_subs"
></Check>
<Message>
Use embedded subtitles in media files when determining missing ones.
</Message>
<CollapseBox indent settingKey="settings-general-use_embedded_subs">
<Check
label="Ignore Embedded PGS Subtitles"
settingKey="settings-general-ignore_pgs_subs"
></Check>
<Message>
Ignores PGS Subtitles in Embedded Subtitles detection.
</Message>
<Check
label="Ignore Embedded VobSub Subtitles"
settingKey="settings-general-ignore_vobsub_subs"
></Check>
<Message>
Ignores VobSub Subtitles in Embedded Subtitles detection.
</Message>
<Check
label="Ignore Embedded ASS Subtitles"
settingKey="settings-general-ignore_ass_subs"
></Check>
<Message>
Ignores ASS Subtitles in Embedded Subtitles detection.
</Message>
<Check
label="Show Only Desired Languages"
settingKey="settings-general-embedded_subs_show_desired"
></Check>
<Message>
Hide embedded subtitles for languages that are not currently
desired.
</Message>
<Selector
settingKey="settings-general-embedded_subtitles_parser"
settingOptions={{
onSaved: (v) => (v === undefined ? "ffprobe" : v),
}}
options={embeddedSubtitlesParserOption}
></Selector>
<Message>Embedded subtitles video parser</Message>
</CollapseBox>
<Check
label="Skip video file hash calculation"
settingKey="settings-general-skip_hashing"
@ -304,15 +299,7 @@ const SettingsSubtitlesView: FunctionComponent = () => {
search results scores.
</Message>
</Section>
<Section header="Post-Processing">
<Check
label="Encode Subtitles To UTF8"
settingKey="settings-general-utf8_encode"
></Check>
<Message>
Re-encode downloaded Subtitles to UTF8. Should be left enabled in most
case.
</Message>
<Section header="Subzero Modifications">
<Check
label="Hearing Impaired"
settingOptions={{ onLoaded: SubzeroModification("remove_HI") }}
@ -380,14 +367,8 @@ const SettingsSubtitlesView: FunctionComponent = () => {
Reverses the punctuation in right-to-left subtitles for problematic
playback devices.
</Message>
<Check
label="Permission (chmod)"
settingKey="settings-general-chmod_enabled"
></Check>
<CollapseBox indent settingKey="settings-general-chmod_enabled">
<Text placeholder="0777" settingKey="settings-general-chmod"></Text>
<Message>Must be 4 digit octal</Message>
</CollapseBox>
</Section>
<Section header="Synchronizarion / Alignement">
<Check
label="Always use Audio Track as Reference for Syncing"
settingKey="settings-subsync-force_audio"
@ -396,6 +377,31 @@ const SettingsSubtitlesView: FunctionComponent = () => {
Use the audio track as reference for syncing, instead of using the
embedded subtitle.
</Message>
<Check
label="No Fix Framerate"
settingKey="settings-subsync-no_fix_framerate"
></Check>
<Message>
If specified, subsync will not attempt to correct a framerate mismatch
between reference and subtitles.
</Message>
<Check
label="Gold-Section Search"
settingKey="settings-subsync-gss"
></Check>
<Message>
If specified, use golden-section search to try to find the optimal
framerate ratio between video and subtitles.
</Message>
<Selector
label="Max offset seconds"
options={syncMaxOffsetSecondsOptions}
settingKey="settings-subsync-max_offset_seconds"
defaultValue={60}
></Selector>
<Message>
The max allowed offset seconds for any subtitle segment.
</Message>
<Check
label="Automatic Subtitles Synchronization"
settingKey="settings-subsync-use_subsync"
@ -405,6 +411,13 @@ const SettingsSubtitlesView: FunctionComponent = () => {
subtitles.
</Message>
<CollapseBox indent settingKey="settings-subsync-use_subsync">
<MultiSelector
placeholder="Select providers..."
label="Do not sync subtitles downloaded from those providers"
clearable
options={providerOptions}
settingKey="settings-subsync-checker-blacklisted_providers"
></MultiSelector>
<Check label="Debug" settingKey="settings-subsync-debug"></Check>
<Message>
Do not actually sync the subtitles but generate a .tar.gz file to be
@ -426,6 +439,8 @@ const SettingsSubtitlesView: FunctionComponent = () => {
<Slider settingKey="settings-subsync-subsync_movie_threshold"></Slider>
</CollapseBox>
</CollapseBox>
</Section>
<Section header="Custom post-processing">
<Check
settingKey="settings-general-use_postprocessing"
label="Custom Post-Processing"

@ -1,4 +1,5 @@
import { SelectorOption } from "@/components";
import { ProviderList } from "../Providers/list";
export const hiExtensionOptions: SelectorOption<string>[] = [
{
@ -30,17 +31,6 @@ export const folderOptions: SelectorOption<string>[] = [
},
];
export const antiCaptchaOption: SelectorOption<string>[] = [
{
label: "Anti-Captcha",
value: "anti-captcha",
},
{
label: "Death by Captcha",
value: "death-by-captcha",
},
];
export const embeddedSubtitlesParserOption: SelectorOption<string>[] = [
{
label: "ffprobe (faster)",
@ -165,3 +155,29 @@ export const colorOptions: SelectorOption<string>[] = [
value: buildColor("dark-grey"),
},
];
// One selectable entry per known subtitle provider, labeled by its key.
export const providerOptions: SelectorOption<string>[] = ProviderList.map(
  ({ key }) => ({
    label: key,
    value: key,
  })
);
// Allowed values (in seconds) for the subtitle-sync maximum offset setting.
export const syncMaxOffsetSecondsOptions: SelectorOption<number>[] = [
  60, 120, 300, 600,
].map((seconds) => ({
  label: seconds.toString(),
  value: seconds,
}));

@ -146,6 +146,7 @@ export const Slider: FunctionComponent<SliderProps> = (props) => {
<MantineSlider
{...sliderProps}
marks={marks}
labelAlwaysOn
onChange={update}
value={value ?? 0}
></MantineSlider>

@ -34,7 +34,7 @@ const WantedMoviesView: FunctionComponent = () => {
accessor: "missing_subtitles",
Cell: ({ row, value }) => {
const wanted = row.original;
const { hearing_impaired: hi, radarrId } = wanted;
const { radarrId } = wanted;
const { download } = useMovieSubtitleModification();
@ -55,8 +55,8 @@ const WantedMoviesView: FunctionComponent = () => {
radarrId,
form: {
language: item.code2,
hi,
forced: false,
hi: item.hi,
forced: item.forced,
},
}
);

@ -48,7 +48,6 @@ const WantedSeriesView: FunctionComponent = () => {
accessor: "missing_subtitles",
Cell: ({ row, value }) => {
const wanted = row.original;
const hi = wanted.hearing_impaired;
const seriesId = wanted.sonarrSeriesId;
const episodeId = wanted.sonarrEpisodeId;
@ -72,8 +71,8 @@ const WantedSeriesView: FunctionComponent = () => {
episodeId,
form: {
language: item.code2,
hi,
forced: false,
hi: item.hi,
forced: item.forced,
},
}
);

@ -51,6 +51,28 @@ interface Subtitle {
path: string | null | undefined; // TODO: FIX ME!!!!!!
}
// An audio stream found inside a media file.
interface AudioTrack {
  // Stream identifier as reported by the media parser — TODO confirm format
  stream: string;
  // Human-readable track name
  name: string;
  // Track language
  language: string;
}
// A subtitle stream embedded inside a media file.
interface SubtitleTrack {
  // Stream identifier as reported by the media parser — TODO confirm format
  stream: string;
  // Human-readable track name
  name: string;
  // Track language
  language: string;
  // True when the track is flagged as forced
  forced: boolean;
  // True when the track is flagged as hearing-impaired
  hearing_impaired: boolean;
}
// A subtitle file stored on disk alongside the media file.
interface ExternalSubtitle {
  // Subtitle file name
  name: string;
  // Subtitle file path
  path: string;
  // Subtitle language
  language: string;
  // True when the subtitle is flagged as forced
  forced: boolean;
  // True when the subtitle is flagged as hearing-impaired
  hearing_impaired: boolean;
}
interface PathType {
path: string;
}
@ -149,6 +171,12 @@ declare namespace Item {
season: number;
episode: number;
};
type RefTracks = {
audio_tracks: AudioTrack[];
embedded_subtitles_tracks: SubtitleTrack[];
external_subtitles_tracks: ExternalSubtitle[];
};
}
declare namespace Wanted {

@ -41,6 +41,13 @@ declare namespace FormType {
type: "episode" | "movie";
language: string;
path: string;
forced?: PythonBoolean;
hi?: PythonBoolean;
original_format?: PythonBoolean;
reference?: string;
max_offset_seconds?: string;
no_fix_framerate?: PythonBoolean;
gss?: PythonBoolean;
}
interface DownloadSeries {

@ -114,6 +114,9 @@ declare namespace Settings {
subsync_movie_threshold: number;
debug: boolean;
force_audio: boolean;
max_offset_seconds: number;
no_fix_framerate: boolean;
gss: boolean;
}
interface Analytic {
@ -144,7 +147,6 @@ declare namespace Settings {
full_update_hour: number;
only_monitored: boolean;
series_sync: number;
episodes_sync: number;
excluded_tags: string[];
excluded_series_types: SonarrSeriesType[];
}

@ -59,6 +59,10 @@ export function filterSubtitleBy(
}
}
/**
 * Convert a JavaScript boolean into the capitalized string form
 * ("True" / "False") that the Python backend expects.
 */
export function toPython(value: boolean): PythonBoolean {
  if (value) {
    return "True";
  }
  return "False";
}
export * from "./env";
export * from "./hooks";
export * from "./validate";

@ -0,0 +1,33 @@
# This is a stub package designed to roughly emulate the _yaml
# extension module, which previously existed as a standalone module
# and has been moved into the `yaml` package namespace.
# It does not perfectly mimic its old counterpart, but should get
# close enough for anyone who's relying on it even when they shouldn't.
import yaml

# In some circumstances, the yaml module we imported may be from a different
# version, so we need to tread carefully when poking at it here (it may not
# have the attributes we expect).
if not getattr(yaml, '__with_libyaml__', False):
    from sys import version_info

    # ModuleNotFoundError only exists on Python >= 3.6; mimic the exception
    # the genuinely missing extension module would raise on each version.
    exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
    raise exc("No module named '_yaml'")
else:
    # Re-export everything from the extension module's new location.
    from yaml._yaml import *
    import warnings
    warnings.warn(
        'The _yaml extension module is now located at yaml._yaml'
        ' and its location is subject to change. To use the'
        ' LibYAML-based parser and emitter, import from `yaml`:'
        ' `from yaml import CLoader as Loader, CDumper as Dumper`.',
        DeprecationWarning
    )
    del warnings
    # Don't `del yaml` here because yaml is actually an existing
    # namespace member of _yaml.

__name__ = '_yaml'
# If the module is top-level (i.e. not a part of any specific package)
# then the attribute should be set to ''.
# https://docs.python.org/3.8/library/types.html
__package__ = ''

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -458,7 +454,7 @@ class Apprise:
logger.error(msg)
raise TypeError(msg)
if not (title or body):
if not (title or body or attach):
msg = "No message content specified to deliver"
logger.error(msg)
raise TypeError(msg)
@ -498,25 +494,29 @@ class Apprise:
# If our code reaches here, we either did not define a tag (it
# was set to None), or we did define a tag and the logic above
# determined we need to notify the service it's associated with
if server.notify_format not in conversion_body_map:
# Perform Conversion
conversion_body_map[server.notify_format] = \
convert_between(
body_format, server.notify_format, content=body)
# First we need to generate a key we will use to determine if we
# need to build our data out. Entries without are merged with
# the body at this stage.
key = server.notify_format if server.title_maxlen > 0\
else f'_{server.notify_format}'
if key not in conversion_title_map:
# Prepare our title
conversion_title_map[server.notify_format] = \
'' if not title else title
conversion_title_map[key] = '' if not title else title
# Tidy Title IF required (hence it will become part of the
# body)
if server.title_maxlen <= 0 and \
conversion_title_map[server.notify_format]:
# Conversion of title only occurs for services where the title
# is blended with the body (title_maxlen <= 0)
if conversion_title_map[key] and server.title_maxlen <= 0:
conversion_title_map[key] = convert_between(
body_format, server.notify_format,
content=conversion_title_map[key])
conversion_title_map[server.notify_format] = \
convert_between(
body_format, server.notify_format,
content=conversion_title_map[server.notify_format])
# Our body is always converted no matter what
conversion_body_map[key] = \
convert_between(
body_format, server.notify_format, content=body)
if interpret_escapes:
#
@ -526,13 +526,13 @@ class Apprise:
try:
# Added overhead required due to Python 3 Encoding Bug
# identified here: https://bugs.python.org/issue21331
conversion_body_map[server.notify_format] = \
conversion_body_map[server.notify_format]\
conversion_body_map[key] = \
conversion_body_map[key]\
.encode('ascii', 'backslashreplace')\
.decode('unicode-escape')
conversion_title_map[server.notify_format] = \
conversion_title_map[server.notify_format]\
conversion_title_map[key] = \
conversion_title_map[key]\
.encode('ascii', 'backslashreplace')\
.decode('unicode-escape')
@ -543,8 +543,8 @@ class Apprise:
raise TypeError(msg)
kwargs = dict(
body=conversion_body_map[server.notify_format],
title=conversion_title_map[server.notify_format],
body=conversion_body_map[key],
title=conversion_title_map[key],
notify_type=notify_type,
attach=attach,
body_format=body_format
@ -685,6 +685,11 @@ class Apprise:
# Placeholder - populated below
'details': None,
# Let upstream service know of the plugins that support
# attachments
'attachment_support': getattr(
plugin, 'attachment_support', False),
# Differentiate between what is a custom loaded plugin and
# which is native.
'category': getattr(plugin, 'category', None)
@ -810,6 +815,36 @@ class Apprise:
# If we reach here, then we indexed out of range
raise IndexError('list index out of range')
def __getstate__(self):
"""
Pickle Support dumps()
"""
attributes = {
'asset': self.asset,
# Prepare our URL list as we need to extract the associated tags
# and asset details associated with it
'urls': [{
'url': server.url(privacy=False),
'tag': server.tags if server.tags else None,
'asset': server.asset} for server in self.servers],
'locale': self.locale,
'debug': self.debug,
'location': self.location,
}
return attributes
def __setstate__(self, state):
"""
Pickle Support loads()
"""
self.servers = list()
self.asset = state['asset']
self.locale = state['locale']
self.location = state['location']
for entry in state['urls']:
self.add(entry['url'], asset=entry['asset'], tag=entry['tag'])
def __bool__(self):
"""
Allows the Apprise object to be wrapped in an 'if statement'.

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -33,14 +29,13 @@
import ctypes
import locale
import contextlib
import os
import re
from os.path import join
from os.path import dirname
from os.path import abspath
from .logger import logger
# Define our translation domain
DOMAIN = 'apprise'
LOCALE_DIR = abspath(join(dirname(__file__), 'i18n'))
# This gets toggled to True if we succeed
GETTEXT_LOADED = False
@ -49,51 +44,41 @@ try:
# Initialize gettext
import gettext
# install() creates a _() in our builtins
gettext.install(DOMAIN, localedir=LOCALE_DIR)
# Toggle our flag
GETTEXT_LOADED = True
except ImportError:
# gettext isn't available; no problem, just fall back to using
# the library features without multi-language support.
import builtins
builtins.__dict__['_'] = lambda x: x # pragma: no branch
# gettext isn't available; no problem; Use the library features without
# multi-language support.
pass
class LazyTranslation:
class AppriseLocale:
"""
Doesn't translate anything until str() or unicode() references
are made.
A wrapper class to gettext so that we can manipulate multiple languages
on the fly if required.
"""
def __init__(self, text, *args, **kwargs):
"""
Store our text
"""
self.text = text
super().__init__(*args, **kwargs)
# Define our translation domain
_domain = 'apprise'
def __str__(self):
return gettext.gettext(self.text)
# The path to our translations
_locale_dir = abspath(join(dirname(__file__), 'i18n'))
# Locale regular expression
_local_re = re.compile(
r'^((?P<ansii>C)|(?P<lang>([a-z]{2}))([_:](?P<country>[a-z]{2}))?)'
r'(\.(?P<enc>[a-z0-9-]+))?$', re.IGNORECASE)
# Lazy translation handling
def gettext_lazy(text):
"""
A dummy function that can be referenced
"""
return LazyTranslation(text=text)
# Define our default encoding
_default_encoding = 'utf-8'
# The function to assign `_` by default
_fn = 'gettext'
class AppriseLocale:
"""
A wrapper class to gettext so that we can manipulate multiple languages
on the fly if required.
"""
# The language we should fall back to if all else fails
_default_language = 'en'
def __init__(self, language=None):
"""
@ -110,25 +95,55 @@ class AppriseLocale:
# Get our language
self.lang = AppriseLocale.detect_language(language)
# Our mapping to our _fn
self.__fn_map = None
if GETTEXT_LOADED is False:
# We're done
return
if self.lang:
# Add language
self.add(self.lang)
def add(self, lang=None, set_default=True):
"""
Add a language to our list
"""
lang = lang if lang else self._default_language
if lang not in self._gtobjs:
# Load our gettext object and install our language
try:
self._gtobjs[self.lang] = gettext.translation(
DOMAIN, localedir=LOCALE_DIR, languages=[self.lang])
self._gtobjs[lang] = gettext.translation(
self._domain, localedir=self._locale_dir, languages=[lang],
fallback=False)
# The non-intrusive method of applying the gettext change to
# the global namespace only
self.__fn_map = getattr(self._gtobjs[lang], self._fn)
# Install our language
self._gtobjs[self.lang].install()
except FileNotFoundError:
# The translation directory does not exist
logger.debug(
'Could not load translation path: %s',
join(self._locale_dir, lang))
except IOError:
# This occurs if we can't access/load our translations
pass
# Fallback (handle case where self.lang does not exist)
if self.lang not in self._gtobjs:
self._gtobjs[self.lang] = gettext
self.__fn_map = getattr(self._gtobjs[self.lang], self._fn)
return False
logger.trace('Loaded language %s', lang)
if set_default:
logger.debug('Language set to %s', lang)
self.lang = lang
return True
@contextlib.contextmanager
def lang_at(self, lang):
def lang_at(self, lang, mapto=_fn):
"""
The syntax works as:
with at.lang_at('fr'):
@ -138,50 +153,36 @@ class AppriseLocale:
"""
if GETTEXT_LOADED is False:
# yield
yield
# Do nothing
yield None
# we're done
return
# Tidy the language
lang = AppriseLocale.detect_language(lang, detect_fallback=False)
# Now attempt to load it
try:
if lang in self._gtobjs:
if lang != self.lang:
# Install our language only if we aren't using it
# already
self._gtobjs[lang].install()
else:
self._gtobjs[lang] = gettext.translation(
DOMAIN, localedir=LOCALE_DIR, languages=[self.lang])
# Install our language
self._gtobjs[lang].install()
if lang not in self._gtobjs and not self.add(lang, set_default=False):
# Do Nothing
yield getattr(self._gtobjs[self.lang], mapto)
else:
# Yield
yield
yield getattr(self._gtobjs[lang], mapto)
except (IOError, KeyError):
# This occurs if we can't access/load our translations
# Yield regardless
yield
return
finally:
# Fall back to our previous language
if lang != self.lang and lang in self._gtobjs:
# Install our language
self._gtobjs[self.lang].install()
@property
def gettext(self):
"""
Return the current language gettext() function
return
Useful for assigning to `_`
"""
return self._gtobjs[self.lang].gettext
@staticmethod
def detect_language(lang=None, detect_fallback=True):
"""
returns the language (if it's retrievable)
Returns the language (if it's retrievable)
"""
# We want to only use the 2 character version of this language
# hence en_CA becomes en, en_US becomes en.
@ -190,6 +191,17 @@ class AppriseLocale:
# no detection enabled; we're done
return None
# Posix lookup
lookup = os.environ.get
localename = None
for variable in ('LC_ALL', 'LC_CTYPE', 'LANG', 'LANGUAGE'):
localename = lookup(variable, None)
if localename:
result = AppriseLocale._local_re.match(localename)
if result and result.group('lang'):
return result.group('lang').lower()
# Windows handling
if hasattr(ctypes, 'windll'):
windll = ctypes.windll.kernel32
try:
@ -203,11 +215,12 @@ class AppriseLocale:
# Fallback to posix detection
pass
# Built in locale library check
try:
# Detect language
lang = locale.getdefaultlocale()[0]
# Acquire our locale
lang = locale.getlocale()[0]
except ValueError as e:
except (ValueError, TypeError) as e:
# This occurs when an invalid locale was parsed from the
# environment variable. While we still return None in this
# case, we want to better notify the end user of this. Users
@ -217,9 +230,57 @@ class AppriseLocale:
'Language detection failure / {}'.format(str(e)))
return None
except TypeError:
# None is returned if the default can't be determined
# we're done in this case
return None
return None if not lang else lang[0:2].lower()
def __getstate__(self):
    """
    Pickle support: dumps()

    Returns a copy of our instance dictionary with the entries that
    cannot be pickled (the gettext translation objects and the bound
    function map) removed.
    """
    snapshot = dict(self.__dict__)
    # Drop the unpicklable entries; __setstate__() rebuilds them
    for unpicklable in ('_gtobjs', '_AppriseLocale__fn_map'):
        del snapshot[unpicklable]
    return snapshot
def __setstate__(self, state):
"""
Pickle Support loads()
"""
self.__dict__.update(state)
# Our mapping to our _fn
self.__fn_map = None
self._gtobjs = {}
self.add(state['lang'], set_default=True)
#
# Prepare our default LOCALE Singleton
#
# Module-level shared instance; LazyTranslation.__str__() resolves its
# translations through this object.
LOCALE = AppriseLocale()
class LazyTranslation:
    """
    Defers translation of the wrapped text until the object is actually
    rendered as a string via str().
    """

    def __init__(self, text, *args, **kwargs):
        """
        Store the untranslated source text for later rendering.
        """
        self.text = text
        super().__init__(*args, **kwargs)

    def __str__(self):
        # Translate on demand; fall back to the raw text when gettext
        # support was never loaded
        if not GETTEXT_LOADED:
            return self.text
        return LOCALE.gettext(self.text)
# Lazy translation handling
def gettext_lazy(text):
    """
    Wrap `text` in a LazyTranslation so it is only translated when it
    is later rendered as a string (mirrors Django's gettext_lazy).
    """
    return LazyTranslation(text=text)

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -204,7 +200,14 @@ class URLBase:
self.verify_certificate = parse_bool(kwargs.get('verify', True))
# Secure Mode
self.secure = kwargs.get('secure', False)
self.secure = kwargs.get('secure', None)
try:
if not isinstance(self.secure, bool):
# Attempt to detect
self.secure = kwargs.get('schema', '')[-1].lower() == 's'
except (TypeError, IndexError):
self.secure = False
self.host = URLBase.unquote(kwargs.get('host'))
self.port = kwargs.get('port')
@ -228,6 +231,11 @@ class URLBase:
# Always unquote the password if it exists
self.password = URLBase.unquote(self.password)
# Store our full path consistently ensuring it ends with a `/'
self.fullpath = URLBase.unquote(kwargs.get('fullpath'))
if not isinstance(self.fullpath, str) or not self.fullpath:
self.fullpath = '/'
# Store our Timeout Variables
if 'rto' in kwargs:
try:
@ -307,7 +315,36 @@ class URLBase:
arguments provied.
"""
raise NotImplementedError("url() is implimented by the child class.")
# Our default parameters
params = self.url_parameters(privacy=privacy, *args, **kwargs)
# Determine Authentication
auth = ''
if self.user and self.password:
auth = '{user}:{password}@'.format(
user=URLBase.quote(self.user, safe=''),
password=self.pprint(
self.password, privacy, mode=PrivacyMode.Secret, safe=''),
)
elif self.user:
auth = '{user}@'.format(
user=URLBase.quote(self.user, safe=''),
)
default_port = 443 if self.secure else 80
return '{schema}://{auth}{hostname}{port}{fullpath}?{params}'.format(
schema='https' if self.secure else 'http',
auth=auth,
# never encode hostname since we're expecting it to be a valid one
hostname=self.host,
port='' if self.port is None or self.port == default_port
else ':{}'.format(self.port),
fullpath=URLBase.quote(self.fullpath, safe='/')
if self.fullpath else '/',
params=URLBase.urlencode(params),
)
def __contains__(self, tags):
"""
@ -583,6 +620,33 @@ class URLBase:
"""
return (self.socket_connect_timeout, self.socket_read_timeout)
@property
def request_auth(self):
    """
    Basic-auth credentials suitable for the `auth=` keyword argument
    of requests.get() / requests.put() calls.

    Returns a (user, password) tuple when a user was configured,
    otherwise None.
    """
    if not self.user:
        return None
    return (self.user, self.password)
@property
def request_url(self):
    """
    Assemble a simple `schema://host[:port]fullpath` URL that can be
    handed directly to the requests library.
    """
    # http vs https is driven by our secure flag
    schema = 'https' if self.secure else 'http'

    # Host, optionally suffixed with an explicit port
    netloc = self.host
    if isinstance(self.port, int):
        netloc = '{}:{:d}'.format(netloc, self.port)

    return '{}://{}{}'.format(schema, netloc, self.fullpath)
def url_parameters(self, *args, **kwargs):
"""
Provides a default set of args to work with. This can greatly
@ -603,7 +667,8 @@ class URLBase:
}
@staticmethod
def parse_url(url, verify_host=True, plus_to_space=False):
def parse_url(url, verify_host=True, plus_to_space=False,
strict_port=False):
"""Parses the URL and returns it broken apart into a dictionary.
This is very specific and customized for Apprise.
@ -624,13 +689,13 @@ class URLBase:
results = parse_url(
url, default_schema='unknown', verify_host=verify_host,
plus_to_space=plus_to_space)
plus_to_space=plus_to_space, strict_port=strict_port)
if not results:
# We're done; we failed to parse our url
return results
# if our URL ends with an 's', then assume our secure flag is set.
results['secure'] = (results['schema'][-1] == 's')
# Support SSL Certificate 'verify' keyword. Default to being enabled
@ -650,6 +715,21 @@ class URLBase:
if 'user' in results['qsd']:
results['user'] = results['qsd']['user']
# parse_url() always creates a 'password' and 'user' entry in the
# results returned. Entries are set to None if they weren't specified
if results['password'] is None and 'user' in results['qsd']:
# Handle cases where the user= provided in 2 locations, we want
# the original to fall back as a being a password (if one wasn't
# otherwise defined)
# e.g.
# mailtos://PASSWORD@hostname?user=admin@mail-domain.com
# - the PASSWORD gets lost in the parse url() since a user=
# over-ride is specified.
presults = parse_url(results['url'])
if presults:
# Store our Password
results['password'] = presults['user']
# Store our socket read timeout if specified
if 'rto' in results['qsd']:
results['rto'] = results['qsd']['rto']

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -31,7 +27,7 @@
# POSSIBILITY OF SUCH DAMAGE.
__title__ = 'Apprise'
__version__ = '1.4.0'
__version__ = '1.6.0'
__author__ = 'Chris Caron'
__license__ = 'BSD'
__copywrite__ = 'Copyright (C) 2023 Chris Caron <lead2gold@gmail.com>'

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -68,7 +64,8 @@ class AttachBase(URLBase):
# set to zero (0), then no check is performed
# 1 MB = 1048576 bytes
# 5 MB = 5242880 bytes
max_file_size = 5242880
# 1 GB = 1048576000 bytes
max_file_size = 1048576000
# By default all attachments types are inaccessible.
# Developers of items identified in the attachment plugin directory

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -356,6 +352,77 @@ class ConfigBase(URLBase):
# missing and/or expired.
return True
@staticmethod
def __normalize_tag_groups(group_tags):
    """
    Used to normalize a tag assignment map which looks like:
      {
         'group': set('{tag1}', '{group1}', '{tag2}'),
         'group1': set('{tag2}', '{tag3}'),
      }

    It then normalizes it (merging groups); with respect to the above,
    the output would be:
      {
         'group': set('{tag1}', '{tag2}', '{tag3}'),
         'group1': set('{tag2}', '{tag3}'),
      }

    The map is modified in place: each group's entry is expanded to the
    plain tags it (transitively) references, and groups that resolve to
    no tags at all are removed.
    """
    # Prepare a key set list we can use
    # (the set of all names that are themselves groups)
    tag_groups = set([str(x) for x in group_tags.keys()])

    def _expand(tags, ignore=None):
        """
        Expands based on tag provided and returns a set
        this also updates the group_tags while it goes
        """
        # Prepare ourselves a return set
        results = set()
        # `ignore` tracks groups already on the expansion path so that
        # circular group references do not recurse forever
        ignore = set() if ignore is None else ignore

        # track groups
        groups = set()

        for tag in tags:
            if tag in ignore:
                continue

            # Track our groups
            groups.add(tag)

            # Store what we know is worth keeping
            # (entries that are plain tags, not group names)
            results |= group_tags[tag] - tag_groups

            # Get the referenced entries that are themselves groups
            found = group_tags[tag] & tag_groups
            if not found:
                continue

            for gtag in found:
                if gtag in ignore:
                    continue

                # Go deeper (recursion); mark the current tag as
                # in-progress so a cycle back to it is skipped
                ignore.add(tag)
                group_tags[gtag] = _expand(set([gtag]), ignore=ignore)
                results |= group_tags[gtag]

                # Pop ignore
                ignore.remove(tag)

        return results

    for tag in tag_groups:
        # Get our tags
        group_tags[tag] |= _expand(set([tag]))
        if not group_tags[tag]:
            # A group that expands to nothing is useless; warn and drop
            # it (safe: we iterate tag_groups, a separate set)
            ConfigBase.logger.warning(
                'The group {} has no tags assigned to it'.format(tag))
            del group_tags[tag]
@staticmethod
def parse_url(url, verify_host=True):
"""Parses the URL and returns it broken apart into a dictionary.
@ -541,6 +608,9 @@ class ConfigBase(URLBase):
# as additional configuration entries when loaded.
include <ConfigURL>
# Assign tag contents to a group identifier
<Group(s)>=<Tag(s)>
"""
# A list of loaded Notification Services
servers = list()
@ -549,6 +619,12 @@ class ConfigBase(URLBase):
# the include keyword
configs = list()
# Track all of the tags we want to assign later on
group_tags = {}
# Track our entries to preload
preloaded = []
# Prepare our Asset Object
asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset()
@ -556,7 +632,7 @@ class ConfigBase(URLBase):
valid_line_re = re.compile(
r'^\s*(?P<line>([;#]+(?P<comment>.*))|'
r'(\s*(?P<tags>[a-z0-9, \t_-]+)\s*=|=)?\s*'
r'(?P<url>[a-z0-9]{2,9}://.*)|'
r'((?P<url>[a-z0-9]{1,12}://.*)|(?P<assign>[a-z0-9, \t_-]+))|'
r'include\s+(?P<config>.+))?\s*$', re.I)
try:
@ -582,8 +658,13 @@ class ConfigBase(URLBase):
# otherwise.
return (list(), list())
url, config = result.group('url'), result.group('config')
if not (url or config):
# Retrieve our line
url, assign, config = \
result.group('url'), \
result.group('assign'), \
result.group('config')
if not (url or config or assign):
# Comment/empty line; do nothing
continue
@ -603,6 +684,33 @@ class ConfigBase(URLBase):
loggable_url = url if not asset.secure_logging \
else cwe312_url(url)
if assign:
groups = set(parse_list(result.group('tags'), cast=str))
if not groups:
# no tags were assigned
ConfigBase.logger.warning(
'Unparseable tag assignment - no group(s) '
'on line {}'.format(line))
continue
# Get our tags
tags = set(parse_list(assign, cast=str))
if not tags:
# no tags were assigned
ConfigBase.logger.warning(
'Unparseable tag assignment - no tag(s) to assign '
'on line {}'.format(line))
continue
# Update our tag group map
for tag_group in groups:
if tag_group not in group_tags:
group_tags[tag_group] = set()
# ensure our tag group is never included in the assignment
group_tags[tag_group] |= tags - set([tag_group])
continue
# Acquire our url tokens
results = plugins.url_to_dict(
url, secure_logging=asset.secure_logging)
@ -615,25 +723,57 @@ class ConfigBase(URLBase):
# Build a list of tags to associate with the newly added
# notifications if any were set
results['tag'] = set(parse_list(result.group('tags')))
results['tag'] = set(parse_list(result.group('tags'), cast=str))
# Set our Asset Object
results['asset'] = asset
# Store our preloaded entries
preloaded.append({
'results': results,
'line': line,
'loggable_url': loggable_url,
})
#
# Normalize Tag Groups
# - Expand Groups of Groups so that they don't exist
#
ConfigBase.__normalize_tag_groups(group_tags)
#
# URL Processing
#
for entry in preloaded:
# Point to our results entry for easier reference below
results = entry['results']
#
# Apply our tag groups if they're defined
#
for group, tags in group_tags.items():
# Detect if anything assigned to this tag also maps back to a
# group. If so we want to add the group to our list
if next((True for tag in results['tag']
if tag in tags), False):
results['tag'].add(group)
try:
# Attempt to create an instance of our plugin using the
# parsed URL information
plugin = common.NOTIFY_SCHEMA_MAP[results['schema']](**results)
plugin = common.NOTIFY_SCHEMA_MAP[
results['schema']](**results)
# Create log entry of loaded URL
ConfigBase.logger.debug(
'Loaded URL: %s', plugin.url(privacy=asset.secure_logging))
'Loaded URL: %s', plugin.url(
privacy=results['asset'].secure_logging))
except Exception as e:
# the arguments are invalid or can not be used.
ConfigBase.logger.warning(
'Could not load URL {} on line {}.'.format(
loggable_url, line))
entry['loggable_url'], entry['line']))
ConfigBase.logger.debug('Loading Exception: %s' % str(e))
continue
@ -665,6 +805,12 @@ class ConfigBase(URLBase):
# the include keyword
configs = list()
# Group Assignments
group_tags = {}
# Track our entries to preload
preloaded = []
try:
# Load our data (safely)
result = yaml.load(content, Loader=yaml.SafeLoader)
@ -746,7 +892,45 @@ class ConfigBase(URLBase):
tags = result.get('tag', None)
if tags and isinstance(tags, (list, tuple, str)):
# Store any preset tags
global_tags = set(parse_list(tags))
global_tags = set(parse_list(tags, cast=str))
#
# groups root directive
#
groups = result.get('groups', None)
if not isinstance(groups, (list, tuple)):
# Not a problem; we simply have no group entry
groups = list()
# Iterate over each group defined and store it
for no, entry in enumerate(groups):
if not isinstance(entry, dict):
ConfigBase.logger.warning(
'No assignment for group {}, entry #{}'.format(
entry, no + 1))
continue
for _groups, tags in entry.items():
for group in parse_list(_groups, cast=str):
if isinstance(tags, (list, tuple)):
_tags = set()
for e in tags:
if isinstance(e, dict):
_tags |= set(e.keys())
else:
_tags |= set(parse_list(e, cast=str))
# Final assignment
tags = _tags
else:
tags = set(parse_list(tags, cast=str))
if group not in group_tags:
group_tags[group] = tags
else:
group_tags[group] |= tags
#
# include root directive
@ -938,8 +1122,8 @@ class ConfigBase(URLBase):
# The below ensures our tags are set correctly
if 'tag' in _results:
# Tidy our list up
_results['tag'] = \
set(parse_list(_results['tag'])) | global_tags
_results['tag'] = set(
parse_list(_results['tag'], cast=str)) | global_tags
else:
# Just use the global settings
@ -965,29 +1149,59 @@ class ConfigBase(URLBase):
# Prepare our Asset Object
_results['asset'] = asset
# Now we generate our plugin
try:
# Attempt to create an instance of our plugin using the
# parsed URL information
plugin = common.\
NOTIFY_SCHEMA_MAP[_results['schema']](**_results)
# Store our preloaded entries
preloaded.append({
'results': _results,
'entry': no + 1,
'item': entry,
})
# Create log entry of loaded URL
ConfigBase.logger.debug(
'Loaded URL: {}'.format(
plugin.url(privacy=asset.secure_logging)))
#
# Normalize Tag Groups
# - Expand Groups of Groups so that they don't exist
#
ConfigBase.__normalize_tag_groups(group_tags)
except Exception as e:
# the arguments are invalid or can not be used.
ConfigBase.logger.warning(
'Could not load Apprise YAML configuration '
'entry #{}, item #{}'
.format(no + 1, entry))
ConfigBase.logger.debug('Loading Exception: %s' % str(e))
continue
#
# URL Processing
#
for entry in preloaded:
# Point to our results entry for easier reference below
results = entry['results']
#
# Apply our tag groups if they're defined
#
for group, tags in group_tags.items():
# Detect if anything assigned to this tag also maps back to a
# group. If so we want to add the group to our list
if next((True for tag in results['tag']
if tag in tags), False):
results['tag'].add(group)
# Now we generate our plugin
try:
# Attempt to create an instance of our plugin using the
# parsed URL information
plugin = common.\
NOTIFY_SCHEMA_MAP[results['schema']](**results)
# if we reach here, we successfully loaded our data
servers.append(plugin)
# Create log entry of loaded URL
ConfigBase.logger.debug(
'Loaded URL: %s', plugin.url(
privacy=results['asset'].secure_logging))
except Exception as e:
# the arguments are invalid or can not be used.
ConfigBase.logger.warning(
'Could not load Apprise YAML configuration '
'entry #{}, item #{}'
.format(entry['entry'], entry['item']))
ConfigBase.logger.debug('Loading Exception: %s' % str(e))
continue
# if we reach here, we successfully loaded our data
servers.append(plugin)
return (servers, configs)

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -28,6 +24,7 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from ..plugins.NotifyBase import NotifyBase

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -77,6 +73,9 @@ class NotifyAppriseAPI(NotifyBase):
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_apprise_api'
# Support attachments
attachment_support = True
# Depending on the number of transactions/notifications taking place, this
# could take a while. 30 seconds should be enough to perform the task
socket_read_timeout = 30.0
@ -164,10 +163,6 @@ class NotifyAppriseAPI(NotifyBase):
"""
super().__init__(**kwargs)
self.fullpath = kwargs.get('fullpath')
if not isinstance(self.fullpath, str):
self.fullpath = '/'
self.token = validate_regex(
token, *self.template_tokens['token']['regex'])
if not self.token:
@ -260,7 +255,7 @@ class NotifyAppriseAPI(NotifyBase):
attachments = []
files = []
if attach:
if attach and self.attachment_support:
for no, attachment in enumerate(attach, start=1):
# Perform some simple error checking
if not attachment:
@ -310,7 +305,10 @@ class NotifyAppriseAPI(NotifyBase):
if self.method == AppriseAPIMethod.JSON:
headers['Content-Type'] = 'application/json'
payload['attachments'] = attachments
if attachments:
payload['attachments'] = attachments
payload = dumps(payload)
if self.__tags:
@ -328,8 +326,8 @@ class NotifyAppriseAPI(NotifyBase):
url += ':%d' % self.port
fullpath = self.fullpath.strip('/')
url += '/{}/'.format(fullpath) if fullpath else '/'
url += 'notify/{}'.format(self.token)
url += '{}'.format('/' + fullpath) if fullpath else ''
url += '/notify/{}'.format(self.token)
# Some entries can not be over-ridden
headers.update({

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -127,10 +123,10 @@ class NotifyBark(NotifyBase):
# Define object templates
templates = (
'{schema}://{host}/{targets}',
'{schema}://{host}:{port}/{targets}',
'{schema}://{user}:{password}@{host}/{targets}',
'{schema}://{user}:{password}@{host}:{port}/{targets}',
'{schema}://{user}:{password}@{host}/{targets}',
)
# Define our template arguments
@ -163,6 +159,7 @@ class NotifyBark(NotifyBase):
'targets': {
'name': _('Targets'),
'type': 'list:string',
'required': True,
},
})

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -139,6 +135,18 @@ class NotifyBase(URLBase):
# Default Overflow Mode
overflow_mode = OverflowMode.UPSTREAM
# Support Attachments; this defaults to being disabled.
# Since apprise allows you to send attachments without a body or title
# defined, by letting Apprise know the plugin won't support attachments
# up front, it can quickly pass over and ignore calls to these end points.
# You must set this to true if your application can handle attachments.
# You must also consider a flow change to your notification if this is set
# to True as well as now there will be cases where both the body and title
# may not be set. There will never be a case where a body, or attachment
# isn't set in the same call to your notify() function.
attachment_support = False
# Default Title HTML Tagging
# When a title is specified for a notification service that doesn't accept
# titles, by default apprise tries to give a plesant view and convert the
@ -316,7 +324,7 @@ class NotifyBase(URLBase):
the_cors = (do_send(**kwargs2) for kwargs2 in send_calls)
return all(await asyncio.gather(*the_cors))
def _build_send_calls(self, body, title=None,
def _build_send_calls(self, body=None, title=None,
notify_type=NotifyType.INFO, overflow=None,
attach=None, body_format=None, **kwargs):
"""
@ -339,6 +347,28 @@ class NotifyBase(URLBase):
# bad attachments
raise
# Handle situations where the body is None
body = '' if not body else body
elif not (body or attach):
# If there is not an attachment at the very least, a body must be
# present
msg = "No message body or attachment was specified."
self.logger.warning(msg)
raise TypeError(msg)
if not body and not self.attachment_support:
# If no body was specified, then we know that an attachment
# was. This is logic checked earlier in the code.
#
# Knowing this, if the plugin itself doesn't support sending
# attachments, there is nothing further to do here, just move
# along.
msg = f"{self.service_name} does not support attachments; " \
" service skipped"
self.logger.warning(msg)
raise TypeError(msg)
# Handle situations where the title is None
title = '' if not title else title

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -151,6 +147,12 @@ class NotifyBoxcar(NotifyBase):
'to': {
'alias_of': 'targets',
},
'access': {
'alias_of': 'access_key',
},
'secret': {
'alias_of': 'secret_key',
},
})
def __init__(self, access, secret, targets=None, include_image=True,
@ -234,8 +236,7 @@ class NotifyBoxcar(NotifyBase):
if title:
payload['aps']['@title'] = title
if body:
payload['aps']['alert'] = body
payload['aps']['alert'] = body
if self._tags:
payload['tags'] = {'or': self._tags}
@ -381,6 +382,16 @@ class NotifyBoxcar(NotifyBase):
results['targets'] += \
NotifyBoxcar.parse_list(results['qsd'].get('to'))
# Access
if 'access' in results['qsd'] and results['qsd']['access']:
results['access'] = NotifyBoxcar.unquote(
results['qsd']['access'].strip())
# Secret
if 'secret' in results['qsd'] and results['qsd']['secret']:
results['secret'] = NotifyBoxcar.unquote(
results['qsd']['secret'].strip())
# Include images with our message
results['include_image'] = \
parse_bool(results['qsd'].get('image', True))

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -121,11 +117,13 @@ class NotifyBulkSMS(NotifyBase):
'user': {
'name': _('User Name'),
'type': 'string',
'required': True,
},
'password': {
'name': _('Password'),
'type': 'string',
'private': True,
'required': True,
},
'target_phone': {
'name': _('Target Phone No'),
@ -144,6 +142,7 @@ class NotifyBulkSMS(NotifyBase):
'targets': {
'name': _('Targets'),
'type': 'list:string',
'required': True,
},
})

@ -0,0 +1,460 @@
# -*- coding: utf-8 -*-
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Sign-up with https://burstsms.com/
#
# Define your API Secret here and acquire your API Key
# - https://can.transmitsms.com/profile
#
import requests
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import is_phone_no
from ..utils import parse_phone_no
from ..utils import parse_bool
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
class BurstSMSCountryCode:
    """
    The 2-letter (lower-case) country codes the Burst SMS API accepts for
    routing; these are the only values NotifyBurstSMS permits for its
    ``country`` option.
    """
    # Australia
    AU = 'au'

    # New Zealand
    NZ = 'nz'

    # United Kingdom (note: the API expects ISO code 'gb', not 'uk')
    UK = 'gb'

    # United States
    US = 'us'
# The complete set of country codes accepted by NotifyBurstSMS; used to
# validate the user-supplied ``country`` value at initialization time.
BURST_SMS_COUNTRY_CODES = (
    BurstSMSCountryCode.AU,
    BurstSMSCountryCode.NZ,
    BurstSMSCountryCode.UK,
    BurstSMSCountryCode.US,
)
class NotifyBurstSMS(NotifyBase):
    """
    A wrapper for Burst SMS (Transmit SMS) Notifications.

    Authenticates with an API key/secret pair (sent as HTTP basic auth)
    and delivers SMS messages from a registered Sender ID to one or more
    phone numbers through the send-sms.json endpoint.
    """

    # The default descriptive name associated with the Notification
    service_name = 'Burst SMS'

    # The services URL
    service_url = 'https://burstsms.com/'

    # The default protocol
    secure_protocol = 'burstsms'

    # The maximum amount of SMS Messages that can reside within a single
    # batch transfer based on:
    # https://developer.transmitsms.com/#74911cf8-dec6-4319-a499-7f535a7fd08c
    default_batch_size = 500

    # A URL that takes you to the setup/help of the specific protocol
    setup_url = 'https://github.com/caronc/apprise/wiki/Notify_burst_sms'

    # Burst SMS uses the http protocol with JSON requests
    notify_url = 'https://api.transmitsms.com/send-sms.json'

    # The maximum length of the body
    body_maxlen = 160

    # A title can not be used for SMS Messages.  Setting this to zero will
    # cause any title (if defined) to get placed into the message body.
    title_maxlen = 0

    # Define object templates
    templates = (
        '{schema}://{apikey}:{secret}@{sender_id}/{targets}',
    )

    # Define our template tokens
    template_tokens = dict(NotifyBase.template_tokens, **{
        'apikey': {
            'name': _('API Key'),
            'type': 'string',
            'required': True,
            'regex': (r'^[a-z0-9]+$', 'i'),
            'private': True,
        },
        'secret': {
            'name': _('API Secret'),
            'type': 'string',
            'private': True,
            'required': True,
            'regex': (r'^[a-z0-9]+$', 'i'),
        },
        'sender_id': {
            'name': _('Sender ID'),
            'type': 'string',
            'required': True,
            'map_to': 'source',
        },
        'target_phone': {
            'name': _('Target Phone No'),
            'type': 'string',
            'prefix': '+',
            'regex': (r'^[0-9\s)(+-]+$', 'i'),
            'map_to': 'targets',
        },
        'targets': {
            'name': _('Targets'),
            'type': 'list:string',
            'required': True,
        },
    })

    # Define our template arguments
    template_args = dict(NotifyBase.template_args, **{
        'to': {
            'alias_of': 'targets',
        },
        'from': {
            'alias_of': 'sender_id',
        },
        'key': {
            'alias_of': 'apikey',
        },
        'secret': {
            'alias_of': 'secret',
        },
        'country': {
            'name': _('Country'),
            'type': 'choice:string',
            'values': BURST_SMS_COUNTRY_CODES,
            'default': BurstSMSCountryCode.US,
        },
        # Validity
        # Expire a message send if it is undeliverable (defined in minutes)
        # If set to Zero (0); this is the default and sets the max validity
        # period
        'validity': {
            'name': _('validity'),
            'type': 'int',
            'default': 0
        },
        'batch': {
            'name': _('Batch Mode'),
            'type': 'bool',
            'default': False,
        },
    })

    def __init__(self, apikey, secret, source, targets=None, country=None,
                 validity=None, batch=None, **kwargs):
        """
        Initialize Burst SMS Object.

        :param apikey: Burst SMS API Key (alphanumeric)
        :param secret: Burst SMS API Secret (alphanumeric)
        :param source: registered Sender ID the messages originate from
        :param targets: one or more destination phone numbers
        :param country: 2-letter country code (see BURST_SMS_COUNTRY_CODES);
            matched case-insensitively, defaults to 'us'
        :param validity: minutes before an undeliverable message expires
            (0, the default, uses the service maximum)
        :param batch: when True, deliver in batches of default_batch_size
        :raises TypeError: if any credential, country, validity or sender
            value is invalid
        """
        super().__init__(**kwargs)

        # API Key (associated with project)
        self.apikey = validate_regex(
            apikey, *self.template_tokens['apikey']['regex'])
        if not self.apikey:
            msg = 'An invalid Burst SMS API Key ' \
                  '({}) was specified.'.format(apikey)
            self.logger.warning(msg)
            raise TypeError(msg)

        # API Secret (associated with project)
        self.secret = validate_regex(
            secret, *self.template_tokens['secret']['regex'])
        if not self.secret:
            msg = 'An invalid Burst SMS API Secret ' \
                  '({}) was specified.'.format(secret)
            self.logger.warning(msg)
            raise TypeError(msg)

        if not country:
            self.country = self.template_args['country']['default']

        else:
            self.country = country.lower().strip()
            # Bug fix: validate the normalized (lower-cased, stripped)
            # value; previously the raw input was tested which incorrectly
            # rejected otherwise-valid entries such as 'US' or ' au '.
            if self.country not in BURST_SMS_COUNTRY_CODES:
                msg = 'An invalid Burst SMS country ' \
                      '({}) was specified.'.format(country)
                self.logger.warning(msg)
                raise TypeError(msg)

        # Set our Validity
        self.validity = self.template_args['validity']['default']
        if validity:
            try:
                self.validity = int(validity)

            except (ValueError, TypeError):
                msg = 'The Burst SMS Validity specified ({}) is invalid.'\
                    .format(validity)
                self.logger.warning(msg)
                raise TypeError(msg)

        # Prepare Batch Mode Flag
        self.batch = self.template_args['batch']['default'] \
            if batch is None else batch

        # The Sender ID
        self.source = validate_regex(source)
        if not self.source:
            msg = 'The Account Sender ID specified ' \
                  '({}) is invalid.'.format(source)
            self.logger.warning(msg)
            raise TypeError(msg)

        # Parse our targets; invalid phone numbers are logged and dropped
        # rather than aborting initialization.
        self.targets = list()

        for target in parse_phone_no(targets):
            # Validate targets and drop bad ones:
            result = is_phone_no(target)
            if not result:
                self.logger.warning(
                    'Dropped invalid phone # '
                    '({}) specified.'.format(target),
                )
                continue

            # store valid phone number
            self.targets.append(result['full'])

        return

    def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
        """
        Perform Burst SMS Notification.

        Posts one HTTP request per batch of targets and returns True only
        if every batch was accepted by the service.
        """

        if not self.targets:
            self.logger.warning(
                'There are no valid Burst SMS targets to notify.')
            return False

        # error tracking (used for function return)
        has_error = False

        # Prepare our headers
        headers = {
            'User-Agent': self.app_id,
            'Accept': 'application/json',
        }

        # Prepare our authentication (HTTP basic auth: key/secret)
        auth = (self.apikey, self.secret)

        # Prepare our payload
        payload = {
            'countrycode': self.country,
            'message': body,

            # Sender ID
            'from': self.source,

            # The 'to' field gets populated in the loop below
            'to': None,
        }

        # Send in batches if identified to do so
        batch_size = 1 if not self.batch else self.default_batch_size

        # Create a copy of the targets list
        targets = list(self.targets)

        for index in range(0, len(targets), batch_size):
            # The recipients this request delivers to (consistently use the
            # local copy rather than mixing it with self.targets)
            recipients = targets[index:index + batch_size]

            # Prepare our user
            payload['to'] = ','.join(recipients)

            # Some Debug Logging
            self.logger.debug('Burst SMS POST URL: {} (cert_verify={})'.format(
                self.notify_url, self.verify_certificate))
            self.logger.debug('Burst SMS Payload: {}'.format(payload))

            # Always call throttle before any remote server i/o is made
            self.throttle()

            try:
                r = requests.post(
                    self.notify_url,
                    data=payload,
                    headers=headers,
                    auth=auth,
                    verify=self.verify_certificate,
                    timeout=self.request_timeout,
                )

                if r.status_code != requests.codes.ok:
                    # We had a problem
                    status_str = \
                        NotifyBurstSMS.http_response_code_lookup(
                            r.status_code)

                    self.logger.warning(
                        'Failed to send Burst SMS notification to {} '
                        'target(s): {}{}error={}.'.format(
                            len(recipients),
                            status_str,
                            ', ' if status_str else '',
                            r.status_code))

                    self.logger.debug(
                        'Response Details:\r\n{}'.format(r.content))

                    # Mark our failure
                    has_error = True
                    continue

                else:
                    self.logger.info(
                        'Sent Burst SMS notification to %d target(s).' %
                        len(recipients))

            except requests.RequestException as e:
                self.logger.warning(
                    'A Connection error occurred sending Burst SMS '
                    'notification to %d target(s).' %
                    len(recipients))
                self.logger.debug('Socket Exception: %s' % str(e))

                # Mark our failure
                has_error = True
                continue

        return not has_error

    def url(self, privacy=False, *args, **kwargs):
        """
        Returns the URL built dynamically based on specified arguments.
        """

        # Define any URL parameters
        params = {
            'country': self.country,
            'batch': 'yes' if self.batch else 'no',
        }

        # Only emit validity when it was explicitly set (non-zero)
        if self.validity:
            params['validity'] = str(self.validity)

        # Extend our parameters
        params.update(self.url_parameters(privacy=privacy, *args, **kwargs))

        return '{schema}://{key}:{secret}@{source}/{targets}/?{params}'.format(
            schema=self.secure_protocol,
            key=self.pprint(self.apikey, privacy, safe=''),
            secret=self.pprint(
                self.secret, privacy, mode=PrivacyMode.Secret, safe=''),
            source=NotifyBurstSMS.quote(self.source, safe=''),
            targets='/'.join(
                [NotifyBurstSMS.quote(x, safe='') for x in self.targets]),
            params=NotifyBurstSMS.urlencode(params))

    def __len__(self):
        """
        Returns the number of notifications (batches) this object will
        generate; always at least 1.
        """
        #
        # Factor batch into calculation
        #
        batch_size = 1 if not self.batch else self.default_batch_size
        targets = len(self.targets)
        if batch_size > 1:
            # Ceiling division: a partial batch still counts as one request
            targets = int(targets / batch_size) + \
                (1 if targets % batch_size else 0)

        return targets if targets > 0 else 1

    @staticmethod
    def parse_url(url):
        """
        Parses the URL and returns enough arguments that can allow
        us to re-instantiate this object.
        """
        results = NotifyBase.parse_url(url, verify_host=False)
        if not results:
            # We're done early as we couldn't load the results
            return results

        # The hostname is our source (Sender ID)
        results['source'] = NotifyBurstSMS.unquote(results['host'])

        # Get any remaining targets
        results['targets'] = NotifyBurstSMS.split_path(results['fullpath'])

        # Get our account_side and auth_token from the user/pass config
        results['apikey'] = NotifyBurstSMS.unquote(results['user'])
        results['secret'] = NotifyBurstSMS.unquote(results['password'])

        # API Key (the ?key= argument overrides the user component)
        if 'key' in results['qsd'] and len(results['qsd']['key']):
            # Extract the API Key from an argument
            results['apikey'] = \
                NotifyBurstSMS.unquote(results['qsd']['key'])

        # API Secret (the ?secret= argument overrides the password)
        if 'secret' in results['qsd'] and len(results['qsd']['secret']):
            # Extract the API Secret from an argument
            results['secret'] = \
                NotifyBurstSMS.unquote(results['qsd']['secret'])

        # Support the 'from' and 'source' variable so that we can support
        # targets this way too.
        # The 'from' makes it easier to use yaml configuration
        if 'from' in results['qsd'] and len(results['qsd']['from']):
            results['source'] = \
                NotifyBurstSMS.unquote(results['qsd']['from'])
        if 'source' in results['qsd'] and len(results['qsd']['source']):
            results['source'] = \
                NotifyBurstSMS.unquote(results['qsd']['source'])

        # Support country
        if 'country' in results['qsd'] and len(results['qsd']['country']):
            results['country'] = \
                NotifyBurstSMS.unquote(results['qsd']['country'])

        # Support validity value
        if 'validity' in results['qsd'] and len(results['qsd']['validity']):
            results['validity'] = \
                NotifyBurstSMS.unquote(results['qsd']['validity'])

        # Get Batch Mode Flag
        if 'batch' in results['qsd'] and len(results['qsd']['batch']):
            results['batch'] = parse_bool(results['qsd']['batch'])

        # Support the 'to' variable so that we can support rooms this way too
        # The 'to' makes it easier to use yaml configuration
        if 'to' in results['qsd'] and len(results['qsd']['to']):
            results['targets'] += \
                NotifyBurstSMS.parse_phone_no(results['qsd']['to'])

        return results

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -114,6 +110,7 @@ class NotifyD7Networks(NotifyBase):
'targets': {
'name': _('Targets'),
'type': 'list:string',
'required': True,
},
})

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -103,13 +99,18 @@ class NotifyDingTalk(NotifyBase):
'regex': (r'^[a-z0-9]+$', 'i'),
},
'secret': {
'name': _('Token'),
'name': _('Secret'),
'type': 'string',
'private': True,
'regex': (r'^[a-z0-9]+$', 'i'),
},
'targets': {
'target_phone_no': {
'name': _('Target Phone No'),
'type': 'string',
'map_to': 'targets',
},
'targets': {
'name': _('Targets'),
'type': 'list:string',
},
})

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -50,6 +46,9 @@
import re
import requests
from json import dumps
from datetime import timedelta
from datetime import datetime
from datetime import timezone
from .NotifyBase import NotifyBase
from ..common import NotifyImageSize
@ -81,9 +80,23 @@ class NotifyDiscord(NotifyBase):
# Discord Webhook
notify_url = 'https://discord.com/api/webhooks'
# Support attachments
attachment_support = True
# Allows the user to specify the NotifyImageSize object
image_size = NotifyImageSize.XY_256
# Discord is kind enough to return how many more requests we're allowed to
# continue to make within it's header response as:
# X-RateLimit-Reset: The epoc time (in seconds) we can expect our
# rate-limit to be reset.
# X-RateLimit-Remaining: an integer identifying how many requests we're
# still allow to make.
request_rate_per_sec = 0
# Taken right from google.auth.helpers:
clock_skew = timedelta(seconds=10)
# The maximum allowable characters allowed in the body per message
body_maxlen = 2000
@ -135,6 +148,13 @@ class NotifyDiscord(NotifyBase):
'name': _('Avatar URL'),
'type': 'string',
},
'href': {
'name': _('URL'),
'type': 'string',
},
'url': {
'alias_of': 'href',
},
# Send a message to the specified thread within a webhook's channel.
# The thread will automatically be unarchived.
'thread': {
@ -166,7 +186,8 @@ class NotifyDiscord(NotifyBase):
def __init__(self, webhook_id, webhook_token, tts=False, avatar=True,
footer=False, footer_logo=True, include_image=False,
fields=True, avatar_url=None, thread=None, **kwargs):
fields=True, avatar_url=None, href=None, thread=None,
**kwargs):
"""
Initialize Discord Object
@ -215,6 +236,15 @@ class NotifyDiscord(NotifyBase):
# dynamically generated avatar url images
self.avatar_url = avatar_url
# A URL to have the title link to
self.href = href
# For Tracking Purposes
self.ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None)
# Default to 1.0
self.ratelimit_remaining = 1.0
return
def send(self, body, title='', notify_type=NotifyType.INFO, attach=None,
@ -235,61 +265,6 @@ class NotifyDiscord(NotifyBase):
# Acquire image_url
image_url = self.image_url(notify_type)
# our fields variable
fields = []
if self.notify_format == NotifyFormat.MARKDOWN:
# Use embeds for payload
payload['embeds'] = [{
'author': {
'name': self.app_id,
'url': self.app_url,
},
'title': title,
'description': body,
# Our color associated with our notification
'color': self.color(notify_type, int),
}]
if self.footer:
# Acquire logo URL
logo_url = self.image_url(notify_type, logo=True)
# Set Footer text to our app description
payload['embeds'][0]['footer'] = {
'text': self.app_desc,
}
if self.footer_logo and logo_url:
payload['embeds'][0]['footer']['icon_url'] = logo_url
if self.include_image and image_url:
payload['embeds'][0]['thumbnail'] = {
'url': image_url,
'height': 256,
'width': 256,
}
if self.fields:
# Break titles out so that we can sort them in embeds
description, fields = self.extract_markdown_sections(body)
# Swap first entry for description
payload['embeds'][0]['description'] = description
if fields:
# Apply our additional parsing for a better presentation
payload['embeds'][0]['fields'] = \
fields[:self.discord_max_fields]
# Remove entry from head of fields
fields = fields[self.discord_max_fields:]
else:
# not markdown
payload['content'] = \
body if not title else "{}\r\n{}".format(title, body)
if self.avatar and (image_url or self.avatar_url):
payload['avatar_url'] = \
self.avatar_url if self.avatar_url else image_url
@ -298,22 +273,84 @@ class NotifyDiscord(NotifyBase):
# Optionally override the default username of the webhook
payload['username'] = self.user
# Associate our thread_id with our message
params = {'thread_id': self.thread_id} if self.thread_id else None
if not self._send(payload, params=params):
# We failed to post our message
return False
# Process any remaining fields IF set
if fields:
payload['embeds'][0]['description'] = ''
for i in range(0, len(fields), self.discord_max_fields):
payload['embeds'][0]['fields'] = \
fields[i:i + self.discord_max_fields]
if not self._send(payload):
# We failed to post our message
return False
if body:
# our fields variable
fields = []
if self.notify_format == NotifyFormat.MARKDOWN:
# Use embeds for payload
payload['embeds'] = [{
'author': {
'name': self.app_id,
'url': self.app_url,
},
'title': title,
'description': body,
# Our color associated with our notification
'color': self.color(notify_type, int),
}]
if self.href:
payload['embeds'][0]['url'] = self.href
if self.footer:
# Acquire logo URL
logo_url = self.image_url(notify_type, logo=True)
# Set Footer text to our app description
payload['embeds'][0]['footer'] = {
'text': self.app_desc,
}
if self.footer_logo and logo_url:
payload['embeds'][0]['footer']['icon_url'] = logo_url
if self.include_image and image_url:
payload['embeds'][0]['thumbnail'] = {
'url': image_url,
'height': 256,
'width': 256,
}
if self.fields:
# Break titles out so that we can sort them in embeds
description, fields = self.extract_markdown_sections(body)
# Swap first entry for description
payload['embeds'][0]['description'] = description
if fields:
# Apply our additional parsing for a better
# presentation
payload['embeds'][0]['fields'] = \
fields[:self.discord_max_fields]
# Remove entry from head of fields
fields = fields[self.discord_max_fields:]
if attach:
else:
# not markdown
payload['content'] = \
body if not title else "{}\r\n{}".format(title, body)
if not self._send(payload, params=params):
# We failed to post our message
return False
# Process any remaining fields IF set
if fields:
payload['embeds'][0]['description'] = ''
for i in range(0, len(fields), self.discord_max_fields):
payload['embeds'][0]['fields'] = \
fields[i:i + self.discord_max_fields]
if not self._send(payload):
# We failed to post our message
return False
if attach and self.attachment_support:
# Update our payload; the idea is to preserve it's other detected
# and assigned values for re-use here too
payload.update({
@ -336,14 +373,15 @@ class NotifyDiscord(NotifyBase):
for attachment in attach:
self.logger.info(
'Posting Discord Attachment {}'.format(attachment.name))
if not self._send(payload, attach=attachment):
if not self._send(payload, params=params, attach=attachment):
# We failed to post our message
return False
# Otherwise return
return True
def _send(self, payload, attach=None, params=None, **kwargs):
def _send(self, payload, attach=None, params=None, rate_limit=1,
**kwargs):
"""
Wrapper to the requests (post) object
"""
@ -365,8 +403,25 @@ class NotifyDiscord(NotifyBase):
))
self.logger.debug('Discord Payload: %s' % str(payload))
# Always call throttle before any remote server i/o is made
self.throttle()
# By default set wait to None
wait = None
if self.ratelimit_remaining <= 0.0:
# Determine how long we should wait for or if we should wait at
# all. This isn't fool-proof because we can't be sure the client
# time (calling this script) is completely synced up with the
# Discord server. One would hope we're on NTP and our clocks are
# the same allowing this to role smoothly:
now = datetime.now(timezone.utc).replace(tzinfo=None)
if now < self.ratelimit_reset:
# We need to throttle for the difference in seconds
wait = abs(
(self.ratelimit_reset - now + self.clock_skew)
.total_seconds())
# Always call throttle before any remote server i/o is made;
self.throttle(wait=wait)
# Perform some simple error checking
if isinstance(attach, AttachBase):
@ -401,6 +456,22 @@ class NotifyDiscord(NotifyBase):
verify=self.verify_certificate,
timeout=self.request_timeout,
)
# Handle rate limiting (if specified)
try:
# Store our rate limiting (if provided)
self.ratelimit_remaining = \
float(r.headers.get(
'X-RateLimit-Remaining'))
self.ratelimit_reset = datetime.fromtimestamp(
int(r.headers.get('X-RateLimit-Reset')),
timezone.utc).replace(tzinfo=None)
except (TypeError, ValueError):
# This is returned if we could not retrieve this
# information gracefully accept this state and move on
pass
if r.status_code not in (
requests.codes.ok, requests.codes.no_content):
@ -408,6 +479,20 @@ class NotifyDiscord(NotifyBase):
status_str = \
NotifyBase.http_response_code_lookup(r.status_code)
if r.status_code == requests.codes.too_many_requests \
and rate_limit > 0:
# handle rate limiting
self.logger.warning(
'Discord rate limiting in effect; '
'blocking for %.2f second(s)',
self.ratelimit_remaining)
# Try one more time before failing
return self._send(
payload=payload, attach=attach, params=params,
rate_limit=rate_limit - 1, **kwargs)
self.logger.warning(
'Failed to send {}to Discord notification: '
'{}{}error={}.'.format(
@ -465,6 +550,9 @@ class NotifyDiscord(NotifyBase):
if self.avatar_url:
params['avatar_url'] = self.avatar_url
if self.href:
params['href'] = self.href
if self.thread_id:
params['thread'] = self.thread_id
@ -536,10 +624,23 @@ class NotifyDiscord(NotifyBase):
results['avatar_url'] = \
NotifyDiscord.unquote(results['qsd']['avatar_url'])
# Extract url if it was specified
if 'href' in results['qsd']:
results['href'] = \
NotifyDiscord.unquote(results['qsd']['href'])
elif 'url' in results['qsd']:
results['href'] = \
NotifyDiscord.unquote(results['qsd']['url'])
# Markdown is implied
results['format'] = NotifyFormat.MARKDOWN
# Extract thread id if it was specified
if 'thread' in results['qsd']:
results['thread'] = \
NotifyDiscord.unquote(results['qsd']['thread'])
# Markdown is implied
results['format'] = NotifyFormat.MARKDOWN
return results

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -43,6 +39,7 @@ from email import charset
from socket import error as SocketError
from datetime import datetime
from datetime import timezone
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
@ -340,6 +337,9 @@ class NotifyEmail(NotifyBase):
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_email'
# Support attachments
attachment_support = True
# Default Notify Format
notify_format = NotifyFormat.HTML
@ -384,8 +384,13 @@ class NotifyEmail(NotifyBase):
'min': 1,
'max': 65535,
},
'target_email': {
'name': _('Target Email'),
'type': 'string',
'map_to': 'targets',
},
'targets': {
'name': _('Target Emails'),
'name': _('Targets'),
'type': 'list:string',
},
})
@ -764,7 +769,7 @@ class NotifyEmail(NotifyBase):
else:
base = MIMEText(body, 'plain', 'utf-8')
if attach:
if attach and self.attachment_support:
mixed = MIMEMultipart("mixed")
mixed.attach(base)
# Now store our attachments
@ -805,7 +810,8 @@ class NotifyEmail(NotifyBase):
base['To'] = formataddr((to_name, to_addr), charset='utf-8')
base['Message-ID'] = make_msgid(domain=self.smtp_host)
base['Date'] = \
datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000")
datetime.now(timezone.utc)\
.strftime("%a, %d %b %Y %H:%M:%S +0000")
base['X-Application'] = self.app_id
if cc:
@ -1030,6 +1036,10 @@ class NotifyEmail(NotifyBase):
# add one to ourselves
results['targets'] = NotifyEmail.split_path(results['fullpath'])
# Attempt to detect 'to' email address
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'].append(results['qsd']['to'])
# Attempt to detect 'from' email address
if 'from' in results['qsd'] and len(results['qsd']['from']):
from_addr = NotifyEmail.unquote(results['qsd']['from'])
@ -1048,10 +1058,6 @@ class NotifyEmail(NotifyBase):
# Extract from name to associate with from address
from_addr = NotifyEmail.unquote(results['qsd']['name'])
# Attempt to detect 'to' email address
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'].append(results['qsd']['to'])
# Store SMTP Host if specified
if 'smtp' in results['qsd'] and len(results['qsd']['smtp']):
# Extract the smtp server

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -157,7 +153,6 @@ class NotifyFCM(NotifyBase):
'project': {
'name': _('Project ID'),
'type': 'string',
'required': True,
},
'target_device': {
'name': _('Target Device'),
@ -173,6 +168,7 @@ class NotifyFCM(NotifyBase):
'targets': {
'name': _('Targets'),
'type': 'list:string',
'required': True,
},
})

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -47,6 +43,7 @@ from cryptography.hazmat.primitives import asymmetric
from cryptography.exceptions import UnsupportedAlgorithm
from datetime import datetime
from datetime import timedelta
from datetime import timezone
from json.decoder import JSONDecodeError
from urllib.parse import urlencode as _urlencode
@ -106,7 +103,7 @@ class GoogleOAuth:
# Our keys we build using the provided content
self.__refresh_token = None
self.__access_token = None
self.__access_token_expiry = datetime.utcnow()
self.__access_token_expiry = datetime.now(timezone.utc)
def load(self, path):
"""
@ -117,7 +114,7 @@ class GoogleOAuth:
self.content = None
self.private_key = None
self.__access_token = None
self.__access_token_expiry = datetime.utcnow()
self.__access_token_expiry = datetime.now(timezone.utc)
try:
with open(path, mode="r", encoding=self.encoding) as fp:
@ -199,7 +196,7 @@ class GoogleOAuth:
'token with.')
return None
if self.__access_token_expiry > datetime.utcnow():
if self.__access_token_expiry > datetime.now(timezone.utc):
# Return our no-expired key
return self.__access_token
@ -209,7 +206,7 @@ class GoogleOAuth:
key_identifier = self.content.get('private_key_id')
# Generate our Assertion
now = datetime.utcnow()
now = datetime.now(timezone.utc)
expiry = now + self.access_token_lifetime_sec
payload = {
@ -301,7 +298,7 @@ class GoogleOAuth:
if 'expires_in' in response:
delta = timedelta(seconds=int(response['expires_in']))
self.__access_token_expiry = \
delta + datetime.utcnow() - self.clock_skew
delta + datetime.now(timezone.utc) - self.clock_skew
else:
# Allow some grace before we expire

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -97,8 +93,8 @@ class NotifyFlock(NotifyBase):
# Define object templates
templates = (
'{schema}://{token}',
'{schema}://{user}@{token}',
'{schema}://{user}@{token}/{targets}',
'{schema}://{botname}@{token}',
'{schema}://{botname}@{token}/{targets}',
'{schema}://{token}/{targets}',
)
@ -111,9 +107,10 @@ class NotifyFlock(NotifyBase):
'private': True,
'required': True,
},
'user': {
'botname': {
'name': _('Bot Name'),
'type': 'string',
'map_to': 'user',
},
'to_user': {
'name': _('To User ID'),

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# BSD 3-Clause License
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
@ -14,10 +14,6 @@
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
@ -99,6 +95,9 @@ class NotifyForm(NotifyBase):
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_Custom_Form'
# Support attachments
attachment_support = True
# Allows the user to specify the NotifyImageSize object
image_size = NotifyImageSize.XY_128
@ -345,7 +344,7 @@ class NotifyForm(NotifyBase):
# Track our potential attachments
files = []
if attach:
if attach and self.attachment_support:
for no, attachment in enumerate(attach, start=1):
# Perform some simple error checking
if not attachment:

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save