Merge branch 'development' of https://github.com/morpheus65535/bazarr into development

pull/2687/head
JayZed 8 months ago
commit d875dc7733

@ -1,4 +1,5 @@
bazarr
custom_libs
frontend/build
libs
bazarr.py

@ -27,7 +27,7 @@ jobs:
uses: actions/checkout@v4
- name: Cache node_modules
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: "${{ env.UI_DIRECTORY }}/node_modules"
key: ${{ runner.os }}-modules-${{ hashFiles('**/package-lock.json') }}
@ -62,7 +62,7 @@ jobs:
run: npm run build:ci
working-directory: ${{ env.UI_DIRECTORY }}
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: ${{ env.UI_ARTIFACT_NAME }}
path: "${{ env.UI_DIRECTORY }}/build"
@ -81,7 +81,7 @@ jobs:
python-version: "3.8"
- name: Install UI
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: ${{ env.UI_ARTIFACT_NAME }}
path: "${{ env.UI_DIRECTORY }}/build"

@ -29,7 +29,7 @@ jobs:
git fetch --depth ${{ env.FETCH_DEPTH }} --tags
- name: Cache node_modules
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: "${{ env.UI_DIRECTORY }}/node_modules"
key: ${{ runner.os }}-modules-${{ hashFiles('**/package-lock.json') }}

@ -31,7 +31,7 @@ jobs:
run: git config --global user.name "github-actions"
- name: Cache node_modules
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: "${{ env.UI_DIRECTORY }}/node_modules"
key: ${{ runner.os }}-modules-${{ hashFiles('**/package-lock.json') }}

@ -6,10 +6,13 @@ import signal
import subprocess
import sys
import time
import atexit
from bazarr.app.get_args import args
from bazarr.literals import *
def exit_program(status_code):
print(f'Bazarr exited with status code {status_code}.')
raise SystemExit(status_code)
def check_python_version():
python_version = platform.python_version_tuple()
@ -19,7 +22,7 @@ def check_python_version():
if int(python_version[0]) < minimum_py3_tuple[0]:
print("Python " + minimum_py3_str + " or greater required. "
"Current version is " + platform.python_version() + ". Please upgrade Python.")
sys.exit(1)
exit_program(EXIT_PYTHON_UPGRADE_NEEDED)
elif int(python_version[0]) == 3 and int(python_version[1]) > 11:
print("Python version greater than 3.11.x is unsupported. Current version is " + platform.python_version() +
". Keep in mind that even if it works, you're on your own.")
@ -27,7 +30,7 @@ def check_python_version():
(int(python_version[0]) != minimum_py3_tuple[0]):
print("Python " + minimum_py3_str + " or greater required. "
"Current version is " + platform.python_version() + ". Please upgrade Python.")
sys.exit(1)
exit_program(EXIT_PYTHON_UPGRADE_NEEDED)
def get_python_path():
@ -49,55 +52,77 @@ check_python_version()
dir_name = os.path.dirname(__file__)
def start_bazarr():
script = [get_python_path(), "-u", os.path.normcase(os.path.join(dir_name, 'bazarr', 'main.py'))] + sys.argv[1:]
ep = subprocess.Popen(script, stdout=None, stderr=None, stdin=subprocess.DEVNULL)
print(f"Bazarr starting child process with PID {ep.pid}...")
return ep
def terminate_child():
print(f"Terminating child process with PID {child_process.pid}")
child_process.terminate()
def end_child_process(ep):
def get_stop_status_code(input_file):
try:
if os.name != 'nt':
try:
ep.send_signal(signal.SIGINT)
except ProcessLookupError:
pass
else:
import win32api
import win32con
with open(input_file,'r') as file:
# read status code from file, if it exists
line = file.readline()
try:
win32api.GenerateConsoleCtrlEvent(win32con.CTRL_C_EVENT, ep.pid)
except KeyboardInterrupt:
pass
status_code = int(line)
except (ValueError, TypeError):
status_code = EXIT_NORMAL
file.close()
except:
ep.terminate()
def start_bazarr():
script = [get_python_path(), "-u", os.path.normcase(os.path.join(dir_name, 'bazarr', 'main.py'))] + sys.argv[1:]
ep = subprocess.Popen(script, stdout=None, stderr=None, stdin=subprocess.DEVNULL)
atexit.register(end_child_process, ep=ep)
signal.signal(signal.SIGTERM, lambda signal_no, frame: end_child_process(ep))
status_code = EXIT_NORMAL
return status_code
def check_status():
global child_process
if os.path.exists(stopfile):
status_code = get_stop_status_code(stopfile)
try:
print(f"Deleting stop file...")
os.remove(stopfile)
except Exception:
except Exception as e:
print('Unable to delete stop file.')
finally:
print('Bazarr exited.')
sys.exit(0)
terminate_child()
exit_program(status_code)
if os.path.exists(restartfile):
try:
print(f"Deleting restart file...")
os.remove(restartfile)
except Exception:
print('Unable to delete restart file.')
else:
print("Bazarr is restarting...")
start_bazarr()
finally:
terminate_child()
print(f"Bazarr is restarting...")
child_process = start_bazarr()
def interrupt_handler(signum, frame):
# catch and ignore keyboard interrupt Ctrl-C
# the child process Server object will catch SIGINT and perform an orderly shutdown
global interrupted
if not interrupted:
# ignore user hammering Ctrl-C; we heard you the first time!
interrupted = True
print('Handling keyboard interrupt...')
else:
print(f"Stop doing that! I heard you the first time!")
if __name__ == '__main__':
restartfile = os.path.join(args.config_dir, 'bazarr.restart')
stopfile = os.path.join(args.config_dir, 'bazarr.stop')
interrupted = False
signal.signal(signal.SIGINT, interrupt_handler)
restartfile = os.path.join(args.config_dir, FILE_RESTART)
stopfile = os.path.join(args.config_dir, FILE_STOP)
os.environ[ENV_STOPFILE] = stopfile
os.environ[ENV_RESTARTFILE] = restartfile
# Cleanup leftover files
try:
@ -111,18 +136,14 @@ if __name__ == '__main__':
pass
# Initial start of main bazarr process
print("Bazarr starting...")
start_bazarr()
child_process = start_bazarr()
# Keep the script running forever until stop is requested through term or keyboard interrupt
# Keep the script running forever until stop is requested through SIGTERM, the special files or keyboard interrupt
while True:
check_status()
try:
if sys.platform.startswith('win'):
time.sleep(5)
else:
os.wait()
time.sleep(1)
time.sleep(5)
except (KeyboardInterrupt, SystemExit, ChildProcessError):
print('Bazarr exited.')
sys.exit(0)
# this code should never be reached if signal handling is working properly
print(f'Bazarr exited main script file via keyboard interrupt.')
exit_program(EXIT_INTERRUPT)

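The net effect of this launcher rework: bazarr.stop is no longer an empty sentinel but carries the child's intended exit status, which get_stop_status_code() reads back before the launcher exits with it. A minimal sketch of that round trip, using an illustrative temp-dir path (the real location comes from args.config_dir and FILE_STOP):

import os
import tempfile

EXIT_NORMAL = 0  # mirrors literals.EXIT_NORMAL

def write_stop_file(path, status_code):
    # child side: record the desired exit status (cf. stop_bazarr further down)
    with open(path, 'w', encoding='UTF-8') as f:
        f.write(f'{status_code}\n')

def read_stop_status(path):
    # launcher side: fall back to EXIT_NORMAL on a missing or garbled value
    try:
        with open(path, 'r') as f:
            return int(f.readline())
    except (ValueError, TypeError, OSError):
        return EXIT_NORMAL

stop_file = os.path.join(tempfile.gettempdir(), 'bazarr.stop')
write_stop_file(stop_file, -101)           # e.g. EXIT_VALIDATION_ERROR
assert read_stop_status(stop_file) == -101
os.remove(stop_file)
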
@ -1,14 +1,15 @@
# coding=utf-8
import io
import os
import re
from flask_restx import Resource, Namespace, fields, marshal
from app.config import settings
from app.logger import empty_log
from app.get_args import args
from utilities.central import get_log_file_path
from ..utils import authenticate
api_ns_system_logs = Namespace('System Logs', description='List log file entries or empty log file')
@ -54,7 +55,7 @@ class SystemLogs(Resource):
include = include.casefold()
exclude = exclude.casefold()
with io.open(os.path.join(args.config_dir, 'log', 'bazarr.log'), encoding='UTF-8') as file:
with io.open(get_log_file_path(), encoding='UTF-8') as file:
raw_lines = file.read()
lines = raw_lines.split('|\n')
for line in lines:

@ -34,6 +34,7 @@ def create_app():
else:
app.config["DEBUG"] = False
from engineio.async_drivers import threading # noqa W0611 # required to prevent an import exception in engineio
socketio.init_app(app, path=f'{base_url.rstrip("/")}/api/socket.io', cors_allowed_origins='*',
async_mode='threading', allow_upgrades=False, transports='polling', engineio_logger=False)

@ -7,6 +7,8 @@ import logging
import re
from urllib.parse import quote_plus
from literals import EXIT_VALIDATION_ERROR
from utilities.central import stop_bazarr
from subliminal.cache import region
from dynaconf import Dynaconf, Validator as OriginalValidator
from dynaconf.loaders.yaml_loader import write
@ -163,7 +165,7 @@ validators = [
Validator('sonarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('sonarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.series_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440, ONE_HUNDRED_YEARS_IN_MINUTES]),
is_in=[15, 60, 180, 360, 720, 1440, 10080, ONE_HUNDRED_YEARS_IN_MINUTES]),
Validator('sonarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
Validator('sonarr.excluded_series_types', must_exist=True, default=[], is_type_of=list),
Validator('sonarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
@ -186,7 +188,7 @@ validators = [
Validator('radarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('radarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('radarr.movies_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440, ONE_HUNDRED_YEARS_IN_MINUTES]),
is_in=[15, 60, 180, 360, 720, 1440, 10080, ONE_HUNDRED_YEARS_IN_MINUTES]),
Validator('radarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
Validator('radarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
Validator('radarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
@ -410,8 +412,9 @@ while failed_validator:
settings[current_validator_details.names[0]] = current_validator_details.default
else:
logging.critical(f"Value for {current_validator_details.names[0]} doesn't pass validation and there's no "
f"default value. This issue must be reported. Bazarr won't works until it's been fixed.")
os._exit(0)
f"default value. This issue must be reported to and fixed by the development team. "
f"Bazarr won't work until it's been fixed.")
stop_bazarr(EXIT_VALIDATION_ERROR)
def write_config():
@ -438,7 +441,7 @@ array_keys = ['excluded_tags',
empty_values = ['', 'None', 'null', 'undefined', None, []]
str_keys = ['chmod', 'log_include_filter', 'log_exclude_filter']
str_keys = ['chmod', 'log_include_filter', 'log_exclude_filter', 'password', 'f_password', 'hashed_password']
# Increase Sonarr and Radarr sync interval since we now use SignalR feed to update in real time
if settings.sonarr.series_sync < 15:

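Adding the password fields to str_keys guards against YAML implicit typing: a purely numeric password would otherwise come back from the config file as an int and never compare equal to the submitted string. A quick illustration of the coercion being avoided (shown with PyYAML; the exact loader Dynaconf uses internally may differ):

import yaml

doc = yaml.safe_load('password: 123456')
print(type(doc['password']))    # <class 'int'> -- YAML typed it for us

# keys listed in str_keys are forced back to strings, the equivalent of:
doc['password'] = str(doc['password'])
print(type(doc['password']))    # <class 'str'>
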
@ -18,6 +18,7 @@ def clean_libs():
def set_libs():
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), '../custom_libs/'))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), '../libs/'))

@ -8,6 +8,7 @@ import platform
import warnings
from logging.handlers import TimedRotatingFileHandler
from utilities.central import get_log_file_path
from pytz_deprecation_shim import PytzUsageWarning
from .get_args import args
@ -58,19 +59,22 @@ class NoExceptionFormatter(logging.Formatter):
class UnwantedWaitressMessageFilter(logging.Filter):
def filter(self, record):
if settings.general.debug is True:
if settings.general.debug:
# no filtering in debug mode
return True
unwantedMessages = [
"Exception while serving /api/socket.io/",
['Session is disconnected', 'Session not found'],
"Exception while serving /api/socket.io/",
["'Session is disconnected'", "'Session not found'"],
"Exception while serving /api/socket.io/",
['"Session is disconnected"', '"Session not found"']
unwantedMessages = [
"Exception while serving /api/socket.io/",
['Session is disconnected', 'Session not found' ],
"Exception while serving /api/socket.io/",
["'Session is disconnected'", "'Session not found'" ],
"Exception while serving /api/socket.io/",
['"Session is disconnected"', '"Session not found"' ],
"Exception when servicing %r",
[],
]
wanted = True
@ -79,7 +83,7 @@ class UnwantedWaitressMessageFilter(logging.Filter):
if record.msg == unwantedMessages[i]:
exceptionTuple = record.exc_info
if exceptionTuple is not None:
if str(exceptionTuple[1]) in unwantedMessages[i+1]:
if len(unwantedMessages[i+1]) == 0 or str(exceptionTuple[1]) in unwantedMessages[i+1]:
wanted = False
break
@ -112,10 +116,10 @@ def configure_logging(debug=False):
# File Logging
global fh
if sys.version_info >= (3, 9):
fh = PatchedTimedRotatingFileHandler(os.path.join(args.config_dir, 'log/bazarr.log'), when="midnight",
fh = PatchedTimedRotatingFileHandler(get_log_file_path(), when="midnight",
interval=1, backupCount=7, delay=True, encoding='utf-8')
else:
fh = TimedRotatingFileHandler(os.path.join(args.config_dir, 'log/bazarr.log'), when="midnight", interval=1,
fh = TimedRotatingFileHandler(get_log_file_path(), when="midnight", interval=1,
backupCount=7, delay=True, encoding='utf-8')
f = FileHandlerFormatter('%(asctime)s|%(levelname)-8s|%(name)-32s|%(message)s|',
'%Y-%m-%d %H:%M:%S')

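The filter table reads as (message, exception-texts) pairs at even/odd indices, and the new empty-list pairing for "Exception when servicing %r" means the record is dropped whatever exception is attached, which is what the len() == 0 test above implements. A condensed, self-contained restatement of that matching rule:

# stand-ins for the LogRecord fields the real filter inspects
unwanted = [
    "Exception while serving /api/socket.io/", ['Session is disconnected', 'Session not found'],
    "Exception when servicing %r", [],   # empty list == drop for any exception text
]

def is_unwanted(msg, exc_text):
    for i in range(0, len(unwanted), 2):
        if msg == unwanted[i]:
            allowed = unwanted[i + 1]
            if len(allowed) == 0 or exc_text in allowed:
                return True
    return False

print(is_unwanted("Exception when servicing %r", "anything"))                        # True
print(is_unwanted("Exception while serving /api/socket.io/", "Session not found"))   # True
print(is_unwanted("Exception while serving /api/socket.io/", "other error"))         # False
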
@ -10,9 +10,13 @@ from apscheduler.triggers.date import DateTrigger
from apscheduler.events import EVENT_JOB_SUBMITTED, EVENT_JOB_EXECUTED, EVENT_JOB_ERROR
from datetime import datetime, timedelta
from calendar import day_name
from math import floor
from random import randrange
from tzlocal import get_localzone
from tzlocal.utils import ZoneInfoNotFoundError
try:
import zoneinfo # pragma: no cover
except ImportError:
from backports import zoneinfo # pragma: no cover
from dateutil import tz
import logging
@ -60,7 +64,7 @@ class Scheduler:
try:
self.timezone = get_localzone()
except ZoneInfoNotFoundError as e:
except zoneinfo.ZoneInfoNotFoundError as e:
logging.error(f"BAZARR cannot use specified timezone: {e}")
self.timezone = tz.gettz("UTC")
@ -219,8 +223,9 @@ class Scheduler:
trigger = CronTrigger(day_of_week=settings.backup.day, hour=settings.backup.hour)
elif backup == "Manually":
trigger = CronTrigger(year=in_a_century())
self.aps_scheduler.add_job(backup_to_zip, trigger, max_instances=1, coalesce=True, misfire_grace_time=15,
id='backup', name='Backup Database and Configuration File', replace_existing=True)
self.aps_scheduler.add_job(backup_to_zip, trigger,
max_instances=1, coalesce=True, misfire_grace_time=15, id='backup',
name='Backup Database and Configuration File', replace_existing=True)
def __sonarr_full_update_task(self):
if settings.general.use_sonarr:

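Assembled from the timezone hunks above, the scheduler now imports zoneinfo itself (with the 3.8 backport, matching the Python version pinned in the workflows) and catches the exception type tzlocal actually raises. The resulting lookup, as a standalone sketch:

try:
    import zoneinfo  # Python 3.9+
except ImportError:
    from backports import zoneinfo  # Python 3.8 fallback

from dateutil import tz
from tzlocal import get_localzone

try:
    timezone = get_localzone()
except zoneinfo.ZoneInfoNotFoundError as e:
    print(f"BAZARR cannot use specified timezone: {e}")
    timezone = tz.gettz("UTC")
print(timezone)
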
@ -1,10 +1,11 @@
# coding=utf-8
import signal
import warnings
import logging
import os
import io
import errno
from literals import EXIT_INTERRUPT, EXIT_NORMAL
from utilities.central import restart_bazarr, stop_bazarr
from waitress.server import create_server
from time import sleep
@ -37,6 +38,7 @@ class Server:
self.connected = False
self.address = str(settings.general.ip)
self.port = int(args.port) if args.port else int(settings.general.port)
self.interrupted = False
while not self.connected:
sleep(0.1)
@ -62,9 +64,17 @@ class Server:
logging.exception("BAZARR cannot start because of unhandled exception.")
self.shutdown()
def interrupt_handler(self, signum, frame):
# print('Server signal interrupt handler called with signal', signum)
if not self.interrupted:
# ignore user hammering Ctrl-C; we heard you the first time!
self.interrupted = True
self.shutdown(EXIT_INTERRUPT)
def start(self):
logging.info(f'BAZARR is started and waiting for request on http://{self.server.effective_host}:'
f'{self.server.effective_port}')
signal.signal(signal.SIGINT, self.interrupt_handler)
try:
self.server.run()
except (KeyboardInterrupt, SystemExit):
@ -72,31 +82,19 @@ class Server:
except Exception:
pass
def shutdown(self):
try:
stop_file = io.open(os.path.join(args.config_dir, "bazarr.stop"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
else:
logging.info('Bazarr is being shutdown...')
stop_file.write(str(''))
stop_file.close()
close_database()
self.server.close()
os._exit(0)
def close_all(self):
print(f"Closing database...")
close_database()
print(f"Closing webserver...")
self.server.close()
def shutdown(self, status=EXIT_NORMAL):
self.close_all()
stop_bazarr(status, False)
def restart(self):
try:
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
else:
logging.info('Bazarr is being restarted...')
restart_file.write(str(''))
restart_file.close()
close_database()
self.server.close()
os._exit(0)
self.close_all()
restart_bazarr()
webserver = Server()

@ -9,9 +9,11 @@ from functools import wraps
from urllib.parse import unquote
from constants import headers
from literals import FILE_LOG
from sonarr.info import url_api_sonarr
from radarr.info import url_api_radarr
from utilities.helper import check_credentials
from utilities.central import get_log_file_path
from .config import settings, base_url
from .database import System
@ -98,9 +100,9 @@ def catch_all(path):
@check_login
@ui_bp.route('/bazarr.log')
@ui_bp.route('/' + FILE_LOG)
def download_log():
return send_file(os.path.join(args.config_dir, 'log', 'bazarr.log'), max_age=0, as_attachment=True)
return send_file(get_log_file_path(), max_age=0, as_attachment=True)
@check_login
@ -162,7 +164,7 @@ def configured():
@ui_bp.route('/test/<protocol>/<path:url>', methods=['GET'])
def proxy(protocol, url):
if protocol.lower() not in ['http', 'https']:
return dict(status=False, error='Unsupported protocol')
return dict(status=False, error='Unsupported protocol', code=0)
url = f'{protocol}://{unquote(url)}'
params = request.args
try:
@ -173,14 +175,14 @@ def proxy(protocol, url):
if result.status_code == 200:
try:
version = result.json()['version']
return dict(status=True, version=version)
return dict(status=True, version=version, code=result.status_code)
except Exception:
return dict(status=False, error='Error Occurred. Check your settings.')
return dict(status=False, error='Error Occurred. Check your settings.', code=result.status_code)
elif result.status_code == 401:
return dict(status=False, error='Access Denied. Check API key.')
return dict(status=False, error='Access Denied. Check API key.', code=result.status_code)
elif result.status_code == 404:
return dict(status=False, error='Cannot get version. Maybe unsupported legacy API call?')
return dict(status=False, error='Cannot get version. Maybe unsupported legacy API call?', code=result.status_code)
elif 300 <= result.status_code <= 399:
return dict(status=False, error='Wrong URL Base.')
return dict(status=False, error='Wrong URL Base.', code=result.status_code)
else:
return dict(status=False, error=result.raise_for_status())
return dict(status=False, error=result.raise_for_status(), code=result.status_code)

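With the code field added, every branch of the version-test proxy reports the upstream HTTP status alongside the error text, so the frontend can tell a bad API key from a wrong base URL without parsing messages. Illustrative payloads (values invented, not captured from a live instance):

ok = {'status': True, 'version': '4.0.0.0', 'code': 200}
bad_key = {'status': False, 'error': 'Access Denied. Check API key.', 'code': 401}
# unsupported protocols are rejected before any request is made, hence code 0
no_proto = {'status': False, 'error': 'Unsupported protocol', 'code': 0}
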
@ -1,7 +1,6 @@
# coding=utf-8
import os
import io
import sys
import subprocess
import subliminal
@ -20,6 +19,9 @@ from utilities.backup import restore_from_backup
from app.database import init_db
from literals import *
from utilities.central import make_bazarr_dir, restart_bazarr, stop_bazarr
# set start time global variable as epoch
global startTime
startTime = time.time()
@ -37,20 +39,15 @@ if not os.path.exists(args.config_dir):
os.mkdir(os.path.join(args.config_dir))
except OSError:
print("BAZARR The configuration directory doesn't exist and Bazarr cannot create it (permission issue?).")
exit(2)
if not os.path.exists(os.path.join(args.config_dir, 'config')):
os.mkdir(os.path.join(args.config_dir, 'config'))
if not os.path.exists(os.path.join(args.config_dir, 'db')):
os.mkdir(os.path.join(args.config_dir, 'db'))
if not os.path.exists(os.path.join(args.config_dir, 'log')):
os.mkdir(os.path.join(args.config_dir, 'log'))
if not os.path.exists(os.path.join(args.config_dir, 'cache')):
os.mkdir(os.path.join(args.config_dir, 'cache'))
if not os.path.exists(os.path.join(args.config_dir, 'backup')):
os.mkdir(os.path.join(args.config_dir, 'backup'))
if not os.path.exists(os.path.join(args.config_dir, 'restore')):
os.mkdir(os.path.join(args.config_dir, 'restore'))
stop_bazarr(EXIT_CONFIG_CREATE_ERROR)
os.environ[ENV_BAZARR_ROOT_DIR] = os.path.join(args.config_dir)
make_bazarr_dir(DIR_BACKUP)
make_bazarr_dir(DIR_CACHE)
make_bazarr_dir(DIR_CONFIG)
make_bazarr_dir(DIR_DB)
make_bazarr_dir(DIR_LOG)
make_bazarr_dir(DIR_RESTORE)
# set subliminal_patch hearing-impaired extension to use when naming subtitles
os.environ["SZ_HI_EXTENSION"] = settings.general.hi_extension
@ -99,19 +96,11 @@ if not args.no_update:
subprocess.check_output(pip_command, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
logging.exception(f'BAZARR requirements.txt installation result: {e.stdout}')
os._exit(1)
os._exit(EXIT_REQUIREMENTS_ERROR)
else:
logging.info('BAZARR requirements installed.')
try:
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
else:
logging.info('Bazarr is being restarted...')
restart_file.write(str(''))
restart_file.close()
os._exit(0)
restart_bazarr()
# change default base_url to ''
settings.general.base_url = settings.general.base_url.rstrip('/')

@ -0,0 +1,30 @@
# coding=utf-8
# only primitive types can be specified here
# for other derived values, use constants.py
# bazarr environment variable names
ENV_STOPFILE = 'STOPFILE'
ENV_RESTARTFILE = 'RESTARTFILE'
ENV_BAZARR_ROOT_DIR = 'BAZARR_ROOT'
# bazarr subdirectories
DIR_BACKUP = 'backup'
DIR_CACHE = 'cache'
DIR_CONFIG = 'config'
DIR_DB = 'db'
DIR_LOG = 'log'
DIR_RESTORE = 'restore'
# bazarr special files
FILE_LOG = 'bazarr.log'
FILE_RESTART = 'bazarr.restart'
FILE_STOP = 'bazarr.stop'
# bazarr exit codes
EXIT_NORMAL = 0
EXIT_INTERRUPT = -100
EXIT_VALIDATION_ERROR = -101
EXIT_CONFIG_CREATE_ERROR = -102
EXIT_PYTHON_UPGRADE_NEEDED = -103
EXIT_REQUIREMENTS_ERROR = -104

@ -44,16 +44,8 @@ from app.server import webserver, app # noqa E402
from app.announcements import get_announcements_to_file # noqa E402
if args.create_db_revision:
try:
stop_file = io.open(os.path.join(args.config_dir, "bazarr.stop"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
else:
create_db_revision(app)
logging.info('Bazarr is being shutdown...')
stop_file.write(str(''))
stop_file.close()
os._exit(0)
create_db_revision(app)
stop_bazarr(EXIT_NORMAL)
else:
migrate_db(app)

@ -75,8 +75,8 @@ def check_radarr_rootfolder():
if not os.path.isdir(path_mappings.path_replace_movie(root_path)):
database.execute(
update(TableMoviesRootfolder)
.values(accessible=0, error='This Radarr root directory does not seems to be accessible by Please '
'check path mapping.')
.values(accessible=0, error='This Radarr root directory does not seem to be accessible by Bazarr. '
'Please check path mapping or if directory/drive is online.')
.where(TableMoviesRootfolder.id == item.id))
elif not os.access(path_mappings.path_replace_movie(root_path), os.W_OK):
database.execute(

@ -197,14 +197,14 @@ def update_movies(send_event=True):
if send_event:
hide_progress(id='movies_progress')
trace(f"Skipped {files_missing} file missing movies out of {i}")
trace(f"Skipped {files_missing} file missing movies out of {movies_count}")
if sync_monitored:
trace(f"Skipped {skipped_count} unmonitored movies out of {i}")
trace(f"Processed {i - files_missing - skipped_count} movies out of {i} "
trace(f"Skipped {skipped_count} unmonitored movies out of {movies_count}")
trace(f"Processed {movies_count - files_missing - skipped_count} movies out of {movies_count} "
f"with {len(movies_added)} added, {len(movies_updated)} updated and "
f"{len(movies_deleted)} deleted")
else:
trace(f"Processed {i - files_missing} movies out of {i} with {len(movies_added)} added and "
trace(f"Processed {movies_count - files_missing} movies out of {movies_count} with {len(movies_added)} added and "
f"{len(movies_updated)} updated")
logging.debug('BAZARR All movies synced from Radarr into database.')

@ -75,8 +75,8 @@ def check_sonarr_rootfolder():
if not os.path.isdir(path_mappings.path_replace(root_path)):
database.execute(
update(TableShowsRootfolder)
.values(accessible=0, error='This Sonarr root directory does not seems to be accessible by Bazarr. '
'Please check path mapping.')
.values(accessible=0, error='This Sonarr root directory does not seem to be accessible by Bazarr. '
'Please check path mapping or if directory/drive is online.')
.where(TableShowsRootfolder.id == item.id))
elif not os.access(path_mappings.path_replace(root_path), os.W_OK):
database.execute(

@ -264,7 +264,10 @@ def list_missing_subtitles_movies(no=None, send_event=True):
event_stream(type='badges')
def movies_full_scan_subtitles(use_cache=settings.radarr.use_ffprobe_cache):
def movies_full_scan_subtitles(use_cache=None):
if use_cache is None:
use_cache = settings.radarr.use_ffprobe_cache
movies = database.execute(
select(TableMovies.path))\
.all()

@ -266,7 +266,10 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
event_stream(type='badges')
def series_full_scan_subtitles(use_cache=settings.sonarr.use_ffprobe_cache):
def series_full_scan_subtitles(use_cache=None):
if use_cache is None:
use_cache = settings.sonarr.use_ffprobe_cache
episodes = database.execute(
select(TableEpisodes.path))\
.all()

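Both use_cache changes fix the same Python pitfall: a default like use_cache=settings.radarr.use_ffprobe_cache is evaluated once, at function definition time, so later changes to the setting were silently ignored. The None sentinel defers the lookup to call time, as this self-contained sketch shows:

class Settings:
    use_ffprobe_cache = True

settings = Settings()

def scan_stale(use_cache=settings.use_ffprobe_cache):   # value captured at definition
    return use_cache

def scan_fresh(use_cache=None):                          # value resolved per call
    if use_cache is None:
        use_cache = settings.use_ffprobe_cache
    return use_cache

settings.use_ffprobe_cache = False
print(scan_stale())   # True  -- stale, from definition time
print(scan_fresh())   # False -- reflects the updated setting
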
@ -26,8 +26,19 @@ def sync_subtitles(video_path, srt_path, srt_lang, forced, percent_score, sonarr
if not use_subsync_threshold or (use_subsync_threshold and percent_score < float(subsync_threshold)):
subsync = SubSyncer()
subsync.sync(video_path=video_path, srt_path=srt_path, srt_lang=srt_lang,
sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id, radarr_id=radarr_id)
sync_kwargs = {
'video_path': video_path,
'srt_path': srt_path,
'srt_lang': srt_lang,
'max_offset_seconds': str(settings.subsync.max_offset_seconds),
'no_fix_framerate': settings.subsync.no_fix_framerate,
'gss': settings.subsync.gss,
'reference': None, # means choose automatically within video file
'sonarr_series_id': sonarr_series_id,
'sonarr_episode_id': sonarr_episode_id,
'radarr_id': radarr_id,
}
subsync.sync(**sync_kwargs)
del subsync
gc.collect()
return True

@ -30,9 +30,9 @@ class SubSyncer:
self.vad = 'subs_then_webrtc'
self.log_dir_path = os.path.join(args.config_dir, 'log')
def sync(self, video_path, srt_path, srt_lang, sonarr_series_id=None, sonarr_episode_id=None, radarr_id=None,
reference=None, max_offset_seconds=str(settings.subsync.max_offset_seconds),
no_fix_framerate=settings.subsync.no_fix_framerate, gss=settings.subsync.gss):
def sync(self, video_path, srt_path, srt_lang,
max_offset_seconds, no_fix_framerate, gss, reference=None,
sonarr_series_id=None, sonarr_episode_id=None, radarr_id=None):
self.reference = video_path
self.srtin = srt_path
if self.srtin.casefold().endswith('.ass'):

@ -1,7 +1,6 @@
# coding=utf-8
import os
import io
import sqlite3
import shutil
import logging
@ -12,6 +11,7 @@ from glob import glob
from app.get_args import args
from app.config import settings
from utilities.central import restart_bazarr
def get_backup_path():
@ -133,16 +133,7 @@ def restore_from_backup():
logging.exception(f'Unable to delete {dest_database_path}')
logging.info('Backup restored successfully. Bazarr will restart.')
try:
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
else:
logging.info('Bazarr is being restarted...')
restart_file.write('')
restart_file.close()
os._exit(0)
restart_bazarr()
elif os.path.isfile(restore_config_path) or os.path.isfile(restore_database_path):
logging.debug('Cannot restore a partial backup. You must have both config and database.')
else:

@ -0,0 +1,49 @@
# coding=utf-8
# only methods that do not cause other modules to be loaded may be specified here
# for other methods that use settings, etc., use utilities/helper.py
import logging
import os
from pathlib import Path
from literals import *
def get_bazarr_dir(sub_dir):
path = os.path.join(os.environ[ENV_BAZARR_ROOT_DIR], sub_dir)
return path
def make_bazarr_dir(sub_dir):
path = get_bazarr_dir(sub_dir)
if not os.path.exists(path):
os.mkdir(path)
def get_log_file_path():
path = os.path.join(get_bazarr_dir(DIR_LOG), FILE_LOG)
return path
def get_stop_file_path():
return os.environ[ENV_STOPFILE]
def get_restart_file_path():
return os.environ[ENV_RESTARTFILE]
def stop_bazarr(status_code=EXIT_NORMAL, exit_main=True):
try:
with open(get_stop_file_path(),'w', encoding='UTF-8') as file:
# write out status code for final exit
file.write(f'{status_code}\n')
file.close()
except Exception as e:
logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
logging.info('Bazarr is being shutdown...')
if exit_main:
raise SystemExit(status_code)
def restart_bazarr():
try:
Path(get_restart_file_path()).touch()
except Exception as e:
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
logging.info('Bazarr is being restarted...')
raise SystemExit(EXIT_NORMAL)

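central.py finds the stop/restart files through environment variables rather than through app.get_args, so it can be imported before the config machinery exists; the launcher exports the paths (ENV_STOPFILE, ENV_RESTARTFILE) before spawning the child. A sketch of that handshake, with illustrative temp-dir paths:

import os
import tempfile

# launcher side (see bazarr.py above): agree on locations via the environment
os.environ['STOPFILE'] = os.path.join(tempfile.gettempdir(), 'bazarr.stop')
os.environ['RESTARTFILE'] = os.path.join(tempfile.gettempdir(), 'bazarr.restart')

# child side: no settings import needed, os.environ is enough
def request_stop(status_code=0):
    with open(os.environ['STOPFILE'], 'w', encoding='UTF-8') as f:
        f.write(f'{status_code}\n')

request_stop(0)
print(open(os.environ['STOPFILE']).read())  # '0\n'
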
@ -0,0 +1,19 @@
# Bazarr dependencies
subliminal_patch
subzero
py-pretty==1 # modified version to support Python 3
# Bazarr modified dependencies
signalr-client-threads==0.0.12 # Modified to work with Sonarr v3. Not used anymore with v4
Flask-Compress==1.14 # modified to import brotli only if required
# Required-by: signalr-client-threads
sseclient==0.0.27 # Modified to work with Sonarr v3
# Required-by: subliminal_patch
deathbycaptcha # unknown version, only found on gist
git+https://github.com/pannal/libfilebot#egg=libfilebot
git+https://github.com/RobinDavid/pyADS.git@28a2f6dbfb357f85b2c2f49add770b336e88840d#egg=pyads
py7zr==0.7.0 # modified to prevent importing of modules that can't be vendored
subscene-api==1.0.0 # modified specifically for Bazarr
subliminal==2.1.0 # modified specifically for Bazarr

@ -63,9 +63,14 @@ class Compress(object):
def init_app(self, app):
defaults = [
('COMPRESS_MIMETYPES', ['text/html', 'text/css', 'text/xml',
'application/json',
'application/javascript']),
('COMPRESS_MIMETYPES', [
'application/javascript', # Obsolete (RFC 9239)
'application/json',
'text/css',
'text/html',
'text/javascript',
'text/xml',
]),
('COMPRESS_LEVEL', 6),
('COMPRESS_BR_LEVEL', 4),
('COMPRESS_BR_MODE', 0),

@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
from babelfish import LanguageReverseConverter
from ..exceptions import ConfigurationError
class LegendasTVConverter(LanguageReverseConverter):
def __init__(self):
self.from_legendastv = {1: ('por', 'BR'), 2: ('eng',), 3: ('spa',), 4: ('fra',), 5: ('deu',), 6: ('jpn',),
7: ('dan',), 8: ('nor',), 9: ('swe',), 10: ('por',), 11: ('ara',), 12: ('ces',),
13: ('zho',), 14: ('kor',), 15: ('bul',), 16: ('ita',), 17: ('pol',)}
self.to_legendastv = {v: k for k, v in self.from_legendastv.items()}
self.codes = set(self.from_legendastv.keys())
def convert(self, alpha3, country=None, script=None):
if (alpha3, country) in self.to_legendastv:
return self.to_legendastv[(alpha3, country)]
if (alpha3,) in self.to_legendastv:
return self.to_legendastv[(alpha3,)]
raise ConfigurationError('Unsupported language code for legendastv: %s, %s, %s' % (alpha3, country, script))
def reverse(self, legendastv):
if legendastv in self.from_legendastv:
return self.from_legendastv[legendastv]
raise ConfigurationError('Unsupported language number for legendastv: %s' % legendastv)

@ -0,0 +1,229 @@
# -*- coding: utf-8 -*-
from rebulk.loose import ensure_list
from .score import get_equivalent_release_groups, score_keys
from .video import Episode, Movie
from .utils import sanitize, sanitize_release_group
def series_matches(video, title=None, **kwargs):
"""Whether the `video` matches the series title.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param str title: the series name.
:return: whether there's a match
:rtype: bool
"""
if isinstance(video, Episode):
return video.series and sanitize(title) in (
sanitize(name) for name in [video.series] + video.alternative_series
)
def title_matches(video, title=None, episode_title=None, **kwargs):
"""Whether the movie matches the movie `title` or the series matches the `episode_title`.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param str title: the movie title.
:param str episode_title: the series episode title.
:return: whether there's a match
:rtype: bool
"""
if isinstance(video, Episode):
return video.title and sanitize(episode_title) == sanitize(video.title)
if isinstance(video, Movie):
return video.title and sanitize(title) == sanitize(video.title)
def season_matches(video, season=None, **kwargs):
"""Whether the episode matches the `season`.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param int season: the episode season.
:return: whether there's a match
:rtype: bool
"""
if isinstance(video, Episode):
return video.season and season == video.season
def episode_matches(video, episode=None, **kwargs):
"""Whether the episode matches the `episode`.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param episode: the episode season.
:type: list of int or int
:return: whether there's a match
:rtype: bool
"""
if isinstance(video, Episode):
return video.episodes and ensure_list(episode) == video.episodes
def year_matches(video, year=None, partial=False, **kwargs):
"""Whether the video matches the `year`.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param int year: the video year.
:param bool partial: whether or not the guess is partial.
:return: whether there's a match
:rtype: bool
"""
if video.year and year == video.year:
return True
if isinstance(video, Episode):
# count "no year" as an information
return not partial and video.original_series and not year
def country_matches(video, country=None, partial=False, **kwargs):
"""Whether the video matches the `country`.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param country: the video country.
:type country: :class:`~babelfish.country.Country`
:param bool partial: whether or not the guess is partial.
:return: whether there's a match
:rtype: bool
"""
if video.country and country == video.country:
return True
if isinstance(video, Episode):
# count "no country" as an information
return not partial and video.original_series and not country
if isinstance(video, Movie):
# count "no country" as an information
return not video.country and not country
def release_group_matches(video, release_group=None, **kwargs):
"""Whether the video matches the `release_group`.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param str release_group: the video release group.
:return: whether there's a match
:rtype: bool
"""
return (video.release_group and release_group and
any(r in sanitize_release_group(release_group)
for r in get_equivalent_release_groups(sanitize_release_group(video.release_group))))
def streaming_service_matches(video, streaming_service=None, **kwargs):
"""Whether the video matches the `streaming_service`.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param str streaming_service: the video streaming service
:return: whether there's a match
:rtype: bool
"""
return video.streaming_service and streaming_service == video.streaming_service
def resolution_matches(video, screen_size=None, **kwargs):
"""Whether the video matches the `resolution`.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param str screen_size: the video resolution
:return: whether there's a match
:rtype: bool
"""
return video.resolution and screen_size == video.resolution
def source_matches(video, source=None, **kwargs):
"""Whether the video matches the `source`.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param str source: the video source
:return: whether there's a match
:rtype: bool
"""
return video.source and source == video.source
def video_codec_matches(video, video_codec=None, **kwargs):
"""Whether the video matches the `video_codec`.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param str video_codec: the video codec
:return: whether there's a match
:rtype: bool
"""
return video.video_codec and video_codec == video.video_codec
def audio_codec_matches(video, audio_codec=None, **kwargs):
"""Whether the video matches the `audio_codec`.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param str audio_codec: the video audio codec
:return: whether there's a match
:rtype: bool
"""
return video.audio_codec and audio_codec == video.audio_codec
#: Available matches functions
matches_manager = {
'series': series_matches,
'title': title_matches,
'season': season_matches,
'episode': episode_matches,
'year': year_matches,
'country': country_matches,
'release_group': release_group_matches,
'streaming_service': streaming_service_matches,
'resolution': resolution_matches,
'source': source_matches,
'video_codec': video_codec_matches,
'audio_codec': audio_codec_matches
}
def guess_matches(video, guess, partial=False):
"""Get matches between a `video` and a `guess`.
If a guess is `partial`, the absence information won't be counted as a match.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param guess: the guess.
:type guess: dict
:param bool partial: whether or not the guess is partial.
:return: matches between the `video` and the `guess`.
:rtype: set
"""
matches = set()
for key in score_keys:
if key in matches_manager and matches_manager[key](video, partial=partial, **guess):
matches.add(key)
return matches

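matches_manager is a plain dispatch table: guess_matches walks score_keys and calls the predicate registered for each key with the whole guess dict as keyword arguments. A toy version of that pattern (names invented, not the real predicates):

score_keys = ['season', 'episode']
manager = {
    'season': lambda video, season=None, **kw: video['season'] == season,
    'episode': lambda video, episode=None, **kw: video['episodes'] == [episode],
}

video = {'season': 1, 'episodes': [2]}
guess = {'season': 1, 'episode': 2, 'source': 'Web'}  # extra keys land in **kw
print({k for k in score_keys if k in manager and manager[k](video, **guess)})
# {'season', 'episode'}
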
@ -0,0 +1,135 @@
# -*- coding: utf-8 -*-
import io
import json
import logging
from zipfile import ZipFile
from babelfish import Language
from guessit import guessit
from requests import Session
from six.moves import urllib
from . import Provider
from ..cache import EPISODE_EXPIRATION_TIME, region
from ..exceptions import ProviderError
from ..matches import guess_matches
from ..subtitle import Subtitle, fix_line_ending
from ..video import Episode
logger = logging.getLogger(__name__)
class ArgenteamSubtitle(Subtitle):
provider_name = 'argenteam'
def __init__(self, language, download_link, series, season, episode, release, version):
super(ArgenteamSubtitle, self).__init__(language, download_link)
self.download_link = download_link
self.series = series
self.season = season
self.episode = episode
self.release = release
self.version = version
@property
def id(self):
return self.download_link
@property
def info(self):
return urllib.parse.unquote(self.download_link.rsplit('/')[-1])
def get_matches(self, video):
matches = guess_matches(video, {
'title': self.series,
'season': self.season,
'episode': self.episode,
'release_group': self.version
})
# resolution
if video.resolution and self.version and video.resolution in self.version.lower():
matches.add('resolution')
matches |= guess_matches(video, guessit(self.version, {'type': 'episode'}), partial=True)
return matches
class ArgenteamProvider(Provider):
provider_name = 'argenteam'
language = Language.fromalpha2('es')
languages = {language}
video_types = (Episode,)
server_url = "http://argenteam.net/api/v1/"
subtitle_class = ArgenteamSubtitle
def __init__(self):
self.session = None
def initialize(self):
self.session = Session()
self.session.headers['User-Agent'] = self.user_agent
def terminate(self):
self.session.close()
@region.cache_on_arguments(expiration_time=EPISODE_EXPIRATION_TIME, should_cache_fn=lambda value: value)
def search_episode_id(self, series, season, episode):
"""Search the episode id from the `series`, `season` and `episode`.
:param str series: series of the episode.
:param int season: season of the episode.
:param int episode: episode number.
:return: the episode id, if any.
:rtype: int or None
"""
# make the search
query = '%s S%#02dE%#02d' % (series, season, episode)
logger.info('Searching episode id for %r', query)
r = self.session.get(self.server_url + 'search', params={'q': query}, timeout=10)
r.raise_for_status()
results = json.loads(r.text)
if results['total'] == 1:
return results['results'][0]['id']
logger.error('No episode id found for %r', series)
def query(self, series, season, episode):
episode_id = self.search_episode_id(series, season, episode)
if episode_id is None:
return []
response = self.session.get(self.server_url + 'episode', params={'id': episode_id}, timeout=10)
response.raise_for_status()
content = json.loads(response.text)
subtitles = []
for r in content['releases']:
for s in r['subtitles']:
subtitle = self.subtitle_class(self.language, s['uri'], series, season, episode, r['team'], r['tags'])
logger.debug('Found subtitle %r', subtitle)
subtitles.append(subtitle)
return subtitles
def list_subtitles(self, video, languages):
titles = [video.series] + video.alternative_series
for title in titles:
subs = self.query(title, video.season, video.episode)
if subs:
return subs
return []
def download_subtitle(self, subtitle):
# download as a zip
logger.info('Downloading subtitle %r', subtitle)
r = self.session.get(subtitle.download_link, timeout=10)
r.raise_for_status()
# open the zip
with ZipFile(io.BytesIO(r.content)) as zf:
if len(zf.namelist()) > 1:
raise ProviderError('More than one file to unzip')
subtitle.content = fix_line_ending(zf.read(zf.namelist()[0]))

@ -0,0 +1,514 @@
# -*- coding: utf-8 -*-
import io
import json
import logging
import os
import re
from babelfish import Language, language_converters
from datetime import datetime, timedelta
from dogpile.cache.api import NO_VALUE
from guessit import guessit
import pytz
import rarfile
from rarfile import RarFile, is_rarfile
from rebulk.loose import ensure_list
from requests import Session
from zipfile import ZipFile, is_zipfile
from . import ParserBeautifulSoup, Provider
from ..cache import SHOW_EXPIRATION_TIME, region
from ..exceptions import AuthenticationError, ConfigurationError, ProviderError, ServiceUnavailable
from ..matches import guess_matches
from ..subtitle import SUBTITLE_EXTENSIONS, Subtitle, fix_line_ending
from ..utils import sanitize
from ..video import Episode, Movie
logger = logging.getLogger(__name__)
language_converters.register('legendastv = subliminal.converters.legendastv:LegendasTVConverter')
# Configure :mod:`rarfile` to use the same path separator as :mod:`zipfile`
rarfile.PATH_SEP = '/'
#: Conversion map for types
type_map = {'M': 'movie', 'S': 'episode', 'C': 'episode'}
#: BR title season parsing regex
season_re = re.compile(r' - (?P<season>\d+)(\xaa|a|st|nd|rd|th) (temporada|season)', re.IGNORECASE)
#: Downloads parsing regex
downloads_re = re.compile(r'(?P<downloads>\d+) downloads')
#: Rating parsing regex
rating_re = re.compile(r'nota (?P<rating>\d+)')
#: Timestamp parsing regex
timestamp_re = re.compile(r'(?P<day>\d+)/(?P<month>\d+)/(?P<year>\d+) - (?P<hour>\d+):(?P<minute>\d+)')
#: Title with year/country regex
title_re = re.compile(r'^(?P<series>.*?)(?: \((?:(?P<year>\d{4})|(?P<country>[A-Z]{2}))\))?$')
#: Cache key for releases
releases_key = __name__ + ':releases|{archive_id}|{archive_name}'
class LegendasTVArchive(object):
"""LegendasTV Archive.
:param str id: identifier.
:param str name: name.
:param bool pack: contains subtitles for multiple episodes.
:param bool featured: whether the archive is featured.
:param str link: link.
:param int downloads: download count.
:param int rating: rating (0-10).
:param timestamp: timestamp.
:type timestamp: datetime.datetime
"""
def __init__(self, id, name, pack, featured, link, downloads=0, rating=0, timestamp=None):
#: Identifier
self.id = id
#: Name
self.name = name
#: Pack
self.pack = pack
#: Featured
self.featured = featured
#: Link
self.link = link
#: Download count
self.downloads = downloads
#: Rating (0-10)
self.rating = rating
#: Timestamp
self.timestamp = timestamp
#: Compressed content as :class:`rarfile.RarFile` or :class:`zipfile.ZipFile`
self.content = None
def __repr__(self):
return '<%s [%s] %r>' % (self.__class__.__name__, self.id, self.name)
class LegendasTVSubtitle(Subtitle):
"""LegendasTV Subtitle."""
provider_name = 'legendastv'
def __init__(self, language, type, title, year, imdb_id, season, archive, name):
super(LegendasTVSubtitle, self).__init__(language, page_link=archive.link)
self.type = type
self.title = title
self.year = year
self.imdb_id = imdb_id
self.season = season
self.archive = archive
self.name = name
@property
def id(self):
return '%s-%s' % (self.archive.id, self.name.lower())
@property
def info(self):
return self.name
def get_matches(self, video, hearing_impaired=False):
matches = guess_matches(video, {
'title': self.title,
'year': self.year
})
# episode
if isinstance(video, Episode) and self.type == 'episode':
# imdb_id
if video.series_imdb_id and self.imdb_id == video.series_imdb_id:
matches.add('series_imdb_id')
# movie
elif isinstance(video, Movie) and self.type == 'movie':
# imdb_id
if video.imdb_id and self.imdb_id == video.imdb_id:
matches.add('imdb_id')
# name
matches |= guess_matches(video, guessit(self.name, {'type': self.type}))
return matches
class LegendasTVProvider(Provider):
"""LegendasTV Provider.
:param str username: username.
:param str password: password.
"""
languages = {Language.fromlegendastv(l) for l in language_converters['legendastv'].codes}
server_url = 'http://legendas.tv/'
subtitle_class = LegendasTVSubtitle
def __init__(self, username=None, password=None):
# Provider needs UNRAR installed. If not available raise ConfigurationError
try:
rarfile.custom_check([rarfile.UNRAR_TOOL], True)
except rarfile.RarExecError:
raise ConfigurationError('UNRAR tool not available')
if any((username, password)) and not all((username, password)):
raise ConfigurationError('Username and password must be specified')
self.username = username
self.password = password
self.logged_in = False
self.session = None
def initialize(self):
self.session = Session()
self.session.headers['User-Agent'] = self.user_agent
# login
if self.username and self.password:
logger.info('Logging in')
data = {'_method': 'POST', 'data[User][username]': self.username, 'data[User][password]': self.password}
r = self.session.post(self.server_url + 'login', data, allow_redirects=False, timeout=10)
raise_for_status(r)
soup = ParserBeautifulSoup(r.content, ['html.parser'])
if soup.find('div', {'class': 'alert-error'}, string=re.compile(u'Usuário ou senha inválidos')):
raise AuthenticationError(self.username)
logger.debug('Logged in')
self.logged_in = True
def terminate(self):
# logout
if self.logged_in:
logger.info('Logging out')
r = self.session.get(self.server_url + 'users/logout', allow_redirects=False, timeout=10)
raise_for_status(r)
logger.debug('Logged out')
self.logged_in = False
self.session.close()
@staticmethod
def is_valid_title(title, title_id, sanitized_title, season, year):
"""Check if is a valid title."""
sanitized_result = sanitize(title['title'])
if sanitized_result != sanitized_title:
logger.debug("Mismatched title, discarding title %d (%s)",
title_id, sanitized_result)
return
# episode type
if season:
# discard mismatches on type
if title['type'] != 'episode':
logger.debug("Mismatched 'episode' type, discarding title %d (%s)", title_id, sanitized_result)
return
# discard mismatches on season
if 'season' not in title or title['season'] != season:
logger.debug('Mismatched season %s, discarding title %d (%s)',
title.get('season'), title_id, sanitized_result)
return
# movie type
else:
# discard mismatches on type
if title['type'] != 'movie':
logger.debug("Mismatched 'movie' type, discarding title %d (%s)", title_id, sanitized_result)
return
# discard mismatches on year
if year is not None and 'year' in title and title['year'] != year:
logger.debug("Mismatched movie year, discarding title %d (%s)", title_id, sanitized_result)
return
return True
@region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, should_cache_fn=lambda value: value)
def search_titles(self, title, season, title_year):
"""Search for titles matching the `title`.
For episodes, each season has its own title.
:param str title: the title to search for.
:param int season: season of the title
:param int title_year: year of the title
:return: found titles.
:rtype: dict
"""
titles = {}
sanitized_titles = [sanitize(title)]
ignore_characters = {'\'', '.'}
if any(c in title for c in ignore_characters):
sanitized_titles.append(sanitize(title, ignore_characters=ignore_characters))
for sanitized_title in sanitized_titles:
# make the query
if season:
logger.info('Searching episode title %r for season %r', sanitized_title, season)
else:
logger.info('Searching movie title %r', sanitized_title)
r = self.session.get(self.server_url + 'legenda/sugestao/{}'.format(sanitized_title), timeout=10)
raise_for_status(r)
results = json.loads(r.text)
# loop over results
for result in results:
source = result['_source']
# extract id
title_id = int(source['id_filme'])
# extract type
title = {'type': type_map[source['tipo']]}
# extract title, year and country
name, year, country = title_re.match(source['dsc_nome']).groups()
title['title'] = name
# extract imdb_id
if source['id_imdb'] != '0':
if not source['id_imdb'].startswith('tt'):
title['imdb_id'] = 'tt' + source['id_imdb'].zfill(7)
else:
title['imdb_id'] = source['id_imdb']
# extract season
if title['type'] == 'episode':
if source['temporada'] and source['temporada'].isdigit():
title['season'] = int(source['temporada'])
else:
match = season_re.search(source['dsc_nome_br'])
if match:
title['season'] = int(match.group('season'))
else:
logger.debug('No season detected for title %d (%s)', title_id, name)
# extract year
if year:
title['year'] = int(year)
elif source['dsc_data_lancamento'] and source['dsc_data_lancamento'].isdigit():
# year is based on season air date hence the adjustment
title['year'] = int(source['dsc_data_lancamento']) - title.get('season', 1) + 1
# add title only if is valid
# Check against title without ignored chars
if self.is_valid_title(title, title_id, sanitized_titles[0], season, title_year):
titles[title_id] = title
logger.debug('Found %d titles', len(titles))
return titles
@region.cache_on_arguments(expiration_time=timedelta(minutes=15).total_seconds())
def get_archives(self, title_id, language_code, title_type, season, episodes):
"""Get the archive list from a given `title_id`, `language_code`, `title_type`, `season` and `episode`.
:param int title_id: title id.
:param int language_code: language code.
:param str title_type: episode or movie
:param int season: season
:param list episodes: episodes
:return: the archives.
:rtype: list of :class:`LegendasTVArchive`
"""
archives = []
page = 0
while True:
# get the archive page
url = self.server_url + 'legenda/busca/-/{language}/-/{page}/{title}'.format(
language=language_code, page=page, title=title_id)
r = self.session.get(url)
raise_for_status(r)
# parse the results
soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
for archive_soup in soup.select('div.list_element > article > div > div.f_left'):
# create archive
archive = LegendasTVArchive(archive_soup.a['href'].split('/')[2],
archive_soup.a.text,
'pack' in archive_soup.parent['class'],
'destaque' in archive_soup.parent['class'],
self.server_url + archive_soup.a['href'][1:])
# clean name of path separators and pack flags
clean_name = archive.name.replace('/', '-')
if archive.pack and clean_name.startswith('(p)'):
clean_name = clean_name[3:]
# guess from name
guess = guessit(clean_name, {'type': title_type})
# episode
if season and episodes:
# discard mismatches on episode in non-pack archives
# Guessit may return int for single episode or list for multi-episode
# Check if archive name has multiple episodes releases on it
if not archive.pack and 'episode' in guess:
wanted_episode = set(episodes)
archive_episode = set(ensure_list(guess['episode']))
if not wanted_episode.intersection(archive_episode):
logger.debug('Mismatched episode %s, discarding archive: %s', guess['episode'], clean_name)
continue
# extract text containing downloads, rating and timestamp
data_text = archive_soup.find('p', class_='data').text
# match downloads
archive.downloads = int(downloads_re.search(data_text).group('downloads'))
# match rating
match = rating_re.search(data_text)
if match:
archive.rating = int(match.group('rating'))
# match timestamp and validate it
time_data = {k: int(v) for k, v in timestamp_re.search(data_text).groupdict().items()}
archive.timestamp = pytz.timezone('America/Sao_Paulo').localize(datetime(**time_data))
if archive.timestamp > datetime.utcnow().replace(tzinfo=pytz.utc):
raise ProviderError('Archive timestamp is in the future')
# add archive
logger.info('Found archive for title %d and language %d at page %s: %s',
title_id, language_code, page, archive)
archives.append(archive)
# stop on last page
if soup.find('a', attrs={'class': 'load_more'}, string='carregar mais') is None:
break
# increment page count
page += 1
logger.debug('Found %d archives', len(archives))
return archives
def download_archive(self, archive):
"""Download an archive's :attr:`~LegendasTVArchive.content`.
:param archive: the archive to download :attr:`~LegendasTVArchive.content` of.
:type archive: :class:`LegendasTVArchive`
"""
logger.info('Downloading archive %s', archive.id)
r = self.session.get(self.server_url + 'downloadarquivo/{}'.format(archive.id))
raise_for_status(r)
# open the archive
archive_stream = io.BytesIO(r.content)
if is_rarfile(archive_stream):
logger.debug('Identified rar archive')
archive.content = RarFile(archive_stream)
elif is_zipfile(archive_stream):
logger.debug('Identified zip archive')
archive.content = ZipFile(archive_stream)
else:
raise ValueError('Not a valid archive')
def query(self, language, title, season=None, episodes=None, year=None):
# search for titles
titles = self.search_titles(title, season, year)
subtitles = []
# iterate over titles
for title_id, t in titles.items():
logger.info('Getting archives for title %d and language %d', title_id, language.legendastv)
archives = self.get_archives(title_id, language.legendastv, t['type'], season, episodes or [])
if not archives:
logger.info('No archives found for title %d and language %d', title_id, language.legendastv)
# iterate over title's archives
for a in archives:
# compute an expiration time based on the archive timestamp
expiration_time = (datetime.utcnow().replace(tzinfo=pytz.utc) - a.timestamp).total_seconds()
# attempt to get the releases from the cache
cache_key = releases_key.format(archive_id=a.id, archive_name=a.name)
releases = region.get(cache_key, expiration_time=expiration_time)
# the releases are not in cache or cache is expired
if releases == NO_VALUE:
logger.info('Releases not found in cache')
# download archive
self.download_archive(a)
# extract the releases
releases = []
for name in a.content.namelist():
# discard the legendastv file
if name.startswith('Legendas.tv'):
continue
# discard hidden files
if os.path.split(name)[-1].startswith('.'):
continue
# discard non-subtitle files
if not name.lower().endswith(SUBTITLE_EXTENSIONS):
continue
releases.append(name)
# cache the releases
region.set(cache_key, releases)
# iterate over releases
for r in releases:
subtitle = self.subtitle_class(language, t['type'], t['title'], t.get('year'), t.get('imdb_id'),
t.get('season'), a, r)
logger.debug('Found subtitle %r', subtitle)
subtitles.append(subtitle)
return subtitles
def list_subtitles(self, video, languages):
season = None
episodes = []
if isinstance(video, Episode):
titles = [video.series] + video.alternative_series
season = video.season
episodes = video.episodes
else:
titles = [video.title] + video.alternative_titles
for title in titles:
subtitles = [s for l in languages for s in
self.query(l, title, season=season, episodes=episodes, year=video.year)]
if subtitles:
return subtitles
return []
def download_subtitle(self, subtitle):
# download archive in case we previously hit the releases cache and didn't download it
if subtitle.archive.content is None:
self.download_archive(subtitle.archive)
# extract subtitle's content
subtitle.content = fix_line_ending(subtitle.archive.content.read(subtitle.name))
def raise_for_status(r):
# When the site is under maintenance it still responds with HTTP status code 200.
if 'Em breve estaremos de volta' in r.text:
raise ServiceUnavailable
else:
r.raise_for_status()

@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-
import logging
from ..extensions import provider_manager, default_providers
from ..utils import hash_napiprojekt, hash_opensubtitles, hash_shooter, hash_thesubdb
logger = logging.getLogger(__name__)
hash_functions = {
'napiprojekt': hash_napiprojekt,
'opensubtitles': hash_opensubtitles,
'opensubtitlesvip': hash_opensubtitles,
'shooter': hash_shooter,
'thesubdb': hash_thesubdb
}
def refine(video, providers=None, languages=None, **kwargs):
"""Refine a video computing required hashes for the given providers.
The following :class:`~subliminal.video.Video` attribute can be found:
* :attr:`~subliminal.video.Video.hashes`
"""
if video.size <= 10485760:
logger.warning('Size is lower than 10MB: hashes not computed')
return
logger.debug('Computing hashes for %r', video.name)
for name in providers or default_providers:
provider = provider_manager[name].plugin
if name not in hash_functions:
continue
if not provider.check_types(video):
continue
if languages and not provider.check_languages(languages):
continue
video.hashes[name] = hash_functions[name](video.name)
logger.debug('Computed hashes %r', video.hashes)

Some files were not shown because too many files have changed in this diff