Improved readability and maintenance by splitting get_subtitle.py into multiple files

pull/1664/head
morpheus65535 3 years ago committed by GitHub
parent c83d661fb1
commit 722014d822

@@ -7,7 +7,7 @@ import os
import logging
import codecs
-from pyga.requests import Event, Page, Tracker, Session, Visitor, Config
+from pyga.requests import Event, Tracker, Session, Visitor, Config
from pyga.entities import CustomVariable
from get_args import args
@@ -39,7 +39,7 @@ def track_event(category=None, action=None, label=None):
else:
visitor = Visitor()
visitor.unique_id = random.randint(0, 0x7fffffff)
-except:
+except Exception:
visitor = Visitor()
visitor.unique_id = random.randint(0, 0x7fffffff)
@@ -61,7 +61,7 @@ def track_event(category=None, action=None, label=None):
try:
tracker.track_event(event, session, visitor)
-except:
+except Exception:
logging.debug("BAZARR unable to track event.")
pass
else:
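
Note: several files in this commit replace bare "except:" clauses with "except Exception:". A minimal sketch of the difference (illustrative code, not from this commit): a bare "except" also traps KeyboardInterrupt and SystemExit, while "except Exception:" lets them propagate, so Ctrl+C still stops the process.

    # Minimal sketch: why "except Exception:" is preferred over a bare "except:".
    def parse_port(value):
        try:
            return int(value)
        except Exception:  # catches ValueError, TypeError, etc.
            return 6767    # fall back to a default port

    # A bare "except:" here would also swallow KeyboardInterrupt and SystemExit,
    # making the surrounding loop hard to interrupt; "except Exception:" does not.
    print(parse_port("8080"))  # -> 8080
    print(parse_port("oops"))  # -> 6767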

@@ -16,7 +16,7 @@ from ..utils import authenticate
class Badges(Resource):
@authenticate
def get(self):
-episodes_conditions = [(TableEpisodes.missing_subtitles is not None),
+episodes_conditions = [(TableEpisodes.missing_subtitles.is_null(False)),
(TableEpisodes.missing_subtitles != '[]')]
episodes_conditions += get_exclusion_clause('series')
missing_episodes = TableEpisodes.select(TableShows.tags,
@@ -26,7 +26,7 @@ class Badges(Resource):
.where(reduce(operator.and_, episodes_conditions))\
.count()
-movies_conditions = [(TableMovies.missing_subtitles is not None),
+movies_conditions = [(TableMovies.missing_subtitles.is_null(False)),
(TableMovies.missing_subtitles != '[]')]
movies_conditions += get_exclusion_clause('movie')
missing_movies = TableMovies.select(TableMovies.tags,
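
Note: the Badges hunks swap "TableEpisodes.missing_subtitles is not None" for ".is_null(False)". Python evaluates "field is not None" immediately against the Field object, so it always yields True and never reaches the SQL; peewee's is_null(False) is what actually emits IS NOT NULL. A rough sketch with a throwaway peewee model (the model below is a stand-in, not Bazarr's schema):

    from peewee import Model, TextField, SqliteDatabase

    db = SqliteDatabase(':memory:')

    class Episode(Model):
        missing_subtitles = TextField(null=True)

        class Meta:
            database = db

    # (Episode.missing_subtitles is not None) is just the Python bool True at
    # query-build time, so it adds no WHERE clause at all.
    # The peewee way to express "IS NOT NULL":
    query = Episode.select().where(Episode.missing_subtitles.is_null(False))
    print(query)  # SELECT ... WHERE ("t1"."missing_subtitles" IS NOT NULL)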

@@ -10,7 +10,7 @@ from database import TableEpisodes, TableShows, TableBlacklist
from ..utils import authenticate, postprocessEpisode
from utils import blacklist_log, delete_subtitles, blacklist_delete_all, blacklist_delete
from helper import path_mappings
-from get_subtitle import episode_download_subtitles
+from get_subtitle.mass_download import episode_download_subtitles
from event_handler import event_stream

@@ -9,8 +9,8 @@ from subliminal_patch.core import SUBTITLE_EXTENSIONS
from database import TableEpisodes, get_audio_profile_languages, get_profile_id
from ..utils import authenticate
from helper import path_mappings
-from get_providers import get_providers, get_providers_auth
-from get_subtitle import generate_subtitles, manual_upload_subtitle
+from get_subtitle.upload import manual_upload_subtitle
+from get_subtitle.download import generate_subtitles
from utils import history_log, delete_subtitles
from notifier import send_notifications
from list_subtitles import store_subtitles
@@ -36,9 +36,7 @@ class EpisodesSubtitles(Resource):
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
-sceneName = episodeInfo['scene_name']
-audio_language = episodeInfo['audio_language']
-if sceneName is None: sceneName = "None"
+sceneName = episodeInfo['scene_name'] or "None"
language = request.form.get('language')
hi = request.form.get('hi').capitalize()
@@ -94,9 +92,8 @@ class EpisodesSubtitles(Resource):
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
-sceneName = episodeInfo['scene_name']
+sceneName = episodeInfo['scene_name'] or "None"
audio_language = episodeInfo['audio_language']
-if sceneName is None: sceneName = "None"
language = request.form.get('language')
forced = True if request.form.get('forced') == 'true' else False
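
Note: the repeated two-line pattern "sceneName = ..." followed by "if sceneName is None: sceneName = \"None\"" is collapsed into a single "or" expression throughout these endpoints. One caveat worth keeping in mind: "or" substitutes the fallback for any falsy value (empty string, 0), not only None. A small sketch with a hypothetical record:

    # Hypothetical record, not a real Bazarr row.
    episode_info = {'scene_name': None}

    # Old shape: two statements.
    scene_name = episode_info['scene_name']
    if scene_name is None:
        scene_name = "None"

    # New shape: one expression. Note it also replaces '' or 0, not only None.
    scene_name = episode_info['scene_name'] or "None"
    print(scene_name)  # -> None (the string "None")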

@@ -37,7 +37,7 @@ class EpisodesHistory(Resource):
upgradable_episodes_conditions = [(TableHistory.action.in_(query_actions)),
(TableHistory.timestamp > minimum_timestamp),
-(TableHistory.score is not None)]
+(TableHistory.score.is_null(False))]
upgradable_episodes_conditions += get_exclusion_clause('series')
upgradable_episodes = TableHistory.select(TableHistory.video_path,
fn.MAX(TableHistory.timestamp).alias('timestamp'),
@@ -61,7 +61,7 @@ class EpisodesHistory(Resource):
if int(upgradable_episode['score']) < 360:
upgradable_episodes_not_perfect.append(upgradable_episode)
-query_conditions = [(TableEpisodes.title is not None)]
+query_conditions = [(TableEpisodes.title.is_null(False))]
if episodeid:
query_conditions.append((TableEpisodes.sonarrEpisodeId == episodeid))
query_condition = reduce(operator.and_, query_conditions)
@@ -100,7 +100,7 @@ class EpisodesHistory(Resource):
item.update({"upgradable": False})
if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']),
"tags": str(item['tags']), "monitored": str(item['monitored']),
-"seriesType": str(item['seriesType'])} in upgradable_episodes_not_perfect:
+"seriesType": str(item['seriesType'])} in upgradable_episodes_not_perfect:  # noqa: E129
if os.path.isfile(path_mappings.path_replace(item['subtitles_path'])):
item.update({"upgradable": True})
@@ -128,6 +128,6 @@ class EpisodesHistory(Resource):
count = TableHistory.select()\
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
-.where(TableEpisodes.title is not None).count()
+.where(TableEpisodes.title.is_null(False)).count()
return jsonify(data=episode_history, total=count)

@@ -10,7 +10,7 @@ from database import TableMovies, TableBlacklistMovie
from ..utils import authenticate, postprocessMovie
from utils import blacklist_log_movie, delete_subtitles, blacklist_delete_all_movie, blacklist_delete_movie
from helper import path_mappings
-from get_subtitle import movies_download_subtitles
+from get_subtitle.mass_download import movies_download_subtitles
from event_handler import event_stream

@@ -38,7 +38,7 @@ class MoviesHistory(Resource):
upgradable_movies_conditions = [(TableHistoryMovie.action.in_(query_actions)),
(TableHistoryMovie.timestamp > minimum_timestamp),
-(TableHistoryMovie.score is not None)]
+(TableHistoryMovie.score.is_null(False))]
upgradable_movies_conditions += get_exclusion_clause('movie')
upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path,
fn.MAX(TableHistoryMovie.timestamp).alias('timestamp'),
@@ -61,7 +61,7 @@ class MoviesHistory(Resource):
if int(upgradable_movie['score']) < 120:
upgradable_movies_not_perfect.append(upgradable_movie)
-query_conditions = [(TableMovies.title is not None)]
+query_conditions = [(TableMovies.title.is_null(False))]
if radarrid:
query_conditions.append((TableMovies.radarrId == radarrid))
query_condition = reduce(operator.and_, query_conditions)
@@ -95,7 +95,7 @@ class MoviesHistory(Resource):
# Mark movies as upgradable or not
item.update({"upgradable": False})
if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']),
-"tags": str(item['tags']), "monitored": str(item['monitored'])} in upgradable_movies_not_perfect:
+"tags": str(item['tags']), "monitored": str(item['monitored'])} in upgradable_movies_not_perfect:  # noqa: E129
if os.path.isfile(path_mappings.path_replace_movie(item['subtitles_path'])):
item.update({"upgradable": True})
@@ -117,13 +117,13 @@ class MoviesHistory(Resource):
if item['action'] not in [0, 4, 5]:
for blacklisted_item in blacklist_db:
if blacklisted_item['provider'] == item['provider'] and blacklisted_item['subs_id'] == item[
-'subs_id']:
+'subs_id']:  # noqa: E125
item.update({"blacklisted": True})
break
count = TableHistoryMovie.select()\
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
-.where(TableMovies.title is not None)\
+.where(TableMovies.title.is_null(False))\
.count()
return jsonify(data=movie_history, total=count)

@@ -7,7 +7,8 @@ from database import TableMovies
from ..utils import authenticate, postprocessMovie, None_Keys
from list_subtitles import list_missing_subtitles_movies, movies_scan_subtitles
from event_handler import event_stream
-from get_subtitle import movies_download_subtitles, wanted_search_missing_subtitles_movies
+from get_subtitle.wanted import wanted_search_missing_subtitles_movies
+from get_subtitle.mass_download import movies_download_subtitles
class Movies(Resource):
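
Note: the import rewrites in this commit follow from turning the single get_subtitle.py module into a package, so callers import from the submodule that now owns each function. A sketch of the split implied by the imports seen in this diff (the layout is inferred, not a listing of the repository tree):

    # Assumed split of the old get_subtitle.py module into a package; inferred
    # from the imports touched in this commit, not from the repository itself.
    assumed_layout = {
        'get_subtitle/download.py': ['generate_subtitles'],
        'get_subtitle/upload.py': ['manual_upload_subtitle'],
        'get_subtitle/manual.py': ['manual_search', 'manual_download_subtitle'],
        'get_subtitle/mass_download.py': ['series_download_subtitles',
                                          'episode_download_subtitles',
                                          'movies_download_subtitles'],
        'get_subtitle/wanted.py': ['wanted_search_missing_subtitles_series',
                                   'wanted_search_missing_subtitles_movies'],
    }

    # Callers change from "from get_subtitle import generate_subtitles" to
    # "from get_subtitle.download import generate_subtitles", and so on.
    for module, functions in assumed_layout.items():
        print(module, '->', ', '.join(functions))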

@@ -9,8 +9,8 @@ from subliminal_patch.core import SUBTITLE_EXTENSIONS
from database import TableMovies, get_audio_profile_languages, get_profile_id
from ..utils import authenticate
from helper import path_mappings
-from get_providers import get_providers, get_providers_auth
-from get_subtitle import manual_upload_subtitle, generate_subtitles
+from get_subtitle.upload import manual_upload_subtitle
+from get_subtitle.download import generate_subtitles
from utils import history_log_movie, delete_subtitles
from notifier import send_notifications_movie
from list_subtitles import store_subtitles_movie
@@ -36,8 +36,7 @@ class MoviesSubtitles(Resource):
.get()
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
-sceneName = movieInfo['sceneName']
-if sceneName is None: sceneName = 'None'
+sceneName = movieInfo['sceneName'] or 'None'
title = movieInfo['title']
audio_language = movieInfo['audio_language']
@@ -46,9 +45,6 @@ class MoviesSubtitles(Resource):
hi = request.form.get('hi').capitalize()
forced = request.form.get('forced').capitalize()
-providers_list = get_providers()
-providers_auth = get_providers_auth()
audio_language_list = get_audio_profile_languages(movie_id=radarrId)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
@@ -97,8 +93,7 @@ class MoviesSubtitles(Resource):
.get()
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
-sceneName = movieInfo['sceneName']
-if sceneName is None: sceneName = 'None'
+sceneName = movieInfo['sceneName'] or 'None'
title = movieInfo['title']
audioLanguage = movieInfo['audio_language']

@@ -15,10 +15,10 @@ class Providers(Resource):
history = request.args.get('history')
if history and history not in False_Keys:
providers = list(TableHistory.select(TableHistory.provider)
-.where(TableHistory.provider != None and TableHistory.provider != "manual")
+.where(TableHistory.provider is not None and TableHistory.provider != "manual")
.dicts())
providers += list(TableHistoryMovie.select(TableHistoryMovie.provider)
-.where(TableHistoryMovie.provider != None and TableHistoryMovie.provider != "manual")
+.where(TableHistoryMovie.provider is not None and TableHistoryMovie.provider != "manual")
.dicts())
providers_list = list(set([x['provider'] for x in providers]))
providers_dicts = []

@@ -6,7 +6,7 @@ from flask_restful import Resource
from database import TableEpisodes, TableShows, get_audio_profile_languages, get_profile_id
from helper import path_mappings
from get_providers import get_providers, get_providers_auth
-from get_subtitle import manual_search, manual_download_subtitle
+from get_subtitle.manual import manual_search, manual_download_subtitle
from utils import history_log
from config import settings
from notifier import send_notifications
@@ -31,9 +31,8 @@ class ProviderEpisodes(Resource):
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
-sceneName = episodeInfo['scene_name']
+sceneName = episodeInfo['scene_name'] or "None"
profileId = episodeInfo['profileId']
-if sceneName is None: sceneName = "None"
providers_list = get_providers()
providers_auth = get_providers_auth()
@@ -58,8 +57,7 @@ class ProviderEpisodes(Resource):
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
-sceneName = episodeInfo['scene_name']
-if sceneName is None: sceneName = "None"
+sceneName = episodeInfo['scene_name'] or "None"
language = request.form.get('language')
hi = request.form.get('hi').capitalize()

@@ -6,7 +6,7 @@ from flask_restful import Resource
from database import TableMovies, get_audio_profile_languages, get_profile_id
from helper import path_mappings
from get_providers import get_providers, get_providers_auth
-from get_subtitle import manual_search, manual_download_subtitle
+from get_subtitle.manual import manual_search, manual_download_subtitle
from utils import history_log_movie
from config import settings
from notifier import send_notifications_movie
@@ -30,9 +30,8 @@ class ProviderMovies(Resource):
title = movieInfo['title']
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
-sceneName = movieInfo['sceneName']
+sceneName = movieInfo['sceneName'] or "None"
profileId = movieInfo['profileId']
-if sceneName is None: sceneName = "None"
providers_list = get_providers()
providers_auth = get_providers_auth()
@@ -57,9 +56,7 @@ class ProviderMovies(Resource):
title = movieInfo['title']
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
-sceneName = movieInfo['sceneName']
-if sceneName is None: sceneName = "None"
-audio_language = movieInfo['audio_language']
+sceneName = movieInfo['sceneName'] or "None"
language = request.form.get('language')
hi = request.form.get('hi').capitalize()

@@ -8,7 +8,8 @@ from functools import reduce
from database import get_exclusion_clause, TableEpisodes, TableShows
from list_subtitles import list_missing_subtitles, series_scan_subtitles
-from get_subtitle import series_download_subtitles, wanted_search_missing_subtitles_series
+from get_subtitle.mass_download import series_download_subtitles
+from get_subtitle.wanted import wanted_search_missing_subtitles_series
from ..utils import authenticate, postprocessSeries, None_Keys
from event_handler import event_stream

@@ -11,7 +11,7 @@ from helper import path_mappings
from ..utils import authenticate
from subsyncer import subsync
from utils import translate_subtitles_file, subtitles_apply_mods
-from get_subtitle import store_subtitles, store_subtitles_movie
+from list_subtitles import store_subtitles, store_subtitles_movie
from config import settings

@@ -3,6 +3,7 @@
from flask import request, jsonify
from flask_restful import Resource
from subliminal_patch.core import guessit
from ..utils import authenticate

@@ -16,12 +16,12 @@ class Languages(Resource):
history = request.args.get('history')
if history and history not in False_Keys:
languages = list(TableHistory.select(TableHistory.language)
-.where(TableHistory.language != None)
+.where(TableHistory.language.is_null(False))
.dicts())
languages += list(TableHistoryMovie.select(TableHistoryMovie.language)
-.where(TableHistoryMovie.language != None)
+.where(TableHistoryMovie.language.is_null(False))
.dicts())
-languages_list = list(set([l['language'].split(':')[0] for l in languages]))
+languages_list = list(set([lang['language'].split(':')[0] for lang in languages]))
languages_dicts = []
for language in languages_list:
code2 = None
@@ -40,7 +40,7 @@ class Languages(Resource):
# Compatibility: Use false temporarily
'enabled': False
})
-except:
+except Exception:
continue
return jsonify(sorted(languages_dicts, key=itemgetter('name')))

@@ -37,5 +37,4 @@ class Searches(Resource):
movies = list(movies)
search_list += movies
return jsonify(search_list)

@@ -10,7 +10,7 @@ from flask_restful import Resource
from bs4 import BeautifulSoup as bso
from database import TableEpisodes, TableShows, TableMovies
-from get_subtitle import episode_download_subtitles, movies_download_subtitles
+from get_subtitle.mass_download import episode_download_subtitles, movies_download_subtitles
from ..utils import authenticate
@@ -47,7 +47,7 @@ class WebHooksPlex(Resource):
headers={"User-Agent": os.environ["SZ_USER_AGENT"]})
soup = bso(r.content, "html.parser")
series_imdb_id = soup.find('a', {'class': re.compile(r'SeriesParentLink__ParentTextLink')})['href'].split('/')[2]
-except:
+except Exception:
return '', 404
else:
sonarrEpisodeId = TableEpisodes.select(TableEpisodes.sonarrEpisodeId) \
@@ -63,7 +63,7 @@ class WebHooksPlex(Resource):
else:
try:
movie_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
-except:
+except Exception:
return '', 404
else:
radarrId = TableMovies.select(TableMovies.radarrId)\

@@ -1,6 +1,6 @@
# coding=utf-8
-from flask import Flask, redirect, render_template, request, url_for
+from flask import Flask
from flask_socketio import SocketIO
import os

@@ -1,7 +1,6 @@
# coding=utf-8
import os
-import shutil
import re
import logging
import json
@@ -102,7 +101,7 @@ def download_release(url):
update_dir = os.path.join(args.config_dir, 'update')
try:
os.makedirs(update_dir, exist_ok=True)
-except Exception as e:
+except Exception:
logging.debug('BAZARR unable to create update directory {}'.format(update_dir))
else:
logging.debug('BAZARR downloading release from Github: {}'.format(url))
@@ -111,7 +110,7 @@ def download_release(url):
try:
with open(os.path.join(update_dir, 'bazarr.zip'), 'wb') as f:
f.write(r.content)
-except Exception as e:
+except Exception:
logging.exception('BAZARR unable to download new release and save it to disk')
else:
apply_update()
@@ -136,7 +135,7 @@ def apply_update():
if os.path.isdir(build_dir):
try:
rmtree(build_dir, ignore_errors=True)
-except Exception as e:
+except Exception:
logging.exception(
'BAZARR was unable to delete the previous build directory during upgrade process.')
@@ -149,7 +148,7 @@ def apply_update():
if not os.path.isdir(file_path):
with open(file_path, 'wb+') as f:
f.write(archive.read(file))
-except Exception as e:
+except Exception:
logging.exception('BAZARR unable to unzip release')
else:
is_updated = True
@@ -157,7 +156,7 @@ def apply_update():
logging.debug('BAZARR successfully unzipped new release and will now try to delete the leftover '
'files.')
update_cleaner(zipfile=bazarr_zip, bazarr_dir=bazarr_dir, config_dir=args.config_dir)
-except:
+except Exception:
logging.exception('BAZARR unable to cleanup leftover files after upgrade.')
else:
logging.debug('BAZARR successfully deleted leftover files.')
@@ -242,5 +241,5 @@ def update_cleaner(zipfile, bazarr_dir, config_dir):
rmtree(filepath, ignore_errors=True)
else:
os.remove(filepath)
-except Exception as e:
+except Exception:
logging.debug('BAZARR upgrade leftover cleaner cannot delete {}'.format(filepath))
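
Note: the update hunks drop the unused "as e" bindings on except clauses. Nothing is lost because logging.exception() already appends the active traceback to the log record. A small, self-contained sketch of the pattern (the path handling is a placeholder, not the Bazarr implementation):

    import logging
    import os

    def make_update_dir(update_dir):
        try:
            os.makedirs(update_dir, exist_ok=True)
        except Exception:
            # logging.exception() records the current traceback automatically,
            # so binding the exception with "as e" was never needed here.
            logging.exception('unable to create update directory {}'.format(update_dir))
        else:
            logging.debug('update directory ready: {}'.format(update_dir))

    make_update_dir('/tmp/bazarr-update-example')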

@@ -244,19 +244,19 @@ settings.general.base_url = settings.general.base_url if settings.general.base_u
base_url = settings.general.base_url.rstrip('/')
ignore_keys = ['flask_secret_key',
'page_size',
'page_size_manual_search',
'throtteled_providers']
raw_keys = ['movie_default_forced', 'serie_default_forced']
array_keys = ['excluded_tags',
'exclude',
'subzero_mods',
'excluded_series_types',
'enabled_providers',
'path_mappings',
'path_mappings_movie']
str_keys = ['chmod']
@@ -309,17 +309,15 @@ def get_settings():
value = int(value)
except ValueError:
pass
values_dict[key] = value
result[sec] = values_dict
return result
def save_settings(settings_items):
-from database import database
configure_debug = False
configure_captcha = False
update_schedule = False
@@ -341,7 +339,7 @@ def save_settings(settings_items):
for key, value in settings_items:
settings_keys = key.split('-')
# Make sure that text based form values aren't pass as list
if isinstance(value, list) and len(value) == 1 and settings_keys[-1] not in array_keys:
value = value[0]
@@ -349,7 +347,7 @@ def save_settings(settings_items):
value = None
# Make sure empty language list are stored correctly
-if settings_keys[-1] in array_keys and value[0] in empty_values :
+if settings_keys[-1] in array_keys and value[0] in empty_values:
value = []
# Handle path mappings settings since they are array in array
@@ -362,7 +360,7 @@ def save_settings(settings_items):
value = 'False'
if key == 'settings-auth-password':
-if value != settings.auth.password and value != None:
+if value != settings.auth.password and value is not None:
value = hashlib.md5(value.encode('utf-8')).hexdigest()
if key == 'settings-general-debug':
@@ -481,14 +479,14 @@ def save_settings(settings_items):
from signalr_client import sonarr_signalr_client
try:
sonarr_signalr_client.restart()
-except:
+except Exception:
pass
if radarr_changed:
from signalr_client import radarr_signalr_client
try:
radarr_signalr_client.restart()
-except:
+except Exception:
pass
if update_path_map:

@@ -2,12 +2,10 @@ import os
import atexit
import json
import ast
-import logging
import time
-from peewee import *
+from peewee import Model, AutoField, TextField, IntegerField, ForeignKeyField, BlobField, BooleanField
from playhouse.sqliteq import SqliteQueueDatabase
-from playhouse.shortcuts import model_to_dict
-from playhouse.migrate import *
+from playhouse.migrate import SqliteMigrator, migrate
from playhouse.sqlite_ext import RowIDField
from helper import path_mappings
@@ -251,8 +249,8 @@ class TableCustomScoreProfiles(BaseModel):
class TableCustomScoreProfileConditions(BaseModel):
profile_id = ForeignKeyField(TableCustomScoreProfiles, to_field="id")
type = TextField(null=True)  # provider, uploader, regex, etc
value = TextField(null=True)  # opensubtitles, jane_doe, [a-z], etc
required = BooleanField(default=False)
negate = BooleanField(default=False)
@@ -285,7 +283,7 @@ def init_db():
try:
if not System.select().count():
System.insert({System.configured: '0', System.updated: '0'}).execute()
-except:
+except Exception:
time.sleep(0.1)
else:
tables_created = True
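
Note: replacing "from peewee import *" and "from playhouse.migrate import *" with explicit names makes the module's real dependencies visible and lets linters flag unused imports. A minimal sketch of the explicit-import style (the field list mirrors the names in the hunk above):

    from peewee import (Model, AutoField, TextField, IntegerField, ForeignKeyField,
                        BlobField, BooleanField)

    # With an explicit list, a linter can tell which of these names go unused;
    # a wildcard import hides that and also pollutes the module namespace.
    print([cls.__name__ for cls in (Model, AutoField, TextField, IntegerField,
                                    ForeignKeyField, BlobField, BooleanField)])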

@@ -6,7 +6,6 @@ import pickle
from knowit import api
import enzyme
from enzyme.exceptions import MalformedMKVError
-from enzyme.exceptions import MalformedMKVError
from custom_lang import CustomLanguage
from database import TableEpisodes, TableMovies
from helper import path_mappings
@@ -31,7 +30,7 @@ def embedded_subs_reader(file, file_size, episode_file_id=None, movie_file_id=No
subtitles_list = []
if data["ffprobe"] and "subtitle" in data["ffprobe"]:
for detected_language in data["ffprobe"]["subtitle"]:
-if not "language" in detected_language:
+if "language" not in detected_language:
continue
# Avoid commentary subtitles
@@ -93,7 +92,7 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
try:
# Unpickle ffprobe cache
cached_value = pickle.loads(cache_key['ffprobe_cache'])
-except:
+except Exception:
pass
else:
# Check if file size and file id matches and if so, we return the cached value

@@ -10,7 +10,7 @@ parser = argparse.ArgumentParser()
def get_args():
parser.register('type', bool, strtobool)
config_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'data'))
parser.add_argument('-c', '--config', default=config_dir, type=str, metavar="DIR",
dest="config_dir", help="Directory containing the configuration (default: %s)" % config_dir)
@@ -26,7 +26,7 @@ def get_args():
help="Enable developer mode (default: False)")
parser.add_argument('--no-tasks', default=False, type=bool, const=True, metavar="BOOL", nargs="?",
help="Disable all tasks (default: False)")
return parser.parse_args()

@@ -5,11 +5,11 @@ import requests
import logging
from peewee import DoesNotExist
-from database import get_exclusion_clause, TableEpisodes, TableShows
+from database import TableEpisodes, TableShows
from config import settings, url_sonarr
from helper import path_mappings
from list_subtitles import store_subtitles, series_full_scan_subtitles
-from get_subtitle import episode_download_subtitles
+from get_subtitle.mass_download import episode_download_subtitles
from event_handler import event_stream, show_progress, hide_progress
from utils import get_sonarr_info
@@ -24,7 +24,7 @@ def update_all_episodes():
def sync_episodes(series_id=None, send_event=True):
logging.debug('BAZARR Starting episodes sync from Sonarr.')
apikey_sonarr = settings.sonarr.apikey
# Get current episodes id in DB
current_episodes_db = TableEpisodes.select(TableEpisodes.sonarrEpisodeId,
TableEpisodes.path,
@@ -38,7 +38,7 @@ def sync_episodes(series_id=None, send_event=True):
episodes_to_update = []
episodes_to_add = []
altered_episodes = []
# Get sonarrId for each series from database
seriesIdList = get_series_from_sonarr_api(series_id=series_id, url=url_sonarr(), apikey_sonarr=apikey_sonarr,)
@@ -299,11 +299,11 @@ def episodeParser(episode):
try:
video_format, video_resolution = episode['episodeFile']['quality']['quality']['name'].split('-')
-except:
+except Exception:
video_format = episode['episodeFile']['quality']['quality']['name']
try:
video_resolution = str(episode['episodeFile']['quality']['quality']['resolution']) + 'p'
-except:
+except Exception:
video_resolution = None
return {'sonarrSeriesId': episode['seriesId'],

@@ -3,8 +3,6 @@
import os
import requests
import logging
-import operator
-from functools import reduce
from peewee import DoesNotExist
from config import settings, url_radarr
@@ -13,8 +11,8 @@ from utils import get_radarr_info
from list_subtitles import store_subtitles_movie, movies_full_scan_subtitles
from get_rootfolder import check_radarr_rootfolder
-from get_subtitle import movies_download_subtitles
+from get_subtitle.mass_download import movies_download_subtitles
-from database import get_exclusion_clause, TableMovies
+from database import TableMovies
from event_handler import event_stream, show_progress, hide_progress
from get_languages import language_from_alpha2
@@ -45,7 +43,7 @@ def update_movies(send_event=True):
else:
audio_profiles = get_profile_list()
tagsDict = get_tags()
# Get movies data from radarr
movies = get_movies_from_radarr_api(url=url_radarr(), apikey_radarr=apikey_radarr)
if not movies:
@@ -53,7 +51,7 @@
else:
# Get current movies in DB
current_movies_db = TableMovies.select(TableMovies.tmdbId, TableMovies.path, TableMovies.radarrId).dicts()
current_movies_db_list = [x['tmdbId'] for x in current_movies_db]
current_movies_radarr = []
@@ -87,7 +85,7 @@ def update_movies(send_event=True):
tags_dict=tagsDict,
movie_default_profile=movie_default_profile,
audio_profiles=audio_profiles))
if send_event:
hide_progress(id='movies_progress')
@@ -196,10 +194,10 @@ def update_one_movie(movie_id, action):
return
else:
if action == 'updated' and existing_movie:
movie = movieParser(movie_data, action='update', tags_dict=tagsDict,
movie_default_profile=movie_default_profile, audio_profiles=audio_profiles)
elif action == 'updated' and not existing_movie:
movie = movieParser(movie_data, action='insert', tags_dict=tagsDict,
movie_default_profile=movie_default_profile, audio_profiles=audio_profiles)
except Exception:
logging.debug('BAZARR cannot get movie returned by SignalR feed from Radarr API.')
@@ -253,11 +251,11 @@ def get_profile_list():
try:
profiles_json = requests.get(url_radarr_api_movies, timeout=60, verify=False, headers=headers)
-except requests.exceptions.ConnectionError as errc:
+except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get profiles from Radarr. Connection Error.")
-except requests.exceptions.Timeout as errt:
+except requests.exceptions.Timeout:
logging.exception("BAZARR Error trying to get profiles from Radarr. Timeout Error.")
-except requests.exceptions.RequestException as err:
+except requests.exceptions.RequestException:
logging.exception("BAZARR Error trying to get profiles from Radarr.")
else:
# Parsing data returned from radarr
@@ -381,16 +379,16 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
try:
overview = str(movie['overview'])
-except:
+except Exception:
overview = ""
try:
poster_big = movie['images'][0]['url']
poster = os.path.splitext(poster_big)[0] + '-500' + os.path.splitext(poster_big)[1]
-except:
+except Exception:
poster = ""
try:
fanart = movie['images'][1]['url']
-except:
+except Exception:
fanart = ""
if 'sceneName' in movie['movieFile']:
@@ -413,44 +411,41 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
try:
format, resolution = movie['movieFile']['quality']['quality']['name'].split('-')
-except:
+except Exception:
format = movie['movieFile']['quality']['quality']['name']
try:
resolution = str(movie['movieFile']['quality']['quality']['resolution']) + 'p'
-except:
+except Exception:
resolution = None
if 'mediaInfo' in movie['movieFile']:
-videoFormat = videoCodecID = videoProfile = videoCodecLibrary = None
+videoFormat = videoCodecID = videoCodecLibrary = None
if get_radarr_info.is_legacy():
-if 'videoFormat' in movie['movieFile']['mediaInfo']: videoFormat = \
-movie['movieFile']['mediaInfo']['videoFormat']
+if 'videoFormat' in movie['movieFile']['mediaInfo']:
+videoFormat = movie['movieFile']['mediaInfo']['videoFormat']
else:
-if 'videoCodec' in movie['movieFile']['mediaInfo']: videoFormat = \
-movie['movieFile']['mediaInfo']['videoCodec']
+if 'videoCodec' in movie['movieFile']['mediaInfo']:
+videoFormat = movie['movieFile']['mediaInfo']['videoCodec']
-if 'videoCodecID' in movie['movieFile']['mediaInfo']: videoCodecID = \
-movie['movieFile']['mediaInfo']['videoCodecID']
+if 'videoCodecID' in movie['movieFile']['mediaInfo']:
+videoCodecID = movie['movieFile']['mediaInfo']['videoCodecID']
-if 'videoProfile' in movie['movieFile']['mediaInfo']: videoProfile = \
-movie['movieFile']['mediaInfo']['videoProfile']
-if 'videoCodecLibrary' in movie['movieFile']['mediaInfo']: videoCodecLibrary = \
-movie['movieFile']['mediaInfo']['videoCodecLibrary']
+if 'videoCodecLibrary' in movie['movieFile']['mediaInfo']:
+videoCodecLibrary = movie['movieFile']['mediaInfo']['videoCodecLibrary']
videoCodec = RadarrFormatVideoCodec(videoFormat, videoCodecID, videoCodecLibrary)
audioFormat = audioCodecID = audioProfile = audioAdditionalFeatures = None
if get_radarr_info.is_legacy():
-if 'audioFormat' in movie['movieFile']['mediaInfo']: audioFormat = \
-movie['movieFile']['mediaInfo']['audioFormat']
+if 'audioFormat' in movie['movieFile']['mediaInfo']:
+audioFormat = movie['movieFile']['mediaInfo']['audioFormat']
else:
-if 'audioCodec' in movie['movieFile']['mediaInfo']: audioFormat = \
-movie['movieFile']['mediaInfo']['audioCodec']
+if 'audioCodec' in movie['movieFile']['mediaInfo']:
+audioFormat = movie['movieFile']['mediaInfo']['audioCodec']
-if 'audioCodecID' in movie['movieFile']['mediaInfo']: audioCodecID = \
-movie['movieFile']['mediaInfo']['audioCodecID']
+if 'audioCodecID' in movie['movieFile']['mediaInfo']:
+audioCodecID = movie['movieFile']['mediaInfo']['audioCodecID']
-if 'audioProfile' in movie['movieFile']['mediaInfo']: audioProfile = \
-movie['movieFile']['mediaInfo']['audioProfile']
+if 'audioProfile' in movie['movieFile']['mediaInfo']:
+audioProfile = movie['movieFile']['mediaInfo']['audioProfile']
-if 'audioAdditionalFeatures' in movie['movieFile']['mediaInfo']: audioAdditionalFeatures = \
-movie['movieFile']['mediaInfo']['audioAdditionalFeatures']
+if 'audioAdditionalFeatures' in movie['movieFile']['mediaInfo']:
+audioAdditionalFeatures = movie['movieFile']['mediaInfo']['audioAdditionalFeatures']
-audioCodec = RadarrFormatAudioCodec(audioFormat, audioCodecID, audioProfile,
-audioAdditionalFeatures)
+audioCodec = RadarrFormatAudioCodec(audioFormat, audioCodecID, audioProfile, audioAdditionalFeatures)
else:
videoCodec = None
audioCodec = None
@@ -478,27 +473,27 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
tags = [d['label'] for d in tags_dict if d['id'] in movie['tags']]
if action == 'update':
return {'radarrId': int(movie["id"]),
'title': movie["title"],
'path': movie["path"] + separator + movie['movieFile']['relativePath'],
'tmdbId': str(movie["tmdbId"]),
'poster': poster,
'fanart': fanart,
'audio_language': str(audio_language),
'sceneName': sceneName,
'monitored': str(bool(movie['monitored'])),
'year': str(movie['year']),
'sortTitle': movie['sortTitle'],
'alternativeTitles': alternativeTitles,
'format': format,
'resolution': resolution,
'video_codec': videoCodec,
'audio_codec': audioCodec,
'overview': overview,
'imdbId': imdbId,
'movie_file_id': int(movie['movieFile']['id']),
'tags': str(tags),
'file_size': movie['movieFile']['size']}
else:
return {'radarrId': int(movie["id"]),
'title': movie["title"],
@@ -538,16 +533,16 @@ def get_movies_from_radarr_api(url, apikey_radarr, radarr_id=None):
if r.status_code == 404:
return
r.raise_for_status()
-except requests.exceptions.HTTPError as errh:
+except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get movies from Radarr. Http error.")
return
-except requests.exceptions.ConnectionError as errc:
+except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get movies from Radarr. Connection Error.")
return
-except requests.exceptions.Timeout as errt:
+except requests.exceptions.Timeout:
logging.exception("BAZARR Error trying to get movies from Radarr. Timeout Error.")
return
-except requests.exceptions.RequestException as err:
+except requests.exceptions.RequestException:
logging.exception("BAZARR Error trying to get movies from Radarr.")
return
else:
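
Note: the movieParser hunk above replaces one-line "if ...: name = \" backslash continuations with ordinary indented blocks. A reduced sketch of the before/after shape on a generic mediaInfo dict (the dict content is illustrative, not Radarr API output):

    media_info = {'videoCodec': 'x265', 'audioCodec': 'AAC'}
    video_format = None

    # Old shape: condition and assignment on one line, continued with a backslash.
    if 'videoCodec' in media_info: video_format = \
        media_info['videoCodec']

    # New shape: a plain block, easier to read and to diff.
    if 'videoCodec' in media_info:
        video_format = media_info['videoCodec']

    print(video_format)  # -> x265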

@@ -30,8 +30,10 @@ def time_until_end_of_day(dt=None):
tomorrow = dt + datetime.timedelta(days=1)
return datetime.datetime.combine(tomorrow, datetime.time.min) - dt
# Titulky resets its download limits at the start of a new day from its perspective - the Europe/Prague timezone
-titulky_server_local_time = datetime.datetime.now(tz=pytz.timezone('Europe/Prague')).replace(tzinfo=None)  # Needs to convert to offset-naive dt
+# Needs to convert to offset-naive dt
+titulky_server_local_time = datetime.datetime.now(tz=pytz.timezone('Europe/Prague')).replace(tzinfo=None)
titulky_limit_reset_datetime = time_until_end_of_day(dt=titulky_server_local_time)
hours_until_end_of_day = time_until_end_of_day().seconds // 3600 + 1
@@ -42,41 +44,41 @@ VALID_COUNT_EXCEPTIONS = ('TooManyRequests', 'ServiceUnavailable', 'APIThrottled',
requests.exceptions.ConnectTimeout, requests.exceptions.ReadTimeout, socket.timeout)
PROVIDER_THROTTLE_MAP = {
"default": {
TooManyRequests: (datetime.timedelta(hours=1), "1 hour"),
DownloadLimitExceeded: (datetime.timedelta(hours=3), "3 hours"),
ServiceUnavailable: (datetime.timedelta(minutes=20), "20 minutes"),
APIThrottled: (datetime.timedelta(minutes=10), "10 minutes"),
ParseResponseError: (datetime.timedelta(hours=6), "6 hours"),
requests.exceptions.Timeout: (datetime.timedelta(hours=1), "1 hour"),
socket.timeout: (datetime.timedelta(hours=1), "1 hour"),
requests.exceptions.ConnectTimeout: (datetime.timedelta(hours=1), "1 hour"),
requests.exceptions.ReadTimeout: (datetime.timedelta(hours=1), "1 hour"),
},
"opensubtitles": {
TooManyRequests: (datetime.timedelta(hours=3), "3 hours"),
DownloadLimitExceeded: (datetime.timedelta(hours=6), "6 hours"),
DownloadLimitReached: (datetime.timedelta(hours=6), "6 hours"),
APIThrottled: (datetime.timedelta(seconds=15), "15 seconds"),
},
"opensubtitlescom": {
TooManyRequests: (datetime.timedelta(minutes=1), "1 minute"),
DownloadLimitExceeded: (datetime.timedelta(hours=24), "24 hours"),
},
"addic7ed": {
DownloadLimitExceeded: (datetime.timedelta(hours=3), "3 hours"),
TooManyRequests: (datetime.timedelta(minutes=5), "5 minutes"),
IPAddressBlocked: (datetime.timedelta(hours=1), "1 hours"),
},
"titulky": {
DownloadLimitExceeded: (titulky_limit_reset_datetime, f"{titulky_limit_reset_datetime.seconds // 3600 + 1} hours")
},
"legendasdivx": {
TooManyRequests: (datetime.timedelta(hours=3), "3 hours"),
DownloadLimitExceeded: (
datetime.timedelta(hours=hours_until_end_of_day), "{} hours".format(str(hours_until_end_of_day))),
IPAddressBlocked: (
datetime.timedelta(hours=hours_until_end_of_day), "{} hours".format(str(hours_until_end_of_day))),
}
}
@@ -104,7 +106,7 @@ def get_providers():
now = datetime.datetime.now()
if now < until:
logging.debug("Not using %s until %s, because of: %s", provider,
until.strftime("%y/%m/%d %H:%M"), reason)
providers_list.remove(provider)
else:
logging.info("Using %s again after %s, (disabled because: %s)", provider, throttle_desc, reason)
@@ -123,22 +125,22 @@ def get_providers():
def get_providers_auth():
return {
'addic7ed': {
'username': settings.addic7ed.username,
'password': settings.addic7ed.password,
'is_vip': settings.addic7ed.getboolean('vip'),
},
'opensubtitles': {
'username': settings.opensubtitles.username,
'password': settings.opensubtitles.password,
'use_tag_search': settings.opensubtitles.getboolean(
'use_tag_search'
),
'only_foreign': False,  # fixme
'also_foreign': False,  # fixme
'is_vip': settings.opensubtitles.getboolean('vip'),
'use_ssl': settings.opensubtitles.getboolean('ssl'),
'timeout': int(settings.opensubtitles.timeout) or 15,
'skip_wrong_fps': settings.opensubtitles.getboolean(
'skip_wrong_fps'
),
@ -146,56 +148,56 @@ def get_providers_auth():
'opensubtitlescom': {'username': settings.opensubtitlescom.username, 'opensubtitlescom': {'username': settings.opensubtitlescom.username,
'password': settings.opensubtitlescom.password, 'password': settings.opensubtitlescom.password,
'use_hash': settings.opensubtitlescom.getboolean('use_hash'), 'use_hash': settings.opensubtitlescom.getboolean('use_hash'),
'api_key' : 's38zmzVlW7IlYruWi7mHwDYl2SfMQoC1' 'api_key': 's38zmzVlW7IlYruWi7mHwDYl2SfMQoC1'
}, },
'podnapisi' : { 'podnapisi': {
'only_foreign': False, # fixme 'only_foreign': False, # fixme
'also_foreign': False, # fixme 'also_foreign': False, # fixme
'verify_ssl': settings.podnapisi.getboolean('verify_ssl') 'verify_ssl': settings.podnapisi.getboolean('verify_ssl')
}, },
'subscene' : { 'subscene': {
'username' : settings.subscene.username, 'username': settings.subscene.username,
'password' : settings.subscene.password, 'password': settings.subscene.password,
'only_foreign': False, # fixme 'only_foreign': False, # fixme
}, },
'legendasdivx' : { 'legendasdivx': {
'username' : settings.legendasdivx.username, 'username': settings.legendasdivx.username,
'password' : settings.legendasdivx.password, 'password': settings.legendasdivx.password,
'skip_wrong_fps': settings.legendasdivx.getboolean( 'skip_wrong_fps': settings.legendasdivx.getboolean(
'skip_wrong_fps' 'skip_wrong_fps'
), ),
}, },
'legendastv' : { 'legendastv': {
'username': settings.legendastv.username, 'username': settings.legendastv.username,
'password': settings.legendastv.password, 'password': settings.legendastv.password,
'featured_only': settings.legendastv.getboolean( 'featured_only': settings.legendastv.getboolean(
'featured_only' 'featured_only'
), ),
}, },
'xsubs' : { 'xsubs': {
'username': settings.xsubs.username, 'username': settings.xsubs.username,
'password': settings.xsubs.password, 'password': settings.xsubs.password,
}, },
'assrt' : { 'assrt': {
'token': settings.assrt.token, 'token': settings.assrt.token,
}, },
'napisy24' : { 'napisy24': {
'username': settings.napisy24.username, 'username': settings.napisy24.username,
'password': settings.napisy24.password, 'password': settings.napisy24.password,
}, },
'betaseries' : {'token': settings.betaseries.token}, 'betaseries': {'token': settings.betaseries.token},
'titulky' : { 'titulky': {
'username': settings.titulky.username, 'username': settings.titulky.username,
'password': settings.titulky.password, 'password': settings.titulky.password,
'skip_wrong_fps': settings.titulky.getboolean('skip_wrong_fps'), 'skip_wrong_fps': settings.titulky.getboolean('skip_wrong_fps'),
'approved_only': settings.titulky.getboolean('approved_only'), 'approved_only': settings.titulky.getboolean('approved_only'),
'multithreading': settings.titulky.getboolean('multithreading'), 'multithreading': settings.titulky.getboolean('multithreading'),
}, },
'titlovi' : { 'titlovi': {
'username': settings.titlovi.username, 'username': settings.titlovi.username,
'password': settings.titlovi.password, 'password': settings.titlovi.password,
}, },
'ktuvit' : { 'ktuvit': {
'email': settings.ktuvit.email, 'email': settings.ktuvit.email,
'hashed_password': settings.ktuvit.hashed_password, 'hashed_password': settings.ktuvit.hashed_password,
}, },
@ -231,7 +233,7 @@ def provider_throttle(name, exception):
cls = valid_cls cls = valid_cls
throttle_data = PROVIDER_THROTTLE_MAP.get(name, PROVIDER_THROTTLE_MAP["default"]).get(cls, None) or \ throttle_data = PROVIDER_THROTTLE_MAP.get(name, PROVIDER_THROTTLE_MAP["default"]).get(cls, None) or \
PROVIDER_THROTTLE_MAP["default"].get(cls, None) PROVIDER_THROTTLE_MAP["default"].get(cls, None)
if throttle_data: if throttle_data:
throttle_delta, throttle_description = throttle_data throttle_delta, throttle_description = throttle_data
@ -282,7 +284,6 @@ def throttled_count(name):
def update_throttled_provider(): def update_throttled_provider():
changed = False
existing_providers = provider_registry.names() existing_providers = provider_registry.names()
providers_list = [x for x in get_array_from(settings.general.enabled_providers) if x in existing_providers] providers_list = [x for x in get_array_from(settings.general.enabled_providers) if x in existing_providers]
@ -290,7 +291,6 @@ def update_throttled_provider():
if provider not in providers_list: if provider not in providers_list:
del tp[provider] del tp[provider]
settings.general.throtteled_providers = str(tp) settings.general.throtteled_providers = str(tp)
changed = True
reason, until, throttle_desc = tp.get(provider, (None, None, None)) reason, until, throttle_desc = tp.get(provider, (None, None, None))
@ -341,7 +341,7 @@ def get_throttled_providers():
with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')), 'r') as \ with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')), 'r') as \
handle: handle:
providers = eval(handle.read()) providers = eval(handle.read())
except: except Exception:
# set empty content in throttled_providers.dat # set empty content in throttled_providers.dat
logging.error("Invalid content in throttled_providers.dat. Resetting") logging.error("Invalid content in throttled_providers.dat. Resetting")
set_throttled_providers(providers) set_throttled_providers(providers)

@ -61,11 +61,11 @@ def update_series(send_event=True):
current_shows_sonarr.append(show['id']) current_shows_sonarr.append(show['id'])
if show['id'] in current_shows_db_list: if show['id'] in current_shows_db_list:
series_to_update.append(seriesParser(show, action='update', tags_dict=tagsDict, series_to_update.append(seriesParser(show, action='update', tags_dict=tagsDict,
serie_default_profile=serie_default_profile, serie_default_profile=serie_default_profile,
audio_profiles=audio_profiles)) audio_profiles=audio_profiles))
else: else:
series_to_add.append(seriesParser(show, action='insert', tags_dict=tagsDict, series_to_add.append(seriesParser(show, action='insert', tags_dict=tagsDict,
serie_default_profile=serie_default_profile, serie_default_profile=serie_default_profile,
audio_profiles=audio_profiles)) audio_profiles=audio_profiles))
@ -165,11 +165,11 @@ def update_one_series(series_id, action):
return return
else: else:
if action == 'updated' and existing_series: if action == 'updated' and existing_series:
series = seriesParser(series_data, action='update', tags_dict=tagsDict, series = seriesParser(series_data, action='update', tags_dict=tagsDict,
serie_default_profile=serie_default_profile, serie_default_profile=serie_default_profile,
audio_profiles=audio_profiles) audio_profiles=audio_profiles)
elif action == 'updated' and not existing_series: elif action == 'updated' and not existing_series:
series = seriesParser(series_data, action='insert', tags_dict=tagsDict, series = seriesParser(series_data, action='insert', tags_dict=tagsDict,
serie_default_profile=serie_default_profile, serie_default_profile=serie_default_profile,
audio_profiles=audio_profiles) audio_profiles=audio_profiles)
except Exception: except Exception:

File diff suppressed because it is too large

@ -0,0 +1,151 @@
# coding=utf-8
# fmt: off
import ast
import logging
from datetime import datetime, timedelta
from config import settings
def is_search_active(desired_language, attempt_string):
"""
Function to test if it's time to search again after a previous attempt matching the desired language. During the
configured adaptive_searching_delay after the initial search, we search on the scheduled basis; once that delay has
passed, we only search again after adaptive_searching_delta has elapsed since the latest attempt.
@param desired_language: 2-letter language code to search for in attempts
@type desired_language: str
@param attempt_string: string representation of a list of lists from database column failedAttempts
@type attempt_string: str
@return: return True if it's time to search again and False if not
@rtype: bool
"""
if settings.general.getboolean('adaptive_searching'):
logging.debug("Adaptive searching is enable, we'll see if it's time to search again...")
try:
# let's try to get a list of lists from the string representation in database
attempts = ast.literal_eval(attempt_string)
if type(attempts) is not list:
# attempts should be a list; if not, it's malformed or None
raise ValueError
except ValueError:
logging.debug("Adaptive searching: attempts is malformed. As a failsafe, search will run.")
return True
if not len(attempts):
logging.debug("Adaptive searching: attempts list is empty, search will run.")
return True
# get attempts matching the desired language and sort them by timestamp ascending
matching_attempts = sorted([x for x in attempts if x[0] == desired_language], key=lambda x: x[1])
if not len(matching_attempts):
logging.debug("Adaptive searching: there's no attempts matching desired language, search will run.")
return True
else:
logging.debug(f"Adaptive searching: attempts matching language {desired_language}: {matching_attempts}")
# try to get the initial and latest search timestamp from matching attempts
initial_search_attempt = matching_attempts[0]
latest_search_attempt = matching_attempts[-1]
# try to parse the timestamps for those attempts
try:
initial_search_timestamp = datetime.fromtimestamp(initial_search_attempt[1])
latest_search_timestamp = datetime.fromtimestamp(latest_search_attempt[1])
except (OverflowError, ValueError, OSError):
logging.debug("Adaptive searching: unable to parse initial and latest search timestamps, search will run.")
return True
else:
logging.debug(f"Adaptive searching: initial search date for {desired_language} is "
f"{initial_search_timestamp}")
logging.debug(f"Adaptive searching: latest search date for {desired_language} is {latest_search_timestamp}")
# defining basic calculation variables
now = datetime.now()
if settings.general.adaptive_searching_delay.endswith('d'):
extended_search_delay = timedelta(days=int(settings.general.adaptive_searching_delay[:1]))
elif settings.general.adaptive_searching_delay.endswith('w'):
extended_search_delay = timedelta(weeks=int(settings.general.adaptive_searching_delay[:1]))
else:
logging.debug(f"Adaptive searching: cannot parse adaptive_searching_delay from config file: "
f"{settings.general.adaptive_searching_delay}")
return True
logging.debug(f"Adaptive searching: delay after initial search value: {extended_search_delay}")
if settings.general.adaptive_searching_delta.endswith('d'):
extended_search_delta = timedelta(days=int(settings.general.adaptive_searching_delta[:1]))
elif settings.general.adaptive_searching_delta.endswith('w'):
extended_search_delta = timedelta(weeks=int(settings.general.adaptive_searching_delta[:1]))
else:
logging.debug(f"Adaptive searching: cannot parse adaptive_searching_delta from config file: "
f"{settings.general.adaptive_searching_delta}")
return True
logging.debug(f"Adaptive searching: delta between latest search and now value: {extended_search_delta}")
if initial_search_timestamp + extended_search_delay > now:
logging.debug(f"Adaptive searching: it's been less than {settings.general.adaptive_searching_delay} since "
f"initial search, search will run.")
return True
else:
logging.debug(f"Adaptive searching: it's been more than {settings.general.adaptive_searching_delay} since "
f"initial search, let's check if it's time to search again.")
if latest_search_timestamp + extended_search_delta <= now:
logging.debug(
f"Adaptive searching: it's been more than {settings.general.adaptive_searching_delta} since "
f"latest search, search will run.")
return True
else:
logging.debug(
f"Adaptive searching: it's been less than {settings.general.adaptive_searching_delta} since "
f"latest search, we're not ready to search yet.")
return False
logging.debug("adaptive searching is disabled, search will run.")
return True
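# Usage sketch (hypothetical values): failedAttempts is stored as the string form of a list of
# [language_code, timestamp] pairs, so a call could look like:
#
#     attempt_string = "[['en', 1609459200.0], ['en', 1612137600.0]]"
#     search_now = is_search_active('en', attempt_string)
#
# Whether it returns True depends on the adaptive_searching settings and on how old those timestamps are.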
def updateFailedAttempts(desired_language, attempt_string):
"""
Function to parse attempts and make sure we only keep initial and latest search timestamp for each language.
@param desired_language: 2-letter language code to search for in attempts
@type desired_language: str
@param attempt_string: string representation of a list of lists from database column failedAttempts
@type attempt_string: str
@return: string representation of a list of [language_code, timestamp] attempts
@rtype: str
"""
try:
# let's try to get a list of lists from the string representation in database
attempts = ast.literal_eval(attempt_string)
logging.debug(f"Adaptive searching: current attempts value is {attempts}")
if type(attempts) is not list:
# attempts should be a list; if not, it's malformed or None
raise ValueError
except ValueError:
logging.debug("Adaptive searching: failed to parse attempts value, we'll use an empty list.")
attempts = []
matching_attempts = sorted([x for x in attempts if x[0] == desired_language], key=lambda x: x[1])
logging.debug(f"Adaptive searching: attempts matching language {desired_language}: {matching_attempts}")
filtered_attempts = sorted([x for x in attempts if x[0] != desired_language], key=lambda x: x[1])
logging.debug(f"Adaptive searching: attempts not matching language {desired_language}: {filtered_attempts}")
# get the initial search from attempts if there's one
if len(matching_attempts):
filtered_attempts.append(matching_attempts[0])
# append current attempt with language and timestamp to attempts
filtered_attempts.append([desired_language, datetime.timestamp(datetime.now())])
updated_attempts = sorted(filtered_attempts, key=lambda x: x[0])
logging.debug(f"Adaptive searching: updated attempts that will be saved to database is {updated_attempts}")
return str(updated_attempts)
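# Note on the pruning above: only the initial attempt for the searched language is kept and the current attempt
# is appended, so repeated searches never grow the list. Hypothetical before/after for a new 'en' search:
#
#     before: [['en', t0], ['en', t1], ['en', t2], ['fr', t0]]
#     after:  [['en', t0], ['en', now], ['fr', t0]]  (then sorted by language code)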

@ -0,0 +1,240 @@
# coding=utf-8
# fmt: off
import os
import sys
import logging
import subliminal
from subzero.language import Language
from subliminal_patch.core import save_subtitles
from subliminal_patch.core_persistent import download_best_subtitles
from subliminal_patch.score import compute_score
from config import settings, get_array_from
from helper import path_mappings, pp_replace, get_target_folder, force_unicode
from get_languages import alpha3_from_alpha2, alpha2_from_alpha3, alpha2_from_language, alpha3_from_language, \
language_from_alpha3
from database import TableEpisodes, TableMovies
from analytics import track_event
from score import movie_score, series_score
from utils import notify_sonarr, notify_radarr
from event_handler import event_stream
from .pool import update_pools, _get_pool
from .utils import get_video, _get_lang_obj, _get_scores, _get_download_code3
from .sync import sync_subtitles
from .post_processing import postprocessing
@update_pools
def generate_subtitles(path, languages, audio_language, sceneName, title, media_type,
forced_minimum_score=None, is_upgrade=False, profile_id=None):
if not languages:
return None
if settings.general.getboolean('utf8_encode'):
os.environ["SZ_KEEP_ENCODING"] = ""
else:
os.environ["SZ_KEEP_ENCODING"] = "True"
language_set = set()
if not isinstance(languages, (set, list)):
languages = [languages]
pool = _get_pool(media_type, profile_id)
providers = pool.providers
for language in languages:
lang, hi_item, forced_item = language
logging.debug('BAZARR Searching subtitles for this file: ' + path)
if hi_item == "True":
hi = "force HI"
else:
hi = "force non-HI"
# Fixme: This block should be updated elsewhere
if forced_item == "True":
pool.provider_configs['podnapisi']['only_foreign'] = True
pool.provider_configs['subscene']['only_foreign'] = True
pool.provider_configs['opensubtitles']['only_foreign'] = True
else:
pool.provider_configs['podnapisi']['only_foreign'] = False
pool.provider_configs['subscene']['only_foreign'] = False
pool.provider_configs['opensubtitles']['only_foreign'] = False
# Always use alpha2 in API Request
lang = alpha3_from_alpha2(lang)
lang_obj = _get_lang_obj(lang)
if forced_item == "True":
lang_obj = Language.rebuild(lang_obj, forced=True)
if hi == "force HI":
lang_obj = Language.rebuild(lang_obj, hi=True)
language_set.add(lang_obj)
minimum_score = settings.general.minimum_score
minimum_score_movie = settings.general.minimum_score_movie
use_postprocessing = settings.general.getboolean('use_postprocessing')
postprocessing_cmd = settings.general.postprocessing_cmd
single = settings.general.getboolean('single_language')
# todo:
"""
AsyncProviderPool:
implement:
blacklist=None,
pre_download_hook=None,
post_download_hook=None,
language_hook=None
"""
video = get_video(force_unicode(path), title, sceneName, providers=providers,
media_type=media_type)
if video:
handler = series_score if media_type == "series" else movie_score
min_score, max_score, scores = _get_scores(media_type, minimum_score_movie, minimum_score)
if providers:
if forced_minimum_score:
min_score = int(forced_minimum_score) + 1
downloaded_subtitles = download_best_subtitles({video}, language_set, pool,
int(min_score), hi,
compute_score=compute_score,
throttle_time=None, # fixme
score_obj=handler)
else:
downloaded_subtitles = None
logging.info("BAZARR All providers are throttled")
return None
subz_mods = get_array_from(settings.general.subzero_mods)
saved_any = False
if downloaded_subtitles:
for video, subtitles in downloaded_subtitles.items():
if not subtitles:
continue
for s in subtitles:
s.mods = subz_mods
try:
fld = get_target_folder(path)
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
saved_subtitles = save_subtitles(video.original_path, subtitles, single=single,
tags=None, # fixme
directory=fld,
chmod=chmod,
# formats=("srt", "vtt")
path_decoder=force_unicode
)
except Exception as e:
logging.exception(
'BAZARR Error saving Subtitles file to disk for this file:' + path + ': ' + repr(e))
pass
else:
saved_any = True
for subtitle in saved_subtitles:
downloaded_provider = subtitle.provider_name
downloaded_language_code3 = _get_download_code3(subtitle)
downloaded_language = language_from_alpha3(downloaded_language_code3)
downloaded_language_code2 = alpha2_from_alpha3(downloaded_language_code3)
audio_language_code2 = alpha2_from_language(audio_language)
audio_language_code3 = alpha3_from_language(audio_language)
downloaded_path = subtitle.storage_path
subtitle_id = subtitle.id
if subtitle.language.hi:
modifier_string = " HI"
elif subtitle.language.forced:
modifier_string = " forced"
else:
modifier_string = ""
logging.debug('BAZARR Subtitles file saved to disk: ' + downloaded_path)
if is_upgrade:
action = "upgraded"
else:
action = "downloaded"
percent_score = round(subtitle.score * 100 / max_score, 2)
message = downloaded_language + modifier_string + " subtitles " + action + " from " + \
downloaded_provider + " with a score of " + str(percent_score) + "%."
if media_type == 'series':
episode_metadata = TableEpisodes.select(TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId)\
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path))\
.dicts()\
.get()
series_id = episode_metadata['sonarrSeriesId']
episode_id = episode_metadata['sonarrEpisodeId']
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=percent_score,
sonarr_series_id=episode_metadata['sonarrSeriesId'],
sonarr_episode_id=episode_metadata['sonarrEpisodeId'])
else:
movie_metadata = TableMovies.select(TableMovies.radarrId)\
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path))\
.dicts()\
.get()
series_id = ""
episode_id = movie_metadata['radarrId']
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=percent_score,
radarr_id=movie_metadata['radarrId'])
if use_postprocessing is True:
command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language,
downloaded_language_code2, downloaded_language_code3, audio_language,
audio_language_code2, audio_language_code3, subtitle.language.forced,
percent_score, subtitle_id, downloaded_provider, series_id, episode_id,
subtitle.language.hi)
if media_type == 'series':
use_pp_threshold = settings.general.getboolean('use_postprocessing_threshold')
pp_threshold = int(settings.general.postprocessing_threshold)
else:
use_pp_threshold = settings.general.getboolean('use_postprocessing_threshold_movie')
pp_threshold = int(settings.general.postprocessing_threshold_movie)
if not use_pp_threshold or (use_pp_threshold and percent_score < pp_threshold):
logging.debug("BAZARR Using post-processing command: {}".format(command))
postprocessing(command, path)
else:
logging.debug("BAZARR post-processing skipped because subtitles score isn't below this "
"threshold value: " + str(pp_threshold) + "%")
# fixme: support multiple languages at once
if media_type == 'series':
reversed_path = path_mappings.path_replace_reverse(path)
reversed_subtitles_path = path_mappings.path_replace_reverse(downloaded_path)
notify_sonarr(episode_metadata['sonarrSeriesId'])
event_stream(type='series', action='update', payload=episode_metadata['sonarrSeriesId'])
event_stream(type='episode-wanted', action='delete',
payload=episode_metadata['sonarrEpisodeId'])
else:
reversed_path = path_mappings.path_replace_reverse_movie(path)
reversed_subtitles_path = path_mappings.path_replace_reverse_movie(downloaded_path)
notify_radarr(movie_metadata['radarrId'])
event_stream(type='movie-wanted', action='delete', payload=movie_metadata['radarrId'])
track_event(category=downloaded_provider, action=action, label=downloaded_language)
yield message, reversed_path, downloaded_language_code2, downloaded_provider, subtitle.score, \
subtitle.language.forced, subtitle.id, reversed_subtitles_path, subtitle.language.hi
if not saved_any:
logging.debug('BAZARR No Subtitles were found for this file: ' + path)
return None
subliminal.region.backend.sync()
logging.debug('BAZARR Ended searching Subtitles for file: ' + path)
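# Note: each value yielded by generate_subtitles() is a 9-tuple, matching the result[0]..result[8] unpacking done
# by the mass_download and upgrade callers:
#     (message, reversed_path, language_code2, provider_name, score,
#      language.forced, subtitle_id, reversed_subtitles_path, language.hi)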

@ -0,0 +1,339 @@
# coding=utf-8
# fmt: off
import os
import sys
import logging
import pickle
import codecs
import subliminal
from subzero.language import Language
from subliminal_patch.core import save_subtitles
from subliminal_patch.core_persistent import list_all_subtitles, download_subtitles
from subliminal_patch.score import compute_score
from get_languages import language_from_alpha3, alpha2_from_alpha3, alpha3_from_alpha2, alpha2_from_language, \
alpha3_from_language
from config import settings, get_array_from
from helper import path_mappings, pp_replace, get_target_folder, force_unicode
from utils import notify_sonarr, notify_radarr
from database import get_profiles_list, TableEpisodes, TableMovies
from analytics import track_event
from score import movie_score, series_score
from .pool import update_pools, _get_pool, _init_pool
from .utils import get_video, _get_lang_obj, _get_scores, _get_download_code3
from .sync import sync_subtitles
from .post_processing import postprocessing
@update_pools
def manual_search(path, profile_id, providers, providers_auth, sceneName, title, media_type):
logging.debug('BAZARR Manually searching subtitles for this file: ' + path)
final_subtitles = []
initial_language_set = set()
language_set = set()
# where [3] is items list of dict(id, lang, forced, hi)
language_items = get_profiles_list(profile_id=int(profile_id))['items']
pool = _get_pool(media_type, profile_id)
for language in language_items:
forced = language['forced']
hi = language['hi']
language = language['language']
lang = alpha3_from_alpha2(language)
lang_obj = _get_lang_obj(lang)
if forced == "True":
lang_obj = Language.rebuild(lang_obj, forced=True)
pool.provider_configs['podnapisi']['also_foreign'] = True
pool.provider_configs['opensubtitles']['also_foreign'] = True
if hi == "True":
lang_obj = Language.rebuild(lang_obj, hi=True)
initial_language_set.add(lang_obj)
language_set = initial_language_set.copy()
for language in language_set.copy():
lang_obj_for_hi = language
if not language.forced and not language.hi:
lang_obj_hi = Language.rebuild(lang_obj_for_hi, hi=True)
elif not language.forced and language.hi:
lang_obj_hi = Language.rebuild(lang_obj_for_hi, hi=False)
else:
continue
language_set.add(lang_obj_hi)
minimum_score = settings.general.minimum_score
minimum_score_movie = settings.general.minimum_score_movie
if providers:
video = get_video(force_unicode(path), title, sceneName, providers=providers,
media_type=media_type)
else:
logging.info("BAZARR All providers are throttled")
return None
if video:
handler = series_score if media_type == "series" else movie_score
min_score, max_score, scores = _get_scores(media_type, minimum_score_movie, minimum_score)
try:
if providers:
subtitles = list_all_subtitles([video], language_set, pool)
if 'subscene' in providers:
s_pool = _init_pool("movie", profile_id, {"subscene"})
subscene_language_set = set()
for language in language_set:
if language.forced:
subscene_language_set.add(language)
if len(subscene_language_set):
s_pool.provider_configs['subscene'] = {}
s_pool.provider_configs['subscene']['only_foreign'] = True
subtitles_subscene = list_all_subtitles([video], subscene_language_set, s_pool)
s_pool.provider_configs['subscene']['only_foreign'] = False
subtitles[video] += subtitles_subscene[video]
else:
subtitles = []
logging.info("BAZARR All providers are throttled")
return None
except Exception:
logging.exception("BAZARR Error trying to get Subtitle list from provider for this file: " + path)
else:
subtitles_list = []
for s in subtitles[video]:
try:
matches = s.get_matches(video)
except AttributeError:
continue
# skip wrong season/episodes
if media_type == "series":
can_verify_series = True
if not s.hash_verifiable and "hash" in matches:
can_verify_series = False
if can_verify_series and not {"series", "season", "episode"}.issubset(matches):
logging.debug(u"BAZARR Skipping %s, because it doesn't match our series/episode", s)
continue
initial_hi = None
initial_hi_match = False
for language in initial_language_set:
if s.language.basename == language.basename and \
s.language.forced == language.forced and \
s.language.hi == language.hi:
initial_hi = language.hi
initial_hi_match = True
break
if not initial_hi_match:
initial_hi = None
score, score_without_hash = compute_score(matches, s, video, hearing_impaired=initial_hi, score_obj=handler)
if 'hash' not in matches:
not_matched = scores - matches
s.score = score_without_hash
else:
s.score = score
not_matched = set()
if s.hearing_impaired == initial_hi:
matches.add('hearing_impaired')
else:
not_matched.add('hearing_impaired')
releases = []
if hasattr(s, 'release_info'):
if s.release_info is not None:
for s_item in s.release_info.split(','):
if s_item.strip():
releases.append(s_item)
if s.uploader and s.uploader.strip():
s_uploader = s.uploader.strip()
else:
s_uploader = None
subtitles_list.append(
dict(score=round((score / max_score * 100), 2),
orig_score=score,
score_without_hash=score_without_hash,
forced=str(s.language.forced),
language=str(s.language.basename),
hearing_impaired=str(s.hearing_impaired),
provider=s.provider_name,
subtitle=codecs.encode(pickle.dumps(s.make_picklable()), "base64").decode(),
url=s.page_link,
matches=list(matches),
dont_matches=list(not_matched),
release_info=releases,
uploader=s_uploader))
final_subtitles = sorted(subtitles_list, key=lambda x: (x['orig_score'], x['score_without_hash']),
reverse=True)
logging.debug('BAZARR ' + str(len(final_subtitles)) + " Subtitles have been found for this file: " + path)
logging.debug('BAZARR Ended searching Subtitles for this file: ' + path)
subliminal.region.backend.sync()
return final_subtitles
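# Note: every entry of final_subtitles is the dict built above, e.g. with hypothetical values:
#     {'score': 87.5, 'orig_score': 315, 'score_without_hash': 315, 'forced': 'False', 'language': 'en',
#      'hearing_impaired': 'False', 'provider': 'opensubtitles', 'subtitle': '<base64-encoded pickled subtitle>',
#      'url': 'https://...', 'matches': [...], 'dont_matches': [...], 'release_info': [...], 'uploader': None}
# The pickled 'subtitle' payload is what manual_download_subtitle() below expects to receive.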
@update_pools
def manual_download_subtitle(path, language, audio_language, hi, forced, subtitle, provider, providers_auth, sceneName,
title, media_type, profile_id):
logging.debug('BAZARR Manually downloading Subtitles for this file: ' + path)
if settings.general.getboolean('utf8_encode'):
os.environ["SZ_KEEP_ENCODING"] = ""
else:
os.environ["SZ_KEEP_ENCODING"] = "True"
subtitle = pickle.loads(codecs.decode(subtitle.encode(), "base64"))
if hi == 'True':
subtitle.language.hi = True
else:
subtitle.language.hi = False
if forced == 'True':
subtitle.language.forced = True
else:
subtitle.language.forced = False
subtitle.mods = get_array_from(settings.general.subzero_mods)
use_postprocessing = settings.general.getboolean('use_postprocessing')
postprocessing_cmd = settings.general.postprocessing_cmd
single = settings.general.getboolean('single_language')
video = get_video(force_unicode(path), title, sceneName, providers={provider},
media_type=media_type)
if video:
min_score, max_score, scores = _get_scores(media_type)
try:
if provider:
download_subtitles([subtitle], _get_pool(media_type, profile_id))
logging.debug('BAZARR Subtitles file downloaded for this file:' + path)
else:
logging.info("BAZARR All providers are throttled")
return None
except Exception:
logging.exception('BAZARR Error downloading Subtitles for this file ' + path)
return None
else:
if not subtitle.is_valid():
logging.exception('BAZARR No valid Subtitles file found for this file: ' + path)
return
try:
score = round(subtitle.score / max_score * 100, 2)
fld = get_target_folder(path)
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
saved_subtitles = save_subtitles(video.original_path, [subtitle], single=single,
tags=None, # fixme
directory=fld,
chmod=chmod,
# formats=("srt", "vtt")
path_decoder=force_unicode)
except Exception:
logging.exception('BAZARR Error saving Subtitles file to disk for this file:' + path)
return
else:
if saved_subtitles:
for saved_subtitle in saved_subtitles:
downloaded_provider = saved_subtitle.provider_name
downloaded_language_code3 = _get_download_code3(subtitle)
downloaded_language = language_from_alpha3(downloaded_language_code3)
downloaded_language_code2 = alpha2_from_alpha3(downloaded_language_code3)
audio_language_code2 = alpha2_from_language(audio_language)
audio_language_code3 = alpha3_from_language(audio_language)
downloaded_path = saved_subtitle.storage_path
subtitle_id = subtitle.id
logging.debug('BAZARR Subtitles file saved to disk: ' + downloaded_path)
if subtitle.language.hi:
modifier_string = " HI"
elif subtitle.language.forced:
modifier_string = " forced"
else:
modifier_string = ""
message = downloaded_language + modifier_string + " subtitles downloaded from " + \
downloaded_provider + " with a score of " + str(score) + "% using manual search."
if media_type == 'series':
episode_metadata = TableEpisodes.select(TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId)\
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path))\
.dicts()\
.get()
series_id = episode_metadata['sonarrSeriesId']
episode_id = episode_metadata['sonarrEpisodeId']
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=score,
sonarr_series_id=episode_metadata['sonarrSeriesId'],
sonarr_episode_id=episode_metadata['sonarrEpisodeId'])
else:
movie_metadata = TableMovies.select(TableMovies.radarrId)\
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path))\
.dicts()\
.get()
series_id = ""
episode_id = movie_metadata['radarrId']
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=score, radarr_id=movie_metadata['radarrId'])
if use_postprocessing:
percent_score = round(subtitle.score * 100 / max_score, 2)
command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language,
downloaded_language_code2, downloaded_language_code3, audio_language,
audio_language_code2, audio_language_code3, subtitle.language.forced,
percent_score, subtitle_id, downloaded_provider, series_id, episode_id,
subtitle.language.hi)
if media_type == 'series':
use_pp_threshold = settings.general.getboolean('use_postprocessing_threshold')
pp_threshold = settings.general.postprocessing_threshold
else:
use_pp_threshold = settings.general.getboolean('use_postprocessing_threshold_movie')
pp_threshold = settings.general.postprocessing_threshold_movie
if not use_pp_threshold or (use_pp_threshold and score < float(pp_threshold)):
logging.debug("BAZARR Using post-processing command: {}".format(command))
postprocessing(command, path)
else:
logging.debug("BAZARR post-processing skipped because subtitles score isn't below this "
"threshold value: " + pp_threshold + "%")
if media_type == 'series':
reversed_path = path_mappings.path_replace_reverse(path)
reversed_subtitles_path = path_mappings.path_replace_reverse(downloaded_path)
notify_sonarr(episode_metadata['sonarrSeriesId'])
else:
reversed_path = path_mappings.path_replace_reverse_movie(path)
reversed_subtitles_path = path_mappings.path_replace_reverse_movie(downloaded_path)
notify_radarr(movie_metadata['radarrId'])
track_event(category=downloaded_provider, action="manually_downloaded",
label=downloaded_language)
return message, reversed_path, downloaded_language_code2, downloaded_provider, subtitle.score, \
subtitle.language.forced, subtitle.id, reversed_subtitles_path, subtitle.language.hi
else:
logging.error(
"BAZARR Tried to manually download a Subtitles for file: " + path
+ " but we weren't able to do (probably throttled by " + str(subtitle.provider_name)
+ ". Please retry later or select a Subtitles from another provider.")
return None
subliminal.region.backend.sync()
logging.debug('BAZARR Ended manually downloading Subtitles for file: ' + path)

@ -0,0 +1,2 @@
from .movies import movies_download_subtitles # noqa: W0611
from .series import series_download_subtitles, episode_download_subtitles # noqa: W0611

@ -0,0 +1,104 @@
# coding=utf-8
# fmt: off
import ast
import logging
import operator
from functools import reduce
from helper import path_mappings
from list_subtitles import store_subtitles_movie
from utils import history_log_movie
from notifier import send_notifications_movie
from get_providers import get_providers
from database import get_exclusion_clause, get_audio_profile_languages, TableMovies
from event_handler import show_progress, hide_progress
from ..download import generate_subtitles
def movies_download_subtitles(no):
conditions = [(TableMovies.radarrId == no)]
conditions += get_exclusion_clause('movie')
movies = TableMovies.select(TableMovies.path,
TableMovies.missing_subtitles,
TableMovies.audio_language,
TableMovies.radarrId,
TableMovies.sceneName,
TableMovies.title,
TableMovies.tags,
TableMovies.monitored)\
.where(reduce(operator.and_, conditions))\
.dicts()
if not len(movies):
logging.debug("BAZARR no movie with that radarrId can be found in database:", str(no))
return
else:
movie = movies[0]
if ast.literal_eval(movie['missing_subtitles']):
count_movie = len(ast.literal_eval(movie['missing_subtitles']))
else:
count_movie = 0
audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
languages = []
providers_list = None
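# Note: the loop below turns each missing language tag into the (language, hi, forced) string tuple expected by
# generate_subtitles(), e.g. 'en:hi' -> ('en', 'True', 'False') and 'pt:forced' -> ('pt', 'False', 'True').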
for i, language in enumerate(ast.literal_eval(movie['missing_subtitles'])):
providers_list = get_providers()
if language is not None:
hi_ = "True" if language.endswith(':hi') else "False"
forced_ = "True" if language.endswith(':forced') else "False"
languages.append((language.split(":")[0], hi_, forced_))
if providers_list:
# confirm if language is still missing or if cutoff has been reached
confirmed_missing_subs = TableMovies.select(TableMovies.missing_subtitles) \
.where(TableMovies.radarrId == movie['radarrId']) \
.dicts() \
.get()
if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
continue
show_progress(id='movie_search_progress_{}'.format(no),
header='Searching missing subtitles...',
name=movie['title'],
value=i,
count=count_movie)
if providers_list:
for result in generate_subtitles(path_mappings.path_replace_movie(movie['path']),
languages,
audio_language,
str(movie['sceneName']),
movie['title'],
'movie'):
if result:
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))
history_log_movie(1, no, message, path, language_code, provider, score, subs_id, subs_path)
send_notifications_movie(no, message)
else:
logging.info("BAZARR All providers are throttled")
hide_progress(id='movie_search_progress_{}'.format(no))

@ -0,0 +1,203 @@
# coding=utf-8
# fmt: off
import ast
import logging
import operator
from functools import reduce
from helper import path_mappings
from list_subtitles import store_subtitles
from utils import history_log
from notifier import send_notifications
from get_providers import get_providers
from database import get_exclusion_clause, get_audio_profile_languages, TableShows, TableEpisodes
from event_handler import show_progress, hide_progress
from ..download import generate_subtitles
def series_download_subtitles(no):
conditions = [(TableEpisodes.sonarrSeriesId == no),
(TableEpisodes.missing_subtitles != '[]')]
conditions += get_exclusion_clause('series')
episodes_details = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.missing_subtitles,
TableEpisodes.monitored,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.scene_name,
TableShows.tags,
TableShows.seriesType,
TableEpisodes.audio_language,
TableShows.title,
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.title.alias('episodeTitle')) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(reduce(operator.and_, conditions)) \
.dicts()
if not episodes_details:
logging.debug("BAZARR no episode for that sonarrSeriesId have been found in database or they have all been "
"ignored because of monitored status, series type or series tags: {}".format(no))
return
count_episodes_details = len(episodes_details)
for i, episode in enumerate(episodes_details):
providers_list = get_providers()
if providers_list:
show_progress(id='series_search_progress_{}'.format(no),
header='Searching missing subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode['title'],
episode['season'],
episode['episode'],
episode['episodeTitle']),
value=i,
count=count_episodes_details)
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
languages = []
for language in ast.literal_eval(episode['missing_subtitles']):
# confirm if language is still missing or if cutoff has been reached
confirmed_missing_subs = TableEpisodes.select(TableEpisodes.missing_subtitles) \
.where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId']) \
.dicts() \
.get()
if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
continue
if language is not None:
hi_ = "True" if language.endswith(':hi') else "False"
forced_ = "True" if language.endswith(':forced') else "False"
languages.append((language.split(":")[0], hi_, forced_))
if not languages:
continue
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
languages,
audio_language,
str(episode['scene_name']),
episode['title'], 'series'):
if result:
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
history_log(1, no, episode['sonarrEpisodeId'], message, path, language_code, provider, score,
subs_id, subs_path)
send_notifications(no, episode['sonarrEpisodeId'], message)
else:
logging.info("BAZARR All providers are throttled")
break
hide_progress(id='series_search_progress_{}'.format(no))
def episode_download_subtitles(no, send_progress=False):
conditions = [(TableEpisodes.sonarrEpisodeId == no)]
conditions += get_exclusion_clause('series')
episodes_details = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.missing_subtitles,
TableEpisodes.monitored,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.scene_name,
TableShows.tags,
TableShows.title,
TableShows.sonarrSeriesId,
TableEpisodes.audio_language,
TableShows.seriesType,
TableEpisodes.title.alias('episodeTitle'),
TableEpisodes.season,
TableEpisodes.episode) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(reduce(operator.and_, conditions)) \
.dicts()
if not episodes_details:
logging.debug("BAZARR no episode with that sonarrEpisodeId can be found in database:", str(no))
return
for episode in episodes_details:
providers_list = get_providers()
if providers_list:
if send_progress:
show_progress(id='episode_search_progress_{}'.format(no),
header='Searching missing subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode['title'],
episode['season'],
episode['episode'],
episode['episodeTitle']),
value=0,
count=1)
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
languages = []
for language in ast.literal_eval(episode['missing_subtitles']):
# confirm if language is still missing or if cutoff has been reached
confirmed_missing_subs = TableEpisodes.select(TableEpisodes.missing_subtitles) \
.where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId']) \
.dicts() \
.get()
if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
continue
if language is not None:
hi_ = "True" if language.endswith(':hi') else "False"
forced_ = "True" if language.endswith(':forced') else "False"
languages.append((language.split(":")[0], hi_, forced_))
if not languages:
continue
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
languages,
audio_language,
str(episode['scene_name']),
episode['title'],
'series'):
if result:
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message, path,
language_code, provider, score, subs_id, subs_path)
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message)
if send_progress:
hide_progress(id='episode_search_progress_{}'.format(no))
else:
logging.info("BAZARR All providers are throttled")
break

@ -0,0 +1,85 @@
# coding=utf-8
# fmt: off
import logging
import time
from inspect import getfullargspec
from utils import get_blacklist
from get_providers import get_providers, get_providers_auth, provider_throttle, provider_pool
from .utils import get_ban_list
# fmt: on
def _init_pool(media_type, profile_id=None, providers=None):
pool = provider_pool()
return pool(
providers=providers or get_providers(),
provider_configs=get_providers_auth(),
blacklist=get_blacklist(media_type),
throttle_callback=provider_throttle,
ban_list=get_ban_list(profile_id),
language_hook=None,
)
_pools = {}
def _get_pool(media_type, profile_id=None):
try:
return _pools[f'{media_type}_{profile_id or ""}']
except KeyError:
_update_pool(media_type, profile_id)
return _pools[f'{media_type}_{profile_id or ""}']
def _update_pool(media_type, profile_id=None):
pool_key = f'{media_type}_{profile_id or ""}'
logging.debug("BAZARR updating pool: %s", pool_key)
# Init a new pool if not present
if pool_key not in _pools:
logging.debug("BAZARR pool not initialized: %s. Initializing", pool_key)
_pools[pool_key] = _init_pool(media_type, profile_id)
pool = _pools[pool_key]
if pool is None:
return False
return pool.update(
get_providers(),
get_providers_auth(),
get_blacklist(media_type),
get_ban_list(profile_id),
)
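# Note: pools are cached per media type and language profile, keyed as f'{media_type}_{profile_id or ""}'
# (e.g. a hypothetical 'series_1' or 'movie_'), so _get_pool() reuses an existing provider pool instead of
# rebuilding it on every search.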
def update_pools(f):
"""Decorator that ensures all pools are updated on each function run.
It will detect any config changes in Bazarr"""
def decorated(*args, **kwargs):
logging.debug("BAZARR updating pools: %s", _pools)
start = time.time()
args_spec = getfullargspec(f).args
try:
profile_id = args[args_spec.index("profile_id")]
except (IndexError, ValueError):
profile_id = None
updated = _update_pool(args[args_spec.index("media_type")], profile_id)
if updated:
logging.debug(
"BAZARR pools update elapsed time: %sms",
round((time.time() - start) * 1000, 2),
)
return f(*args, **kwargs)
return decorated
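# Usage sketch (hypothetical function): the decorator locates 'media_type' (required) and 'profile_id' (optional)
# positionally in the wrapped function's signature, so decorated callables need those parameter names, e.g.:
#
#     @update_pools
#     def search_something(path, media_type, profile_id=None):
#         pool = _get_pool(media_type, profile_id)
#         ...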

@ -0,0 +1,38 @@
# coding=utf-8
# fmt: off
import os
import logging
import subprocess
from locale import getpreferredencoding
def postprocessing(command, path):
try:
encoding = getpreferredencoding()
if os.name == 'nt':
codepage = subprocess.Popen("chcp", shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, encoding=getpreferredencoding())
# wait for the process to terminate
out_codepage, err_codepage = codepage.communicate()
encoding = out_codepage.split(':')[-1].strip()
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, encoding=encoding)
# wait for the process to terminate
out, err = process.communicate()
out = out.replace('\n', ' ').replace('\r', ' ')
except Exception as e:
logging.error('BAZARR Post-processing failed for file ' + path + ' : ' + repr(e))
else:
if out == "":
logging.info(
'BAZARR Post-processing result for file ' + path + ' : Nothing returned from command execution')
elif err:
logging.error(
'BAZARR Post-processing result for file ' + path + ' : ' + err.replace('\n', ' ').replace('\r', ' '))
else:
logging.info('BAZARR Post-processing result for file ' + path + ' : ' + out)
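# Usage sketch (hypothetical command): by the time it reaches this function the command string has already been
# expanded by pp_replace(), so a call is simply e.g.
#
#     postprocessing('/usr/local/bin/notify-subs.sh "/media/movie.mkv"', '/media/movie.mkv')
#
# stdout is logged at info level and anything written to stderr is logged as an error.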

@ -0,0 +1,2 @@
from .database import refine_from_db # noqa: W0611
from .ffprobe import refine_from_ffprobe # noqa: W0611

@ -0,0 +1,90 @@
# coding=utf-8
# fmt: off
import ast
import re
from subliminal import Episode, Movie
from helper import path_mappings
from database import TableShows, TableEpisodes, TableMovies
from ..utils import convert_to_guessit
def refine_from_db(path, video):
if isinstance(video, Episode):
data = TableEpisodes.select(TableShows.title.alias('seriesTitle'),
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.title.alias('episodeTitle'),
TableShows.year,
TableShows.tvdbId,
TableShows.alternateTitles,
TableEpisodes.format,
TableEpisodes.resolution,
TableEpisodes.video_codec,
TableEpisodes.audio_codec,
TableEpisodes.path,
TableShows.imdbId)\
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
.where((TableEpisodes.path == path_mappings.path_replace_reverse(path)))\
.dicts()
if len(data):
data = data[0]
video.series = re.sub(r'\s(\(\d\d\d\d\))', '', data['seriesTitle'])
video.season = int(data['season'])
video.episode = int(data['episode'])
video.title = data['episodeTitle']
# Commented out because Sonarr provided so many bad years
# if data['year']:
# if int(data['year']) > 0: video.year = int(data['year'])
video.series_tvdb_id = int(data['tvdbId'])
video.alternative_series = ast.literal_eval(data['alternateTitles'])
if data['imdbId'] and not video.series_imdb_id:
video.series_imdb_id = data['imdbId']
if not video.source:
video.source = convert_to_guessit('source', str(data['format']))
if not video.resolution:
video.resolution = str(data['resolution'])
if not video.video_codec:
if data['video_codec']:
video.video_codec = convert_to_guessit('video_codec', data['video_codec'])
if not video.audio_codec:
if data['audio_codec']:
video.audio_codec = convert_to_guessit('audio_codec', data['audio_codec'])
elif isinstance(video, Movie):
data = TableMovies.select(TableMovies.title,
TableMovies.year,
TableMovies.alternativeTitles,
TableMovies.format,
TableMovies.resolution,
TableMovies.video_codec,
TableMovies.audio_codec,
TableMovies.imdbId)\
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path))\
.dicts()
if len(data):
data = data[0]
video.title = re.sub(r'\s(\(\d\d\d\d\))', '', data['title'])
# Commented out because Radarr provided so many bad years
# if data['year']:
# if int(data['year']) > 0: video.year = int(data['year'])
if data['imdbId'] and not video.imdb_id:
video.imdb_id = data['imdbId']
video.alternative_titles = ast.literal_eval(data['alternativeTitles'])
if not video.source:
if data['format']:
video.source = convert_to_guessit('source', data['format'])
if not video.resolution:
if data['resolution']:
video.resolution = data['resolution']
if not video.video_codec:
if data['video_codec']:
video.video_codec = convert_to_guessit('video_codec', data['video_codec'])
if not video.audio_codec:
if data['audio_codec']:
video.audio_codec = convert_to_guessit('audio_codec', data['audio_codec'])
return video
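# Note: identification fields (series/movie title, season, episode, tvdb id, alternate titles) are applied from
# the database outright, while imdb ids and the source/resolution/codec fields are only backfilled when the video
# object doesn't already have them (the `if not video.x` checks above), so values guessed from the file name are
# never overwritten.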

@ -0,0 +1,67 @@
# coding=utf-8
# fmt: off
import logging
from subliminal import Movie
from helper import path_mappings
from database import TableEpisodes, TableMovies
from embedded_subs_reader import parse_video_metadata
def refine_from_ffprobe(path, video):
if isinstance(video, Movie):
file_id = TableMovies.select(TableMovies.movie_file_id, TableMovies.file_size)\
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path))\
.dicts()\
.get()
else:
file_id = TableEpisodes.select(TableEpisodes.episode_file_id, TableEpisodes.file_size)\
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path))\
.dicts()\
.get()
if not isinstance(file_id, dict):
return video
if isinstance(video, Movie):
data = parse_video_metadata(file=path, file_size=file_id['file_size'],
movie_file_id=file_id['movie_file_id'])
else:
data = parse_video_metadata(file=path, file_size=file_id['file_size'],
episode_file_id=file_id['episode_file_id'])
if not data['ffprobe']:
logging.debug("No FFprobe available in cache for this file: {}".format(path))
return video
logging.debug('FFprobe found: %s', data['ffprobe'])
if 'video' not in data['ffprobe']:
logging.debug('BAZARR FFprobe was unable to find video tracks in the file!')
else:
if 'resolution' in data['ffprobe']['video'][0]:
if not video.resolution:
video.resolution = data['ffprobe']['video'][0]['resolution']
if 'codec' in data['ffprobe']['video'][0]:
if not video.video_codec:
video.video_codec = data['ffprobe']['video'][0]['codec']
if 'frame_rate' in data['ffprobe']['video'][0]:
if not video.fps:
if isinstance(data['ffprobe']['video'][0]['frame_rate'], float):
video.fps = data['ffprobe']['video'][0]['frame_rate']
else:
video.fps = data['ffprobe']['video'][0]['frame_rate'].magnitude
if 'audio' not in data['ffprobe']:
logging.debug('BAZARR FFprobe was unable to find audio tracks in the file!')
else:
if 'codec' in data['ffprobe']['audio'][0]:
if not video.audio_codec:
video.audio_codec = data['ffprobe']['audio'][0]['codec']
for track in data['ffprobe']['audio']:
if 'language' in track:
video.audio_languages.add(track['language'].alpha3)
return video

@ -0,0 +1,33 @@
# coding=utf-8
# fmt: off
import logging
from config import settings
from subsyncer import subsync
def sync_subtitles(video_path, srt_path, srt_lang, forced, media_type, percent_score, sonarr_series_id=None,
sonarr_episode_id=None, radarr_id=None):
if forced:
logging.debug('BAZARR cannot sync forced subtitles. Skipping sync routine.')
elif not settings.subsync.getboolean('use_subsync'):
logging.debug('BAZARR automatic syncing is disabled in settings. Skipping sync routine.')
else:
logging.debug(f'BAZARR automatic syncing is enabled in settings. We\'ll try to sync this '
f'subtitles file: {srt_path}.')
if media_type == 'series':
use_subsync_threshold = settings.subsync.getboolean('use_subsync_threshold')
subsync_threshold = settings.subsync.subsync_threshold
else:
use_subsync_threshold = settings.subsync.getboolean('use_subsync_movie_threshold')
subsync_threshold = settings.subsync.subsync_movie_threshold
if not use_subsync_threshold or (use_subsync_threshold and percent_score < float(subsync_threshold)):
subsync.sync(video_path=video_path, srt_path=srt_path, srt_lang=srt_lang, media_type=media_type,
sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id, radarr_id=radarr_id)
return True
else:
logging.debug("BAZARR subsync skipped because subtitles score isn't below this "
"threshold value: " + subsync_threshold + "%")
return False
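# Usage sketch (hypothetical values): the download/upgrade callers pass the freshly saved subtitles together with
# their score so the threshold check above can decide whether to run subsync, e.g.
#
#     sync_subtitles(video_path='/media/show/s01e01.mkv', srt_path='/media/show/s01e01.en.srt', srt_lang='en',
#                    forced=False, media_type='series', percent_score=92.4, sonarr_series_id=1, sonarr_episode_id=10)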

@ -0,0 +1,250 @@
# coding=utf-8
# fmt: off
import os
import logging
import operator
from functools import reduce
from peewee import fn
from datetime import datetime, timedelta
from config import settings
from helper import path_mappings
from list_subtitles import store_subtitles, store_subtitles_movie
from utils import history_log, history_log_movie
from notifier import send_notifications, send_notifications_movie
from get_providers import get_providers
from database import get_exclusion_clause, get_audio_profile_languages, TableShows, TableEpisodes, TableMovies, \
TableHistory, TableHistoryMovie
from event_handler import show_progress, hide_progress
from .download import generate_subtitles
def upgrade_subtitles():
days_to_upgrade_subs = settings.general.days_to_upgrade_subs
minimum_timestamp = ((datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
datetime(1970, 1, 1)).total_seconds()
if settings.general.getboolean('upgrade_manual'):
query_actions = [1, 2, 3, 4, 6]
else:
query_actions = [1, 3]
if settings.general.getboolean('use_sonarr'):
upgradable_episodes_conditions = [(TableHistory.action << query_actions),
(TableHistory.timestamp > minimum_timestamp),
(TableHistory.score is not None)]
upgradable_episodes_conditions += get_exclusion_clause('series')
upgradable_episodes = TableHistory.select(TableHistory.video_path,
TableHistory.language,
TableHistory.score,
TableShows.tags,
TableShows.profileId,
TableEpisodes.audio_language,
TableEpisodes.scene_name,
TableEpisodes.title,
TableEpisodes.sonarrSeriesId,
TableHistory.action,
TableHistory.subtitles_path,
TableEpisodes.sonarrEpisodeId,
fn.MAX(TableHistory.timestamp).alias('timestamp'),
TableEpisodes.monitored,
TableEpisodes.season,
TableEpisodes.episode,
TableShows.title.alias('seriesTitle'),
TableShows.seriesType)\
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
.where(reduce(operator.and_, upgradable_episodes_conditions))\
.group_by(TableHistory.video_path, TableHistory.language)\
.dicts()
upgradable_episodes_not_perfect = []
for upgradable_episode in upgradable_episodes:
if upgradable_episode['timestamp'] > minimum_timestamp:
try:
int(upgradable_episode['score'])
except ValueError:
pass
else:
if int(upgradable_episode['score']) < 360 or (settings.general.getboolean('upgrade_manual') and
upgradable_episode['action'] in [2, 4, 6]):
upgradable_episodes_not_perfect.append(upgradable_episode)
episodes_to_upgrade = []
for episode in upgradable_episodes_not_perfect:
if os.path.exists(path_mappings.path_replace(episode['subtitles_path'])) and int(episode['score']) < 357:
episodes_to_upgrade.append(episode)
count_episode_to_upgrade = len(episodes_to_upgrade)
if settings.general.getboolean('use_radarr'):
upgradable_movies_conditions = [(TableHistoryMovie.action << query_actions),
(TableHistoryMovie.timestamp > minimum_timestamp),
(TableHistoryMovie.score is not None)]
upgradable_movies_conditions += get_exclusion_clause('movie')
upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path,
TableHistoryMovie.language,
TableHistoryMovie.score,
TableMovies.profileId,
TableHistoryMovie.action,
TableHistoryMovie.subtitles_path,
TableMovies.audio_language,
TableMovies.sceneName,
fn.MAX(TableHistoryMovie.timestamp).alias('timestamp'),
TableMovies.monitored,
TableMovies.tags,
TableMovies.radarrId,
TableMovies.title)\
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
.where(reduce(operator.and_, upgradable_movies_conditions))\
.group_by(TableHistoryMovie.video_path, TableHistoryMovie.language)\
.dicts()
upgradable_movies_not_perfect = []
for upgradable_movie in upgradable_movies:
if upgradable_movie['timestamp'] > minimum_timestamp:
try:
int(upgradable_movie['score'])
except ValueError:
pass
else:
if int(upgradable_movie['score']) < 120 or (settings.general.getboolean('upgrade_manual') and
upgradable_movie['action'] in [2, 4, 6]):
upgradable_movies_not_perfect.append(upgradable_movie)
movies_to_upgrade = []
for movie in upgradable_movies_not_perfect:
if os.path.exists(path_mappings.path_replace_movie(movie['subtitles_path'])) and int(movie['score']) < 117:
movies_to_upgrade.append(movie)
count_movie_to_upgrade = len(movies_to_upgrade)
if settings.general.getboolean('use_sonarr'):
for i, episode in enumerate(episodes_to_upgrade):
providers_list = get_providers()
show_progress(id='upgrade_episodes_progress',
header='Upgrading episodes subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode['seriesTitle'],
episode['season'],
episode['episode'],
episode['title']),
value=i,
count=count_episode_to_upgrade)
if not providers_list:
logging.info("BAZARR All providers are throttled")
return
if episode['language'].endswith('forced'):
language = episode['language'].split(':')[0]
is_forced = "True"
is_hi = "False"
elif episode['language'].endswith('hi'):
language = episode['language'].split(':')[0]
is_forced = "False"
is_hi = "True"
else:
language = episode['language'].split(':')[0]
is_forced = "False"
is_hi = "False"
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
result = list(generate_subtitles(path_mappings.path_replace(episode['video_path']),
[(language, is_hi, is_forced)],
audio_language,
str(episode['scene_name']),
episode['title'],
'series',
forced_minimum_score=int(episode['score']),
is_upgrade=True))
if result:
result = result[0]
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
store_subtitles(episode['video_path'], path_mappings.path_replace(episode['video_path']))
history_log(3, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message, path,
language_code, provider, score, subs_id, subs_path)
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message)
hide_progress(id='upgrade_episodes_progress')
if settings.general.getboolean('use_radarr'):
for i, movie in enumerate(movies_to_upgrade):
providers_list = get_providers()
show_progress(id='upgrade_movies_progress',
header='Upgrading movies subtitles...',
name=movie['title'],
value=i,
count=count_movie_to_upgrade)
if not providers_list:
logging.info("BAZARR All providers are throttled")
return
if movie['language'].endswith('forced'):
language = movie['language'].split(':')[0]
is_forced = "True"
is_hi = "False"
elif movie['language'].endswith('hi'):
language = movie['language'].split(':')[0]
is_forced = "False"
is_hi = "True"
else:
language = movie['language'].split(':')[0]
is_forced = "False"
is_hi = "False"
audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
result = list(generate_subtitles(path_mappings.path_replace_movie(movie['video_path']),
[(language, is_hi, is_forced)],
audio_language,
str(movie['sceneName']),
movie['title'],
'movie',
forced_minimum_score=int(movie['score']),
is_upgrade=True))
if result:
result = result[0]
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
store_subtitles_movie(movie['video_path'],
path_mappings.path_replace_movie(movie['video_path']))
history_log_movie(3, movie['radarrId'], message, path, language_code, provider, score, subs_id, subs_path)
send_notifications_movie(movie['radarrId'], message)
hide_progress(id='upgrade_movies_progress')
logging.info('BAZARR Finished searching for Subtitles to upgrade. Check History for more information.')
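For reference, the positional result list consumed above maps indexes 0 through 8 to message, path, language, provider, score, forced, subs_id, subs_path and hi, as the assignments show. A minimal sketch of a named-tuple wrapper that would make that mapping explicit (illustrative only, not part of this change):

from collections import namedtuple

DownloadResult = namedtuple('DownloadResult',
                            ['message', 'path', 'language', 'provider', 'score',
                             'forced', 'subs_id', 'subs_path', 'hi'])

def language_code_for(result):
    # rebuild the "lang", "lang:hi" or "lang:forced" code exactly as the loops above do
    res = DownloadResult(*result)
    if res.hi:
        return res.language + ":hi"
    if res.forced:
        return res.language + ":forced"
    return res.language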

@ -0,0 +1,137 @@
# coding=utf-8
# fmt: off
import sys
import logging
from subzero.language import Language
from subliminal_patch.core import save_subtitles
from subliminal_patch.subtitle import Subtitle
from get_languages import language_from_alpha3, alpha2_from_alpha3, alpha3_from_alpha2, \
alpha2_from_language, alpha3_from_language
from config import settings, get_array_from
from helper import path_mappings, pp_replace, get_target_folder, force_unicode
from utils import notify_sonarr, notify_radarr
from custom_lang import CustomLanguage
from database import TableEpisodes, TableMovies
from event_handler import event_stream
from .sync import sync_subtitles
from .post_processing import postprocessing
def manual_upload_subtitle(path, language, forced, hi, title, scene_name, media_type, subtitle, audio_language):
logging.debug('BAZARR Manually uploading subtitles for this file: ' + path)
single = settings.general.getboolean('single_language')
use_postprocessing = settings.general.getboolean('use_postprocessing')
postprocessing_cmd = settings.general.postprocessing_cmd
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
language = alpha3_from_alpha2(language)
custom = CustomLanguage.from_value(language, "alpha3")
if custom is None:
lang_obj = Language(language)
else:
lang_obj = custom.subzero_language()
if forced:
lang_obj = Language.rebuild(lang_obj, forced=True)
sub = Subtitle(
lang_obj,
mods=get_array_from(settings.general.subzero_mods)
)
sub.content = subtitle.read()
if not sub.is_valid():
logging.exception('BAZARR Invalid subtitle file: ' + subtitle.filename)
sub.mods = None
if settings.general.getboolean('utf8_encode'):
sub.set_encoding("utf-8")
saved_subtitles = []
try:
saved_subtitles = save_subtitles(path,
[sub],
single=single,
tags=None, # fixme
directory=get_target_folder(path),
chmod=chmod,
# formats=("srt", "vtt")
path_decoder=force_unicode)
except Exception:
pass
if len(saved_subtitles) < 1:
        logging.exception('BAZARR Error saving Subtitles file to disk for this file: ' + path)
return
subtitle_path = saved_subtitles[0].storage_path
if hi:
modifier_string = " HI"
elif forced:
modifier_string = " forced"
else:
modifier_string = ""
message = language_from_alpha3(language) + modifier_string + " Subtitles manually uploaded."
if hi:
modifier_code = ":hi"
elif forced:
modifier_code = ":forced"
else:
modifier_code = ""
uploaded_language_code3 = language + modifier_code
uploaded_language = language_from_alpha3(language) + modifier_string
uploaded_language_code2 = alpha2_from_alpha3(language) + modifier_code
audio_language_code2 = alpha2_from_language(audio_language)
audio_language_code3 = alpha3_from_language(audio_language)
if media_type == 'series':
episode_metadata = TableEpisodes.select(TableEpisodes.sonarrSeriesId, TableEpisodes.sonarrEpisodeId) \
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path)) \
.dicts() \
.get()
series_id = episode_metadata['sonarrSeriesId']
episode_id = episode_metadata['sonarrEpisodeId']
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, media_type=media_type,
percent_score=100, sonarr_series_id=episode_metadata['sonarrSeriesId'], forced=forced,
sonarr_episode_id=episode_metadata['sonarrEpisodeId'])
else:
movie_metadata = TableMovies.select(TableMovies.radarrId) \
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path)) \
.dicts() \
.get()
series_id = ""
episode_id = movie_metadata['radarrId']
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, media_type=media_type,
percent_score=100, radarr_id=movie_metadata['radarrId'], forced=forced)
if use_postprocessing:
command = pp_replace(postprocessing_cmd, path, subtitle_path, uploaded_language,
uploaded_language_code2, uploaded_language_code3, audio_language,
audio_language_code2, audio_language_code3, forced, 100, "1", "manual", series_id,
episode_id, hi=hi)
postprocessing(command, path)
if media_type == 'series':
reversed_path = path_mappings.path_replace_reverse(path)
reversed_subtitles_path = path_mappings.path_replace_reverse(subtitle_path)
notify_sonarr(episode_metadata['sonarrSeriesId'])
event_stream(type='series', action='update', payload=episode_metadata['sonarrSeriesId'])
event_stream(type='episode-wanted', action='delete', payload=episode_metadata['sonarrEpisodeId'])
else:
reversed_path = path_mappings.path_replace_reverse_movie(path)
reversed_subtitles_path = path_mappings.path_replace_reverse_movie(subtitle_path)
notify_radarr(movie_metadata['radarrId'])
event_stream(type='movie', action='update', payload=movie_metadata['radarrId'])
event_stream(type='movie-wanted', action='delete', payload=movie_metadata['radarrId'])
return message, reversed_path, reversed_subtitles_path
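A minimal usage sketch for manual_upload_subtitle; the _FakeUpload helper and every path, title and language value below are placeholders standing in for what the API layer would pass:

class _FakeUpload:
    # stand-in for the uploaded file object; only .read() and .filename are used above
    filename = 'example.en.srt'

    def read(self):
        return b"1\n00:00:01,000 --> 00:00:02,000\nHello\n"

result = manual_upload_subtitle(path='/tv/Show/Season 01/Show - S01E01.mkv',
                                language='en', forced=False, hi=False,
                                title='Show', scene_name='None', media_type='series',
                                subtitle=_FakeUpload(), audio_language='English')
# result is either None (save failed) or (message, reversed_path, reversed_subtitles_path)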

@ -0,0 +1,94 @@
# coding=utf-8
# fmt: off
import os
import logging
import copy
from subzero.language import Language
from subzero.video import parse_video
from guessit import guessit
from custom_lang import CustomLanguage
from database import get_profiles_list
from score import movie_score, series_score
def get_video(path, title, sceneName, providers=None, media_type="movie"):
"""
Construct `Video` instance
:param path: path to video
:param title: series/movie title
    :param sceneName: scene/release name of the file, used to improve guessing
:param providers: provider list for selective hashing
:param media_type: movie/series
:return: `Video` instance
"""
hints = {"title": title, "type": "movie" if media_type == "movie" else "episode"}
used_scene_name = False
original_path = path
original_name = os.path.basename(path)
hash_from = None
if sceneName != "None":
# use the sceneName but keep the folder structure for better guessing
path = os.path.join(os.path.dirname(path), sceneName + os.path.splitext(path)[1])
used_scene_name = True
hash_from = original_path
try:
video = parse_video(path, hints=hints, providers=providers, dry_run=used_scene_name,
hash_from=hash_from)
video.used_scene_name = used_scene_name
video.original_name = original_name
video.original_path = original_path
        from get_subtitle.refiners import refine_from_db, refine_from_ffprobe
        refine_from_db(original_path, video)
        refine_from_ffprobe(original_path, video)
logging.debug('BAZARR is using these video object properties: %s', vars(copy.deepcopy(video)))
return video
except Exception:
logging.exception("BAZARR Error trying to get video information for this file: " + original_path)
def convert_to_guessit(guessit_key, attr_from_db):
try:
return guessit(attr_from_db)[guessit_key]
except KeyError:
return attr_from_db
def _get_download_code3(subtitle):
custom = CustomLanguage.from_value(subtitle.language, "language")
if custom is None:
return subtitle.language.alpha3
return custom.alpha3
def _get_lang_obj(alpha3):
sub = CustomLanguage.from_value(alpha3, "alpha3")
if sub is None:
return Language(alpha3)
return sub.subzero_language()
def _get_scores(media_type, min_movie=None, min_ep=None):
series = "series" == media_type
handler = series_score if series else movie_score
min_movie = min_movie or (60 * 100 / handler.max_score)
min_ep = min_ep or (240 * 100 / handler.max_score)
min_score_ = int(min_ep if series else min_movie)
return handler.get_scores(min_score_)
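# Worked example for the defaults above, assuming max_score is 120 for movies and 360 for
# episodes (consistent with the 117/120 and 357/360 thresholds used in the upgrade logic):
#   min_movie defaults to 60 * 100 / 120  = 50.0   -> int() gives 50 (percent)
#   min_ep    defaults to 240 * 100 / 360 = 66.66  -> int() gives 66 (percent)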
def get_ban_list(profile_id):
if profile_id:
profile = get_profiles_list(profile_id)
if profile:
return {'must_contain': profile['mustContain'] or [],
'must_not_contain': profile['mustNotContain'] or []}
return None
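A usage sketch for the helpers above; the path, title, scene name, provider name and profile id are placeholders:

video = get_video('/movies/Example (2021)/Example.2021.mkv',
                  'Example',
                  'Example.2021.1080p.WEB-DL.x264-GRP',
                  providers=['opensubtitles'],
                  media_type='movie')
ban_list = get_ban_list(profile_id=1)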

@ -0,0 +1,2 @@
from .movies import wanted_download_subtitles_movie, wanted_search_missing_subtitles_movies # noqa: W0611
from .series import wanted_download_subtitles, wanted_search_missing_subtitles_series # noqa: W0611

@ -0,0 +1,131 @@
# coding=utf-8
# fmt: off
import ast
import logging
import operator
from functools import reduce
from helper import path_mappings
from list_subtitles import store_subtitles_movie
from utils import history_log_movie
from notifier import send_notifications_movie
from get_providers import get_providers
from database import get_exclusion_clause, get_audio_profile_languages, TableMovies
from event_handler import event_stream, show_progress, hide_progress
from ..adaptive_searching import is_search_active, updateFailedAttempts
from ..download import generate_subtitles
def _wanted_movie(movie):
audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
languages = []
for language in ast.literal_eval(movie['missing_subtitles']):
        # confirm if language is still missing or if cutoff has been reached
confirmed_missing_subs = TableMovies.select(TableMovies.missing_subtitles) \
.where(TableMovies.radarrId == movie['radarrId']) \
.dicts() \
.get()
if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
continue
if is_search_active(desired_language=language, attempt_string=movie['failedAttempts']):
TableMovies.update({TableMovies.failedAttempts:
updateFailedAttempts(desired_language=language,
attempt_string=movie['failedAttempts'])}) \
.where(TableMovies.radarrId == movie['radarrId']) \
.execute()
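            # hi/forced are passed as the strings "True"/"False" to match generate_subtitles' other call sites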
hi_ = "True" if language.endswith(':hi') else "False"
forced_ = "True" if language.endswith(':forced') else "False"
languages.append((language.split(":")[0], hi_, forced_))
else:
logging.info(f"BAZARR Search is throttled by adaptive search for this movie {movie['path']} and "
f"language: {language}")
for result in generate_subtitles(path_mappings.path_replace_movie(movie['path']),
languages,
audio_language,
str(movie['sceneName']),
movie['title'], 'movie'):
if result:
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))
history_log_movie(1, movie['radarrId'], message, path, language_code, provider, score,
subs_id, subs_path)
event_stream(type='movie-wanted', action='delete', payload=movie['radarrId'])
send_notifications_movie(movie['radarrId'], message)
def wanted_download_subtitles_movie(radarr_id):
movies_details = TableMovies.select(TableMovies.path,
TableMovies.missing_subtitles,
TableMovies.radarrId,
TableMovies.audio_language,
TableMovies.sceneName,
TableMovies.failedAttempts,
TableMovies.title)\
.where((TableMovies.radarrId == radarr_id))\
.dicts()
movies_details = list(movies_details)
for movie in movies_details:
providers_list = get_providers()
if providers_list:
_wanted_movie(movie)
else:
logging.info("BAZARR All providers are throttled")
break
def wanted_search_missing_subtitles_movies():
conditions = [(TableMovies.missing_subtitles != '[]')]
conditions += get_exclusion_clause('movie')
movies = TableMovies.select(TableMovies.radarrId,
TableMovies.tags,
TableMovies.monitored,
TableMovies.title) \
.where(reduce(operator.and_, conditions)) \
.dicts()
movies = list(movies)
count_movies = len(movies)
for i, movie in enumerate(movies):
show_progress(id='wanted_movies_progress',
header='Searching subtitles...',
name=movie['title'],
value=i,
count=count_movies)
providers = get_providers()
if providers:
wanted_download_subtitles_movie(movie['radarrId'])
else:
logging.info("BAZARR All providers are throttled")
return
hide_progress(id='wanted_movies_progress')
logging.info('BAZARR Finished searching for missing Movies Subtitles. Check History for more information.')
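A direct invocation sketch for the two entry points above (the Radarr id is a placeholder):

wanted_download_subtitles_movie(123)
wanted_search_missing_subtitles_movies()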

@ -0,0 +1,144 @@
# coding=utf-8
# fmt: off
import ast
import logging
import operator
from functools import reduce
from helper import path_mappings
from list_subtitles import store_subtitles
from utils import history_log
from notifier import send_notifications
from get_providers import get_providers
from database import get_exclusion_clause, get_audio_profile_languages, TableShows, TableEpisodes
from event_handler import event_stream, show_progress, hide_progress
from ..adaptive_searching import is_search_active, updateFailedAttempts
from ..download import generate_subtitles
def _wanted_episode(episode):
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
languages = []
for language in ast.literal_eval(episode['missing_subtitles']):
        # confirm if language is still missing or if cutoff has been reached
confirmed_missing_subs = TableEpisodes.select(TableEpisodes.missing_subtitles) \
.where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId']) \
.dicts() \
.get()
if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
continue
if is_search_active(desired_language=language, attempt_string=episode['failedAttempts']):
TableEpisodes.update({TableEpisodes.failedAttempts:
updateFailedAttempts(desired_language=language,
attempt_string=episode['failedAttempts'])}) \
.where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId']) \
.execute()
hi_ = "True" if language.endswith(':hi') else "False"
forced_ = "True" if language.endswith(':forced') else "False"
languages.append((language.split(":")[0], hi_, forced_))
else:
logging.debug(
f"BAZARR Search is throttled by adaptive search for this episode {episode['path']} and "
f"language: {language}")
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
languages,
audio_language,
str(episode['scene_name']),
episode['title'],
'series'):
if result:
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message, path,
language_code, provider, score, subs_id, subs_path)
event_stream(type='series', action='update', payload=episode['sonarrSeriesId'])
event_stream(type='episode-wanted', action='delete', payload=episode['sonarrEpisodeId'])
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message)
def wanted_download_subtitles(sonarr_episode_id):
episodes_details = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.missing_subtitles,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.sonarrSeriesId,
TableEpisodes.audio_language,
TableEpisodes.scene_name,
TableEpisodes.failedAttempts,
TableShows.title)\
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
.where((TableEpisodes.sonarrEpisodeId == sonarr_episode_id))\
.dicts()
episodes_details = list(episodes_details)
for episode in episodes_details:
providers_list = get_providers()
if providers_list:
_wanted_episode(episode)
else:
logging.info("BAZARR All providers are throttled")
break
def wanted_search_missing_subtitles_series():
conditions = [(TableEpisodes.missing_subtitles != '[]')]
conditions += get_exclusion_clause('series')
episodes = TableEpisodes.select(TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId,
TableShows.tags,
TableEpisodes.monitored,
TableShows.title,
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.title.alias('episodeTitle'),
TableShows.seriesType)\
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
.where(reduce(operator.and_, conditions))\
.dicts()
episodes = list(episodes)
count_episodes = len(episodes)
for i, episode in enumerate(episodes):
show_progress(id='wanted_episodes_progress',
header='Searching subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode['title'],
episode['season'],
episode['episode'],
episode['episodeTitle']),
value=i,
count=count_episodes)
providers = get_providers()
if providers:
wanted_download_subtitles(episode['sonarrEpisodeId'])
else:
logging.info("BAZARR All providers are throttled")
return
hide_progress(id='wanted_episodes_progress')
logging.info('BAZARR Finished searching for missing Series Subtitles. Check History for more information.')

@ -1,6 +1,5 @@
# coding=utf-8 # coding=utf-8
import ast
import os import os
import re import re
import logging import logging
@ -97,23 +96,6 @@ path_mappings = PathMappings()
def pp_replace(pp_command, episode, subtitles, language, language_code2, language_code3, episode_language, episode_language_code2, episode_language_code3, forced, score, subtitle_id, provider, series_id, episode_id, hi): def pp_replace(pp_command, episode, subtitles, language, language_code2, language_code3, episode_language, episode_language_code2, episode_language_code3, forced, score, subtitle_id, provider, series_id, episode_id, hi):
if hi:
modifier_string = " HI"
elif forced:
modifier_string = " forced"
else:
modifier_string = ""
if hi:
modifier_code = ":hi"
modifier_code_dot = ".hi"
elif forced:
modifier_code = ":forced"
modifier_code_dot = ".forced"
else:
modifier_code = ""
modifier_code_dot = ""
pp_command = pp_command.replace('{{directory}}', os.path.dirname(episode)) pp_command = pp_command.replace('{{directory}}', os.path.dirname(episode))
pp_command = pp_command.replace('{{episode}}', episode) pp_command = pp_command.replace('{{episode}}', episode)
pp_command = pp_command.replace('{{episode_name}}', os.path.splitext(os.path.basename(episode))[0]) pp_command = pp_command.replace('{{episode_name}}', os.path.splitext(os.path.basename(episode))[0])
@ -144,11 +126,11 @@ def get_target_folder(file_path):
subfolder = settings.general.subfolder subfolder = settings.general.subfolder
fld_custom = str(settings.general.subfolder_custom).strip() \ fld_custom = str(settings.general.subfolder_custom).strip() \
if settings.general.subfolder_custom else None if settings.general.subfolder_custom else None
if subfolder != "current" and fld_custom: if subfolder != "current" and fld_custom:
# specific subFolder requested, create it if it doesn't exist # specific subFolder requested, create it if it doesn't exist
fld_base = os.path.split(file_path)[0] fld_base = os.path.split(file_path)[0]
if subfolder == "absolute": if subfolder == "absolute":
# absolute folder # absolute folder
fld = fld_custom fld = fld_custom
@ -156,18 +138,18 @@ def get_target_folder(file_path):
fld = os.path.join(fld_base, fld_custom) fld = os.path.join(fld_base, fld_custom)
else: else:
fld = None fld = None
fld = force_unicode(fld) fld = force_unicode(fld)
if not os.path.isdir(fld): if not os.path.isdir(fld):
try: try:
os.makedirs(fld) os.makedirs(fld)
except Exception as e: except Exception:
logging.error('BAZARR is unable to create directory to save subtitles: ' + fld) logging.error('BAZARR is unable to create directory to save subtitles: ' + fld)
fld = None fld = None
else: else:
fld = None fld = None
return fld return fld

@ -45,7 +45,7 @@ if not os.path.exists(os.path.join(args.config_dir, 'cache')):
os.mkdir(os.path.join(args.config_dir, 'cache')) os.mkdir(os.path.join(args.config_dir, 'cache'))
configure_logging(settings.general.getboolean('debug') or args.debug) configure_logging(settings.general.getboolean('debug') or args.debug)
import logging import logging # noqa E402
def is_virtualenv(): def is_virtualenv():
@ -59,10 +59,10 @@ def is_virtualenv():
# deploy requirements.txt # deploy requirements.txt
if not args.no_update: if not args.no_update:
try: try:
import lxml, numpy, webrtcvad, setuptools import lxml, numpy, webrtcvad, setuptools # noqa E401
except ImportError: except ImportError:
try: try:
import pip import pip # noqa W0611
except ImportError: except ImportError:
logging.info('BAZARR unable to install requirements (pip not installed).') logging.info('BAZARR unable to install requirements (pip not installed).')
else: else:
@ -136,7 +136,7 @@ if os.path.isfile(package_info_file):
continue continue
if 'branch' in package_info: if 'branch' in package_info:
settings.general.branch = package_info['branch'] settings.general.branch = package_info['branch']
except: except Exception:
pass pass
else: else:
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle: with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
@ -173,24 +173,24 @@ with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'config.ini')
def init_binaries(): def init_binaries():
from utils import get_binary from utils import get_binary
exe = get_binary("unrar") exe = get_binary("unrar")
rarfile.UNRAR_TOOL = exe rarfile.UNRAR_TOOL = exe
rarfile.ORIG_UNRAR_TOOL = exe rarfile.ORIG_UNRAR_TOOL = exe
try: try:
rarfile.custom_check([rarfile.UNRAR_TOOL], True) rarfile.custom_check([rarfile.UNRAR_TOOL], True)
except: except Exception:
logging.debug("custom check failed for: %s", exe) logging.debug("custom check failed for: %s", exe)
rarfile.OPEN_ARGS = rarfile.ORIG_OPEN_ARGS rarfile.OPEN_ARGS = rarfile.ORIG_OPEN_ARGS
rarfile.EXTRACT_ARGS = rarfile.ORIG_EXTRACT_ARGS rarfile.EXTRACT_ARGS = rarfile.ORIG_EXTRACT_ARGS
rarfile.TEST_ARGS = rarfile.ORIG_TEST_ARGS rarfile.TEST_ARGS = rarfile.ORIG_TEST_ARGS
logging.debug("Using UnRAR from: %s", exe) logging.debug("Using UnRAR from: %s", exe)
unrar = exe unrar = exe
return unrar return unrar
from database import init_db, migrate_db from database import init_db, migrate_db # noqa E402
init_db() init_db()
migrate_db() migrate_db()
init_binaries() init_binaries()

@ -65,7 +65,7 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
actual_subtitles.append([lang, None]) actual_subtitles.append([lang, None])
except Exception as error: except Exception as error:
logging.debug("BAZARR unable to index this unrecognized language: %s (%s)", subtitle_language, error) logging.debug("BAZARR unable to index this unrecognized language: %s (%s)", subtitle_language, error)
except Exception as e: except Exception:
logging.exception( logging.exception(
"BAZARR error when trying to analyze this %s file: %s" % (os.path.splitext(reversed_path)[1], "BAZARR error when trying to analyze this %s file: %s" % (os.path.splitext(reversed_path)[1],
reversed_path)) reversed_path))
@ -120,7 +120,7 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
logging.debug("BAZARR haven't been able to update existing subtitles to DB : " + str(actual_subtitles)) logging.debug("BAZARR haven't been able to update existing subtitles to DB : " + str(actual_subtitles))
else: else:
logging.debug("BAZARR this file doesn't seems to exist or isn't accessible.") logging.debug("BAZARR this file doesn't seems to exist or isn't accessible.")
logging.debug('BAZARR ended subtitles indexing for this file: ' + reversed_path) logging.debug('BAZARR ended subtitles indexing for this file: ' + reversed_path)
return actual_subtitles return actual_subtitles
@ -163,7 +163,7 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
lang = lang + ':hi' lang = lang + ':hi'
logging.debug("BAZARR embedded subtitles detected: " + lang) logging.debug("BAZARR embedded subtitles detected: " + lang)
actual_subtitles.append([lang, None]) actual_subtitles.append([lang, None])
except: except Exception:
logging.debug("BAZARR unable to index this unrecognized language: " + subtitle_language) logging.debug("BAZARR unable to index this unrecognized language: " + subtitle_language)
pass pass
except Exception: except Exception:
@ -183,7 +183,7 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
elif settings.general.subfolder == "relative": elif settings.general.subfolder == "relative":
full_dest_folder_path = os.path.join(os.path.dirname(reversed_path), dest_folder) full_dest_folder_path = os.path.join(os.path.dirname(reversed_path), dest_folder)
subtitles = guess_external_subtitles(full_dest_folder_path, subtitles) subtitles = guess_external_subtitles(full_dest_folder_path, subtitles)
except Exception as e: except Exception:
logging.exception("BAZARR unable to index external subtitles.") logging.exception("BAZARR unable to index external subtitles.")
pass pass
else: else:
@ -206,7 +206,7 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
language_str = str(language) language_str = str(language)
logging.debug("BAZARR external subtitles detected: " + language_str) logging.debug("BAZARR external subtitles detected: " + language_str)
actual_subtitles.append([language_str, path_mappings.path_replace_reverse_movie(subtitle_path)]) actual_subtitles.append([language_str, path_mappings.path_replace_reverse_movie(subtitle_path)])
TableMovies.update({TableMovies.subtitles: str(actual_subtitles)})\ TableMovies.update({TableMovies.subtitles: str(actual_subtitles)})\
.where(TableMovies.path == original_path)\ .where(TableMovies.path == original_path)\
.execute() .execute()
@ -220,7 +220,7 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
logging.debug("BAZARR haven't been able to update existing subtitles to DB : " + str(actual_subtitles)) logging.debug("BAZARR haven't been able to update existing subtitles to DB : " + str(actual_subtitles))
else: else:
logging.debug("BAZARR this file doesn't seems to exist or isn't accessible.") logging.debug("BAZARR this file doesn't seems to exist or isn't accessible.")
logging.debug('BAZARR ended subtitles indexing for this file: ' + reversed_path) logging.debug('BAZARR ended subtitles indexing for this file: ' + reversed_path)
return actual_subtitles return actual_subtitles
@ -456,7 +456,7 @@ def series_full_scan_subtitles():
use_ffprobe_cache = settings.sonarr.getboolean('use_ffprobe_cache') use_ffprobe_cache = settings.sonarr.getboolean('use_ffprobe_cache')
episodes = TableEpisodes.select(TableEpisodes.path).dicts() episodes = TableEpisodes.select(TableEpisodes.path).dicts()
count_episodes = len(episodes) count_episodes = len(episodes)
for i, episode in enumerate(episodes): for i, episode in enumerate(episodes):
show_progress(id='episodes_disk_scan', show_progress(id='episodes_disk_scan',
@ -467,7 +467,7 @@ def series_full_scan_subtitles():
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']), use_cache=use_ffprobe_cache) store_subtitles(episode['path'], path_mappings.path_replace(episode['path']), use_cache=use_ffprobe_cache)
hide_progress(id='episodes_disk_scan') hide_progress(id='episodes_disk_scan')
gc.collect() gc.collect()
@ -475,7 +475,7 @@ def movies_full_scan_subtitles():
use_ffprobe_cache = settings.radarr.getboolean('use_ffprobe_cache') use_ffprobe_cache = settings.radarr.getboolean('use_ffprobe_cache')
movies = TableMovies.select(TableMovies.path).dicts() movies = TableMovies.select(TableMovies.path).dicts()
count_movies = len(movies) count_movies = len(movies)
for i, movie in enumerate(movies): for i, movie in enumerate(movies):
show_progress(id='movies_disk_scan', show_progress(id='movies_disk_scan',
@ -496,7 +496,7 @@ def series_scan_subtitles(no):
.where(TableEpisodes.sonarrSeriesId == no)\ .where(TableEpisodes.sonarrSeriesId == no)\
.order_by(TableEpisodes.sonarrEpisodeId)\ .order_by(TableEpisodes.sonarrEpisodeId)\
.dicts() .dicts()
for episode in episodes: for episode in episodes:
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']), use_cache=False) store_subtitles(episode['path'], path_mappings.path_replace(episode['path']), use_cache=False)
@ -506,14 +506,14 @@ def movies_scan_subtitles(no):
.where(TableMovies.radarrId == no)\ .where(TableMovies.radarrId == no)\
.order_by(TableMovies.radarrId)\ .order_by(TableMovies.radarrId)\
.dicts() .dicts()
for movie in movies: for movie in movies:
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']), use_cache=False) store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']), use_cache=False)
def get_external_subtitles_path(file, subtitle): def get_external_subtitles_path(file, subtitle):
fld = os.path.dirname(file) fld = os.path.dirname(file)
if settings.general.subfolder == "current": if settings.general.subfolder == "current":
path = os.path.join(fld, subtitle) path = os.path.join(fld, subtitle)
elif settings.general.subfolder == "absolute": elif settings.general.subfolder == "absolute":
@ -534,7 +534,7 @@ def get_external_subtitles_path(file, subtitle):
path = None path = None
else: else:
path = None path = None
return path return path
@ -558,17 +558,18 @@ def guess_external_subtitles(dest_folder, subtitles):
try: try:
text = text.decode('utf-8') text = text.decode('utf-8')
detected_language = guess_language(text) detected_language = guess_language(text)
#add simplified and traditional chinese detection # add simplified and traditional chinese detection
if detected_language == 'zh': if detected_language == 'zh':
traditional_chinese_fuzzy = [u"繁", u"雙語"] traditional_chinese_fuzzy = [u"繁", u"雙語"]
traditional_chinese = [".cht", ".tc", ".zh-tw", ".zht",".zh-hant",".zhhant",".zh_hant",".hant", ".big5", ".traditional"] traditional_chinese = [".cht", ".tc", ".zh-tw", ".zht", ".zh-hant", ".zhhant", ".zh_hant",
".hant", ".big5", ".traditional"]
if str(os.path.splitext(subtitle)[0]).lower().endswith(tuple(traditional_chinese)) or (str(subtitle_path).lower())[:-5] in traditional_chinese_fuzzy: if str(os.path.splitext(subtitle)[0]).lower().endswith(tuple(traditional_chinese)) or (str(subtitle_path).lower())[:-5] in traditional_chinese_fuzzy:
detected_language = 'zt' detected_language = 'zt'
except UnicodeDecodeError: except UnicodeDecodeError:
detector = Detector() detector = Detector()
try: try:
guess = detector.detect(text) guess = detector.detect(text)
except: except Exception:
logging.debug("BAZARR skipping this subtitles because we can't guess the encoding. " logging.debug("BAZARR skipping this subtitles because we can't guess the encoding. "
"It's probably a binary file: " + subtitle_path) "It's probably a binary file: " + subtitle_path)
continue continue
@ -576,13 +577,13 @@ def guess_external_subtitles(dest_folder, subtitles):
logging.debug('BAZARR detected encoding %r', guess) logging.debug('BAZARR detected encoding %r', guess)
try: try:
text = text.decode(guess) text = text.decode(guess)
except: except Exception:
logging.debug( logging.debug(
"BAZARR skipping this subtitles because we can't decode the file using the " "BAZARR skipping this subtitles because we can't decode the file using the "
"guessed encoding. It's probably a binary file: " + subtitle_path) "guessed encoding. It's probably a binary file: " + subtitle_path)
continue continue
detected_language = guess_language(text) detected_language = guess_language(text)
except: except Exception:
logging.debug('BAZARR was unable to detect encoding for this subtitles file: %r', subtitle_path) logging.debug('BAZARR was unable to detect encoding for this subtitles file: %r', subtitle_path)
finally: finally:
if detected_language: if detected_language:
@ -591,7 +592,7 @@ def guess_external_subtitles(dest_folder, subtitles):
try: try:
subtitles[subtitle] = Language.rebuild(Language.fromietf(detected_language), forced=False, subtitles[subtitle] = Language.rebuild(Language.fromietf(detected_language), forced=False,
hi=False) hi=False)
except: except Exception:
pass pass
# If language is still None (undetected), skip it # If language is still None (undetected), skip it
@ -623,7 +624,7 @@ def guess_external_subtitles(dest_folder, subtitles):
detector = Detector() detector = Detector()
try: try:
guess = detector.detect(text) guess = detector.detect(text)
except: except Exception:
logging.debug("BAZARR skipping this subtitles because we can't guess the encoding. " logging.debug("BAZARR skipping this subtitles because we can't guess the encoding. "
"It's probably a binary file: " + subtitle_path) "It's probably a binary file: " + subtitle_path)
continue continue
@ -631,7 +632,7 @@ def guess_external_subtitles(dest_folder, subtitles):
logging.debug('BAZARR detected encoding %r', guess) logging.debug('BAZARR detected encoding %r', guess)
try: try:
text = text.decode(guess) text = text.decode(guess)
except: except Exception:
logging.debug("BAZARR skipping this subtitles because we can't decode the file using the " logging.debug("BAZARR skipping this subtitles because we can't decode the file using the "
"guessed encoding. It's probably a binary file: " + subtitle_path) "guessed encoding. It's probably a binary file: " + subtitle_path)
continue continue

@ -48,7 +48,7 @@ class NoExceptionFormatter(logging.Formatter):
def format(self, record): def format(self, record):
record.exc_text = '' # ensure formatException gets called record.exc_text = '' # ensure formatException gets called
return super(NoExceptionFormatter, self).format(record) return super(NoExceptionFormatter, self).format(record)
def formatException(self, record): def formatException(self, record):
return '' return ''
@ -60,20 +60,20 @@ def configure_logging(debug=False):
log_level = "INFO" log_level = "INFO"
else: else:
log_level = "DEBUG" log_level = "DEBUG"
logger.handlers = [] logger.handlers = []
logger.setLevel(log_level) logger.setLevel(log_level)
# Console logging # Console logging
ch = logging.StreamHandler() ch = logging.StreamHandler()
cf = (debug and logging.Formatter or NoExceptionFormatter)( cf = (debug and logging.Formatter or NoExceptionFormatter)(
'%(asctime)-15s - %(name)-32s (%(thread)x) : %(levelname)s (%(module)s:%(lineno)d) - %(message)s') '%(asctime)-15s - %(name)-32s (%(thread)x) : %(levelname)s (%(module)s:%(lineno)d) - %(message)s')
ch.setFormatter(cf) ch.setFormatter(cf)
ch.setLevel(log_level) ch.setLevel(log_level)
logger.addHandler(ch) logger.addHandler(ch)
# File Logging # File Logging
global fh global fh
fh = TimedRotatingFileHandler(os.path.join(args.config_dir, 'log/bazarr.log'), when="midnight", interval=1, fh = TimedRotatingFileHandler(os.path.join(args.config_dir, 'log/bazarr.log'), when="midnight", interval=1,
@ -83,7 +83,7 @@ def configure_logging(debug=False):
fh.setFormatter(f) fh.setFormatter(f)
fh.setLevel(log_level) fh.setLevel(log_level)
logger.addHandler(fh) logger.addHandler(fh)
if debug: if debug:
logging.getLogger("peewee").setLevel(logging.DEBUG) logging.getLogger("peewee").setLevel(logging.DEBUG)
logging.getLogger("apscheduler").setLevel(logging.DEBUG) logging.getLogger("apscheduler").setLevel(logging.DEBUG)

@ -12,31 +12,31 @@ if os.path.isfile(version_file):
os.environ["BAZARR_VERSION"] = bazarr_version.lstrip('v') os.environ["BAZARR_VERSION"] = bazarr_version.lstrip('v')
import libs import libs # noqa W0611
from get_args import args from get_args import args # noqa E402
from config import settings, url_sonarr, url_radarr, configure_proxy_func, base_url from config import settings, url_sonarr, url_radarr, configure_proxy_func, base_url # noqa E402
from init import * from init import * # noqa E402
from database import System from database import System # noqa E402
from notifier import update_notifier from notifier import update_notifier # noqa E402
from urllib.parse import unquote from urllib.parse import unquote # noqa E402
from get_languages import load_language_in_db from get_languages import load_language_in_db # noqa E402
from flask import make_response, request, redirect, abort, render_template, Response, session, flash, url_for, \ from flask import request, redirect, abort, render_template, Response, session, send_file, stream_with_context # noqa E402
send_file, stream_with_context from threading import Thread # noqa E402
from threading import Thread import requests # noqa E402
from get_series import * from get_series import * # noqa E402
from get_episodes import * from get_episodes import * # noqa E402
from get_movies import * from get_movies import * # noqa E402
from signalr_client import sonarr_signalr_client, radarr_signalr_client from signalr_client import sonarr_signalr_client, radarr_signalr_client # noqa E402
from check_update import apply_update, check_if_new_update, check_releases from check_update import apply_update, check_releases # noqa E402
from server import app, webserver from server import app, webserver # noqa E402
from functools import wraps from functools import wraps # noqa E402
from utils import check_credentials, get_sonarr_info, get_radarr_info from utils import check_credentials, get_sonarr_info, get_radarr_info # noqa E402
# Install downloaded update # Install downloaded update
if bazarr_version != '': if bazarr_version != '':
@ -94,7 +94,7 @@ def catch_all(path):
try: try:
updated = System.get().updated updated = System.get().updated
except: except Exception:
updated = '0' updated = '0'
inject = dict() inject = dict()
@ -132,7 +132,7 @@ def series_images(url):
apikey).replace('poster-250', 'poster-500') apikey).replace('poster-250', 'poster-500')
try: try:
req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=headers) req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=headers)
except: except Exception:
return '', 404 return '', 404
else: else:
return Response(stream_with_context(req.iter_content(2048)), content_type=req.headers['content-type']) return Response(stream_with_context(req.iter_content(2048)), content_type=req.headers['content-type'])
@ -149,7 +149,7 @@ def movies_images(url):
url_image = url_radarr() + '/api/v3/' + url.lstrip(baseUrl) + '?apikey=' + apikey url_image = url_radarr() + '/api/v3/' + url.lstrip(baseUrl) + '?apikey=' + apikey
try: try:
req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=headers) req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=headers)
except: except Exception:
return '', 404 return '', 404
else: else:
return Response(stream_with_context(req.iter_content(2048)), content_type=req.headers['content-type']) return Response(stream_with_context(req.iter_content(2048)), content_type=req.headers['content-type'])

@ -4,8 +4,8 @@ from get_episodes import sync_episodes, update_all_episodes
from get_movies import update_movies, update_all_movies from get_movies import update_movies, update_all_movies
from get_series import update_series from get_series import update_series
from config import settings from config import settings
from get_subtitle import wanted_search_missing_subtitles_series, wanted_search_missing_subtitles_movies, \ from get_subtitle.wanted import wanted_search_missing_subtitles_series, wanted_search_missing_subtitles_movies
upgrade_subtitles from get_subtitle.upgrade import upgrade_subtitles
from utils import cache_maintenance, check_health from utils import cache_maintenance, check_health
from get_args import args from get_args import args
if not args.no_update: if not args.no_update:

@ -13,7 +13,7 @@ from database import database
from app import create_app from app import create_app
app = create_app() app = create_app()
from api import api_bp_list from api import api_bp_list # noqa E402
for item in api_bp_list: for item in api_bp_list:
app.register_blueprint(item, url_prefix=base_url.rstrip('/') + '/api') app.register_blueprint(item, url_prefix=base_url.rstrip('/') + '/api')

@ -60,7 +60,7 @@ class SonarrSignalrClientLegacy:
def stop(self, log=True): def stop(self, log=True):
try: try:
self.connection.close() self.connection.close()
except Exception as e: except Exception:
pass pass
if log: if log:
logging.info('BAZARR SignalR client for Sonarr is now disconnected.') logging.info('BAZARR SignalR client for Sonarr is now disconnected.')
@ -70,7 +70,7 @@ class SonarrSignalrClientLegacy:
if self.connection.started: if self.connection.started:
try: try:
self.stop(log=False) self.stop(log=False)
except: except Exception:
self.connection.started = False self.connection.started = False
if settings.general.getboolean('use_sonarr'): if settings.general.getboolean('use_sonarr'):
self.start() self.start()

@ -17,7 +17,7 @@ class SubSyncer:
self.ffmpeg_path = None self.ffmpeg_path = None
self.args = None self.args = None
try: try:
import webrtcvad import webrtcvad # noqa W0611
except ImportError: except ImportError:
self.vad = 'subs_then_auditok' self.vad = 'subs_then_auditok'
else: else:
@ -54,7 +54,7 @@ class SubSyncer:
parser = make_parser() parser = make_parser()
self.args = parser.parse_args(args=unparsed_args) self.args = parser.parse_args(args=unparsed_args)
result = run(self.args) result = run(self.args)
except Exception as e: except Exception:
logging.exception('BAZARR an exception occurs during the synchronization process for this subtitles: ' logging.exception('BAZARR an exception occurs during the synchronization process for this subtitles: '
'{0}'.format(self.srtin)) '{0}'.format(self.srtin))
else: else:

@ -17,7 +17,7 @@ from custom_lang import CustomLanguage
from database import TableHistory, TableHistoryMovie, TableBlacklist, TableBlacklistMovie, TableShowsRootfolder, \ from database import TableHistory, TableHistoryMovie, TableBlacklist, TableBlacklistMovie, TableShowsRootfolder, \
TableMoviesRootfolder TableMoviesRootfolder
from event_handler import event_stream from event_handler import event_stream
from get_languages import alpha2_from_alpha3, language_from_alpha3, language_from_alpha2, alpha3_from_alpha2 from get_languages import language_from_alpha2, alpha3_from_alpha2
from helper import path_mappings from helper import path_mappings
from list_subtitles import store_subtitles, store_subtitles_movie from list_subtitles import store_subtitles, store_subtitles_movie
from subliminal_patch.subtitle import Subtitle from subliminal_patch.subtitle import Subtitle
@ -293,7 +293,7 @@ def notify_sonarr(sonarr_series_id):
'seriesId': int(sonarr_series_id) 'seriesId': int(sonarr_series_id)
} }
requests.post(url, json=data, timeout=60, verify=False, headers=headers) requests.post(url, json=data, timeout=60, verify=False, headers=headers)
except Exception as e: except Exception:
logging.exception('BAZARR cannot notify Sonarr') logging.exception('BAZARR cannot notify Sonarr')
@ -321,7 +321,7 @@ class GetRadarrInfo:
except json.decoder.JSONDecodeError: except json.decoder.JSONDecodeError:
rv = url_radarr() + "/api/v3/system/status?apikey=" + settings.radarr.apikey rv = url_radarr() + "/api/v3/system/status?apikey=" + settings.radarr.apikey
radarr_version = requests.get(rv, timeout=60, verify=False, headers=headers).json()['version'] radarr_version = requests.get(rv, timeout=60, verify=False, headers=headers).json()['version']
except Exception as e: except Exception:
logging.debug('BAZARR cannot get Radarr version') logging.debug('BAZARR cannot get Radarr version')
radarr_version = 'unknown' radarr_version = 'unknown'
logging.debug('BAZARR got this Radarr version from its API: {}'.format(radarr_version)) logging.debug('BAZARR got this Radarr version from its API: {}'.format(radarr_version))
@ -354,7 +354,7 @@ def notify_radarr(radarr_id):
'movieId': int(radarr_id) 'movieId': int(radarr_id)
} }
requests.post(url, json=data, timeout=60, verify=False, headers=headers) requests.post(url, json=data, timeout=60, verify=False, headers=headers)
except Exception as e: except Exception:
logging.exception('BAZARR cannot notify Radarr') logging.exception('BAZARR cannot notify Radarr')
@ -372,7 +372,7 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
elif forced in [True, 'true', 'True']: elif forced in [True, 'true', 'True']:
language_log += ':forced' language_log += ':forced'
language_string += ' forced' language_string += ' forced'
result = language_string + " subtitles deleted from disk." result = language_string + " subtitles deleted from disk."
if media_type == 'series': if media_type == 'series':
@ -481,7 +481,7 @@ def translate_subtitles_file(video_path, source_srt_file, to_lang, forced, hi):
target=language_code_convert_dict.get(lang_obj.alpha2, target=language_code_convert_dict.get(lang_obj.alpha2,
lang_obj.alpha2) lang_obj.alpha2)
).translate(text=block_str) ).translate(text=block_str)
except: except Exception:
return False return False
else: else:
translated_partial_srt_list = translated_partial_srt_text.split('\n\n\n') translated_partial_srt_list = translated_partial_srt_text.split('\n\n\n')
