Merge branch 'development'

# Conflicts:
#	.github/workflows/ci.yml

@@ -1,6 +1,6 @@
#!/bin/bash
python3 "${ROOT_DIRECTORY}"/bazarr.py &
python3 "${ROOT_DIRECTORY}"/bazarr.py --no-update &
PID=$!
sleep 30

@@ -23,19 +23,8 @@ jobs:
Frontend:
runs-on: ubuntu-latest
steps:
- name: Get source branch name
uses: haya14busa/action-cond@v1
id: branch_ref
with:
cond: ${{ github.event_name == 'pull_request' }}
if_true: ${{ github.head_ref }}
if_false: ${{ github.ref_name }}
- name: Checkout repository
uses: actions/checkout@v3
with:
ref: ${{ steps.branch_ref.outputs.value }}
fetch-depth: 1
uses: actions/checkout@v4
- name: Cache node_modules
uses: actions/cache@v3
@@ -83,19 +72,8 @@ jobs:
needs: Frontend
steps:
- name: Get source branch name
uses: haya14busa/action-cond@v1
id: branch_ref
with:
cond: ${{ github.event_name == 'pull_request' }}
if_true: ${{ github.head_ref }}
if_false: ${{ github.ref_name }}
- name: Checkout repository
uses: actions/checkout@v3
with:
ref: ${{ steps.branch_ref.outputs.value }}
fetch-depth: 1
uses: actions/checkout@v4
- name: Set up Python 3.8
uses: actions/setup-python@v4
@@ -113,7 +91,7 @@ jobs:
- name: Unit Tests
run: |
python3 bazarr.py &
python3 bazarr.py --no-update &
PID=$!
sleep 15
if kill -s 0 $PID
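
The test steps now launch Bazarr with --no-update so CI runs can't trigger the self-updater, then probe the process with `kill -s 0 $PID` after the warm-up sleep. For illustration only, a Python analogue of that liveness check (not part of the workflow; paths and timings mirror the script above):

    import subprocess
    import time

    # Start Bazarr without its self-updater, as the updated workflow does.
    proc = subprocess.Popen(["python3", "bazarr.py", "--no-update"])
    time.sleep(15)

    # poll() returns None while the process is alive -- the same signal
    # `kill -s 0 $PID` gives the shell.
    if proc.poll() is None:
        print("Bazarr started and stayed up")
        proc.terminate()
    else:
        raise SystemExit(f"Bazarr exited early with code {proc.returncode}")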

@@ -18,7 +18,7 @@ jobs:
exit 1
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: ${{ env.FETCH_DEPTH }}
ref: development

@@ -22,7 +22,7 @@ jobs:
exit 1
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: development
@@ -64,7 +64,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Merge development -> master
uses: devmasx/merge-branch@1.4.0

@@ -16,7 +16,7 @@ jobs:
exit 1
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: ${{ env.FETCH_DEPTH }}
ref: development

@@ -86,6 +86,7 @@ If you need something that is not already part of Bazarr, feel free to create a
- Titulky.com
- TuSubtitulo
- TVSubtitles
- Whisper (requires [ahmetoner/whisper-asr-webservice](https://github.com/ahmetoner/whisper-asr-webservice))
- Wizdom
- XSubs
- Yavka.net

@@ -15,7 +15,7 @@ from subtitles.tools.delete import delete_subtitles
from sonarr.history import history_log
from app.notifier import send_notifications
from subtitles.indexer.series import store_subtitles
from app.event_handler import event_stream
from app.event_handler import event_stream, show_message
from app.config import settings
from ..utils import authenticate
@@ -69,6 +69,12 @@ class EpisodesSubtitles(Resource):
language = args.get('language')
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
if hi == 'True':
language_str = f'{language}:hi'
elif forced == 'True':
language_str = f'{language}:forced'
else:
language_str = language
audio_language_list = get_audio_profile_languages(episodeInfo.audio_language)
if len(audio_language_list) > 0:
@@ -88,7 +94,8 @@ class EpisodesSubtitles(Resource):
store_subtitles(result.path, episodePath)
else:
event_stream(type='episode', payload=sonarrEpisodeId)
return 'No subtitles found', 500
show_message(f'No {language_str.upper()} subtitles found')
return '', 204
except OSError:
return 'Unable to save subtitles file. Permission or path mapping issue?', 409
else:
@@ -162,7 +169,7 @@ class EpisodesSubtitles(Resource):
provider = "manual"
score = 360
history_log(4, sonarrSeriesId, sonarrEpisodeId, result, fake_provider=provider, fake_score=score)
if not settings.general.getboolean('dont_notify_manual_actions'):
if not settings.general.dont_notify_manual_actions:
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
store_subtitles(result.path, episodePath)
except OSError:
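
The endpoint above now derives a language string with an :hi or :forced suffix so the new show_message() toast can say exactly which flavor of subtitles was missing, and a failed download answers 204 with a message instead of a bare 500. A minimal standalone sketch of that mapping (function name is illustrative):

    def build_language_str(language, hi, forced):
        # hi/forced arrive as 'True'/'False' strings from the request parser
        # (hence the .capitalize() above), so string comparison is intentional.
        if hi == 'True':
            return f'{language}:hi'
        elif forced == 'True':
            return f'{language}:forced'
        return language

    assert build_language_str('en', 'True', 'False') == 'en:hi'
    assert build_language_str('fr', 'False', 'True') == 'fr:forced'
    assert build_language_str('es', 'False', 'False') == 'es'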

@@ -150,7 +150,7 @@ class EpisodesHistory(Resource):
del item['external_subtitles']
if item['score']:
item['score'] = str(round((int(item['score']) * 100 / 360), 2)) + "%"
item['score'] = f"{round((int(item['score']) * 100 / 360), 2)}%"
# Make timestamp pretty
if item['timestamp']:

@@ -141,7 +141,7 @@ class MoviesHistory(Resource):
del item['external_subtitles']
if item['score']:
item['score'] = str(round((int(item['score']) * 100 / 120), 2)) + "%"
item['score'] = f"{round((int(item['score']) * 100 / 120), 2)}%"
# Make timestamp pretty
if item['timestamp']:
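
Both history endpoints render scores as percentages of a fixed maximum: 360 for episodes, 120 for movies; the change itself is only from str(...) + "%" concatenation to an f-string. Worked through once:

    def score_percent(score, maximum):
        return f"{round(int(score) * 100 / maximum, 2)}%"

    assert score_percent('359', 360) == '99.72%'  # episode score
    assert score_percent('119', 120) == '99.17%'  # movie score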

@@ -15,7 +15,7 @@ from subtitles.tools.delete import delete_subtitles
from radarr.history import history_log_movie
from app.notifier import send_notifications_movie
from subtitles.indexer.movies import store_subtitles_movie
from app.event_handler import event_stream
from app.event_handler import event_stream, show_message
from app.config import settings
from ..utils import authenticate
@@ -67,6 +67,12 @@ class MoviesSubtitles(Resource):
language = args.get('language')
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
if hi == 'True':
language_str = f'{language}:hi'
elif forced == 'True':
language_str = f'{language}:forced'
else:
language_str = language
audio_language_list = get_audio_profile_languages(movieInfo.audio_language)
if len(audio_language_list) > 0:
@@ -85,7 +91,8 @@ class MoviesSubtitles(Resource):
store_subtitles_movie(result.path, moviePath)
else:
event_stream(type='movie', payload=radarrId)
return 'No subtitles found', 500
show_message(f'No {language_str.upper()} subtitles found')
return '', 204
except OSError:
return 'Unable to save subtitles file. Permission or path mapping issue?', 409
else:
@@ -158,7 +165,7 @@ class MoviesSubtitles(Resource):
provider = "manual"
score = 120
history_log_movie(4, radarrId, result, fake_provider=provider, fake_score=score)
if not settings.general.getboolean('dont_notify_manual_actions'):
if not settings.general.dont_notify_manual_actions:
send_notifications_movie(radarrId, result.message)
store_subtitles_movie(result.path, moviePath)
except OSError:

@@ -141,7 +141,7 @@ class ProviderEpisodes(Resource):
result = result[0]
if isinstance(result, ProcessSubtitlesResult):
history_log(2, sonarrSeriesId, sonarrEpisodeId, result)
if not settings.general.getboolean('dont_notify_manual_actions'):
if not settings.general.dont_notify_manual_actions:
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
store_subtitles(result.path, episodePath)
elif isinstance(result, str):

@@ -135,7 +135,7 @@ class ProviderMovies(Resource):
result = result[0]
if isinstance(result, ProcessSubtitlesResult):
history_log_movie(2, radarrId, result)
if not settings.general.getboolean('dont_notify_manual_actions'):
if not settings.general.dont_notify_manual_actions:
send_notifications_movie(radarrId, result.message)
store_subtitles_movie(result.path, moviePath)
elif isinstance(result, str):

@@ -116,7 +116,7 @@ class Subtitles(Resource):
# apply chmod if required
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
'win') and settings.general.chmod_enabled else None
if chmod:
os.chmod(subtitles_path, chmod)
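
Note that chmod keeps its string representation even after the migration (it is listed in str_keys further down): the leading zero only survives as text, and the value is parsed as base-8 at the point of use. A quick illustration:

    # The setting '0640' is an octal string; int(..., 8) turns it into the
    # numeric mode that os.chmod() expects.
    mode = int('0640', 8)
    assert mode == 0o640 == 416
    # os.chmod(subtitles_path, mode) then grants owner rw and group r.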

@@ -24,12 +24,12 @@ class SystemAccount(Resource):
@api_ns_system_account.response(400, 'Unknown action')
@api_ns_system_account.response(403, 'Authentication failed')
@api_ns_system_account.response(406, 'Browser must be closed to invalidate basic authentication')
@api_ns_system_account.response(500, 'Unknown authentication type define in config.ini')
@api_ns_system_account.response(500, 'Unknown authentication type defined in config')
def post(self):
"""Login or logout from Bazarr UI when using form login"""
args = self.post_request_parser.parse_args()
if settings.auth.type != 'form':
return 'Unknown authentication type define in config.ini', 500
return 'Unknown authentication type defined in config', 500
action = args.get('action')
if action == 'login':

@@ -58,5 +58,6 @@ class SystemReleases(Resource):
except Exception:
logging.exception(
'BAZARR cannot parse releases caching file: ' + os.path.join(args.config_dir, 'config', 'releases.txt'))
f'BAZARR cannot parse releases caching file: '
f'{os.path.join(args.config_dir, "config", "releases.txt")}')
return marshal(filtered_releases, self.get_response_model, envelope='data')

@@ -27,7 +27,7 @@ class Searches(Resource):
search_list = []
if query:
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
# Get matching series
search_list += database.execute(
select(TableShows.title,
@@ -36,7 +36,7 @@
.order_by(TableShows.title)) \
.all()
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
# Get matching movies
search_list += database.execute(
select(TableMovies.title,
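
The recurring getboolean('...') -> attribute-access change throughout this commit falls out of the configparser-to-Dynaconf migration (see config.py below): configparser stores every value as a string, so booleans needed explicit coercion, while Dynaconf loads the YAML config with native types. A minimal sketch of the difference, with hypothetical config content:

    import yaml

    # configparser era: every value is a string, hence getboolean().
    ini_value = 'True'
    use_sonarr_ini = ini_value == 'True'

    # Dynaconf era: the YAML loader already returns a bool.
    loaded = yaml.safe_load('general:\n  use_sonarr: true\n')
    use_sonarr_yaml = loaded['general']['use_sonarr']

    assert use_sonarr_ini is True and use_sonarr_yaml is True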

@@ -4,7 +4,9 @@ import json
from flask import request, jsonify
from flask_restx import Resource, Namespace
from dynaconf.validator import ValidationError
from api.utils import None_Keys
from app.database import TableLanguagesProfiles, TableSettingsLanguages, TableSettingsNotifier, \
update_profile_id_list, database, insert, update, delete, select
from app.event_handler import event_stream
@@ -65,11 +67,12 @@ class SystemSettings(Resource):
update(TableLanguagesProfiles)
.values(
name=item['name'],
cutoff=item['cutoff'] if item['cutoff'] != 'null' else None,
cutoff=item['cutoff'] if item['cutoff'] not in None_Keys else None,
items=json.dumps(item['items']),
mustContain=str(item['mustContain']),
mustNotContain=str(item['mustNotContain']),
originalFormat=item['originalFormat'] if item['originalFormat'] != 'null' else None,
originalFormat=int(item['originalFormat']) if item['originalFormat'] not in None_Keys else
None,
)
.where(TableLanguagesProfiles.profileId == item['profileId']))
existing.remove(item['profileId'])
@@ -80,11 +83,12 @@ class SystemSettings(Resource):
.values(
profileId=item['profileId'],
name=item['name'],
cutoff=item['cutoff'] if item['cutoff'] != 'null' else None,
cutoff=item['cutoff'] if item['cutoff'] not in None_Keys else None,
items=json.dumps(item['items']),
mustContain=str(item['mustContain']),
mustNotContain=str(item['mustNotContain']),
originalFormat=item['originalFormat'] if item['originalFormat'] != 'null' else None,
originalFormat=int(item['originalFormat']) if item['originalFormat'] not in None_Keys else
None,
))
for profileId in existing:
# Remove deleted profiles
@@ -97,9 +101,9 @@ class SystemSettings(Resource):
event_stream("languages")
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
scheduler.add_job(list_missing_subtitles, kwargs={'send_event': True})
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
scheduler.add_job(list_missing_subtitles_movies, kwargs={'send_event': True})
# Update Notification
@@ -112,6 +116,11 @@ class SystemSettings(Resource):
url=item['url'])
.where(TableSettingsNotifier.name == item['name']))
save_settings(zip(request.form.keys(), request.form.listvalues()))
event_stream("settings")
return '', 204
try:
save_settings(zip(request.form.keys(), request.form.listvalues()))
except ValidationError as e:
event_stream("settings")
return e.message, 406
else:
event_stream("settings")
return '', 204
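
Two behavior changes above: cutoff and originalFormat are now checked against None_Keys instead of only the literal string 'null', and a save_settings() failure surfaces to the UI as a 406 carrying the validator's message instead of silently persisting bad values. A sketch of the normalization, assuming None_Keys is a collection of null-like sentinels (the actual contents live in api.utils):

    # Hypothetical stand-in for api.utils.None_Keys.
    None_Keys = ['null', 'undefined', '', None]

    def normalize_cutoff(raw):
        return raw if raw not in None_Keys else None

    def normalize_original_format(raw):
        # Cast to int when set; the column expects an integer or NULL.
        return int(raw) if raw not in None_Keys else None

    assert normalize_cutoff('null') is None
    assert normalize_original_format('1') == 1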

@@ -77,7 +77,7 @@ def postprocess(item):
"hi": language[1] == 'hi',
}
)
if settings.general.getboolean('embedded_subs_show_desired') and item.get('profileId'):
if settings.general.embedded_subs_show_desired and item.get('profileId'):
desired_lang_list = get_desired_languages(item['profileId'])
item['subtitles'] = [x for x in item['subtitles'] if x['code2'] in desired_lang_list or x['path']]
item['subtitles'] = sorted(item['subtitles'], key=itemgetter('name', 'forced'))

@@ -62,7 +62,7 @@ class WebHooksPlex(Resource):
if media_type == 'episode':
try:
episode_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
r = requests.get('https://imdb.com/title/{}'.format(episode_imdb_id),
r = requests.get(f'https://imdb.com/title/{episode_imdb_id}',
headers={"User-Agent": os.environ["SZ_USER_AGENT"]})
soup = bso(r.content, "html.parser")
script_tag = soup.find(id='__NEXT_DATA__')

@@ -34,7 +34,7 @@ def create_app():
else:
app.config["DEBUG"] = False
socketio.init_app(app, path=base_url.rstrip('/')+'/api/socket.io', cors_allowed_origins='*',
socketio.init_app(app, path=f'{base_url.rstrip("/")}/api/socket.io', cors_allowed_origins='*',
async_mode='threading', allow_upgrades=False, transports='polling')
@app.errorhandler(404)

@@ -24,7 +24,7 @@ def check_releases():
releases = []
url_releases = 'https://api.github.com/repos/morpheus65535/Bazarr/releases?per_page=100'
try:
logging.debug('BAZARR getting releases from Github: {}'.format(url_releases))
logging.debug(f'BAZARR getting releases from Github: {url_releases}')
r = requests.get(url_releases, allow_redirects=True)
r.raise_for_status()
except requests.exceptions.HTTPError:
@@ -50,7 +50,7 @@ def check_releases():
'download_link': download_link})
with open(os.path.join(args.config_dir, 'config', 'releases.txt'), 'w') as f:
json.dump(releases, f)
logging.debug('BAZARR saved {} releases to releases.txt'.format(len(r.json())))
logging.debug(f'BAZARR saved {len(r.json())} releases to releases.txt')
def check_if_new_update():
@@ -59,9 +59,9 @@ def check_if_new_update():
elif settings.general.branch == 'development':
use_prerelease = True
else:
logging.error('BAZARR unknown branch provided to updater: {}'.format(settings.general.branch))
logging.error(f'BAZARR unknown branch provided to updater: {settings.general.branch}')
return
logging.debug('BAZARR updater is using {} branch'.format(settings.general.branch))
logging.debug(f'BAZARR updater is using {settings.general.branch} branch')
check_releases()
@@ -84,7 +84,7 @@ def check_if_new_update():
release = next((item for item in data if not item["prerelease"]), None)
if release and 'name' in release:
logging.debug('BAZARR last release available is {}'.format(release['name']))
logging.debug(f'BAZARR last release available is {release["name"]}')
if deprecated_python_version():
logging.warning('BAZARR is using a deprecated Python version, you must update Python to get latest '
'version available.')
@@ -101,12 +101,12 @@ def check_if_new_update():
# skip update process if latest release is v0.9.1.1 which is the latest pre-semver compatible release
if new_version and release['name'] != 'v0.9.1.1':
logging.debug('BAZARR newer release available and will be downloaded: {}'.format(release['name']))
logging.debug(f'BAZARR newer release available and will be downloaded: {release["name"]}')
download_release(url=release['download_link'])
# rolling back from nightly to stable release
elif current_version:
if current_version.prerelease and not use_prerelease:
logging.debug('BAZARR previous stable version will be downloaded: {}'.format(release['name']))
logging.debug(f'BAZARR previous stable version will be downloaded: {release["name"]}')
download_release(url=release['download_link'])
else:
logging.debug('BAZARR no newer release have been found')
@@ -122,9 +122,9 @@ def download_release(url):
try:
os.makedirs(update_dir, exist_ok=True)
except Exception:
logging.debug('BAZARR unable to create update directory {}'.format(update_dir))
logging.debug(f'BAZARR unable to create update directory {update_dir}')
else:
logging.debug('BAZARR downloading release from Github: {}'.format(url))
logging.debug(f'BAZARR downloading release from Github: {url}')
r = requests.get(url, allow_redirects=True)
if r:
try:
@@ -145,7 +145,7 @@ def apply_update():
if os.path.isdir(update_dir):
if os.path.isfile(bazarr_zip):
logging.debug('BAZARR is trying to unzip this release to {0}: {1}'.format(bazarr_dir, bazarr_zip))
logging.debug(f'BAZARR is trying to unzip this release to {bazarr_dir}: {bazarr_zip}')
try:
with ZipFile(bazarr_zip, 'r') as archive:
zip_root_directory = ''
@@ -195,7 +195,7 @@ def apply_update():
def update_cleaner(zipfile, bazarr_dir, config_dir):
with ZipFile(zipfile, 'r') as archive:
file_in_zip = archive.namelist()
logging.debug('BAZARR zip file contain {} directories and files'.format(len(file_in_zip)))
logging.debug(f'BAZARR zip file contain {len(file_in_zip)} directories and files')
separator = os.path.sep
if os.path.sep == '\\':
logging.debug('BAZARR upgrade leftover cleaner is running on Windows. We\'ll fix the zip file separator '
@@ -207,33 +207,33 @@ def update_cleaner(zipfile, bazarr_dir, config_dir):
logging.debug('BAZARR upgrade leftover cleaner is running on something else than Windows. The zip file '
'separator are fine.')
dir_to_ignore = ['^.' + separator,
'^bin' + separator,
'^venv' + separator,
'^WinPython' + separator,
separator + '__pycache__' + separator + '$']
dir_to_ignore = [f'^.{separator}',
f'^bin{separator}',
f'^venv{separator}',
f'^WinPython{separator}',
f'{separator}__pycache__{separator}$']
if os.path.abspath(bazarr_dir).lower() == os.path.abspath(config_dir).lower():
# for users who installed Bazarr inside the config directory (ie: `%programdata%\Bazarr` on windows)
dir_to_ignore.append('^backup' + separator)
dir_to_ignore.append('^cache' + separator)
dir_to_ignore.append('^config' + separator)
dir_to_ignore.append('^db' + separator)
dir_to_ignore.append('^log' + separator)
dir_to_ignore.append('^restore' + separator)
dir_to_ignore.append('^update' + separator)
dir_to_ignore.append(f'^backup{separator}')
dir_to_ignore.append(f'^cache{separator}')
dir_to_ignore.append(f'^config{separator}')
dir_to_ignore.append(f'^db{separator}')
dir_to_ignore.append(f'^log{separator}')
dir_to_ignore.append(f'^restore{separator}')
dir_to_ignore.append(f'^update{separator}')
elif os.path.abspath(bazarr_dir).lower() in os.path.abspath(config_dir).lower():
# when config directory is a child of Bazarr installation directory
dir_to_ignore.append('^' + os.path.relpath(config_dir, bazarr_dir) + separator)
dir_to_ignore.append(f'^{os.path.relpath(config_dir, bazarr_dir)}{separator}')
dir_to_ignore_regex_string = '(?:%s)' % '|'.join(dir_to_ignore)
logging.debug(f'BAZARR upgrade leftover cleaner will ignore directories matching this '
f'regex: {dir_to_ignore_regex_string}')
dir_to_ignore_regex = re.compile(dir_to_ignore_regex_string)
file_to_ignore = ['nssm.exe', '7za.exe', 'unins000.exe', 'unins000.dat']
logging.debug('BAZARR upgrade leftover cleaner will ignore those files: {}'.format(', '.join(file_to_ignore)))
logging.debug(f'BAZARR upgrade leftover cleaner will ignore those files: {", ".join(file_to_ignore)}')
extension_to_ignore = ['.pyc']
logging.debug('BAZARR upgrade leftover cleaner will ignore files with those extensions: '
'{}'.format(', '.join(extension_to_ignore)))
logging.debug(
f'BAZARR upgrade leftover cleaner will ignore files with those extensions: {", ".join(extension_to_ignore)}')
file_on_disk = []
folder_list = []
@@ -256,14 +256,14 @@ def update_cleaner(zipfile, bazarr_dir, config_dir):
filepath = os.path.join(current_dir, file)
if not dir_to_ignore_regex.findall(filepath):
file_on_disk.append(filepath)
logging.debug('BAZARR directory contain {} files'.format(len(file_on_disk)))
logging.debug('BAZARR directory contain {} directories'.format(len(folder_list)))
logging.debug(f'BAZARR directory contain {len(file_on_disk)} files')
logging.debug(f'BAZARR directory contain {len(folder_list)} directories')
file_on_disk += folder_list
logging.debug('BAZARR directory contain {} directories and files'.format(len(file_on_disk)))
logging.debug(f'BAZARR directory contain {len(file_on_disk)} directories and files')
file_to_remove = list(set(file_on_disk) - set(file_in_zip))
logging.debug('BAZARR will delete {} directories and files'.format(len(file_to_remove)))
logging.debug('BAZARR will delete this: {}'.format(', '.join(file_to_remove)))
logging.debug(f'BAZARR will delete {len(file_to_remove)} directories and files')
logging.debug(f'BAZARR will delete this: {", ".join(file_to_remove)}')
for file in file_to_remove:
filepath = os.path.join(bazarr_dir, file)
@@ -273,4 +273,4 @@ def update_cleaner(zipfile, bazarr_dir, config_dir):
else:
os.remove(filepath)
except Exception:
logging.debug('BAZARR upgrade leftover cleaner cannot delete {}'.format(filepath))
logging.debug(f'BAZARR upgrade leftover cleaner cannot delete {filepath}')
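
For context, check_releases() (whose logging just moved to f-strings above) fetches the release list from the GitHub API and caches it to releases.txt. A trimmed-down sketch of that flow, with the per-release processing and error handling of the real function elided:

    import json
    import logging
    import requests

    def fetch_releases(cache_path):
        url = 'https://api.github.com/repos/morpheus65535/Bazarr/releases?per_page=100'
        logging.debug(f'BAZARR getting releases from Github: {url}')
        r = requests.get(url, allow_redirects=True)
        r.raise_for_status()
        releases = r.json()
        with open(cache_path, 'w') as f:
            json.dump(releases, f)
        logging.debug(f'BAZARR saved {len(releases)} releases to releases.txt')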

@@ -3,21 +3,21 @@
import hashlib
import os
import ast
import logging
from urllib.parse import quote_plus
from subliminal.cache import region
from simpleconfigparser import simpleconfigparser, configparser, NoOptionError
from dynaconf import Dynaconf, Validator as OriginalValidator
from dynaconf.loaders.yaml_loader import write
from dynaconf.validator import ValidationError
from dynaconf.utils.functional import empty
from ipaddress import ip_address
from binascii import hexlify
from types import MappingProxyType
from .get_args import args
class SimpleConfigParser(simpleconfigparser):
def get(self, section, option, raw=False, vars=None):
try:
return configparser.get(self, section, option, raw=raw, vars=vars)
except NoOptionError:
return None
NoneType = type(None)
def base_url_slash_cleaner(uri):
@@ -26,275 +26,371 @@ def base_url_slash_cleaner(uri):
return uri
defaults = {
'general': {
'ip': '0.0.0.0',
'port': '6767',
'base_url': '',
'path_mappings': '[]',
'debug': 'False',
'branch': 'master',
'auto_update': 'True',
'single_language': 'False',
'minimum_score': '90',
'use_scenename': 'True',
'use_postprocessing': 'False',
'postprocessing_cmd': '',
'postprocessing_threshold': '90',
'use_postprocessing_threshold': 'False',
'postprocessing_threshold_movie': '70',
'use_postprocessing_threshold_movie': 'False',
'use_sonarr': 'False',
'use_radarr': 'False',
'path_mappings_movie': '[]',
'serie_default_enabled': 'False',
'serie_default_profile': '',
'movie_default_enabled': 'False',
'movie_default_profile': '',
'page_size': '25',
'theme': 'auto',
'page_size_manual_search': '10',
'minimum_score_movie': '70',
'use_embedded_subs': 'True',
'embedded_subs_show_desired': 'True',
'utf8_encode': 'True',
'ignore_pgs_subs': 'False',
'ignore_vobsub_subs': 'False',
'ignore_ass_subs': 'False',
'adaptive_searching': 'True',
'adaptive_searching_delay': '3w',
'adaptive_searching_delta': '1w',
'enabled_providers': '[]',
'multithreading': 'True',
'chmod_enabled': 'False',
'chmod': '0640',
'subfolder': 'current',
'subfolder_custom': '',
'upgrade_subs': 'True',
'upgrade_frequency': '12',
'days_to_upgrade_subs': '7',
'upgrade_manual': 'True',
'anti_captcha_provider': 'None',
'wanted_search_frequency': '6',
'wanted_search_frequency_movie': '6',
'subzero_mods': '[]',
'dont_notify_manual_actions': 'False',
'hi_extension': 'hi',
'embedded_subtitles_parser': 'ffprobe',
'default_und_audio_lang': '',
'default_und_embedded_subtitles_lang': '',
'parse_embedded_audio_track': 'False',
'skip_hashing': 'False',
'language_equals': '[]',
},
'auth': {
'type': 'None',
'username': '',
'password': ''
},
'cors': {
'enabled': 'False'
},
'backup': {
'folder': os.path.join(args.config_dir, 'backup'),
'retention': '31',
'frequency': 'Weekly',
'day': '6',
'hour': '3'
},
'sonarr': {
'ip': '127.0.0.1',
'port': '8989',
'base_url': '/',
'ssl': 'False',
'http_timeout': '60',
'apikey': '',
'full_update': 'Daily',
'full_update_day': '6',
'full_update_hour': '4',
'only_monitored': 'False',
'series_sync': '60',
'episodes_sync': '60',
'excluded_tags': '[]',
'excluded_series_types': '[]',
'use_ffprobe_cache': 'True',
'exclude_season_zero': 'False',
'defer_search_signalr': 'False'
},
'radarr': {
'ip': '127.0.0.1',
'port': '7878',
'base_url': '/',
'ssl': 'False',
'http_timeout': '60',
'apikey': '',
'full_update': 'Daily',
'full_update_day': '6',
'full_update_hour': '5',
'only_monitored': 'False',
'movies_sync': '60',
'excluded_tags': '[]',
'use_ffprobe_cache': 'True',
'defer_search_signalr': 'False'
},
'proxy': {
'type': 'None',
'url': '',
'port': '',
'username': '',
'password': '',
'exclude': '["localhost","127.0.0.1"]'
},
'opensubtitles': {
'username': '',
'password': '',
'use_tag_search': 'False',
'vip': 'False',
'ssl': 'False',
'timeout': '15',
'skip_wrong_fps': 'False'
},
'opensubtitlescom': {
'username': '',
'password': '',
'use_hash': 'True'
},
'addic7ed': {
'username': '',
'password': '',
'cookies': '',
'user_agent': '',
'vip': 'False'
},
'podnapisi': {
'verify_ssl': 'True'
},
'subf2m': {
'verify_ssl': 'True',
'user_agent': ''
},
'whisperai': {
'endpoint': 'http://127.0.0.1:9000',
'timeout': '3600'
},
'legendasdivx': {
'username': '',
'password': '',
'skip_wrong_fps': 'False'
},
'ktuvit': {
'email': '',
'hashed_password': ''
},
'xsubs': {
'username': '',
'password': ''
},
'assrt': {
'token': ''
},
'anticaptcha': {
'anti_captcha_key': ''
},
'deathbycaptcha': {
'username': '',
'password': ''
},
'napisy24': {
'username': '',
'password': ''
},
'subscene': {
'username': '',
'password': ''
},
'betaseries': {
'token': ''
},
'analytics': {
'enabled': 'True'
},
'titlovi': {
'username': '',
'password': ''
},
'titulky': {
'username': '',
'password': '',
'approved_only': 'False'
},
'embeddedsubtitles': {
'included_codecs': '[]',
'hi_fallback': 'False',
'timeout': '600',
'unknown_as_english': 'False',
},
'hdbits': {
'username': '',
'passkey': '',
},
'karagarga': {
'username': '',
'password': '',
'f_username': '',
'f_password': '',
},
'subsync': {
'use_subsync': 'False',
'use_subsync_threshold': 'False',
'subsync_threshold': '90',
'use_subsync_movie_threshold': 'False',
'subsync_movie_threshold': '70',
'debug': 'False',
'force_audio': 'False'
},
'series_scores': {
"hash": 359,
"series": 180,
"year": 90,
"season": 30,
"episode": 30,
"release_group": 14,
"source": 7,
"audio_codec": 3,
"resolution": 2,
"video_codec": 2,
"streaming_service": 1,
"hearing_impaired": 1,
},
'movie_scores': {
"hash": 119,
"title": 60,
"year": 30,
"release_group": 13,
"source": 7,
"audio_codec": 3,
"resolution": 2,
"video_codec": 2,
"streaming_service": 1,
"edition": 1,
"hearing_impaired": 1,
},
'postgresql': {
'enabled': 'False',
'host': 'localhost',
'port': '5432',
'database': '',
'username': '',
'password': '',
},
}
settings = SimpleConfigParser(defaults=defaults, interpolation=None)
settings.read(os.path.join(args.config_dir, 'config', 'config.ini'))
settings.general.base_url = settings.general.base_url if settings.general.base_url else '/'
def validate_ip_address(ip_string):
try:
ip_address(ip_string)
return True
except ValueError:
return False
class Validator(OriginalValidator):
# Give the ability to personalize messages sent by the original dynaconf Validator class.
default_messages = MappingProxyType(
{
"must_exist_true": "{name} is required",
"must_exist_false": "{name} cannot exists",
"condition": "{name} invalid for {function}({value})",
"operations": "{name} must {operation} {op_value} but it is {value}",
"combined": "combined validators failed {errors}",
}
)
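
Overriding default_messages is what lets the subclass phrase validation failures in Bazarr's own words; dynaconf fills in the matching template when a rule trips. A quick demonstration, assuming it runs in the context of this module (Validator is the subclass above, ValidationError is imported at the top):

    probe = Dynaconf()
    probe.validators.register(
        Validator('general.port', must_exist=True, is_type_of=int))
    try:
        probe.validators.validate()
    except ValidationError as e:
        # The "must_exist_true" template fires: "general.port is required".
        print(e.message)
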
validators = [
# general section
Validator('general.flask_secret_key', must_exist=True, default=hexlify(os.urandom(16)).decode(),
is_type_of=str),
Validator('general.ip', must_exist=True, default='0.0.0.0', is_type_of=str, condition=validate_ip_address),
Validator('general.port', must_exist=True, default=6767, is_type_of=int, gte=1, lte=65535),
Validator('general.base_url', must_exist=True, default='', is_type_of=str),
Validator('general.path_mappings', must_exist=True, default=[], is_type_of=list),
Validator('general.debug', must_exist=True, default=False, is_type_of=bool),
Validator('general.branch', must_exist=True, default='master', is_type_of=str,
is_in=['master', 'development']),
Validator('general.auto_update', must_exist=True, default=True, is_type_of=bool),
Validator('general.single_language', must_exist=True, default=False, is_type_of=bool),
Validator('general.minimum_score', must_exist=True, default=90, is_type_of=int, gte=0, lte=100),
Validator('general.use_scenename', must_exist=True, default=True, is_type_of=bool),
Validator('general.use_postprocessing', must_exist=True, default=False, is_type_of=bool),
Validator('general.postprocessing_cmd', must_exist=True, default='', is_type_of=str),
Validator('general.postprocessing_threshold', must_exist=True, default=90, is_type_of=int, gte=0, lte=100),
Validator('general.use_postprocessing_threshold', must_exist=True, default=False, is_type_of=bool),
Validator('general.postprocessing_threshold_movie', must_exist=True, default=70, is_type_of=int, gte=0,
lte=100),
Validator('general.use_postprocessing_threshold_movie', must_exist=True, default=False, is_type_of=bool),
Validator('general.use_sonarr', must_exist=True, default=False, is_type_of=bool),
Validator('general.use_radarr', must_exist=True, default=False, is_type_of=bool),
Validator('general.path_mappings_movie', must_exist=True, default=[], is_type_of=list),
Validator('general.serie_default_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.serie_default_profile', must_exist=True, default='', is_type_of=(int, str)),
Validator('general.movie_default_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.movie_default_profile', must_exist=True, default='', is_type_of=(int, str)),
Validator('general.page_size', must_exist=True, default=25, is_type_of=int,
is_in=[25, 50, 100, 250, 500, 1000]),
Validator('general.theme', must_exist=True, default='auto', is_type_of=str,
is_in=['auto', 'light', 'dark']),
Validator('general.minimum_score_movie', must_exist=True, default=70, is_type_of=int, gte=0, lte=100),
Validator('general.use_embedded_subs', must_exist=True, default=True, is_type_of=bool),
Validator('general.embedded_subs_show_desired', must_exist=True, default=True, is_type_of=bool),
Validator('general.utf8_encode', must_exist=True, default=True, is_type_of=bool),
Validator('general.ignore_pgs_subs', must_exist=True, default=False, is_type_of=bool),
Validator('general.ignore_vobsub_subs', must_exist=True, default=False, is_type_of=bool),
Validator('general.ignore_ass_subs', must_exist=True, default=False, is_type_of=bool),
Validator('general.adaptive_searching', must_exist=True, default=True, is_type_of=bool),
Validator('general.adaptive_searching_delay', must_exist=True, default='3w', is_type_of=str,
is_in=['1w', '2w', '3w', '4w']),
Validator('general.adaptive_searching_delta', must_exist=True, default='1w', is_type_of=str,
is_in=['3d', '1w', '2w', '3w', '4w']),
Validator('general.enabled_providers', must_exist=True, default=[], is_type_of=list),
Validator('general.multithreading', must_exist=True, default=True, is_type_of=bool),
Validator('general.chmod_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.chmod', must_exist=True, default='0640', is_type_of=str),
Validator('general.subfolder', must_exist=True, default='current', is_type_of=str),
Validator('general.subfolder_custom', must_exist=True, default='', is_type_of=str),
Validator('general.upgrade_subs', must_exist=True, default=True, is_type_of=bool),
Validator('general.upgrade_frequency', must_exist=True, default=12, is_type_of=int, is_in=[6, 12, 24]),
Validator('general.days_to_upgrade_subs', must_exist=True, default=7, is_type_of=int, gte=0, lte=30),
Validator('general.upgrade_manual', must_exist=True, default=True, is_type_of=bool),
Validator('general.anti_captcha_provider', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'anti-captcha', 'death-by-captcha']),
Validator('general.wanted_search_frequency', must_exist=True, default=6, is_type_of=int, is_in=[6, 12, 24]),
Validator('general.wanted_search_frequency_movie', must_exist=True, default=6, is_type_of=int,
is_in=[6, 12, 24]),
Validator('general.subzero_mods', must_exist=True, default='', is_type_of=str),
Validator('general.dont_notify_manual_actions', must_exist=True, default=False, is_type_of=bool),
Validator('general.hi_extension', must_exist=True, default='hi', is_type_of=str, is_in=['hi', 'cc', 'sdh']),
Validator('general.embedded_subtitles_parser', must_exist=True, default='ffprobe', is_type_of=str,
is_in=['ffprobe', 'mediainfo']),
Validator('general.default_und_audio_lang', must_exist=True, default='', is_type_of=str),
Validator('general.default_und_embedded_subtitles_lang', must_exist=True, default='', is_type_of=str),
Validator('general.parse_embedded_audio_track', must_exist=True, default=False, is_type_of=bool),
Validator('general.skip_hashing', must_exist=True, default=False, is_type_of=bool),
Validator('general.language_equals', must_exist=True, default=[], is_type_of=list),
# auth section
Validator('auth.apikey', must_exist=True, default=hexlify(os.urandom(16)).decode(), is_type_of=str),
Validator('auth.type', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'basic', 'form']),
Validator('auth.username', must_exist=True, default='', is_type_of=str),
Validator('auth.password', must_exist=True, default='', is_type_of=str),
# cors section
Validator('cors.enabled', must_exist=True, default=False, is_type_of=bool),
# backup section
Validator('backup.folder', must_exist=True, default=os.path.join(args.config_dir, 'backup'),
is_type_of=str),
Validator('backup.retention', must_exist=True, default=31, is_type_of=int, gte=0),
Validator('backup.frequency', must_exist=True, default='Weekly', is_type_of=str,
is_in=['Manually', 'Daily', 'Weekly']),
Validator('backup.day', must_exist=True, default=6, is_type_of=int, gte=0, lte=6),
Validator('backup.hour', must_exist=True, default=3, is_type_of=int, gte=0, lte=23),
# sonarr section
Validator('sonarr.ip', must_exist=True, default='127.0.0.1', is_type_of=str),
Validator('sonarr.port', must_exist=True, default=8989, is_type_of=int, gte=1, lte=65535),
Validator('sonarr.base_url', must_exist=True, default='/', is_type_of=str),
Validator('sonarr.ssl', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.http_timeout', must_exist=True, default=60, is_type_of=int,
is_in=[60, 120, 180, 240, 300, 600]),
Validator('sonarr.apikey', must_exist=True, default='', is_type_of=str),
Validator('sonarr.full_update', must_exist=True, default='Daily', is_type_of=str,
is_in=['Manually', 'Daily', 'Weekly']),
Validator('sonarr.full_update_day', must_exist=True, default=6, is_type_of=int, gte=0, lte=6),
Validator('sonarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('sonarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.series_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440]),
Validator('sonarr.episodes_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440]),
Validator('sonarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
Validator('sonarr.excluded_series_types', must_exist=True, default=[], is_type_of=list),
Validator('sonarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
Validator('sonarr.exclude_season_zero', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
# radarr section
Validator('radarr.ip', must_exist=True, default='127.0.0.1', is_type_of=str),
Validator('radarr.port', must_exist=True, default=7878, is_type_of=int, gte=1, lte=65535),
Validator('radarr.base_url', must_exist=True, default='/', is_type_of=str),
Validator('radarr.ssl', must_exist=True, default=False, is_type_of=bool),
Validator('radarr.http_timeout', must_exist=True, default=60, is_type_of=int,
is_in=[60, 120, 180, 240, 300, 600]),
Validator('radarr.apikey', must_exist=True, default='', is_type_of=str),
Validator('radarr.full_update', must_exist=True, default='Daily', is_type_of=str,
is_in=['Manually', 'Daily', 'Weekly']),
Validator('radarr.full_update_day', must_exist=True, default=6, is_type_of=int, gte=0, lte=6),
Validator('radarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('radarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('radarr.movies_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440]),
Validator('radarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
Validator('radarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
Validator('radarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
# proxy section
Validator('proxy.type', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'socks5', 'http']),
Validator('proxy.url', must_exist=True, default='', is_type_of=str),
Validator('proxy.port', must_exist=True, default='', is_type_of=(str, int)),
Validator('proxy.username', must_exist=True, default='', is_type_of=str),
Validator('proxy.password', must_exist=True, default='', is_type_of=str),
Validator('proxy.exclude', must_exist=True, default=["localhost", "127.0.0.1"], is_type_of=list),
# opensubtitles.org section
Validator('opensubtitles.username', must_exist=True, default='', is_type_of=str),
Validator('opensubtitles.password', must_exist=True, default='', is_type_of=str),
Validator('opensubtitles.use_tag_search', must_exist=True, default=False, is_type_of=bool),
Validator('opensubtitles.vip', must_exist=True, default=False, is_type_of=bool),
Validator('opensubtitles.ssl', must_exist=True, default=False, is_type_of=bool),
Validator('opensubtitles.timeout', must_exist=True, default=15, is_type_of=int, gte=1),
Validator('opensubtitles.skip_wrong_fps', must_exist=True, default=False, is_type_of=bool),
# opensubtitles.com section
Validator('opensubtitlescom.username', must_exist=True, default='', is_type_of=str),
Validator('opensubtitlescom.password', must_exist=True, default='', is_type_of=str),
Validator('opensubtitlescom.use_hash', must_exist=True, default=True, is_type_of=bool),
# addic7ed section
Validator('addic7ed.username', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.password', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.cookies', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.user_agent', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.vip', must_exist=True, default=False, is_type_of=bool),
# podnapisi section
Validator('podnapisi.verify_ssl', must_exist=True, default=True, is_type_of=bool),
# subf2m section
Validator('subf2m.verify_ssl', must_exist=True, default=True, is_type_of=bool),
Validator('subf2m.user_agent', must_exist=True, default='', is_type_of=str),
# hdbits section
Validator('hdbits.username', must_exist=True, default='', is_type_of=str),
Validator('hdbits.passkey', must_exist=True, default='', is_type_of=str),
# whisperai section
Validator('whisperai.endpoint', must_exist=True, default='http://127.0.0.1:9000', is_type_of=str),
Validator('whisperai.timeout', must_exist=True, default=3600, is_type_of=int, gte=1),
# legendasdivx section
Validator('legendasdivx.username', must_exist=True, default='', is_type_of=str),
Validator('legendasdivx.password', must_exist=True, default='', is_type_of=str),
Validator('legendasdivx.skip_wrong_fps', must_exist=True, default=False, is_type_of=bool),
# ktuvit section
Validator('ktuvit.email', must_exist=True, default='', is_type_of=str),
Validator('ktuvit.hashed_password', must_exist=True, default='', is_type_of=str),
# xsubs section
Validator('xsubs.username', must_exist=True, default='', is_type_of=str),
Validator('xsubs.password', must_exist=True, default='', is_type_of=str),
# assrt section
Validator('assrt.token', must_exist=True, default='', is_type_of=str),
# anticaptcha section
Validator('anticaptcha.anti_captcha_key', must_exist=True, default='', is_type_of=str),
# deathbycaptcha section
Validator('deathbycaptcha.username', must_exist=True, default='', is_type_of=str),
Validator('deathbycaptcha.password', must_exist=True, default='', is_type_of=str),
# napisy24 section
Validator('napisy24.username', must_exist=True, default='', is_type_of=str),
Validator('napisy24.password', must_exist=True, default='', is_type_of=str),
# subscene section
Validator('subscene.username', must_exist=True, default='', is_type_of=str),
Validator('subscene.password', must_exist=True, default='', is_type_of=str),
# betaseries section
Validator('betaseries.token', must_exist=True, default='', is_type_of=str),
# analytics section
Validator('analytics.enabled', must_exist=True, default=True, is_type_of=bool),
# titlovi section
Validator('titlovi.username', must_exist=True, default='', is_type_of=str),
Validator('titlovi.password', must_exist=True, default='', is_type_of=str),
# titulky section
Validator('titulky.username', must_exist=True, default='', is_type_of=str),
Validator('titulky.password', must_exist=True, default='', is_type_of=str),
Validator('titulky.approved_only', must_exist=True, default=False, is_type_of=bool),
# embeddedsubtitles section
Validator('embeddedsubtitles.included_codecs', must_exist=True, default=[], is_type_of=list),
Validator('embeddedsubtitles.hi_fallback', must_exist=True, default=False, is_type_of=bool),
Validator('embeddedsubtitles.timeout', must_exist=True, default=600, is_type_of=int, gte=1),
Validator('embeddedsubtitles.unknown_as_english', must_exist=True, default=False, is_type_of=bool),
# karagarga section
Validator('karagarga.username', must_exist=True, default='', is_type_of=str),
Validator('karagarga.password', must_exist=True, default='', is_type_of=str),
Validator('karagarga.f_username', must_exist=True, default='', is_type_of=str),
Validator('karagarga.f_password', must_exist=True, default='', is_type_of=str),
# subsync section
Validator('subsync.use_subsync', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.use_subsync_threshold', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.subsync_threshold', must_exist=True, default=90, is_type_of=int, gte=0, lte=100),
Validator('subsync.use_subsync_movie_threshold', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.subsync_movie_threshold', must_exist=True, default=70, is_type_of=int, gte=0, lte=100),
Validator('subsync.debug', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.force_audio', must_exist=True, default=False, is_type_of=bool),
# series_scores section
Validator('series_scores.hash', must_exist=True, default=359, is_type_of=int),
Validator('series_scores.series', must_exist=True, default=180, is_type_of=int),
Validator('series_scores.year', must_exist=True, default=90, is_type_of=int),
Validator('series_scores.season', must_exist=True, default=30, is_type_of=int),
Validator('series_scores.episode', must_exist=True, default=30, is_type_of=int),
Validator('series_scores.release_group', must_exist=True, default=14, is_type_of=int),
Validator('series_scores.source', must_exist=True, default=7, is_type_of=int),
Validator('series_scores.audio_codec', must_exist=True, default=3, is_type_of=int),
Validator('series_scores.resolution', must_exist=True, default=2, is_type_of=int),
Validator('series_scores.video_codec', must_exist=True, default=2, is_type_of=int),
Validator('series_scores.streaming_service', must_exist=True, default=1, is_type_of=int),
Validator('series_scores.hearing_impaired', must_exist=True, default=1, is_type_of=int),
# movie_scores section
Validator('movie_scores.hash', must_exist=True, default=119, is_type_of=int),
Validator('movie_scores.title', must_exist=True, default=60, is_type_of=int),
Validator('movie_scores.year', must_exist=True, default=30, is_type_of=int),
Validator('movie_scores.release_group', must_exist=True, default=13, is_type_of=int),
Validator('movie_scores.source', must_exist=True, default=7, is_type_of=int),
Validator('movie_scores.audio_codec', must_exist=True, default=3, is_type_of=int),
Validator('movie_scores.resolution', must_exist=True, default=2, is_type_of=int),
Validator('movie_scores.video_codec', must_exist=True, default=2, is_type_of=int),
Validator('movie_scores.streaming_service', must_exist=True, default=1, is_type_of=int),
Validator('movie_scores.edition', must_exist=True, default=1, is_type_of=int),
Validator('movie_scores.hearing_impaired', must_exist=True, default=1, is_type_of=int),
# postgresql section
Validator('postgresql.enabled', must_exist=True, default=False, is_type_of=bool),
Validator('postgresql.host', must_exist=True, default='localhost', is_type_of=str),
Validator('postgresql.port', must_exist=True, default=5432, is_type_of=int, gte=1, lte=65535),
Validator('postgresql.database', must_exist=True, default='', is_type_of=str),
Validator('postgresql.username', must_exist=True, default='', is_type_of=str),
Validator('postgresql.password', must_exist=True, default='', is_type_of=str),
]
def convert_ini_to_yaml(config_file):
import configparser
import yaml
config_object = configparser.RawConfigParser()
with open(config_file, "r") as file:
config_object.read_file(file)
output_dict = dict()
sections = config_object.sections()
for section in sections:
items = config_object.items(section)
output_dict[section] = dict()
for item in items:
try:
output_dict[section].update({item[0]: ast.literal_eval(item[1])})
except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError):
output_dict[section].update({item[0]: item[1]})
with open(os.path.join(os.path.dirname(config_file), 'config.yaml'), 'w') as file:
yaml.dump(output_dict, file)
os.replace(config_file, f'{config_file}.old')
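
The ast.literal_eval round-trip is what upgrades ini strings to native YAML types during the one-time conversion: numbers, booleans, and list literals such as '[]' parse cleanly, and anything else stays a string. Worked through on a few representative values:

    import ast

    for raw in ('6767', 'True', '[]', "['localhost','127.0.0.1']", 'master'):
        try:
            value = ast.literal_eval(raw)
        except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError):
            value = raw
        print(f'{raw!r} -> {value!r}')
    # '6767' -> 6767, 'True' -> True, '[]' -> [],
    # "['localhost','127.0.0.1']" -> ['localhost', '127.0.0.1'],
    # 'master' -> 'master' (not a literal, kept as-is)
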
config_yaml_file = os.path.join(args.config_dir, 'config', 'config.yaml')
config_ini_file = os.path.join(args.config_dir, 'config', 'config.ini')
if os.path.exists(config_ini_file) and not os.path.exists(config_yaml_file):
convert_ini_to_yaml(config_ini_file)
elif not os.path.exists(config_yaml_file):
if not os.path.isdir(os.path.dirname(config_yaml_file)):
os.makedirs(os.path.dirname(config_yaml_file))
open(config_yaml_file, mode='w').close()
settings = Dynaconf(
settings_file=config_yaml_file,
core_loaders=['YAML'],
apply_default_on_none=True,
)
settings.validators.register(*validators)
failed_validator = True
while failed_validator:
try:
settings.validators.validate_all()
failed_validator = False
except ValidationError as e:
current_validator_details = e.details[0][0]
if hasattr(current_validator_details, 'default') and current_validator_details.default is not empty:
settings[current_validator_details.names[0]] = current_validator_details.default
else:
logging.critical(f"Value for {current_validator_details.names[0]} doesn't pass validation and there's no "
f"default value. This issue must be reported. Bazarr won't works until it's been fixed.")
os._exit(0)
def write_config():
write(settings_path=config_yaml_file,
settings_data={k.lower(): v for k, v in settings.as_dict().items()},
merge=False)
base_url = settings.general.base_url.rstrip('/')
ignore_keys = ['flask_secret_key']
raw_keys = ['movie_default_forced', 'serie_default_forced']
array_keys = ['excluded_tags',
'exclude',
'included_codecs',
@@ -305,79 +401,50 @@ array_keys = ['excluded_tags',
'path_mappings_movie',
'language_equals']
str_keys = ['chmod']
empty_values = ['', 'None', 'null', 'undefined', None, []]
str_keys = ['chmod']
# Increase Sonarr and Radarr sync interval since we now use SignalR feed to update in real time
if int(settings.sonarr.series_sync) < 15:
settings.sonarr.series_sync = "60"
if int(settings.sonarr.episodes_sync) < 15:
settings.sonarr.episodes_sync = "60"
if int(settings.radarr.movies_sync) < 15:
settings.radarr.movies_sync = "60"
if settings.sonarr.series_sync < 15:
settings.sonarr.series_sync = 60
if settings.sonarr.episodes_sync < 15:
settings.sonarr.episodes_sync = 60
if settings.radarr.movies_sync < 15:
settings.radarr.movies_sync = 60
# Make sure to get rid of double slashes in base_url
settings.general.base_url = base_url_slash_cleaner(uri=settings.general.base_url)
settings.sonarr.base_url = base_url_slash_cleaner(uri=settings.sonarr.base_url)
settings.radarr.base_url = base_url_slash_cleaner(uri=settings.radarr.base_url)
# fixing issue with improper page_size value
if settings.general.page_size not in ['25', '50', '100', '250', '500', '1000']:
settings.general.page_size = defaults['general']['page_size']
# increase delay between searches to reduce impact on providers
if settings.general.wanted_search_frequency == '3':
settings.general.wanted_search_frequency = '6'
if settings.general.wanted_search_frequency_movie == '3':
settings.general.wanted_search_frequency_movie = '6'
if settings.general.wanted_search_frequency == 3:
settings.general.wanted_search_frequency = 6
if settings.general.wanted_search_frequency_movie == 3:
settings.general.wanted_search_frequency_movie = 6
# save updated settings to file
if os.path.exists(os.path.join(args.config_dir, 'config', 'config.ini')):
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
write_config()
def get_settings():
result = dict()
sections = settings.sections()
for sec in sections:
sec_values = settings.items(sec, False)
values_dict = dict()
for sec_val in sec_values:
key = sec_val[0]
value = sec_val[1]
if key in ignore_keys:
continue
if key not in raw_keys:
# Do some postprocessings
if value in empty_values:
if key in array_keys:
value = []
else:
continue
elif key in array_keys:
value = get_array_from(value)
elif value == 'True':
value = True
elif value == 'False':
value = False
# return {k.lower(): v for k, v in settings.as_dict().items()}
settings_to_return = {}
for k, v in settings.as_dict().items():
if isinstance(v, dict):
k = k.lower()
settings_to_return[k] = dict()
for subk, subv in v.items():
if subk.lower() in ignore_keys:
continue
if subv in empty_values and subk.lower() in array_keys:
settings_to_return[k].update({subk: []})
elif subk == 'subzero_mods':
settings_to_return[k].update({subk: get_array_from(subv)})
else:
if key not in str_keys:
try:
value = int(value)
except ValueError:
pass
values_dict[key] = value
result[sec] = values_dict
return result
settings_to_return[k].update({subk: subv})
return settings_to_return
def save_settings(settings_items):
@@ -408,24 +475,31 @@ def save_settings(settings_items):
settings_keys = key.split('-')
# Make sure that text based form values aren't pass as list
# Make sure that text based form values aren't passed as list
if isinstance(value, list) and len(value) == 1 and settings_keys[-1] not in array_keys:
value = value[0]
if value in empty_values and value != '':
value = None
# try to cast string as integer
if isinstance(value, str) and settings_keys[-1] not in str_keys:
try:
value = int(value)
except ValueError:
pass
# Make sure empty language lists are stored correctly
if settings_keys[-1] in array_keys and value[0] in empty_values:
value = []
# Handle path mappings settings since they are array in array
if settings_keys[-1] in ['path_mappings', 'path_mappings_movie']:
value = [v.split(',') for v in value]
value = [x.split(',') for x in value if isinstance(x, str)]
if value == 'true':
value = 'True'
value = True
elif value == 'false':
value = 'False'
value = False
if key in ['settings-general-use_embedded_subs', 'settings-general-ignore_pgs_subs',
'settings-general-ignore_vobsub_subs', 'settings-general-ignore_ass_subs']:
@@ -553,14 +627,13 @@ def save_settings(settings_items):
reset_throttled_providers(only_auth_or_conf_error=True)
if settings_keys[0] == 'settings':
settings[settings_keys[1]][settings_keys[2]] = str(value)
settings[settings_keys[1]][settings_keys[2]] = value
if settings_keys[0] == 'subzero':
mod = settings_keys[1]
enabled = value == 'True'
if mod in subzero_mods and not enabled:
if mod in subzero_mods and not value:
subzero_mods.remove(mod)
elif enabled:
elif value:
subzero_mods.append(mod)
# Handle color
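
The hunk above compresses several coercions that the ini format used to force on save_settings(): single-element form lists are unwrapped, null-like strings become None, numeric strings are cast to int (except str_keys such as chmod), and 'true'/'false' become real booleans rather than the 'True'/'False' strings written before. Condensed into one hypothetical helper:

    empty_values = ['', 'None', 'null', 'undefined', None, []]
    str_keys = ['chmod']

    def normalize(key, value):
        if isinstance(value, list) and len(value) == 1:
            value = value[0]
        if value in empty_values and value != '':
            return None
        if isinstance(value, str) and key not in str_keys:
            try:
                return int(value)
            except ValueError:
                pass
        if value == 'true':
            return True
        if value == 'false':
            return False
        return value

    assert normalize('port', ['6767']) == 6767
    assert normalize('chmod', ['0640']) == '0640'
    assert normalize('debug', 'true') is True
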
@@ -581,77 +654,82 @@ def save_settings(settings_items):
from .scheduler import scheduler
from subtitles.indexer.series import list_missing_subtitles
from subtitles.indexer.movies import list_missing_subtitles_movies
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
scheduler.add_job(list_missing_subtitles, kwargs={'send_event': True})
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
scheduler.add_job(list_missing_subtitles_movies, kwargs={'send_event': True})
if undefined_subtitles_track_default_changed:
from .scheduler import scheduler
from subtitles.indexer.series import series_full_scan_subtitles
from subtitles.indexer.movies import movies_full_scan_subtitles
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
scheduler.add_job(series_full_scan_subtitles, kwargs={'use_cache': True})
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
scheduler.add_job(movies_full_scan_subtitles, kwargs={'use_cache': True})
if audio_tracks_parsing_changed:
from .scheduler import scheduler
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
from sonarr.sync.series import update_series
scheduler.add_job(update_series, kwargs={'send_event': True}, max_instances=1)
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
from radarr.sync.movies import update_movies
scheduler.add_job(update_movies, kwargs={'send_event': True}, max_instances=1)
if update_subzero:
settings.set('general', 'subzero_mods', ','.join(subzero_mods))
settings.general.subzero_mods = ','.join(subzero_mods)
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
# Reconfigure Bazarr to reflect changes
if configure_debug:
from .logger import configure_logging
configure_logging(settings.general.getboolean('debug') or args.debug)
if configure_captcha:
configure_captcha_func()
if update_schedule:
from .scheduler import scheduler
from .event_handler import event_stream
scheduler.update_configurable_tasks()
event_stream(type='task')
if sonarr_changed:
from .signalr_client import sonarr_signalr_client
try:
sonarr_signalr_client.restart()
except Exception:
pass
if radarr_changed:
from .signalr_client import radarr_signalr_client
try:
radarr_signalr_client.restart()
except Exception:
pass
if update_path_map:
from utilities.path_mappings import path_mappings
path_mappings.update()
if configure_proxy:
configure_proxy_func()
if exclusion_updated:
from .event_handler import event_stream
event_stream(type='badges')
if sonarr_exclusion_updated:
event_stream(type='reset-episode-wanted')
if radarr_exclusion_updated:
event_stream(type='reset-movie-wanted')
try:
settings.validators.validate()
except ValidationError:
settings.reload()
raise
else:
write_config()
# Reconfigure Bazarr to reflect changes
if configure_debug:
from .logger import configure_logging
configure_logging(settings.general.debug or args.debug)
if configure_captcha:
configure_captcha_func()
if update_schedule:
from .scheduler import scheduler
from .event_handler import event_stream
scheduler.update_configurable_tasks()
event_stream(type='task')
if sonarr_changed:
from .signalr_client import sonarr_signalr_client
try:
sonarr_signalr_client.restart()
except Exception:
pass
if radarr_changed:
from .signalr_client import radarr_signalr_client
try:
radarr_signalr_client.restart()
except Exception:
pass
if update_path_map:
from utilities.path_mappings import path_mappings
path_mappings.update()
if configure_proxy:
configure_proxy_func()
if exclusion_updated:
from .event_handler import event_stream
event_stream(type='badges')
if sonarr_exclusion_updated:
event_stream(type='reset-episode-wanted')
if radarr_exclusion_updated:
event_stream(type='reset-movie-wanted')
def get_array_from(property):
@@ -681,15 +759,15 @@ def configure_captcha_func():
def configure_proxy_func():
if settings.proxy.type != 'None':
if settings.proxy.type:
if settings.proxy.username != '' and settings.proxy.password != '':
proxy = settings.proxy.type + '://' + quote_plus(settings.proxy.username) + ':' + \
quote_plus(settings.proxy.password) + '@' + settings.proxy.url + ':' + settings.proxy.port
proxy = (f'{settings.proxy.type}://{quote_plus(settings.proxy.username)}:'
f'{quote_plus(settings.proxy.password)}@{settings.proxy.url}:{settings.proxy.port}')
else:
proxy = settings.proxy.type + '://' + settings.proxy.url + ':' + settings.proxy.port
proxy = f'{settings.proxy.type}://{settings.proxy.url}:{settings.proxy.port}'
os.environ['HTTP_PROXY'] = str(proxy)
os.environ['HTTPS_PROXY'] = str(proxy)
exclude = ','.join(get_array_from(settings.proxy.exclude))
exclude = ','.join(settings.proxy.exclude)
os.environ['NO_PROXY'] = exclude
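
The proxy URL assembly above is now an f-string, but quote_plus still does the real work: credentials may contain URL-delimiter characters that must be percent-encoded before being embedded in scheme://user:pass@host:port. For example:

    from urllib.parse import quote_plus

    username, password = 'user@example', 'p:ss w0rd'
    proxy = f'http://{quote_plus(username)}:{quote_plus(password)}@127.0.0.1:3128'
    assert proxy == 'http://user%40example:p%3Ass+w0rd@127.0.0.1:3128'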

@@ -18,12 +18,16 @@ from sqlalchemy.pool import NullPool
from flask_sqlalchemy import SQLAlchemy
from .config import settings, get_array_from
from .config import settings
from .get_args import args
logger = logging.getLogger(__name__)
postgresql = (os.getenv("POSTGRES_ENABLED", settings.postgresql.enabled).lower() == 'true')
POSTGRES_ENABLED_ENV = os.getenv("POSTGRES_ENABLED")
if POSTGRES_ENABLED_ENV:
postgresql = POSTGRES_ENABLED_ENV.lower() == 'true'
else:
postgresql = settings.postgresql.enabled
region = make_region().configure('dogpile.cache.memory')
@@ -324,30 +328,30 @@ def migrate_db(app):
def get_exclusion_clause(exclusion_type):
where_clause = []
if exclusion_type == 'series':
tagsList = ast.literal_eval(settings.sonarr.excluded_tags)
tagsList = settings.sonarr.excluded_tags
for tag in tagsList:
where_clause.append(~(TableShows.tags.contains("\'" + tag + "\'")))
where_clause.append(~(TableShows.tags.contains(f"\'{tag}\'")))
else:
tagsList = ast.literal_eval(settings.radarr.excluded_tags)
tagsList = settings.radarr.excluded_tags
for tag in tagsList:
where_clause.append(~(TableMovies.tags.contains("\'" + tag + "\'")))
where_clause.append(~(TableMovies.tags.contains(f"\'{tag}\'")))
if exclusion_type == 'series':
monitoredOnly = settings.sonarr.getboolean('only_monitored')
monitoredOnly = settings.sonarr.only_monitored
if monitoredOnly:
where_clause.append((TableEpisodes.monitored == 'True')) # noqa E712
where_clause.append((TableShows.monitored == 'True')) # noqa E712
else:
monitoredOnly = settings.radarr.getboolean('only_monitored')
monitoredOnly = settings.radarr.only_monitored
if monitoredOnly:
where_clause.append((TableMovies.monitored == 'True')) # noqa E712
if exclusion_type == 'series':
typesList = get_array_from(settings.sonarr.excluded_series_types)
typesList = settings.sonarr.excluded_series_types
for item in typesList:
where_clause.append((TableShows.seriesType != item))
exclude_season_zero = settings.sonarr.getboolean('exclude_season_zero')
exclude_season_zero = settings.sonarr.exclude_season_zero
if exclude_season_zero:
where_clause.append((TableEpisodes.season != 0))

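A sketch of how these exclusion filters compose once the tag list is a native list (Shows below is a stand-in declarative model, not the real TableShows):

from sqlalchemy import Column, Integer, Text
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Shows(Base):  # stand-in for TableShows
    __tablename__ = 'shows'
    id = Column(Integer, primary_key=True)
    tags = Column(Text)
    monitored = Column(Text)

def exclusion_clause(excluded_tags, only_monitored):
    where = []
    # tags are stored as a stringified list, so match the quoted tag as a substring
    for tag in excluded_tags:
        where.append(~Shows.tags.contains(f"'{tag}'"))
    if only_monitored:
        where.append(Shows.monitored == 'True')
    return where

clauses = exclusion_clause(['anime'], True)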
@ -1,6 +1,5 @@
# coding=utf-8
import ast
import os
import datetime
import pytz
@ -13,15 +12,17 @@ import requests
import traceback
import re
from requests import ConnectionError
from subzero.language import Language
from subliminal_patch.exceptions import TooManyRequests, APIThrottled, ParseResponseError, IPAddressBlocked, \
MustGetBlacklisted, SearchLimitReached
from subliminal.providers.opensubtitles import DownloadLimitReached
from subliminal.providers.opensubtitles import DownloadLimitReached, PaymentRequired, Unauthorized
from subliminal.exceptions import DownloadLimitExceeded, ServiceUnavailable, AuthenticationError, ConfigurationError
from subliminal import region as subliminal_cache_region
from subliminal_patch.extensions import provider_registry
from app.get_args import args
from app.config import settings, get_array_from
from app.config import settings
from languages.get_languages import CustomLanguage
from app.event_handler import event_stream
from utilities.binaries import get_binary
@ -74,17 +75,21 @@ def provider_throttle_map():
socket.timeout: (datetime.timedelta(hours=1), "1 hour"),
requests.exceptions.ConnectTimeout: (datetime.timedelta(hours=1), "1 hour"),
requests.exceptions.ReadTimeout: (datetime.timedelta(hours=1), "1 hour"),
ConfigurationError: (datetime.timedelta(hours=12), "12 hours"),
PermissionError: (datetime.timedelta(hours=12), "12 hours"),
requests.exceptions.ProxyError: (datetime.timedelta(hours=1), "1 hour"),
AuthenticationError: (datetime.timedelta(hours=12), "12 hours"),
},
"opensubtitles": {
TooManyRequests: (datetime.timedelta(hours=3), "3 hours"),
DownloadLimitExceeded: (datetime.timedelta(hours=6), "6 hours"),
DownloadLimitReached: (datetime.timedelta(hours=6), "6 hours"),
PaymentRequired: (datetime.timedelta(hours=12), "12 hours"),
Unauthorized: (datetime.timedelta(hours=12), "12 hours"),
APIThrottled: (datetime.timedelta(seconds=15), "15 seconds"),
ServiceUnavailable: (datetime.timedelta(hours=1), "1 hour"),
},
"opensubtitlescom": {
AuthenticationError: (datetime.timedelta(hours=12), "12 hours"),
ConfigurationError: (datetime.timedelta(hours=12), "12 hours"),
TooManyRequests: (datetime.timedelta(minutes=1), "1 minute"),
DownloadLimitExceeded: (datetime.timedelta(hours=24), "24 hours"),
},
@ -110,9 +115,6 @@ def provider_throttle_map():
legendasdivx_limit_reset_timedelta(),
f"{legendasdivx_limit_reset_timedelta().seconds // 3600 + 1} hours"),
},
"subf2m": {
ConfigurationError: (datetime.timedelta(hours=24), "24 hours"),
},
"whisperai": {
ConnectionError: (datetime.timedelta(hours=24), "24 hours"),
},
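The throttle map pairs exception classes with a (timedelta, human-readable label) tuple, with provider-specific entries taking precedence over the defaults. A hedged sketch of that lookup (the helper names are illustrative, not the module's):

import datetime

DEFAULT_THROTTLE = {TimeoutError: (datetime.timedelta(hours=1), "1 hour")}
PER_PROVIDER = {"whisperai": {ConnectionError: (datetime.timedelta(hours=24), "24 hours")}}

def lookup_throttle(provider, exc):
    cls = exc.__class__
    # a provider-specific rule wins over the generic default
    return PER_PROVIDER.get(provider, {}).get(cls) or DEFAULT_THROTTLE.get(cls)

print(lookup_throttle("whisperai", ConnectionError()))  # -> (timedelta of 24 hours, "24 hours")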
@ -126,7 +128,7 @@ throttle_count = {}
def provider_pool():
if settings.general.getboolean('multithreading'):
if settings.general.multithreading:
return subliminal_patch.core.SZAsyncProviderPool
return subliminal_patch.core.SZProviderPool
@ -157,7 +159,7 @@ def _lang_from_str(content: str):
def get_language_equals(settings_=None):
settings_ = settings_ or settings
equals = get_array_from(settings_.general.language_equals)
equals = settings_.general.language_equals
if not equals:
return []
@ -177,7 +179,7 @@ def get_language_equals(settings_=None):
def get_providers():
providers_list = []
existing_providers = provider_registry.names()
providers = [x for x in get_array_from(settings.general.enabled_providers) if x in existing_providers]
providers = [x for x in settings.general.enabled_providers if x in existing_providers]
for provider in providers:
reason, until, throttle_desc = tp.get(provider, (None, None, None))
providers_list.append(provider)
@ -205,9 +207,9 @@ def get_providers():
def get_enabled_providers():
# return enabled providers, including those that can be throttled
try:
return ast.literal_eval(settings.general.enabled_providers)
except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError):
if isinstance(settings.general.enabled_providers, list):
return settings.general.enabled_providers
else:
return []
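Most of this migration is mechanical: with typed settings, getboolean() and ast.literal_eval() wrappers simply disappear. The isinstance guard above keeps get_enabled_providers safe against legacy string values; standalone:

def enabled_providers(value):
    # typed configs hand back a real list; anything else (legacy string, None) is treated as empty
    if isinstance(value, list):
        return value
    return []

assert enabled_providers(['opensubtitlescom']) == ['opensubtitlescom']
assert enabled_providers("['opensubtitlescom']") == []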
@ -222,32 +224,28 @@ def get_providers_auth():
'password': settings.addic7ed.password,
'cookies': settings.addic7ed.cookies,
'user_agent': settings.addic7ed.user_agent,
'is_vip': settings.addic7ed.getboolean('vip'),
'is_vip': settings.addic7ed.vip,
},
'opensubtitles': {
'username': settings.opensubtitles.username,
'password': settings.opensubtitles.password,
'use_tag_search': settings.opensubtitles.getboolean(
'use_tag_search'
),
'use_tag_search': settings.opensubtitles.use_tag_search,
'only_foreign': False, # fixme
'also_foreign': False, # fixme
'is_vip': settings.opensubtitles.getboolean('vip'),
'use_ssl': settings.opensubtitles.getboolean('ssl'),
'is_vip': settings.opensubtitles.vip,
'use_ssl': settings.opensubtitles.ssl,
'timeout': int(settings.opensubtitles.timeout) or 15,
'skip_wrong_fps': settings.opensubtitles.getboolean(
'skip_wrong_fps'
),
'skip_wrong_fps': settings.opensubtitles.skip_wrong_fps,
},
'opensubtitlescom': {'username': settings.opensubtitlescom.username,
'password': settings.opensubtitlescom.password,
'use_hash': settings.opensubtitlescom.getboolean('use_hash'),
'use_hash': settings.opensubtitlescom.use_hash,
'api_key': 's38zmzVlW7IlYruWi7mHwDYl2SfMQoC1'
},
'podnapisi': {
'only_foreign': False, # fixme
'also_foreign': False, # fixme
'verify_ssl': settings.podnapisi.getboolean('verify_ssl')
'verify_ssl': settings.podnapisi.verify_ssl
},
'subscene': {
'username': settings.subscene.username,
@ -257,9 +255,7 @@ def get_providers_auth():
'legendasdivx': {
'username': settings.legendasdivx.username,
'password': settings.legendasdivx.password,
'skip_wrong_fps': settings.legendasdivx.getboolean(
'skip_wrong_fps'
),
'skip_wrong_fps': settings.legendasdivx.skip_wrong_fps,
},
'xsubs': {
'username': settings.xsubs.username,
@ -276,7 +272,7 @@ def get_providers_auth():
'titulky': {
'username': settings.titulky.username,
'password': settings.titulky.password,
'approved_only': settings.titulky.getboolean('approved_only'),
'approved_only': settings.titulky.approved_only,
},
'titlovi': {
'username': settings.titlovi.username,
@ -287,13 +283,13 @@ def get_providers_auth():
'hashed_password': settings.ktuvit.hashed_password,
},
'embeddedsubtitles': {
'included_codecs': get_array_from(settings.embeddedsubtitles.included_codecs),
'hi_fallback': settings.embeddedsubtitles.getboolean('hi_fallback'),
'included_codecs': settings.embeddedsubtitles.included_codecs,
'hi_fallback': settings.embeddedsubtitles.hi_fallback,
'cache_dir': os.path.join(args.config_dir, "cache"),
'ffprobe_path': _FFPROBE_BINARY,
'ffmpeg_path': _FFMPEG_BINARY,
'timeout': settings.embeddedsubtitles.timeout,
'unknown_as_english': settings.embeddedsubtitles.getboolean('unknown_as_english'),
'unknown_as_english': settings.embeddedsubtitles.unknown_as_english,
},
'karagarga': {
'username': settings.karagarga.username,
@ -306,7 +302,7 @@ def get_providers_auth():
'passkey': settings.hdbits.passkey,
},
'subf2m': {
'verify_ssl': settings.subf2m.getboolean('verify_ssl'),
'verify_ssl': settings.subf2m.verify_ssl,
'user_agent': settings.subf2m.user_agent,
},
'whisperai': {
@ -317,18 +313,25 @@ def get_providers_auth():
}
def _handle_mgb(name, exception):
# There's no way to get Radarr/Sonarr IDs from subliminal_patch. Blacklisted subtitles
# will not appear on the frontend but they will work with get_blacklist
if exception.media_type == "series":
blacklist_log("", "", name, exception.id, "")
def _handle_mgb(name, exception, ids, language):
if language.forced:
language_str = f'{language.basename}:forced'
elif language.hi:
language_str = f'{language.basename}:hi'
else:
blacklist_log_movie("", name, exception.id, "")
language_str = language.basename
if ids:
if exception.media_type == "series":
if 'sonarrSeriesId' in ids and 'sonarrEpisodeId' in ids:
blacklist_log(ids['sonarrSeriesId'], ids['sonarrEpisodeId'], name, exception.id, language_str)
else:
blacklist_log_movie(ids['radarrId'], name, exception.id, language_str)
def provider_throttle(name, exception):
if isinstance(exception, MustGetBlacklisted):
return _handle_mgb(name, exception)
def provider_throttle(name, exception, ids=None, language=None):
if isinstance(exception, MustGetBlacklisted) and isinstance(ids, dict) and isinstance(language, Language):
return _handle_mgb(name, exception, ids, language)
cls = getattr(exception, "__class__")
cls_name = getattr(cls, "__name__")
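The forced/hi precedence in _handle_mgb determines the language tag written to the blacklist. A standalone sketch (Language here is a stand-in for subzero.language.Language):

class Language:
    def __init__(self, basename, hi=False, forced=False):
        self.basename, self.hi, self.forced = basename, hi, forced

def language_tag(language):
    # same precedence as _handle_mgb: forced wins, then hi, then the bare language
    if language.forced:
        return f'{language.basename}:forced'
    if language.hi:
        return f'{language.basename}:hi'
    return language.basename

assert language_tag(Language('en', hi=True)) == 'en:hi'
assert language_tag(Language('en', hi=True, forced=True)) == 'en:forced'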
@ -414,7 +417,7 @@ def throttled_count(name):
def update_throttled_provider():
existing_providers = provider_registry.names()
providers_list = [x for x in get_array_from(settings.general.enabled_providers) if x in existing_providers]
providers_list = [x for x in settings.general.enabled_providers if x in existing_providers]
for provider in list(tp):
if provider not in providers_list:
@ -448,7 +451,7 @@ def list_throttled_providers():
update_throttled_provider()
throttled_providers = []
existing_providers = provider_registry.names()
providers = [x for x in get_array_from(settings.general.enabled_providers) if x in existing_providers]
providers = [x for x in settings.general.enabled_providers if x in existing_providers]
for provider in providers:
reason, until, throttle_desc = tp.get(provider, (None, None, None))
throttled_providers.append([provider, reason, pretty.date(until)])
@ -457,13 +460,15 @@ def list_throttled_providers():
def reset_throttled_providers(only_auth_or_conf_error=False):
for provider in list(tp):
if only_auth_or_conf_error and tp[provider][0] not in ['AuthenticationError', 'ConfigurationError']:
if only_auth_or_conf_error and tp[provider][0] not in ['AuthenticationError', 'ConfigurationError',
'PaymentRequired']:
continue
del tp[provider]
set_throttled_providers(str(tp))
update_throttled_provider()
if only_auth_or_conf_error:
logging.info('BAZARR throttled providers have been reset (only AuthenticationError and ConfigurationError).')
logging.info('BAZARR throttled providers have been reset (only AuthenticationError, ConfigurationError and '
'PaymentRequired).')
else:
logging.info('BAZARR throttled providers have been reset.')

@ -160,7 +160,7 @@ class PatchedTimedRotatingFileHandler(TimedRotatingFileHandler):
result = []
# See bpo-44753: Don't use the extension when computing the prefix.
n, e = os.path.splitext(baseName)
prefix = n + '.'
prefix = f'{n}.'
plen = len(prefix)
for fileName in fileNames:
if self.namer is None:

@ -24,7 +24,7 @@ def update_notifier():
for x in results['schemas']:
if x['service_name'] not in notifiers_in_db:
notifiers_added.append({'name': str(x['service_name']), 'enabled': 0})
logging.debug('Adding new notifier agent: ' + str(x['service_name']))
logging.debug(f'Adding new notifier agent: {x["service_name"]}')
else:
notifiers_kept.append(x['service_name'])
@ -60,7 +60,7 @@ def send_notifications(sonarr_series_id, sonarr_episode_id, message):
series_title = series.title
series_year = series.year
if series_year not in [None, '', '0']:
series_year = ' ({})'.format(series_year)
series_year = f' ({series_year})'
else:
series_year = ''
episode = database.execute(
@ -80,8 +80,7 @@ def send_notifications(sonarr_series_id, sonarr_episode_id, message):
apobj.notify(
title='Bazarr notification',
body="{}{} - S{:02d}E{:02d} - {} : {}".format(series_title, series_year, episode.season, episode.episode,
episode.title, message),
body=f"{series_title}{series_year} - S{episode.season:02d}E{episode.episode:02d} - {episode.title} : {message}",
)
@ -98,7 +97,7 @@ def send_notifications_movie(radarr_id, message):
movie_title = movie.title
movie_year = movie.year
if movie_year not in [None, '', '0']:
movie_year = ' ({})'.format(movie_year)
movie_year = f' ({movie_year})'
else:
movie_year = ''
@ -112,5 +111,5 @@ def send_notifications_movie(radarr_id, message):
apobj.notify(
title='Bazarr notification',
body="{}{} : {}".format(movie_title, movie_year, message),
body=f"{movie_title}{movie_year} : {message}",
)

@ -127,10 +127,10 @@ class Scheduler:
if day == "*":
text = "everyday"
else:
text = "every " + day_name[int(day)]
text = f"every {day_name[int(day)]}"
if hour != "*":
text += " at " + hour + ":00"
text += f" at {hour}:00"
return text
@ -149,7 +149,7 @@ class Scheduler:
running = False
if isinstance(job.trigger, IntervalTrigger):
interval = "every " + get_time_from_interval(job.trigger.__getstate__()['interval'])
interval = f"every {get_time_from_interval(job.trigger.__getstate__()['interval'])}"
task_list.append({'name': job.name, 'interval': interval, 'next_run_in': next_run,
'next_run_time': next_run, 'job_id': job.id, 'job_running': running})
elif isinstance(job.trigger, CronTrigger):
@ -160,14 +160,14 @@ class Scheduler:
return task_list
def __sonarr_update_task(self):
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
self.aps_scheduler.add_job(
update_series, IntervalTrigger(minutes=int(settings.sonarr.series_sync)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='update_series', name='Sync with Sonarr',
replace_existing=True)
def __radarr_update_task(self):
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
self.aps_scheduler.add_job(
update_movies, IntervalTrigger(minutes=int(settings.radarr.movies_sync)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='update_movies', name='Sync with Radarr',
@ -200,7 +200,7 @@ class Scheduler:
pass
def __sonarr_full_update_task(self):
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
full_update = settings.sonarr.full_update
if full_update == "Daily":
self.aps_scheduler.add_job(
@ -220,7 +220,7 @@ class Scheduler:
name='Index all Episode Subtitles from disk', replace_existing=True)
def __radarr_full_update_task(self):
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
full_update = settings.radarr.full_update
if full_update == "Daily":
self.aps_scheduler.add_job(
@ -242,7 +242,7 @@ class Scheduler:
if not args.no_update and os.environ["BAZARR_VERSION"] != '':
task_name = 'Update Bazarr'
if settings.general.getboolean('auto_update'):
if settings.general.auto_update:
self.aps_scheduler.add_job(
check_if_new_update, IntervalTrigger(hours=6), max_instances=1, coalesce=True,
misfire_grace_time=15, id='update_bazarr', name=task_name, replace_existing=True)
@ -264,13 +264,13 @@ class Scheduler:
id='update_announcements', name='Update Announcements File', replace_existing=True)
def __search_wanted_subtitles_task(self):
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
self.aps_scheduler.add_job(
wanted_search_missing_subtitles_series,
IntervalTrigger(hours=int(settings.general.wanted_search_frequency)), max_instances=1, coalesce=True,
misfire_grace_time=15, id='wanted_search_missing_subtitles_series', replace_existing=True,
name='Search for wanted Series Subtitles')
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
self.aps_scheduler.add_job(
wanted_search_missing_subtitles_movies,
IntervalTrigger(hours=int(settings.general.wanted_search_frequency_movie)), max_instances=1,
@ -278,8 +278,8 @@ class Scheduler:
name='Search for wanted Movies Subtitles', replace_existing=True)
def __upgrade_subtitles_task(self):
if settings.general.getboolean('upgrade_subs') and \
(settings.general.getboolean('use_sonarr') or settings.general.getboolean('use_radarr')):
if settings.general.upgrade_subs and \
(settings.general.use_sonarr or settings.general.use_radarr):
self.aps_scheduler.add_job(
upgrade_subtitles, IntervalTrigger(hours=int(settings.general.upgrade_frequency)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='upgrade_subtitles',
@ -303,9 +303,9 @@ scheduler = Scheduler()
# Force the execution of the sync process with Sonarr and Radarr after migration to v0.9.1
if 'BAZARR_AUDIO_PROFILES_MIGRATION' in os.environ:
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
scheduler.aps_scheduler.modify_job('update_series', next_run_time=datetime.now())
scheduler.aps_scheduler.modify_job('sync_episodes', next_run_time=datetime.now())
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
scheduler.aps_scheduler.modify_job('update_movies', next_run_time=datetime.now())
del os.environ['BAZARR_AUDIO_PROFILES_MIGRATION']

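All of the scheduler edits are the same typed-settings rename, but the registration pattern they sit in is worth noting: replace_existing=True makes add_job idempotent, so jobs can be re-registered on every settings save. A minimal APScheduler sketch with a placeholder job body:

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger

def sync_job():  # placeholder for update_series / update_movies
    print("syncing...")

scheduler = BackgroundScheduler()
# replace_existing=True lets this run again on reconfiguration without duplicating the job
scheduler.add_job(sync_job, IntervalTrigger(minutes=60), id='update_series',
                  name='Sync with Sonarr', max_instances=1, coalesce=True,
                  misfire_grace_time=15, replace_existing=True)
scheduler.start()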
@ -77,13 +77,13 @@ class Server:
try:
self.server.close()
except Exception as e:
logging.error('BAZARR Cannot stop Waitress: ' + repr(e))
logging.error(f'BAZARR Cannot stop Waitress: {repr(e)}')
else:
database.close()
try:
stop_file = io.open(os.path.join(args.config_dir, "bazarr.stop"), "w", encoding='UTF-8')
except Exception as e:
logging.error('BAZARR Cannot create stop file: ' + repr(e))
logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
else:
logging.info('Bazarr is being shutdown...')
stop_file.write(str(''))
@ -94,13 +94,13 @@ class Server:
try:
self.server.close()
except Exception as e:
logging.error('BAZARR Cannot stop Waitress: ' + repr(e))
logging.error(f'BAZARR Cannot stop Waitress: {repr(e)}')
else:
database.close()
try:
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
except Exception as e:
logging.error('BAZARR Cannot create restart file: ' + repr(e))
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
else:
logging.info('Bazarr is being restarted...')
restart_file.write(str(''))

@ -45,8 +45,9 @@ class SonarrSignalrClientLegacy:
def start(self):
if get_sonarr_info.is_legacy():
logging.warning('BAZARR can only sync from Sonarr v3 SignalR feed to get real-time update. You should '
'consider upgrading your version({}).'.format(get_sonarr_info.version()))
logging.warning(
f'BAZARR can only sync from Sonarr v3 SignalR feed to get real-time update. You should consider '
f'upgrading your version({get_sonarr_info.version()}).')
else:
self.connected = False
event_stream(type='badges')
@ -86,7 +87,7 @@ class SonarrSignalrClientLegacy:
if self.connection:
if self.connection.started:
self.stop(log=False)
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
self.start()
def exception_handler(self):
@ -98,7 +99,7 @@ class SonarrSignalrClientLegacy:
def configure(self):
self.apikey_sonarr = settings.sonarr.apikey
self.connection = Connection(url_sonarr() + "/signalr", self.session)
self.connection = Connection(f"{url_sonarr()}/signalr", self.session)
self.connection.qs = {'apikey': self.apikey_sonarr}
sonarr_hub = self.connection.register_hub('') # Sonarr doesn't use named hub
@ -133,7 +134,7 @@ class SonarrSignalrClient:
if self.connection:
if self.connection.transport.state.value in [0, 1, 2]:
self.stop()
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
self.start()
def exception_handler(self):
@ -158,7 +159,7 @@ class SonarrSignalrClient:
def configure(self):
self.apikey_sonarr = settings.sonarr.apikey
self.connection = HubConnectionBuilder() \
.with_url(url_sonarr() + "/signalr/messages?access_token={}".format(self.apikey_sonarr),
.with_url(f"{url_sonarr()}/signalr/messages?access_token={self.apikey_sonarr}",
options={
"verify_ssl": False,
"headers": headers
@ -200,7 +201,7 @@ class RadarrSignalrClient:
if self.connection:
if self.connection.transport.state.value in [0, 1, 2]:
self.stop()
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
self.start()
def exception_handler(self):
@ -225,7 +226,7 @@ class RadarrSignalrClient:
def configure(self):
self.apikey_radarr = settings.radarr.apikey
self.connection = HubConnectionBuilder() \
.with_url(url_radarr() + "/signalr/messages?access_token={}".format(self.apikey_radarr),
.with_url(f"{url_radarr()}/signalr/messages?access_token={self.apikey_radarr}",
options={
"verify_ssl": False,
"headers": headers
@ -300,13 +301,13 @@ def dispatcher(data):
elif topic == 'episode':
logging.debug(f'Event received from Sonarr for episode: {series_title} ({series_year}) - '
f'S{season_number:0>2}E{episode_number:0>2} - {episode_title}')
sync_one_episode(episode_id=media_id, defer_search=settings.sonarr.getboolean('defer_search_signalr'))
sync_one_episode(episode_id=media_id, defer_search=settings.sonarr.defer_search_signalr)
elif topic == 'movie':
logging.debug(f'Event received from Radarr for movie: {movie_title} ({movie_year})')
update_one_movie(movie_id=media_id, action=action,
defer_search=settings.radarr.getboolean('defer_search_signalr'))
defer_search=settings.radarr.defer_search_signalr)
except Exception as e:
logging.debug('BAZARR an exception occurred while parsing SignalR feed: {}'.format(repr(e)))
logging.debug(f'BAZARR an exception occurred while parsing SignalR feed: {repr(e)}')
finally:
event_stream(type='badges')
return

@ -9,8 +9,8 @@ from functools import wraps
from urllib.parse import unquote
from constants import headers
from sonarr.info import get_sonarr_info, url_sonarr
from radarr.info import get_radarr_info, url_radarr
from sonarr.info import url_api_sonarr
from radarr.info import url_api_radarr
from utilities.helper import check_credentials
from .config import settings, base_url
@ -109,12 +109,7 @@ def series_images(url):
url = url.strip("/")
apikey = settings.sonarr.apikey
baseUrl = settings.sonarr.base_url
if get_sonarr_info.is_legacy():
url_image = (url_sonarr() + '/api/' + url.lstrip(baseUrl) + '?apikey=' +
apikey).replace('poster-250', 'poster-500')
else:
url_image = (url_sonarr() + '/api/v3/' + url.lstrip(baseUrl) + '?apikey=' +
apikey).replace('poster-250', 'poster-500')
url_image = f'{url_api_sonarr()}{url.lstrip(baseUrl)}?apikey={apikey}'.replace('poster-250', 'poster-500')
try:
req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=headers)
except Exception:
@ -128,10 +123,7 @@ def series_images(url):
def movies_images(url):
apikey = settings.radarr.apikey
baseUrl = settings.radarr.base_url
if get_radarr_info.is_legacy():
url_image = url_radarr() + '/api/' + url.lstrip(baseUrl) + '?apikey=' + apikey
else:
url_image = url_radarr() + '/api/v3/' + url.lstrip(baseUrl) + '?apikey=' + apikey
url_image = f'{url_api_radarr()}{url.lstrip(baseUrl)}?apikey={apikey}'
try:
req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=headers)
except Exception:
@ -171,7 +163,7 @@ def configured():
def proxy(protocol, url):
if protocol.lower() not in ['http', 'https']:
return dict(status=False, error='Unsupported protocol')
url = protocol + '://' + unquote(url)
url = f'{protocol}://{unquote(url)}'
params = request.args
try:
result = requests.get(url, params, allow_redirects=False, verify=False, timeout=5, headers=headers)

@ -11,7 +11,7 @@ import rarfile
from dogpile.cache.region import register_backend as register_cache_backend
from app.config import settings, configure_captcha_func, get_array_from
from app.config import settings, configure_captcha_func, write_config
from app.get_args import args
from app.logger import configure_logging
from utilities.binaries import get_binary, BinaryNotFound
@ -28,7 +28,7 @@ startTime = time.time()
restore_from_backup()
# set subliminal_patch user agent
os.environ["SZ_USER_AGENT"] = "Bazarr/{}".format(os.environ["BAZARR_VERSION"])
os.environ["SZ_USER_AGENT"] = f"Bazarr/{os.environ['BAZARR_VERSION']}"
# Check if args.config_dir exist
if not os.path.exists(args.config_dir):
@ -62,7 +62,7 @@ configure_captcha_func()
from ga4mp import GtagMP # noqa E402
# configure logging
configure_logging(settings.general.getboolean('debug') or args.debug)
configure_logging(settings.general.debug or args.debug)
import logging # noqa E402
@ -96,7 +96,7 @@ if not args.no_update:
pip_command.insert(4, '--user')
subprocess.check_output(pip_command, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
logging.exception('BAZARR requirements.txt installation result: {}'.format(e.stdout))
logging.exception(f'BAZARR requirements.txt installation result: {e.stdout}')
os._exit(1)
else:
logging.info('BAZARR requirements installed.')
@ -104,37 +104,21 @@ if not args.no_update:
try:
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
except Exception as e:
logging.error('BAZARR Cannot create restart file: ' + repr(e))
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
else:
logging.info('Bazarr is being restarted...')
restart_file.write(str(''))
restart_file.close()
os._exit(0)
# create random api_key if there's none in config.ini
if not settings.auth.apikey or settings.auth.apikey.startswith("b'"):
from binascii import hexlify
settings.auth.apikey = hexlify(os.urandom(16)).decode()
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
# create random Flask secret_key if there's none in config.ini
if not settings.general.flask_secret_key:
from binascii import hexlify
settings.general.flask_secret_key = hexlify(os.urandom(16)).decode()
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
# change default base_url to ''
settings.general.base_url = settings.general.base_url.rstrip('/')
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
write_config()
# migrate enabled_providers from comma separated string to list
if isinstance(settings.general.enabled_providers, str) and not settings.general.enabled_providers.startswith('['):
settings.general.enabled_providers = str(settings.general.enabled_providers.split(","))
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
write_config()
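A standalone sketch of the intent of that one-time migration, assuming the legacy value is a bare comma-separated string and returning a native list, which is what the typed-settings code now expects (values already starting with '[' are left for the literal-eval path):

def migrate_enabled_providers(value):
    # legacy configs stored a bare comma-separated string; real lists pass through untouched
    if isinstance(value, str) and not value.startswith('['):
        return value.split(",")
    return value

assert migrate_enabled_providers("addic7ed,opensubtitlescom") == ["addic7ed", "opensubtitlescom"]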
# Read package_info (if exists) to override some settings by package maintainers
# This file can also provide some info about the package version and author
@ -166,8 +150,7 @@ if os.path.isfile(package_info_file):
except Exception:
pass
else:
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
write_config()
# Configure dogpile file caching for Subliminal request
register_cache_backend("subzero.cache.file", "subzero.cache_backends.file", "SZFileBackend")
@ -186,30 +169,24 @@ if not os.path.exists(os.path.join(args.config_dir, 'config', 'announcements.txt
get_announcements_to_file()
logging.debug("BAZARR Created announcements file")
config_file = os.path.normpath(os.path.join(args.config_dir, 'config', 'config.ini'))
# Move GA visitor from config.ini to dedicated file
if settings.analytics.visitor:
# Move GA visitor from config to dedicated file
if 'visitor' in settings.analytics:
with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'analytics.dat')), 'w+') as handle:
handle.write(settings.analytics.visitor)
with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'config.ini')), 'w+') as handle:
settings.remove_option('analytics', 'visitor')
settings.write(handle)
settings['analytics'].pop('visitor', None)
# Clean unused settings from config.ini
with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'config.ini')), 'w+') as handle:
settings.remove_option('general', 'throtteled_providers')
settings.remove_option('general', 'update_restart')
settings.write(handle)
# Clean unused settings from config
settings['general'].pop('throtteled_providers', None)
settings['general'].pop('update_restart', None)
write_config()
# Remove deprecated providers from enabled providers in config.ini
# Remove deprecated providers from enabled providers in config
from subliminal_patch.extensions import provider_registry # noqa E401
existing_providers = provider_registry.names()
enabled_providers = get_array_from(settings.general.enabled_providers)
settings.general.enabled_providers = str([x for x in enabled_providers if x in existing_providers])
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
enabled_providers = settings.general.enabled_providers
settings.general.enabled_providers = [x for x in enabled_providers if x in existing_providers]
write_config()
def init_binaries():

@ -28,7 +28,7 @@ if bazarr_version != '':
apply_update()
# Check for new update and install latest
if args.no_update or not settings.general.getboolean('auto_update'):
if args.no_update or not settings.general.auto_update:
# user has explicitly requested that we do not update, or is using some kind of package/docker that prevents it
check_releases()
else:
@ -47,7 +47,7 @@ if args.create_db_revision:
try:
stop_file = io.open(os.path.join(args.config_dir, "bazarr.stop"), "w", encoding='UTF-8')
except Exception as e:
logging.error('BAZARR Cannot create stop file: ' + repr(e))
logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
else:
create_db_revision(app)
logging.info('Bazarr is being shutdown...')
@ -74,9 +74,9 @@ login_auth = settings.auth.type
update_notifier()
if not args.no_signalr:
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
Thread(target=sonarr_signalr_client.start).start()
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
Thread(target=radarr_signalr_client.start).start()

@ -4,7 +4,7 @@ import requests
import logging
from app.config import settings
from radarr.info import get_radarr_info, url_radarr
from radarr.info import url_api_radarr
from constants import headers
@ -12,16 +12,11 @@ def browse_radarr_filesystem(path='#'):
if path == '#':
path = ''
if get_radarr_info.is_legacy():
url_radarr_api_filesystem = url_radarr() + "/api/filesystem?path=" + path + \
"&allowFoldersWithoutTrailingSlashes=true&includeFiles=false&apikey=" + \
settings.radarr.apikey
else:
url_radarr_api_filesystem = url_radarr() + "/api/v3/filesystem?path=" + path + \
"&allowFoldersWithoutTrailingSlashes=true&includeFiles=false&apikey=" + \
settings.radarr.apikey
url_radarr_api_filesystem = (f"{url_api_radarr()}filesystem?path={path}&allowFoldersWithoutTrailingSlashes=true&"
f"includeFiles=false&apikey={settings.radarr.apikey}")
try:
r = requests.get(url_radarr_api_filesystem, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
r = requests.get(url_radarr_api_filesystem, timeout=int(settings.radarr.http_timeout), verify=False,
headers=headers)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get series from Radarr. Http error.")

@ -26,25 +26,27 @@ class GetRadarrInfo:
return radarr_version
else:
radarr_version = ''
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
try:
rv = url_radarr() + "/api/system/status?apikey=" + settings.radarr.apikey
radarr_json = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers).json()
rv = f"{url_radarr()}/api/system/status?apikey={settings.radarr.apikey}"
radarr_json = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False,
headers=headers).json()
if 'version' in radarr_json:
radarr_version = radarr_json['version']
else:
raise json.decoder.JSONDecodeError
except json.decoder.JSONDecodeError:
try:
rv = url_radarr() + "/api/v3/system/status?apikey=" + settings.radarr.apikey
radarr_version = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers).json()['version']
rv = f"{url_radarr()}/api/v3/system/status?apikey={settings.radarr.apikey}"
radarr_version = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False,
headers=headers).json()['version']
except json.decoder.JSONDecodeError:
logging.debug('BAZARR cannot get Radarr version')
radarr_version = 'unknown'
except Exception:
logging.debug('BAZARR cannot get Radarr version')
radarr_version = 'unknown'
logging.debug('BAZARR got this Radarr version from its API: {}'.format(radarr_version))
logging.debug(f'BAZARR got this Radarr version from its API: {radarr_version}')
region.set("radarr_version", radarr_version)
return radarr_version
@ -75,7 +77,7 @@ get_radarr_info = GetRadarrInfo()
def url_radarr():
if settings.radarr.getboolean('ssl'):
if settings.radarr.ssl:
protocol_radarr = "https"
else:
protocol_radarr = "http"
@ -83,7 +85,7 @@ def url_radarr():
if settings.radarr.base_url == '':
settings.radarr.base_url = "/"
if not settings.radarr.base_url.startswith("/"):
settings.radarr.base_url = "/" + settings.radarr.base_url
settings.radarr.base_url = f"/{settings.radarr.base_url}"
if settings.radarr.base_url.endswith("/"):
settings.radarr.base_url = settings.radarr.base_url[:-1]
@ -93,3 +95,7 @@ def url_radarr():
port = f":{settings.radarr.port}"
return f"{protocol_radarr}://{settings.radarr.ip}{port}{settings.radarr.base_url}"
def url_api_radarr():
return url_radarr() + f'/api{"/v3" if not get_radarr_info.is_legacy() else ""}/'

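The new url_api_radarr() helper collapses every legacy-vs-v3 branch into a single place; callers just append an endpoint and the API key. A hedged sketch of the pattern, stubbing out url_radarr() and the legacy probe that live in radarr.info:

def url_api(base_url: str, legacy: bool) -> str:
    # legacy Radarr exposes /api, v3+ exposes /api/v3; the trailing slash lets callers append endpoints directly
    return base_url + f'/api{"" if legacy else "/v3"}/'

assert url_api("http://localhost:7878", legacy=False) == "http://localhost:7878/api/v3/"
# e.g. f"{url_api('http://localhost:7878', legacy=False)}rootfolder?apikey={apikey}"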
@ -4,16 +4,13 @@ import logging
import requests
from app.config import settings
from radarr.info import get_radarr_info, url_radarr
from radarr.info import url_api_radarr
from constants import headers
def notify_radarr(radarr_id):
try:
if get_radarr_info.is_legacy():
url = url_radarr() + "/api/command?apikey=" + settings.radarr.apikey
else:
url = url_radarr() + "/api/v3/command?apikey=" + settings.radarr.apikey
url = f"{url_api_radarr()}command?apikey={settings.radarr.apikey}"
data = {
'name': 'RescanMovie',
'movieId': int(radarr_id)

@ -7,7 +7,7 @@ import logging
from app.config import settings
from utilities.path_mappings import path_mappings
from app.database import TableMoviesRootfolder, TableMovies, database, delete, update, insert, select
from radarr.info import get_radarr_info, url_radarr
from radarr.info import url_api_radarr
from constants import headers
@ -16,10 +16,7 @@ def get_radarr_rootfolder():
radarr_rootfolder = []
# Get root folder data from Radarr
if get_radarr_info.is_legacy():
url_radarr_api_rootfolder = url_radarr() + "/api/rootfolder?apikey=" + apikey_radarr
else:
url_radarr_api_rootfolder = url_radarr() + "/api/v3/rootfolder?apikey=" + apikey_radarr
url_radarr_api_rootfolder = f"{url_api_radarr()}rootfolder?apikey={apikey_radarr}"
try:
rootfolder = requests.get(url_radarr_api_rootfolder, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)

@ -6,12 +6,11 @@ import logging
from sqlalchemy.exc import IntegrityError
from app.config import settings
from radarr.info import url_radarr
from utilities.path_mappings import path_mappings
from subtitles.indexer.movies import store_subtitles_movie, movies_full_scan_subtitles
from radarr.rootfolder import check_radarr_rootfolder
from subtitles.mass_download import movies_download_subtitles
from app.database import TableMovies, database, insert, update, delete, select
from app.database import TableMovies, TableLanguagesProfiles, database, insert, update, delete, select
from app.event_handler import event_stream, show_progress, hide_progress
from .utils import get_profile_list, get_tags, get_movies_from_radarr_api
@ -40,8 +39,7 @@ def update_movie(updated_movie, send_event):
except IntegrityError as e:
logging.error(f"BAZARR cannot update movie {updated_movie['path']} because of {e}")
else:
store_subtitles_movie(updated_movie['path'],
path_mappings.path_replace_movie(updated_movie['path']))
store_subtitles_movie(updated_movie['path'], path_mappings.path_replace_movie(updated_movie['path']))
if send_event:
event_stream(type='movie', action='update', payload=updated_movie['radarrId'])
@ -56,8 +54,7 @@ def add_movie(added_movie, send_event):
except IntegrityError as e:
logging.error(f"BAZARR cannot insert movie {added_movie['path']} because of {e}")
else:
store_subtitles_movie(added_movie['path'],
path_mappings.path_replace_movie(added_movie['path']))
store_subtitles_movie(added_movie['path'], path_mappings.path_replace_movie(added_movie['path']))
if send_event:
event_stream(type='movie', action='update', payload=int(added_movie['radarrId']))
@ -68,7 +65,7 @@ def update_movies(send_event=True):
logging.debug('BAZARR Starting movie sync from Radarr.')
apikey_radarr = settings.radarr.apikey
movie_default_enabled = settings.general.getboolean('movie_default_enabled')
movie_default_enabled = settings.general.movie_default_enabled
if movie_default_enabled is True:
movie_default_profile = settings.general.movie_default_profile
@ -77,6 +74,13 @@ def update_movies(send_event=True):
else:
movie_default_profile = None
# Prevent trying to insert a movie with a non-existing languages profileId
if (movie_default_profile and not database.execute(
select(TableLanguagesProfiles)
.where(TableLanguagesProfiles.profileId == movie_default_profile))
.first()):
movie_default_profile = None
if apikey_radarr is None:
pass
else:
@ -84,7 +88,7 @@ def update_movies(send_event=True):
tagsDict = get_tags()
# Get movies data from radarr
movies = get_movies_from_radarr_api(url=url_radarr(), apikey_radarr=apikey_radarr)
movies = get_movies_from_radarr_api(apikey_radarr=apikey_radarr)
if not isinstance(movies, list):
return
else:
@ -102,22 +106,19 @@ def update_movies(send_event=True):
'movieFile' in movie and
(movie['movieFile']['size'] > 20480 or
get_movie_file_size_from_db(movie['movieFile']['path']) > 20480)]
movies_to_add = []
# Remove old movies from DB
movies_to_delete = list(set(current_movies_id_db) - set(current_movies_radarr))
if len(movies_to_delete):
try:
removed_movies = database.execute(delete(TableMovies)
.where(TableMovies.tmdbId.in_(movies_to_delete))
.returning(TableMovies.radarrId))
database.execute(delete(TableMovies).where(TableMovies.tmdbId.in_(movies_to_delete)))
except IntegrityError as e:
logging.error(f"BAZARR cannot delete movies because of {e}")
else:
for removed_movie in removed_movies:
for removed_movie in movies_to_delete:
if send_event:
event_stream(type='movie', action='delete', payload=removed_movie.radarrId)
event_stream(type='movie', action='delete', payload=removed_movie)
# Build new and updated movies
movies_count = len(movies)
@ -155,7 +156,7 @@ def update_movies(send_event=True):
def update_one_movie(movie_id, action, defer_search=False):
logging.debug('BAZARR syncing this specific movie from Radarr: {}'.format(movie_id))
logging.debug(f'BAZARR syncing this specific movie from Radarr: {movie_id}')
# Check if there's a row in database for this movie ID
existing_movie = database.execute(
@ -175,11 +176,12 @@ def update_one_movie(movie_id, action, defer_search=False):
f"because of {e}")
else:
event_stream(type='movie', action='delete', payload=int(movie_id))
logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
existing_movie.path)))
logging.debug(
f'BAZARR deleted this movie from the database: '
f'{path_mappings.path_replace_movie(existing_movie.path)}')
return
movie_default_enabled = settings.general.getboolean('movie_default_enabled')
movie_default_enabled = settings.general.movie_default_enabled
if movie_default_enabled is True:
movie_default_profile = settings.general.movie_default_profile
@ -194,8 +196,7 @@ def update_one_movie(movie_id, action, defer_search=False):
try:
# Get movie data from radarr api
movie = None
movie_data = get_movies_from_radarr_api(url=url_radarr(), apikey_radarr=settings.radarr.apikey,
radarr_id=movie_id)
movie_data = get_movies_from_radarr_api(apikey_radarr=settings.radarr.apikey, radarr_id=movie_id)
if not movie_data:
return
else:
@ -224,8 +225,8 @@ def update_one_movie(movie_id, action, defer_search=False):
f"of {e}")
else:
event_stream(type='movie', action='delete', payload=int(movie_id))
logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
existing_movie.path)))
logging.debug(
f'BAZARR deleted this movie from the database:{path_mappings.path_replace_movie(existing_movie.path)}')
return
# Update existing movie in DB
@ -239,9 +240,10 @@ def update_one_movie(movie_id, action, defer_search=False):
logging.error(f"BAZARR cannot update movie {path_mappings.path_replace_movie(movie['path'])} because "
f"of {e}")
else:
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))
event_stream(type='movie', action='update', payload=int(movie_id))
logging.debug('BAZARR updated this movie into the database:{}'.format(path_mappings.path_replace_movie(
movie['path'])))
logging.debug(
f'BAZARR updated this movie into the database:{path_mappings.path_replace_movie(movie["path"])}')
# Insert new movie in DB
elif movie and not existing_movie:
@ -253,20 +255,21 @@ def update_one_movie(movie_id, action, defer_search=False):
logging.error(f"BAZARR cannot insert movie {path_mappings.path_replace_movie(movie['path'])} because "
f"of {e}")
else:
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))
event_stream(type='movie', action='update', payload=int(movie_id))
logging.debug('BAZARR inserted this movie into the database:{}'.format(path_mappings.path_replace_movie(
movie['path'])))
logging.debug(
f'BAZARR inserted this movie into the database:{path_mappings.path_replace_movie(movie["path"])}')
# Storing existing subtitles
logging.debug('BAZARR storing subtitles for this movie: {}'.format(path_mappings.path_replace_movie(
movie['path'])))
logging.debug(f'BAZARR storing subtitles for this movie: {path_mappings.path_replace_movie(movie["path"])}')
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))
# Downloading missing subtitles
if defer_search:
logging.debug('BAZARR searching for missing subtitles is deferred until scheduled task execution for this '
'movie: {}'.format(path_mappings.path_replace_movie(movie['path'])))
logging.debug(
f'BAZARR searching for missing subtitles is deferred until scheduled task execution for this movie: '
f'{path_mappings.path_replace_movie(movie["path"])}')
else:
logging.debug('BAZARR downloading missing subtitles for this movie: {}'.format(path_mappings.path_replace_movie(
movie['path'])))
logging.debug(
f'BAZARR downloading missing subtitles for this movie: {path_mappings.path_replace_movie(movie["path"])}')
movies_download_subtitles(movie_id)

@ -25,7 +25,7 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
overview = ""
try:
poster_big = movie['images'][0]['url']
poster = os.path.splitext(poster_big)[0] + '-500' + os.path.splitext(poster_big)[1]
poster = f'{os.path.splitext(poster_big)[0]}-500{os.path.splitext(poster_big)[1]}'
except Exception:
poster = ""
try:
@ -56,7 +56,7 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
except Exception:
format = movie['movieFile']['quality']['quality']['name']
try:
resolution = str(movie['movieFile']['quality']['quality']['resolution']) + 'p'
resolution = f'{movie["movieFile"]["quality"]["quality"]["resolution"]}p'
except Exception:
resolution = None
@ -92,7 +92,7 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
videoCodec = None
audioCodec = None
if settings.general.getboolean('parse_embedded_audio_track'):
if settings.general.parse_embedded_audio_track:
audio_language = embedded_audio_reader(path_mappings.path_replace_movie(movie['movieFile']['path']),
file_size=movie['movieFile']['size'],
movie_file_id=movie['movieFile']['id'],

@ -4,7 +4,7 @@ import requests
import logging
from app.config import settings
from radarr.info import get_radarr_info, url_radarr
from radarr.info import get_radarr_info, url_api_radarr
from constants import headers
@ -12,10 +12,8 @@ def get_profile_list():
apikey_radarr = settings.radarr.apikey
profiles_list = []
# Get profiles data from radarr
if get_radarr_info.is_legacy():
url_radarr_api_movies = url_radarr() + "/api/profile?apikey=" + apikey_radarr
else:
url_radarr_api_movies = url_radarr() + "/api/v3/qualityprofile?apikey=" + apikey_radarr
url_radarr_api_movies = (f"{url_api_radarr()}{'quality' if url_api_radarr().endswith('v3/') else ''}profile?"
f"apikey={apikey_radarr}")
try:
profiles_json = requests.get(url_radarr_api_movies, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
@ -44,10 +42,7 @@ def get_tags():
tagsDict = []
# Get tags data from Radarr
if get_radarr_info.is_legacy():
url_radarr_api_series = url_radarr() + "/api/tag?apikey=" + apikey_radarr
else:
url_radarr_api_series = url_radarr() + "/api/v3/tag?apikey=" + apikey_radarr
url_radarr_api_series = f"{url_api_radarr()}tag?apikey={apikey_radarr}"
try:
tagsDict = requests.get(url_radarr_api_series, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
@ -70,13 +65,8 @@ def get_tags():
return []
def get_movies_from_radarr_api(url, apikey_radarr, radarr_id=None):
if get_radarr_info.is_legacy():
url_radarr_api_movies = url + "/api/movie" + ("/{}".format(radarr_id) if radarr_id else "") + "?apikey=" + \
apikey_radarr
else:
url_radarr_api_movies = url + "/api/v3/movie" + ("/{}".format(radarr_id) if radarr_id else "") + "?apikey=" + \
apikey_radarr
def get_movies_from_radarr_api(apikey_radarr, radarr_id=None):
url_radarr_api_movies = f'{url_api_radarr()}movie{f"/{radarr_id}" if radarr_id else ""}?apikey={apikey_radarr}'
try:
r = requests.get(url_radarr_api_movies, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
@ -95,5 +85,11 @@ def get_movies_from_radarr_api(url, apikey_radarr, radarr_id=None):
except requests.exceptions.RequestException:
logging.exception("BAZARR Error trying to get movies from Radarr.")
return
except Exception as e:
logging.exception(f"Exception raised while getting movies from Radarr API: {e}")
return
else:
return r.json()
if r.status_code == 200:
return r.json()
else:
return

@ -4,23 +4,18 @@ import requests
import logging
from app.config import settings
from sonarr.info import get_sonarr_info, url_sonarr
from sonarr.info import url_api_sonarr
from constants import headers
def browse_sonarr_filesystem(path='#'):
if path == '#':
path = ''
if get_sonarr_info.is_legacy():
url_sonarr_api_filesystem = url_sonarr() + "/api/filesystem?path=" + path + \
"&allowFoldersWithoutTrailingSlashes=true&includeFiles=false&apikey=" + \
settings.sonarr.apikey
else:
url_sonarr_api_filesystem = url_sonarr() + "/api/v3/filesystem?path=" + path + \
"&allowFoldersWithoutTrailingSlashes=true&includeFiles=false&apikey=" + \
settings.sonarr.apikey
url_sonarr_api_filesystem = (f"{url_api_sonarr()}filesystem?path={path}&allowFoldersWithoutTrailingSlashes=true&"
f"includeFiles=false&apikey={settings.sonarr.apikey}")
try:
r = requests.get(url_sonarr_api_filesystem, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
r = requests.get(url_sonarr_api_filesystem, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get series from Sonarr. Http error.")

@ -26,25 +26,27 @@ class GetSonarrInfo:
return sonarr_version
else:
sonarr_version = ''
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
try:
sv = url_sonarr() + "/api/system/status?apikey=" + settings.sonarr.apikey
sonarr_json = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers).json()
sv = f"{url_sonarr()}/api/system/status?apikey={settings.sonarr.apikey}"
sonarr_json = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers).json()
if 'version' in sonarr_json:
sonarr_version = sonarr_json['version']
else:
raise json.decoder.JSONDecodeError
except json.decoder.JSONDecodeError:
try:
sv = url_sonarr() + "/api/v3/system/status?apikey=" + settings.sonarr.apikey
sonarr_version = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers).json()['version']
sv = f"{url_sonarr()}/api/v3/system/status?apikey={settings.sonarr.apikey}"
sonarr_version = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers).json()['version']
except json.decoder.JSONDecodeError:
logging.debug('BAZARR cannot get Sonarr version')
sonarr_version = 'unknown'
except Exception:
logging.debug('BAZARR cannot get Sonarr version')
sonarr_version = 'unknown'
logging.debug('BAZARR got this Sonarr version from its API: {}'.format(sonarr_version))
logging.debug(f'BAZARR got this Sonarr version from its API: {sonarr_version}')
region.set("sonarr_version", sonarr_version)
return sonarr_version
@ -75,7 +77,7 @@ get_sonarr_info = GetSonarrInfo()
def url_sonarr():
if settings.sonarr.getboolean('ssl'):
if settings.sonarr.ssl:
protocol_sonarr = "https"
else:
protocol_sonarr = "http"
@ -83,7 +85,7 @@ def url_sonarr():
if settings.sonarr.base_url == '':
settings.sonarr.base_url = "/"
if not settings.sonarr.base_url.startswith("/"):
settings.sonarr.base_url = "/" + settings.sonarr.base_url
settings.sonarr.base_url = f"/{settings.sonarr.base_url}"
if settings.sonarr.base_url.endswith("/"):
settings.sonarr.base_url = settings.sonarr.base_url[:-1]
@ -93,3 +95,7 @@ def url_sonarr():
port = f":{settings.sonarr.port}"
return f"{protocol_sonarr}://{settings.sonarr.ip}{port}{settings.sonarr.base_url}"
def url_api_sonarr():
return url_sonarr() + f'/api{"/v3" if not get_sonarr_info.is_legacy() else ""}/'

@ -4,16 +4,13 @@ import logging
import requests
from app.config import settings
from sonarr.info import get_sonarr_info, url_sonarr
from sonarr.info import url_api_sonarr
from constants import headers
def notify_sonarr(sonarr_series_id):
try:
if get_sonarr_info.is_legacy():
url = url_sonarr() + "/api/command?apikey=" + settings.sonarr.apikey
else:
url = url_sonarr() + "/api/v3/command?apikey=" + settings.sonarr.apikey
url = f"{url_api_sonarr()}command?apikey={settings.sonarr.apikey}"
data = {
'name': 'RescanSeries',
'seriesId': int(sonarr_series_id)

@ -7,7 +7,7 @@ import logging
from app.config import settings
from app.database import TableShowsRootfolder, TableShows, database, insert, update, delete, select
from utilities.path_mappings import path_mappings
from sonarr.info import get_sonarr_info, url_sonarr
from sonarr.info import url_api_sonarr
from constants import headers
@ -16,10 +16,7 @@ def get_sonarr_rootfolder():
sonarr_rootfolder = []
# Get root folder data from Sonarr
if get_sonarr_info.is_legacy():
url_sonarr_api_rootfolder = url_sonarr() + "/api/rootfolder?apikey=" + apikey_sonarr
else:
url_sonarr_api_rootfolder = url_sonarr() + "/api/v3/rootfolder?apikey=" + apikey_sonarr
url_sonarr_api_rootfolder = f"{url_api_sonarr()}rootfolder?apikey={apikey_sonarr}"
try:
rootfolder = requests.get(url_sonarr_api_rootfolder, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)

@ -47,13 +47,11 @@ def sync_episodes(series_id, send_event=True):
episodes_to_add = []
# Get episodes data for a series from Sonarr
episodes = get_episodes_from_sonarr_api(url=url_sonarr(), apikey_sonarr=apikey_sonarr,
series_id=series_id)
episodes = get_episodes_from_sonarr_api(apikey_sonarr=apikey_sonarr, series_id=series_id)
if episodes:
# For Sonarr v3, we need to update episodes to integrate the episodeFile API endpoint results
if not get_sonarr_info.is_legacy():
episodeFiles = get_episodesFiles_from_sonarr_api(url=url_sonarr(), apikey_sonarr=apikey_sonarr,
series_id=series_id)
episodeFiles = get_episodesFiles_from_sonarr_api(apikey_sonarr=apikey_sonarr, series_id=series_id)
for episode in episodes:
if episodeFiles and episode['hasFile']:
item = [x for x in episodeFiles if x['id'] == episode['episodeFileId']]
@ -80,31 +78,32 @@ def sync_episodes(series_id, send_event=True):
episodes_to_update.append(parsed_episode)
else:
episodes_to_add.append(episodeParser(episode))
else:
return
# Remove old episodes from DB
episodes_to_delete = list(set(current_episodes_id_db_list) - set(current_episodes_sonarr))
if len(episodes_to_delete):
try:
removed_episodes = database.execute(delete(TableEpisodes)
.where(TableEpisodes.sonarrEpisodeId.in_(episodes_to_delete))
.returning(TableEpisodes.sonarrEpisodeId))
database.execute(delete(TableEpisodes).where(TableEpisodes.sonarrEpisodeId.in_(episodes_to_delete)))
except IntegrityError as e:
logging.error(f"BAZARR cannot delete episodes because of {e}")
else:
for removed_episode in removed_episodes:
for removed_episode in episodes_to_delete:
if send_event:
event_stream(type='episode', action='delete', payload=removed_episode.sonarrEpisodeId)
event_stream(type='episode', action='delete', payload=removed_episode)
# Update existing episodes in DB
if len(episodes_to_update):
try:
database.execute(update(TableEpisodes), episodes_to_update)
except IntegrityError as e:
logging.error(f"BAZARR cannot update episodes because of {e}")
else:
for updated_episode in episodes_to_update:
# not using .returning() because it's not supported on executemany() with SQLite
for updated_episode in episodes_to_update:
try:
database.execute(update(TableEpisodes)
.values(updated_episode)
.where(TableEpisodes.sonarrEpisodeId == updated_episode['sonarrEpisodeId']))
except IntegrityError as e:
logging.error(f"BAZARR cannot update episodes because of {e}")
else:
store_subtitles(updated_episode['path'], path_mappings.path_replace(updated_episode['path']))
if send_event:
@ -112,25 +111,22 @@ def sync_episodes(series_id, send_event=True):
# Insert new episodes in DB
if len(episodes_to_add):
try:
added_episodes = database.execute(
insert(TableEpisodes)
.values(episodes_to_add)
.returning(TableEpisodes.sonarrEpisodeId, TableEpisodes.path, TableEpisodes.sonarrSeriesId))
except IntegrityError as e:
logging.error(f"BAZARR cannot insert episodes because of {e}")
else:
for added_episode in added_episodes:
store_subtitles(added_episode.path, path_mappings.path_replace(added_episode.path))
for added_episode in episodes_to_add:
try:
database.execute(insert(TableEpisodes).values(added_episode))
except IntegrityError as e:
logging.error(f"BAZARR cannot insert episodes because of {e}")
else:
store_subtitles(added_episode['path'], path_mappings.path_replace(added_episode['path']))
if send_event:
event_stream(type='episode', payload=added_episode.sonarrEpisodeId)
event_stream(type='episode', payload=added_episode['sonarrEpisodeId'])
logging.debug(f'BAZARR All episodes from series ID {series_id} synced from Sonarr into database.')
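The loops above trade a single executemany() for per-row statements because SQLAlchemy's RETURNING isn't supported with executemany() on SQLite; a useful side effect is that an IntegrityError now skips one row instead of failing the whole batch. A minimal sketch of the pattern (Episode is a stand-in table, not the real TableEpisodes):

from sqlalchemy import Column, Integer, Text, create_engine, insert
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Episode(Base):  # stand-in for TableEpisodes
    __tablename__ = 'episode'
    sonarrEpisodeId = Column(Integer, primary_key=True)
    path = Column(Text)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

rows = [{'sonarrEpisodeId': 1, 'path': '/tv/e1.mkv'},
        {'sonarrEpisodeId': 1, 'path': '/tv/dup.mkv'}]  # duplicate key on purpose
with Session(engine) as session:
    for row in rows:
        try:
            # one statement per row: a bad row is skipped, the rest still land
            session.execute(insert(Episode).values(**row))
            session.commit()
        except IntegrityError:
            session.rollback()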
def sync_one_episode(episode_id, defer_search=False):
logging.debug('BAZARR syncing this specific episode from Sonarr: {}'.format(episode_id))
logging.debug(f'BAZARR syncing this specific episode from Sonarr: {episode_id}')
url = url_sonarr()
apikey_sonarr = settings.sonarr.apikey
@ -143,8 +139,7 @@ def sync_one_episode(episode_id, defer_search=False):
try:
# Get episode data from sonarr api
episode = None
episode_data = get_episodes_from_sonarr_api(url=url, apikey_sonarr=apikey_sonarr,
episode_id=episode_id)
episode_data = get_episodes_from_sonarr_api(apikey_sonarr=apikey_sonarr, episode_id=episode_id)
if not episode_data:
return
@ -152,7 +147,7 @@ def sync_one_episode(episode_id, defer_search=False):
# For Sonarr v3, we need to update episodes to integrate the episodeFile API endpoint results
if not get_sonarr_info.is_legacy() and existing_episode and episode_data['hasFile']:
episode_data['episodeFile'] = \
get_episodesFiles_from_sonarr_api(url=url, apikey_sonarr=apikey_sonarr,
get_episodesFiles_from_sonarr_api(apikey_sonarr=apikey_sonarr,
episode_file_id=episode_data['episodeFileId'])
episode = episodeParser(episode_data)
except Exception:
@ -173,8 +168,8 @@ def sync_one_episode(episode_id, defer_search=False):
logging.error(f"BAZARR cannot delete episode {existing_episode.path} because of {e}")
else:
event_stream(type='episode', action='delete', payload=int(episode_id))
logging.debug('BAZARR deleted this episode from the database:{}'.format(path_mappings.path_replace(
existing_episode['path'])))
logging.debug(
f'BAZARR deleted this episode from the database:{path_mappings.path_replace(existing_episode["path"])}')
return
# Update existing episodes in DB
@ -187,9 +182,10 @@ def sync_one_episode(episode_id, defer_search=False):
except IntegrityError as e:
logging.error(f"BAZARR cannot update episode {episode['path']} because of {e}")
else:
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
event_stream(type='episode', action='update', payload=int(episode_id))
logging.debug('BAZARR updated this episode into the database:{}'.format(path_mappings.path_replace(
episode['path'])))
logging.debug(
f'BAZARR updated this episode into the database:{path_mappings.path_replace(episode["path"])}')
# Insert new episodes in DB
elif episode and not existing_episode:
@ -200,20 +196,21 @@ def sync_one_episode(episode_id, defer_search=False):
except IntegrityError as e:
logging.error(f"BAZARR cannot insert episode {episode['path']} because of {e}")
else:
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
event_stream(type='episode', action='update', payload=int(episode_id))
logging.debug('BAZARR inserted this episode into the database:{}'.format(path_mappings.path_replace(
episode['path'])))
logging.debug(
f'BAZARR inserted this episode into the database:{path_mappings.path_replace(episode["path"])}')
# Storing existing subtitles
logging.debug('BAZARR storing subtitles for this episode: {}'.format(path_mappings.path_replace(
episode['path'])))
logging.debug(f'BAZARR storing subtitles for this episode: {path_mappings.path_replace(episode["path"])}')
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
# Downloading missing subtitles
if defer_search:
logging.debug('BAZARR searching for missing subtitles is deferred until scheduled task execution for this '
'episode: {}'.format(path_mappings.path_replace(episode['path'])))
logging.debug(
f'BAZARR searching for missing subtitles is deferred until scheduled task execution for this episode: '
f'{path_mappings.path_replace(episode["path"])}')
else:
logging.debug('BAZARR downloading missing subtitles for this episode: {}'.format(path_mappings.path_replace(
episode['path'])))
logging.debug(
f'BAZARR downloading missing subtitles for this episode: {path_mappings.path_replace(episode["path"])}')
episode_download_subtitles(episode_id)

@ -18,7 +18,7 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
for image in show['images']:
if image['coverType'] == 'poster':
poster_big = image['url'].split('?')[0]
poster = os.path.splitext(poster_big)[0] + '-250' + os.path.splitext(poster_big)[1]
poster = f'{os.path.splitext(poster_big)[0]}-250{os.path.splitext(poster_big)[1]}'
if image['coverType'] == 'fanart':
fanart = image['url'].split('?')[0]
@ -32,7 +32,7 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
imdbId = show['imdbId'] if 'imdbId' in show else None
audio_language = []
if not settings.general.getboolean('parse_embedded_audio_track'):
if not settings.general.parse_embedded_audio_track:
if get_sonarr_info.is_legacy():
audio_language = profile_id_to_language(show['qualityProfileId'], audio_profiles)
else:
@ -98,7 +98,7 @@ def episodeParser(episode):
else:
sceneName = None
if settings.general.getboolean('parse_embedded_audio_track'):
if settings.general.parse_embedded_audio_track:
audio_language = embedded_audio_reader(path_mappings.path_replace(episode['episodeFile']
['path']),
file_size=episode['episodeFile']['size'],
@ -144,7 +144,7 @@ def episodeParser(episode):
except Exception:
video_format = episode['episodeFile']['quality']['quality']['name']
try:
video_resolution = str(episode['episodeFile']['quality']['quality']['resolution']) + 'p'
video_resolution = f'{episode["episodeFile"]["quality"]["quality"]["resolution"]}p'
except Exception:
video_resolution = None
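This hunk also shows the first of many `settings.<section>.getboolean('key')` to `settings.<section>.key` conversions in this merge, which presume the settings backend now returns typed values instead of configparser strings. A hypothetical before/after, with a stand-in Section class (not Bazarr's actual settings object):

```python
# Illustrative only: configparser needs getboolean() because every stored
# value is a string; a typed settings object makes attribute access enough.
from configparser import ConfigParser

cp = ConfigParser()
cp.read_string("[general]\nparse_embedded_audio_track = false\n")
old_style = cp.getboolean('general', 'parse_embedded_audio_track')  # -> False

class Section:
    """Stand-in for the new typed settings section."""
    def __init__(self, **values):
        self.__dict__.update(values)

general = Section(parse_embedded_audio_track=False)
new_style = general.parse_embedded_audio_track  # -> False, no conversion call

assert old_style == new_style
```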

@ -8,7 +8,7 @@ from app.config import settings
from sonarr.info import url_sonarr
from subtitles.indexer.series import list_missing_subtitles
from sonarr.rootfolder import check_sonarr_rootfolder
from app.database import TableShows, database, insert, update, delete, select
from app.database import TableShows, TableLanguagesProfiles, database, insert, update, delete, select
from utilities.path_mappings import path_mappings
from app.event_handler import event_stream, show_progress, hide_progress
@ -23,7 +23,7 @@ def update_series(send_event=True):
if apikey_sonarr is None:
return
serie_default_enabled = settings.general.getboolean('serie_default_enabled')
serie_default_enabled = settings.general.serie_default_enabled
if serie_default_enabled is True:
serie_default_profile = settings.general.serie_default_profile
@ -32,11 +32,18 @@ def update_series(send_event=True):
else:
serie_default_profile = None
# Prevent trying to insert a series with a non-existing languages profileId
if (serie_default_profile and not database.execute(
select(TableLanguagesProfiles)
.where(TableLanguagesProfiles.profileId == serie_default_profile))
.first()):
serie_default_profile = None
audio_profiles = get_profile_list()
tagsDict = get_tags()
# Get shows data from Sonarr
series = get_series_from_sonarr_api(url=url_sonarr(), apikey_sonarr=apikey_sonarr)
series = get_series_from_sonarr_api(apikey_sonarr=apikey_sonarr)
if not isinstance(series, list):
return
else:
@ -117,7 +124,7 @@ def update_series(send_event=True):
def update_one_series(series_id, action):
logging.debug('BAZARR syncing this specific series from Sonarr: {}'.format(series_id))
logging.debug(f'BAZARR syncing this specific series from Sonarr: {series_id}')
# Check if there's a row in database for this series ID
existing_series = database.execute(
@ -134,7 +141,7 @@ def update_one_series(series_id, action):
event_stream(type='series', action='delete', payload=int(series_id))
return
serie_default_enabled = settings.general.getboolean('serie_default_enabled')
serie_default_enabled = settings.general.serie_default_enabled
if serie_default_enabled is True:
serie_default_profile = settings.general.serie_default_profile
@ -150,8 +157,7 @@ def update_one_series(series_id, action):
# Get series data from sonarr api
series = None
series_data = get_series_from_sonarr_api(url=url_sonarr(), apikey_sonarr=settings.sonarr.apikey,
sonarr_series_id=int(series_id))
series_data = get_series_from_sonarr_api(apikey_sonarr=settings.sonarr.apikey, sonarr_series_id=int(series_id))
if not series_data:
return
@ -180,8 +186,7 @@ def update_one_series(series_id, action):
else:
sync_episodes(series_id=int(series_id), send_event=False)
event_stream(type='series', action='update', payload=int(series_id))
logging.debug('BAZARR updated this series into the database:{}'.format(path_mappings.path_replace(
series['path'])))
logging.debug(f'BAZARR updated this series into the database:{path_mappings.path_replace(series["path"])}')
# Insert new series in DB
elif action == 'updated' and not existing_series:
@ -193,5 +198,4 @@ def update_one_series(series_id, action):
logging.error(f"BAZARR cannot insert series {series['path']} because of {e}")
else:
event_stream(type='series', action='update', payload=int(series_id))
logging.debug('BAZARR inserted this series into the database:{}'.format(path_mappings.path_replace(
series['path'])))
logging.debug(f'BAZARR inserted this series into the database:{path_mappings.path_replace(series["path"])}')
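The guard added to update_series above keeps a deleted languages profile from being referenced by new series rows. The same existence check as a sketch, with the session and model passed in explicitly (a hypothetical helper, not part of the diff):

```python
# Hypothetical helper mirroring the guard added in update_series above.
from sqlalchemy import select

def resolve_default_profile(database, TableLanguagesProfiles, configured_id):
    """Return the configured profileId only if it still exists."""
    if configured_id and not database.execute(
            select(TableLanguagesProfiles)
            .where(TableLanguagesProfiles.profileId == configured_id)).first():
        return None  # profile was removed; fall back to no default
    return configured_id
```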

@ -4,7 +4,7 @@ import requests
import logging
from app.config import settings
from sonarr.info import get_sonarr_info, url_sonarr
from sonarr.info import get_sonarr_info, url_api_sonarr
from constants import headers
@ -14,15 +14,16 @@ def get_profile_list():
# Get profiles data from Sonarr
if get_sonarr_info.is_legacy():
url_sonarr_api_series = url_sonarr() + "/api/profile?apikey=" + apikey_sonarr
url_sonarr_api_series = f"{url_api_sonarr()}profile?apikey={apikey_sonarr}"
else:
if not get_sonarr_info.version().startswith('3.'):
            # return an empty list when using Sonarr >= v4, which no longer supports series language profiles
return profiles_list
url_sonarr_api_series = url_sonarr() + "/api/v3/languageprofile?apikey=" + apikey_sonarr
url_sonarr_api_series = f"{url_api_sonarr()}languageprofile?apikey={apikey_sonarr}"
try:
profiles_json = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
profiles_json = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers)
except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get profiles from Sonarr. Connection Error.")
return None
@ -49,10 +50,7 @@ def get_tags():
tagsDict = []
# Get tags data from Sonarr
if get_sonarr_info.is_legacy():
url_sonarr_api_series = url_sonarr() + "/api/tag?apikey=" + apikey_sonarr
else:
url_sonarr_api_series = url_sonarr() + "/api/v3/tag?apikey=" + apikey_sonarr
url_sonarr_api_series = f"{url_api_sonarr()}tag?apikey={apikey_sonarr}"
try:
tagsDict = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
@ -69,9 +67,9 @@ def get_tags():
return tagsDict.json()
def get_series_from_sonarr_api(url, apikey_sonarr, sonarr_series_id=None):
url_sonarr_api_series = url + "/api/{0}series/{1}?apikey={2}".format(
'' if get_sonarr_info.is_legacy() else 'v3/', sonarr_series_id if sonarr_series_id else "", apikey_sonarr)
def get_series_from_sonarr_api(apikey_sonarr, sonarr_series_id=None):
url_sonarr_api_series = (f"{url_api_sonarr()}series/{sonarr_series_id if sonarr_series_id else ''}?"
f"apikey={apikey_sonarr}")
try:
r = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
r.raise_for_status()
@ -89,21 +87,25 @@ def get_series_from_sonarr_api(url, apikey_sonarr, sonarr_series_id=None):
except requests.exceptions.RequestException:
logging.exception("BAZARR Error trying to get series from Sonarr.")
return
except Exception as e:
logging.exception(f"Exception raised while getting series from Sonarr API: {e}")
return
else:
result = r.json()
if isinstance(result, dict):
return [result]
if r.status_code == 200:
result = r.json()
if isinstance(result, dict):
return [result]
else:
return r.json()
else:
return r.json()
return
def get_episodes_from_sonarr_api(url, apikey_sonarr, series_id=None, episode_id=None):
def get_episodes_from_sonarr_api(apikey_sonarr, series_id=None, episode_id=None):
if series_id:
url_sonarr_api_episode = url + "/api/{0}episode?seriesId={1}&apikey={2}".format(
'' if get_sonarr_info.is_legacy() else 'v3/', series_id, apikey_sonarr)
url_sonarr_api_episode = f"{url_api_sonarr()}episode?seriesId={series_id}&apikey={apikey_sonarr}"
elif episode_id:
url_sonarr_api_episode = url + "/api/{0}episode/{1}?apikey={2}".format(
'' if get_sonarr_info.is_legacy() else 'v3/', episode_id, apikey_sonarr)
url_sonarr_api_episode = f"{url_api_sonarr()}episode/{episode_id}?apikey={apikey_sonarr}"
else:
return
@ -122,21 +124,27 @@ def get_episodes_from_sonarr_api(url, apikey_sonarr, series_id=None, episode_id=
except requests.exceptions.RequestException:
logging.exception("BAZARR Error trying to get episodes from Sonarr.")
return
except Exception as e:
logging.exception(f"Exception raised while getting episodes from Sonarr API: {e}")
return
else:
return r.json()
if r.status_code == 200:
return r.json()
else:
return
def get_episodesFiles_from_sonarr_api(url, apikey_sonarr, series_id=None, episode_file_id=None):
def get_episodesFiles_from_sonarr_api(apikey_sonarr, series_id=None, episode_file_id=None):
if series_id:
url_sonarr_api_episodeFiles = url + "/api/v3/episodeFile?seriesId={0}&apikey={1}".format(series_id,
apikey_sonarr)
url_sonarr_api_episodeFiles = f"{url_api_sonarr()}episodeFile?seriesId={series_id}&apikey={apikey_sonarr}"
elif episode_file_id:
url_sonarr_api_episodeFiles = url + "/api/v3/episodeFile/{0}?apikey={1}".format(episode_file_id, apikey_sonarr)
url_sonarr_api_episodeFiles = f"{url_api_sonarr()}episodeFile/{episode_file_id}?apikey={apikey_sonarr}"
else:
return
try:
r = requests.get(url_sonarr_api_episodeFiles, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
r = requests.get(url_sonarr_api_episodeFiles, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get episodeFiles from Sonarr. Http error.")
@ -150,5 +158,11 @@ def get_episodesFiles_from_sonarr_api(url, apikey_sonarr, series_id=None, episod
except requests.exceptions.RequestException:
logging.exception("BAZARR Error trying to get episodeFiles from Sonarr.")
return
except Exception as e:
logging.exception(f"Exception raised while getting episodes from Sonarr API: {e}")
return
else:
return r.json()
if r.status_code == 200:
return r.json()
else:
return
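url_api_sonarr() replaces the repeated legacy-vs-v3 URL assembly in this file, but its body isn't part of this diff. Judging from the call sites and the removed code (`'' if get_sonarr_info.is_legacy() else 'v3/'`), it presumably returns the versioned API base with a trailing slash; a hypothetical reconstruction:

```python
# Hypothetical reconstruction, based only on the call sites in this diff.
from sonarr.info import get_sonarr_info, url_sonarr

def url_api_sonarr():
    """Return the Sonarr API base URL, ending with a slash."""
    version = '' if get_sonarr_info.is_legacy() else 'v3/'
    return f"{url_sonarr()}/api/{version}"
```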

@ -23,7 +23,7 @@ def is_search_active(desired_language, attempt_string):
@rtype: bool
"""
if settings.general.getboolean('adaptive_searching'):
if settings.general.adaptive_searching:
logging.debug("Adaptive searching is enable, we'll see if it's time to search again...")
try:
# let's try to get a list of lists from the string representation in database

@ -12,7 +12,7 @@ from subliminal_patch.core import save_subtitles
from subliminal_patch.core_persistent import download_best_subtitles
from subliminal_patch.score import ComputeScore
from app.config import settings, get_array_from, get_scores
from app.config import settings, get_scores, get_array_from
from app.database import TableEpisodes, TableMovies, database, select
from utilities.path_mappings import path_mappings
from utilities.helper import get_target_folder, force_unicode
@ -29,9 +29,9 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
if not languages:
return None
logging.debug('BAZARR Searching subtitles for this file: ' + path)
logging.debug(f'BAZARR Searching subtitles for this file: {path}')
if settings.general.getboolean('utf8_encode'):
if settings.general.utf8_encode:
os.environ["SZ_KEEP_ENCODING"] = ""
else:
os.environ["SZ_KEEP_ENCODING"] = "True"
@ -40,7 +40,7 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
providers = pool.providers
language_set = _get_language_obj(languages=languages)
hi_required = any([x.hi for x in language_set])
hi_required = "force HI" if any([x.hi for x in language_set]) else False
also_forced = any([x.forced for x in language_set])
forced_required = all([x.forced for x in language_set])
_set_forced_providers(pool=pool, also_forced=also_forced, forced_required=forced_required)
@ -86,9 +86,9 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
try:
fld = get_target_folder(path)
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
'win') and settings.general.chmod_enabled else None
saved_subtitles = save_subtitles(video.original_path, subtitles,
single=settings.general.getboolean('single_language'),
single=settings.general.single_language,
tags=None, # fixme
directory=fld,
chmod=chmod,
@ -97,7 +97,7 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
)
except Exception as e:
logging.exception(
'BAZARR Error saving Subtitles file to disk for this file:' + path + ': ' + repr(e))
f'BAZARR Error saving Subtitles file to disk for this file {path}: {repr(e)}')
pass
else:
saved_any = True
@ -115,12 +115,12 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
return None
if not saved_any:
logging.debug('BAZARR No Subtitles were found for this file: ' + path)
logging.debug(f'BAZARR No Subtitles were found for this file: {path}')
return None
subliminal.region.backend.sync()
logging.debug('BAZARR Ended searching Subtitles for file: ' + path)
logging.debug(f'BAZARR Ended searching Subtitles for file: {path}')
def _get_language_obj(languages):
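Unrelated to the f-string cleanup, the chmod handling above relies on the setting being an octal string. A worked example of the conversion (the value "0640" is only an example):

```python
# int(value, 8) turns the octal string from settings into a numeric mode.
import sys

chmod_setting = "0640"  # example value; Bazarr reads settings.general.chmod
chmod = int(chmod_setting, 8) if not sys.platform.startswith('win') else None
if chmod is not None:
    assert chmod == 0o640  # suitable for os.chmod / save_subtitles(chmod=...)
```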

@ -22,10 +22,10 @@ gc.enable()
def store_subtitles_movie(original_path, reversed_path, use_cache=True):
logging.debug('BAZARR started subtitles indexing for this file: ' + reversed_path)
logging.debug(f'BAZARR started subtitles indexing for this file: {reversed_path}')
actual_subtitles = []
if os.path.exists(reversed_path):
if settings.general.getboolean('use_embedded_subs'):
if settings.general.use_embedded_subs:
logging.debug("BAZARR is trying to index embedded subtitles.")
item = database.execute(
select(TableMovies.movie_file_id, TableMovies.file_size)
@ -41,10 +41,10 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
use_cache=use_cache)
for subtitle_language, subtitle_forced, subtitle_hi, subtitle_codec in subtitle_languages:
try:
if (settings.general.getboolean("ignore_pgs_subs") and subtitle_codec.lower() == "pgs") or \
(settings.general.getboolean("ignore_vobsub_subs") and subtitle_codec.lower() ==
if (settings.general.ignore_pgs_subs and subtitle_codec.lower() == "pgs") or \
(settings.general.ignore_vobsub_subs and subtitle_codec.lower() ==
"vobsub") or \
(settings.general.getboolean("ignore_ass_subs") and subtitle_codec.lower() ==
(settings.general.ignore_ass_subs and subtitle_codec.lower() ==
"ass"):
logging.debug("BAZARR skipping %s sub for language: %s" % (subtitle_codec, alpha2_from_alpha3(subtitle_language)))
continue
@ -52,18 +52,18 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
if alpha2_from_alpha3(subtitle_language) is not None:
lang = str(alpha2_from_alpha3(subtitle_language))
if subtitle_forced:
lang = lang + ':forced'
lang = f'{lang}:forced'
if subtitle_hi:
lang = lang + ':hi'
logging.debug("BAZARR embedded subtitles detected: " + lang)
lang = f'{lang}:hi'
logging.debug(f"BAZARR embedded subtitles detected: {lang}")
actual_subtitles.append([lang, None, None])
except Exception as error:
logging.debug("BAZARR unable to index this unrecognized language: %s (%s)",
subtitle_language, error)
logging.debug(f"BAZARR unable to index this unrecognized language: {subtitle_language} "
f"({error})")
except Exception:
logging.exception(
"BAZARR error when trying to analyze this %s file: %s" % (os.path.splitext(reversed_path)[1],
reversed_path))
f"BAZARR error when trying to analyze this {os.path.splitext(reversed_path)[1]} file: "
f"{reversed_path}")
try:
dest_folder = get_subtitle_destination_folder()
@ -85,7 +85,7 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
os.stat(path_mappings.path_replace(x[1])).st_size == x[2]]
subtitles = search_external_subtitles(reversed_path, languages=get_language_set(),
only_one=settings.general.getboolean('single_language'))
only_one=settings.general.single_language)
full_dest_folder_path = os.path.dirname(reversed_path)
if dest_folder:
if settings.general.subfolder == "absolute":
@ -119,12 +119,12 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
elif str(language.basename) != 'und':
if language.forced:
language_str = str(language)
language_str = f'{language}:forced'
elif language.hi:
language_str = str(language) + ':hi'
language_str = f'{language}:hi'
else:
language_str = str(language)
logging.debug("BAZARR external subtitles detected: " + language_str)
logging.debug(f"BAZARR external subtitles detected: {language_str}")
actual_subtitles.append([language_str, path_mappings.path_replace_reverse_movie(subtitle_path),
os.stat(subtitle_path).st_size])
@ -139,14 +139,14 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
for movie in matching_movies:
if movie:
logging.debug("BAZARR storing those languages to DB: " + str(actual_subtitles))
logging.debug(f"BAZARR storing those languages to DB: {actual_subtitles}")
list_missing_subtitles_movies(no=movie.radarrId)
else:
logging.debug("BAZARR haven't been able to update existing subtitles to DB : " + str(actual_subtitles))
logging.debug(f"BAZARR haven't been able to update existing subtitles to DB: {actual_subtitles}")
else:
logging.debug("BAZARR this file doesn't seems to exist or isn't accessible.")
logging.debug('BAZARR ended subtitles indexing for this file: ' + reversed_path)
logging.debug(f'BAZARR ended subtitles indexing for this file: {reversed_path}')
return actual_subtitles
@ -168,7 +168,7 @@ def list_missing_subtitles_movies(no=None, send_event=True):
TableMovies.audio_language)) \
.all()
use_embedded_subs = settings.general.getboolean('use_embedded_subs')
use_embedded_subs = settings.general.use_embedded_subs
for movie_subtitles in movies_subtitles:
missing_subtitles_text = '[]'
@ -264,7 +264,7 @@ def list_missing_subtitles_movies(no=None, send_event=True):
event_stream(type='badges')
def movies_full_scan_subtitles(use_cache=settings.radarr.getboolean('use_ffprobe_cache')):
def movies_full_scan_subtitles(use_cache=settings.radarr.use_ffprobe_cache):
movies = database.execute(
select(TableMovies.path))\
.all()
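The indexer hunks above build `code:forced` / `code:hi` strings in several branches. For illustration only (Bazarr inlines this logic rather than using a helper), the convention is:

```python
# Illustration of the language tag convention; not a helper from the diff.
def tag_language(code2, forced=False, hi=False):
    if forced:
        return f'{code2}:forced'
    if hi:
        return f'{code2}:hi'
    return code2

assert tag_language('en', forced=True) == 'en:forced'
assert tag_language('fr', hi=True) == 'fr:hi'
assert tag_language('de') == 'de'
```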

@ -22,10 +22,10 @@ gc.enable()
def store_subtitles(original_path, reversed_path, use_cache=True):
logging.debug('BAZARR started subtitles indexing for this file: ' + reversed_path)
logging.debug(f'BAZARR started subtitles indexing for this file: {reversed_path}')
actual_subtitles = []
if os.path.exists(reversed_path):
if settings.general.getboolean('use_embedded_subs'):
if settings.general.use_embedded_subs:
logging.debug("BAZARR is trying to index embedded subtitles.")
item = database.execute(
select(TableEpisodes.episode_file_id, TableEpisodes.file_size)
@ -41,10 +41,10 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
use_cache=use_cache)
for subtitle_language, subtitle_forced, subtitle_hi, subtitle_codec in subtitle_languages:
try:
if (settings.general.getboolean("ignore_pgs_subs") and subtitle_codec.lower() == "pgs") or \
(settings.general.getboolean("ignore_vobsub_subs") and subtitle_codec.lower() ==
if (settings.general.ignore_pgs_subs and subtitle_codec.lower() == "pgs") or \
(settings.general.ignore_vobsub_subs and subtitle_codec.lower() ==
"vobsub") or \
(settings.general.getboolean("ignore_ass_subs") and subtitle_codec.lower() ==
(settings.general.ignore_ass_subs and subtitle_codec.lower() ==
"ass"):
logging.debug("BAZARR skipping %s sub for language: %s" % (subtitle_codec, alpha2_from_alpha3(subtitle_language)))
continue
@ -52,10 +52,10 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
if alpha2_from_alpha3(subtitle_language) is not None:
lang = str(alpha2_from_alpha3(subtitle_language))
if subtitle_forced:
lang = lang + ":forced"
lang = f"{lang}:forced"
if subtitle_hi:
lang = lang + ":hi"
logging.debug("BAZARR embedded subtitles detected: " + lang)
lang = f"{lang}:hi"
logging.debug(f"BAZARR embedded subtitles detected: {lang}")
actual_subtitles.append([lang, None, None])
except Exception as error:
logging.debug("BAZARR unable to index this unrecognized language: %s (%s)", subtitle_language, error)
@ -84,7 +84,7 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
os.stat(path_mappings.path_replace(x[1])).st_size == x[2]]
subtitles = search_external_subtitles(reversed_path, languages=get_language_set(),
only_one=settings.general.getboolean('single_language'))
only_one=settings.general.single_language)
full_dest_folder_path = os.path.dirname(reversed_path)
if dest_folder:
if settings.general.subfolder == "absolute":
@ -118,12 +118,12 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
elif str(language.basename) != 'und':
if language.forced:
language_str = str(language)
language_str = f'{language}:forced'
elif language.hi:
language_str = str(language) + ':hi'
language_str = f'{language}:hi'
else:
language_str = str(language)
logging.debug("BAZARR external subtitles detected: " + language_str)
logging.debug(f"BAZARR external subtitles detected: {language_str}")
actual_subtitles.append([language_str, path_mappings.path_replace_reverse(subtitle_path),
os.stat(subtitle_path).st_size])
@ -138,14 +138,14 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
for episode in matching_episodes:
if episode:
logging.debug("BAZARR storing those languages to DB: " + str(actual_subtitles))
logging.debug(f"BAZARR storing those languages to DB: {actual_subtitles}")
list_missing_subtitles(epno=episode.sonarrEpisodeId)
else:
logging.debug("BAZARR haven't been able to update existing subtitles to DB : " + str(actual_subtitles))
logging.debug(f"BAZARR haven't been able to update existing subtitles to DB: {actual_subtitles}")
else:
logging.debug("BAZARR this file doesn't seems to exist or isn't accessible.")
logging.debug('BAZARR ended subtitles indexing for this file: ' + reversed_path)
logging.debug(f'BAZARR ended subtitles indexing for this file: {reversed_path}')
return actual_subtitles
@ -168,7 +168,7 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
.where(episodes_subtitles_clause))\
.all()
use_embedded_subs = settings.general.getboolean('use_embedded_subs')
use_embedded_subs = settings.general.use_embedded_subs
for episode_subtitles in episodes_subtitles:
missing_subtitles_text = '[]'
@ -266,7 +266,7 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
event_stream(type='badges')
def series_full_scan_subtitles(use_cache=settings.sonarr.getboolean('use_ffprobe_cache')):
def series_full_scan_subtitles(use_cache=settings.sonarr.use_ffprobe_cache):
episodes = database.execute(
select(TableEpisodes.path))\
.all()

@ -69,8 +69,8 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
            # to improve performance, skip detection of files larger than 1M
if os.path.getsize(subtitle_path) > 1 * 1024 * 1024:
logging.debug("BAZARR subtitles file is too large to be text based. Skipping this file: " +
subtitle_path)
logging.debug(f"BAZARR subtitles file is too large to be text based. Skipping this file: "
f"{subtitle_path}")
continue
with open(subtitle_path, 'rb') as f:
@ -80,8 +80,8 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
if encoding and 'encoding' in encoding and encoding['encoding']:
encoding = detect(text)['encoding']
else:
logging.debug("BAZARR skipping this subtitles because we can't guess the encoding. "
"It's probably a binary file: " + subtitle_path)
logging.debug(f"BAZARR skipping this subtitles because we can't guess the encoding. "
f"It's probably a binary file: {subtitle_path}")
continue
text = text.decode(encoding)
@ -97,8 +97,7 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
detected_language = 'zt'
if detected_language:
logging.debug("BAZARR external subtitles detected and guessed this language: " + str(
detected_language))
logging.debug(f"BAZARR external subtitles detected and guessed this language: {detected_language}")
try:
subtitles[subtitle] = Language.rebuild(Language.fromietf(detected_language), forced=forced,
hi=False)
@ -121,8 +120,8 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
if os.path.exists(subtitle_path) and os.path.splitext(subtitle_path)[1] in core.SUBTITLE_EXTENSIONS:
            # to improve performance, skip detection of files larger than 1M
if os.path.getsize(subtitle_path) > 1 * 1024 * 1024:
logging.debug("BAZARR subtitles file is too large to be text based. Skipping this file: " +
subtitle_path)
logging.debug(f"BAZARR subtitles file is too large to be text based. Skipping this file: "
f"{subtitle_path}")
continue
with open(subtitle_path, 'rb') as f:
@ -132,8 +131,8 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
if encoding and 'encoding' in encoding and encoding['encoding']:
encoding = detect(text)['encoding']
else:
logging.debug("BAZARR skipping this subtitles because we can't guess the encoding. "
"It's probably a binary file: " + subtitle_path)
logging.debug(f"BAZARR skipping this subtitles because we can't guess the encoding. "
f"It's probably a binary file: {subtitle_path}")
continue
text = text.decode(encoding)
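Both hunks above share the same guard: skip files over 1M and refuse to decode anything whose encoding can't be guessed. A compact sketch assuming a chardet-style detect(bytes) -> {'encoding': ...} interface, which matches the calls shown:

```python
# Sketch of the guard pattern above: skip big files, detect the encoding of
# the raw bytes, and bail out when the file looks binary.
import os
from chardet import detect  # any chardet-compatible detect() works here

MAX_TEXT_SIZE = 1 * 1024 * 1024  # files above 1M are assumed not text-based

def read_subtitle_text(subtitle_path):
    if os.path.getsize(subtitle_path) > MAX_TEXT_SIZE:
        return None  # too large to be a text subtitle
    with open(subtitle_path, 'rb') as f:
        raw = f.read()
    guess = detect(raw)
    if not guess or not guess.get('encoding'):
        return None  # probably a binary file
    return raw.decode(guess['encoding'])
```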

@ -25,7 +25,7 @@ from .processing import process_subtitle
@update_pools
def manual_search(path, profile_id, providers, sceneName, title, media_type):
logging.debug('BAZARR Manually searching subtitles for this file: ' + path)
logging.debug(f'BAZARR Manually searching subtitles for this file: {path}')
final_subtitles = []
@ -64,7 +64,7 @@ def manual_search(path, profile_id, providers, sceneName, title, media_type):
logging.info("BAZARR All providers are throttled")
return 'All providers are throttled'
except Exception:
logging.exception("BAZARR Error trying to get Subtitle list from provider for this file: " + path)
logging.exception(f"BAZARR Error trying to get Subtitle list from provider for this file: {path}")
else:
subtitles_list = []
minimum_score = settings.general.minimum_score
@ -145,8 +145,8 @@ def manual_search(path, profile_id, providers, sceneName, title, media_type):
final_subtitles = sorted(subtitles_list, key=lambda x: (x['orig_score'], x['score_without_hash']),
reverse=True)
logging.debug('BAZARR ' + str(len(final_subtitles)) + " Subtitles have been found for this file: " + path)
logging.debug('BAZARR Ended searching Subtitles for this file: ' + path)
logging.debug(f'BAZARR {len(final_subtitles)} Subtitles have been found for this file: {path}')
logging.debug(f'BAZARR Ended searching Subtitles for this file: {path}')
subliminal.region.backend.sync()
@ -156,9 +156,9 @@ def manual_search(path, profile_id, providers, sceneName, title, media_type):
@update_pools
def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provider, sceneName, title, media_type,
use_original_format, profile_id):
logging.debug('BAZARR Manually downloading Subtitles for this file: ' + path)
logging.debug(f'BAZARR Manually downloading Subtitles for this file: {path}')
if settings.general.getboolean('utf8_encode'):
if settings.general.utf8_encode:
os.environ["SZ_KEEP_ENCODING"] = ""
else:
os.environ["SZ_KEEP_ENCODING"] = "True"
@ -180,29 +180,29 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
try:
if provider:
download_subtitles([subtitle], _get_pool(media_type, profile_id))
logging.debug('BAZARR Subtitles file downloaded for this file:' + path)
logging.debug(f'BAZARR Subtitles file downloaded for this file: {path}')
else:
logging.info("BAZARR All providers are throttled")
return 'All providers are throttled'
except Exception:
logging.exception('BAZARR Error downloading Subtitles for this file ' + path)
logging.exception(f'BAZARR Error downloading Subtitles for this file {path}')
return 'Error downloading Subtitles'
else:
if not subtitle.is_valid():
logging.exception('BAZARR No valid Subtitles file found for this file: ' + path)
return 'No valid Subtitles file found'
logging.error(f"BAZARR Downloaded subtitles isn't valid for this file: {path}")
return "Downloaded subtitles isn't valid. Check log."
try:
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
'win') and settings.general.chmod_enabled else None
saved_subtitles = save_subtitles(video.original_path, [subtitle],
single=settings.general.getboolean('single_language'),
single=settings.general.single_language,
tags=None, # fixme
directory=get_target_folder(path),
chmod=chmod,
formats=(subtitle.format,),
path_decoder=force_unicode)
except Exception:
logging.exception('BAZARR Error saving Subtitles file to disk for this file:' + path)
logging.exception(f'BAZARR Error saving Subtitles file to disk for this file: {path}')
return 'Error saving Subtitles file to disk'
else:
if saved_subtitles:
@ -218,14 +218,14 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
continue
else:
logging.error(
"BAZARR Tried to manually download a Subtitles for file: " + path
+ " but we weren't able to do (probably throttled by " + str(subtitle.provider_name)
+ ". Please retry later or select a Subtitles from another provider.")
f"BAZARR Tried to manually download a Subtitles for file: {path} but we weren't able to do "
f"(probably throttled by {subtitle.provider_name}. Please retry later or select a Subtitles "
f"from another provider.")
return 'Something went wrong, check the logs for error'
subliminal.region.backend.sync()
logging.debug('BAZARR Ended manually downloading Subtitles for file: ' + path)
logging.debug(f'BAZARR Ended manually downloading Subtitles for file: {path}')
def _get_language_obj(profile_id):

@ -67,7 +67,7 @@ def movies_download_subtitles(no):
logging.info("BAZARR All providers are throttled")
break
show_progress(id='movie_search_progress_{}'.format(no),
show_progress(id=f'movie_search_progress_{no}',
header='Searching missing subtitles...',
name=movie.title,
value=0,
@ -88,4 +88,4 @@ def movies_download_subtitles(no):
history_log_movie(1, no, result)
send_notifications_movie(no, result.message)
hide_progress(id='movie_search_progress_{}'.format(no))
hide_progress(id=f'movie_search_progress_{no}')

@ -49,8 +49,8 @@ def series_download_subtitles(no):
.where(reduce(operator.and_, conditions))) \
.all()
if not episodes_details:
logging.debug("BAZARR no episode for that sonarrSeriesId have been found in database or they have all been "
"ignored because of monitored status, series type or series tags: {}".format(no))
logging.debug(f"BAZARR no episode for that sonarrSeriesId have been found in database or they have all been "
f"ignored because of monitored status, series type or series tags: {no}")
return
count_episodes_details = len(episodes_details)
@ -59,12 +59,9 @@ def series_download_subtitles(no):
providers_list = get_providers()
if providers_list:
show_progress(id='series_search_progress_{}'.format(no),
show_progress(id=f'series_search_progress_{no}',
header='Searching missing subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode.title,
episode.season,
episode.episode,
episode.episodeTitle),
name=f'{episode.title} - S{episode.season:02d}E{episode.episode:02d} - {episode.episodeTitle}',
value=i,
count=count_episodes_details)
@ -101,7 +98,7 @@ def series_download_subtitles(no):
logging.info("BAZARR All providers are throttled")
break
hide_progress(id='series_search_progress_{}'.format(no))
hide_progress(id=f'series_search_progress_{no}')
def episode_download_subtitles(no, send_progress=False):
@ -134,12 +131,9 @@ def episode_download_subtitles(no, send_progress=False):
if providers_list:
if send_progress:
show_progress(id='episode_search_progress_{}'.format(no),
show_progress(id=f'episode_search_progress_{no}',
header='Searching missing subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode.title,
episode.season,
episode.episode,
episode.episodeTitle),
name=f'{episode.title} - S{episode.season:02d}E{episode.episode:02d} - {episode.episodeTitle}',
value=0,
count=1)
@ -174,7 +168,7 @@ def episode_download_subtitles(no, send_progress=False):
send_notifications(episode.sonarrSeriesId, episode.sonarrEpisodeId, result.message)
if send_progress:
hide_progress(id='episode_search_progress_{}'.format(no))
hide_progress(id=f'episode_search_progress_{no}')
else:
logging.info("BAZARR All providers are throttled")
break

@ -26,13 +26,13 @@ def postprocessing(command, path):
out = out.replace('\n', ' ').replace('\r', ' ')
except Exception as e:
logging.error('BAZARR Post-processing failed for file ' + path + ' : ' + repr(e))
logging.error(f'BAZARR Post-processing failed for file {path}: {repr(e)}')
else:
if err:
logging.error(
'BAZARR Post-processing result for file ' + path + ' : ' + err.replace('\n', ' ').replace('\r', ' '))
parsed_err = err.replace('\n', ' ').replace('\r', ' ')
logging.error(f'BAZARR Post-processing result for file {path}: {parsed_err}')
elif out == "":
logging.info(
'BAZARR Post-processing result for file ' + path + ' : Nothing returned from command execution')
f'BAZARR Post-processing result for file {path}: Nothing returned from command execution')
else:
logging.info('BAZARR Post-processing result for file ' + path + ' : ' + out)
logging.info(f'BAZARR Post-processing result for file {path}: {out}')

@ -31,15 +31,15 @@ class ProcessSubtitlesResult:
self.not_matched = not_matched
if hearing_impaired:
self.language_code = downloaded_language_code2 + ":hi"
self.language_code = f"{downloaded_language_code2}:hi"
elif forced:
self.language_code = downloaded_language_code2 + ":forced"
self.language_code = f"{downloaded_language_code2}:forced"
else:
self.language_code = downloaded_language_code2
def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_upgrade=False, is_manual=False):
use_postprocessing = settings.general.getboolean('use_postprocessing')
use_postprocessing = settings.general.use_postprocessing
postprocessing_cmd = settings.general.postprocessing_cmd
downloaded_provider = subtitle.provider_name
@ -57,7 +57,7 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
modifier_string = " forced"
else:
modifier_string = ""
logging.debug('BAZARR Subtitles file saved to disk: ' + downloaded_path)
logging.debug(f'BAZARR Subtitles file saved to disk: {downloaded_path}')
if is_upgrade:
action = "upgraded"
elif is_manual:
@ -66,8 +66,8 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
action = "downloaded"
percent_score = round(subtitle.score * 100 / max_score, 2)
message = downloaded_language + modifier_string + " subtitles " + action + " from " + \
downloaded_provider + " with a score of " + str(percent_score) + "%."
message = (f"{downloaded_language}{modifier_string} subtitles {action} from {downloaded_provider} with a score of "
f"{percent_score}%.")
if media_type == 'series':
episode_metadata = database.execute(
@ -109,19 +109,19 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
percent_score, subtitle_id, downloaded_provider, series_id, episode_id)
if media_type == 'series':
use_pp_threshold = settings.general.getboolean('use_postprocessing_threshold')
use_pp_threshold = settings.general.use_postprocessing_threshold
pp_threshold = int(settings.general.postprocessing_threshold)
else:
use_pp_threshold = settings.general.getboolean('use_postprocessing_threshold_movie')
use_pp_threshold = settings.general.use_postprocessing_threshold_movie
pp_threshold = int(settings.general.postprocessing_threshold_movie)
if not use_pp_threshold or (use_pp_threshold and percent_score < pp_threshold):
logging.debug("BAZARR Using post-processing command: {}".format(command))
logging.debug(f"BAZARR Using post-processing command: {command}")
postprocessing(command, path)
set_chmod(subtitles_path=downloaded_path)
else:
logging.debug("BAZARR post-processing skipped because subtitles score isn't below this "
"threshold value: " + str(pp_threshold) + "%")
logging.debug(f"BAZARR post-processing skipped because subtitles score isn't below this "
f"threshold value: {pp_threshold}%")
if media_type == 'series':
reversed_path = path_mappings.path_replace_reverse(path)
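The threshold condition above, `not use_pp_threshold or (use_pp_threshold and percent_score < pp_threshold)`, reduces to "run unless thresholding is enabled and the score already meets the threshold" (the second `use_pp_threshold` is redundant under `or`). Worked out:

```python
# Equivalent predicate for the condition above.
def should_postprocess(use_threshold, percent_score, threshold):
    return not use_threshold or percent_score < threshold

assert should_postprocess(False, 99.0, 90) is True   # thresholding disabled
assert should_postprocess(True, 85.5, 90) is True    # 85.5% < 90%: still improvable
assert should_postprocess(True, 95.0, 90) is False   # score good enough, skip
```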

@ -30,7 +30,9 @@ def refine_from_db(path, video):
TableEpisodes.video_codec,
TableEpisodes.audio_codec,
TableEpisodes.path,
TableShows.imdbId)
TableShows.imdbId,
TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId)
.select_from(TableEpisodes)
.join(TableShows)
.where((TableEpisodes.path == path_mappings.path_replace_reverse(path)))) \
@ -38,8 +40,10 @@ def refine_from_db(path, video):
if data:
video.series = _TITLE_RE.sub('', data.seriesTitle)
video.season = int(data.season)
video.episode = int(data.episode)
if not video.season and data.season:
video.season = int(data.season)
if not video.episode and data.episode:
video.episode = int(data.episode)
video.title = data.episodeTitle
# Only refine year as a fallback
@ -61,6 +65,9 @@ def refine_from_db(path, video):
if not video.audio_codec:
if data.audio_codec:
video.audio_codec = convert_to_guessit('audio_codec', data.audio_codec)
video.sonarrSeriesId = data.sonarrSeriesId
video.sonarrEpisodeId = data.sonarrEpisodeId
elif isinstance(video, Movie):
data = database.execute(
select(TableMovies.title,
@ -70,7 +77,8 @@ def refine_from_db(path, video):
TableMovies.resolution,
TableMovies.video_codec,
TableMovies.audio_codec,
TableMovies.imdbId)
TableMovies.imdbId,
TableMovies.radarrId)
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path))) \
.first()
@ -98,4 +106,6 @@ def refine_from_db(path, video):
if data.audio_codec:
video.audio_codec = convert_to_guessit('audio_codec', data.audio_codec)
video.radarrId = data.radarrId
return video
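The season/episode change above turns the database values into fallbacks: a value guessit parsed from the filename is kept, and the database only fills gaps. A sketch of the semantics:

```python
# Fallback semantics introduced above: keep the parsed value when present,
# otherwise fill from the database row.
def pick(parsed, from_db):
    return parsed if parsed else (int(from_db) if from_db else parsed)

assert pick(2, 1) == 2        # filename guess kept
assert pick(None, 1) == 1     # database fills the gap
assert pick(None, None) is None
```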

@ -33,7 +33,7 @@ def refine_from_ffprobe(path, video):
episode_file_id=file_id.episode_file_id)
if not data or ('ffprobe' not in data and 'mediainfo' not in data):
logging.debug("No cache available for this file: {}".format(path))
logging.debug(f"No cache available for this file: {path}")
return video
if data['ffprobe']:

@ -12,16 +12,16 @@ def sync_subtitles(video_path, srt_path, srt_lang, forced, media_type, percent_s
sonarr_episode_id=None, radarr_id=None):
if forced:
logging.debug('BAZARR cannot sync forced subtitles. Skipping sync routine.')
elif not settings.subsync.getboolean('use_subsync'):
elif not settings.subsync.use_subsync:
logging.debug('BAZARR automatic syncing is disabled in settings. Skipping sync routine.')
else:
logging.debug(f'BAZARR automatic syncing is enabled in settings. We\'ll try to sync this '
f'subtitles: {srt_path}.')
if media_type == 'series':
use_subsync_threshold = settings.subsync.getboolean('use_subsync_threshold')
use_subsync_threshold = settings.subsync.use_subsync_threshold
subsync_threshold = settings.subsync.subsync_threshold
else:
use_subsync_threshold = settings.subsync.getboolean('use_subsync_movie_threshold')
use_subsync_threshold = settings.subsync.use_subsync_movie_threshold
subsync_threshold = settings.subsync.subsync_movie_threshold
if not use_subsync_threshold or (use_subsync_threshold and percent_score < float(subsync_threshold)):
@ -32,6 +32,6 @@ def sync_subtitles(video_path, srt_path, srt_lang, forced, media_type, percent_s
gc.collect()
return True
else:
logging.debug("BAZARR subsync skipped because subtitles score isn't below this "
"threshold value: " + subsync_threshold + "%")
logging.debug(f"BAZARR subsync skipped because subtitles score isn't below this "
f"threshold value: {subsync_threshold}%")
return False

@ -36,7 +36,7 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
language_log += ':forced'
language_string += ' forced'
result = ProcessSubtitlesResult(message=language_string + " subtitles deleted from disk.",
result = ProcessSubtitlesResult(message=f"{language_string} subtitles deleted from disk.",
reversed_path=path_mappings.path_replace_reverse(media_path),
downloaded_language_code2=language_log,
downloaded_provider=None,
@ -50,7 +50,7 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
try:
os.remove(path_mappings.path_replace(subtitles_path))
except OSError:
logging.exception('BAZARR cannot delete subtitles file: ' + subtitles_path)
logging.exception(f'BAZARR cannot delete subtitles file: {subtitles_path}')
store_subtitles(path_mappings.path_replace_reverse(media_path), media_path)
return False
else:
@ -64,7 +64,7 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
try:
os.remove(path_mappings.path_replace_movie(subtitles_path))
except OSError:
logging.exception('BAZARR cannot delete subtitles file: ' + subtitles_path)
logging.exception(f'BAZARR cannot delete subtitles file: {subtitles_path}')
store_subtitles_movie(path_mappings.path_replace_reverse_movie(media_path), media_path)
return False
else:

@ -19,14 +19,14 @@ def subtitles_apply_mods(language, subtitle_path, mods, use_original_format, vid
lang_obj = Language(language)
else:
lang_obj = custom.subzero_language()
single = settings.general.getboolean('single_language')
single = settings.general.single_language
sub = Subtitle(lang_obj, mods=mods, original_format=use_original_format)
with open(subtitle_path, 'rb') as f:
sub.content = f.read()
if not sub.is_valid():
logging.exception('BAZARR Invalid subtitle file: ' + subtitle_path)
logging.exception(f'BAZARR Invalid subtitle file: {subtitle_path}')
return
if use_original_format:

@ -34,7 +34,7 @@ class SubSyncer:
radarr_id=None):
self.reference = video_path
self.srtin = srt_path
self.srtout = '{}.synced.srt'.format(os.path.splitext(self.srtin)[0])
self.srtout = f'{os.path.splitext(self.srtin)[0]}.synced.srt'
self.args = None
ffprobe_exe = get_binary('ffprobe')
@ -54,11 +54,11 @@ class SubSyncer:
self.ffmpeg_path = os.path.dirname(ffmpeg_exe)
unparsed_args = [self.reference, '-i', self.srtin, '-o', self.srtout, '--ffmpegpath', self.ffmpeg_path, '--vad',
self.vad, '--log-dir-path', self.log_dir_path]
if settings.subsync.getboolean('force_audio'):
if settings.subsync.force_audio:
unparsed_args.append('--no-fix-framerate')
unparsed_args.append('--reference-stream')
unparsed_args.append('a:0')
if settings.subsync.getboolean('debug'):
if settings.subsync.debug:
unparsed_args.append('--make-test-case')
parser = make_parser()
self.args = parser.parse_args(args=unparsed_args)
@ -68,22 +68,22 @@ class SubSyncer:
try:
result = run(self.args)
except Exception:
logging.exception('BAZARR an exception occurs during the synchronization process for this subtitles: '
'{0}'.format(self.srtin))
logging.exception(
                f'BAZARR an exception occurred during the synchronization process for this subtitles: {self.srtin}')
raise OSError
else:
if settings.subsync.getboolean('debug'):
if settings.subsync.debug:
return result
if os.path.isfile(self.srtout):
if not settings.subsync.getboolean('debug'):
if not settings.subsync.debug:
os.remove(self.srtin)
os.rename(self.srtout, self.srtin)
offset_seconds = result['offset_seconds'] or 0
framerate_scale_factor = result['framerate_scale_factor'] or 0
message = "{0} subtitles synchronization ended with an offset of {1} seconds and a framerate " \
"scale factor of {2}.".format(language_from_alpha2(srt_lang), offset_seconds,
"{:.2f}".format(framerate_scale_factor))
message = (f"{language_from_alpha2(srt_lang)} subtitles synchronization ended with an offset of "
f"{offset_seconds} seconds and a framerate scale factor of "
f"{f'{framerate_scale_factor:.2f}'}.")
result = ProcessSubtitlesResult(message=message,
reversed_path=path_mappings.path_replace_reverse(self.reference),
@ -101,6 +101,6 @@ class SubSyncer:
else:
history_log_movie(action=5, radarr_id=radarr_id, result=result)
else:
logging.error('BAZARR unable to sync subtitles: {0}'.format(self.srtin))
logging.error(f'BAZARR unable to sync subtitles: {self.srtin}')
return result
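SubSyncer drives ffsubsync through its Python entry points rather than a subprocess; the result keys used above ('offset_seconds', 'framerate_scale_factor') come from run(). A standalone sketch with placeholder file names (the import path is assumed from the calls shown, and the '--vad' value is illustrative):

```python
# Placeholder file names; the argument vector mirrors the one built above.
from ffsubsync.ffsubsync import make_parser, run

args = make_parser().parse_args([
    'video.mkv', '-i', 'input.srt', '-o', 'input.synced.srt',
    '--vad', 'subs_then_webrtc',
])
result = run(args)
print(result['offset_seconds'], result['framerate_scale_factor'])
```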

@ -31,7 +31,7 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
if hi:
lang_obj = Language.rebuild(lang_obj, hi=True)
logging.debug('BAZARR is translating in {0} this subtitles {1}'.format(lang_obj, source_srt_file))
    logging.debug(f'BAZARR is translating this subtitles {source_srt_file} into {lang_obj}')
max_characters = 5000
@ -46,7 +46,7 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
lines_list = [x.plaintext for x in subs]
joined_lines_str = '\n\n\n'.join(lines_list)
logging.debug('BAZARR splitting subtitles into {} characters blocks'.format(max_characters))
    logging.debug(f'BAZARR splitting subtitles into {max_characters}-character blocks')
lines_block_list = []
translated_lines_list = []
while len(joined_lines_str):
@ -60,7 +60,7 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
lines_block_list.append(new_partial_lines_str)
joined_lines_str = joined_lines_str.replace(new_partial_lines_str, '')
logging.debug('BAZARR is sending {} blocks to Google Translate'.format(len(lines_block_list)))
logging.debug(f'BAZARR is sending {len(lines_block_list)} blocks to Google Translate')
for block_str in lines_block_list:
try:
translated_partial_srt_text = GoogleTranslator(source='auto',
@ -74,7 +74,7 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
translated_partial_srt_list = translated_partial_srt_text.split('\n\n\n')
translated_lines_list += translated_partial_srt_list
logging.debug('BAZARR saving translated subtitles to {}'.format(dest_srt_file))
logging.debug(f'BAZARR saving translated subtitles to {dest_srt_file}')
for i, line in enumerate(subs):
try:
line.plaintext = translated_lines_list[i]
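The translation loop above feeds Google Translate blocks of at most max_characters, splitting the joined text on a '\n\n\n' sentinel. One way to realize that splitting as a pure function (a sketch; the real code consumes the joined string with str.replace instead):

```python
# Sketch of the block-splitting idea above.
def split_blocks(joined, max_characters=5000, sep='\n\n\n'):
    blocks = []
    while joined:
        if len(joined) <= max_characters:
            blocks.append(joined)
            break
        cut = joined[:max_characters].rfind(sep)
        cut = cut if cut != -1 else max_characters  # no sentinel found: hard cut
        blocks.append(joined[:cut])
        joined = joined[cut:].lstrip('\n')
    return blocks

lines = ['line one', 'line two', 'line three']
assert split_blocks('\n\n\n'.join(lines))[0] == '\n\n\n'.join(lines)
```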

@ -24,8 +24,8 @@ from .download import generate_subtitles
def upgrade_subtitles():
use_sonarr = settings.general.getboolean('use_sonarr')
use_radarr = settings.general.getboolean('use_radarr')
use_sonarr = settings.general.use_sonarr
use_radarr = settings.general.use_radarr
if use_sonarr:
episodes_to_upgrade = get_upgradable_episode_subtitles()
@ -87,10 +87,7 @@ def upgrade_subtitles():
show_progress(id='upgrade_episodes_progress',
header='Upgrading episodes subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode['seriesTitle'],
episode['season'],
episode['episode'],
episode['title']),
name=f'{episode["seriesTitle"]} - S{episode["season"]:02d}E{episode["episode"]:02d} - {episode["title"]}',
value=i,
count=count_episode_to_upgrade)
@ -218,7 +215,7 @@ def get_queries_condition_parameters():
days_to_upgrade_subs = settings.general.days_to_upgrade_subs
minimum_timestamp = (datetime.now() - timedelta(days=int(days_to_upgrade_subs)))
if settings.general.getboolean('upgrade_manual'):
if settings.general.upgrade_manual:
query_actions = [1, 2, 3, 4, 6]
else:
query_actions = [1, 3]
@ -244,7 +241,7 @@ def parse_language_string(language_string):
def get_upgradable_episode_subtitles():
if not settings.general.getboolean('upgrade_subs'):
if not settings.general.upgrade_subs:
# return an empty set of rows
return select(TableHistory.id) \
.where(TableHistory.id.is_(None)) \
@ -277,7 +274,7 @@ def get_upgradable_episode_subtitles():
def get_upgradable_movies_subtitles():
if not settings.general.getboolean('upgrade_subs'):
if not settings.general.upgrade_subs:
# return an empty set of rows
return select(TableHistoryMovie.id) \
.where(TableHistoryMovie.id.is_(None)) \
@ -323,10 +320,10 @@ def _language_from_items(items):
results = []
for item in items:
if item['forced'] == 'True':
results.append(item['language'] + ':forced')
results.append(f'{item["language"]}:forced')
elif item['hi'] == 'True':
results.append(item['language'] + ':hi')
results.append(f'{item["language"]}:hi')
else:
results.append(item['language'])
results.append(item['language'] + ':hi')
results.append(f'{item["language"]}:hi')
return results

@ -29,13 +29,13 @@ from .post_processing import postprocessing
def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, audio_language):
logging.debug(f'BAZARR Manually uploading subtitles for this file: {path}')
single = settings.general.getboolean('single_language')
single = settings.general.single_language
use_postprocessing = settings.general.getboolean('use_postprocessing')
use_postprocessing = settings.general.use_postprocessing
postprocessing_cmd = settings.general.postprocessing_cmd
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
'win') and settings.general.chmod_enabled else None
language = alpha3_from_alpha2(language)
@ -84,10 +84,10 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
sub.content = subtitle.read()
if not sub.is_valid():
logging.exception('BAZARR Invalid subtitle file: ' + subtitle.filename)
logging.exception(f'BAZARR Invalid subtitle file: {subtitle.filename}')
sub.mods = None
if settings.general.getboolean('utf8_encode'):
if settings.general.utf8_encode:
sub.set_encoding("utf-8")
try:
@ -106,11 +106,11 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
formats=(sub.format,) if use_original_format else ("srt",),
path_decoder=force_unicode)
except Exception:
logging.exception('BAZARR Error saving Subtitles file to disk for this file:' + path)
logging.exception(f'BAZARR Error saving Subtitles file to disk for this file: {path}')
return
if len(saved_subtitles) < 1:
logging.exception('BAZARR Error saving Subtitles file to disk for this file:' + path)
logging.exception(f'BAZARR Error saving Subtitles file to disk for this file: {path}')
return
subtitle_path = saved_subtitles[0].storage_path
@ -168,8 +168,8 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
event_stream(type='movie', action='update', payload=movie_metadata.radarrId)
event_stream(type='movie-wanted', action='delete', payload=movie_metadata.radarrId)
result = ProcessSubtitlesResult(message=language_from_alpha3(language) + modifier_string + " Subtitles manually "
"uploaded.",
result = ProcessSubtitlesResult(message=f"{language_from_alpha3(language)}{modifier_string} Subtitles manually "
"uploaded.",
reversed_path=reversed_path,
downloaded_language_code2=uploaded_language_code2,
downloaded_provider=None,

@ -37,7 +37,7 @@ def get_video(path, title, sceneName, providers=None, media_type="movie"):
hash_from = original_path
try:
skip_hashing = settings.general.getboolean('skip_hashing')
skip_hashing = settings.general.skip_hashing
video = parse_video(path, hints=hints, skip_hashing=skip_hashing, dry_run=used_scene_name, providers=providers,
hash_from=hash_from)
video.used_scene_name = used_scene_name

@ -109,10 +109,7 @@ def wanted_search_missing_subtitles_series():
for i, episode in enumerate(episodes):
show_progress(id='wanted_episodes_progress',
header='Searching subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode.title,
episode.season,
episode.episode,
episode.episodeTitle),
name=f'{episode.title} - S{episode.season:02d}E{episode.episode:02d} - {episode.episodeTitle}',
value=i,
count=count_episodes)

@ -48,7 +48,7 @@ class EventTracker:
self.tracker.store.save()
def track_subtitles(self, provider, action, language):
if not settings.analytics.getboolean('enabled'):
if not settings.analytics.enabled:
return
subtitles_event = self.tracker.create_new_event(name="subtitles")
@ -65,7 +65,7 @@ class EventTracker:
self.tracker.store.save()
def track_throttling(self, provider, exception_name, exception_info):
if not settings.analytics.getboolean('enabled'):
if not settings.analytics.enabled:
return
throttling_event = self.tracker.create_new_event(name="throttling")

@ -7,7 +7,7 @@ import shutil
import logging
from datetime import datetime, timedelta
from zipfile import ZipFile, BadZipFile
from zipfile import ZipFile, BadZipFile, ZIP_DEFLATED
from glob import glob
from app.get_args import args
@ -52,7 +52,7 @@ def backup_to_zip():
backup_filename = f"bazarr_backup_v{os.environ['BAZARR_VERSION']}_{now_string}.zip"
logging.debug(f'Backup filename will be: {backup_filename}')
if not settings.postgresql.getboolean('enabled'):
if not settings.postgresql.enabled:
database_src_file = os.path.join(args.config_dir, 'db', 'bazarr.db')
logging.debug(f'Database file path to backup is: {database_src_file}')
@ -71,10 +71,11 @@ def backup_to_zip():
database_backup_file = None
logging.exception('Unable to backup database file.')
config_file = os.path.join(args.config_dir, 'config', 'config.ini')
config_file = os.path.join(args.config_dir, 'config', 'config.yaml')
logging.debug(f'Config file path to backup is: {config_file}')
with ZipFile(os.path.join(get_backup_path(), backup_filename), 'w') as backupZip:
with ZipFile(os.path.join(get_backup_path(), backup_filename), 'w', compression=ZIP_DEFLATED,
compresslevel=9) as backupZip:
if database_backup_file:
backupZip.write(database_backup_file, 'bazarr.db')
try:
@ -83,12 +84,19 @@ def backup_to_zip():
logging.exception(f'Unable to delete temporary database backup file: {database_backup_file}')
else:
logging.debug('Database file is not included in backup. See previous exception')
backupZip.write(config_file, 'config.ini')
backupZip.write(config_file, 'config.yaml')
def restore_from_backup():
restore_config_path = os.path.join(get_restore_path(), 'config.ini')
dest_config_path = os.path.join(args.config_dir, 'config', 'config.ini')
if os.path.isfile(os.path.join(get_restore_path(), 'config.yaml')):
restore_config_path = os.path.join(get_restore_path(), 'config.yaml')
dest_config_path = os.path.join(args.config_dir, 'config', 'config.yaml')
new_config = True
else:
restore_config_path = os.path.join(get_restore_path(), 'config.ini')
dest_config_path = os.path.join(args.config_dir, 'config', 'config.ini')
new_config = False
restore_database_path = os.path.join(get_restore_path(), 'bazarr.db')
dest_database_path = os.path.join(args.config_dir, 'db', 'bazarr.db')
@ -97,8 +105,15 @@ def restore_from_backup():
shutil.copy(restore_config_path, dest_config_path)
os.remove(restore_config_path)
except OSError:
logging.exception(f'Unable to restore or delete config.ini to {dest_config_path}')
if not settings.postgresql.getboolean('enabled'):
logging.exception(f'Unable to restore or delete config file to {dest_config_path}')
else:
if new_config:
if os.path.isfile(os.path.join(get_restore_path(), 'config.ini')):
os.remove(os.path.join(get_restore_path(), 'config.ini'))
else:
if os.path.isfile(os.path.join(get_restore_path(), 'config.yaml')):
os.remove(os.path.join(get_restore_path(), 'config.yaml'))
if not settings.postgresql.enabled:
try:
shutil.copy(restore_database_path, dest_database_path)
os.remove(restore_database_path)
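
Note the backup hunk also switches the archive to real compression: ZipFile defaults to ZIP_STORED (no compression) unless `compression` is passed. A standalone sketch of the new call (paths are placeholders):

from zipfile import ZipFile, ZIP_DEFLATED

# compresslevel=9 is zlib's maximum: smallest archive, most CPU.
with ZipFile('bazarr_backup.zip', 'w', compression=ZIP_DEFLATED, compresslevel=9) as backup_zip:
    backup_zip.write('config.yaml', 'config.yaml')  # (source path, name inside archive)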

@ -44,7 +44,7 @@ def get_binary(name):
installed_exe = which(name)
if installed_exe and os.path.isfile(installed_exe):
logging.debug('BAZARR returning this binary: {}'.format(installed_exe))
logging.debug(f'BAZARR returning this binary: {installed_exe}')
return installed_exe
else:
logging.debug('BAZARR binary not found in path, searching for it...')
@ -72,27 +72,27 @@ def get_binary(name):
logging.debug('BAZARR binary not found in binaries.json')
raise BinaryNotFound
else:
logging.debug('BAZARR found this in binaries.json: {}'.format(binary))
logging.debug(f'BAZARR found this in binaries.json: {binary}')
if os.path.isfile(exe) and md5(exe) == binary['checksum']:
logging.debug('BAZARR returning this existing and up-to-date binary: {}'.format(exe))
logging.debug(f'BAZARR returning this existing and up-to-date binary: {exe}')
return exe
else:
try:
logging.debug('BAZARR creating directory tree for {}'.format(exe_dir))
logging.debug(f'BAZARR creating directory tree for {exe_dir}')
os.makedirs(exe_dir, exist_ok=True)
logging.debug('BAZARR downloading {0} from {1}'.format(name, binary['url']))
logging.debug(f'BAZARR downloading {name} from {binary["url"]}')
r = requests.get(binary['url'])
logging.debug('BAZARR saving {0} to {1}'.format(name, exe_dir))
logging.debug(f'BAZARR saving {name} to {exe_dir}')
with open(exe, 'wb') as f:
f.write(r.content)
if system != 'Windows':
logging.debug('BAZARR adding execute permission on {}'.format(exe))
logging.debug(f'BAZARR adding execute permission on {exe}')
st = os.stat(exe)
os.chmod(exe, st.st_mode | stat.S_IEXEC)
except Exception:
logging.exception('BAZARR unable to download {0} to {1}'.format(name, exe_dir))
logging.exception(f'BAZARR unable to download {name} to {exe_dir}')
raise BinaryNotFound
else:
logging.debug('BAZARR returning this new binary: {}'.format(exe))
logging.debug(f'BAZARR returning this new binary: {exe}')
return exe
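
The up-to-date check above compares a `md5(exe)` helper against the checksum recorded in binaries.json. That helper lives elsewhere in Bazarr's utils; a plausible streaming implementation would look like this (an assumption, not the verbatim source):

import hashlib

def md5(path):
    # Hash in chunks so large binaries are not read into memory at once.
    digest = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            digest.update(chunk)
    return digest.hexdigest()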

@ -9,7 +9,7 @@ def browse_bazarr_filesystem(path='#'):
if os.name == 'nt':
dir_list = []
for drive in string.ascii_uppercase:
drive_letter = drive + ':\\'
drive_letter = f'{drive}:\\'
if os.path.exists(drive_letter):
dir_list.append(drive_letter)
else:

@ -9,9 +9,9 @@ from radarr.rootfolder import check_radarr_rootfolder
def check_health():
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
check_sonarr_rootfolder()
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
check_radarr_rootfolder()
event_stream(type='badges')
@ -24,7 +24,7 @@ def get_health_issues():
health_issues = []
# get Sonarr rootfolder issues
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
rootfolder = database.execute(
select(TableShowsRootfolder.path,
TableShowsRootfolder.accessible,
@ -36,7 +36,7 @@ def get_health_issues():
'issue': item.error})
# get Radarr rootfolder issues
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
rootfolder = database.execute(
select(TableMoviesRootfolder.path,
TableMoviesRootfolder.accessible,

@ -52,7 +52,7 @@ def get_target_folder(file_path):
try:
os.makedirs(fld)
except Exception:
logging.error('BAZARR is unable to create directory to save subtitles: ' + fld)
logging.error(f'BAZARR is unable to create directory to save subtitles: {fld}')
fld = None
else:
fld = None

@ -2,7 +2,7 @@
import re
from app.config import settings, get_array_from
from app.config import settings
class PathMappings:
@ -11,8 +11,8 @@ class PathMappings:
self.path_mapping_movies = []
def update(self):
self.path_mapping_series = [x for x in get_array_from(settings.general.path_mappings) if x[0] != x[1]]
self.path_mapping_movies = [x for x in get_array_from(settings.general.path_mappings_movie) if x[0] != x[1]]
self.path_mapping_series = [x for x in settings.general.path_mappings if x[0] != x[1]]
self.path_mapping_movies = [x for x in settings.general.path_mappings_movie if x[0] != x[1]]
def path_replace(self, path):
if path is None:
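
`get_array_from` could be dropped because the YAML-backed config now stores `path_mappings` as a real list of pairs rather than a serialized INI string. Illustrative values (not Bazarr's defaults):

# config.yaml might contain:
#   general:
#     path_mappings:
#       - ['/tv', '/mnt/tv']    # [remote path, local path]
#       - ['/same', '/same']
path_mappings = [['/tv', '/mnt/tv'], ['/same', '/same']]

# Identical pairs are no-ops, so update() filters them out:
active = [x for x in path_mappings if x[0] != x[1]]
assert active == [['/tv', '/mnt/tv']]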

@ -43,7 +43,7 @@ def pp_replace(pp_command, episode, subtitles, language, language_code2, languag
def set_chmod(subtitles_path):
# apply chmod if required
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
'win') and settings.general.chmod_enabled else None
if chmod:
logging.debug(f"BAZARR setting permission to {chmod} on {subtitles_path} after custom post-processing.")
os.chmod(subtitles_path, chmod)
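
`int(settings.general.chmod, 8)` parses the configured mode as an octal string, which is how users write permissions. For example:

chmod = int('0775', 8)      # example setting value
assert chmod == 0o775       # i.e. 509 decimal, rwxrwxr-x
# os.chmod(subtitles_path, chmod) then applies it on non-Windows platforms.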

@ -9,7 +9,7 @@
# Bazarr configuration path, must be absolute path
# Vite will use this variable to find your bazarr's configuration file
VITE_BAZARR_CONFIG_FILE="../data/config/config.ini"
VITE_BAZARR_CONFIG_FILE="../data/config/config.yaml"
# Display update section in settings
VITE_CAN_UPDATE=true

@ -2,48 +2,34 @@
/// <reference types="node" />
import { readFile } from "fs/promises";
import { get } from "lodash";
import YAML from "yaml";
class ConfigReader {
config?: string;
config: object;
constructor() {
this.config = undefined;
this.config = {};
}
async open(path: string) {
try {
this.config = await readFile(path, "utf8");
const rawConfig = await readFile(path, "utf8");
this.config = YAML.parse(rawConfig);
} catch (err) {
// We don't want to catch the error here; handle it in the getValue method

}
}
getValue(sectionName: string, fieldName: string) {
if (!this.config) {
throw new Error("Cannot find config to read");
}
const targetSection = this.config
.split("\n\n")
.filter((section) => section.includes(`[${sectionName}]`));
if (targetSection.length === 0) {
throw new Error(`Cannot find [${sectionName}] section in config`);
}
const path = `${sectionName}.${fieldName}`;
const result = get(this.config, path);
const section = targetSection[0];
for (const line of section.split("\n")) {
const matched = line.startsWith(fieldName);
if (matched) {
const results = line.split("=");
if (results.length === 2) {
const key = results[1].trim();
return key;
}
}
if (result === undefined) {
throw new Error(`Failed to find ${path} in the local config file`);
}
throw new Error(`Cannot find ${fieldName} in config`);
return result;
}
}
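
The rewritten reader parses the whole file once with `YAML.parse` and resolves `section.field` through lodash's `get`, replacing the line-by-line INI scan. The same lookup expressed in Python (a hedged sketch assuming PyYAML, shown only to illustrate the logic; the real code is the TypeScript above):

import yaml

def get_value(config_text, section_name, field_name):
    config = yaml.safe_load(config_text) or {}
    path = f"{section_name}.{field_name}"
    result = config.get(section_name, {}).get(field_name)
    if result is None:
        raise KeyError(f"Failed to find {path} in the local config file")
    return result

assert get_value("auth:\n  apikey: abc123\n", "auth", "apikey") == "abc123"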

@ -58,7 +58,8 @@
"typescript": "^5",
"vite": "^4.3.0",
"vite-plugin-checker": "^0.5.5",
"vitest": "^0.30.1"
"vitest": "^0.30.1",
"yaml": "^2.3.1"
}
},
"node_modules/@adobe/css-tools": {
@ -4818,6 +4819,14 @@
"node": ">=10"
}
},
"node_modules/cosmiconfig/node_modules/yaml": {
"version": "1.10.2",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
"integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
"engines": {
"node": ">= 6"
}
},
"node_modules/cross-spawn": {
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
@ -10439,11 +10448,12 @@
"dev": true
},
"node_modules/yaml": {
"version": "1.10.2",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
"integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.1.tgz",
"integrity": "sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ==",
"dev": true,
"engines": {
"node": ">= 6"
"node": ">= 14"
}
},
"node_modules/yargs": {

@ -42,7 +42,6 @@
"@types/react-dom": "^18.2.0",
"@types/react-table": "^7.7.0",
"@vitejs/plugin-react": "^4.0.0",
"vitest": "^0.30.1",
"@vitest/coverage-c8": "^0.30.0",
"@vitest/ui": "^0.30.0",
"clsx": "^1.2.0",
@ -62,7 +61,9 @@
"sass": "^1.62.0",
"typescript": "^5",
"vite": "^4.3.0",
"vite-plugin-checker": "^0.5.5"
"vite-plugin-checker": "^0.5.5",
"vitest": "^0.30.1",
"yaml": "^2.3.1"
},
"scripts": {
"start": "vite",

@ -83,24 +83,32 @@ const SettingsGeneralView: FunctionComponent = () => {
</CollapseBox>
<Text
label="API Key"
disabled
// User can copy through the clipboard button
disabled={window.isSecureContext}
// Enable user to at least copy when not in secure context
readOnly={!window.isSecureContext}
rightSectionWidth={95}
rightSectionProps={{ style: { justifyContent: "flex-end" } }}
rightSection={
<MantineGroup spacing="xs" mx="xs" position="right">
<Action
label="Copy API Key"
variant="light"
settingKey={settingApiKey}
color={copied ? "green" : undefined}
icon={copied ? faCheck : faClipboard}
onClick={(update, value) => {
if (value) {
clipboard.copy(value);
toggleState(setCopy, 1500);
}
}}
></Action>
{
// Clipboard API is only available in secure contexts. See: https://developer.mozilla.org/en-US/docs/Web/API/Clipboard_API#interfaces
window.isSecureContext && (
<Action
label="Copy API Key"
variant="light"
settingKey={settingApiKey}
color={copied ? "green" : undefined}
icon={copied ? faCheck : faClipboard}
onClick={(update, value) => {
if (value) {
clipboard.copy(value);
toggleState(setCopy, 1500);
}
}}
/>
)
}
<Action
label="Regenerate"
variant="light"

@ -0,0 +1,31 @@
from __future__ import annotations
from dynaconf.base import LazySettings # noqa
from dynaconf.constants import DEFAULT_SETTINGS_FILES
from dynaconf.contrib import DjangoDynaconf # noqa
from dynaconf.contrib import FlaskDynaconf # noqa
from dynaconf.validator import ValidationError # noqa
from dynaconf.validator import Validator # noqa
settings = LazySettings(
# This global `settings` is deprecated from v3.0.0+
# kept here for backwards compatibility
# To Be Removed in 4.0.x
warn_dynaconf_global_settings=True,
environments=True,
lowercase_read=False,
load_dotenv=True,
default_settings_paths=DEFAULT_SETTINGS_FILES,
)
# This is the new recommended base class alias
Dynaconf = LazySettings # noqa
__all__ = [
"Dynaconf",
"LazySettings",
"Validator",
"FlaskDynaconf",
"ValidationError",
"DjangoDynaconf",
]
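
Typical consumption of this vendored package matches the INSTANCE_TEMPLATE shipped in its constants module further down; a minimal sketch:

from dynaconf import Dynaconf

settings = Dynaconf(
    envvar_prefix="DYNACONF",                          # export DYNACONF_FOO=bar overrides FOO
    settings_files=["settings.yaml", ".secrets.yaml"],
)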

File diff suppressed because it is too large

@ -0,0 +1,773 @@
from __future__ import annotations
import importlib
import json
import os
import pprint
import sys
import warnings
import webbrowser
from contextlib import suppress
from pathlib import Path
from dynaconf import constants
from dynaconf import default_settings
from dynaconf import LazySettings
from dynaconf import loaders
from dynaconf import settings as legacy_settings
from dynaconf.loaders.py_loader import get_module
from dynaconf.utils import upperfy
from dynaconf.utils.files import read_file
from dynaconf.utils.functional import empty
from dynaconf.utils.parse_conf import parse_conf_data
from dynaconf.utils.parse_conf import unparse_conf_data
from dynaconf.validator import ValidationError
from dynaconf.validator import Validator
from dynaconf.vendor import click
from dynaconf.vendor import toml
from dynaconf.vendor import tomllib
os.environ["PYTHONIOENCODING"] = "utf-8"
CWD = None
try:
CWD = Path.cwd()
except FileNotFoundError:
pass
EXTS = ["ini", "toml", "yaml", "json", "py", "env"]
WRITERS = ["ini", "toml", "yaml", "json", "py", "redis", "vault", "env"]
ENC = default_settings.ENCODING_FOR_DYNACONF
def set_settings(ctx, instance=None):
"""Pick correct settings instance and set it to a global variable."""
global settings
settings = None
_echo_enabled = ctx.invoked_subcommand not in ["get", None]
if instance is not None:
if ctx.invoked_subcommand in ["init"]:
raise click.UsageError(
"-i/--instance option is not allowed for `init` command"
)
sys.path.insert(0, ".")
settings = import_settings(instance)
elif "FLASK_APP" in os.environ: # pragma: no cover
with suppress(ImportError, click.UsageError):
from flask.cli import ScriptInfo # noqa
from dynaconf import FlaskDynaconf
flask_app = ScriptInfo().load_app()
settings = FlaskDynaconf(flask_app, **flask_app.config).settings
_echo_enabled and click.echo(
click.style(
"Flask app detected", fg="white", bg="bright_black"
)
)
elif "DJANGO_SETTINGS_MODULE" in os.environ: # pragma: no cover
sys.path.insert(0, os.path.abspath(os.getcwd()))
try:
# Django extension v2
from django.conf import settings # noqa
settings.DYNACONF.configure()
except AttributeError:
settings = LazySettings()
if settings is not None:
_echo_enabled and click.echo(
click.style(
"Django app detected", fg="white", bg="bright_black"
)
)
if settings is None:
if instance is None and "--help" not in click.get_os_args():
if ctx.invoked_subcommand and ctx.invoked_subcommand not in [
"init",
]:
warnings.warn(
"Starting on 3.x the param --instance/-i is now required. "
"try passing it `dynaconf -i path.to.settings <cmd>` "
"Example `dynaconf -i config.settings list` "
)
settings = legacy_settings
else:
settings = LazySettings(create_new_settings=True)
else:
settings = LazySettings()
def import_settings(dotted_path):
"""Import settings instance from python dotted path.
Last item in dotted path must be settings instance.
Example: import_settings('path.to.settings')
"""
if "." in dotted_path:
module, name = dotted_path.rsplit(".", 1)
else:
raise click.UsageError(
f"invalid path to settings instance: {dotted_path}"
)
try:
module = importlib.import_module(module)
except ImportError as e:
raise click.UsageError(e)
except FileNotFoundError:
return
try:
return getattr(module, name)
except AttributeError as e:
raise click.UsageError(e)
def split_vars(_vars):
"""Splits values like foo=bar=zaz in {'foo': 'bar=zaz'}"""
return (
{
upperfy(k.strip()): parse_conf_data(
v.strip(), tomlfy=True, box_settings=settings
)
for k, _, v in [item.partition("=") for item in _vars]
}
if _vars
else {}
)
def read_file_in_root_directory(*names, **kwargs):
"""Read a file on root dir."""
return read_file(
os.path.join(os.path.dirname(__file__), *names),
encoding=kwargs.get("encoding", "utf-8"),
)
def print_version(ctx, param, value):
if not value or ctx.resilient_parsing:
return
click.echo(read_file_in_root_directory("VERSION"))
ctx.exit()
def open_docs(ctx, param, value): # pragma: no cover
if not value or ctx.resilient_parsing:
return
url = "https://dynaconf.com/"
webbrowser.open(url, new=2)
click.echo(f"{url} opened in browser")
ctx.exit()
def show_banner(ctx, param, value):
"""Shows dynaconf awesome banner"""
if not value or ctx.resilient_parsing:
return
set_settings(ctx)
click.echo(settings.dynaconf_banner)
click.echo("Learn more at: http://github.com/dynaconf/dynaconf")
ctx.exit()
@click.group()
@click.option(
"--version",
is_flag=True,
callback=print_version,
expose_value=False,
is_eager=True,
help="Show dynaconf version",
)
@click.option(
"--docs",
is_flag=True,
callback=open_docs,
expose_value=False,
is_eager=True,
help="Open documentation in browser",
)
@click.option(
"--banner",
is_flag=True,
callback=show_banner,
expose_value=False,
is_eager=True,
help="Show awesome banner",
)
@click.option(
"--instance",
"-i",
default=None,
envvar="INSTANCE_FOR_DYNACONF",
help="Custom instance of LazySettings",
)
@click.pass_context
def main(ctx, instance):
"""Dynaconf - Command Line Interface\n
Documentation: https://dynaconf.com/
"""
set_settings(ctx, instance)
@main.command()
@click.option(
"--format", "fileformat", "-f", default="toml", type=click.Choice(EXTS)
)
@click.option(
"--path", "-p", default=CWD, help="defaults to current directory"
)
@click.option(
"--env",
"-e",
default=None,
help="deprecated command (kept for compatibility but unused)",
)
@click.option(
"--vars",
"_vars",
"-v",
multiple=True,
default=None,
help=(
"extra values to write to settings file "
"e.g: `dynaconf init -v NAME=foo -v X=2`"
),
)
@click.option(
"--secrets",
"_secrets",
"-s",
multiple=True,
default=None,
help=(
"secret key values to be written in .secrets "
"e.g: `dynaconf init -s TOKEN=kdslmflds"
),
)
@click.option("--wg/--no-wg", default=True)
@click.option("-y", default=False, is_flag=True)
@click.option("--django", default=os.environ.get("DJANGO_SETTINGS_MODULE"))
@click.pass_context
def init(ctx, fileformat, path, env, _vars, _secrets, wg, y, django):
"""Inits a dynaconf project
By default it creates a settings.toml and a .secrets.toml
for [default|development|staging|testing|production|global] envs.
The format of the files can be changed passing
--format=yaml|json|ini|py.
This command must run on the project's root folder or you must pass
--path=/myproject/root/folder.
The --env/-e is deprecated (kept for compatibility but unused)
"""
click.echo("⚙️ Configuring your Dynaconf environment")
click.echo("-" * 42)
if "FLASK_APP" in os.environ: # pragma: no cover
click.echo(
"⚠️ Flask detected, you can't use `dynaconf init` "
"on a flask project, instead go to dynaconf.com/flask/ "
"for more information.\n"
"Or add the following to your app.py\n"
"\n"
"from dynaconf import FlaskDynaconf\n"
"app = Flask(__name__)\n"
"FlaskDynaconf(app)\n"
)
exit(1)
path = Path(path)
if env is not None:
click.secho(
"⚠️ The --env/-e option is deprecated (kept for\n"
" compatibility but unused)\n",
fg="red",
bold=True,
# stderr=True,
)
if settings.get("create_new_settings") is True:
filename = Path("config.py")
if not filename.exists():
with open(filename, "w") as new_settings:
new_settings.write(
constants.INSTANCE_TEMPLATE.format(
settings_files=[
f"settings.{fileformat}",
f".secrets.{fileformat}",
]
)
)
click.echo(
"🐍 The file `config.py` was generated.\n"
" on your code now use `from config import settings`.\n"
" (you must have `config` importable in your PYTHONPATH).\n"
)
else:
click.echo(
f"⁉️ You already have a {filename} so it is not going to be\n"
" generated for you, you will need to create your own \n"
" settings instance e.g: config.py \n"
" from dynaconf import Dynaconf \n"
" settings = Dynaconf(**options)\n"
)
sys.path.append(str(path))
set_settings(ctx, "config.settings")
env = settings.current_env.lower()
loader = importlib.import_module(f"dynaconf.loaders.{fileformat}_loader")
# Turn foo=bar=zaz in {'foo': 'bar=zaz'}
env_data = split_vars(_vars)
_secrets = split_vars(_secrets)
# create placeholder data for every env
settings_data = {}
secrets_data = {}
if env_data:
settings_data[env] = env_data
settings_data["default"] = {k: "a default value" for k in env_data}
if _secrets:
secrets_data[env] = _secrets
secrets_data["default"] = {k: "a default value" for k in _secrets}
if str(path).endswith(
constants.ALL_EXTENSIONS + ("py",)
): # pragma: no cover # noqa
settings_path = path
secrets_path = path.parent / f".secrets.{fileformat}"
gitignore_path = path.parent / ".gitignore"
else:
if fileformat == "env":
if str(path) in (".env", "./.env"): # pragma: no cover
settings_path = path
elif str(path).endswith("/.env"): # pragma: no cover
settings_path = path
elif str(path).endswith(".env"): # pragma: no cover
settings_path = path.parent / ".env"
else:
settings_path = path / ".env"
Path.touch(settings_path)
secrets_path = None
else:
settings_path = path / f"settings.{fileformat}"
secrets_path = path / f".secrets.{fileformat}"
gitignore_path = path / ".gitignore"
if fileformat in ["py", "env"] or env == "main":
# for Main env, Python and .env formats writes a single env
settings_data = settings_data.get(env, {})
secrets_data = secrets_data.get(env, {})
if not y and settings_path and settings_path.exists(): # pragma: no cover
click.confirm(
f"{settings_path} exists do you want to overwrite it?",
abort=True,
)
if not y and secrets_path and secrets_path.exists(): # pragma: no cover
click.confirm(
f"{secrets_path} exists do you want to overwrite it?",
abort=True,
)
if settings_path:
loader.write(settings_path, settings_data, merge=True)
click.echo(
f"🎛️ {settings_path.name} created to hold your settings.\n"
)
if secrets_path:
loader.write(secrets_path, secrets_data, merge=True)
click.echo(f"🔑 {secrets_path.name} created to hold your secrets.\n")
ignore_line = ".secrets.*"
comment = "\n# Ignore dynaconf secret files\n"
if not gitignore_path.exists():
with open(str(gitignore_path), "w", encoding=ENC) as f:
f.writelines([comment, ignore_line, "\n"])
else:
existing = (
ignore_line in open(str(gitignore_path), encoding=ENC).read()
)
if not existing: # pragma: no cover
with open(str(gitignore_path), "a+", encoding=ENC) as f:
f.writelines([comment, ignore_line, "\n"])
click.echo(
f"🙈 the {secrets_path.name} is also included in `.gitignore` \n"
" beware to not push your secrets to a public repo \n"
" or use dynaconf builtin support for Vault Servers.\n"
)
if django: # pragma: no cover
dj_module, _ = get_module({}, django)
dj_filename = dj_module.__file__
if Path(dj_filename).exists():
click.confirm(
f"{dj_filename} is found do you want to add dynaconf?",
abort=True,
)
with open(dj_filename, "a") as dj_file:
dj_file.write(constants.DJANGO_PATCH)
click.echo("🎠 Now your Django settings are managed by Dynaconf")
else:
click.echo("❌ Django settings file not written.")
else:
click.echo(
"🎉 Dynaconf is configured! read more on https://dynaconf.com\n"
" Use `dynaconf -i config.settings list` to see your settings\n"
)
@main.command(name="get")
@click.argument("key", required=True)
@click.option(
"--default",
"-d",
default=empty,
help="Default value if settings doesn't exist",
)
@click.option(
"--env", "-e", default=None, help="Filters the env to get the values"
)
@click.option(
"--unparse",
"-u",
default=False,
help="Unparse data by adding markers such as @none, @int etc..",
is_flag=True,
)
def get(key, default, env, unparse):
"""Returns the raw value for a settings key.
If the result is a dict, list or tuple it is printed as a valid JSON string.
"""
if env:
env = env.strip()
if key:
key = key.strip()
if env:
settings.setenv(env)
if default is not empty:
result = settings.get(key, default)
else:
result = settings[key] # let the keyerror raises
if unparse:
result = unparse_conf_data(result)
if isinstance(result, (dict, list, tuple)):
result = json.dumps(result, sort_keys=True)
click.echo(result, nl=False)
@main.command(name="list")
@click.option(
"--env", "-e", default=None, help="Filters the env to get the values"
)
@click.option("--key", "-k", default=None, help="Filters a single key")
@click.option(
"--more",
"-m",
default=None,
help="Pagination more|less style",
is_flag=True,
)
@click.option(
"--loader",
"-l",
default=None,
help="a loader identifier to filter e.g: toml|yaml",
)
@click.option(
"--all",
"_all",
"-a",
default=False,
is_flag=True,
help="show dynaconf internal settings?",
)
@click.option(
"--output",
"-o",
type=click.Path(writable=True, dir_okay=False),
default=None,
help="Filepath to write the listed values as json",
)
@click.option(
"--output-flat",
"flat",
is_flag=True,
default=False,
help="Output file is flat (do not include [env] name)",
)
def _list(env, key, more, loader, _all=False, output=None, flat=False):
"""Lists all user defined config values
and if `--all` is passed it also shows dynaconf internal variables.
"""
if env:
env = env.strip()
if key:
key = key.strip()
if loader:
loader = loader.strip()
if env:
settings.setenv(env)
cur_env = settings.current_env.lower()
if cur_env == "main":
flat = True
click.echo(
click.style(
f"Working in {cur_env} environment ",
bold=True,
bg="bright_blue",
fg="bright_white",
)
)
if not loader:
data = settings.as_dict(env=env, internal=_all)
else:
identifier = f"{loader}_{cur_env}"
data = settings._loaded_by_loaders.get(identifier, {})
data = data or settings._loaded_by_loaders.get(loader, {})
# remove to avoid displaying twice
data.pop("SETTINGS_MODULE", None)
def color(_k):
if _k in dir(default_settings):
return "blue"
return "magenta"
def format_setting(_k, _v):
key = click.style(_k, bg=color(_k), fg="bright_white")
data_type = click.style(
f"<{type(_v).__name__}>", bg="bright_black", fg="bright_white"
)
value = pprint.pformat(_v)
return f"{key}{data_type} {value}"
if not key:
datalines = "\n".join(
format_setting(k, v)
for k, v in data.items()
if k not in data.get("RENAMED_VARS", [])
)
(click.echo_via_pager if more else click.echo)(datalines)
if output:
loaders.write(output, data, env=not flat and cur_env)
else:
key = upperfy(key)
try:
value = settings.get(key, empty)
except AttributeError:
value = empty
if value is empty:
click.echo(click.style("Key not found", bg="red", fg="white"))
return
click.echo(format_setting(key, value))
if output:
loaders.write(output, {key: value}, env=not flat and cur_env)
if env:
settings.setenv()
@main.command()
@click.argument("to", required=True, type=click.Choice(WRITERS))
@click.option(
"--vars",
"_vars",
"-v",
multiple=True,
default=None,
help=(
"key values to be written "
"e.g: `dynaconf write toml -e NAME=foo -e X=2"
),
)
@click.option(
"--secrets",
"_secrets",
"-s",
multiple=True,
default=None,
help=(
"secret key values to be written in .secrets "
"e.g: `dynaconf write toml -s TOKEN=kdslmflds -s X=2"
),
)
@click.option(
"--path",
"-p",
default=CWD,
help="defaults to current directory/settings.{ext}",
)
@click.option(
"--env",
"-e",
default="default",
help=(
"env to write to defaults to DEVELOPMENT for files "
"for external sources like Redis and Vault "
"it will be DYNACONF or the value set in "
"$ENVVAR_PREFIX_FOR_DYNACONF"
),
)
@click.option("-y", default=False, is_flag=True)
def write(to, _vars, _secrets, path, env, y):
"""Writes data to specific source"""
_vars = split_vars(_vars)
_secrets = split_vars(_secrets)
loader = importlib.import_module(f"dynaconf.loaders.{to}_loader")
if to in EXTS:
# Lets write to a file
path = Path(path)
if str(path).endswith(constants.ALL_EXTENSIONS + ("py",)):
settings_path = path
secrets_path = path.parent / f".secrets.{to}"
else:
if to == "env":
if str(path) in (".env", "./.env"): # pragma: no cover
settings_path = path
elif str(path).endswith("/.env"):
settings_path = path
elif str(path).endswith(".env"):
settings_path = path.parent / ".env"
else:
settings_path = path / ".env"
Path.touch(settings_path)
secrets_path = None
_vars.update(_secrets)
else:
settings_path = path / f"settings.{to}"
secrets_path = path / f".secrets.{to}"
if (
_vars and not y and settings_path and settings_path.exists()
): # pragma: no cover # noqa
click.confirm(
f"{settings_path} exists do you want to overwrite it?",
abort=True,
)
if (
_secrets and not y and secrets_path and secrets_path.exists()
): # pragma: no cover # noqa
click.confirm(
f"{secrets_path} exists do you want to overwrite it?",
abort=True,
)
if to not in ["py", "env"]:
if _vars:
_vars = {env: _vars}
if _secrets:
_secrets = {env: _secrets}
if _vars and settings_path:
loader.write(settings_path, _vars, merge=True)
click.echo(f"Data successful written to {settings_path}")
if _secrets and secrets_path:
loader.write(secrets_path, _secrets, merge=True)
click.echo(f"Data successful written to {secrets_path}")
else: # pragma: no cover
# lets write to external source
with settings.using_env(env):
# make sure we're in the correct environment
loader.write(settings, _vars, **_secrets)
click.echo(f"Data successful written to {to}")
@main.command()
@click.option(
"--path", "-p", default=CWD, help="defaults to current directory"
)
def validate(path): # pragma: no cover
"""Validates Dynaconf settings based on rules defined in
dynaconf_validators.toml"""
# reads the 'dynaconf_validators.toml' from path
# for each section register the validator for specific env
# call validate
path = Path(path)
if not str(path).endswith(".toml"):
path = path / "dynaconf_validators.toml"
if not path.exists(): # pragma: no cover # noqa
click.echo(click.style(f"{path} not found", fg="white", bg="red"))
sys.exit(1)
try: # try tomlib first
validation_data = tomllib.load(open(str(path), "rb"))
except UnicodeDecodeError: # fallback to legacy toml (TBR in 4.0.0)
warnings.warn(
"TOML files should have only UTF-8 encoded characters. "
"starting on 4.0.0 dynaconf will stop allowing invalid chars.",
)
validation_data = toml.load(
open(str(path), encoding=default_settings.ENCODING_FOR_DYNACONF),
)
success = True
for env, name_data in validation_data.items():
for name, data in name_data.items():
if not isinstance(data, dict): # pragma: no cover
click.echo(
click.style(
f"Invalid rule for parameter '{name}'",
fg="white",
bg="yellow",
)
)
else:
data.setdefault("env", env)
click.echo(
click.style(
f"Validating '{name}' with '{data}'",
fg="white",
bg="blue",
)
)
try:
Validator(name, **data).validate(settings)
except ValidationError as e:
click.echo(
click.style(f"Error: {e}", fg="white", bg="red")
)
success = False
if success:
click.echo(click.style("Validation success!", fg="white", bg="green"))
else:
click.echo(click.style("Validation error!", fg="white", bg="red"))
sys.exit(1)
if __name__ == "__main__": # pragma: no cover
main()
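
One subtlety worth illustrating: `split_vars` uses `str.partition('=')`, which splits only on the first equals sign, so values may themselves contain `=` (a standalone re-statement of the docstring's `foo=bar=zaz` example):

items = ["FOO=bar=zaz", "X=2"]
parsed = {k.strip().upper(): v.strip()
          for k, _, v in (item.partition("=") for item in items)}
assert parsed == {"FOO": "bar=zaz", "X": "2"}
# The real implementation additionally runs each value through
# parse_conf_data(tomlfy=True), so "2" would become the int 2.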

@ -0,0 +1,52 @@
# pragma: no cover
from __future__ import annotations
INI_EXTENSIONS = (".ini", ".conf", ".properties")
TOML_EXTENSIONS = (".toml", ".tml")
YAML_EXTENSIONS = (".yaml", ".yml")
JSON_EXTENSIONS = (".json",)
ALL_EXTENSIONS = (
INI_EXTENSIONS + TOML_EXTENSIONS + YAML_EXTENSIONS + JSON_EXTENSIONS
) # noqa
EXTERNAL_LOADERS = {
"ENV": "dynaconf.loaders.env_loader",
"VAULT": "dynaconf.loaders.vault_loader",
"REDIS": "dynaconf.loaders.redis_loader",
}
DJANGO_PATCH = """
# HERE STARTS DYNACONF EXTENSION LOAD (Keep at the very bottom of settings.py)
# Read more at https://www.dynaconf.com/django/
import dynaconf # noqa
settings = dynaconf.DjangoDynaconf(__name__) # noqa
# HERE ENDS DYNACONF EXTENSION LOAD (No more code below this line)
"""
INSTANCE_TEMPLATE = """
from dynaconf import Dynaconf
settings = Dynaconf(
envvar_prefix="DYNACONF",
settings_files={settings_files},
)
# `envvar_prefix` = export envvars with `export DYNACONF_FOO=bar`.
# `settings_files` = Load these files in the order.
"""
EXTS = (
"py",
"toml",
"tml",
"yaml",
"yml",
"ini",
"conf",
"properties",
"json",
)
DEFAULT_SETTINGS_FILES = [f"settings.{ext}" for ext in EXTS] + [
f".secrets.{ext}" for ext in EXTS
]
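
The closing comprehension expands to one settings and one secrets candidate per extension, in order:

EXTS = ("py", "toml", "tml", "yaml", "yml", "ini", "conf", "properties", "json")
files = [f"settings.{ext}" for ext in EXTS] + [f".secrets.{ext}" for ext in EXTS]
assert files[0] == "settings.py" and files[-1] == ".secrets.json"
assert len(files) == 18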

@ -0,0 +1,5 @@
from __future__ import annotations
from dynaconf.contrib.django_dynaconf_v2 import DjangoDynaconf # noqa
from dynaconf.contrib.flask_dynaconf import DynaconfConfig # noqa
from dynaconf.contrib.flask_dynaconf import FlaskDynaconf # noqa

@ -0,0 +1,142 @@
"""Dynaconf django extension
In the `django_project/settings.py` put at the very bottom of the file:
# HERE STARTS DYNACONF EXTENSION LOAD (Keep at the very bottom of settings.py)
# Read more at https://www.dynaconf.com/django/
import dynaconf # noqa
settings = dynaconf.DjangoDynaconf(__name__) # noqa
# HERE ENDS DYNACONF EXTENSION LOAD (No more code below this line)
Now in the root of your Django project
(the same folder where manage.py is located)
Put your config files `settings.{py|yaml|toml|ini|json}`
and or `.secrets.{py|yaml|toml|ini|json}`
On your projects root folder now you can start as::
DJANGO_DEBUG='false' \
DJANGO_ALLOWED_HOSTS='["localhost"]' \
python manage.py runserver
"""
from __future__ import annotations
import inspect
import os
import sys
import dynaconf
try: # pragma: no cover
from django import conf
from django.conf import settings as django_settings
django_installed = True
except ImportError: # pragma: no cover
django_installed = False
def load(django_settings_module_name=None, **kwargs): # pragma: no cover
if not django_installed:
raise RuntimeError(
"To use this extension django must be installed "
"install it with: pip install django"
)
try:
django_settings_module = sys.modules[django_settings_module_name]
except KeyError:
django_settings_module = sys.modules[
os.environ["DJANGO_SETTINGS_MODULE"]
]
settings_module_name = django_settings_module.__name__
settings_file = os.path.abspath(django_settings_module.__file__)
_root_path = os.path.dirname(settings_file)
# 1) Create the lazy settings object reusing settings_module consts
options = {
k.upper(): v
for k, v in django_settings_module.__dict__.items()
if k.isupper()
}
options.update(kwargs)
options.setdefault(
"SKIP_FILES_FOR_DYNACONF", [settings_file, "dynaconf_merge"]
)
options.setdefault("ROOT_PATH_FOR_DYNACONF", _root_path)
options.setdefault("ENVVAR_PREFIX_FOR_DYNACONF", "DJANGO")
options.setdefault("ENV_SWITCHER_FOR_DYNACONF", "DJANGO_ENV")
options.setdefault("ENVIRONMENTS_FOR_DYNACONF", True)
options.setdefault("load_dotenv", True)
options.setdefault(
"default_settings_paths", dynaconf.DEFAULT_SETTINGS_FILES
)
class UserSettingsHolder(dynaconf.LazySettings):
_django_override = True
lazy_settings = dynaconf.LazySettings(**options)
dynaconf.settings = lazy_settings # rebind the settings
# 2) Set all settings back to django_settings_module for 'django check'
lazy_settings.populate_obj(django_settings_module)
# 3) Bind `settings` and `DYNACONF`
setattr(django_settings_module, "settings", lazy_settings)
setattr(django_settings_module, "DYNACONF", lazy_settings)
# 4) keep django original settings
dj = {}
for key in dir(django_settings):
if (
key.isupper()
and (key != "SETTINGS_MODULE")
and key not in lazy_settings.store
):
dj[key] = getattr(django_settings, key, None)
dj["ORIGINAL_SETTINGS_MODULE"] = django_settings.SETTINGS_MODULE
lazy_settings.update(dj)
# Allow dynaconf_hooks to be in the same folder as the django.settings
dynaconf.loaders.execute_hooks(
"post",
lazy_settings,
lazy_settings.current_env,
modules=[settings_module_name],
files=[settings_file],
)
lazy_settings._loaded_py_modules.insert(0, settings_module_name)
# 5) Patch django.conf.settings
class Wrapper:
# lazy_settings = conf.settings.lazy_settings
def __getattribute__(self, name):
if name == "settings":
return lazy_settings
if name == "UserSettingsHolder":
return UserSettingsHolder
return getattr(conf, name)
# This implementation is recommended by Guido Van Rossum
# https://mail.python.org/pipermail/python-ideas/2012-May/014969.html
sys.modules["django.conf"] = Wrapper()
# 6) Enable standalone scripts to use Dynaconf
# This is for when `django.conf.settings` is imported directly
# on external `scripts` (out of Django's lifetime)
for stack_item in reversed(inspect.stack()):
if isinstance(
stack_item.frame.f_globals.get("settings"), conf.LazySettings
):
stack_item.frame.f_globals["settings"] = lazy_settings
return lazy_settings
# syntax sugar
DjangoDynaconf = load # noqa

@ -0,0 +1,230 @@
from __future__ import annotations
import warnings
from collections import ChainMap
from contextlib import suppress
try:
from flask.config import Config
flask_installed = True
except ImportError: # pragma: no cover
flask_installed = False
Config = object
import dynaconf
import pkg_resources
class FlaskDynaconf:
"""The arguments are.
app = The created app
dynaconf_args = Extra args to be passed to Dynaconf (validator for example)
All other values are stored as config vars specially::
ENVVAR_PREFIX_FOR_DYNACONF = env prefix for your envvars to be loaded
example:
if you set to `MYSITE` then
export MYSITE_SQL_PORT='@int 5445'
with that exported to env you access using:
app.config.SQL_PORT
app.config.get('SQL_PORT')
app.config.get('sql_port')
# get is case insensitive
app.config['SQL_PORT']
Dynaconf uses `@int, @bool, @float, @json` to cast
env vars
SETTINGS_FILE_FOR_DYNACONF = The name of the module or file to use as
default to load settings. If nothing is
passed it will be `settings.*` or value
found in `ENVVAR_FOR_DYNACONF`
Dynaconf supports
.py, .yml, .toml, ini, json
ATTENTION: Take a look at `settings.yml` and `.secrets.yml` to know the
required settings format.
Settings load order in Dynaconf:
- Load all defaults and Flask defaults
- Load all passed variables when applying FlaskDynaconf
- Update with data in settings files
- Update with data in environment vars `ENVVAR_FOR_DYNACONF_`
TOML files are very useful to have `envd` settings, let's say,
`production` and `development`.
You can also achieve the same using multiple `.py` files naming as
`settings.py`, `production_settings.py` and `development_settings.py`
(see examples/validator)
Example::
app = Flask(__name__)
FlaskDynaconf(
app,
ENV='MYSITE',
SETTINGS_FILE='settings.yml',
EXTRA_VALUE='You can add additional config vars here'
)
Take a look at examples/flask in Dynaconf repository
"""
def __init__(
self,
app=None,
instance_relative_config=False,
dynaconf_instance=None,
extensions_list=False,
**kwargs,
):
"""kwargs holds initial dynaconf configuration"""
if not flask_installed: # pragma: no cover
raise RuntimeError(
"To use this extension Flask must be installed "
"install it with: pip install flask"
)
self.kwargs = {k.upper(): v for k, v in kwargs.items()}
kwargs.setdefault("ENVVAR_PREFIX", "FLASK")
env_prefix = f"{kwargs['ENVVAR_PREFIX']}_ENV" # FLASK_ENV
kwargs.setdefault("ENV_SWITCHER", env_prefix)
kwargs.setdefault("ENVIRONMENTS", True)
kwargs.setdefault("load_dotenv", True)
kwargs.setdefault(
"default_settings_paths", dynaconf.DEFAULT_SETTINGS_FILES
)
self.dynaconf_instance = dynaconf_instance
self.instance_relative_config = instance_relative_config
self.extensions_list = extensions_list
if app:
self.init_app(app, **kwargs)
def init_app(self, app, **kwargs):
"""kwargs holds initial dynaconf configuration"""
self.kwargs.update(kwargs)
self.settings = self.dynaconf_instance or dynaconf.LazySettings(
**self.kwargs
)
dynaconf.settings = self.settings # rebind customized settings
app.config = self.make_config(app)
app.dynaconf = self.settings
if self.extensions_list:
if not isinstance(self.extensions_list, str):
self.extensions_list = "EXTENSIONS"
app.config.load_extensions(self.extensions_list)
def make_config(self, app):
root_path = app.root_path
if self.instance_relative_config: # pragma: no cover
root_path = app.instance_path
if self.dynaconf_instance:
self.settings.update(self.kwargs)
return DynaconfConfig(
root_path=root_path,
defaults=app.config,
_settings=self.settings,
_app=app,
)
class DynaconfConfig(Config):
"""
Replacement for flask.config_class that responds as a Dynaconf instance.
"""
def __init__(self, _settings, _app, *args, **kwargs):
"""perform the initial load"""
super().__init__(*args, **kwargs)
# Bring Dynaconf instance value to Flask Config
Config.update(self, _settings.store)
self._settings = _settings
self._app = _app
def __contains__(self, item):
return hasattr(self, item)
def __getitem__(self, key):
try:
return self._settings[key]
except KeyError:
return Config.__getitem__(self, key)
def __setitem__(self, key, value):
"""
Allows app.config['key'] = 'foo'
"""
return self._settings.__setitem__(key, value)
def _chain_map(self):
return ChainMap(self._settings, dict(dict.items(self)))
def keys(self):
return self._chain_map().keys()
def values(self):
return self._chain_map().values()
def items(self):
return self._chain_map().items()
def setdefault(self, key, value=None):
return self._chain_map().setdefault(key, value)
def __iter__(self):
return self._chain_map().__iter__()
def __getattr__(self, name):
"""
First try to get value from dynaconf then from Flask Config
"""
with suppress(AttributeError):
return getattr(self._settings, name)
with suppress(KeyError):
return self[name]
raise AttributeError(
f"'{self.__class__.__name__}' object has no attribute '{name}'"
)
def __call__(self, name, *args, **kwargs):
return self.get(name, *args, **kwargs)
def get(self, key, default=None):
"""Gets config from dynaconf variables
if a variable does not exist in dynaconf, try getting it from
`app.config` to support runtime settings."""
return self._settings.get(key, Config.get(self, key, default))
def load_extensions(self, key="EXTENSIONS", app=None):
"""Loads flask extensions dynamically."""
app = app or self._app
extensions = app.config.get(key)
if not extensions:
warnings.warn(
f"Settings is missing {key} to load Flask Extensions",
RuntimeWarning,
)
return
for object_reference in app.config[key]:
# add a placeholder `name` to create a valid entry point
entry_point_spec = f"__name = {object_reference}"
# parse the entry point specification
entry_point = pkg_resources.EntryPoint.parse(entry_point_spec)
# dynamically resolve the entry point
initializer = entry_point.resolve()
# Invoke extension initializer
initializer(app)
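
A minimal runnable version of the docstring's example above (assuming Flask is installed; the prefix and key are placeholders):

from flask import Flask
from dynaconf import FlaskDynaconf

app = Flask(__name__)
FlaskDynaconf(app, ENVVAR_PREFIX="MYSITE")  # export MYSITE_SQL_PORT='@int 5445'

# app.config now proxies Dynaconf: lookups are case-insensitive and typed.
port = app.config.get("SQL_PORT", 5432)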

@ -0,0 +1,252 @@
from __future__ import annotations
import importlib
import os
import sys
import warnings
from dynaconf.utils import RENAMED_VARS
from dynaconf.utils import upperfy
from dynaconf.utils import warn_deprecations
from dynaconf.utils.files import find_file
from dynaconf.utils.parse_conf import parse_conf_data
from dynaconf.vendor.dotenv import load_dotenv
def try_renamed(key, value, older_key, current_key):
if value is None:
if key == current_key:
if older_key in os.environ:
warnings.warn(
f"{older_key} is deprecated please use {current_key}",
DeprecationWarning,
)
value = os.environ[older_key]
return value
def get(key, default=None):
value = os.environ.get(upperfy(key))
# compatibility with renamed variables
for old, new in RENAMED_VARS.items():
value = try_renamed(key, value, old, new)
return (
parse_conf_data(value, tomlfy=True, box_settings={})
if value is not None
else default
)
def start_dotenv(obj=None, root_path=None):
# load_from_dotenv_if_installed
obj = obj or {}
_find_file = getattr(obj, "find_file", find_file)
root_path = (
root_path
or getattr(obj, "_root_path", None)
or get("ROOT_PATH_FOR_DYNACONF")
)
dotenv_path = (
obj.get("DOTENV_PATH_FOR_DYNACONF")
or get("DOTENV_PATH_FOR_DYNACONF")
or _find_file(".env", project_root=root_path)
)
load_dotenv(
dotenv_path,
verbose=obj.get("DOTENV_VERBOSE_FOR_DYNACONF", False),
override=obj.get("DOTENV_OVERRIDE_FOR_DYNACONF", False),
)
warn_deprecations(os.environ)
def reload(load_dotenv=None, *args, **kwargs):
if load_dotenv:
start_dotenv(*args, **kwargs)
importlib.reload(sys.modules[__name__])
# default proj root
# pragma: no cover
ROOT_PATH_FOR_DYNACONF = get("ROOT_PATH_FOR_DYNACONF", None)
# Default settings file
SETTINGS_FILE_FOR_DYNACONF = get("SETTINGS_FILE_FOR_DYNACONF", [])
# Accept the misspelled `FILES` variant when/if it happens
mispelled_files = get("SETTINGS_FILES_FOR_DYNACONF", None)
if not SETTINGS_FILE_FOR_DYNACONF and mispelled_files is not None:
SETTINGS_FILE_FOR_DYNACONF = mispelled_files
# # ENV SETTINGS
# # In dynaconf 1.0.0 `NAMESPACE` got renamed to `ENV`
# If provided environments will be loaded separately
ENVIRONMENTS_FOR_DYNACONF = get("ENVIRONMENTS_FOR_DYNACONF", False)
MAIN_ENV_FOR_DYNACONF = get("MAIN_ENV_FOR_DYNACONF", "MAIN")
# If False dynaconf will allow access to first level settings only in upper
LOWERCASE_READ_FOR_DYNACONF = get("LOWERCASE_READ_FOR_DYNACONF", True)
# The environment variable to switch current env
ENV_SWITCHER_FOR_DYNACONF = get(
"ENV_SWITCHER_FOR_DYNACONF", "ENV_FOR_DYNACONF"
)
# The current env by default is DEVELOPMENT
# to switch is needed to `export ENV_FOR_DYNACONF=PRODUCTION`
# or put that value in .env file
# this value is used only when reading files like .toml|yaml|ini|json
ENV_FOR_DYNACONF = get(ENV_SWITCHER_FOR_DYNACONF, "DEVELOPMENT")
# This variable exists to support `from_env` method
FORCE_ENV_FOR_DYNACONF = get("FORCE_ENV_FOR_DYNACONF", None)
# Default values is taken from DEFAULT pseudo env
# this value is used only when reading files like .toml|yaml|ini|json
DEFAULT_ENV_FOR_DYNACONF = get("DEFAULT_ENV_FOR_DYNACONF", "DEFAULT")
# Global values are taken from DYNACONF env used for exported envvars
# Values here overwrites all other envs
# This namespace is used for files and also envvars
ENVVAR_PREFIX_FOR_DYNACONF = get("ENVVAR_PREFIX_FOR_DYNACONF", "DYNACONF")
# By default all environment variables (filtered by `envvar_prefix`) will
# be pulled into settings space. In case some of them are polluting the space,
# setting this flag to `True` will change this behaviour.
# Only "known" variables will be considered -- that is variables defined before
# in settings files (or includes/preloads).
IGNORE_UNKNOWN_ENVVARS_FOR_DYNACONF = get(
"IGNORE_UNKNOWN_ENVVARS_FOR_DYNACONF", False
)
AUTO_CAST_FOR_DYNACONF = get("AUTO_CAST_FOR_DYNACONF", True)
# The default encoding to open settings files
ENCODING_FOR_DYNACONF = get("ENCODING_FOR_DYNACONF", "utf-8")
# Merge objects on load
MERGE_ENABLED_FOR_DYNACONF = get("MERGE_ENABLED_FOR_DYNACONF", False)
# Lookup keys considering dots as separators
DOTTED_LOOKUP_FOR_DYNACONF = get("DOTTED_LOOKUP_FOR_DYNACONF", True)
# By default `__` is the separator for nested env vars
# export `DYNACONF__DATABASE__server=server.com`
# export `DYNACONF__DATABASE__PORT=6666`
# Should result in settings.DATABASE == {'server': 'server.com', 'PORT': 6666}
# To disable it one can set `NESTED_SEPARATOR_FOR_DYNACONF=false`
NESTED_SEPARATOR_FOR_DYNACONF = get("NESTED_SEPARATOR_FOR_DYNACONF", "__")
# The env var specifying settings module
ENVVAR_FOR_DYNACONF = get("ENVVAR_FOR_DYNACONF", "SETTINGS_FILE_FOR_DYNACONF")
# Default values for redis configs
default_redis = {
"host": get("REDIS_HOST_FOR_DYNACONF", "localhost"),
"port": int(get("REDIS_PORT_FOR_DYNACONF", 6379)),
"db": int(get("REDIS_DB_FOR_DYNACONF", 0)),
"decode_responses": get("REDIS_DECODE_FOR_DYNACONF", True),
"username": get("REDIS_USERNAME_FOR_DYNACONF", None),
"password": get("REDIS_PASSWORD_FOR_DYNACONF", None),
}
REDIS_FOR_DYNACONF = get("REDIS_FOR_DYNACONF", default_redis)
REDIS_ENABLED_FOR_DYNACONF = get("REDIS_ENABLED_FOR_DYNACONF", False)
# Hashicorp Vault Project
vault_scheme = get("VAULT_SCHEME_FOR_DYNACONF", "http")
vault_host = get("VAULT_HOST_FOR_DYNACONF", "localhost")
vault_port = get("VAULT_PORT_FOR_DYNACONF", "8200")
default_vault = {
"url": get(
"VAULT_URL_FOR_DYNACONF", f"{vault_scheme}://{vault_host}:{vault_port}"
),
"token": get("VAULT_TOKEN_FOR_DYNACONF", None),
"cert": get("VAULT_CERT_FOR_DYNACONF", None),
"verify": get("VAULT_VERIFY_FOR_DYNACONF", None),
"timeout": get("VAULT_TIMEOUT_FOR_DYNACONF", None),
"proxies": get("VAULT_PROXIES_FOR_DYNACONF", None),
"allow_redirects": get("VAULT_ALLOW_REDIRECTS_FOR_DYNACONF", None),
"namespace": get("VAULT_NAMESPACE_FOR_DYNACONF", None),
}
VAULT_FOR_DYNACONF = get("VAULT_FOR_DYNACONF", default_vault)
VAULT_ENABLED_FOR_DYNACONF = get("VAULT_ENABLED_FOR_DYNACONF", False)
VAULT_PATH_FOR_DYNACONF = get("VAULT_PATH_FOR_DYNACONF", "dynaconf")
VAULT_MOUNT_POINT_FOR_DYNACONF = get(
"VAULT_MOUNT_POINT_FOR_DYNACONF", "secret"
)
VAULT_ROOT_TOKEN_FOR_DYNACONF = get("VAULT_ROOT_TOKEN_FOR_DYNACONF", None)
VAULT_KV_VERSION_FOR_DYNACONF = get("VAULT_KV_VERSION_FOR_DYNACONF", 1)
VAULT_AUTH_WITH_IAM_FOR_DYNACONF = get(
"VAULT_AUTH_WITH_IAM_FOR_DYNACONF", False
)
VAULT_AUTH_ROLE_FOR_DYNACONF = get("VAULT_AUTH_ROLE_FOR_DYNACONF", None)
VAULT_ROLE_ID_FOR_DYNACONF = get("VAULT_ROLE_ID_FOR_DYNACONF", None)
VAULT_SECRET_ID_FOR_DYNACONF = get("VAULT_SECRET_ID_FOR_DYNACONF", None)
# Only core loaders defined on this list will be invoked
core_loaders = ["YAML", "TOML", "INI", "JSON", "PY"]
CORE_LOADERS_FOR_DYNACONF = get("CORE_LOADERS_FOR_DYNACONF", core_loaders)
# External Loaders to read vars from different data stores
default_loaders = [
"dynaconf.loaders.env_loader",
# 'dynaconf.loaders.redis_loader'
# 'dynaconf.loaders.vault_loader'
]
LOADERS_FOR_DYNACONF = get("LOADERS_FOR_DYNACONF", default_loaders)
# Errors in loaders should be silenced?
SILENT_ERRORS_FOR_DYNACONF = get("SILENT_ERRORS_FOR_DYNACONF", True)
# always fresh variables
FRESH_VARS_FOR_DYNACONF = get("FRESH_VARS_FOR_DYNACONF", [])
DOTENV_PATH_FOR_DYNACONF = get("DOTENV_PATH_FOR_DYNACONF", None)
DOTENV_VERBOSE_FOR_DYNACONF = get("DOTENV_VERBOSE_FOR_DYNACONF", False)
DOTENV_OVERRIDE_FOR_DYNACONF = get("DOTENV_OVERRIDE_FOR_DYNACONF", False)
# Currently this is only used by cli. INSTANCE_FOR_DYNACONF specifies python
# dotted path to custom LazySettings instance. Last dotted path item should be
# instance of LazySettings.
INSTANCE_FOR_DYNACONF = get("INSTANCE_FOR_DYNACONF", None)
# https://msg.pyyaml.org/load
YAML_LOADER_FOR_DYNACONF = get("YAML_LOADER_FOR_DYNACONF", "safe_load")
# Use commentjson? https://commentjson.readthedocs.io/en/latest/
COMMENTJSON_ENABLED_FOR_DYNACONF = get(
"COMMENTJSON_ENABLED_FOR_DYNACONF", False
)
# Extra file, or list of files where to look for secrets
# useful for CI environment like jenkins
# where you can export this variable pointing to a local
# absolute path of the secrets file.
SECRETS_FOR_DYNACONF = get("SECRETS_FOR_DYNACONF", None)
# To include extra paths based on envvar
INCLUDES_FOR_DYNACONF = get("INCLUDES_FOR_DYNACONF", [])
# To pre-load extra paths based on envvar
PRELOAD_FOR_DYNACONF = get("PRELOAD_FOR_DYNACONF", [])
# Files to skip if found on search tree
SKIP_FILES_FOR_DYNACONF = get("SKIP_FILES_FOR_DYNACONF", [])
# YAML reads empty vars as None, should dynaconf apply validator defaults?
# this is set to None, then evaluated on base.Settings.setdefault
# possible values are True/False
APPLY_DEFAULT_ON_NONE_FOR_DYNACONF = get(
"APPLY_DEFAULT_ON_NONE_FOR_DYNACONF", None
)
# Backwards compatibility with renamed variables
for old, new in RENAMED_VARS.items():
setattr(sys.modules[__name__], old, locals()[new])
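
The nested-separator comment above translates to environment variables like these (values illustrative):

import os

# With NESTED_SEPARATOR_FOR_DYNACONF left at its default '__':
os.environ["DYNACONF__DATABASE__server"] = "server.com"
os.environ["DYNACONF__DATABASE__PORT"] = "6666"
# After loading, settings.DATABASE would resolve to
# {'server': 'server.com', 'PORT': 6666}  (PORT cast to int by TOML parsing)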

@ -0,0 +1,277 @@
from __future__ import annotations
import importlib
import os
from dynaconf import constants as ct
from dynaconf import default_settings
from dynaconf.loaders import ini_loader
from dynaconf.loaders import json_loader
from dynaconf.loaders import py_loader
from dynaconf.loaders import toml_loader
from dynaconf.loaders import yaml_loader
from dynaconf.utils import deduplicate
from dynaconf.utils import ensure_a_list
from dynaconf.utils.boxing import DynaBox
from dynaconf.utils.files import get_local_filename
from dynaconf.utils.parse_conf import false_values
def default_loader(obj, defaults=None):
"""Loads default settings and check if there are overridings
exported as environment variables"""
defaults = defaults or {}
default_settings_values = {
key: value
for key, value in default_settings.__dict__.items() # noqa
if key.isupper()
}
all_keys = deduplicate(
list(defaults.keys()) + list(default_settings_values.keys())
)
for key in all_keys:
if not obj.exists(key):
value = defaults.get(key, default_settings_values.get(key))
obj.set(key, value)
# start dotenv to get default env vars from there
# check overrides in env vars
if obj.get("load_dotenv") is True:
default_settings.start_dotenv(obj)
# Deal with cases where a custom ENV_SWITCHER_IS_PROVIDED
# Example: Flask and Django Extensions
env_switcher = defaults.get(
"ENV_SWITCHER_FOR_DYNACONF", "ENV_FOR_DYNACONF"
)
for key in all_keys:
if key not in default_settings_values.keys():
continue
env_value = obj.get_environ(
env_switcher if key == "ENV_FOR_DYNACONF" else key,
default="_not_found",
)
if env_value != "_not_found":
obj.set(key, env_value, tomlfy=True)
def _run_hook_module(hook, hook_module, obj, key=None):
"""Run the hook function from the settings obj.
given a hook name, a hook_module and a settings object
load the function and execute if found.
"""
if hook in obj._loaded_hooks.get(hook_module.__file__, {}):
# already loaded
return
if hook_module and getattr(hook_module, "_error", False):
if not isinstance(hook_module._error, FileNotFoundError):
raise hook_module._error
hook_func = getattr(hook_module, hook, None)
if hook_func:
hook_dict = hook_func(obj.dynaconf.clone())
if hook_dict:
merge = hook_dict.pop(
"dynaconf_merge", hook_dict.pop("DYNACONF_MERGE", False)
)
if key and key in hook_dict:
obj.set(key, hook_dict[key], tomlfy=False, merge=merge)
elif not key:
obj.update(hook_dict, tomlfy=False, merge=merge)
obj._loaded_hooks[hook_module.__file__][hook] = hook_dict
def execute_hooks(
hook, obj, env=None, silent=True, key=None, modules=None, files=None
):
"""Execute dynaconf_hooks from module or filepath."""
if hook not in ["post"]:
raise ValueError(f"hook {hook} not supported yet.")
# try to load hooks using python module __name__
modules = modules or obj._loaded_py_modules
for loaded_module in modules:
hook_module_name = ".".join(
loaded_module.split(".")[:-1] + ["dynaconf_hooks"]
)
try:
hook_module = importlib.import_module(hook_module_name)
except (ImportError, TypeError):
# There was no hook on the same path as a python module
continue
else:
_run_hook_module(
hook=hook,
hook_module=hook_module,
obj=obj,
key=key,
)
# Try to load from python filename path
files = files or obj._loaded_files
for loaded_file in files:
hook_file = os.path.join(
os.path.dirname(loaded_file), "dynaconf_hooks.py"
)
hook_module = py_loader.import_from_filename(
obj, hook_file, silent=silent
)
if not hook_module:
# There was no hook on the same path as a python file
continue
_run_hook_module(
hook=hook,
hook_module=hook_module,
obj=obj,
key=key,
)
def settings_loader(
obj, settings_module=None, env=None, silent=True, key=None, filename=None
):
"""Loads from defined settings module
:param obj: A dynaconf instance
:param settings_module: A path or a list of paths e.g settings.toml
:param env: Env to look for data defaults: development
:param silent: Boolean to silence loading errors
:param key: Load a single key if provided
:param filename: optional filename to override the settings_module
"""
if filename is None:
settings_module = settings_module or obj.settings_module
if not settings_module: # pragma: no cover
return
files = ensure_a_list(settings_module)
else:
files = ensure_a_list(filename)
files.extend(ensure_a_list(obj.get("SECRETS_FOR_DYNACONF", None)))
found_files = []
modules_names = []
for item in files:
item = str(item) # Ensure str in case of LocalPath/Path is passed.
if item.endswith(ct.ALL_EXTENSIONS + (".py",)):
p_root = obj._root_path or (
os.path.dirname(found_files[0]) if found_files else None
)
found = obj.find_file(item, project_root=p_root)
if found:
found_files.append(found)
else:
# a bare python module name w/o extension
modules_names.append(item)
enabled_core_loaders = [
item.upper() for item in obj.get("CORE_LOADERS_FOR_DYNACONF") or []
]
# add `.local.` to found_files list to search for local files.
found_files.extend(
[
get_local_filename(item)
for item in found_files
if ".local." not in str(item)
]
)
for mod_file in modules_names + found_files:
# can be set to multiple files settings.py,settings.yaml,...
# Cascade all loaders
loaders = [
{"ext": ct.YAML_EXTENSIONS, "name": "YAML", "loader": yaml_loader},
{"ext": ct.TOML_EXTENSIONS, "name": "TOML", "loader": toml_loader},
{"ext": ct.INI_EXTENSIONS, "name": "INI", "loader": ini_loader},
{"ext": ct.JSON_EXTENSIONS, "name": "JSON", "loader": json_loader},
]
for loader in loaders:
if loader["name"] not in enabled_core_loaders:
continue
if mod_file.endswith(loader["ext"]):
loader["loader"].load(
obj, filename=mod_file, env=env, silent=silent, key=key
)
continue
if mod_file.endswith(ct.ALL_EXTENSIONS):
continue
if "PY" not in enabled_core_loaders:
# pyloader is disabled
continue
# must be Python file or module
# load from default defined module settings.py or .secrets.py if exists
py_loader.load(obj, mod_file, key=key)
# load from the current env e.g: development_settings.py
env = env or obj.current_env
if mod_file.endswith(".py"):
if ".secrets.py" == mod_file:
tmpl = ".{0}_{1}{2}"
mod_file = "secrets.py"
else:
tmpl = "{0}_{1}{2}"
dirname = os.path.dirname(mod_file)
filename, extension = os.path.splitext(os.path.basename(mod_file))
new_filename = tmpl.format(env.lower(), filename, extension)
env_mod_file = os.path.join(dirname, new_filename)
global_filename = tmpl.format("global", filename, extension)
global_mod_file = os.path.join(dirname, global_filename)
else:
env_mod_file = f"{env.lower()}_{mod_file}"
global_mod_file = f"global_{mod_file}"
py_loader.load(
obj,
env_mod_file,
identifier=f"py_{env.upper()}",
silent=True,
key=key,
)
# load from global_settings.py
py_loader.load(
obj, global_mod_file, identifier="py_global", silent=True, key=key
)
def enable_external_loaders(obj):
"""Enable external service loaders like `VAULT_` and `REDIS_`
looks for env variables like `REDIS_ENABLED_FOR_DYNACONF`
"""
for name, loader in ct.EXTERNAL_LOADERS.items():
enabled = getattr(obj, f"{name.upper()}_ENABLED_FOR_DYNACONF", False)
if (
enabled
and enabled not in false_values
and loader not in obj.LOADERS_FOR_DYNACONF
): # noqa
obj.LOADERS_FOR_DYNACONF.insert(0, loader)
def write(filename, data, env=None):
"""Writes `data` to `filename` infers format by file extension."""
loader_name = f"{filename.rpartition('.')[-1]}_loader"
loader = globals().get(loader_name)
if not loader:
raise OSError(f"{loader_name} cannot be found.")
data = DynaBox(data, box_settings={}).to_dict()
if loader is not py_loader and env and env not in data:
data = {env: data}
loader.write(filename, data, merge=False)
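
The per-env fan-out in `settings_loader` derives extra candidate files from each base name using the templates above; for instance:

import os

env, mod_file = "development", "settings.py"
dirname = os.path.dirname(mod_file)
filename, extension = os.path.splitext(os.path.basename(mod_file))
env_mod_file = os.path.join(dirname, f"{env}_{filename}{extension}")
global_mod_file = os.path.join(dirname, f"global_{filename}{extension}")
assert (env_mod_file, global_mod_file) == ("development_settings.py", "global_settings.py")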

@ -0,0 +1,195 @@
from __future__ import annotations
import io
import warnings
from dynaconf.utils import build_env_list
from dynaconf.utils import ensure_a_list
from dynaconf.utils import upperfy
class BaseLoader:
"""Base loader for dynaconf source files.
:param obj: {[LazySettings]} -- [Dynaconf settings]
:param env: {[string]} -- [the current env to be loaded defaults to
[development]]
:param identifier: {[string]} -- [identifier ini, yaml, json, py, toml]
:param extensions: {[list]} -- [List of extensions with dots ['.a', '.b']]
:param file_reader: {[callable]} -- [reads file return dict]
:param string_reader: {[callable]} -- [reads string return dict]
"""
def __init__(
self,
obj,
env,
identifier,
extensions,
file_reader,
string_reader,
opener_params=None,
):
"""Instantiates a loader for different sources"""
self.obj = obj
self.env = env or obj.current_env
self.identifier = identifier
self.extensions = extensions
self.file_reader = file_reader
self.string_reader = string_reader
self.opener_params = opener_params or {
"mode": "r",
"encoding": obj.get("ENCODING_FOR_DYNACONF", "utf-8"),
}
@staticmethod
def warn_not_installed(obj, identifier): # pragma: no cover
if identifier not in obj._not_installed_warnings:
warnings.warn(
f"{identifier} support is not installed in your environment. "
f"`pip install dynaconf[{identifier}]`"
)
obj._not_installed_warnings.append(identifier)
def load(self, filename=None, key=None, silent=True):
"""
Reads and loads into `self.obj` a single key or all keys from the source
:param filename: Optional filename to load
:param key: if provided load a single key
:param silent: if load errors should be silenced
"""
filename = filename or self.obj.get(self.identifier.upper())
if not filename:
return
if not isinstance(filename, (list, tuple)):
split_files = ensure_a_list(filename)
if all([f.endswith(self.extensions) for f in split_files]): # noqa
files = split_files # it is a ['file.ext', ...]
else: # it is a single config as string
files = [filename]
else: # it is already a list/tuple
files = filename
source_data = self.get_source_data(files)
if self.obj.get("ENVIRONMENTS_FOR_DYNACONF") is False:
self._envless_load(source_data, silent, key)
else:
self._load_all_envs(source_data, silent, key)
def get_source_data(self, files):
"""Reads each file and returns source data for each file
{"path/to/file.ext": {"key": "value"}}
"""
data = {}
for source_file in files:
if source_file.endswith(self.extensions):
try:
with open(source_file, **self.opener_params) as open_file:
content = self.file_reader(open_file)
self.obj._loaded_files.append(source_file)
if content:
data[source_file] = content
except OSError as e:
if ".local." not in source_file:
warnings.warn(
f"{self.identifier}_loader: {source_file} "
f":{str(e)}"
)
else:
# for tests it is possible to pass string
content = self.string_reader(source_file)
if content:
data[source_file] = content
return data
def _envless_load(self, source_data, silent=True, key=None):
"""Load all the keys from each file without env separation"""
for file_data in source_data.values():
self._set_data_to_obj(
file_data,
self.identifier,
key=key,
)
def _load_all_envs(self, source_data, silent=True, key=None):
"""Load configs from files separating by each environment"""
for file_data in source_data.values():
# env names are compared in lower case
file_data = {k.lower(): value for k, value in file_data.items()}
# is there a `dynaconf_merge` on top level of file?
file_merge = file_data.get("dynaconf_merge")
# is there a flag disabling dotted lookup on file?
file_dotted_lookup = file_data.get("dynaconf_dotted_lookup")
for env in build_env_list(self.obj, self.env):
env = env.lower() # lower for better comparison
try:
data = file_data[env] or {}
except KeyError:
if silent:
continue
raise
if not data:
continue
self._set_data_to_obj(
data,
f"{self.identifier}_{env}",
file_merge,
key,
file_dotted_lookup=file_dotted_lookup,
)
def _set_data_to_obj(
self,
data,
identifier,
file_merge=None,
key=False,
file_dotted_lookup=None,
):
"""Calls settings.set to add the keys"""
# data 1st level keys should be transformed to upper case.
data = {upperfy(k): v for k, v in data.items()}
if key:
key = upperfy(key)
if self.obj.filter_strategy:
data = self.obj.filter_strategy(data)
# is there a `dynaconf_merge` inside an `[env]`?
file_merge = file_merge or data.pop("DYNACONF_MERGE", False)
# If not passed or passed as None,
# look for inner [env] value, or default settings.
if file_dotted_lookup is None:
file_dotted_lookup = data.pop(
"DYNACONF_DOTTED_LOOKUP",
self.obj.get("DOTTED_LOOKUP_FOR_DYNACONF"),
)
if not key:
self.obj.update(
data,
loader_identifier=identifier,
merge=file_merge,
dotted_lookup=file_dotted_lookup,
)
elif key in data:
self.obj.set(
key,
data.get(key),
loader_identifier=identifier,
merge=file_merge,
dotted_lookup=file_dotted_lookup,
)
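# A minimal, runnable sketch of BaseLoader against a JSON string source.
# FakeSettings is hypothetical and implements only what BaseLoader touches;
# it assumes the vendored dynaconf package is importable as `dynaconf`:
import json
from dynaconf.loaders.base import BaseLoader

class FakeSettings(dict):
    filter_strategy = None
    _loaded_files = []
    def update(self, data, **kwargs):
        # BaseLoader passes loader metadata kwargs; a real Settings uses them
        super().update(data)

settings = FakeSettings(ENVIRONMENTS_FOR_DYNACONF=False)  # envless mode
loader = BaseLoader(
    obj=settings,
    env="development",
    identifier="json",
    extensions=(".json",),
    file_reader=json.load,
    string_reader=json.loads,
)
loader.load(filename='{"name": "bazarr"}')  # a string source, not a path
print(settings["NAME"])  # first-level keys are upperfied -> bazarr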

@ -0,0 +1,108 @@
from __future__ import annotations
from os import environ
from dynaconf.utils import missing
from dynaconf.utils import upperfy
from dynaconf.utils.parse_conf import parse_conf_data
DOTENV_IMPORTED = False
try:
from dynaconf.vendor.dotenv import cli as dotenv_cli
DOTENV_IMPORTED = True
except ImportError:
pass
except FileNotFoundError:
pass
IDENTIFIER = "env"
def load(obj, env=None, silent=True, key=None):
"""Loads envvars with prefixes:
`DYNACONF_` (default global) or `$(ENVVAR_PREFIX_FOR_DYNACONF)_`
"""
global_prefix = obj.get("ENVVAR_PREFIX_FOR_DYNACONF")
if global_prefix is False or global_prefix.upper() != "DYNACONF":
load_from_env(obj, "DYNACONF", key, silent, IDENTIFIER + "_global")
# Load the global env if exists and overwrite everything
load_from_env(obj, global_prefix, key, silent, IDENTIFIER + "_global")
def load_from_env(
obj,
prefix=False,
key=None,
silent=False,
identifier=IDENTIFIER,
env=False, # backwards compatibility bc renamed param
):
if prefix is False and env is not False:
prefix = env
env_ = ""
if prefix is not False:
if not isinstance(prefix, str):
raise TypeError("`prefix/env` must be str or False")
prefix = prefix.upper()
env_ = f"{prefix}_"
# Load a single environment variable explicitly.
if key:
key = upperfy(key)
value = environ.get(f"{env_}{key}")
if value:
try: # obj is a Settings
obj.set(key, value, loader_identifier=identifier, tomlfy=True)
except AttributeError: # obj is a dict
obj[key] = parse_conf_data(
value, tomlfy=True, box_settings=obj
)
# Load environment variables in bulk (when matching).
else:
# Optionally load only variables already known in the settings space.
ignore_unknown = obj.get("IGNORE_UNKNOWN_ENVVARS_FOR_DYNACONF")
trim_len = len(env_)
data = {
key[trim_len:]: parse_conf_data(
data, tomlfy=True, box_settings=obj
)
for key, data in environ.items()
if key.startswith(env_)
and not (
# Ignore environment variables that haven't been
# pre-defined in settings space.
ignore_unknown
and obj.get(key[trim_len:], default=missing) is missing
)
}
# Update the settings space based on gathered data from environment.
if data:
filter_strategy = obj.get("FILTER_STRATEGY")
if filter_strategy:
data = filter_strategy(data)
obj.update(data, loader_identifier=identifier)
def write(settings_path, settings_data, **kwargs):
"""Write data to .env file"""
if not DOTENV_IMPORTED:
return
for key, value in settings_data.items():
quote_mode = (
isinstance(value, str)
and (value.startswith("'") or value.startswith('"'))
) or isinstance(value, (list, dict))
dotenv_cli.set_key(
str(settings_path),
key,
str(value),
quote_mode="always" if quote_mode else "none",
)
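# A quick sketch of what happens to each matching variable: the prefix is
# stripped from the key and the value is parsed TOML-style, so numbers and
# booleans come back typed. DYNACONF_PORT is a hypothetical variable:
import os
from dynaconf.utils.parse_conf import parse_conf_data
os.environ["DYNACONF_PORT"] = "6767"
key = "DYNACONF_PORT"[len("DYNACONF_"):]  # -> "PORT"
value = parse_conf_data(os.environ["DYNACONF_PORT"], tomlfy=True, box_settings={})
print(key, value, type(value))  # expected: PORT 6767 <class 'int'>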

@ -0,0 +1,62 @@
from __future__ import annotations
import io
from pathlib import Path
from dynaconf import default_settings
from dynaconf.constants import INI_EXTENSIONS
from dynaconf.loaders.base import BaseLoader
from dynaconf.utils import object_merge
try:
from configobj import ConfigObj
except ImportError: # pragma: no cover
ConfigObj = None
def load(obj, env=None, silent=True, key=None, filename=None):
"""
Reads and loads in to "obj" a single key or all keys from source file.
:param obj: the settings instance
:param env: settings current env default='development'
:param silent: if errors should raise
:param key: if defined load a single key, else load all in env
:param filename: Optional custom filename to load
:return: None
"""
if ConfigObj is None: # pragma: no cover
BaseLoader.warn_not_installed(obj, "ini")
return
loader = BaseLoader(
obj=obj,
env=env,
identifier="ini",
extensions=INI_EXTENSIONS,
file_reader=lambda fileobj: ConfigObj(fileobj).dict(),
string_reader=lambda strobj: ConfigObj(strobj.split("\n")).dict(),
)
loader.load(
filename=filename,
key=key,
silent=silent,
)
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
with open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
) as open_file:
object_merge(ConfigObj(open_file).dict(), settings_data)
new = ConfigObj()
new.update(settings_data)
new.write(open(str(settings_path), "bw"))
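# A sketch of the sectioned INI shape this loader consumes when environments
# are enabled; section names are matched against env names (content is
# hypothetical, and configobj is the optional dependency noted above):
SAMPLE_INI = """\
[default]
name = bazarr
[development]
name = bazarr-dev
"""
if ConfigObj is not None:
    print(ConfigObj(SAMPLE_INI.split("\n")).dict())
    # {'default': {'name': 'bazarr'}, 'development': {'name': 'bazarr-dev'}}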

@ -0,0 +1,80 @@
from __future__ import annotations
import io
import json
from pathlib import Path
from dynaconf import default_settings
from dynaconf.constants import JSON_EXTENSIONS
from dynaconf.loaders.base import BaseLoader
from dynaconf.utils import object_merge
from dynaconf.utils.parse_conf import try_to_encode
try: # pragma: no cover
import commentjson
except ImportError: # pragma: no cover
commentjson = None
def load(obj, env=None, silent=True, key=None, filename=None):
"""
Reads and loads in to "obj" a single key or all keys from source file.
:param obj: the settings instance
:param env: settings current env default='development'
:param silent: if errors should raise
:param key: if defined load a single key, else load all in env
:param filename: Optional custom filename to load
:return: None
"""
if (
obj.get("COMMENTJSON_ENABLED_FOR_DYNACONF") and commentjson
): # pragma: no cover # noqa
file_reader = commentjson.load
string_reader = commentjson.loads
else:
file_reader = json.load
string_reader = json.loads
loader = BaseLoader(
obj=obj,
env=env,
identifier="json",
extensions=JSON_EXTENSIONS,
file_reader=file_reader,
string_reader=string_reader,
)
loader.load(
filename=filename,
key=key,
silent=silent,
)
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
with open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
) as open_file:
object_merge(json.load(open_file), settings_data)
with open(
str(settings_path),
"w",
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as open_file:
json.dump(settings_data, open_file, cls=DynaconfEncoder)
class DynaconfEncoder(json.JSONEncoder):
"""Transform Dynaconf custom types instances to json representation"""
def default(self, o):
return try_to_encode(o, callback=super().default)
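# A round-trip sketch for the writer above, assuming a writable temp dir;
# DynaconfEncoder only kicks in for dynaconf's lazy/custom value types:
import tempfile, os
_path = os.path.join(tempfile.mkdtemp(), "settings.json")
with open(_path, "w", encoding="utf-8") as _f:
    json.dump({"default": {"NAME": "bazarr"}}, _f, cls=DynaconfEncoder)
with open(_path, encoding="utf-8") as _f:
    print(json.load(_f))  # {'default': {'NAME': 'bazarr'}}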

@ -0,0 +1,148 @@
from __future__ import annotations
import errno
import importlib
import inspect
import io
import types
from contextlib import suppress
from pathlib import Path
from dynaconf import default_settings
from dynaconf.utils import DynaconfDict
from dynaconf.utils import object_merge
from dynaconf.utils import upperfy
from dynaconf.utils.files import find_file
def load(obj, settings_module, identifier="py", silent=False, key=None):
"""Tries to import a python module"""
mod, loaded_from = get_module(obj, settings_module, silent)
if not (mod and loaded_from):
return
load_from_python_object(obj, mod, settings_module, key, identifier)
def load_from_python_object(
obj, mod, settings_module, key=None, identifier=None
):
file_merge = getattr(mod, "dynaconf_merge", False) or getattr(
mod, "DYNACONF_MERGE", False
)
for setting in dir(mod):
# A setting var in a Python file should start with upper case
# valid: A_value=1, ABC_value=3, A_BBB__default=1
# invalid: a_value=1, MyValue=3
# This is to avoid loading functions, classes and built-ins
if setting.split("__")[0].isupper():
if key is None or key == setting:
setting_value = getattr(mod, setting)
obj.set(
setting,
setting_value,
loader_identifier=identifier,
merge=file_merge,
)
obj._loaded_py_modules.append(mod.__name__)
obj._loaded_files.append(mod.__file__)
def try_to_load_from_py_module_name(
obj, name, key=None, identifier="py", silent=False
):
"""Try to load module by its string name.
Arguments:
obj {LazySettings} -- Dynaconf settings instance
name {str} -- Name of the module e.g: foo.bar.zaz
Keyword Arguments:
key {str} -- Single key to be loaded (default: {None})
identifier {str} -- Name of identifier to store (default: 'py')
silent {bool} -- Whether to raise or silence exceptions.
"""
ctx = suppress(ImportError, TypeError) if silent else suppress()
with ctx:
mod = importlib.import_module(str(name))
load_from_python_object(obj, mod, name, key, identifier)
return True # loaded ok!
# reaching this point means an exception occurred and the module was not loaded
return False
def get_module(obj, filename, silent=False):
try:
mod = importlib.import_module(filename)
loaded_from = "module"
mod.is_error = False
except (ImportError, TypeError):
mod = import_from_filename(obj, filename, silent=silent)
if mod and not mod._is_error:
loaded_from = "filename"
else:
# it is important to return None in case of not loaded
loaded_from = None
return mod, loaded_from
def import_from_filename(obj, filename, silent=False): # pragma: no cover
"""If settings_module is a filename path import it."""
if filename in [item.filename for item in inspect.stack()]:
raise ImportError(
"Looks like you are loading dynaconf "
f"from inside the {filename} file and then it is trying "
"to load itself entering in a circular reference "
"problem. To solve it you have to "
"invoke your program from another root folder "
"or rename your program file."
)
_find_file = getattr(obj, "find_file", find_file)
if not filename.endswith(".py"):
filename = f"{filename}.py"
if filename in default_settings.SETTINGS_FILE_FOR_DYNACONF:
silent = True
mod = types.ModuleType(filename[:-3])  # strip ".py" suffix (rstrip would eat trailing p/y chars)
mod.__file__ = filename
mod._is_error = False
mod._error = None
try:
with open(
_find_file(filename),
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as config_file:
exec(compile(config_file.read(), filename, "exec"), mod.__dict__)
except OSError as e:
e.strerror = (
f"py_loader: error loading file " f"({e.strerror} {filename})\n"
)
if silent and e.errno in (errno.ENOENT, errno.EISDIR):
return
mod._is_error = True
mod._error = e
return mod
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
existing = DynaconfDict()
load(existing, str(settings_path))
object_merge(existing, settings_data)
with open(
str(settings_path),
"w",
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as f:
f.writelines(
[f"{upperfy(k)} = {repr(v)}\n" for k, v in settings_data.items()]
)
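# A sketch of the .py output produced by write() above: every key is
# upperfied and emitted as a plain assignment (values are hypothetical;
# upperfy additionally preserves "__" nested markers, so plain upper() is
# only close enough for this sketch):
_data = {"name": "bazarr", "port": 6767}
print("".join(f"{k.upper()} = {v!r}\n" for k, v in _data.items()))
# NAME = 'bazarr'
# PORT = 6767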

@ -0,0 +1,108 @@
from __future__ import annotations
from dynaconf.utils import build_env_list
from dynaconf.utils import upperfy
from dynaconf.utils.parse_conf import parse_conf_data
from dynaconf.utils.parse_conf import unparse_conf_data
try:
from redis import StrictRedis
except ImportError:
StrictRedis = None
IDENTIFIER = "redis"
def load(obj, env=None, silent=True, key=None):
"""Reads and loads in to "settings" a single key or all keys from redis
:param obj: the settings instance
:param env: settings env default='DYNACONF'
:param silent: if errors should raise
:param key: if defined load a single key, else load all in env
:return: None
"""
if StrictRedis is None:
raise ImportError(
"redis package is not installed in your environment. "
"`pip install dynaconf[redis]` or disable the redis loader with "
"export REDIS_ENABLED_FOR_DYNACONF=false"
)
redis = StrictRedis(**obj.get("REDIS_FOR_DYNACONF"))
prefix = obj.get("ENVVAR_PREFIX_FOR_DYNACONF")
# prefix is added to env_list to keep backwards compatibility
env_list = [prefix] + build_env_list(obj, env or obj.current_env)
for env_name in env_list:
holder = f"{prefix.upper()}_{env_name.upper()}"
try:
if key:
value = redis.hget(holder.upper(), key)
if value:
parsed_value = parse_conf_data(
value, tomlfy=True, box_settings=obj
)
if parsed_value:
obj.set(key, parsed_value)
else:
data = {
key: parse_conf_data(value, tomlfy=True, box_settings=obj)
for key, value in redis.hgetall(holder.upper()).items()
}
if data:
obj.update(data, loader_identifier=IDENTIFIER)
except Exception:
if silent:
return False
raise
def write(obj, data=None, **kwargs):
"""Write a value in to loader source
:param obj: settings object
:param data: vars to be stored
:param kwargs: vars to be stored
:return:
"""
if obj.REDIS_ENABLED_FOR_DYNACONF is False:
raise RuntimeError(
"Redis is not configured \n"
"export REDIS_ENABLED_FOR_DYNACONF=true\n"
"and configure the REDIS_*_FOR_DYNACONF variables"
)
client = StrictRedis(**obj.REDIS_FOR_DYNACONF)
holder = obj.get("ENVVAR_PREFIX_FOR_DYNACONF").upper()
# add env to holder
holder = f"{holder}_{obj.current_env.upper()}"
data = data or {}
data.update(kwargs)
if not data:
raise AttributeError("Data must be provided")
redis_data = {
upperfy(key): unparse_conf_data(value) for key, value in data.items()
}
client.hmset(holder.upper(), redis_data)
load(obj)
def delete(obj, key=None):
"""
Delete a single key if specified, or the whole env if key is None
:param obj: settings object
:param key: key to delete from store location
:return: None
"""
client = StrictRedis(**obj.REDIS_FOR_DYNACONF)
holder = obj.get("ENVVAR_PREFIX_FOR_DYNACONF").upper()
# add env to holder
holder = f"{holder}_{obj.current_env.upper()}"
if key:
client.hdel(holder.upper(), upperfy(key))
obj.unset(key)
else:
keys = client.hkeys(holder.upper())
client.delete(holder.upper())
obj.unset_all(keys)
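# Storage layout sketch: each env lives in one redis hash named
# "<ENVVAR_PREFIX>_<ENV>" (e.g. the hypothetical DYNACONF_DEVELOPMENT), with
# upperfied field names and tagged string values from unparse_conf_data:
print(unparse_conf_data(6767))  # a tagged string such as "@int 6767"
# so a write() followed by load() round-trips typed values through redis.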

@ -0,0 +1,122 @@
from __future__ import annotations
import warnings
from pathlib import Path
from dynaconf import default_settings
from dynaconf.constants import TOML_EXTENSIONS
from dynaconf.loaders.base import BaseLoader
from dynaconf.utils import object_merge
from dynaconf.vendor import toml # Backwards compatibility with uiri/toml
from dynaconf.vendor import tomllib # New tomllib stdlib on py3.11
def load(obj, env=None, silent=True, key=None, filename=None):
"""
Reads and loads in to "obj" a single key or all keys from source file.
:param obj: the settings instance
:param env: settings current env default='development'
:param silent: if errors should raise
:param key: if defined load a single key, else load all in env
:param filename: Optional custom filename to load
:return: None
"""
try:
loader = BaseLoader(
obj=obj,
env=env,
identifier="toml",
extensions=TOML_EXTENSIONS,
file_reader=tomllib.load,
string_reader=tomllib.loads,
opener_params={"mode": "rb"},
)
loader.load(
filename=filename,
key=key,
silent=silent,
)
except UnicodeDecodeError: # pragma: no cover
"""
NOTE: Compat functions exists to keep backwards compatibility with
the new tomllib library. The old library was called `toml` and
the new one is called `tomllib`.
The old lib uiri/toml allowed unicode characters and re-added files
as string.
The new tomllib (stdlib) does not allow unicode characters, only
utf-8 encoded, and read files as binary.
NOTE: In dynaconf 4.0.0 we will drop support for the old library
removing the compat functions and calling directly the new lib.
"""
loader = BaseLoader(
obj=obj,
env=env,
identifier="toml",
extensions=TOML_EXTENSIONS,
file_reader=toml.load,
string_reader=toml.loads,
)
loader.load(
filename=filename,
key=key,
silent=silent,
)
warnings.warn(
"TOML files should have only UTF-8 encoded characters. "
"starting on 4.0.0 dynaconf will stop allowing invalid chars.",
)
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
try: # tomllib first
with open(str(settings_path), "rb") as open_file:
object_merge(tomllib.load(open_file), settings_data)
except UnicodeDecodeError: # pragma: no cover
# uiri/toml fallback (TBR on 4.0.0)
with open(
str(settings_path),
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as open_file:
object_merge(toml.load(open_file), settings_data)
try: # tomllib first
with open(str(settings_path), "wb") as open_file:
tomllib.dump(encode_nulls(settings_data), open_file)
except UnicodeEncodeError: # pragma: no cover
# uiri/toml fallback (TBR on 4.0.0)
with open(
str(settings_path),
"w",
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as open_file:
toml.dump(encode_nulls(settings_data), open_file)
warnings.warn(
"TOML files should have only UTF-8 encoded characters. "
"starting on 4.0.0 dynaconf will stop allowing invalid chars.",
)
def encode_nulls(data):
"""TOML does not support `None` so this function transforms to '@none '."""
if data is None:
return "@none "
if isinstance(data, dict):
return {key: encode_nulls(value) for key, value in data.items()}
elif isinstance(data, (list, tuple)):
return [encode_nulls(item) for item in data]
return data
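# encode_nulls() in action: TOML cannot represent null, so None values are
# replaced with the "@none " marker that dynaconf parses back to None on read:
print(encode_nulls({"a": None, "items": [None, 1]}))
# {'a': '@none ', 'items': ['@none ', 1]}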

@ -0,0 +1,186 @@
# docker run -e 'VAULT_DEV_ROOT_TOKEN_ID=myroot' -p 8200:8200 vault
# pip install hvac
from __future__ import annotations
from dynaconf.utils import build_env_list
from dynaconf.utils.parse_conf import parse_conf_data
try:
import boto3
except ImportError:
boto3 = None
try:
from hvac import Client
from hvac.exceptions import InvalidPath
except ImportError:
raise ImportError(
"vault package is not installed in your environment. "
"`pip install dynaconf[vault]` or disable the vault loader with "
"export VAULT_ENABLED_FOR_DYNACONF=false"
)
IDENTIFIER = "vault"
# backwards compatibility
_get_env_list = build_env_list
def get_client(obj):
client = Client(
**{k: v for k, v in obj.VAULT_FOR_DYNACONF.items() if v is not None}
)
if obj.VAULT_ROLE_ID_FOR_DYNACONF is not None:
client.auth.approle.login(
role_id=obj.VAULT_ROLE_ID_FOR_DYNACONF,
secret_id=obj.get("VAULT_SECRET_ID_FOR_DYNACONF"),
)
elif obj.VAULT_ROOT_TOKEN_FOR_DYNACONF is not None:
client.token = obj.VAULT_ROOT_TOKEN_FOR_DYNACONF
elif obj.VAULT_AUTH_WITH_IAM_FOR_DYNACONF:
if boto3 is None:
raise ImportError(
"boto3 package is not installed in your environment. "
"`pip install boto3` or disable the VAULT_AUTH_WITH_IAM"
)
session = boto3.Session()
credentials = session.get_credentials()
client.auth.aws.iam_login(
credentials.access_key,
credentials.secret_key,
credentials.token,
role=obj.VAULT_AUTH_ROLE_FOR_DYNACONF,
)
assert client.is_authenticated(), (
"Vault authentication error: is VAULT_TOKEN_FOR_DYNACONF or "
"VAULT_ROLE_ID_FOR_DYNACONF defined?"
)
client.secrets.kv.default_kv_version = obj.VAULT_KV_VERSION_FOR_DYNACONF
return client
def load(obj, env=None, silent=None, key=None):
"""Reads and loads in to "settings" a single key or all keys from vault
:param obj: the settings instance
:param env: settings env default='DYNACONF'
:param silent: if errors should raise
:param key: if defined load a single key, else load all in env
:return: None
"""
client = get_client(obj)
try:
if obj.VAULT_KV_VERSION_FOR_DYNACONF == 2:
dirs = client.secrets.kv.v2.list_secrets(
path=obj.VAULT_PATH_FOR_DYNACONF,
mount_point=obj.VAULT_MOUNT_POINT_FOR_DYNACONF,
)["data"]["keys"]
else:
dirs = client.secrets.kv.v1.list_secrets(
path=obj.VAULT_PATH_FOR_DYNACONF,
mount_point=obj.VAULT_MOUNT_POINT_FOR_DYNACONF,
)["data"]["keys"]
except InvalidPath:
# The given path is not a directory
dirs = []
# First, look for secrets in the environment-less store
if not obj.ENVIRONMENTS_FOR_DYNACONF:
# By adding '', dynaconf will also read secrets from the environment-less
# store, i.e. entries that `dynaconf write` does not write to the Vault store
env_list = [obj.MAIN_ENV_FOR_DYNACONF.lower(), ""]
# Otherwise, look for secrets in all the environments
else:
env_list = dirs + build_env_list(obj, env)
for env in env_list:
path = "/".join([obj.VAULT_PATH_FOR_DYNACONF, env])
try:
if obj.VAULT_KV_VERSION_FOR_DYNACONF == 2:
data = client.secrets.kv.v2.read_secret_version(
path, mount_point=obj.VAULT_MOUNT_POINT_FOR_DYNACONF
)
else:
data = client.secrets.kv.read_secret(
"data/" + path,
mount_point=obj.VAULT_MOUNT_POINT_FOR_DYNACONF,
)
except InvalidPath:
# If the path doesn't exist, ignore it and set data to None
data = None
if data:
# There seems to be a data dict within a data dict,
# extract the inner data
data = data.get("data", {}).get("data", {})
try:
if (
obj.VAULT_KV_VERSION_FOR_DYNACONF == 2
and obj.ENVIRONMENTS_FOR_DYNACONF
and data
):
data = data.get("data", {})
if data and key:
value = parse_conf_data(
data.get(key), tomlfy=True, box_settings=obj
)
if value:
obj.set(key, value)
elif data:
obj.update(data, loader_identifier=IDENTIFIER, tomlfy=True)
except Exception:
if silent:
return False
raise
def write(obj, data=None, **kwargs):
"""Write a value in to loader source
:param obj: settings object
:param data: vars to be stored
:param kwargs: vars to be stored
:return:
"""
if obj.VAULT_ENABLED_FOR_DYNACONF is False:
raise RuntimeError(
"Vault is not configured \n"
"export VAULT_ENABLED_FOR_DYNACONF=true\n"
"and configure the VAULT_FOR_DYNACONF_* variables"
)
data = data or {}
data.update(kwargs)
if not data:
raise AttributeError("Data must be provided")
data = {"data": data}
client = get_client(obj)
if obj.VAULT_KV_VERSION_FOR_DYNACONF == 1:
mount_point = obj.VAULT_MOUNT_POINT_FOR_DYNACONF + "/data"
else:
mount_point = obj.VAULT_MOUNT_POINT_FOR_DYNACONF
path = "/".join([obj.VAULT_PATH_FOR_DYNACONF, obj.current_env.lower()])
client.secrets.kv.create_or_update_secret(
path, secret=data, mount_point=mount_point
)
load(obj)
def list_envs(obj, path=""):
"""
This helper returns a list of all the envs that exist in
the data source; the typical use case is:
existing_envs = vault_loader.list_envs(settings)
for env in existing_envs:
with settings.using_env(env): # switch to the env
# do something with a key of that env
:param obj: settings object
:param path: path to the vault secrets
:return: list containing all the keys at the given path
"""
client = get_client(obj)
path = path or obj.get("VAULT_PATH_FOR_DYNACONF")
try:
return client.list(f"/secret/metadata/{path}")["data"]["keys"]
except TypeError:
return []
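# A minimal configuration sketch for this loader, matching the dev-mode
# docker one-liner at the top of this file (values are hypothetical):
# export VAULT_ENABLED_FOR_DYNACONF=true
# export VAULT_URL_FOR_DYNACONF="http://localhost:8200"
# export VAULT_ROOT_TOKEN_FOR_DYNACONF="myroot"
# get_client() then authenticates with the root token and load() walks
# "<VAULT_PATH_FOR_DYNACONF>/<env>" for each env in the env list.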

@ -0,0 +1,87 @@
from __future__ import annotations
import io
from pathlib import Path
from warnings import warn
from dynaconf import default_settings
from dynaconf.constants import YAML_EXTENSIONS
from dynaconf.loaders.base import BaseLoader
from dynaconf.utils import object_merge
from dynaconf.utils.parse_conf import try_to_encode
from dynaconf.vendor.ruamel import yaml
# Add support for Dynaconf Lazy values to YAML dumper
yaml.SafeDumper.yaml_representers[
None
] = lambda self, data: yaml.representer.SafeRepresenter.represent_str(
self, try_to_encode(data)
)
def load(obj, env=None, silent=True, key=None, filename=None):
"""
Reads and loads in to "obj" a single key or all keys from source file.
:param obj: the settings instance
:param env: settings current env default='development'
:param silent: if errors should raise
:param key: if defined load a single key, else load all in env
:param filename: Optional custom filename to load
:return: None
"""
# Resolve the loaders
# https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation
# Possible values are `safe_load, full_load, unsafe_load, load`
yaml_reader = getattr(
yaml, obj.get("YAML_LOADER_FOR_DYNACONF"), yaml.safe_load
)
if yaml_reader.__name__ == "unsafe_load": # pragma: no cover
warn(
"yaml.unsafe_load is deprecated."
" Please read https://msg.pyyaml.org/load for full details."
" Try to use full_load or safe_load."
)
loader = BaseLoader(
obj=obj,
env=env,
identifier="yaml",
extensions=YAML_EXTENSIONS,
file_reader=yaml_reader,
string_reader=yaml_reader,
)
loader.load(
filename=filename,
key=key,
silent=silent,
)
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
with open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
) as open_file:
object_merge(yaml.safe_load(open_file), settings_data)
with open(
str(settings_path),
"w",
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as open_file:
yaml.dump(
settings_data,
open_file,
Dumper=yaml.dumper.SafeDumper,
explicit_start=True,
indent=2,
default_flow_style=False,
)
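# A sketch of the env-sectioned YAML shape this loader consumes (content is
# hypothetical), parsed with the same default reader used above:
_SAMPLE = """
default:
  name: bazarr
development:
  name: bazarr-dev
"""
print(yaml.safe_load(_SAMPLE))
# {'default': {'name': 'bazarr'}, 'development': {'name': 'bazarr-dev'}}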
