From 55037cfde26ac72d74f1828551438751955b9f32 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Louis=20V=C3=A9zina?= <5130500+morpheus65535@users.noreply.github.com>
Date: Tue, 15 Oct 2019 17:18:51 -0400
Subject: [PATCH] Major fixes to database subsystem.

---
 bazarr/database.py       | 67 ++++++++++++++++++++--------------------
 bazarr/get_subtitle.py   |  4 +--
 bazarr/list_subtitles.py | 10 +++---
 bazarr/main.py           |  9 ++----
 4 files changed, 44 insertions(+), 46 deletions(-)

diff --git a/bazarr/database.py b/bazarr/database.py
index 94846abd2..9c96b7772 100644
--- a/bazarr/database.py
+++ b/bazarr/database.py
@@ -3,19 +3,16 @@ import atexit
 
 from get_args import args
 from peewee import *
-from playhouse.sqliteq import SqliteQueueDatabase
 from playhouse.migrate import *
 
 from helper import path_replace, path_replace_movie, path_replace_reverse, path_replace_reverse_movie
 
-database = SqliteQueueDatabase(
-    None,
-    use_gevent=False,
-    autostart=False,
-    queue_max_size=256,  # Max. # of pending writes that can accumulate.
-    results_timeout=30.0)  # Max. time to wait for query to be executed.
-
-migrator = SqliteMigrator(database)
+database = SqliteDatabase(os.path.join(args.config_dir, 'db', 'bazarr.db'))
+database.pragma('wal_checkpoint', 'TRUNCATE')  # Run a checkpoint and merge remaining wal-journal.
+database.timeout = 30  # Number of seconds to wait for the database lock.
+database.cache_size = -1024  # Number of KB of cache for wal-journal.
+                             # Must be negative because positive means number of pages.
+database.wal_autocheckpoint = 50  # Run an automatic checkpoint every 50 write transactions.
 
 
 @database.func('path_substitution')
@@ -61,10 +58,6 @@ class TableShows(BaseModel):
     tvdb_id = IntegerField(column_name='tvdbId', null=True, unique=True, primary_key=True)
     year = TextField(null=True)
 
-    migrate(
-        migrator.add_column('table_shows', 'forced', forced),
-    )
-
     class Meta:
         table_name = 'table_shows'
 
@@ -87,10 +80,6 @@ class TableEpisodes(BaseModel):
     video_codec = TextField(null=True)
     episode_file_id = IntegerField(null=True)
 
-    migrate(
-        migrator.add_column('table_episodes', 'episode_file_id', episode_file_id),
-    )
-
     class Meta:
         table_name = 'table_episodes'
         primary_key = False
@@ -123,11 +112,6 @@ class TableMovies(BaseModel):
     year = TextField(null=True)
     movie_file_id = IntegerField(null=True)
 
-    migrate(
-        migrator.add_column('table_movies', 'forced', forced),
-        migrator.add_column('table_movies', 'movie_file_id', movie_file_id),
-    )
-
     class Meta:
         table_name = 'table_movies'
 
@@ -183,20 +167,37 @@ class TableSettingsNotifier(BaseModel):
         table_name = 'table_settings_notifier'
 
 
-def database_init():
-    database.init(os.path.join(args.config_dir, 'db', 'bazarr.db'))
-    database.start()
-    database.connect()
+# Create database tables if they don't exist
+models_list = [TableShows, TableEpisodes, TableMovies, TableHistory, TableHistoryMovie, TableSettingsLanguages,
+               TableSettingsNotifier, System]
+database.create_tables(models_list, safe=True)
 
-    database.pragma('wal_checkpoint', 'TRUNCATE')  # Run a checkpoint and merge remaining wal-journal.
-    database.cache_size = -1024  # Number of KB of cache for wal-journal.
-                                 # Must be negative because positive means number of pages.
-    database.wal_autocheckpoint = 50  # Run an automatic checkpoint every 50 write transactions.
 
-    models_list = [TableShows, TableEpisodes, TableMovies, TableHistory, TableHistoryMovie, TableSettingsLanguages,
-                   TableSettingsNotifier, System]
+# Database migration
+migrator = SqliteMigrator(database)
 
-    database.create_tables(models_list, safe=True)
+# TableShows migration
+table_shows_columns = []
+for column in database.get_columns('table_shows'):
+    table_shows_columns.append(column.name)
+if 'forced' not in table_shows_columns:
+    migrate(migrator.add_column('table_shows', 'forced', TableShows.forced))
+
+# TableEpisodes migration
+table_episodes_columns = []
+for column in database.get_columns('table_episodes'):
+    table_episodes_columns.append(column.name)
+if 'episode_file_id' not in table_episodes_columns:
+    migrate(migrator.add_column('table_episodes', 'episode_file_id', TableEpisodes.episode_file_id))
+
+# TableMovies migration
+table_movies_columns = []
+for column in database.get_columns('table_movies'):
+    table_movies_columns.append(column.name)
+if 'forced' not in table_movies_columns:
+    migrate(migrator.add_column('table_movies', 'forced', TableMovies.forced))
+if 'movie_file_id' not in table_movies_columns:
+    migrate(migrator.add_column('table_movies', 'movie_file_id', TableMovies.movie_file_id))
 
 
 def wal_cleaning():
diff --git a/bazarr/get_subtitle.py b/bazarr/get_subtitle.py
index 65fc00e84..9207e6b9e 100644
--- a/bazarr/get_subtitle.py
+++ b/bazarr/get_subtitle.py
@@ -33,7 +33,7 @@ from get_providers import get_providers, get_providers_auth, provider_throttle,
 from get_args import args
 from queueconfig import notifications
 from pyprobe.pyprobe import VideoFileParser
-from database import TableShows, TableEpisodes, TableMovies, TableHistory, TableHistoryMovie
+from database import database, TableShows, TableEpisodes, TableMovies, TableHistory, TableHistoryMovie
 from peewee import fn, JOIN
 from analytics import track_event
 
@@ -620,7 +620,7 @@ def series_download_subtitles(no):
 
 def episode_download_subtitles(no):
     episodes_details_clause = [
-        (TableEpisodes.sonarr_series_id == no)
+        (TableEpisodes.sonarr_episode_id == no)
     ]
     if settings.sonarr.getboolean('only_monitored'):
         episodes_details_clause.append(
diff --git a/bazarr/list_subtitles.py b/bazarr/list_subtitles.py
index 2c15d4a31..58abb1762 100644
--- a/bazarr/list_subtitles.py
+++ b/bazarr/list_subtitles.py
@@ -222,11 +222,11 @@ def store_subtitles_movie(file):
 
 
 def list_missing_subtitles(no=None):
-    episodes_subtitles_clause = {TableShows.sonarr_series_id.is_null(False)}
+    episodes_subtitles_clause = (TableShows.sonarr_series_id.is_null(False))
     if no is not None:
-        episodes_subtitles_clause = {TableShows.sonarr_series_id ** no}
-
+        episodes_subtitles_clause = (TableShows.sonarr_series_id == no)
     episodes_subtitles = TableEpisodes.select(
+        TableShows.sonarr_series_id,
         TableEpisodes.sonarr_episode_id,
         TableEpisodes.subtitles,
         TableShows.languages,
@@ -288,9 +288,9 @@ def list_missing_subtitles(no=None):
 
 
 def list_missing_subtitles_movies(no=None):
-    movies_subtitles_clause = {TableMovies.radarr_id.is_null(False)}
+    movies_subtitles_clause = (TableMovies.radarr_id.is_null(False))
     if no is not None:
-        movies_subtitles_clause = {TableMovies.radarr_id ** no}
+        movies_subtitles_clause = (TableMovies.radarr_id == no)
 
     movies_subtitles = TableMovies.select(
         TableMovies.radarr_id,
diff --git a/bazarr/main.py b/bazarr/main.py
index 9b42594a4..b52688e47 100644
--- a/bazarr/main.py
+++ b/bazarr/main.py
@@ -23,12 +23,9 @@ from calendar import day_name
 
 from get_args import args
 from init import *
-from database import database, database_init, TableEpisodes, TableShows, TableMovies, TableHistory, TableHistoryMovie, \
+from database import database, TableEpisodes, TableShows, TableMovies, TableHistory, TableHistoryMovie, \
     TableSettingsLanguages, TableSettingsNotifier, System
 
-# Initiate database
-database_init()
-
 from notifier import update_notifier
 from logger import configure_logging, empty_log
 
@@ -736,7 +733,7 @@ def edit_series(no):
             TableShows.forced: forced
         }
     ).where(
-        TableShows.sonarr_series_id ** no
+        TableShows.sonarr_series_id == no
    ).execute()
 
     list_missing_subtitles(no)
@@ -809,7 +806,7 @@ def episodes(no):
         fn.path_substitution(TableShows.path).alias('path'),
         TableShows.forced
     ).where(
-        TableShows.sonarr_series_id ** str(no)
+        TableShows.sonarr_series_id == no
     ).limit(1)
     for series in series_details:
         tvdbid = series.tvdb_id
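
Notes on the approach (editorial sketch below, not part of the commit): the patch drops the SqliteQueueDatabase wrapper and the explicit database_init() step. A plain peewee SqliteDatabase is opened at import time, create_tables(safe=True) creates any missing tables, and every add_column migration is guarded by a lookup of the existing columns via database.get_columns(), so the module can be imported repeatedly against an existing bazarr.db. The query fixes replace peewee's ** operator (case-insensitive LIKE) and the set-literal clauses with plain == comparisons on the Sonarr/Radarr ids, and episode_download_subtitles() now filters on sonarr_episode_id instead of sonarr_series_id. A minimal standalone sketch of the guarded-migration pattern, using an assumed database path and illustrative table/column names:

# guarded_migration_sketch.py -- illustrative names, not taken from the commit
from peewee import SqliteDatabase, Model, TextField
from playhouse.migrate import SqliteMigrator, migrate

database = SqliteDatabase('/tmp/bazarr-example.db')  # assumed path for the sketch
migrator = SqliteMigrator(database)


class TableShows(Model):
    title = TextField(null=True)
    forced = TextField(null=True)  # column introduced by a later schema revision

    class Meta:
        database = database
        table_name = 'table_shows'


# Create any missing tables; a no-op for tables that already exist.
database.create_tables([TableShows], safe=True)

# Only add the column when it is missing; an unconditional add_column would
# make SQLite raise "duplicate column name" on every subsequent start.
existing_columns = [column.name for column in database.get_columns('table_shows')]
if 'forced' not in existing_columns:
    migrate(migrator.add_column('table_shows', 'forced', TableShows.forced))

Because create_tables(safe=True) and the column check are both no-ops once the schema is current, the block can run on every startup without touching tables that are already up to date.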