Implement automatic garbage collection and forcing after a full scan

pull/56/merge
morpheus65535 7 years ago
parent 0a6b82eedd
commit 08559e4167

@@ -1,5 +1,6 @@
bazarr_version = '0.4.0'
import gc
import os
import sys
sys.path.insert(0,os.path.join(os.path.dirname(__file__), 'libs/'))
@@ -26,10 +27,6 @@ import urllib
import math
import ast
from init_db import *
from update_db import *
from update_modules import *
import logging
from logging.handlers import TimedRotatingFileHandler
@@ -74,6 +71,10 @@ def configure_logging():
configure_logging()
from init_db import *
from update_db import *
from update_modules import *
from get_languages import *
from get_providers import *
@@ -810,7 +811,6 @@ def api_history():
    c.close()
    return dict(subtitles=data)
logging.info('Bazarr is started and waiting for request on http://' + str(ip) + ':' + str(port) + str(base_url))
run(host=ip, port=port, server='waitress')
logging.info('Bazarr has been stopped.')
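In bazarr.py the change, besides importing gc, moves the from init_db / update_db / update_modules star imports below the configure_logging() call, so anything those modules log while being imported goes through the configured handlers instead of being lost. A minimal sketch of that ordering follows; the handler settings and log file name are assumptions, not Bazarr's exact configuration.

import logging
from logging.handlers import TimedRotatingFileHandler

def configure_logging(log_file='bazarr_sketch.log'):
    # Route the root logger to a rotating file; format and rotation settings
    # here are illustrative only.
    handler = TimedRotatingFileHandler(log_file, when='midnight', backupCount=7)
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s %(message)s'))
    root = logging.getLogger()
    root.setLevel(logging.INFO)
    root.addHandler(handler)

configure_logging()

# Imports whose module-level code logs (in the diff: from init_db import *,
# from update_db import *, from update_modules import *) must come after the
# call above, or their messages bypass the file handler entirely.
logging.info('import-time messages emitted after this point reach the log file')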

@@ -1,3 +1,4 @@
import gc
import os
import enzyme
import babelfish
@@ -103,6 +104,8 @@ def full_scan_subtitles():
    for episode in episodes:
        store_subtitles(path_replace(episode[0]))
    gc.collect()

def series_scan_subtitles(no):
    conn_db = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'data/db/bazarr.db'), timeout=30)
    c_db = conn_db.cursor()
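In get_subtitle.py the commit forces a collection pass once full_scan_subtitles() has walked every episode, so memory held by objects created while probing each media file is reclaimed right away rather than on the generational collector's own schedule. Below is a minimal standalone sketch of that pattern; the episode list and the store_subtitles() body are placeholders, and only the gc.collect() placement mirrors the diff.

import gc

def store_subtitles(path):
    # Placeholder for the real routine that inspects one media file for
    # embedded and external subtitles and records what it finds.
    pass

def full_scan_subtitles():
    # Hypothetical episode rows standing in for the database query result.
    episodes = [('/tv/show/s01e01.mkv',), ('/tv/show/s01e02.mkv',)]
    for episode in episodes:
        store_subtitles(episode[0])
    # Force a full collection once the whole scan is done, instead of waiting
    # for CPython's generational collector to get around to it.
    gc.collect()

if __name__ == '__main__':
    full_scan_subtitles()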
