Merge branch 'development'

pull/1097/head v0.9.0.2
Louis Vézina
commit f7c85f09e0

@@ -76,9 +76,7 @@ If you need something that is not already part of Bazarr, feel free to create a
## Screenshot
You can get more in the [screenshot](https://github.com/morpheus65535/bazarr/tree/master/screenshot) directory but it should look familiar:
![Series](/screenshot/1-series/series-2-episodes.png?raw=true "Series")
![Bazarr](/screenshot/bazarr-screenshot.png?raw=true "Bazarr")
### License

@@ -30,7 +30,7 @@ from list_subtitles import store_subtitles, store_subtitles_movie, series_scan_s
list_missing_subtitles, list_missing_subtitles_movies
from utils import history_log, history_log_movie, blacklist_log, blacklist_delete, blacklist_delete_all, \
blacklist_log_movie, blacklist_delete_movie, blacklist_delete_all_movie, get_sonarr_version, get_radarr_version, \
delete_subtitles
delete_subtitles, subtitles_apply_mods
from get_providers import get_providers, get_providers_auth, list_throttled_providers, reset_throttled_providers
from event_handler import event_stream
from scheduler import scheduler
@@ -77,7 +77,7 @@ class Restart(Resource):
webserver.restart()
class Badges(Resource):
class BadgesSeries(Resource):
@authenticate
def get(self):
missing_episodes = database.execute("SELECT table_shows.tags, table_episodes.monitored, table_shows.seriesType "
@@ -87,14 +87,30 @@ class Badges(Resource):
missing_episodes = filter_exclusions(missing_episodes, 'series')
missing_episodes = len(missing_episodes)
result = {
"missing_episodes": missing_episodes
}
return jsonify(result)
class BadgesMovies(Resource):
@authenticate
def get(self):
missing_movies = database.execute("SELECT tags, monitored FROM table_movies WHERE missing_subtitles is not "
"null AND missing_subtitles != '[]'")
missing_movies = filter_exclusions(missing_movies, 'movie')
missing_movies = len(missing_movies)
result = {
"missing_episodes": missing_episodes,
"missing_movies": missing_movies,
"missing_movies": missing_movies
}
return jsonify(result)
class BadgesProviders(Resource):
@authenticate
def get(self):
result = {
"throttled_providers": len(eval(str(settings.general.throtteled_providers)))
}
return jsonify(result)
@@ -124,6 +140,7 @@ class Notifications(Resource):
database.execute("UPDATE table_settings_notifier SET enabled = ?, url = ? WHERE name = ?",
(item['enabled'], item['url'], item['name']))
save_settings(zip(request.form.keys(), request.form.listvalues()))
return '', 204
@@ -364,6 +381,8 @@ class Series(Resource):
list_missing_subtitles(no=seriesId)
event_stream(type='series', action='update', series=seriesId)
return '', 204
@@ -606,7 +625,8 @@ class EpisodesSubtitlesManualDownload(Resource):
subs_id = result[6]
subs_path = result[7]
history_log(2, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, subs_id, subs_path)
send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
if not settings.general.getboolean('dont_notify_manual_actions'):
send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
store_subtitles(path, episodePath)
return result, 201
except OSError:
@@ -653,7 +673,8 @@ class EpisodesSubtitlesUpload(Resource):
provider = "manual"
score = 360
history_log(4, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, subtitles_path=subs_path)
send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
if not settings.general.getboolean('dont_notify_manual_actions'):
send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
store_subtitles(path, episodePath)
return result, 201
@@ -890,6 +911,8 @@ class Movies(Resource):
list_missing_subtitles_movies(no=radarrId)
event_stream(type='movies', action='update', movie=radarrId)
return '', 204
@@ -1057,7 +1080,8 @@ class MovieSubtitlesManualDownload(Resource):
subs_id = result[6]
subs_path = result[7]
history_log_movie(2, radarrId, message, path, language_code, provider, score, subs_id, subs_path)
send_notifications_movie(radarrId, message)
if not settings.general.getboolean('dont_notify_manual_actions'):
send_notifications_movie(radarrId, message)
store_subtitles_movie(path, moviePath)
return result, 201
except OSError:
@@ -1103,7 +1127,8 @@ class MovieSubtitlesUpload(Resource):
provider = "manual"
score = 120
history_log_movie(4, radarrId, message, path, language_code, provider, score, subtitles_path=subs_path)
send_notifications_movie(radarrId, message)
if not settings.general.getboolean('dont_notify_manual_actions'):
send_notifications_movie(radarrId, message)
store_subtitles_movie(path, moviePath)
return result, 201
@@ -1743,6 +1768,18 @@ class SyncSubtitles(Resource):
return '', 200
class SubMods(Resource):
@authenticate
def post(self):
language = request.form.get('language')
subtitles_path = request.form.get('subtitlesPath')
mod = request.form.get('mod')
subtitles_apply_mods(language, subtitles_path, [mod])
return '', 200
class BrowseBazarrFS(Resource):
@authenticate
def get(self):
@@ -1779,7 +1816,9 @@ class BrowseRadarrFS(Resource):
api.add_resource(Shutdown, '/shutdown')
api.add_resource(Restart, '/restart')
api.add_resource(Badges, '/badges')
api.add_resource(BadgesSeries, '/badges_series')
api.add_resource(BadgesMovies, '/badges_movies')
api.add_resource(BadgesProviders, '/badges_providers')
api.add_resource(Languages, '/languages')
api.add_resource(Notifications, '/notifications')
@@ -1839,6 +1878,7 @@ api.add_resource(BlacklistMovieSubtitlesRemove, '/blacklist_movie_subtitles_remo
api.add_resource(BlacklistMovieSubtitlesRemoveAll, '/blacklist_movie_subtitles_remove_all')
api.add_resource(SyncSubtitles, '/sync_subtitles')
api.add_resource(SubMods, '/sub_mods')
api.add_resource(BrowseBazarrFS, '/browse_bazarr_filesystem')
api.add_resource(BrowseSonarrFS, '/browse_sonarr_filesystem')
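
As a quick usage sketch for the resources registered above: the base URL, port and authentication are placeholders (they depend on the instance's base_url and auth settings), 'remove_HI' is just an example Sub-Zero mod identifier, and the alpha-3 language code is an assumption; the form fields match the SubMods.post() handler in this diff.

import requests

BASE = "http://localhost:6767/api"   # placeholder; actual prefix depends on the base_url setting
AUTH = {}                            # placeholder; supply whatever @authenticate expects (API key or session)

# The old /badges resource is now split into three endpoints.
for endpoint in ("badges_series", "badges_movies", "badges_providers"):
    print(endpoint, requests.get("{0}/{1}".format(BASE, endpoint), params=AUTH).json())

# Apply a Sub-Zero mod to an existing subtitles file via the new SubMods resource.
requests.post("{0}/sub_mods".format(BASE), params=AUTH,
              data={'language': 'eng',                        # assumed alpha-3, as consumed by subtitles_apply_mods
                    'subtitlesPath': '/path/to/movie.eng.srt',
                    'mod': 'remove_HI'})                       # example mod identifier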

@@ -58,7 +58,9 @@ defaults = {
'upgrade_manual': 'True',
'anti_captcha_provider': 'None',
'wanted_search_frequency': '3',
'wanted_search_frequency_movie': '3'
'wanted_search_frequency_movie': '3',
'subzero_mods': '',
'dont_notify_manual_actions': 'False'
},
'auth': {
'type': 'None',
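
The two new general-section keys are plain INI values; below is a self-contained approximation of how they are consumed later in this commit (configparser stands in for Bazarr's settings wrapper, and the mod names are examples only).

from configparser import ConfigParser

cfg = ConfigParser()
cfg.read_dict({'general': {'subzero_mods': 'remove_HI,OCR_fixes',
                           'dont_notify_manual_actions': 'False'}})

# 'subzero_mods' is stored as a comma-separated string and split into a list (or None if empty).
raw = cfg.get('general', 'subzero_mods').strip()
subz_mods = raw.split(',') if raw else None                           # ['remove_HI', 'OCR_fixes']

# 'dont_notify_manual_actions' gates notifications after manual downloads/uploads.
notify = not cfg.getboolean('general', 'dont_notify_manual_actions')  # True
print(subz_mods, notify)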

@@ -41,7 +41,11 @@ def update_movies():
tagsDict = get_tags()
# Get movies data from radarr
url_radarr_api_movies = url_radarr() + "/api/movie?apikey=" + apikey_radarr
if radarr_version.startswith('0'):
url_radarr_api_movies = url_radarr() + "/api/movie?apikey=" + apikey_radarr
else:
url_radarr_api_movies = url_radarr() + "/api/v3/movie?apikey=" + apikey_radarr
try:
r = requests.get(url_radarr_api_movies, timeout=60, verify=False)
r.raise_for_status()
@@ -98,10 +102,13 @@ def update_movies():
else:
sceneName = None
if 'alternativeTitles' in movie:
alternativeTitles = str([item['title'] for item in movie['alternativeTitles']])
alternativeTitles = None
if radarr_version.startswith('0'):
if 'alternativeTitles' in movie:
alternativeTitles = str([item['title'] for item in movie['alternativeTitles']])
else:
alternativeTitles = None
if 'alternateTitles' in movie:
alternativeTitles = str([item['title'] for item in movie['alternateTitles']])
if 'imdbId' in movie: imdbId = movie['imdbId']
else: imdbId = None
@@ -117,14 +124,20 @@ def update_movies():
if 'mediaInfo' in movie['movieFile']:
videoFormat = videoCodecID = videoProfile = videoCodecLibrary = None
if 'videoFormat' in movie['movieFile']['mediaInfo']: videoFormat = movie['movieFile']['mediaInfo']['videoFormat']
if radarr_version.startswith('0'):
if 'videoFormat' in movie['movieFile']['mediaInfo']: videoFormat = movie['movieFile']['mediaInfo']['videoFormat']
else:
if 'videoCodec' in movie['movieFile']['mediaInfo']: videoFormat = movie['movieFile']['mediaInfo']['videoCodec']
if 'videoCodecID' in movie['movieFile']['mediaInfo']: videoCodecID = movie['movieFile']['mediaInfo']['videoCodecID']
if 'videoProfile' in movie['movieFile']['mediaInfo']: videoProfile = movie['movieFile']['mediaInfo']['videoProfile']
if 'videoCodecLibrary' in movie['movieFile']['mediaInfo']: videoCodecLibrary = movie['movieFile']['mediaInfo']['videoCodecLibrary']
videoCodec = RadarrFormatVideoCodec(videoFormat, videoCodecID, videoProfile, videoCodecLibrary)
videoCodec = RadarrFormatVideoCodec(videoFormat, videoCodecID, videoCodecLibrary)
audioFormat = audioCodecID = audioProfile = audioAdditionalFeatures = None
if 'audioFormat' in movie['movieFile']['mediaInfo']: audioFormat = movie['movieFile']['mediaInfo']['audioFormat']
if radarr_version.startswith('0'):
if 'audioFormat' in movie['movieFile']['mediaInfo']: audioFormat = movie['movieFile']['mediaInfo']['audioFormat']
else:
if 'audioCodec' in movie['movieFile']['mediaInfo']: audioFormat = movie['movieFile']['mediaInfo']['audioCodec']
if 'audioCodecID' in movie['movieFile']['mediaInfo']: audioCodecID = movie['movieFile']['mediaInfo']['audioCodecID']
if 'audioProfile' in movie['movieFile']['mediaInfo']: audioProfile = movie['movieFile']['mediaInfo']['audioProfile']
if 'audioAdditionalFeatures' in movie['movieFile']['mediaInfo']: audioAdditionalFeatures = movie['movieFile']['mediaInfo']['audioAdditionalFeatures']
@@ -133,7 +146,12 @@ def update_movies():
videoCodec = None
audioCodec = None
audio_language = profile_id_to_language(movie['qualityProfileId'], audio_profiles)
audio_language = None
if radarr_version.startswith('0'):
audio_language = profile_id_to_language(movie['qualityProfileId'], audio_profiles)
else:
if len(movie['movieFile']['languages']):
audio_language = movie['movieFile']['languages'][0]['name']
tags = [d['label'] for d in tagsDict if d['id'] in movie['tags']]
@@ -311,19 +329,19 @@ def RadarrFormatAudioCodec(audioFormat, audioCodecID, audioProfile, audioAdditio
return audioFormat
def RadarrFormatVideoCodec(videoFormat, videoCodecID, videoProfile, videoCodecLibrary):
def RadarrFormatVideoCodec(videoFormat, videoCodecID, videoCodecLibrary):
if videoFormat == "x264": return "h264"
if videoFormat == "AVC" or videoFormat == "V.MPEG4/ISO/AVC": return "h264"
if videoFormat == "HEVC" or videoFormat == "V_MPEGH/ISO/HEVC":
if videoCodecLibrary and (videoFormat == "HEVC" or videoFormat == "V_MPEGH/ISO/HEVC"):
if videoCodecLibrary.startswith("x265"): return "h265"
if videoFormat == "MPEG Video":
if videoCodecID and videoFormat == "MPEG Video":
if videoCodecID == "2" or videoCodecID == "V_MPEG2":
return "Mpeg2"
else:
return "Mpeg"
if videoFormat == "MPEG-1 Video": return "Mpeg"
if videoFormat == "MPEG-2 Video": return "Mpeg2"
if videoFormat == "MPEG-4 Visual":
if videoCodecLibrary and videoCodecID and videoFormat == "MPEG-4 Visual":
if videoCodecID.endswith("XVID") or videoCodecLibrary.startswith("XviD"): return "XviD"
if videoCodecID.endswith("DIV3") or videoCodecID.endswith("DIVX") or videoCodecID.endswith(
"DX50") or videoCodecLibrary.startswith("DivX"): return "DivX"

@@ -5,6 +5,8 @@ import logging
import subliminal_patch
import pretty
import time
import socket
import requests
from get_args import args
from config import settings
@@ -28,7 +30,8 @@ hours_until_end_of_day = time_until_end_of_day().seconds // 3600 + 1
VALID_THROTTLE_EXCEPTIONS = (TooManyRequests, DownloadLimitExceeded, ServiceUnavailable, APIThrottled,
ParseResponseError, IPAddressBlocked)
VALID_COUNT_EXCEPTIONS = ('TooManyRequests', 'ServiceUnavailable', 'APIThrottled')
VALID_COUNT_EXCEPTIONS = ('TooManyRequests', 'ServiceUnavailable', 'APIThrottled', requests.Timeout,
requests.ReadTimeout, socket.timeout)
PROVIDER_THROTTLE_MAP = {
"default": {
@@ -37,6 +40,9 @@ PROVIDER_THROTTLE_MAP = {
ServiceUnavailable: (datetime.timedelta(minutes=20), "20 minutes"),
APIThrottled: (datetime.timedelta(minutes=10), "10 minutes"),
ParseResponseError: (datetime.timedelta(hours=6), "6 hours"),
requests.Timeout: (datetime.timedelta(hours=1), "1 hour"),
socket.timeout: (datetime.timedelta(hours=1), "1 hour"),
requests.ReadTimeout: (datetime.timedelta(hours=1), "1 hour"),
},
"opensubtitles": {
TooManyRequests: (datetime.timedelta(hours=3), "3 hours"),
@@ -48,7 +54,6 @@ PROVIDER_THROTTLE_MAP = {
DownloadLimitExceeded: (datetime.timedelta(hours=3), "3 hours"),
TooManyRequests: (datetime.timedelta(minutes=5), "5 minutes"),
IPAddressBlocked: (datetime.timedelta(hours=1), "1 hours"),
},
"titulky": {
DownloadLimitExceeded: (datetime.timedelta(hours=3), "3 hours")
@@ -195,6 +200,7 @@ def provider_throttle(name, exception):
logging.info("Throttling %s for %s, until %s, because of: %s. Exception info: %r", name,
throttle_description, throttle_until.strftime("%y/%m/%d %H:%M"), cls_name, exception.args[0]
if exception.args else None)
update_throttled_provider()
def throttled_count(name):
@@ -252,7 +258,7 @@ def update_throttled_provider():
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
event_stream(type='badges')
event_stream(type='badges_providers')
def list_throttled_providers():
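
For reference, a simplified stand-alone illustration of how a raised exception maps to a throttle delay via the new default entries: the real provider_throttle() above also handles per-provider overrides and legacy string-based exceptions, so the lookup function here is illustrative only.

import datetime
import socket
import requests

# Mirror of the new "default" entries added to PROVIDER_THROTTLE_MAP above.
DEFAULT_THROTTLE = {
    requests.Timeout: (datetime.timedelta(hours=1), "1 hour"),
    requests.ReadTimeout: (datetime.timedelta(hours=1), "1 hour"),
    socket.timeout: (datetime.timedelta(hours=1), "1 hour"),
}

def throttle_for(exception):
    # Match on the exception class (or a parent class) in the default map.
    for cls, (delta, description) in DEFAULT_THROTTLE.items():
        if isinstance(exception, cls):
            return delta, description
    return None

print(throttle_for(requests.ReadTimeout()))  # (datetime.timedelta(seconds=3600), '1 hour')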

@@ -18,6 +18,7 @@ from subliminal import region, score as subliminal_scores, \
from subliminal_patch.core import SZAsyncProviderPool, download_best_subtitles, save_subtitles, download_subtitles, \
list_all_subtitles, get_subtitle_path
from subliminal_patch.score import compute_score
from subliminal_patch.subtitle import Subtitle
from get_languages import language_from_alpha3, alpha2_from_alpha3, alpha3_from_alpha2, language_from_alpha2, \
alpha2_from_language, alpha3_from_language
from config import settings
@@ -177,12 +178,16 @@ def download_subtitle(path, language, audio_language, hi, forced, providers, pro
logging.info("BAZARR All providers are throttled")
return None
subz_mods = settings.general.subzero_mods.strip().split(',') if settings.general.subzero_mods.strip() else None
saved_any = False
if downloaded_subtitles:
for video, subtitles in downloaded_subtitles.items():
if not subtitles:
continue
for s in subtitles:
s.mods = subz_mods
try:
fld = get_target_folder(path)
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
@@ -417,6 +422,7 @@ def manual_download_subtitle(path, language, audio_language, hi, forced, subtitl
os.environ["SZ_KEEP_ENCODING"] = "True"
subtitle = pickle.loads(codecs.decode(subtitle.encode(), "base64"))
subtitle.mods = settings.general.subzero_mods.strip().split(',') if settings.general.subzero_mods.strip() else None
use_postprocessing = settings.general.getboolean('use_postprocessing')
postprocessing_cmd = settings.general.postprocessing_cmd
single = settings.general.getboolean('single_language')
@@ -551,12 +557,6 @@ def manual_upload_subtitle(path, language, forced, title, scene_name, media_type
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
dest_directory = get_target_folder(path)
fake_video_path = None
if dest_directory:
fake_video_path = os.path.join(dest_directory, os.path.split(path)[1])
_, ext = os.path.splitext(subtitle.filename)
language = alpha3_from_alpha2(language)
if language == 'pob':
@@ -567,48 +567,37 @@ def manual_upload_subtitle(path, language, forced, title, scene_name, media_type
if forced:
lang_obj = Language.rebuild(lang_obj, forced=True)
subtitle_path = get_subtitle_path(video_path=force_unicode(fake_video_path if fake_video_path else path),
language=None if single else lang_obj,
extension=ext,
forced_tag=forced)
sub = Subtitle(
lang_obj,
mods=settings.general.subzero_mods.strip().split(',') if settings.general.subzero_mods.strip() else None
)
subtitle_path = force_unicode(subtitle_path)
if os.path.exists(subtitle_path):
os.remove(subtitle_path)
sub.content = subtitle.read()
if not sub.is_valid():
logging.exception('BAZARR Invalid subtitle file: ' + subtitle.filename)
sub.mods = None
if settings.general.getboolean('utf8_encode'):
try:
os.remove(subtitle_path + ".tmp")
except:
pass
sub.set_encoding("utf-8")
subtitle.save(subtitle_path + ".tmp")
with open(subtitle_path + ".tmp", 'rb') as fr:
text = fr.read()
try:
guess = chardet.detect(text)
text = text.decode(guess["encoding"])
text = text.encode('utf-8')
except UnicodeError:
logging.exception("BAZARR subtitles file doesn't seems to be text based. Skipping this file: " +
subtitle_path)
else:
with open(subtitle_path, 'wb') as fw:
fw.write(text)
finally:
try:
os.remove(subtitle_path + ".tmp")
except:
pass
else:
subtitle.save(subtitle_path)
if chmod:
os.chmod(subtitle_path, chmod)
saved_subtitles = []
try:
saved_subtitles = save_subtitles(path,
[sub],
single=single,
tags=None, # fixme
directory=get_target_folder(path),
chmod=chmod,
# formats=("srt", "vtt")
path_decoder=force_unicode)
except:
pass
if len(saved_subtitles) < 1:
logging.exception('BAZARR Error saving Subtitles file to disk for this file:' + path)
return
subtitle_path = saved_subtitles[0].storage_path
message = language_from_alpha3(language) + (" forced" if forced else "") + " Subtitles manually uploaded."
uploaded_language_code3 = language

@@ -30,7 +30,9 @@ def store_subtitles(original_path, reversed_path):
subtitle_languages = embedded_subs_reader.list_languages(reversed_path)
for subtitle_language, subtitle_forced, subtitle_codec in subtitle_languages:
try:
if (settings.general.getboolean("ignore_pgs_subs") and subtitle_codec == "PGS") or (settings.general.getboolean("ignore_vobsub_subs") and subtitle_codec == "VOBSUB"):
if (settings.general.getboolean("ignore_pgs_subs") and subtitle_codec.lower() == "pgs") or \
(settings.general.getboolean("ignore_vobsub_subs") and subtitle_codec.lower() ==
"vobsub"):
logging.debug("BAZARR skipping %s sub for language: %s" % (subtitle_codec, alpha2_from_alpha3(subtitle_language)))
continue
@@ -106,7 +108,9 @@ def store_subtitles_movie(original_path, reversed_path):
subtitle_languages = embedded_subs_reader.list_languages(reversed_path)
for subtitle_language, subtitle_forced, subtitle_codec in subtitle_languages:
try:
if (settings.general.getboolean("ignore_pgs_subs") and subtitle_codec == "PGS") or (settings.general.getboolean("ignore_vobsub_subs") and subtitle_codec == "VOBSUB"):
if (settings.general.getboolean("ignore_pgs_subs") and subtitle_codec.lower() == "pgs") or \
(settings.general.getboolean("ignore_vobsub_subs") and subtitle_codec.lower() ==
"vobsub"):
logging.debug("BAZARR skipping %s sub for language: %s" % (subtitle_codec, alpha2_from_alpha3(subtitle_language)))
continue
@@ -232,7 +236,7 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
if send_event:
event_stream(type='episode', action='update', series=missing_subtitles_item[2],
episode=missing_subtitles_item[1])
event_stream(type='badges')
event_stream(type='badges_series')
def list_missing_subtitles_movies(no=None, send_event=True):
@@ -293,7 +297,7 @@ def list_missing_subtitles_movies(no=None, send_event=True):
if send_event:
event_stream(type='movie', action='update', movie=missing_subtitles_item[1])
event_stream(type='badges')
event_stream(type='badges_movies')
def series_full_scan_subtitles():

@@ -1,6 +1,6 @@
# coding=utf-8
bazarr_version = '0.9.0.1'
bazarr_version = '0.9.0.2'
import os
os.environ["BAZARR_VERSION"] = bazarr_version

@@ -1,8 +1,6 @@
import logging
import os
from ffsubsync.ffsubsync import run
from ffsubsync.constants import *
from knowit import api
from ffsubsync.ffsubsync import run, make_parser
from utils import get_binary
from utils import history_log, history_log_movie
from get_languages import alpha2_from_alpha3, language_from_alpha3
@@ -13,31 +11,17 @@ class SubSyncer:
def __init__(self):
self.reference = None
self.srtin = None
self.reference_stream = None
self.overwrite_input = True
self.ffmpeg_path = None
# unused attributes
self.encoding = DEFAULT_ENCODING
self.vlc_mode = None
self.make_test_case = None
self.gui_mode = None
self.srtout = None
self.ffmpeg_path = None
self.args = None
self.vad = 'subs_then_auditok'
self.reference_encoding = None
self.frame_rate = DEFAULT_FRAME_RATE
self.start_seconds = DEFAULT_START_SECONDS
self.no_fix_framerate = None
self.serialize_speech = None
self.max_offset_seconds = DEFAULT_MAX_OFFSET_SECONDS
self.merge_with_reference = None
self.output_encoding = 'same'
def sync(self, video_path, srt_path, srt_lang, media_type, sonarr_series_id=None, sonarr_episode_id=None,
radarr_id=None):
self.reference = video_path
self.srtin = srt_path
self.srtout = None
self.args = None
ffprobe_exe = get_binary('ffprobe')
if not ffprobe_exe:
@@ -46,49 +30,6 @@ class SubSyncer:
else:
logging.debug('BAZARR FFprobe used is %s', ffprobe_exe)
api.initialize({'provider': 'ffmpeg', 'ffmpeg': ffprobe_exe})
data = api.know(self.reference)
using_what = None
if 'subtitle' in data:
for i, embedded_subs in enumerate(data['subtitle']):
if 'language' in embedded_subs:
language = embedded_subs['language'].alpha3
if language == "eng":
using_what = "English embedded subtitle track"
self.reference_stream = "s:{}".format(i)
break
if not self.reference_stream:
using_what = "{0} embedded subtitle track".format(
language_from_alpha3(embedded_subs['language'].alpha3) or 'unknown language embedded subtitles '
'track')
self.reference_stream = "s:0"
elif 'audio' in data:
audio_tracks = data['audio']
for i, audio_track in enumerate(audio_tracks):
if 'language' in audio_track:
language = audio_track['language'].alpha3
if language == srt_lang:
using_what = "{0} audio track".format(language_from_alpha3(audio_track['language'].alpha3) or
'unknown language audio track')
self.reference_stream = "a:{}".format(i)
break
if not self.reference_stream:
audio_tracks = data['audio']
for i, audio_track in enumerate(audio_tracks):
if 'language' in audio_track:
language = audio_track['language'].alpha3
if language == "eng":
using_what = "English audio track"
self.reference_stream = "a:{}".format(i)
break
if not self.reference_stream:
using_what = "first audio track"
self.reference_stream = "a:0"
else:
raise NoAudioTrack
ffmpeg_exe = get_binary('ffmpeg')
if not ffmpeg_exe:
logging.debug('BAZARR FFmpeg not found!')
@@ -98,18 +39,19 @@ class SubSyncer:
self.ffmpeg_path = os.path.dirname(ffmpeg_exe)
try:
result = run(self)
unparsed_args = [self.reference, '-i', self.srtin, '--overwrite-input', '--ffmpegpath', self.ffmpeg_path,
'--vad', self.vad]
parser = make_parser()
self.args = parser.parse_args(args=unparsed_args)
result = run(self.args)
except Exception as e:
logging.error('BAZARR an exception occurs during the synchronization process for this subtitles: ' +
self.srtin)
logging.exception('BAZARR an exception occurs during the synchronization process for this subtitles: '
'{0}'.format(self.srtin))
else:
if result['sync_was_successful']:
message = "{0} subtitles synchronization ended with an offset of {1} seconds and a framerate scale " \
"factor of {2} using {3} (0:{4}).".format(language_from_alpha3(srt_lang),
result['offset_seconds'],
result['framerate_scale_factor'],
using_what,
self.reference_stream)
"factor of {2}.".format(language_from_alpha3(srt_lang), result['offset_seconds'],
"{:.2f}".format(result['framerate_scale_factor']))
if media_type == 'series':
history_log(action=5, sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id,
@@ -120,16 +62,9 @@ class SubSyncer:
video_path=path_mappings.path_replace_reverse_movie(self.reference),
language=alpha2_from_alpha3(srt_lang), subtitles_path=srt_path)
else:
logging.error('BAZARR unable to sync subtitles using {0}({1}): {2}'.format(using_what,
self.reference_stream,
self.srtin))
logging.error('BAZARR unable to sync subtitles: {0}'.format(self.srtin))
return result
class NoAudioTrack(Exception):
"""Exception raised if no audio track can be found in video file."""
pass
subsync = SubSyncer()
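
Outside of the SubSyncer wrapper, the new ffsubsync invocation pattern used above boils down to the following sketch: the paths and the ffmpeg directory are placeholders, while make_parser()/run() and the result keys are taken from the imports and the success branch shown in this hunk.

from ffsubsync.ffsubsync import run, make_parser

video_path = '/path/to/episode.mkv'    # reference video (placeholder)
srt_path = '/path/to/episode.en.srt'   # subtitles to synchronize (placeholder)
ffmpeg_dir = '/usr/bin'                # directory containing ffmpeg/ffprobe (placeholder)

unparsed_args = [video_path, '-i', srt_path, '--overwrite-input',
                 '--ffmpegpath', ffmpeg_dir, '--vad', 'subs_then_auditok']
args = make_parser().parse_args(args=unparsed_args)
result = run(args)

if result['sync_was_successful']:
    print('offset: {0}s, framerate scale factor: {1:.2f}'.format(
        result['offset_seconds'], result['framerate_scale_factor']))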

@@ -11,10 +11,11 @@ from get_args import args
from config import settings, url_sonarr, url_radarr
from database import database
from event_handler import event_stream
from get_languages import alpha2_from_alpha3, language_from_alpha3
from get_languages import alpha2_from_alpha3, language_from_alpha3, alpha3_from_alpha2
from helper import path_mappings
from list_subtitles import store_subtitles, store_subtitles_movie
from subliminal_patch.subtitle import Subtitle
from subzero.language import Language
from subliminal import region as subliminal_cache_region
import datetime
import glob
@@ -250,3 +251,29 @@ def delete_subtitles(media_type, language, forced, media_path, subtitles_path, s
store_subtitles_movie(path_mappings.path_replace_reverse_movie(media_path), media_path)
notify_radarr(radarr_id)
return True
def subtitles_apply_mods(language, subtitle_path, mods):
if language == 'pob':
lang_obj = Language('por', 'BR')
else:
lang_obj = Language(language)
sub = Subtitle(lang_obj, mods=mods)
with open(subtitle_path, 'rb') as f:
sub.content = f.read()
if not sub.is_valid():
logging.exception('BAZARR Invalid subtitle file: ' + subtitle_path)
return
content = sub.get_modified_content()
if content:
if os.path.exists(subtitle_path):
os.remove(subtitle_path)
with open(subtitle_path, 'wb') as f:
f.write(content)
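
A minimal usage sketch of the new helper: the file path is a placeholder, the mod identifiers are examples from Sub-Zero's mod registry, and the alpha-3 language code is an assumption based on the Language() handling above ('pob' is special-cased).

# Rewrite an existing subtitles file in place with the requested Sub-Zero mods.
subtitles_apply_mods('eng', '/path/to/movie.eng.srt', ['remove_HI', 'OCR_fixes'])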

@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.
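In a Python project such as this one, the same notice is conventionally carried as a comment block at the top of each module; the header below is only an illustration of that convention, with the program name, year and author left as placeholders:
# <one line to give the program's name and a brief idea of what it does.>
# Copyright (C) <year> <name of author>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.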

Binary file not shown.

Binary file not shown.

@ -1,661 +1,243 @@
# ------------------------------------------------------------------------------- #
import logging
import re
import requests
import sys
import ssl
from collections import OrderedDict
from copy import deepcopy
from time import sleep
from requests.adapters import HTTPAdapter
from requests.sessions import Session
from requests_toolbelt.utils import dump
from requests.packages.urllib3.util.ssl_ import create_urllib3_context
# ------------------------------------------------------------------------------- #
from .interpreters import JavaScriptInterpreter
from .user_agent import User_Agent
try:
import brotli
from requests_toolbelt.utils import dump
except ImportError:
pass
try:
import copyreg
except ImportError:
import copy_reg as copyreg
try:
from HTMLParser import HTMLParser
import brotli
except ImportError:
if sys.version_info >= (3, 4):
import html
else:
from html.parser import HTMLParser
pass
try:
from urlparse import urlparse, urljoin
from urlparse import urlparse
from urlparse import urlunparse
except ImportError:
from urllib.parse import urlparse, urljoin
# ------------------------------------------------------------------------------- #
from urllib.parse import urlparse
from urllib.parse import urlunparse
from .exceptions import (
CloudflareLoopProtection,
CloudflareCode1020,
CloudflareIUAMError,
CloudflareChallengeError,
CloudflareReCaptchaError,
CloudflareReCaptchaProvider
)
from .interpreters import JavaScriptInterpreter
from .reCaptcha import reCaptcha
from .user_agent import User_Agent
##########################################################################################################################################################
# ------------------------------------------------------------------------------- #
__version__ = '1.1.9'
__version__ = '1.2.40'
BUG_REPORT = 'Cloudflare may have changed their technique, or there may be a bug in the script.'
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
class CipherSuiteAdapter(HTTPAdapter):
__attrs__ = [
'ssl_context',
'max_retries',
'config',
'_pool_connections',
'_pool_maxsize',
'_pool_block'
]
def __init__(self, *args, **kwargs):
self.ssl_context = kwargs.pop('ssl_context', None)
self.cipherSuite = kwargs.pop('cipherSuite', None)
def __init__(self, cipherSuite=None, **kwargs):
self.cipherSuite = cipherSuite
if not self.ssl_context:
self.ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
self.ssl_context.set_ciphers(self.cipherSuite)
self.ssl_context.set_ecdh_curve('prime256v1')
self.ssl_context.options |= (ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1)
if hasattr(ssl, 'PROTOCOL_TLS'):
self.ssl_context = create_urllib3_context(
ssl_version=getattr(ssl, 'PROTOCOL_TLSv1_3', ssl.PROTOCOL_TLSv1_2),
ciphers=self.cipherSuite
)
else:
self.ssl_context = create_urllib3_context(ssl_version=ssl.PROTOCOL_TLSv1)
super(CipherSuiteAdapter, self).__init__(**kwargs)
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
def init_poolmanager(self, *args, **kwargs):
kwargs['ssl_context'] = self.ssl_context
return super(CipherSuiteAdapter, self).init_poolmanager(*args, **kwargs)
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
def proxy_manager_for(self, *args, **kwargs):
kwargs['ssl_context'] = self.ssl_context
return super(CipherSuiteAdapter, self).proxy_manager_for(*args, **kwargs)
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
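# ------------------------------------------------------------------------------- #
# Illustrative sketch only (not part of cloudscraper): the adapter above is consumed
# by mounting it on a requests Session so that every HTTPS request is negotiated
# with the custom SSLContext / cipher suite. Assuming only requests and urllib3,
# the same pattern looks roughly like this (PinnedCipherAdapter is a made-up name):
#
#     import requests
#     from requests.adapters import HTTPAdapter
#     from urllib3.util.ssl_ import create_urllib3_context
#
#     class PinnedCipherAdapter(HTTPAdapter):
#         def __init__(self, ciphers, **kwargs):
#             self.ssl_context = create_urllib3_context(ciphers=ciphers)
#             super(PinnedCipherAdapter, self).__init__(**kwargs)
#
#         def init_poolmanager(self, *args, **kwargs):
#             kwargs['ssl_context'] = self.ssl_context
#             return super(PinnedCipherAdapter, self).init_poolmanager(*args, **kwargs)
#
#     session = requests.Session()
#     session.mount('https://', PinnedCipherAdapter('ECDHE-RSA-AES128-GCM-SHA256'))
# ------------------------------------------------------------------------------- #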
class CloudScraper(Session):
def __init__(self, *args, **kwargs):
self.debug = kwargs.pop('debug', False)
self.delay = kwargs.pop('delay', None)
self.cipherSuite = kwargs.pop('cipherSuite', None)
self.ssl_context = kwargs.pop('ssl_context', None)
self.interpreter = kwargs.pop('interpreter', 'native')
self.recaptcha = kwargs.pop('recaptcha', {})
self.requestPreHook = kwargs.pop('requestPreHook', None)
self.requestPostHook = kwargs.pop('requestPostHook', None)
self.allow_brotli = kwargs.pop(
'allow_brotli',
True if 'brotli' in sys.modules.keys() else False
)
self.user_agent = User_Agent(
allow_brotli=self.allow_brotli,
browser=kwargs.pop('browser', None)
)
self._solveDepthCnt = 0
self.solveDepth = kwargs.pop('solveDepth', 3)
self.interpreter = kwargs.pop('interpreter', 'js2py')
self.allow_brotli = kwargs.pop('allow_brotli', True if 'brotli' in sys.modules.keys() else False)
self.cipherSuite = None
super(CloudScraper, self).__init__(*args, **kwargs)
# pylint: disable=E0203
if 'requests' in self.headers['User-Agent']:
# ------------------------------------------------------------------------------- #
# Set a random User-Agent if no custom User-Agent has been set
# ------------------------------------------------------------------------------- #
self.headers = self.user_agent.headers
if not self.cipherSuite:
self.cipherSuite = self.user_agent.cipherSuite
if isinstance(self.cipherSuite, list):
self.cipherSuite = ':'.join(self.cipherSuite)
self.mount(
'https://',
CipherSuiteAdapter(
cipherSuite=self.cipherSuite,
ssl_context=self.ssl_context
)
)
# purely to allow us to pickle dump
copyreg.pickle(ssl.SSLContext, lambda obj: (obj.__class__, (obj.protocol,)))
# ------------------------------------------------------------------------------- #
# Allow us to pickle our session back with all variables
# ------------------------------------------------------------------------------- #
def __getstate__(self):
return self.__dict__
self.headers = User_Agent(allow_brotli=self.allow_brotli).headers
# ------------------------------------------------------------------------------- #
# Raise an Exception with no stacktrace and reset depth counter.
# ------------------------------------------------------------------------------- #
self.mount('https://', CipherSuiteAdapter(self.loadCipherSuite()))
def simpleException(self, exception, msg):
self._solveDepthCnt = 0
sys.tracebacklimit = 0
raise exception(msg)
# ------------------------------------------------------------------------------- #
# debug the request via the response
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
@staticmethod
def debugRequest(req):
try:
print(dump.dump_all(req).decode('utf-8'))
except ValueError as e:
print("Debug Error: {}".format(getattr(e, 'message', e)))
except: # noqa
pass
# ------------------------------------------------------------------------------- #
# Unescape / decode html entities
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
@staticmethod
def unescape(html_text):
if sys.version_info >= (3, 0):
if sys.version_info >= (3, 4):
return html.unescape(html_text)
def loadCipherSuite(self):
if self.cipherSuite:
return self.cipherSuite
return HTMLParser().unescape(html_text)
self.cipherSuite = ''
return HTMLParser().unescape(html_text)
if hasattr(ssl, 'PROTOCOL_TLS'):
ciphers = [
'ECDHE-ECDSA-AES128-GCM-SHA256', 'ECDHE-RSA-AES128-GCM-SHA256', 'ECDHE-ECDSA-AES256-GCM-SHA384',
'ECDHE-RSA-AES256-GCM-SHA384', 'ECDHE-ECDSA-CHACHA20-POLY1305-SHA256', 'ECDHE-RSA-CHACHA20-POLY1305-SHA256',
'ECDHE-RSA-AES128-CBC-SHA', 'ECDHE-RSA-AES256-CBC-SHA', 'RSA-AES128-GCM-SHA256', 'RSA-AES256-GCM-SHA384',
'ECDHE-RSA-AES128-GCM-SHA256', 'RSA-AES256-SHA', '3DES-EDE-CBC'
]
# ------------------------------------------------------------------------------- #
# Decode Brotli on older versions of urllib3 manually
# ------------------------------------------------------------------------------- #
if hasattr(ssl, 'PROTOCOL_TLSv1_3'):
ciphers.insert(0, ['GREASE_3A', 'GREASE_6A', 'AES128-GCM-SHA256', 'AES256-GCM-SHA256', 'AES256-GCM-SHA384', 'CHACHA20-POLY1305-SHA256'])
def decodeBrotli(self, resp):
if requests.packages.urllib3.__version__ < '1.25.1' and resp.headers.get('Content-Encoding') == 'br':
if self.allow_brotli and resp._content:
resp._content = brotli.decompress(resp.content)
else:
logging.warning(
'You\'re running urllib3 {}. Brotli content detected, '
'which requires manual decompression, '
'but option allow_brotli is set to False; '
'we will not continue to decompress.'.format(requests.packages.urllib3.__version__)
)
return resp
ctx = ssl.SSLContext(getattr(ssl, 'PROTOCOL_TLSv1_3', ssl.PROTOCOL_TLSv1_2))
# ------------------------------------------------------------------------------- #
# Our hijacker request function
# ------------------------------------------------------------------------------- #
for cipher in ciphers:
try:
ctx.set_ciphers(cipher)
self.cipherSuite = '{}:{}'.format(self.cipherSuite, cipher).rstrip(':')
except ssl.SSLError:
pass
def request(self, method, url, *args, **kwargs):
# pylint: disable=E0203
if kwargs.get('proxies') and kwargs.get('proxies') != self.proxies:
self.proxies = kwargs.get('proxies')
# ------------------------------------------------------------------------------- #
# Pre-Hook the request via user defined function.
# ------------------------------------------------------------------------------- #
if self.requestPreHook:
(method, url, args, kwargs) = self.requestPreHook(
self,
method,
url,
*args,
**kwargs
)
return self.cipherSuite
# ------------------------------------------------------------------------------- #
# Make the request via requests.
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
response = self.decodeBrotli(
super(CloudScraper, self).request(method, url, *args, **kwargs)
)
def request(self, method, url, *args, **kwargs):
ourSuper = super(CloudScraper, self)
resp = ourSuper.request(method, url, *args, **kwargs)
# ------------------------------------------------------------------------------- #
# Debug the request via the Response object.
# ------------------------------------------------------------------------------- #
if resp.headers.get('Content-Encoding') == 'br':
if self.allow_brotli and resp._content:
resp._content = brotli.decompress(resp.content)
else:
logging.warning('Brotli content detected, but the allow_brotli option is disabled; we will not continue.')
return resp
# Debug request
if self.debug:
self.debugRequest(response)
# ------------------------------------------------------------------------------- #
# Post-Hook the request aka Post-Hook the response via user defined function.
# ------------------------------------------------------------------------------- #
if self.requestPostHook:
response = self.requestPostHook(self, response)
if self.debug:
self.debugRequest(response)
self.debugRequest(resp)
# Check if Cloudflare anti-bot is on
if self.is_Challenge_Request(response):
# ------------------------------------------------------------------------------- #
# Try to solve the challenge and send it back
# ------------------------------------------------------------------------------- #
if self._solveDepthCnt >= self.solveDepth:
_ = self._solveDepthCnt
self.simpleException(
CloudflareLoopProtection,
"!!Loop Protection!! We have tried to solve {} time(s) in a row.".format(_)
)
self._solveDepthCnt += 1
response = self.Challenge_Response(response, **kwargs)
else:
if not response.is_redirect and response.status_code not in [429, 503]:
self._solveDepthCnt = 0
return response
# ------------------------------------------------------------------------------- #
# check if the response contains a valid Cloudflare challenge
# ------------------------------------------------------------------------------- #
@staticmethod
def is_IUAM_Challenge(resp):
try:
return (
resp.headers.get('Server', '').startswith('cloudflare')
and resp.status_code in [429, 503]
and re.search(
r'<form .*?="challenge-form" action="/.*?__cf_chl_jschl_tk__=\S+"',
resp.text,
re.M | re.S
)
)
except AttributeError:
pass
return False
# ------------------------------------------------------------------------------- #
# check if the response contains new Cloudflare challenge
# ------------------------------------------------------------------------------- #
@staticmethod
def is_New_IUAM_Challenge(resp):
try:
return (
resp.headers.get('Server', '').startswith('cloudflare')
and resp.status_code in [429, 503]
and re.search(
r'cpo.src\s*=\s*"/cdn-cgi/challenge-platform/orchestrate/jsch/v1"',
resp.text,
re.M | re.S
)
)
except AttributeError:
pass
if self.isChallengeRequest(resp):
if resp.request.method != 'GET':
# Work around cases where the initial request is not a GET:
# issue a GET first, then re-request using the original method.
self.request('GET', resp.url)
resp = ourSuper.request(method, url, *args, **kwargs)
else:
# Solve Challenge
resp = self.sendChallengeResponse(resp, **kwargs)
return False
return resp
# ------------------------------------------------------------------------------- #
# check if the response contains a valid Cloudflare reCaptcha challenge
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
@staticmethod
def is_reCaptcha_Challenge(resp):
try:
return (
resp.headers.get('Server', '').startswith('cloudflare')
and resp.status_code == 403
and re.search(
r'action="/.*?__cf_chl_captcha_tk__=\S+".*?data\-sitekey=.*?',
resp.text,
re.M | re.DOTALL
)
)
except AttributeError:
pass
return False
def isChallengeRequest(resp):
if resp.headers.get('Server', '').startswith('cloudflare'):
if b'why_captcha' in resp.content or b'/cdn-cgi/l/chk_captcha' in resp.content:
raise ValueError('Captcha')
# ------------------------------------------------------------------------------- #
# check if the response contains Firewall 1020 Error
# ------------------------------------------------------------------------------- #
@staticmethod
def is_Firewall_Blocked(resp):
try:
return (
resp.headers.get('Server', '').startswith('cloudflare')
and resp.status_code == 403
and re.search(
r'<span class="cf-error-code">1020</span>',
resp.text,
re.M | re.DOTALL
)
resp.status_code in [429, 503]
and all(s in resp.content for s in [b'jschl_vc', b'jschl_answer'])
)
except AttributeError:
pass
return False
# ------------------------------------------------------------------------------- #
# Wrapper for is_reCaptcha_Challenge, is_IUAM_Challenge, is_Firewall_Blocked
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
def is_Challenge_Request(self, resp):
if self.is_Firewall_Blocked(resp):
self.simpleException(
CloudflareCode1020,
'Cloudflare has blocked this request (Code 1020 Detected).'
)
def sendChallengeResponse(self, resp, **original_kwargs):
body = resp.text
if self.is_New_IUAM_Challenge(resp):
self.simpleException(
CloudflareChallengeError,
'Detected the new Cloudflare challenge.'
)
# Cloudflare requires a delay before solving the challenge
if not self.delay:
try:
delay = float(re.search(r'submit\(\);\r?\n\s*},\s*([0-9]+)', body).group(1)) / float(1000)
if isinstance(delay, (int, float)):
self.delay = delay
except: # noqa
pass
if self.is_reCaptcha_Challenge(resp) or self.is_IUAM_Challenge(resp):
if self.debug:
print('Detected Challenge.')
return True
sleep(self.delay)
return False
parsed_url = urlparse(resp.url)
domain = parsed_url.netloc
submit_url = '{}://{}/cdn-cgi/l/chk_jschl'.format(parsed_url.scheme, domain)
# ------------------------------------------------------------------------------- #
# Try to solve cloudflare javascript challenge.
# ------------------------------------------------------------------------------- #
cloudflare_kwargs = deepcopy(original_kwargs)
def IUAM_Challenge_Response(self, body, url, interpreter):
try:
formPayload = re.search(
r'<form (?P<form>.*?="challenge-form" '
r'action="(?P<challengeUUID>.*?'
r'__cf_chl_jschl_tk__=\S+)"(.*?)</form>)',
body,
re.M | re.DOTALL
).groupdict()
if not all(key in formPayload for key in ['form', 'challengeUUID']):
self.simpleException(
CloudflareIUAMError,
"Cloudflare IUAM detected, unfortunately we can't extract the parameters correctly."
)
params = OrderedDict()
payload = OrderedDict()
for challengeParam in re.findall(r'^\s*<input\s(.*?)/>', formPayload['form'], re.M | re.S):
inputPayload = dict(re.findall(r'(\S+)="(\S+)"', challengeParam))
if inputPayload.get('name') in ['r', 'jschl_vc', 'pass']:
payload.update({inputPayload['name']: inputPayload['value']})
s = re.search(r'name="s"\svalue="(?P<s_value>[^"]+)', body)
if s:
params['s'] = s.group('s_value')
except AttributeError:
self.simpleException(
CloudflareIUAMError,
"Cloudflare IUAM detected, unfortunately we can't extract the parameters correctly."
params.update(
[
('jschl_vc', re.search(r'name="jschl_vc" value="(\w+)"', body).group(1)),
('pass', re.search(r'name="pass" value="(.+?)"', body).group(1))
]
)
hostParsed = urlparse(url)
params = cloudflare_kwargs.setdefault('params', params)
try:
payload['jschl_answer'] = JavaScriptInterpreter.dynamicImport(
interpreter
).solveChallenge(body, hostParsed.netloc)
except Exception as e:
self.simpleException(
CloudflareIUAMError,
'Unable to parse Cloudflare anti-bots page: {}'.format(
getattr(e, 'message', e)
raise ValueError('Unable to parse Cloudflare anti-bots page: {} {}'.format(e.message, BUG_REPORT))
# Solve the Javascript challenge
params['jschl_answer'] = JavaScriptInterpreter.dynamicImport(self.interpreter).solveChallenge(body, domain)
# Requests transforms any request into a GET after a redirect,
# so the redirect has to be handled manually here to allow for
# performing other types of requests even as the first request.
cloudflare_kwargs['allow_redirects'] = False
redirect = self.request(resp.request.method, submit_url, **cloudflare_kwargs)
redirect_location = urlparse(redirect.headers['Location'])
if not redirect_location.netloc:
redirect_url = urlunparse(
(
parsed_url.scheme,
domain,
redirect_location.path,
redirect_location.params,
redirect_location.query,
redirect_location.fragment
)
)
return self.request(resp.request.method, redirect_url, **original_kwargs)
return {
'url': '{}://{}{}'.format(
hostParsed.scheme,
hostParsed.netloc,
self.unescape(formPayload['challengeUUID'])
),
'data': payload
}
# ------------------------------------------------------------------------------- #
# Try to solve the reCaptcha challenge via 3rd party.
# ------------------------------------------------------------------------------- #
return self.request(resp.request.method, redirect.headers['Location'], **original_kwargs)
def reCaptcha_Challenge_Response(self, provider, provider_params, body, url):
try:
formPayload = re.search(
r'<form (?P<form>.*?="challenge-form" '
r'action="(?P<challengeUUID>.*?__cf_chl_captcha_tk__=\S+)"(.*?)</form>)',
body,
re.M | re.DOTALL
).groupdict()
if not all(key in formPayload for key in ['form', 'challengeUUID']):
self.simpleException(
CloudflareReCaptchaError,
"Cloudflare reCaptcha detected, unfortunately we can't extract the parameters correctly."
)
payload = OrderedDict(
re.findall(
r'(name="r"\svalue|data-ray|data-sitekey|name="cf_captcha_kind"\svalue)="(.*?)"',
formPayload['form']
)
)
captchaType = 'reCaptcha' if payload['name="cf_captcha_kind" value'] == 're' else 'hCaptcha'
except (AttributeError, KeyError):
self.simpleException(
CloudflareReCaptchaError,
"Cloudflare reCaptcha detected, unfortunately we can't extract the parameters correctly."
)
captchaResponse = reCaptcha.dynamicImport(
provider.lower()
).solveCaptcha(
captchaType,
url,
payload['data-sitekey'],
provider_params
)
dataPayload = OrderedDict([
('r', payload.get('name="r" value', '')),
('cf_captcha_kind', payload['name="cf_captcha_kind" value']),
('id', payload.get('data-ray')),
('g-recaptcha-response', captchaResponse)
])
if captchaType == 'hCaptcha':
dataPayload.update({'h-captcha-response': captchaResponse})
hostParsed = urlparse(url)
return {
'url': '{}://{}{}'.format(
hostParsed.scheme,
hostParsed.netloc,
self.unescape(formPayload['challengeUUID'])
),
'data': dataPayload
}
# ------------------------------------------------------------------------------- #
# Attempt to handle and send the challenge response back to cloudflare
# ------------------------------------------------------------------------------- #
def Challenge_Response(self, resp, **kwargs):
if self.is_reCaptcha_Challenge(resp):
# ------------------------------------------------------------------------------- #
# double down on the request as some websites are only checking
# if cfuid is populated before issuing reCaptcha.
# ------------------------------------------------------------------------------- #
resp = self.decodeBrotli(
super(CloudScraper, self).request(resp.request.method, resp.url, **kwargs)
)
if not self.is_reCaptcha_Challenge(resp):
return resp
# ------------------------------------------------------------------------------- #
# if no reCaptcha provider raise a runtime error.
# ------------------------------------------------------------------------------- #
if not self.recaptcha or not isinstance(self.recaptcha, dict) or not self.recaptcha.get('provider'):
self.simpleException(
CloudflareReCaptchaProvider,
"Cloudflare reCaptcha detected, unfortunately you haven't loaded an anti reCaptcha provider "
"correctly via the 'recaptcha' parameter."
)
# ------------------------------------------------------------------------------- #
# if provider is return_response, return the response without doing anything.
# ------------------------------------------------------------------------------- #
if self.recaptcha.get('provider') == 'return_response':
return resp
self.recaptcha['proxies'] = self.proxies
submit_url = self.reCaptcha_Challenge_Response(
self.recaptcha.get('provider'),
self.recaptcha,
resp.text,
resp.url
)
else:
# ------------------------------------------------------------------------------- #
# Cloudflare requires a delay before solving the challenge
# ------------------------------------------------------------------------------- #
if not self.delay:
try:
delay = float(
re.search(
r'submit\(\);\r?\n\s*},\s*([0-9]+)',
resp.text
).group(1)
) / float(1000)
if isinstance(delay, (int, float)):
self.delay = delay
except (AttributeError, ValueError):
self.simpleException(
CloudflareIUAMError,
"Cloudflare IUAM possibility malformed, issue extracing delay value."
)
sleep(self.delay)
# ------------------------------------------------------------------------------- #
submit_url = self.IUAM_Challenge_Response(
resp.text,
resp.url,
self.interpreter
)
# ------------------------------------------------------------------------------- #
# Send the Challenge Response back to Cloudflare
# ------------------------------------------------------------------------------- #
if submit_url:
def updateAttr(obj, name, newValue):
try:
obj[name].update(newValue)
return obj[name]
except (AttributeError, KeyError):
obj[name] = {}
obj[name].update(newValue)
return obj[name]
cloudflare_kwargs = deepcopy(kwargs)
cloudflare_kwargs['allow_redirects'] = False
cloudflare_kwargs['data'] = updateAttr(
cloudflare_kwargs,
'data',
submit_url['data']
)
urlParsed = urlparse(resp.url)
cloudflare_kwargs['headers'] = updateAttr(
cloudflare_kwargs,
'headers',
{
'Origin': '{}://{}'.format(urlParsed.scheme, urlParsed.netloc),
'Referer': resp.url
}
)
challengeSubmitResponse = self.request(
'POST',
submit_url['url'],
**cloudflare_kwargs
)
# ------------------------------------------------------------------------------- #
# Return the response if Cloudflare is doing a content pass-through instead of a 3xx;
# otherwise request the redirect URL, also handling a protocol scheme change (http -> https)
# ------------------------------------------------------------------------------- #
if not challengeSubmitResponse.is_redirect:
return challengeSubmitResponse
else:
cloudflare_kwargs = deepcopy(kwargs)
cloudflare_kwargs['headers'] = updateAttr(
cloudflare_kwargs,
'headers',
{'Referer': challengeSubmitResponse.url}
)
if not urlparse(challengeSubmitResponse.headers['Location']).netloc:
redirect_location = urljoin(
challengeSubmitResponse.url,
challengeSubmitResponse.headers['Location']
)
else:
redirect_location = challengeSubmitResponse.headers['Location']
return self.request(
resp.request.method,
redirect_location,
**cloudflare_kwargs
)
# ------------------------------------------------------------------------------- #
# We shouldn't be here...
# Re-request the original query and/or process again....
# ------------------------------------------------------------------------------- #
return self.request(resp.request.method, resp.url, **kwargs)
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
@classmethod
def create_scraper(cls, sess=None, **kwargs):
@ -665,30 +247,24 @@ class CloudScraper(Session):
scraper = cls(**kwargs)
if sess:
for attr in ['auth', 'cert', 'cookies', 'headers', 'hooks', 'params', 'proxies', 'data']:
attrs = ['auth', 'cert', 'cookies', 'headers', 'hooks', 'params', 'proxies', 'data']
for attr in attrs:
val = getattr(sess, attr, None)
if val:
setattr(scraper, attr, val)
return scraper
# ------------------------------------------------------------------------------- #
# Functions for integrating cloudscraper with other applications and scripts
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
# Functions for integrating cloudscraper with other applications and scripts
@classmethod
def get_tokens(cls, url, **kwargs):
scraper = cls.create_scraper(
**{
field: kwargs.pop(field, None) for field in [
'allow_brotli',
'browser',
'debug',
'delay',
'interpreter',
'recaptcha'
] if field in kwargs
}
debug=kwargs.pop('debug', False),
delay=kwargs.pop('delay', None),
interpreter=kwargs.pop('interpreter', 'js2py'),
allow_brotli=kwargs.pop('allow_brotli', True),
)
try:
@ -707,11 +283,7 @@ class CloudScraper(Session):
cookie_domain = d
break
else:
cls.simpleException(
CloudflareIUAMError,
"Unable to find Cloudflare cookies. Does the site actually "
"have Cloudflare IUAM (I'm Under Attack Mode) enabled?"
)
raise ValueError('Unable to find Cloudflare cookies. Does the site actually have Cloudflare IUAM ("I\'m Under Attack Mode") enabled?')
return (
{
@ -721,7 +293,7 @@ class CloudScraper(Session):
scraper.headers['User-Agent']
)
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
@classmethod
def get_cookie_string(cls, url, **kwargs):
@ -732,18 +304,7 @@ class CloudScraper(Session):
return '; '.join('='.join(pair) for pair in tokens.items()), user_agent
# ------------------------------------------------------------------------------- #
if ssl.OPENSSL_VERSION_INFO < (1, 1, 1):
print(
"DEPRECATION: The OpenSSL being used by this python install ({}) does not meet the minimum supported "
"version (>= OpenSSL 1.1.1) in order to support TLS 1.3 required by Cloudflare, "
"You may encounter an unexpected reCaptcha or cloudflare 1020 blocks.".format(
ssl.OPENSSL_VERSION
)
)
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
create_scraper = CloudScraper.create_scraper
get_tokens = CloudScraper.get_tokens
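# ------------------------------------------------------------------------------- #
# Illustrative sketch only: typical use of the module-level helpers defined above.
# The URL is a placeholder; the kwargs shown (interpreter, allow_brotli) appear in
# both variants of CloudScraper present in this diff.
#
#     import cloudscraper
#
#     scraper = cloudscraper.create_scraper(interpreter='js2py', allow_brotli=True)
#     html = scraper.get('https://example.com').text
#
#     # or, to reuse the clearance cookies / User-Agent in another HTTP client:
#     cookies, user_agent = cloudscraper.get_tokens('https://example.com')
# ------------------------------------------------------------------------------- #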

@ -1,111 +0,0 @@
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------- #
"""
cloudscraper.exceptions
~~~~~~~~~~~~~~~~~~~
This module contains the set of cloudscraper exceptions.
"""
# ------------------------------------------------------------------------------- #
class CloudflareException(Exception):
"""
Base exception class for cloudscraper for Cloudflare
"""
class CloudflareLoopProtection(CloudflareException):
"""
Raise an exception for recursive depth protection
"""
class CloudflareCode1020(CloudflareException):
"""
Raise an exception for Cloudflare code 1020 block
"""
class CloudflareIUAMError(CloudflareException):
"""
Raise an error for a problem extracting IUAM parameters
from the Cloudflare payload
"""
class CloudflareChallengeError(CloudflareException):
"""
Raise an error when a new Cloudflare challenge is detected
"""
class CloudflareSolveError(CloudflareException):
"""
Raise an error when there is an issue solving the Cloudflare challenge
"""
class CloudflareReCaptchaError(CloudflareException):
"""
Raise an error for a problem extracting reCaptcha parameters
from the Cloudflare payload
"""
class CloudflareReCaptchaProvider(CloudflareException):
"""
Raise an exception for no reCaptcha provider loaded for Cloudflare.
"""
# ------------------------------------------------------------------------------- #
class reCaptchaException(Exception):
"""
Base exception class for cloudscraper reCaptcha Providers
"""
class reCaptchaServiceUnavailable(reCaptchaException):
"""
Raise an exception for external services that cannot be reached
"""
class reCaptchaAPIError(reCaptchaException):
"""
Raise an error for an error returned in the API response.
"""
class reCaptchaAccountError(reCaptchaException):
"""
Raise an error for reCaptcha provider account problem.
"""
class reCaptchaTimeout(reCaptchaException):
"""
Raise an exception for reCaptcha provider taking too long.
"""
class reCaptchaParameter(reCaptchaException):
"""
Raise an exception for a bad or missing parameter.
"""
class reCaptchaBadJobID(reCaptchaException):
"""
Raise an exception for invalid job id.
"""
class reCaptchaReportError(reCaptchaException):
"""
Raise an error when the reCaptcha provider is unable to report a bad solve.
"""

@ -1,79 +0,0 @@
import json
import platform
import requests
import ssl
import sys
import urllib3
from collections import OrderedDict
from . import __version__ as cloudscraper_version
# ------------------------------------------------------------------------------- #
def getPossibleCiphers():
try:
context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
context.set_ciphers('ALL')
return sorted([cipher['name'] for cipher in context.get_ciphers()])
except AttributeError:
return 'get_ciphers() is unsupported'
# ------------------------------------------------------------------------------- #
def _pythonVersion():
interpreter = platform.python_implementation()
interpreter_version = platform.python_version()
if interpreter == 'PyPy':
interpreter_version = '{}.{}.{}'.format(
sys.pypy_version_info.major,
sys.pypy_version_info.minor,
sys.pypy_version_info.micro
)
if sys.pypy_version_info.releaselevel != 'final':
interpreter_version = '{}{}'.format(
interpreter_version,
sys.pypy_version_info.releaselevel
)
return {
'name': interpreter,
'version': interpreter_version
}
# ------------------------------------------------------------------------------- #
def systemInfo():
try:
platform_info = {
'system': platform.system(),
'release': platform.release(),
}
except IOError:
platform_info = {
'system': 'Unknown',
'release': 'Unknown',
}
return OrderedDict([
('platform', platform_info),
('interpreter', _pythonVersion()),
('cloudscraper', cloudscraper_version),
('requests', requests.__version__),
('urllib3', urllib3.__version__),
('OpenSSL', OrderedDict(
[
('version', ssl.OPENSSL_VERSION),
('ciphers', getPossibleCiphers())
]
))
])
# ------------------------------------------------------------------------------- #
if __name__ == '__main__':
print(json.dumps(systemInfo(), indent=4))
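# ------------------------------------------------------------------------------- #
# Illustrative only: thanks to the __main__ guard above, the diagnostics can be
# produced from a shell when filing upstream bug reports (assuming the cloudscraper
# package is importable on sys.path):
#
#     python -m cloudscraper.help
#
# which prints the OrderedDict returned by systemInfo() as indented JSON.
# ------------------------------------------------------------------------------- #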

@ -1,31 +1,27 @@
import re
import sys
import logging
import abc
from ..exceptions import CloudflareSolveError
if sys.version_info >= (3, 4):
ABC = abc.ABC # noqa
else:
ABC = abc.ABCMeta('ABC', (), {})
# ------------------------------------------------------------------------------- #
interpreters = {}
##########################################################################################################################################################
# ------------------------------------------------------------------------------- #
BUG_REPORT = 'Cloudflare may have changed their technique, or there may be a bug in the script.'
##########################################################################################################################################################
class JavaScriptInterpreter(ABC):
interpreters = {}
# ------------------------------------------------------------------------------- #
class JavaScriptInterpreter(ABC):
@abc.abstractmethod
def __init__(self, name):
interpreters[name] = self
# ------------------------------------------------------------------------------- #
@classmethod
def dynamicImport(cls, name):
if name not in interpreters:
@ -39,18 +35,55 @@ class JavaScriptInterpreter(ABC):
return interpreters[name]
# ------------------------------------------------------------------------------- #
@abc.abstractmethod
def eval(self, jsEnv, js):
pass
# ------------------------------------------------------------------------------- #
def solveChallenge(self, body, domain):
try:
return '{0:.10f}'.format(float(self.eval(body, domain)))
js = re.search(
r'setTimeout\(function\(\){\s+(var s,t,o,p,b,r,e,a,k,i,n,g,f.+?\r?\n[\s\S]+?a\.value =.+?)\r?\n',
body
).group(1)
except Exception:
raise CloudflareSolveError(
'Error trying to solve Cloudflare IUAM Javascript, they may have changed their technique.'
raise ValueError('Unable to identify Cloudflare IUAM Javascript on website. {}'.format(BUG_REPORT))
js = re.sub(r'\s{2,}', ' ', js, flags=re.MULTILINE | re.DOTALL).replace('\'; 121\'', '')
js += '\na.value;'
jsEnv = '''
String.prototype.italics=function(str) {{return "<i>" + this + "</i>";}};
var document = {{
createElement: function () {{
return {{ firstChild: {{ href: "https://{domain}/" }} }}
}},
getElementById: function () {{
return {{"innerHTML": "{innerHTML}"}};
}}
}};
'''
try:
innerHTML = re.search(
r'<div(?: [^<>]*)? id="([^<>]*?)">([^<>]*?)</div>',
body,
re.MULTILINE | re.DOTALL
)
innerHTML = innerHTML.group(2) if innerHTML else ''
except: # noqa
logging.error('Error extracting Cloudflare IUAM Javascript. {}'.format(BUG_REPORT))
raise
try:
result = self.eval(
re.sub(r'\s{2,}', ' ', jsEnv.format(domain=domain, innerHTML=innerHTML), flags=re.MULTILINE | re.DOTALL),
js
)
float(result)
except Exception:
logging.error('Error executing Cloudflare IUAM Javascript. {}'.format(BUG_REPORT))
raise
return result
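# ------------------------------------------------------------------------------- #
# Illustrative sketch only: a new engine plugs in by subclassing
# JavaScriptInterpreter; instantiating the subclass registers it under its name so
# that dynamicImport(name) can hand it back. The eval() signature below matches the
# (jsEnv, js) variant shown above, and js2py is only an assumed stand-in engine:
#
#     import js2py
#
#     class MyEngine(JavaScriptInterpreter):
#         def __init__(self):
#             super(MyEngine, self).__init__('my-engine')
#
#         def eval(self, jsEnv, js):
#             return js2py.EvalJs().eval('{}{}'.format(jsEnv, js))
#
#     MyEngine()  # registration happens in __init__
#     solver = JavaScriptInterpreter.dynamicImport('my-engine')
# ------------------------------------------------------------------------------- #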

@ -1,103 +0,0 @@
from __future__ import absolute_import
import os
import sys
import ctypes.util
from ctypes import c_void_p, c_size_t, byref, create_string_buffer, CDLL
from . import JavaScriptInterpreter
from .encapsulated import template
# ------------------------------------------------------------------------------- #
class ChallengeInterpreter(JavaScriptInterpreter):
# ------------------------------------------------------------------------------- #
def __init__(self):
super(ChallengeInterpreter, self).__init__('chakracore')
# ------------------------------------------------------------------------------- #
def eval(self, body, domain):
chakraCoreLibrary = None
# check current working directory.
for _libraryFile in ['libChakraCore.so', 'libChakraCore.dylib', 'ChakraCore.dll']:
if os.path.isfile(os.path.join(os.getcwd(), _libraryFile)):
chakraCoreLibrary = os.path.join(os.getcwd(), _libraryFile)
continue
if not chakraCoreLibrary:
chakraCoreLibrary = ctypes.util.find_library('ChakraCore')
if not chakraCoreLibrary:
sys.tracebacklimit = 0
raise RuntimeError(
'ChakraCore library not found in current path or any of your system library paths, '
'please download from https://www.github.com/VeNoMouS/cloudscraper/tree/ChakraCore/, '
'or https://github.com/Microsoft/ChakraCore/'
)
try:
chakraCore = CDLL(chakraCoreLibrary)
except OSError:
sys.tracebacklimit = 0
raise RuntimeError('There was an error loading the ChakraCore library {}'.format(chakraCoreLibrary))
if sys.platform != 'win32':
chakraCore.DllMain(0, 1, 0)
chakraCore.DllMain(0, 2, 0)
script = create_string_buffer(template(body, domain).encode('utf-16'))
runtime = c_void_p()
chakraCore.JsCreateRuntime(0, 0, byref(runtime))
context = c_void_p()
chakraCore.JsCreateContext(runtime, byref(context))
chakraCore.JsSetCurrentContext(context)
fname = c_void_p()
chakraCore.JsCreateString(
'iuam-challenge.js',
len('iuam-challenge.js'),
byref(fname)
)
scriptSource = c_void_p()
chakraCore.JsCreateExternalArrayBuffer(
script,
len(script),
0,
0,
byref(scriptSource)
)
jsResult = c_void_p()
chakraCore.JsRun(scriptSource, 0, fname, 0x02, byref(jsResult))
resultJSString = c_void_p()
chakraCore.JsConvertValueToString(jsResult, byref(resultJSString))
stringLength = c_size_t()
chakraCore.JsCopyString(resultJSString, 0, 0, byref(stringLength))
resultSTR = create_string_buffer(stringLength.value + 1)
chakraCore.JsCopyString(
resultJSString,
byref(resultSTR),
stringLength.value + 1,
0
)
chakraCore.JsDisposeRuntime(runtime)
return resultSTR.value
# ------------------------------------------------------------------------------- #
ChallengeInterpreter()

@ -1,62 +0,0 @@
import logging
import re
# ------------------------------------------------------------------------------- #
def template(body, domain):
BUG_REPORT = 'Cloudflare may have changed their technique, or there may be a bug in the script.'
try:
js = re.search(
r'setTimeout\(function\(\){\s+(.*?a\.value\s*=\s*\S+toFixed\(10\);)',
body,
re.M | re.S
).group(1)
except Exception:
raise ValueError('Unable to identify Cloudflare IUAM Javascript on website. {}'.format(BUG_REPORT))
jsEnv = '''String.prototype.italics=function(str) {{return "<i>" + this + "</i>";}};
var subVars= {{{subVars}}};
var document = {{
createElement: function () {{
return {{ firstChild: {{ href: "https://{domain}/" }} }}
}},
getElementById: function (str) {{
return {{"innerHTML": subVars[str]}};
}}
}};
'''
try:
js = js.replace(
r"(setInterval(function(){}, 100),t.match(/https?:\/\//)[0]);",
r"t.match(/https?:\/\//)[0];"
)
k = re.search(r" k\s*=\s*'(?P<k>\S+)';", body).group('k')
r = re.compile(r'<div id="{}(?P<id>\d+)">\s*(?P<jsfuck>[^<>]*)</div>'.format(k))
subVars = ''
for m in r.finditer(body):
subVars = '{}\n\t\t{}{}: {},\n'.format(subVars, k, m.group('id'), m.group('jsfuck'))
subVars = subVars[:-2]
except: # noqa
logging.error('Error extracting Cloudflare IUAM Javascript. {}'.format(BUG_REPORT))
raise
return '{}{}'.format(
re.sub(
r'\s{2,}',
' ',
jsEnv.format(
domain=domain,
subVars=subVars
),
re.MULTILINE | re.DOTALL
),
js
)
# ------------------------------------------------------------------------------- #

@ -6,39 +6,27 @@ import base64
from . import JavaScriptInterpreter
from .encapsulated import template
from .jsunfuck import jsunfuck
# ------------------------------------------------------------------------------- #
class ChallengeInterpreter(JavaScriptInterpreter):
# ------------------------------------------------------------------------------- #
def __init__(self):
super(ChallengeInterpreter, self).__init__('js2py')
# ------------------------------------------------------------------------------- #
def eval(self, body, domain):
jsPayload = template(body, domain)
def eval(self, jsEnv, js):
if js2py.eval_js('(+(+!+[]+[+!+[]]+(!![]+[])[!+[]+!+[]+!+[]]+[!+[]+!+[]]+[+[]])+[])[+!+[]]') == '1':
logging.warning('WARNING - Please upgrade your js2py https://github.com/PiotrDabkowski/Js2Py, applying a workaround in the meantime.')
jsPayload = jsunfuck(jsPayload)
js = jsunfuck(js)
def atob(s):
return base64.b64decode('{}'.format(s)).decode('utf-8')
js2py.disable_pyimport()
context = js2py.EvalJs({'atob': atob})
result = context.eval(jsPayload)
result = context.eval('{}{}'.format(jsEnv, js))
return result
# ------------------------------------------------------------------------------- #
ChallengeInterpreter()

@ -1,233 +0,0 @@
from __future__ import absolute_import
import ast
import re
import operator as op
import pyparsing
from ..exceptions import CloudflareSolveError
from . import JavaScriptInterpreter
# ------------------------------------------------------------------------------- #
_OP_MAP = {
ast.Add: op.add,
ast.Sub: op.sub,
ast.Mult: op.mul,
ast.Div: op.truediv,
ast.Invert: op.neg,
}
# ------------------------------------------------------------------------------- #
class Calc(ast.NodeVisitor):
def visit_BinOp(self, node):
return _OP_MAP[type(node.op)](self.visit(node.left), self.visit(node.right))
# ------------------------------------------------------------------------------- #
def visit_Num(self, node):
return node.n
# ------------------------------------------------------------------------------- #
def visit_Expr(self, node):
return self.visit(node.value)
# ------------------------------------------------------------------------------- #
@classmethod
def doMath(cls, expression):
tree = ast.parse(expression)
calc = cls()
return calc.visit(tree.body[0])
# ------------------------------------------------------------------------------- #
class Parentheses(object):
def fix(self, s):
res = []
self.visited = set([s])
self.dfs(s, self.invalid(s), res)
return res
# ------------------------------------------------------------------------------- #
def dfs(self, s, n, res):
if n == 0:
res.append(s)
return
for i in range(len(s)):
if s[i] in ['(', ')']:
s_new = s[:i] + s[i + 1:]
if s_new not in self.visited and self.invalid(s_new) < n:
self.visited.add(s_new)
self.dfs(s_new, self.invalid(s_new), res)
# ------------------------------------------------------------------------------- #
def invalid(self, s):
plus = minus = 0
memo = {"(": 1, ")": -1}
for c in s:
plus += memo.get(c, 0)
minus += 1 if plus < 0 else 0
plus = max(0, plus)
return plus + minus
# ------------------------------------------------------------------------------- #
class ChallengeInterpreter(JavaScriptInterpreter):
def __init__(self):
super(ChallengeInterpreter, self).__init__('native')
# ------------------------------------------------------------------------------- #
def eval(self, body, domain):
operators = {
'+': op.add,
'-': op.sub,
'*': op.mul,
'/': op.truediv
}
# ------------------------------------------------------------------------------- #
def flatten(lists):
return sum(map(flatten, lists), []) if isinstance(lists, list) else [lists]
# ------------------------------------------------------------------------------- #
def jsfuckToNumber(jsFuck):
# "Clean Up" JSFuck
jsFuck = jsFuck.replace('!+[]', '1').replace('!![]', '1').replace('[]', '0')
jsFuck = jsFuck.lstrip('+').replace('(+', '(').replace(' ', '')
jsFuck = Parentheses().fix(jsFuck)[0]
# Hackery Parser for Math
stack = []
bstack = []
for i in flatten(pyparsing.nestedExpr().parseString(jsFuck).asList()):
if i == '+':
stack.append(bstack)
bstack = []
continue
bstack.append(i)
stack.append(bstack)
return int(''.join([str(Calc.doMath(''.join(i))) for i in stack]))
# ------------------------------------------------------------------------------- #
def divisorMath(payload, needle, domain):
jsfuckMath = payload.split('/')
if needle in jsfuckMath[1]:
expression = re.findall(r"^(.*?)(.)\(function", jsfuckMath[1])[0]
expression_value = operators[expression[1]](
float(jsfuckToNumber(expression[0])),
float(ord(domain[jsfuckToNumber(jsfuckMath[1][
jsfuckMath[1].find('"("+p+")")}') + len('"("+p+")")}'):-2
])]))
)
else:
expression_value = jsfuckToNumber(jsfuckMath[1])
expression_value = jsfuckToNumber(jsfuckMath[0]) / float(expression_value)
return expression_value
# ------------------------------------------------------------------------------- #
def challengeSolve(body, domain):
jschl_answer = 0
try:
jsfuckChallenge = re.search(
r"setTimeout\(function\(\){\s+var.*?f,\s*(?P<variable>\w+).*?:(?P<init>\S+)};"
r".*?\('challenge-form'\);.*?;(?P<challenge>.*?a\.value)\s*=\s*\S+\.toFixed\(10\);",
body,
re.DOTALL | re.MULTILINE
).groupdict()
except AttributeError:
raise CloudflareSolveError('There was an issue extracting "jsfuckChallenge" from the Cloudflare challenge.')
kJSFUCK = re.search(r'(;|)\s*k.=(?P<kJSFUCK>\S+);', jsfuckChallenge['challenge'], re.S | re.M)
if kJSFUCK:
try:
kJSFUCK = jsfuckToNumber(kJSFUCK.group('kJSFUCK'))
except IndexError:
raise CloudflareSolveError('There was an issue extracting "kJSFUCK" from the Cloudflare challenge.')
try:
kID = re.search(r"\s*k\s*=\s*'(?P<kID>\S+)';", body).group('kID')
except IndexError:
raise CloudflareSolveError('There was an issue extracting "kID" from the Cloudflare challenge.')
try:
r = re.compile(r'<div id="{}(?P<id>\d+)">\s*(?P<jsfuck>[^<>]*)</div>'.format(kID))
kValues = {}
for m in r.finditer(body):
kValues[int(m.group('id'))] = m.group('jsfuck')
jsfuckChallenge['k'] = kValues[kJSFUCK]
except (AttributeError, IndexError):
raise CloudflareSolveError('There was an issue extracting "kValues" from the Cloudflare challenge.')
jsfuckChallenge['challenge'] = re.finditer(
r'{}.*?([+\-*/])=(.*?);(?=a\.value|{})'.format(
jsfuckChallenge['variable'],
jsfuckChallenge['variable']
),
jsfuckChallenge['challenge']
)
# ------------------------------------------------------------------------------- #
if '/' in jsfuckChallenge['init']:
val = jsfuckChallenge['init'].split('/')
jschl_answer = jsfuckToNumber(val[0]) / float(jsfuckToNumber(val[1]))
else:
jschl_answer = jsfuckToNumber(jsfuckChallenge['init'])
# ------------------------------------------------------------------------------- #
for expressionMatch in jsfuckChallenge['challenge']:
oper, expression = expressionMatch.groups()
if '/' in expression:
expression_value = divisorMath(expression, 'function(p)', domain)
else:
if 'Element' in expression:
expression_value = divisorMath(jsfuckChallenge['k'], '"("+p+")")}', domain)
else:
expression_value = jsfuckToNumber(expression)
jschl_answer = operators[oper](jschl_answer, expression_value)
# ------------------------------------------------------------------------------- #
# if not jsfuckChallenge['k'] and '+ t.length' in body:
# jschl_answer += len(domain)
# ------------------------------------------------------------------------------- #
return '{0:.10f}'.format(jschl_answer)
# ------------------------------------------------------------------------------- #
return challengeSolve(body, domain)
# ------------------------------------------------------------------------------- #
ChallengeInterpreter()

@ -1,23 +1,22 @@
import base64
import logging
import subprocess
import sys
from . import JavaScriptInterpreter
from .encapsulated import template
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
BUG_REPORT = 'Cloudflare may have changed their technique, or there may be a bug in the script.'
##########################################################################################################################################################
class ChallengeInterpreter(JavaScriptInterpreter):
# ------------------------------------------------------------------------------- #
class ChallengeInterpreter(JavaScriptInterpreter):
def __init__(self):
super(ChallengeInterpreter, self).__init__('nodejs')
# ------------------------------------------------------------------------------- #
def eval(self, body, domain):
def eval(self, jsEnv, js):
try:
js = 'var atob = function(str) {return Buffer.from(str, "base64").toString("binary");};' \
'var challenge = atob("%s");' \
@ -25,25 +24,23 @@ class ChallengeInterpreter(JavaScriptInterpreter):
'var options = {filename: "iuam-challenge.js", timeout: 4000};' \
'var answer = require("vm").runInNewContext(challenge, context, options);' \
'process.stdout.write(String(answer));' \
% base64.b64encode(template(body, domain).encode('UTF-8')).decode('ascii')
% base64.b64encode('{}{}'.format(jsEnv, js).encode('UTF-8')).decode('ascii')
return subprocess.check_output(['node', '-e', js])
except OSError as e:
if e.errno == 2:
raise EnvironmentError(
'Missing Node.js runtime. Node is required and must be in the PATH (check with `node -v`).\n\n'
'Your Node binary may be called `nodejs` rather than `node`, '
'in which case you may need to run `apt-get install nodejs-legacy` on some Debian-based systems.\n\n'
'(Please read the cloudscraper README\'s Dependencies section: '
'https://github.com/VeNoMouS/cloudscraper#dependencies.)'
'Missing Node.js runtime. Node is required and must be in the PATH (check with `node -v`). Your Node binary may be called `nodejs` rather than `node`, '
'in which case you may need to run `apt-get install nodejs-legacy` on some Debian-based systems. (Please read the cloudscraper'
' README\'s Dependencies section: https://github.com/VeNoMouS/cloudscraper#dependencies.'
)
raise
except Exception:
sys.tracebacklimit = 0
raise RuntimeError('Error executing Cloudflare IUAM Javascript in nodejs')
logging.error('Error executing Cloudflare IUAM Javascript. %s' % BUG_REPORT)
raise
pass
# ------------------------------------------------------------------------------- #
ChallengeInterpreter()

@ -1,33 +0,0 @@
from __future__ import absolute_import
import sys
try:
import v8eval
except ImportError:
sys.tracebacklimit = 0
raise RuntimeError('Please install the python module v8eval either via pip or download it from https://github.com/sony/v8eval')
from . import JavaScriptInterpreter
from .encapsulated import template
# ------------------------------------------------------------------------------- #
class ChallengeInterpreter(JavaScriptInterpreter):
def __init__(self):
super(ChallengeInterpreter, self).__init__('v8')
# ------------------------------------------------------------------------------- #
def eval(self, body, domain):
try:
return v8eval.V8().eval(template(body, domain))
except (TypeError, v8eval.V8Error):
raise RuntimeError('We encountered an error running the V8 Engine.')
# ------------------------------------------------------------------------------- #
ChallengeInterpreter()

@ -1,248 +0,0 @@
from __future__ import absolute_import
import requests
from ..exceptions import (
reCaptchaServiceUnavailable,
reCaptchaAPIError,
reCaptchaTimeout,
reCaptchaParameter,
reCaptchaBadJobID,
reCaptchaReportError
)
try:
import polling
except ImportError:
raise ImportError(
"Please install the python module 'polling' via pip or download it from "
"https://github.com/justiniso/polling/"
)
from . import reCaptcha
class captchaSolver(reCaptcha):
def __init__(self):
super(captchaSolver, self).__init__('2captcha')
self.host = 'https://2captcha.com'
self.session = requests.Session()
# ------------------------------------------------------------------------------- #
@staticmethod
def checkErrorStatus(response, request_type):
if response.status_code in [500, 502]:
raise reCaptchaServiceUnavailable('2Captcha: Server Side Error {}'.format(response.status_code))
errors = {
'in.php': {
"ERROR_WRONG_USER_KEY": "You've provided api_key parameter value is in incorrect format, it should contain 32 symbols.",
"ERROR_KEY_DOES_NOT_EXIST": "The api_key you've provided does not exists.",
"ERROR_ZERO_BALANCE": "You don't have sufficient funds on your account.",
"ERROR_PAGEURL": "pageurl parameter is missing in your request.",
"ERROR_NO_SLOT_AVAILABLE":
"No Slots Available.\nYou can receive this error in two cases:\n"
"1. If you solve ReCaptcha: the queue of your captchas that are not distributed to workers is too long. "
"Queue limit changes dynamically and depends on total amount of captchas awaiting solution and usually it's between 50 and 100 captchas.\n"
"2. If you solve Normal Captcha: your maximum rate for normal captchas is lower than current rate on the server."
"You can change your maximum rate in your account's settings.",
"ERROR_IP_NOT_ALLOWED": "The request is sent from the IP that is not on the list of your allowed IPs.",
"IP_BANNED": "Your IP address is banned due to many frequent attempts to access the server using wrong authorization keys.",
"ERROR_BAD_TOKEN_OR_PAGEURL":
"You can get this error code when sending ReCaptcha V2. "
"That happens if your request contains invalid pair of googlekey and pageurl. "
"The common reason for that is that ReCaptcha is loaded inside an iframe hosted on another domain/subdomain.",
"ERROR_GOOGLEKEY":
"You can get this error code when sending ReCaptcha V2. "
"That means that sitekey value provided in your request is incorrect: it's blank or malformed.",
"MAX_USER_TURN": "You made more than 60 requests within 3 seconds.Your account is banned for 10 seconds. Ban will be lifted automatically."
},
'res.php': {
"ERROR_CAPTCHA_UNSOLVABLE":
"We are unable to solve your captcha - three of our workers were unable solve it "
"or we didn't get an answer within 90 seconds (300 seconds for ReCaptcha V2). "
"We will not charge you for that request.",
"ERROR_WRONG_USER_KEY": "You've provided api_key parameter value in incorrect format, it should contain 32 symbols.",
"ERROR_KEY_DOES_NOT_EXIST": "The api_key you've provided does not exists.",
"ERROR_WRONG_ID_FORMAT": "You've provided captcha ID in wrong format. The ID can contain numbers only.",
"ERROR_WRONG_CAPTCHA_ID": "You've provided incorrect captcha ID.",
"ERROR_BAD_DUPLICATES":
"Error is returned when 100% accuracy feature is enabled. "
"The error means that max numbers of tries is reached but min number of matches not found.",
"REPORT_NOT_RECORDED": "Error is returned to your complain request if you already complained lots of correctly solved captchas.",
"ERROR_IP_ADDRES":
"You can receive this error code when registering a pingback (callback) IP or domain."
"That happes if your request is coming from an IP address that doesn't match the IP address of your pingback IP or domain.",
"ERROR_TOKEN_EXPIRED": "You can receive this error code when sending GeeTest. That error means that challenge value you provided is expired.",
"ERROR_EMPTY_ACTION": "Action parameter is missing or no value is provided for action parameter."
}
}
if response.json().get('status') == 0 and response.json().get('request') in errors.get(request_type):
raise reCaptchaAPIError(
'{} {}'.format(
response.json().get('request'),
errors.get(request_type).get(response.json().get('request'))
)
)
# ------------------------------------------------------------------------------- #
def reportJob(self, jobID):
if not jobID:
raise reCaptchaBadJobID(
"2Captcha: Error bad job id to request reCaptcha."
)
def _checkRequest(response):
if response.ok and response.json().get('status') == 1:
return response
self.checkErrorStatus(response, 'res.php')
return None
response = polling.poll(
lambda: self.session.get(
'{}/res.php'.format(self.host),
params={
'key': self.api_key,
'action': 'reportbad',
'id': jobID,
'json': '1'
},
timeout=30
),
check_success=_checkRequest,
step=5,
timeout=180
)
if response:
return True
else:
raise reCaptchaReportError(
"2Captcha: Error - Failed to report bad reCaptcha solve."
)
# ------------------------------------------------------------------------------- #
def requestJob(self, jobID):
if not jobID:
raise reCaptchaBadJobID("2Captcha: Error bad job id to request reCaptcha.")
def _checkRequest(response):
if response.ok and response.json().get('status') == 1:
return response
self.checkErrorStatus(response, 'res.php')
return None
response = polling.poll(
lambda: self.session.get(
'{}/res.php'.format(self.host),
params={
'key': self.api_key,
'action': 'get',
'id': jobID,
'json': '1'
},
timeout=30
),
check_success=_checkRequest,
step=5,
timeout=180
)
if response:
return response.json().get('request')
else:
raise reCaptchaTimeout(
"2Captcha: Error failed to solve reCaptcha."
)
# ------------------------------------------------------------------------------- #
def requestSolve(self, captchaType, url, siteKey):
def _checkRequest(response):
if response.ok and response.json().get("status") == 1 and response.json().get('request'):
return response
self.checkErrorStatus(response, 'in.php')
return None
data = {
'key': self.api_key,
'pageurl': url,
'json': 1,
'soft_id': 5507698
}
data.update(
{
'method': 'userrecaptcha',
'googlekey': siteKey
} if captchaType == 'reCaptcha' else {
'method': 'hcaptcha',
'sitekey': siteKey
}
)
response = polling.poll(
lambda: self.session.post(
'{}/in.php'.format(self.host),
data=data,
allow_redirects=False,
timeout=30
),
check_success=_checkRequest,
step=5,
timeout=180
)
if response:
return response.json().get('request')
else:
raise reCaptchaBadJobID(
'2Captcha: Error no job id was returned.'
)
# ------------------------------------------------------------------------------- #
def getCaptchaAnswer(self, captchaType, url, siteKey, reCaptchaParams):
jobID = None
if not reCaptchaParams.get('api_key'):
raise reCaptchaParameter(
"2Captcha: Missing api_key parameter."
)
self.api_key = reCaptchaParams.get('api_key')
if reCaptchaParams.get('proxy'):
self.session.proxies = reCaptchaParams.get('proxies')
try:
jobID = self.requestSolve(captchaType, url, siteKey)
return self.requestJob(jobID)
except polling.TimeoutException:
try:
if jobID:
self.reportJob(jobID)
except polling.TimeoutException:
raise reCaptchaTimeout(
"2Captcha: reCaptcha solve took to long and also failed reporting the job the job id {}.".format(jobID)
)
raise reCaptchaTimeout(
"2Captcha: reCaptcha solve took to long to execute job id {}, aborting.".format(jobID)
)
# ------------------------------------------------------------------------------- #
captchaSolver()
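For reference, the polling pattern the solver above relies on looks roughly like this in isolation (a minimal sketch; the endpoint, params and check function are placeholders, not 2Captcha specifics):

import polling
import requests

def wait_for_result(session, url, params, check):
    # Retry the GET every 5 seconds until `check` accepts the response,
    # giving up after 180 seconds - the same step/timeout used above.
    # On timeout polling raises polling.TimeoutException.
    return polling.poll(
        lambda: session.get(url, params=params, timeout=30),
        check_success=check,
        step=5,
        timeout=180
    )

# Usage sketch:
# wait_for_result(requests.Session(), 'https://example.com/res.php',
#                 {'id': 'job-id'}, lambda r: r if r.ok else None)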

@ -1,212 +0,0 @@
from __future__ import absolute_import
import re
import requests
try:
import polling
except ImportError:
raise ImportError(
"Please install the python module 'polling' via pip or download it from "
"https://github.com/justiniso/polling/"
)
from ..exceptions import (
reCaptchaServiceUnavailable,
reCaptchaAPIError,
reCaptchaTimeout,
reCaptchaParameter,
reCaptchaBadJobID
)
from . import reCaptcha
class captchaSolver(reCaptcha):
def __init__(self):
super(captchaSolver, self).__init__('9kw')
self.host = 'https://www.9kw.eu/index.cgi'
self.maxtimeout = 180
self.session = requests.Session()
# ------------------------------------------------------------------------------- #
@staticmethod
def checkErrorStatus(response):
if response.status_code in [500, 502]:
raise reCaptchaServiceUnavailable(
'9kw: Server Side Error {}'.format(response.status_code)
)
error_codes = {
1: 'No API key available.',
2: 'No API key found.',
3: 'No active API key found.',
4: 'API key has been disabled by the operator.',
5: 'No user found.',
6: 'No data found.',
7: 'No ID found.',
8: 'No captcha found.',
9: 'No image found.',
10: 'Image size not allowed.',
11: 'Credit is not sufficient.',
12: 'Already done.',
13: 'No answer contained.',
14: 'Captcha has already been answered.',
15: 'Captcha submitted too quickly.',
16: 'JD check active.',
17: 'Unknown problem.',
18: 'No ID found.',
19: 'Incorrect answer.',
20: 'Not submitted in time (incorrect user ID).',
21: 'Link not allowed.',
22: 'Submission prohibited.',
23: 'Entry prohibited.',
24: 'Too little credit.',
25: 'No entry found.',
26: 'Terms and conditions not accepted.',
27: 'No coupon code found in the database.',
28: 'Voucher code already used.',
29: 'maxTimeout is below 60 seconds.',
30: 'User not found.',
31: 'Account has not been in the system for 24 hours yet.',
32: 'Account does not have full rights.',
33: 'Plugin needs an update.',
34: 'No HTTPS allowed.',
35: 'No HTTP allowed.',
36: 'Source not allowed.',
37: 'Transfer denied.',
38: 'Incorrect answer without space.',
39: 'Incorrect answer with space.',
40: 'Incorrect answer with not only numbers.',
41: 'Incorrect answer with not only A-Z, a-z.',
42: 'Incorrect answer with not only 0-9, A-Z, a-z.',
43: 'Incorrect answer with not only [0-9,- ].',
44: 'Incorrect answer with not only [0-9A-Za-z,- ].',
45: 'Incorrect answer with not only coordinates.',
46: 'Incorrect answer with not only multiple coordinates.',
47: 'Incorrect answer with not only data.',
48: 'Incorrect answer with not only rotate number.',
49: 'Incorrect answer with not only text.',
50: 'Incorrect answer with not only text and too short.',
51: 'Incorrect answer with not enough chars.',
52: 'Incorrect answer with too many chars.',
53: 'Incorrect answer (not yes or no).',
54: 'Assignment was not found.'
}
if response.text.startswith('{'):
if response.json().get('error'):
raise reCaptchaAPIError(error_codes.get(int(response.json().get('error'))))
else:
error_code = int(re.search(r'^00(?P<error_code>\d+)', response.text).groupdict().get('error_code', 0))
if error_code:
raise reCaptchaAPIError(error_codes.get(error_code))
# ------------------------------------------------------------------------------- #
def requestJob(self, jobID):
if not jobID:
raise reCaptchaBadJobID(
"9kw: Error bad job id to request reCaptcha against."
)
def _checkRequest(response):
if response.ok and response.json().get('answer') != 'NO DATA':
return response
self.checkErrorStatus(response)
return None
response = polling.poll(
lambda: self.session.get(
self.host,
params={
'apikey': self.api_key,
'action': 'usercaptchacorrectdata',
'id': jobID,
'info': 1,
'json': 1
}
),
check_success=_checkRequest,
step=10,
timeout=(self.maxtimeout + 10)
)
if response:
return response.json().get('answer')
else:
raise reCaptchaTimeout("9kw: Error failed to solve reCaptcha.")
# ------------------------------------------------------------------------------- #
def requestSolve(self, captchaType, url, siteKey):
def _checkRequest(response):
if response.ok and response.text.startswith('{') and response.json().get('captchaid'):
return response
self.checkErrorStatus(response)
return None
captchaMap = {
'reCaptcha': 'recaptchav2',
'hCaptcha': 'hcaptcha'
}
response = polling.poll(
lambda: self.session.post(
self.host,
data={
'apikey': self.api_key,
'action': 'usercaptchaupload',
'interactive': 1,
'file-upload-01': siteKey,
'oldsource': captchaMap[captchaType],
'pageurl': url,
'maxtimeout': self.maxtimeout,
'json': 1
},
allow_redirects=False
),
check_success=_checkRequest,
step=5,
timeout=(self.maxtimeout + 10)
)
if response:
return response.json().get('captchaid')
else:
raise reCaptchaBadJobID('9kw: Error no valid job id was returned.')
# ------------------------------------------------------------------------------- #
def getCaptchaAnswer(self, captchaType, url, siteKey, reCaptchaParams):
jobID = None
if not reCaptchaParams.get('api_key'):
raise reCaptchaParameter("9kw: Missing api_key parameter.")
self.api_key = reCaptchaParams.get('api_key')
if reCaptchaParams.get('maxtimeout'):
self.maxtimeout = reCaptchaParams.get('maxtimeout')
if reCaptchaParams.get('proxy'):
self.session.proxies = reCaptchaParams.get('proxies')
try:
jobID = self.requestSolve(captchaType, url, siteKey)
return self.requestJob(jobID)
except polling.TimeoutException:
raise reCaptchaTimeout(
"9kw: reCaptcha solve took to long to execute 'captchaid' {}, aborting.".format(jobID)
)
# ------------------------------------------------------------------------------- #
captchaSolver()
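A quick illustration of how the plain-text (non-JSON) error replies are decoded by checkErrorStatus above (a sketch; '0012 ...' is just an example reply):

import re

reply = '0012 Already done.'
match = re.search(r'^00(?P<error_code>\d+)', reply)
# '0012' yields error code 12, which maps to 'Already done.' in error_codes.
error_code = int(match.groupdict().get('error_code', 0)) if match else 0
print(error_code)  # -> 12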

@ -1,46 +0,0 @@
import abc
import logging
import sys
if sys.version_info >= (3, 4):
ABC = abc.ABC # noqa
else:
ABC = abc.ABCMeta('ABC', (), {})
# ------------------------------------------------------------------------------- #
captchaSolvers = {}
# ------------------------------------------------------------------------------- #
class reCaptcha(ABC):
@abc.abstractmethod
def __init__(self, name):
captchaSolvers[name] = self
# ------------------------------------------------------------------------------- #
@classmethod
def dynamicImport(cls, name):
if name not in captchaSolvers:
try:
__import__('{}.{}'.format(cls.__module__, name))
if not isinstance(captchaSolvers.get(name), reCaptcha):
raise ImportError('The anti reCaptcha provider was not initialized.')
except ImportError:
logging.error("Unable to load {} anti reCaptcha provider".format(name))
raise
return captchaSolvers[name]
# ------------------------------------------------------------------------------- #
@abc.abstractmethod
def getCaptchaAnswer(self, captchaType, url, siteKey, reCaptchaParams):
pass
# ------------------------------------------------------------------------------- #
def solveCaptcha(self, captchaType, url, siteKey, reCaptchaParams):
return self.getCaptchaAnswer(captchaType, url, siteKey, reCaptchaParams)
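How a concrete solver is resolved through this registry, as a minimal sketch (assumes reCaptcha is imported from this package; the provider name, URL, site key and credentials are placeholder values):

# Lazily import and instantiate the '2captcha' solver registered above,
# then ask it for an answer.
solver = reCaptcha.dynamicImport('2captcha')
answer = solver.solveCaptcha(
    'reCaptcha',
    'https://example.com/protected-page',
    'placeholder-site-key',
    {'api_key': 'placeholder-2captcha-key'}
)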

@ -1,75 +0,0 @@
from __future__ import absolute_import
from ..exceptions import (
reCaptchaParameter,
reCaptchaTimeout,
reCaptchaAPIError
)
try:
from python_anticaptcha import (
AnticaptchaClient,
NoCaptchaTaskProxylessTask,
HCaptchaTaskProxyless,
AnticaptchaException
)
except ImportError:
raise ImportError(
"Please install/upgrade the python module 'python_anticaptcha' via "
"pip install python-anticaptcha or https://github.com/ad-m/python-anticaptcha/"
)
import sys
from . import reCaptcha
class captchaSolver(reCaptcha):
def __init__(self):
if sys.modules['python_anticaptcha'].__version__ < '0.6':
raise ImportError(
"Please upgrade the python module 'python_anticaptcha' via "
"pip install -U python-anticaptcha or https://github.com/ad-m/python-anticaptcha/"
)
super(captchaSolver, self).__init__('anticaptcha')
# ------------------------------------------------------------------------------- #
def getCaptchaAnswer(self, captchaType, url, siteKey, reCaptchaParams):
if not reCaptchaParams.get('api_key'):
raise reCaptchaParameter("anticaptcha: Missing api_key parameter.")
client = AnticaptchaClient(reCaptchaParams.get('api_key'))
if reCaptchaParams.get('proxy'):
client.session.proxies = reCaptchaParams.get('proxies')
captchaMap = {
'reCaptcha': NoCaptchaTaskProxylessTask,
'hCaptcha': HCaptchaTaskProxyless
}
task = captchaMap[captchaType](url, siteKey)
if not hasattr(client, 'createTaskSmee'):
raise NotImplementedError(
"Please upgrade 'python_anticaptcha' via pip or download it from "
"https://github.com/ad-m/python-anticaptcha/tree/hcaptcha"
)
job = client.createTaskSmee(task, timeout=180)
try:
job.join(maximum_time=180)
except (AnticaptchaException) as e:
raise reCaptchaTimeout('{}'.format(getattr(e, 'message', e)))
if 'solution' in job._last_result:
return job.get_solution_response()
else:
raise reCaptchaAPIError('Job did not return `solution` key in payload.')
# ------------------------------------------------------------------------------- #
captchaSolver()
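One caveat with the version guard above: comparing version strings lexicographically mis-orders releases such as '0.10'. A tuple-based comparison is a safer sketch:

def version_tuple(vstr):
    # '0.10' -> (0, 10); only numeric components are considered.
    return tuple(int(part) for part in vstr.split('.') if part.isdigit())

# Lexicographic comparison says '0.10' < '0.6' (wrong); tuple-wise it does not.
assert version_tuple('0.10') > version_tuple('0.6')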

@ -1,233 +0,0 @@
from __future__ import absolute_import
import json
import requests
try:
import polling
except ImportError:
raise ImportError(
"Please install the python module 'polling' via pip or download it from "
"https://github.com/justiniso/polling/"
)
from ..exceptions import (
reCaptchaException,
reCaptchaServiceUnavailable,
reCaptchaAccountError,
reCaptchaTimeout,
reCaptchaParameter,
reCaptchaBadJobID,
reCaptchaReportError
)
from . import reCaptcha
class captchaSolver(reCaptcha):
def __init__(self):
super(captchaSolver, self).__init__('deathbycaptcha')
self.host = 'http://api.dbcapi.me/api'
self.session = requests.Session()
# ------------------------------------------------------------------------------- #
@staticmethod
def checkErrorStatus(response):
errors = dict(
[
(400, "DeathByCaptcha: 400 Bad Request"),
(403, "DeathByCaptcha: 403 Forbidden - Invalid credentails or insufficient credits."),
# (500, "DeathByCaptcha: 500 Internal Server Error."),
(503, "DeathByCaptcha: 503 Service Temporarily Unavailable.")
]
)
if response.status_code in errors:
raise reCaptchaServiceUnavailable(errors.get(response.status_code))
# ------------------------------------------------------------------------------- #
def login(self, username, password):
self.username = username
self.password = password
def _checkRequest(response):
if response.ok:
if response.json().get('is_banned'):
raise reCaptchaAccountError('DeathByCaptcha: Your account is banned.')
if response.json().get('balance') == 0:
raise reCaptchaAccountError('DeathByCaptcha: insufficient credits.')
return response
self.checkErrorStatus(response)
return None
response = polling.poll(
lambda: self.session.post(
'{}/user'.format(self.host),
headers={'Accept': 'application/json'},
data={
'username': self.username,
'password': self.password
}
),
check_success=_checkRequest,
step=10,
timeout=120
)
self.debugRequest(response)
# ------------------------------------------------------------------------------- #
def reportJob(self, jobID):
if not jobID:
raise reCaptchaBadJobID(
"DeathByCaptcha: Error bad job id to report failed reCaptcha."
)
def _checkRequest(response):
if response.status_code == 200:
return response
self.checkErrorStatus(response)
return None
response = polling.poll(
lambda: self.session.post(
'{}/captcha/{}/report'.format(self.host, jobID),
headers={'Accept': 'application/json'},
data={
'username': self.username,
'password': self.password
}
),
check_success=_checkRequest,
step=10,
timeout=180
)
if response:
return True
else:
raise reCaptchaReportError(
"DeathByCaptcha: Error report failed reCaptcha."
)
# ------------------------------------------------------------------------------- #
def requestJob(self, jobID):
if not jobID:
raise reCaptchaBadJobID(
"DeathByCaptcha: Error bad job id to request reCaptcha."
)
def _checkRequest(response):
if response.ok and response.json().get('text'):
return response
self.checkErrorStatus(response)
return None
response = polling.poll(
lambda: self.session.get(
'{}/captcha/{}'.format(self.host, jobID),
headers={'Accept': 'application/json'}
),
check_success=_checkRequest,
step=10,
timeout=180
)
if response:
return response.json().get('text')
else:
raise reCaptchaTimeout(
"DeathByCaptcha: Error failed to solve reCaptcha."
)
# ------------------------------------------------------------------------------- #
def requestSolve(self, url, siteKey):
def _checkRequest(response):
if response.ok and response.json().get("is_correct") and response.json().get('captcha'):
return response
self.checkErrorStatus(response)
return None
response = polling.poll(
lambda: self.session.post(
'{}/captcha'.format(self.host),
headers={'Accept': 'application/json'},
data={
'username': self.username,
'password': self.password,
'type': '4',
'token_params': json.dumps({
'googlekey': siteKey,
'pageurl': url
})
},
allow_redirects=False
),
check_success=_checkRequest,
step=10,
timeout=180
)
if response:
return response.json().get('captcha')
else:
raise reCaptchaBadJobID(
'DeathByCaptcha: Error no job id was returned.'
)
# ------------------------------------------------------------------------------- #
def getCaptchaAnswer(self, captchaType, url, siteKey, reCaptchaParams):
jobID = None
for param in ['username', 'password']:
if not reCaptchaParams.get(param):
raise reCaptchaParameter(
"DeathByCaptcha: Missing '{}' parameter.".format(param)
)
setattr(self, param, reCaptchaParams.get(param))
if captchaType == 'hCaptcha':
raise reCaptchaException(
'Provider does not support hCaptcha.'
)
if reCaptchaParams.get('proxy'):
self.session.proxies = reCaptchaParams.get('proxies')
try:
jobID = self.requestSolve(url, siteKey)
return self.requestJob(jobID)
except polling.TimeoutException:
try:
if jobID:
self.reportJob(jobID)
except polling.TimeoutException:
raise reCaptchaTimeout(
"DeathByCaptcha: reCaptcha solve took to long and also failed reporting the job id {}.".format(jobID)
)
raise reCaptchaTimeout(
"DeathByCaptcha: reCaptcha solve took to long to execute job id {}, aborting.".format(jobID)
)
# ------------------------------------------------------------------------------- #
captchaSolver()

@ -1,117 +1,38 @@
import json
import os
import json
import random
import re
import sys
import ssl
import logging
from collections import OrderedDict
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
class User_Agent():
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
def __init__(self, *args, **kwargs):
self.headers = None
self.cipherSuite = []
self.loadUserAgent(*args, **kwargs)
# ------------------------------------------------------------------------------- #
def loadHeaders(self, user_agents, user_agent_version):
if user_agents.get(self.browser).get('releases').get(user_agent_version).get('headers'):
self.headers = user_agents.get(self.browser).get('releases').get(user_agent_version).get('headers')
else:
self.headers = user_agents.get(self.browser).get('default_headers')
# ------------------------------------------------------------------------------- #
def filterAgents(self, releases):
filtered = {}
for release in releases:
if self.mobile and releases[release]['User-Agent']['mobile']:
filtered[release] = filtered.get(release, []) + releases[release]['User-Agent']['mobile']
if self.desktop and releases[release]['User-Agent']['desktop']:
filtered[release] = filtered.get(release, []) + releases[release]['User-Agent']['desktop']
return filtered
# ------------------------------------------------------------------------------- #
def tryMatchCustom(self, user_agents):
for browser in user_agents:
for release in user_agents[browser]['releases']:
for platform in ['mobile', 'desktop']:
if re.search(re.escape(self.custom), ' '.join(user_agents[browser]['releases'][release]['User-Agent'][platform])):
self.browser = browser
self.loadHeaders(user_agents, release)
self.headers['User-Agent'] = self.custom
self.cipherSuite = user_agents[self.browser].get('cipherSuite', [])
return True
return False
# ------------------------------------------------------------------------------- #
##########################################################################################################################################################
def loadUserAgent(self, *args, **kwargs):
self.browser = kwargs.pop('browser', None)
if isinstance(self.browser, dict):
self.custom = self.browser.get('custom', None)
self.desktop = self.browser.get('desktop', True)
self.mobile = self.browser.get('mobile', True)
self.browser = self.browser.get('browser', None)
else:
self.custom = kwargs.pop('custom', None)
self.desktop = kwargs.pop('desktop', True)
self.mobile = kwargs.pop('mobile', True)
if not self.desktop and not self.mobile:
sys.tracebacklimit = 0
raise RuntimeError("Sorry you can't have mobile and desktop disabled at the same time.")
with open(os.path.join(os.path.dirname(__file__), 'browsers.json'), 'r') as fp:
user_agents = json.load(
fp,
object_pairs_hook=OrderedDict
)
if self.custom:
if not self.tryMatchCustom(user_agents):
self.cipherSuite = [
ssl._DEFAULT_CIPHERS,
'!AES128-SHA',
'!ECDHE-RSA-AES256-SHA',
]
self.headers = OrderedDict([
('User-Agent', self.custom),
('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'),
('Accept-Language', 'en-US,en;q=0.9'),
('Accept-Encoding', 'gzip, deflate, br')
])
else:
if self.browser and not user_agents.get(self.browser):
sys.tracebacklimit = 0
raise RuntimeError('Sorry "{}" browser User-Agent was not found.'.format(self.browser))
if not self.browser:
self.browser = random.SystemRandom().choice(list(user_agents))
self.cipherSuite = user_agents.get(self.browser).get('cipherSuite', [])
browser = kwargs.pop('browser', 'chrome')
filteredAgents = self.filterAgents(user_agents.get(self.browser).get('releases'))
with open(os.path.join(os.path.dirname(__file__), 'browsers.json'), 'r') as file:
user_agents = json.load(file, object_pairs_hook=OrderedDict)
user_agent_version = random.SystemRandom().choice(list(filteredAgents))
if not user_agents.get(browser):
logging.error('Sorry "{}" browser User-Agent was not found.'.format(browser))
raise
self.loadHeaders(user_agents, user_agent_version)
user_agent = random.choice(user_agents.get(browser))
self.headers['User-Agent'] = random.SystemRandom().choice(filteredAgents[user_agent_version])
self.headers = user_agent.get('headers')
self.headers['User-Agent'] = random.choice(user_agent.get('User-Agent'))
if not kwargs.get('allow_brotli', False) and 'br' in self.headers['Accept-Encoding']:
self.headers['Accept-Encoding'] = ','.join([
encoding for encoding in self.headers['Accept-Encoding'].split(',') if encoding.strip() != 'br'
]).strip()
if not kwargs.get('allow_brotli', False):
if 'br' in self.headers['Accept-Encoding']:
self.headers['Accept-Encoding'] = ','.join([encoding for encoding in self.headers['Accept-Encoding'].split(',') if encoding.strip() != 'br']).strip()
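For orientation, the class above is typically constructed like this (a sketch based on how loadUserAgent reads its kwargs; the values are examples):

# Pick a random desktop Chrome User-Agent with its matching headers and ciphers.
ua = User_Agent(browser={'browser': 'chrome', 'desktop': True, 'mobile': False})
print(ua.headers['User-Agent'])
print(ua.cipherSuite[:3])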

File diff suppressed because it is too large

@ -23,9 +23,9 @@ def get_keywords():
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
git_refnames = " (HEAD -> master)"
git_full = "997749de8aac74ec19137a2e641b97ef1bba81ea"
git_date = "2020-08-04 20:06:18 -0700"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords

@ -0,0 +1,77 @@
# -*- coding: utf-8 -*-
import logging
import os
import platform
import subprocess
from .constants import SUBSYNC_RESOURCES_ENV_MAGIC
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# ref: https://github.com/pyinstaller/pyinstaller/wiki/Recipe-subprocess
# Create a set of arguments which make a ``subprocess.Popen`` (and
# variants) call work with or without Pyinstaller, ``--noconsole`` or
# not, on Windows and Linux. Typical use::
#
# subprocess.call(['program_to_run', 'arg_1'], **subprocess_args())
#
# When calling ``check_output``::
#
# subprocess.check_output(['program_to_run', 'arg_1'],
# **subprocess_args(False))
def subprocess_args(include_stdout=True):
# The following is true only on Windows.
if hasattr(subprocess, 'STARTUPINFO'):
# On Windows, subprocess calls will pop up a command window by default
# when run from Pyinstaller with the ``--noconsole`` option. Avoid this
# distraction.
si = subprocess.STARTUPINFO()
si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
# Windows doesn't search the path by default. Pass it an environment so
# it will.
env = os.environ
else:
si = None
env = None
# ``subprocess.check_output`` doesn't allow specifying ``stdout``::
#
# Traceback (most recent call last):
# File "test_subprocess.py", line 58, in <module>
# **subprocess_args(stdout=None))
# File "C:\Python27\lib\subprocess.py", line 567, in check_output
# raise ValueError('stdout argument not allowed, it will be overridden.')
# ValueError: stdout argument not allowed, it will be overridden.
#
# So, add it only if it's needed.
if include_stdout:
ret = {'stdout': subprocess.PIPE}
else:
ret = {}
# On Windows, running this from the binary produced by Pyinstaller
# with the ``--noconsole`` option requires redirecting everything
# (stdin, stdout, stderr) to avoid an OSError exception
# "[Error 6] the handle is invalid."
ret.update({'stdin': subprocess.PIPE,
'stderr': subprocess.PIPE,
'startupinfo': si,
'env': env})
return ret
def ffmpeg_bin_path(bin_name, gui_mode, ffmpeg_resources_path=None):
if platform.system() == 'Windows':
bin_name = '{}.exe'.format(bin_name)
if ffmpeg_resources_path is not None:
return os.path.join(ffmpeg_resources_path, bin_name)
try:
resource_path = os.environ[SUBSYNC_RESOURCES_ENV_MAGIC]
if len(resource_path) > 0:
return os.path.join(resource_path, 'ffmpeg-bin', bin_name)
except KeyError:
if gui_mode:
logger.info("Couldn't find resource path; falling back to searching system path")
return bin_name
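A minimal usage sketch of the two helpers above (assumes ffmpeg/ffprobe are on the system PATH or bundled next to the frozen binary), mirroring the pattern described in the module comment:

import subprocess

probe_bin = ffmpeg_bin_path('ffprobe', gui_mode=False)
# Works both from a normal interpreter and from a PyInstaller --noconsole build.
output = subprocess.check_output([probe_bin, '-version'],
                                 **subprocess_args(include_stdout=False))
print(output.splitlines()[0])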

@ -5,12 +5,14 @@ from datetime import datetime
import logging
import os
import shutil
import subprocess
import sys
import numpy as np
from .aligners import FFTAligner, MaxScoreAligner, FailedToFindAlignmentException
from .constants import *
from .ffmpeg_utils import ffmpeg_bin_path
from .sklearn_shim import Pipeline
from .speech_transformers import (
VideoSpeechTransformer,
@ -19,7 +21,7 @@ from .speech_transformers import (
)
from .subtitle_parser import make_subtitle_parser
from .subtitle_transformers import SubtitleMerger, SubtitleShifter
from .version import __version__
from .version import get_version
logger = logging.getLogger(__name__)
@ -30,42 +32,94 @@ def override(args, **kwargs):
return args_dict
def run(args):
result = {'retval': 0,
'offset_seconds': None,
'framerate_scale_factor': None,
'sync_was_successful': None}
if args.vlc_mode:
logger.setLevel(logging.CRITICAL)
if args.make_test_case and not args.gui_mode: # this validation not necessary for gui mode
if args.srtin is None or args.srtout is None:
logger.error('need to specify input and output srt files for test cases')
result['retval'] = 1
return result
if args.overwrite_input:
if args.srtin is None:
logger.error('need to specify input srt if --overwrite-input is specified since we cannot overwrite stdin')
result['retval'] = 1
return result
if args.srtout is not None:
logger.error('overwrite input set but output file specified; refusing to run in case this was not intended')
result['retval'] = 1
return result
args.srtout = args.srtin
if args.gui_mode and args.srtout is None:
args.srtout = '{}.synced.srt'.format(os.path.splitext(args.srtin)[0])
ref_format = args.reference[-3:]
if args.merge_with_reference and ref_format not in SUBTITLE_EXTENSIONS:
logger.error('merging synced output with reference only valid '
'when reference composed of subtitles')
return 1
if args.make_test_case:
handler = logging.FileHandler('ffsubsync.log')
logger.addHandler(handler)
def _ref_format(ref_fname):
return ref_fname[-3:]
def make_test_case(args, npy_savename, sync_was_successful):
if npy_savename is None:
raise ValueError('need non-null npy_savename')
tar_dir = '{}.{}'.format(
args.reference,
datetime.now().strftime('%Y-%m-%d-%H:%M:%S')
)
logger.info('creating test archive {}.tar.gz...'.format(tar_dir))
os.mkdir(tar_dir)
try:
shutil.move('ffsubsync.log', tar_dir)
shutil.copy(args.srtin, tar_dir)
if sync_was_successful:
shutil.move(args.srtout, tar_dir)
if _ref_format(args.reference) in SUBTITLE_EXTENSIONS:
shutil.copy(args.reference, tar_dir)
elif args.serialize_speech or args.reference == npy_savename:
shutil.copy(npy_savename, tar_dir)
else:
shutil.move(npy_savename, tar_dir)
supported_formats = set(list(zip(*shutil.get_archive_formats()))[0])
preferred_formats = ['gztar', 'bztar', 'xztar', 'zip', 'tar']
for archive_format in preferred_formats:
if archive_format in supported_formats:
shutil.make_archive(tar_dir, 'gztar', os.curdir, tar_dir)
break
else:
logger.error('failed to create test archive; no formats supported '
'(this should not happen)')
return 1
logger.info('...done')
finally:
shutil.rmtree(tar_dir)
return 0
def try_sync(args, reference_pipe, srt_pipes, result):
sync_was_successful = True
try:
logger.info('extracting speech segments from subtitles file %s...', args.srtin)
for srt_pipe in srt_pipes:
srt_pipe.fit(args.srtin)
logger.info('...done')
logger.info('computing alignments...')
offset_samples, best_srt_pipe = MaxScoreAligner(
FFTAligner, SAMPLE_RATE, args.max_offset_seconds
).fit_transform(
reference_pipe.transform(args.reference),
srt_pipes,
)
logger.info('...done')
offset_seconds = offset_samples / float(SAMPLE_RATE)
scale_step = best_srt_pipe.named_steps['scale']
logger.info('offset seconds: %.3f', offset_seconds)
logger.info('framerate scale factor: %.3f', scale_step.scale_factor)
output_steps = [('shift', SubtitleShifter(offset_seconds))]
if args.merge_with_reference:
output_steps.append(
('merge',
SubtitleMerger(reference_pipe.named_steps['parse'].subs_))
)
output_pipe = Pipeline(output_steps)
out_subs = output_pipe.fit_transform(scale_step.subs_)
if args.output_encoding != 'same':
out_subs = out_subs.set_encoding(args.output_encoding)
logger.info('writing output to {}'.format(args.srtout or 'stdout'))
out_subs.write_file(args.srtout)
except FailedToFindAlignmentException as e:
sync_was_successful = False
logger.error(e)
else:
result['offset_seconds'] = offset_seconds
result['framerate_scale_factor'] = scale_step.scale_factor
finally:
result['sync_was_successful'] = sync_was_successful
return sync_was_successful
def make_reference_pipe(args):
ref_format = _ref_format(args.reference)
if ref_format in SUBTITLE_EXTENSIONS:
if args.vad is not None:
logger.warning('Vad specified, but reference was not a movie')
reference_pipe = make_subtitle_speech_pipeline(
return make_subtitle_speech_pipeline(
fmt=ref_format,
**override(
args,
@ -75,7 +129,7 @@ def run(args):
elif ref_format in ('npy', 'npz'):
if args.vad is not None:
logger.warning('Vad specified, but reference was not a movie')
reference_pipe = Pipeline([
return Pipeline([
('deserialize', DeserializeSpeechTransformer())
])
else:
@ -85,7 +139,7 @@ def run(args):
ref_stream = args.reference_stream
if ref_stream is not None and not ref_stream.startswith('0:'):
ref_stream = '0:' + ref_stream
reference_pipe = Pipeline([
return Pipeline([
('speech_extract', VideoSpeechTransformer(vad=vad,
sample_rate=SAMPLE_RATE,
frame_rate=args.frame_rate,
@ -95,12 +149,128 @@ def run(args):
vlc_mode=args.vlc_mode,
gui_mode=args.gui_mode))
])
def make_srt_pipes(args):
if args.no_fix_framerate:
framerate_ratios = [1.]
else:
framerate_ratios = np.concatenate([
[1.], np.array(FRAMERATE_RATIOS), 1./np.array(FRAMERATE_RATIOS)
])
parser = make_subtitle_parser(fmt=os.path.splitext(args.srtin)[-1][1:], caching=True, **args.__dict__)
srt_pipes = [
make_subtitle_speech_pipeline(
**override(args, scale_factor=scale_factor, parser=parser)
)
for scale_factor in framerate_ratios
]
return srt_pipes
def extract_subtitles_from_reference(args):
stream = args.extract_subs_from_stream
if not stream.startswith('0:s:'):
stream = '0:s:{}'.format(stream)
elif not stream.startswith('0:') and stream.startswith('s:'):
stream = '0:{}'.format(stream)
if not stream.startswith('0:s:'):
logger.error('invalid stream for subtitle extraction: %s', args.extract_subs_from_stream)
ffmpeg_args = [ffmpeg_bin_path('ffmpeg', args.gui_mode, ffmpeg_resources_path=args.ffmpeg_path)]
ffmpeg_args.extend([
'-y',
'-nostdin',
'-loglevel', 'fatal',
'-i', args.reference,
'-map', '{}'.format(stream),
'-f', 'srt',
])
if args.srtout is None:
ffmpeg_args.append('-')
else:
ffmpeg_args.append(args.srtout)
logger.info('attempting to extract subtitles to {} ...'.format('stdout' if args.srtout is None else args.srtout))
retcode = subprocess.call(ffmpeg_args)
if retcode == 0:
logger.info('...done')
else:
logger.error('ffmpeg unable to extract subtitles from reference; return code %d', retcode)
return retcode
def validate_args(args):
if args.vlc_mode:
logger.setLevel(logging.CRITICAL)
if args.make_test_case and not args.gui_mode: # this validation not necessary for gui mode
if args.srtin is None or args.srtout is None:
raise ValueError('need to specify input and output srt files for test cases')
if args.overwrite_input:
if args.extract_subs_from_stream is not None:
raise ValueError('input overwriting not allowed for extracting subtitles from reference')
if args.srtin is None:
raise ValueError(
'need to specify input srt if --overwrite-input is specified since we cannot overwrite stdin'
)
if args.srtout is not None:
raise ValueError(
'overwrite input set but output file specified; refusing to run in case this was not intended'
)
if args.extract_subs_from_stream is not None:
if args.make_test_case:
raise ValueError('test case is for sync and not subtitle extraction')
if args.srtin is not None:
raise ValueError('stream specified for reference subtitle extraction; -i flag for sync input not allowed')
def validate_file_permissions(args):
if not os.access(args.reference, os.R_OK):
raise ValueError('unable to read reference %s (try checking permissions)' % args.reference)
if not os.access(args.srtin, os.R_OK):
raise ValueError('unable to read input subtitles %s (try checking permissions)' % args.srtin)
if os.path.exists(args.srtout) and not os.access(args.srtout, os.W_OK):
raise ValueError('unable to write output subtitles %s (try checking permissions)' % args.srtout)
if args.make_test_case or args.serialize_speech:
npy_savename = os.path.splitext(args.reference)[0] + '.npz'
if os.path.exists(npy_savename) and not os.access(npy_savename, os.W_OK):
raise ValueError('unable to write test case file archive %s (try checking permissions)' % npy_savename)
def run(args):
result = {
'retval': 0,
'offset_seconds': None,
'framerate_scale_factor': None,
'sync_was_successful': None
}
try:
validate_args(args)
except ValueError as e:
logger.error(e)
result['retval'] = 1
return result
if args.overwrite_input:
args.srtout = args.srtin
if args.gui_mode and args.srtout is None:
args.srtout = '{}.synced.srt'.format(os.path.splitext(args.srtin)[0])
try:
validate_file_permissions(args)
except ValueError as e:
logger.error(e)
result['retval'] = 1
return result
ref_format = _ref_format(args.reference)
if args.merge_with_reference and ref_format not in SUBTITLE_EXTENSIONS:
logger.error('merging synced output with reference only valid '
'when reference composed of subtitles')
result['retval'] = 1
return result
if args.make_test_case:
handler = logging.FileHandler('ffsubsync.log')
logger.addHandler(handler)
if args.extract_subs_from_stream is not None:
result['retval'] = extract_subtitles_from_reference(args)
return result
reference_pipe = make_reference_pipe(args)
logger.info("extracting speech segments from reference '%s'...", args.reference)
reference_pipe.fit(args.reference)
logger.info('...done')
@ -113,83 +283,10 @@ def run(args):
if args.srtin is None:
logger.info('unsynchronized subtitle file not specified; skipping synchronization')
return result
parser = make_subtitle_parser(fmt=os.path.splitext(args.srtin)[-1][1:], caching=True, **args.__dict__)
logger.info("extracting speech segments from subtitles '%s'...", args.srtin)
srt_pipes = [
make_subtitle_speech_pipeline(
**override(args, scale_factor=scale_factor, parser=parser)
).fit(args.srtin)
for scale_factor in framerate_ratios
]
logger.info('...done')
logger.info('computing alignments...')
max_offset_seconds = args.max_offset_seconds
try:
sync_was_successful = True
offset_samples, best_srt_pipe = MaxScoreAligner(
FFTAligner, SAMPLE_RATE, max_offset_seconds
).fit_transform(
reference_pipe.transform(args.reference),
srt_pipes,
)
logger.info('...done')
offset_seconds = offset_samples / float(SAMPLE_RATE)
scale_step = best_srt_pipe.named_steps['scale']
logger.info('offset seconds: %.3f', offset_seconds)
logger.info('framerate scale factor: %.3f', scale_step.scale_factor)
output_steps = [('shift', SubtitleShifter(offset_seconds))]
if args.merge_with_reference:
output_steps.append(
('merge',
SubtitleMerger(reference_pipe.named_steps['parse'].subs_))
)
output_pipe = Pipeline(output_steps)
out_subs = output_pipe.fit_transform(scale_step.subs_)
if args.output_encoding != 'same':
out_subs = out_subs.set_encoding(args.output_encoding)
logger.info('writing output to {}'.format(args.srtout or 'stdout'))
out_subs.write_file(args.srtout)
except FailedToFindAlignmentException as e:
sync_was_successful = False
logger.error(e)
else:
result['offset_seconds'] = offset_seconds
result['framerate_scale_factor'] = scale_step.scale_factor
finally:
result['sync_was_successful'] = sync_was_successful
srt_pipes = make_srt_pipes(args)
sync_was_successful = try_sync(args, reference_pipe, srt_pipes, result)
if args.make_test_case:
if npy_savename is None:
raise ValueError('need non-null npy_savename')
tar_dir = '{}.{}'.format(
args.reference,
datetime.now().strftime('%Y-%m-%d-%H:%M:%S')
)
logger.info('creating test archive {}.tar.gz...'.format(tar_dir))
os.mkdir(tar_dir)
try:
shutil.move('ffsubsync.log', tar_dir)
shutil.copy(args.srtin, tar_dir)
if sync_was_successful:
shutil.move(args.srtout, tar_dir)
if ref_format in SUBTITLE_EXTENSIONS:
shutil.copy(args.reference, tar_dir)
elif args.serialize_speech or args.reference == npy_savename:
shutil.copy(npy_savename, tar_dir)
else:
shutil.move(npy_savename, tar_dir)
supported_formats = set(list(zip(*shutil.get_archive_formats()))[0])
preferred_formats = ['gztar', 'bztar', 'xztar', 'zip', 'tar']
for archive_format in preferred_formats:
if archive_format in supported_formats:
shutil.make_archive(tar_dir, 'gztar', os.curdir, tar_dir)
break
else:
logger.error('failed to create test archive; no formats supported '
'(this should not happen)')
result['retval'] = 1
logger.info('...done')
finally:
shutil.rmtree(tar_dir)
result['retval'] += make_test_case(args, npy_savename, sync_was_successful)
return result
@ -206,11 +303,18 @@ def add_main_args_for_cli(parser):
help='If specified, serialize reference speech to a numpy array, '
'and create an archive with input/output subtitles '
'and serialized speech.')
parser.add_argument(
'--reference-stream', '--refstream', '--reference-track', '--reftrack',
default=None,
help='Which stream/track in the video file to use as reference, '
'formatted according to ffmpeg conventions. For example, s:0 '
'uses the first subtitle track; a:3 would use the third audio track.'
)
def add_cli_only_args(parser):
parser.add_argument('-v', '--version', action='version',
version='{package} {version}'.format(package=__package__, version=__version__))
# parser.add_argument('-v', '--version', action='version',
# version='{package} {version}'.format(package=__package__, version=get_version()))
parser.add_argument('--overwrite-input', action='store_true',
help='If specified, will overwrite the input srt instead of writing the output to a new file.')
parser.add_argument('--encoding', default=DEFAULT_ENCODING,
@ -243,13 +347,9 @@ def add_cli_only_args(parser):
'mismatch between reference and subtitles.')
parser.add_argument('--serialize-speech', action='store_true',
help='If specified, serialize reference speech to a numpy array.')
parser.add_argument(
'--reference-stream', '--refstream', '--reference-track', '--reftrack',
default=None,
help='Which stream/track in the video file to use as reference, '
'formatted according to ffmpeg conventions. For example, s:0 '
'uses the first subtitle track; a:3 would use the third audio track.'
)
parser.add_argument('--extract-subs-from-stream', default=None,
help='If specified, do not attempt sync; instead, just extract subtitles'
' from the specified stream using the reference.')
parser.add_argument(
'--ffmpeg-path', '--ffmpegpath', default=None,
help='Where to look for ffmpeg and ffprobe. Uses the system PATH by default.'
@ -268,7 +368,7 @@ def make_parser():
def main():
parser = make_parser()
args = parser.parse_args()
return run(args)
return run(args)['retval']
if __name__ == "__main__":

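Putting the refactored pieces together, a programmatic invocation looks roughly like this (a sketch; the file names are placeholders, and the positional reference plus the -i/-o flags are assumptions about the CLI surface rather than something shown in this hunk):

# Build the CLI parser, parse placeholder arguments and run the sync.
parser = make_parser()
args = parser.parse_args(['reference.mkv', '-i', 'unsynced.srt', '-o', 'synced.srt'])
result = run(args)
print(result['retval'], result['offset_seconds'], result['sync_was_successful'])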
@ -17,8 +17,11 @@ from .constants import (
COPYRIGHT_YEAR,
SUBSYNC_RESOURCES_ENV_MAGIC,
)
# set the env magic so that we look for resources in the right place
if SUBSYNC_RESOURCES_ENV_MAGIC not in os.environ:
os.environ[SUBSYNC_RESOURCES_ENV_MAGIC] = getattr(sys, '_MEIPASS', '')
from .ffsubsync import run, add_cli_only_args
from .version import __version__, update_available
from .version import get_version, update_available
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@ -33,7 +36,7 @@ _menu = [
'menuTitle': 'About',
'name': PROJECT_NAME,
'description': LONG_DESCRIPTION,
'version': __version__,
'version': get_version(),
'copyright': COPYRIGHT_YEAR,
'website': WEBSITE,
'developer': DEV_WEBSITE,
@ -49,11 +52,6 @@ _menu = [
]
# set the env magic so that we look for resources in the right place
if SUBSYNC_RESOURCES_ENV_MAGIC not in os.environ:
os.environ[SUBSYNC_RESOURCES_ENV_MAGIC] = getattr(sys, '_MEIPASS', '')
@Gooey(
program_name=PROJECT_NAME,
image_dir=os.path.join(os.environ[SUBSYNC_RESOURCES_ENV_MAGIC], 'img'),

@ -2,8 +2,6 @@
from contextlib import contextmanager
import logging
import io
import os
import platform
import subprocess
import sys
from datetime import timedelta
@ -15,6 +13,7 @@ from .sklearn_shim import Pipeline
import tqdm
from .constants import *
from .ffmpeg_utils import ffmpeg_bin_path, subprocess_args
from .subtitle_parser import make_subtitle_parser
from .subtitle_transformers import SubtitleScaler
@ -22,73 +21,6 @@ logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# ref: https://github.com/pyinstaller/pyinstaller/wiki/Recipe-subprocess
# Create a set of arguments which make a ``subprocess.Popen`` (and
# variants) call work with or without Pyinstaller, ``--noconsole`` or
# not, on Windows and Linux. Typical use::
#
# subprocess.call(['program_to_run', 'arg_1'], **subprocess_args())
#
# When calling ``check_output``::
#
# subprocess.check_output(['program_to_run', 'arg_1'],
# **subprocess_args(False))
def _subprocess_args(include_stdout=True):
# The following is true only on Windows.
if hasattr(subprocess, 'STARTUPINFO'):
# On Windows, subprocess calls will pop up a command window by default
# when run from Pyinstaller with the ``--noconsole`` option. Avoid this
# distraction.
si = subprocess.STARTUPINFO()
si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
# Windows doesn't search the path by default. Pass it an environment so
# it will.
env = os.environ
else:
si = None
env = None
# ``subprocess.check_output`` doesn't allow specifying ``stdout``::
#
# Traceback (most recent call last):
# File "test_subprocess.py", line 58, in <module>
# **subprocess_args(stdout=None))
# File "C:\Python27\lib\subprocess.py", line 567, in check_output
# raise ValueError('stdout argument not allowed, it will be overridden.')
# ValueError: stdout argument not allowed, it will be overridden.
#
# So, add it only if it's needed.
if include_stdout:
ret = {'stdout': subprocess.PIPE}
else:
ret = {}
# On Windows, running this from the binary produced by Pyinstaller
# with the ``--noconsole`` option requires redirecting everything
# (stdin, stdout, stderr) to avoid an OSError exception
# "[Error 6] the handle is invalid."
ret.update({'stdin': subprocess.PIPE,
'stderr': subprocess.PIPE,
'startupinfo': si,
'env': env})
return ret
def _ffmpeg_bin_path(bin_name, gui_mode, ffmpeg_resources_path=None):
if platform.system() == 'Windows':
bin_name = '{}.exe'.format(bin_name)
if ffmpeg_resources_path is not None:
return os.path.join(ffmpeg_resources_path, bin_name)
try:
resource_path = os.environ[SUBSYNC_RESOURCES_ENV_MAGIC]
if len(resource_path) > 0:
return os.path.join(resource_path, 'ffmpeg-bin', bin_name)
except KeyError as e:
if gui_mode:
logger.info("Couldn't find resource path; falling back to searching system path")
return bin_name
def make_subtitle_speech_pipeline(
fmt='srt',
encoding=DEFAULT_ENCODING,
@ -212,7 +144,7 @@ class VideoSpeechTransformer(TransformerMixin):
else:
streams_to_try = [self.ref_stream]
for stream in streams_to_try:
ffmpeg_args = [_ffmpeg_bin_path('ffmpeg', self.gui_mode, ffmpeg_resources_path=self.ffmpeg_path)]
ffmpeg_args = [ffmpeg_bin_path('ffmpeg', self.gui_mode, ffmpeg_resources_path=self.ffmpeg_path)]
ffmpeg_args.extend([
'-loglevel', 'fatal',
'-nostdin',
@ -221,7 +153,7 @@ class VideoSpeechTransformer(TransformerMixin):
'-f', 'srt',
'-'
])
process = subprocess.Popen(ffmpeg_args, **_subprocess_args(include_stdout=True))
process = subprocess.Popen(ffmpeg_args, **subprocess_args(include_stdout=True))
output = io.BytesIO(process.communicate()[0])
if process.returncode != 0:
break
@ -245,7 +177,7 @@ class VideoSpeechTransformer(TransformerMixin):
logger.info(e)
try:
total_duration = float(ffmpeg.probe(
fname, cmd=_ffmpeg_bin_path('ffprobe', self.gui_mode, ffmpeg_resources_path=self.ffmpeg_path)
fname, cmd=ffmpeg_bin_path('ffprobe', self.gui_mode, ffmpeg_resources_path=self.ffmpeg_path)
)['format']['duration']) - self.start_seconds
except Exception as e:
logger.warning(e)
@ -257,7 +189,7 @@ class VideoSpeechTransformer(TransformerMixin):
else:
raise ValueError('unknown vad: %s' % self.vad)
media_bstring = []
ffmpeg_args = [_ffmpeg_bin_path('ffmpeg', self.gui_mode, ffmpeg_resources_path=self.ffmpeg_path)]
ffmpeg_args = [ffmpeg_bin_path('ffmpeg', self.gui_mode, ffmpeg_resources_path=self.ffmpeg_path)]
if self.start_seconds > 0:
ffmpeg_args.extend([
'-ss', str(timedelta(seconds=self.start_seconds)),
@ -276,7 +208,7 @@ class VideoSpeechTransformer(TransformerMixin):
'-ar', str(self.frame_rate),
'-'
])
process = subprocess.Popen(ffmpeg_args, **_subprocess_args(include_stdout=True))
process = subprocess.Popen(ffmpeg_args, **subprocess_args(include_stdout=True))
bytes_per_frame = 2
frames_per_window = bytes_per_frame * self.frame_rate // self.sample_rate
windows_per_buffer = 10000

@ -3,9 +3,9 @@ from datetime import timedelta
import logging
try:
import cchardet
import cchardet as chardet
except ImportError:
import chardet as cchardet
import chardet
import pysubs2
from .sklearn_shim import TransformerMixin
import srt
@ -82,7 +82,7 @@ class GenericSubtitleParser(SubsMixin, TransformerMixin):
with open_file(fname, 'rb') as f:
subs = f.read()
if self.encoding == 'infer':
encodings_to_try = (cchardet.detect(subs)['encoding'],)
encodings_to_try = (chardet.detect(subs)['encoding'],)
self.detected_encoding_ = encodings_to_try[0]
logger.info('detected encoding: %s' % self.detected_encoding_)
exc = None
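The alias import above keeps a single call site whichever detector is installed; in isolation the call looks like this (a sketch):

try:
    import cchardet as chardet
except ImportError:
    import chardet

# Both libraries expose detect() returning a dict with an 'encoding' key.
print(chardet.detect('näin on'.encode('utf-8'))['encoding'])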

@ -1,9 +1,19 @@
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
import os
from .constants import SUBSYNC_RESOURCES_ENV_MAGIC
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
def get_version():
if 'unknown' in __version__.lower():
with open(os.path.join(os.environ[SUBSYNC_RESOURCES_ENV_MAGIC], '__version__')) as f:
return f.read().strip()
else:
return __version__
def make_version_tuple(vstr=None):
if vstr is None:
vstr = __version__
@ -31,4 +41,4 @@ def update_available():
return False
if not resp.ok:
return False
return make_version_tuple(__version__) < make_version_tuple(latest_vstr)
return make_version_tuple(get_version()) < make_version_tuple(latest_vstr)

@ -114,10 +114,12 @@ class SZProviderPool(ProviderPool):
try:
logger.info('Terminating provider %s', name)
self.initialized_providers[name].terminate()
except (requests.Timeout, socket.timeout):
except (requests.Timeout, socket.timeout) as e:
logger.error('Provider %r timed out, improperly terminated', name)
except:
self.throttle_callback(name, e)
except Exception as e:
logger.exception('Provider %r terminated unexpectedly', name)
self.throttle_callback(name, e)
del self.initialized_providers[name]
@ -189,8 +191,9 @@ class SZProviderPool(ProviderPool):
return out
except (requests.Timeout, socket.timeout):
except (requests.Timeout, socket.timeout) as e:
logger.error('Provider %r timed out', provider)
self.throttle_callback(provider, e)
except Exception as e:
logger.exception('Unexpected error in provider %r: %s', provider, traceback.format_exc())
@ -269,10 +272,11 @@ class SZProviderPool(ProviderPool):
requests.exceptions.ProxyError,
requests.exceptions.SSLError,
requests.Timeout,
socket.timeout):
socket.timeout) as e:
logger.error('Provider %r connection error', subtitle.provider_name)
self.throttle_callback(subtitle.provider_name, e)
except ResponseNotReady:
except ResponseNotReady as e:
logger.error('Provider %r response error, reinitializing', subtitle.provider_name)
try:
self[subtitle.provider_name].terminate()
@ -280,6 +284,7 @@ class SZProviderPool(ProviderPool):
except:
logger.error('Provider %r reinitialization error: %s', subtitle.provider_name,
traceback.format_exc())
self.throttle_callback(subtitle.provider_name, e)
except rarfile.BadRarFile:
logger.error('Malformed RAR file from provider %r, skipping subtitle.', subtitle.provider_name)

@ -9,14 +9,13 @@ from random import randint
from dogpile.cache.api import NO_VALUE
from requests import Session
from requests.exceptions import ConnectionError, ConnectTimeout
from subliminal.cache import region
from subliminal.exceptions import DownloadLimitExceeded, AuthenticationError, ConfigurationError
from subliminal.providers.addic7ed import Addic7edProvider as _Addic7edProvider, \
Addic7edSubtitle as _Addic7edSubtitle, ParserBeautifulSoup
from subliminal.subtitle import fix_line_ending
from subliminal_patch.utils import sanitize
from subliminal_patch.exceptions import TooManyRequests, IPAddressBlocked
from subliminal_patch.exceptions import TooManyRequests
from subliminal_patch.pitcher import pitchers, load_verification, store_verification
from subzero.language import Language
@ -92,19 +91,15 @@ class Addic7edProvider(_Addic7edProvider):
# login
if self.username and self.password:
def check_verification(cache_region):
try:
rr = self.session.get(self.server_url + 'panel.php', allow_redirects=False, timeout=10,
headers={"Referer": self.server_url})
if rr.status_code == 302:
logger.info('Addic7ed: Login expired')
cache_region.delete("addic7ed_data")
else:
logger.info('Addic7ed: Re-using old login')
self.logged_in = True
return True
except (ConnectionError, ConnectTimeout) as e:
logger.debug("Addic7ed: There was a problem reaching the server: %s." % e)
raise IPAddressBlocked("Addic7ed: Your IP is temporarily blocked.")
rr = self.session.get(self.server_url + 'panel.php', allow_redirects=False, timeout=10,
headers={"Referer": self.server_url})
if rr.status_code == 302:
logger.info('Addic7ed: Login expired')
cache_region.delete("addic7ed_data")
else:
logger.info('Addic7ed: Re-using old login')
self.logged_in = True
return True
if load_verification("addic7ed", self.session, callback=check_verification):
return

@ -57,7 +57,6 @@ class ZimukuSubtitle(Subtitle):
# episode
if isinstance(video, Episode):
# always make year a match
info = guessit(self.version, {"type": "episode"})
# other properties
matches |= guess_matches(video, info, partial=True)
@ -145,6 +144,19 @@ class ZimukuProvider(Provider):
logger.debug("No data returned from provider")
return []
html = r.content.decode("utf-8", "ignore")
# parse window location
pattern = r"url\s*=\s*'([^']*)'\s*\+\s*url"
parts = re.findall(pattern, html)
redirect_url = search_link
while parts:
parts.reverse()
redirect_url = urljoin(self.server_url, "".join(parts))
r = self.session.get(redirect_url, timeout=30)
html = r.content.decode("utf-8", "ignore")
parts = re.findall(pattern, html)
logger.debug("search url located: " + redirect_url)
soup = ParserBeautifulSoup(
r.content.decode("utf-8", "ignore"), ["lxml", "html.parser"]
)
@ -154,8 +166,12 @@ class ZimukuProvider(Provider):
logger.debug("enter a non-shooter page")
for item in soup.find_all("div", {"class": "item"}):
title_a = item.find("p", class_="tt clearfix").find("a")
subs_year = re.findall(r"\d{4}", title_a.text) or None
subs_year = year
if season:
# the year zimuku shows for an episode entry is the season's year, not the show's year
actual_subs_year = re.findall(r"\d{4}", title_a.text) or None
if actual_subs_year:
subs_year = int(actual_subs_year[0]) - season + 1
title = title_a.text
season_cn1 = re.search("第(.*)季", title)
if not season_cn1:

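The season-year adjustment above, worked through on example numbers (an illustration only, not provider data):

season = 3
actual_subs_year = 2017          # year printed next to the season-3 entry
subs_year = actual_subs_year - season + 1
print(subs_year)                 # -> 2015, the show's first-season year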
@ -0,0 +1,7 @@
# coding=utf-8
class EmptyEntryError(Exception):
pass
class EmptyLineError(Exception):
pass

@ -7,7 +7,8 @@ import pysubs2
import logging
import time
from .mods import EMPTY_TAG_PROCESSOR, EmptyEntryError
from .mods import EMPTY_TAG_PROCESSOR
from .exc import EmptyEntryError
from .registry import registry
from subzero.language import Language
import six
@ -15,8 +16,6 @@ import six
logger = logging.getLogger(__name__)
lowercase_re = re.compile(r'(?sux)[a-zà-ž]')
class SubtitleModifications(object):
debug = False
@ -189,7 +188,7 @@ class SubtitleModifications(object):
sub = processor.process(sub)
if sub.strip():
if lowercase_re.search(sub):
if not sub.isupper():
return False
entry_used = True
@ -302,11 +301,11 @@ class SubtitleModifications(object):
mod = self.initialized_mods[identifier]
try:
line = mod.modify(line.strip(), entry=entry.text, debug=self.debug, parent=self, index=index,
line = mod.modify(line.strip(), entry=t, debug=self.debug, parent=self, index=index,
**args)
except EmptyEntryError:
if self.debug:
logger.debug(u"%d: %s: %r -> ''", index, identifier, entry.text)
logger.debug(u"%d: %s: %r -> ''", index, identifier, t)
skip_entry = True
break
@ -331,11 +330,11 @@ class SubtitleModifications(object):
mod = self.initialized_mods[identifier]
try:
line = mod.modify(line.strip(), entry=entry.text, debug=self.debug, parent=self, index=index,
line = mod.modify(line.strip(), entry=t, debug=self.debug, parent=self, index=index,
procs=["last_process"], **args)
except EmptyEntryError:
if self.debug:
logger.debug(u"%d: %s: %r -> ''", index, identifier, entry.text)
logger.debug(u"%d: %s: %r -> ''", index, identifier, t)
skip_entry = True
break

@ -109,9 +109,3 @@ empty_line_post_processors = [
]
class EmptyEntryError(Exception):
pass
class EmptyLineError(Exception):
pass

@ -9,6 +9,7 @@ from subzero.modification.mods import SubtitleTextModification, empty_line_post_
from subzero.modification.processors import FuncProcessor
from subzero.modification.processors.re_processor import NReProcessor
from subzero.modification import registry
from tld import get_tld
ENGLISH = Language("eng")
@ -30,7 +31,7 @@ class CommonFixes(SubtitleTextModification):
NReProcessor(re.compile(r'(?u)(\w|\b|\s|^)(-\s?-{1,2})'), r"\1—", name="CM_multidash"),
# line = _/-/\s
NReProcessor(re.compile(r'(?u)(^\W*[-_.:>~]+\W*$)'), "", name="<CM_non_word_only"),
NReProcessor(re.compile(r'(?u)(^\W*[-_.:<>~"\']+\W*$)'), "", name="CM_non_word_only"),
# remove >>
NReProcessor(re.compile(r'(?u)^\s?>>\s*'), "", name="CM_leading_crocodiles"),
@ -115,7 +116,9 @@ class CommonFixes(SubtitleTextModification):
NReProcessor(re.compile(r'(?u)(?:(?<=^)|(?<=\w)) +([!?.,](?![!?.,]| \.))'), r"\1", name="CM_punctuation_space"),
# add space after punctuation
NReProcessor(re.compile(r'(?u)([!?.,:])([A-zÀ-ž]{2,})'), r"\1 \2", name="CM_punctuation_space2"),
NReProcessor(re.compile(r'(?u)(([^\s]*)([!?.,:])([A-zÀ-ž]{2,}))'),
lambda match: u"%s%s %s" % (match.group(2), match.group(3), match.group(4)) if not get_tld(match.group(1), fail_silently=True, fix_protocol=True) else match.group(1),
name="CM_punctuation_space2"),
# fix lowercase I in english
NReProcessor(re.compile(r'(?u)(\b)i(\b)'), r"\1I\2", name="CM_EN_lowercase_i",

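The new TLD guard in CM_punctuation_space2 can be exercised on its own; a sketch (the sample strings are made up):

from tld import get_tld

# A real domain keeps its dot untouched; anything else gets a space inserted
# after the punctuation by the processor above.
print(get_tld('subscene.com', fail_silently=True, fix_protocol=True))   # e.g. 'com'
print(get_tld('wait.Then', fail_silently=True, fix_protocol=True))      # likely None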
@ -3,7 +3,8 @@ from __future__ import absolute_import
from __future__ import unicode_literals
import re
from subzero.modification.mods import SubtitleTextModification, empty_line_post_processors, EmptyEntryError, TAG
from subzero.modification.mods import SubtitleTextModification, empty_line_post_processors, TAG
from subzero.modification.exc import EmptyEntryError
from subzero.modification.processors.re_processor import NReProcessor
from subzero.modification import registry
@ -41,14 +42,14 @@ class HearingImpaired(SubtitleTextModification):
# possibly with a dash in front; try not breaking actual sentences with a colon at the end by not matching if
# a space is inside the text; ignore anything ending with a quote
NReProcessor(re.compile(r'(?u)(?:(?<=^)|(?<=[.\-!?\"]))([\s\->~]*((?=[A-zÀ-ž&+]\s*[A-zÀ-ž&+]\s*[A-zÀ-ž&+])'
r'[A-zÀ-ž-_0-9\s\"\'&+()\[\]]+:)(?![\"’ʼ❜‘‛”“‟„])\s*)(?![0-9])'),
r'[A-zÀ-ž-_0-9\s\"\'&+()\[\]]+:)(?![\"’ʼ❜‘‛”“‟„])\s*)(?![0-9]|//)'),
lambda match:
match.group(1) if (match.group(2).count(" ") > 0 or match.group(1).count("-") > 0)
else "" if not match.group(1).startswith(" ") else " ",
name="HI_before_colon_noncaps"),
# brackets (only remove if at least 3 chars in brackets)
NReProcessor(re.compile(r'(?sux)-?%(t)s[([][^([)\]]+?(?=[A-zÀ-ž"\'.]{3,})[^([)\]]+[)\]][\s:]*%(t)s' %
NReProcessor(re.compile(r'(?sux)-?%(t)s["\']*[([][^([)\]]+?(?=[A-zÀ-ž"\'.]{3,})[^([)\]]+[)\]]["\']*[\s:]*%(t)s' %
{"t": TAG}), "", name="HI_brackets"),
#NReProcessor(re.compile(r'(?sux)-?%(t)s[([]%(t)s(?=[A-zÀ-ž"\'.]{3,})[^([)\]]+%(t)s$' % {"t": TAG}),
@ -92,8 +93,8 @@ class HearingImpaired(SubtitleTextModification):
"", name="HI_music_symbols_only"),
# remove music entries
NReProcessor(re.compile(r'(?ums)(^[-\s>~]*[♫♪]+\s*.+|.+\s*[♫♪]+\s*$)'),
"", name="HI_music"),
NReProcessor(re.compile(r'(?ums)(^[-\s>~]*[*#¶♫♪]+\s*.+|.+\s*[*#¶♫♪]+\s*$)'),
"", name="HI_music", entry=True),
]
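# Hedged illustration of the widened HI_music pattern above: hash, asterisk and
# pilcrow markers are now treated like the musical notes, and because the processor
# runs with entry=True a fully matched entry is dropped via EmptyEntryError instead
# of leaving an empty line behind.
import re

hi_music = re.compile(r'(?ums)(^[-\s>~]*[*#¶♫♪]+\s*.+|.+\s*[*#¶♫♪]+\s*$)')
print(repr(hi_music.sub("", "# dramatic music #")))   # '' -> entry gets skipped
print(repr(hi_music.sub("", "- He's coming back.")))  # dialogue left untouched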

@ -3,6 +3,7 @@ from __future__ import absolute_import
import re
import logging
from subzero.modification.exc import EmptyEntryError
from subzero.modification.processors import Processor
logger = logging.getLogger(__name__)
@ -15,13 +16,22 @@ class ReProcessor(Processor):
pattern = None
replace_with = None
def __init__(self, pattern, replace_with, name=None, supported=None):
def __init__(self, pattern, replace_with, name=None, supported=None, entry=False, **kwargs):
super(ReProcessor, self).__init__(name=name, supported=supported)
self.pattern = pattern
self.replace_with = replace_with
self.use_entry = entry
def process(self, content, debug=False, **kwargs):
return self.pattern.sub(self.replace_with, content)
def process(self, content, debug=False, entry=None, **kwargs):
if not self.use_entry:
return self.pattern.sub(self.replace_with, content)
ret = self.pattern.sub(self.replace_with, entry)
if not ret:
raise EmptyEntryError()
elif ret != entry:
return ret
return content
class NReProcessor(ReProcessor):
@ -37,7 +47,7 @@ class MultipleWordReProcessor(ReProcessor):
}
replaces found key in pattern with the corresponding value in data
"""
def __init__(self, snr_dict, name=None, parent=None, supported=None):
def __init__(self, snr_dict, name=None, parent=None, supported=None, **kwargs):
super(ReProcessor, self).__init__(name=name, supported=supported)
self.snr_dict = snr_dict
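# A minimal sketch of the entry-aware ReProcessor behaviour added above, assuming
# the imports resolve against the vendored subzero package: with entry=True the
# pattern is applied to the whole entry text, and an entry that ends up empty is
# signalled with EmptyEntryError so the caller can drop it.
import re
from subzero.modification.exc import EmptyEntryError
from subzero.modification.processors.re_processor import ReProcessor

proc = ReProcessor(re.compile(r'(?um)^[♫♪].*$'), "", name="drop_music", entry=True)
try:
    proc.process("irrelevant", entry="♪ la la la ♪")
except EmptyEntryError:
    pass  # the whole entry was music, so it gets skipped upstream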

@ -0,0 +1,24 @@
from .utils import (
get_fld,
get_tld,
get_tld_names,
is_tld,
parse_tld,
Result,
update_tld_names,
)
__title__ = 'tld'
__version__ = '0.12.2'
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = (
'get_fld',
'get_tld',
'get_tld_names',
'is_tld',
'parse_tld',
'Result',
'update_tld_names',
)
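# Rough usage sketch of the vendored tld package whose public API is re-exported
# above; the expected values in the comments follow the package's own test data.
from tld import get_fld, get_tld, parse_tld, is_tld

get_fld("http://www.google.co.uk")    # 'google.co.uk'
get_tld("http://www.google.co.uk")    # 'co.uk'
parse_tld("http://www.google.co.uk")  # ('co.uk', 'google', 'www')
is_tld("co.uk")                       # True

res = get_tld("http://www.google.co.uk", as_object=True)
res.fld, res.subdomain, res.domain    # ('google.co.uk', 'www', 'google')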

@ -0,0 +1,68 @@
from codecs import open as codecs_open
from urllib.request import urlopen
from typing import Optional
from .exceptions import (
TldIOError,
TldImproperlyConfigured,
)
from .helpers import project_dir
from .registry import Registry
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = ('BaseTLDSourceParser',)
class BaseTLDSourceParser(metaclass=Registry):
"""Base TLD source parser."""
uid: Optional[str] = None
source_url: str
local_path: str
@classmethod
def validate(cls):
"""Constructor."""
if not cls.uid:
raise TldImproperlyConfigured(
"The `uid` property of the TLD source parser shall be defined."
)
@classmethod
def get_tld_names(cls, fail_silently: bool = False, retry_count: int = 0):
"""Get tld names.
:param fail_silently:
:param retry_count:
:return:
"""
cls.validate()
raise NotImplementedError(
"Your TLD source parser shall implement `get_tld_names` method."
)
@classmethod
def update_tld_names(cls, fail_silently: bool = False) -> bool:
"""Update the local copy of the TLD file.
:param fail_silently:
:return:
"""
try:
remote_file = urlopen(cls.source_url)
local_file = codecs_open(
project_dir(cls.local_path),
'wb',
encoding='utf8'
)
local_file.write(remote_file.read().decode('utf8'))
local_file.close()
remote_file.close()
except Exception as err:
if fail_silently:
return False
raise TldIOError(err)
return True
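# Hedged sketch of plugging a custom TLD source into the base parser above; the
# uid, source_url and local_path values are hypothetical placeholders. Subclassing
# is enough for registration because BaseTLDSourceParser uses the Registry
# metaclass.
from tld.utils import BaseMozillaTLDSourceParser, get_tld

class CustomTLDSourceParser(BaseMozillaTLDSourceParser):
    uid = 'custom'
    source_url = 'https://example.com/custom_suffix_list.dat'   # placeholder
    local_path = 'res/custom_suffix_list.dat.txt'                # placeholder

# The custom list would be read from local_path (or fetched from source_url) on
# first use; with real paths this behaves like the default Mozilla parser.
get_tld('http://www.google.co.uk', parser_class=CustomTLDSourceParser)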

@ -0,0 +1,58 @@
from typing import Any
from . import defaults
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = (
'get_setting',
'reset_settings',
'set_setting',
'settings',
)
class Settings(object):
"""Settings registry."""
def __init__(self):
self._settings = {}
self._settings_get = self._settings.get
def set(self, name: str, value: Any) -> None:
"""
Override default settings.
:param str name:
:param mixed value:
"""
self._settings[name] = value
def get(self, name: str, default: Any = None) -> Any:
"""
Gets a variable from local settings.
:param str name:
:param mixed default: Default value.
:return mixed:
"""
if name in self._settings:
return self._settings_get(name, default)
elif hasattr(defaults, name):
return getattr(defaults, name, default)
return default
def reset(self) -> None:
"""Reset settings."""
for name in defaults.__all__:
self.set(name, getattr(defaults, name))
settings = Settings()
get_setting = settings.get
set_setting = settings.set
reset_settings = settings.reset
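# Small sketch of the settings registry above, mirroring what the test suite does:
# values fall back to tld.defaults until explicitly overridden.
from tld.conf import get_setting, set_setting, reset_settings

get_setting('DEBUG')                                  # False, from tld.defaults
set_setting('NAMES_LOCAL_PATH_PARENT', '/tmp/test')   # relocate the TLD data files
get_setting('NAMES_LOCAL_PATH_PARENT')                # '/tmp/test'
reset_settings()                                      # back to the defaults module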

@ -0,0 +1,14 @@
from os.path import dirname
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = (
'DEBUG',
'NAMES_LOCAL_PATH_PARENT',
)
# Absolute base path that is prepended to NAMES_LOCAL_PATH
NAMES_LOCAL_PATH_PARENT = dirname(__file__)
DEBUG = False

@ -0,0 +1,56 @@
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = (
'TldBadUrl',
'TldDomainNotFound',
'TldImproperlyConfigured',
'TldIOError',
)
class TldIOError(IOError):
"""TldIOError.
Supposed to be thrown when problems with reading/writing occur.
"""
class TldDomainNotFound(ValueError):
"""TldDomainNotFound.
Supposed to be thrown when domain name is not found (didn't match) the
local TLD policy.
"""
def __init__(self, domain_name):
super(TldDomainNotFound, self).__init__(
"Domain %s didn't match any existing TLD name!" % domain_name
)
class TldBadUrl(ValueError):
"""TldBadUrl.
Supposed to be thrown when bad URL is given.
"""
def __init__(self, url):
super(TldBadUrl, self).__init__("Is not a valid URL %s!" % url)
class TldImproperlyConfigured(Exception):
"""TldImproperlyConfigured.
Supposed to be thrown when code is improperly configured. Typical use-case
is when user tries to use `get_tld` function with both `search_public` and
`search_private` set to False.
"""
def __init__(self, msg=None):
if msg is None:
msg = "Improperly configured."
else:
msg = "Improperly configured. %s" % msg
super(TldImproperlyConfigured, self).__init__(msg)

@ -0,0 +1,22 @@
from os.path import abspath, join
from .conf import get_setting
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = (
'project_dir',
'PROJECT_DIR',
)
def project_dir(base: str) -> str:
"""Project dir."""
tld_names_local_path_parent = get_setting('NAMES_LOCAL_PATH_PARENT')
return abspath(
join(tld_names_local_path_parent, base).replace('\\', '/')
)
PROJECT_DIR = project_dir

@ -0,0 +1,45 @@
from typing import Dict
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = (
'Registry',
)
class Registry(type):
REGISTRY = {} # type: Dict[str, Registry]
def __new__(mcs, name, bases, attrs):
new_cls = type.__new__(mcs, name, bases, attrs)
# Here the name of the class is used as key but it could be any class
# parameter.
if getattr(new_cls, '_uid', None):
mcs.REGISTRY[new_cls._uid] = new_cls
return new_cls
@property
def _uid(cls) -> str:
return getattr(cls, 'uid', cls.__name__)
@classmethod
def reset(mcs) -> None:
mcs.REGISTRY = {}
@classmethod
def get(mcs, key, default=None):
return mcs.REGISTRY.get(key, default)
@classmethod
def items(mcs):
return mcs.REGISTRY.items()
# @classmethod
# def get_registry(mcs) -> Dict[str, Type]:
# return dict(mcs.REGISTRY)
#
# @classmethod
# def pop(mcs, uid) -> None:
# mcs.REGISTRY.pop(uid)
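# Minimal sketch of how the Registry metaclass above collects classes by uid; the
# DemoParser name here is hypothetical.
from tld.registry import Registry

class DemoParser(metaclass=Registry):
    uid = 'demo'

Registry.get('demo') is DemoParser    # True
dict(Registry.items())                # includes {'demo': DemoParser}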

File diff suppressed because it is too large

@ -0,0 +1,67 @@
from typing import Any, Dict
from urllib.parse import SplitResult
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = (
'Result',
)
class Result(object):
"""Container."""
__slots__ = ('subdomain', 'domain', 'tld', '__fld', 'parsed_url')
def __init__(self,
tld: str,
domain: str,
subdomain: str,
parsed_url: SplitResult):
self.tld = tld
self.domain = domain if domain != '' else tld
self.subdomain = subdomain
self.parsed_url = parsed_url
if domain:
self.__fld = f"{self.domain}.{self.tld}"
else:
self.__fld = self.tld
@property
def extension(self) -> str:
"""Alias of ``tld``.
:return str:
"""
return self.tld
suffix = extension
@property
def fld(self) -> str:
"""First level domain.
:return:
:rtype: str
"""
return self.__fld
def __str__(self) -> str:
return self.tld
__repr__ = __str__
@property
def __dict__(self) -> Dict[str, Any]: # type: ignore
"""Mimic __dict__ functionality.
:return:
:rtype: dict
"""
return {
'tld': self.tld,
'domain': self.domain,
'subdomain': self.subdomain,
'fld': self.fld,
'parsed_url': self.parsed_url,
}

@ -0,0 +1,8 @@
import unittest
from .test_core import *
from .test_commands import *
if __name__ == '__main__':
unittest.main()

@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
from functools import lru_cache
import logging
import socket
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = (
'internet_available_only',
'log_info',
)
LOG_INFO = True
LOGGER = logging.getLogger(__name__)
def log_info(func):
"""Log some useful info."""
if not LOG_INFO:
return func
def inner(self, *args, **kwargs):
"""Inner."""
result = func(self, *args, **kwargs)
LOGGER.debug('\n\n%s', func.__name__)
LOGGER.debug('============================')
if func.__doc__:
LOGGER.debug('""" %s """', func.__doc__.strip())
LOGGER.debug('----------------------------')
if result is not None:
LOGGER.debug(result)
LOGGER.debug('\n++++++++++++++++++++++++++++')
return result
return inner
@lru_cache(maxsize=32)
def is_internet_available(host="8.8.8.8", port=53, timeout=3):
"""Check if internet is available.
Host: 8.8.8.8 (google-public-dns-a.google.com)
OpenPort: 53/tcp
Service: domain (DNS/TCP)
"""
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
return True
except socket.error as ex:
print(ex)
return False
def internet_available_only(func):
def inner(self, *args, **kwargs):
"""Inner."""
if not is_internet_available():
LOGGER.debug('\n\n%s', func.__name__)
LOGGER.debug('============================')
if func.__doc__:
LOGGER.debug('""" %s """', func.__doc__.strip())
LOGGER.debug('----------------------------')
LOGGER.debug("Skipping because no Internet connection available.")
LOGGER.debug('\n++++++++++++++++++++++++++++')
return None
result = func(self, *args, **kwargs)
return result
return inner

File diff suppressed because it is too large

@ -0,0 +1,42 @@
# -*- coding: utf-8 -*-
import logging
import unittest
import subprocess
from .base import log_info, internet_available_only
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('TestCommands',)
LOGGER = logging.getLogger(__name__)
class TestCommands(unittest.TestCase):
"""Tld commands tests."""
def setUp(self):
"""Set up."""
@internet_available_only
@log_info
def test_1_update_tld_names_command(self):
"""Test updating the tld names (re-fetch mozilla source)."""
res = subprocess.check_output(['update-tld-names']).strip()
self.assertEqual(res, b'')
return res
@internet_available_only
@log_info
def test_1_update_tld_names_mozilla_command(self):
"""Test updating the tld names (re-fetch mozilla source)."""
res = subprocess.check_output(['update-tld-names', 'mozilla']).strip()
self.assertEqual(res, b'')
return res
if __name__ == '__main__':
unittest.main()

@ -0,0 +1,842 @@
# -*- coding: utf-8 -*-
import copy
import logging
from os.path import abspath, join
import unittest
from tempfile import gettempdir
from typing import Type
from urllib.parse import urlsplit
from faker import Faker # type: ignore
from .. import defaults
from ..base import BaseTLDSourceParser
from ..conf import get_setting, reset_settings, set_setting
from ..exceptions import (
TldBadUrl,
TldDomainNotFound,
TldImproperlyConfigured,
TldIOError,
)
from ..helpers import project_dir
from ..registry import Registry
from ..utils import (
get_fld,
get_tld,
get_tld_names,
get_tld_names_container,
is_tld,
MozillaTLDSourceParser,
BaseMozillaTLDSourceParser,
parse_tld,
reset_tld_names,
update_tld_names,
update_tld_names_cli,
)
from .base import internet_available_only, log_info
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = ('TestCore',)
LOGGER = logging.getLogger(__name__)
class TestCore(unittest.TestCase):
"""Core tld functionality tests."""
@classmethod
def setUpClass(cls):
cls.faker = Faker()
cls.temp_dir = gettempdir()
def setUp(self):
"""Set up."""
self.good_patterns = [
{
'url': 'http://www.google.co.uk',
'fld': 'google.co.uk',
'subdomain': 'www',
'domain': 'google',
'suffix': 'co.uk',
'tld': 'co.uk',
'kwargs': {'fail_silently': True},
},
{
'url': 'http://www.v2.google.co.uk',
'fld': 'google.co.uk',
'subdomain': 'www.v2',
'domain': 'google',
'suffix': 'co.uk',
'tld': 'co.uk',
'kwargs': {'fail_silently': True},
},
# No longer valid
# {
# 'url': 'http://www.me.congresodelalengua3.ar',
# 'tld': 'me.congresodelalengua3.ar',
# 'subdomain': 'www',
# 'domain': 'me',
# 'suffix': 'congresodelalengua3.ar',
# },
{
'url': u'http://хром.гугл.рф',
'fld': u'гугл.рф',
'subdomain': u'хром',
'domain': u'гугл',
'suffix': u'рф',
'tld': u'рф',
'kwargs': {'fail_silently': True},
},
{
'url': 'http://www.google.co.uk:8001/lorem-ipsum/',
'fld': 'google.co.uk',
'subdomain': 'www',
'domain': 'google',
'suffix': 'co.uk',
'tld': 'co.uk',
'kwargs': {'fail_silently': True},
},
{
'url': 'http://www.me.cloudfront.net',
'fld': 'me.cloudfront.net',
'subdomain': 'www',
'domain': 'me',
'suffix': 'cloudfront.net',
'tld': 'cloudfront.net',
'kwargs': {'fail_silently': True},
},
{
'url': 'http://www.v2.forum.tech.google.co.uk:8001/'
'lorem-ipsum/',
'fld': 'google.co.uk',
'subdomain': 'www.v2.forum.tech',
'domain': 'google',
'suffix': 'co.uk',
'tld': 'co.uk',
'kwargs': {'fail_silently': True},
},
{
'url': 'https://pantheon.io/',
'fld': 'pantheon.io',
'subdomain': '',
'domain': 'pantheon',
'suffix': 'io',
'tld': 'io',
'kwargs': {'fail_silently': True},
},
{
'url': 'v2.www.google.com',
'fld': 'google.com',
'subdomain': 'v2.www',
'domain': 'google',
'suffix': 'com',
'tld': 'com',
'kwargs': {'fail_silently': True, 'fix_protocol': True},
},
{
'url': '//v2.www.google.com',
'fld': 'google.com',
'subdomain': 'v2.www',
'domain': 'google',
'suffix': 'com',
'tld': 'com',
'kwargs': {'fail_silently': True, 'fix_protocol': True},
},
{
'url': 'http://foo@bar.com',
'fld': 'bar.com',
'subdomain': '',
'domain': 'bar',
'suffix': 'com',
'tld': 'com',
'kwargs': {'fail_silently': True},
},
{
'url': 'http://user:foo@bar.com',
'fld': 'bar.com',
'subdomain': '',
'domain': 'bar',
'suffix': 'com',
'tld': 'com',
'kwargs': {'fail_silently': True},
},
{
'url': 'https://faguoren.xn--fiqs8s',
'fld': 'faguoren.xn--fiqs8s',
'subdomain': '',
'domain': 'faguoren',
'suffix': 'xn--fiqs8s',
'tld': 'xn--fiqs8s',
'kwargs': {'fail_silently': True},
},
{
'url': 'blogs.lemonde.paris',
'fld': 'lemonde.paris',
'subdomain': 'blogs',
'domain': 'lemonde',
'suffix': 'paris',
'tld': 'paris',
'kwargs': {'fail_silently': True, 'fix_protocol': True},
},
{
'url': 'axel.brighton.ac.uk',
'fld': 'brighton.ac.uk',
'subdomain': 'axel',
'domain': 'brighton',
'suffix': 'ac.uk',
'tld': 'ac.uk',
'kwargs': {'fail_silently': True, 'fix_protocol': True},
},
{
'url': 'm.fr.blogspot.com.au',
'fld': 'fr.blogspot.com.au',
'subdomain': 'm',
'domain': 'fr',
'suffix': 'blogspot.com.au',
'tld': 'blogspot.com.au',
'kwargs': {'fail_silently': True, 'fix_protocol': True},
},
{
'url': u'help.www.福岡.jp',
'fld': u'www.福岡.jp',
'subdomain': 'help',
'domain': 'www',
'suffix': u'福岡.jp',
'tld': u'福岡.jp',
'kwargs': {'fail_silently': True, 'fix_protocol': True},
},
{
'url': u'syria.arabic.variant.سوريا',
'fld': u'variant.سوريا',
'subdomain': 'syria.arabic',
'domain': 'variant',
'suffix': u'سوريا',
'tld': u'سوريا',
'kwargs': {'fail_silently': True, 'fix_protocol': True},
},
{
'url': u'http://www.help.kawasaki.jp',
'fld': u'www.help.kawasaki.jp',
'subdomain': '',
'domain': 'www',
'suffix': u'help.kawasaki.jp',
'tld': u'help.kawasaki.jp',
'kwargs': {'fail_silently': True},
},
{
'url': u'http://www.city.kawasaki.jp',
'fld': u'city.kawasaki.jp',
'subdomain': 'www',
'domain': 'city',
'suffix': u'kawasaki.jp',
'tld': u'kawasaki.jp',
'kwargs': {'fail_silently': True},
},
{
'url': u'http://fedoraproject.org',
'fld': u'fedoraproject.org',
'subdomain': '',
'domain': 'fedoraproject',
'suffix': u'org',
'tld': u'org',
'kwargs': {'fail_silently': True},
},
{
'url': u'http://www.cloud.fedoraproject.org',
'fld': u'www.cloud.fedoraproject.org',
'subdomain': '',
'domain': 'www',
'suffix': u'cloud.fedoraproject.org',
'tld': u'cloud.fedoraproject.org',
'kwargs': {'fail_silently': True},
},
{
'url': u'https://www.john.app.os.fedoraproject.org',
'fld': u'john.app.os.fedoraproject.org',
'subdomain': 'www',
'domain': 'john',
'suffix': u'app.os.fedoraproject.org',
'tld': u'app.os.fedoraproject.org',
'kwargs': {'fail_silently': True},
},
{
'url': 'ftp://www.xn--mxail5aa.xn--11b4c3d',
'fld': 'xn--mxail5aa.xn--11b4c3d',
'subdomain': 'www',
'domain': 'xn--mxail5aa',
'suffix': 'xn--11b4c3d',
'tld': 'xn--11b4c3d',
'kwargs': {'fail_silently': True},
},
{
'url': 'http://cloud.fedoraproject.org',
'fld': 'cloud.fedoraproject.org',
'subdomain': '',
'domain': 'cloud.fedoraproject.org',
'suffix': 'cloud.fedoraproject.org',
'tld': 'cloud.fedoraproject.org',
'kwargs': {'fail_silently': True}
},
{
'url': 'github.io',
'fld': 'github.io',
'subdomain': '',
'domain': 'github.io',
'suffix': 'github.io',
'tld': 'github.io',
'kwargs': {'fail_silently': True, 'fix_protocol': True}
},
{
'url': urlsplit('http://lemonde.fr/article.html'),
'fld': 'lemonde.fr',
'subdomain': '',
'domain': 'lemonde',
'suffix': 'fr',
'tld': 'fr',
'kwargs': {'fail_silently': True}
},
{
'url': 'https://github.com....../barseghyanartur/tld/',
'fld': 'github.com',
'subdomain': '',
'domain': 'github',
'suffix': 'com',
'tld': 'com',
'kwargs': {'fail_silently': True}
},
]
self.bad_patterns = {
'v2.www.google.com': {
'exception': TldBadUrl,
},
'/index.php?a=1&b=2': {
'exception': TldBadUrl,
},
'http://www.tld.doesnotexist': {
'exception': TldDomainNotFound,
},
'https://2001:0db8:0000:85a3:0000:0000:ac1f:8001': {
'exception': TldDomainNotFound,
},
'http://192.169.1.1': {
'exception': TldDomainNotFound,
},
'http://localhost:8080': {
'exception': TldDomainNotFound,
},
'https://localhost': {
'exception': TldDomainNotFound,
},
'https://localhost2': {
'exception': TldImproperlyConfigured,
'kwargs': {'search_public': False, 'search_private': False},
},
}
self.invalid_tlds = {
'v2.www.google.com',
'tld.doesnotexist',
'2001:0db8:0000:85a3:0000:0000:ac1f',
'192.169.1.1',
'localhost',
'google.com',
}
self.tld_names_local_path_custom = project_dir(
join(
'tests',
'res',
'effective_tld_names_custom.dat.txt'
)
)
self.good_patterns_custom_parser = [
{
'url': 'http://www.foreverchild',
'fld': 'www.foreverchild',
'subdomain': '',
'domain': 'www',
'suffix': 'foreverchild',
'tld': 'foreverchild',
'kwargs': {
'fail_silently': True,
# 'parser_class': self.get_custom_parser_class(),
},
},
{
'url': 'http://www.v2.foreverchild',
'fld': 'v2.foreverchild',
'subdomain': 'www',
'domain': 'v2',
'suffix': 'foreverchild',
'tld': 'foreverchild',
'kwargs': {
'fail_silently': True,
# 'parser_class': self.get_custom_parser_class(),
},
},
]
reset_settings()
def tearDown(self):
"""Tear down."""
reset_settings()
Registry.reset()
@property
def good_url(self):
return self.good_patterns[0]['url']
@property
def bad_url(self):
return list(self.bad_patterns.keys())[0]
def get_custom_parser_class(
self,
uid: str = 'custom_mozilla',
source_url: str = None,
local_path: str = 'tests/res/effective_tld_names_custom.dat.txt'
) -> Type[BaseTLDSourceParser]:
# Define a custom TLD source parser class
parser_class = type(
'CustomMozillaTLDSourceParser',
(BaseMozillaTLDSourceParser,),
{
'uid': uid,
'source_url': source_url,
'local_path': local_path,
}
)
return parser_class
@log_info
def test_0_tld_names_loaded(self):
"""Test if tld names are loaded."""
get_fld('http://www.google.co.uk')
from ..utils import tld_names
res = len(tld_names) > 0
self.assertTrue(res)
return res
@internet_available_only
@log_info
def test_1_update_tld_names(self):
"""Test updating the tld names (re-fetch mozilla source)."""
res = update_tld_names(fail_silently=False)
self.assertTrue(res)
return res
@log_info
def test_2_fld_good_patterns_pass(self):
"""Test good URL patterns."""
res = []
for data in self.good_patterns:
_res = get_fld(data['url'], **data['kwargs'])
self.assertEqual(_res, data['fld'])
res.append(_res)
return res
@log_info
def test_3_fld_bad_patterns_pass(self):
"""Test bad URL patterns."""
res = []
for url, params in self.bad_patterns.items():
_res = get_fld(url, fail_silently=True)
self.assertEqual(_res, None)
res.append(_res)
return res
@log_info
def test_4_override_settings(self):
"""Testing settings override."""
def override_settings():
"""Override settings."""
return get_setting('DEBUG')
self.assertEqual(defaults.DEBUG, override_settings())
set_setting('DEBUG', True)
self.assertEqual(True, override_settings())
return override_settings()
@log_info
def test_5_tld_good_patterns_pass_parsed_object(self):
"""Test good URL patterns."""
res = []
for data in self.good_patterns:
kwargs = copy.copy(data['kwargs'])
kwargs['as_object'] = True
_res = get_tld(data['url'], **kwargs)
self.assertEqual(_res.tld, data['tld'])
self.assertEqual(_res.subdomain, data['subdomain'])
self.assertEqual(_res.domain, data['domain'])
self.assertEqual(_res.suffix, data['suffix'])
self.assertEqual(_res.fld, data['fld'])
self.assertEqual(
str(_res).encode('utf8'),
data['tld'].encode('utf8')
)
self.assertEqual(
_res.__dict__,
{
'tld': _res.tld,
'domain': _res.domain,
'subdomain': _res.subdomain,
'fld': _res.fld,
'parsed_url': _res.parsed_url,
}
)
res.append(_res)
return res
@log_info
def test_6_override_full_names_path(self):
default = project_dir('dummy.txt')
override_base = '/tmp/test'
set_setting('NAMES_LOCAL_PATH_PARENT', override_base)
modified = project_dir('dummy.txt')
self.assertNotEqual(default, modified)
self.assertEqual(modified, abspath('/tmp/test/dummy.txt'))
@log_info
def test_7_public_private(self):
res = get_fld(
'http://silly.cc.ua',
fail_silently=True,
search_private=False
)
self.assertEqual(res, None)
res = get_fld(
'http://silly.cc.ua',
fail_silently=True,
search_private=True
)
self.assertEqual(res, 'silly.cc.ua')
res = get_fld(
'mercy.compute.amazonaws.com',
fail_silently=True,
search_private=False,
fix_protocol=True
)
self.assertEqual(res, None)
res = get_fld(
'http://whatever.com',
fail_silently=True,
search_public=False
)
self.assertEqual(res, None)
@log_info
def test_8_fld_bad_patterns_exceptions(self):
"""Test exceptions."""
res = []
for url, params in self.bad_patterns.items():
kwargs = params['kwargs'] if 'kwargs' in params else {}
kwargs['fail_silently'] = False
with self.assertRaises(params['exception']):
_res = get_fld(url, **kwargs)
res.append(_res)
return res
@log_info
def test_9_tld_good_patterns_pass(self):
"""Test `get_tld` good URL patterns."""
res = []
for data in self.good_patterns:
_res = get_tld(data['url'], **data['kwargs'])
self.assertEqual(_res, data['tld'])
res.append(_res)
return res
@log_info
def test_10_tld_bad_patterns_pass(self):
"""Test `get_tld` bad URL patterns."""
res = []
for url, params in self.bad_patterns.items():
_res = get_tld(url, fail_silently=True)
self.assertEqual(_res, None)
res.append(_res)
return res
@log_info
def test_11_parse_tld_good_patterns(self):
"""Test `parse_tld` good URL patterns."""
res = []
for data in self.good_patterns:
_res = parse_tld(data['url'], **data['kwargs'])
self.assertEqual(
_res,
(data['tld'], data['domain'], data['subdomain'])
)
res.append(_res)
return res
@log_info
def test_12_is_tld_good_patterns(self):
"""Test `is_tld` good URL patterns."""
for data in self.good_patterns:
self.assertTrue(is_tld(data['tld']))
@log_info
def test_13_is_tld_bad_patterns(self):
"""Test `is_tld` bad URL patterns."""
for _tld in self.invalid_tlds:
self.assertFalse(is_tld(_tld))
@log_info
def test_14_fail_update_tld_names(self):
"""Test fail `update_tld_names`."""
parser_class = self.get_custom_parser_class(
uid='custom_mozilla_2',
source_url='i-do-not-exist'
)
# Assert raise TldIOError on wrong NAMES_SOURCE_URL
with self.assertRaises(TldIOError):
update_tld_names(fail_silently=False, parser_uid=parser_class.uid)
# Assert return False on wrong NAMES_SOURCE_URL
self.assertFalse(
update_tld_names(fail_silently=True, parser_uid=parser_class.uid)
)
@log_info
def test_15_fail_get_tld_names(self):
"""Test fail `update_tld_names`."""
parser_class = self.get_custom_parser_class(
uid='custom_mozilla_3',
source_url='i-do-not-exist',
local_path='/srv/tests/res/effective_tld_names_custom_3.dat.txt'
)
reset_tld_names()
# Assert raise TldIOError on wrong NAMES_SOURCE_URL
for params in self.good_patterns:
kwargs = {'url': params['url']}
kwargs.update(params['kwargs'])
kwargs['fail_silently'] = False
kwargs['parser_class'] = parser_class
with self.assertRaises(TldIOError):
get_tld(**kwargs)
@log_info
def test_15_fail_get_fld_wrong_kwargs(self):
"""Test fail `get_fld` with wrong kwargs."""
with self.assertRaises(TldImproperlyConfigured):
get_fld(self.good_url, as_object=True)
@log_info
def test_16_fail_parse_tld(self):
"""Test fail `parse_tld`.
Assert raise TldIOError on wrong `NAMES_SOURCE_URL` for `parse_tld`.
"""
parser_class = self.get_custom_parser_class(
source_url='i-do-not-exist'
)
parsed_tld = parse_tld(
self.bad_url,
fail_silently=False,
parser_class=parser_class
)
self.assertEqual(parsed_tld, (None, None, None))
@log_info
def test_17_get_tld_names_and_reset_tld_names(self):
"""Test fail `get_tld_names` and repair using `reset_tld_names`."""
tmp_filename = join(
gettempdir(),
f'{self.faker.uuid4()}.dat.txt'
)
parser_class = self.get_custom_parser_class(
source_url='i-do-not-exist',
local_path=tmp_filename
)
reset_tld_names()
with self.subTest('Assert raise TldIOError'):
# Assert raise TldIOError on wrong NAMES_SOURCE_URL for
# `get_tld_names`
with self.assertRaises(TldIOError):
get_tld_names(
fail_silently=False,
parser_class=parser_class
)
tmp_filename = join(
gettempdir(),
f'{self.faker.uuid4()}.dat.txt'
)
parser_class_2 = self.get_custom_parser_class(
source_url='i-do-not-exist-2',
local_path=tmp_filename
)
reset_tld_names()
with self.subTest('Assert get None'):
# Assert get None on wrong `NAMES_SOURCE_URL` for `get_tld_names`
self.assertIsNone(
get_tld_names(
fail_silently=True,
parser_class=parser_class_2
)
)
@internet_available_only
@log_info
def test_18_update_tld_names_cli(self):
"""Test the return code of the CLI version of `update_tld_names`."""
reset_tld_names()
res = update_tld_names_cli()
self.assertEqual(res, 0)
@log_info
def test_19_parse_tld_custom_tld_names_good_patterns(self):
"""Test `parse_tld` good URL patterns for custom tld names."""
res = []
for data in self.good_patterns_custom_parser:
kwargs = copy.copy(data['kwargs'])
kwargs['parser_class'] = self.get_custom_parser_class()
_res = parse_tld(data['url'], **kwargs)
self.assertEqual(
_res,
(data['tld'], data['domain'], data['subdomain'])
)
res.append(_res)
return res
@log_info
def test_20_tld_custom_tld_names_good_patterns_pass_parsed_object(self):
"""Test `get_tld` good URL patterns for custom tld names."""
res = []
for data in self.good_patterns_custom_parser:
kwargs = copy.copy(data['kwargs'])
kwargs.update({
'as_object': True,
'parser_class': self.get_custom_parser_class(),
})
_res = get_tld(data['url'], **kwargs)
self.assertEqual(_res.tld, data['tld'])
self.assertEqual(_res.subdomain, data['subdomain'])
self.assertEqual(_res.domain, data['domain'])
self.assertEqual(_res.suffix, data['suffix'])
self.assertEqual(_res.fld, data['fld'])
self.assertEqual(
str(_res).encode('utf8'),
data['tld'].encode('utf8')
)
self.assertEqual(
_res.__dict__,
{
'tld': _res.tld,
'domain': _res.domain,
'subdomain': _res.subdomain,
'fld': _res.fld,
'parsed_url': _res.parsed_url,
}
)
res.append(_res)
return res
@log_info
def test_21_reset_tld_names_for_custom_parser(self):
"""Test `reset_tld_names` for `tld_names_local_path`."""
res = []
parser_class = self.get_custom_parser_class()
for data in self.good_patterns_custom_parser:
kwargs = copy.copy(data['kwargs'])
kwargs.update({
'as_object': True,
'parser_class': self.get_custom_parser_class(),
})
_res = get_tld(data['url'], **kwargs)
self.assertEqual(_res.tld, data['tld'])
self.assertEqual(_res.subdomain, data['subdomain'])
self.assertEqual(_res.domain, data['domain'])
self.assertEqual(_res.suffix, data['suffix'])
self.assertEqual(_res.fld, data['fld'])
self.assertEqual(
str(_res).encode('utf8'),
data['tld'].encode('utf8')
)
self.assertEqual(
_res.__dict__,
{
'tld': _res.tld,
'domain': _res.domain,
'subdomain': _res.subdomain,
'fld': _res.fld,
'parsed_url': _res.parsed_url,
}
)
res.append(_res)
tld_names = get_tld_names_container()
self.assertIn(parser_class.local_path, tld_names)
reset_tld_names(parser_class.local_path)
self.assertNotIn(parser_class.local_path, tld_names)
return res
@log_info
def test_22_fail_define_custom_parser_class_without_uid(self):
"""Test fail define custom parser class without `uid`."""
class CustomParser(BaseTLDSourceParser):
pass
class AnotherCustomParser(BaseTLDSourceParser):
uid = 'another-custom-parser'
# Assert raise TldImproperlyConfigured
with self.assertRaises(TldImproperlyConfigured):
CustomParser.get_tld_names()
# Assert raise NotImplementedError
with self.assertRaises(NotImplementedError):
AnotherCustomParser.get_tld_names()
@log_info
def test_23_len_trie_nodes(self):
"""Test len of the trie nodes."""
get_tld('http://delusionalinsanity.com')
tld_names = get_tld_names_container()
self.assertGreater(
len(tld_names[MozillaTLDSourceParser.local_path]),
0
)
@log_info
def test_24_get_tld_names_no_arguments(self):
"""Test len of the trie nodes."""
tld_names = get_tld_names()
self.assertGreater(
len(tld_names),
0
)
if __name__ == '__main__':
unittest.main()

@ -0,0 +1,64 @@
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = (
'Trie',
'TrieNode',
)
class TrieNode(object):
"""Class representing a single Trie node."""
__slots__ = ('children', 'exception', 'leaf', 'private')
def __init__(self):
self.children = None
self.exception = None
self.leaf = False
self.private = False
class Trie(object):
"""An adhoc Trie data structure to store tlds in reverse notation order."""
def __init__(self):
self.root = TrieNode()
self.__nodes = 0
def __len__(self):
return self.__nodes
def add(self, tld: str, private: bool = False) -> None:
node = self.root
# Iterating over the tld parts in reverse order
# for part in reversed(tld.split('.')):
tld_split = tld.split('.')
tld_split.reverse()
for part in tld_split:
if part.startswith('!'):
node.exception = part[1:]
break
# To save up some RAM, we initialize the children dict only
# when strictly necessary
if node.children is None:
node.children = {}
child = TrieNode()
else:
child = node.children.get(part)
if child is None:
child = TrieNode()
node.children[part] = child
node = child
node.leaf = True
if private:
node.private = True
self.__nodes += 1
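# Hedged sketch of how the reverse-notation Trie above is filled from public suffix
# list entries; wildcard ('*') and exception ('!') rules are stored the way the
# Mozilla parser feeds them in.
from tld.trie import Trie

trie = Trie()
trie.add('co.uk')                       # stored as uk -> co (leaf)
trie.add('*.kawasaki.jp')               # wildcard child under jp -> kawasaki
trie.add('!city.kawasaki.jp')           # exception noted on the kawasaki node
trie.add('cloudfront.net', private=True)

len(trie)                                      # 4
trie.root.children['uk'].children['co'].leaf   # True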

@ -0,0 +1,624 @@
from __future__ import unicode_literals
import argparse
from codecs import open as codecs_open
from functools import lru_cache
# codecs_open = open
from os.path import isabs
import sys
from typing import Dict, Type, Union, Tuple, List, Optional
from urllib.parse import urlsplit, SplitResult
from .base import BaseTLDSourceParser
from .exceptions import (
TldBadUrl,
TldDomainNotFound,
TldImproperlyConfigured,
TldIOError,
)
from .helpers import project_dir
from .trie import Trie
from .registry import Registry
from .result import Result
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2020 Artur Barseghyan'
__license__ = 'MPL-1.1 OR GPL-2.0-only OR LGPL-2.1-or-later'
__all__ = (
'BaseMozillaTLDSourceParser',
'get_fld',
'get_tld',
'get_tld_names',
'get_tld_names_container',
'is_tld',
'MozillaTLDSourceParser',
'parse_tld',
'pop_tld_names_container',
'process_url',
'reset_tld_names',
'Result',
'tld_names',
'update_tld_names',
'update_tld_names_cli',
'update_tld_names_container',
)
tld_names: Dict[str, Trie] = {}
def get_tld_names_container() -> Dict[str, Trie]:
"""Get container of all tld names.
:return:
:rtype dict:
"""
global tld_names
return tld_names
def update_tld_names_container(tld_names_local_path: str,
trie_obj: Trie) -> None:
"""Update TLD Names container item.
:param tld_names_local_path:
:param trie_obj:
:return:
"""
global tld_names
# tld_names.update({tld_names_local_path: trie_obj})
tld_names[tld_names_local_path] = trie_obj
def pop_tld_names_container(tld_names_local_path: str) -> None:
"""Remove TLD names container item.
:param tld_names_local_path:
:return:
"""
global tld_names
tld_names.pop(tld_names_local_path, None)
@lru_cache(maxsize=128, typed=True)
def update_tld_names(
fail_silently: bool = False,
parser_uid: str = None
) -> bool:
"""Update TLD names.
:param fail_silently:
:param parser_uid:
:return:
"""
results: List[bool] = []
results_append = results.append
if parser_uid:
parser_cls = Registry.get(parser_uid, None)
if parser_cls and parser_cls.source_url:
results_append(
parser_cls.update_tld_names(fail_silently=fail_silently)
)
else:
for parser_uid, parser_cls in Registry.items():
if parser_cls and parser_cls.source_url:
results_append(
parser_cls.update_tld_names(fail_silently=fail_silently)
)
return all(results)
def update_tld_names_cli() -> int:
"""CLI wrapper for update_tld_names.
Since update_tld_names returns True on success, we need to negate the
result to match CLI semantics.
"""
parser = argparse.ArgumentParser(description='Update TLD names')
parser.add_argument(
'parser_uid',
nargs='?',
default=None,
help="UID of the parser to update TLD names for.",
)
parser.add_argument(
'--fail-silently',
dest="fail_silently",
default=False,
action='store_true',
help="Fail silently",
)
args = parser.parse_args(sys.argv[1:])
parser_uid = args.parser_uid
fail_silently = args.fail_silently
return int(
not update_tld_names(
parser_uid=parser_uid,
fail_silently=fail_silently
)
)
def get_tld_names(
fail_silently: bool = False,
retry_count: int = 0,
parser_class: Type[BaseTLDSourceParser] = None
) -> Dict[str, Trie]:
"""Build the ``tlds`` list if empty. Recursive.
:param fail_silently: If set to True, no exceptions are raised and None
is returned on failure.
:param retry_count: If greater than 1, we raise an exception in order
to avoid infinite loops.
:param parser_class:
:type fail_silently: bool
:type retry_count: int
:type parser_class: BaseTLDSourceParser
:return: Dict of TLD name tries, keyed by the parser's local path
:rtype: dict
"""
if not parser_class:
parser_class = MozillaTLDSourceParser
return parser_class.get_tld_names(
fail_silently=fail_silently,
retry_count=retry_count
)
# **************************************************************************
# **************************** Parser classes ******************************
# **************************************************************************
class BaseMozillaTLDSourceParser(BaseTLDSourceParser):
@classmethod
def get_tld_names(
cls,
fail_silently: bool = False,
retry_count: int = 0
) -> Optional[Dict[str, Trie]]:
"""Parse.
:param fail_silently:
:param retry_count:
:return:
"""
if retry_count > 1:
if fail_silently:
return None
else:
raise TldIOError
global tld_names
_tld_names = tld_names
# _tld_names = get_tld_names_container()
# If already loaded, return
if (
cls.local_path in _tld_names
and _tld_names[cls.local_path] is not None
):
return _tld_names
try:
# Load the TLD names file
if isabs(cls.local_path):
local_path = cls.local_path
else:
local_path = project_dir(cls.local_path)
local_file = codecs_open(
local_path,
'r',
encoding='utf8'
)
trie = Trie()
trie_add = trie.add # Performance opt
# Make a list of it all, strip all garbage
private_section = False
for line in local_file:
if '===BEGIN PRIVATE DOMAINS===' in line:
private_section = True
# Punycode TLD names
if '// xn--' in line:
line = line.split()[1]
if line[0] in ('/', '\n'):
continue
trie_add(
f'{line.strip()}',
private=private_section
)
update_tld_names_container(cls.local_path, trie)
local_file.close()
except IOError as err:
# Grab the file
cls.update_tld_names(
fail_silently=fail_silently
)
# Increment ``retry_count`` in order to avoid infinite loops
retry_count += 1
# Run again
return cls.get_tld_names(
fail_silently=fail_silently,
retry_count=retry_count
)
except Exception as err:
if fail_silently:
return None
else:
raise err
finally:
try:
local_file.close()
except Exception:
pass
return _tld_names
class MozillaTLDSourceParser(BaseMozillaTLDSourceParser):
"""Mozilla TLD source."""
uid: str = 'mozilla'
source_url: str = 'https://publicsuffix.org/list/public_suffix_list.dat'
local_path: str = 'res/effective_tld_names.dat.txt'
# **************************************************************************
# **************************** Core functions ******************************
# **************************************************************************
def process_url(
url: str,
fail_silently: bool = False,
fix_protocol: bool = False,
search_public: bool = True,
search_private: bool = True,
parser_class: Type[BaseTLDSourceParser] = MozillaTLDSourceParser
) -> Union[Tuple[List[str], int, SplitResult], Tuple[None, None, SplitResult]]:
"""Process URL.
:param parser_class:
:param url:
:param fail_silently:
:param fix_protocol:
:param search_public:
:param search_private:
:return:
"""
if not (search_public or search_private):
raise TldImproperlyConfigured(
"Either `search_public` or `search_private` (or both) shall be "
"set to True."
)
# Init
_tld_names = get_tld_names(
fail_silently=fail_silently,
parser_class=parser_class
)
if not isinstance(url, SplitResult):
url = url.lower()
if (
fix_protocol and not url.startswith(('//', 'http://', 'https://'))
):
url = f'https://{url}'
# Get parsed URL as we might need it later
parsed_url = urlsplit(url)
else:
parsed_url = url
# Get (sub) domain name
domain_name = parsed_url.hostname
if not domain_name:
if fail_silently:
return None, None, parsed_url
else:
raise TldBadUrl(url=url)
# This will correctly handle dots at the end of domain name in URLs like
# https://github.com............/barseghyanartur/tld/
if domain_name.endswith('.'):
domain_name = domain_name.rstrip('.')
domain_parts = domain_name.split('.')
tld_names_local_path = parser_class.local_path
# Now we query our Trie iterating on the domain parts in reverse order
node = _tld_names[tld_names_local_path].root
current_length = 0
tld_length = 0
match = None
len_domain_parts = len(domain_parts)
for i in range(len_domain_parts-1, -1, -1):
part = domain_parts[i]
# Cannot go deeper
if node.children is None:
break
# Exception
if part == node.exception:
break
child = node.children.get(part)
# Wildcards
if child is None:
child = node.children.get('*')
# If the current part is not in current node's children, we can stop
if child is None:
break
# Else we move deeper and increment our tld offset
current_length += 1
node = child
if node.leaf:
tld_length = current_length
match = node
# Checking the node we finished on is a leaf and is one we allow
if (
(match is None) or
(not match.leaf) or
(not search_public and not match.private) or
(not search_private and match.private)
):
if fail_silently:
return None, None, parsed_url
else:
raise TldDomainNotFound(domain_name=domain_name)
if len_domain_parts == tld_length:
non_zero_i = -1 # hostname = tld
else:
non_zero_i = max(1, len_domain_parts - tld_length)
return domain_parts, non_zero_i, parsed_url
def get_fld(
url: str,
fail_silently: bool = False,
fix_protocol: bool = False,
search_public: bool = True,
search_private: bool = True,
parser_class: Type[BaseTLDSourceParser] = MozillaTLDSourceParser,
**kwargs
) -> Optional[str]:
"""Extract the first level domain.
Extract the top level domain based on the mozilla's effective TLD names
dat file. Returns a string. May throw ``TldBadUrl`` or
``TldDomainNotFound`` exceptions if there's bad URL provided or no TLD
match found respectively.
:param url: URL to get top level domain from.
:param fail_silently: If set to True, no exceptions are raised and None
is returned on failure.
:param fix_protocol: If set to True, missing or wrong protocol is
ignored (https is appended instead).
:param search_public: If set to True, search in public domains.
:param search_private: If set to True, search in private domains.
:param parser_class:
:type url: str
:type fail_silently: bool
:type fix_protocol: bool
:type search_public: bool
:type search_private: bool
:return: String with the first level domain; returns None on failure.
Note that ``as_object`` is not supported here and raises
``TldImproperlyConfigured``.
:rtype: str
"""
if 'as_object' in kwargs:
raise TldImproperlyConfigured(
"`as_object` argument is deprecated for `get_fld`. Use `get_tld` "
"instead."
)
domain_parts, non_zero_i, parsed_url = process_url(
url=url,
fail_silently=fail_silently,
fix_protocol=fix_protocol,
search_public=search_public,
search_private=search_private,
parser_class=parser_class
)
if domain_parts is None:
return None
# This should be None when domain_parts is None
# but mypy isn't quite smart enough to figure that out yet
assert non_zero_i is not None
if non_zero_i < 0:
# hostname = tld
return parsed_url.hostname
return ".".join(domain_parts[non_zero_i-1:])
def get_tld(
url: str,
fail_silently: bool = False,
as_object: bool = False,
fix_protocol: bool = False,
search_public: bool = True,
search_private: bool = True,
parser_class: Type[BaseTLDSourceParser] = MozillaTLDSourceParser
) -> Optional[Union[str, Result]]:
"""Extract the top level domain.
Extract the top level domain based on Mozilla's effective TLD names
dat file. Returns a string. May throw ``TldBadUrl`` or
``TldDomainNotFound`` exceptions if a bad URL is provided or no TLD
match is found, respectively.
:param url: URL to get top level domain from.
:param fail_silently: If set to True, no exceptions are raised and None
is returned on failure.
:param as_object: If set to True, ``tld.utils.Result`` object is returned,
exposing ``domain``, ``suffix`` and ``tld`` properties.
:param fix_protocol: If set to True, missing or wrong protocol is
ignored (https is appended instead).
:param search_public: If set to True, search in public domains.
:param search_private: If set to True, search in private domains.
:param parser_class:
:type url: str
:type fail_silently: bool
:type as_object: bool
:type fix_protocol: bool
:type search_public: bool
:type search_private: bool
:return: String with top level domain (if ``as_object`` argument
is set to False) or a ``tld.utils.Result`` object (if ``as_object``
argument is set to True); returns None on failure.
:rtype: str
"""
domain_parts, non_zero_i, parsed_url = process_url(
url=url,
fail_silently=fail_silently,
fix_protocol=fix_protocol,
search_public=search_public,
search_private=search_private,
parser_class=parser_class
)
if domain_parts is None:
return None
# This should be None when domain_parts is None
# but mypy isn't quite smart enough to figure that out yet
assert non_zero_i is not None
if not as_object:
if non_zero_i < 0:
# hostname = tld
return parsed_url.hostname
return ".".join(domain_parts[non_zero_i:])
if non_zero_i < 0:
# hostname = tld
subdomain = ""
domain = ""
# This is checked in process_url but the type is ambiguous (Optional[str])
# so this assertion is just to satisfy mypy
assert parsed_url.hostname is not None, "No hostname in URL"
_tld = parsed_url.hostname
else:
subdomain = ".".join(domain_parts[:non_zero_i-1])
domain = ".".join(
domain_parts[non_zero_i-1:non_zero_i]
)
_tld = ".".join(domain_parts[non_zero_i:])
return Result(
subdomain=subdomain,
domain=domain,
tld=_tld,
parsed_url=parsed_url
)
def parse_tld(
url: str,
fail_silently: bool = False,
fix_protocol: bool = False,
search_public: bool = True,
search_private: bool = True,
parser_class: Type[BaseTLDSourceParser] = MozillaTLDSourceParser
) -> Union[Tuple[None, None, None], Tuple[str, str, str]]:
"""Parse TLD into parts.
:param url:
:param fail_silently:
:param fix_protocol:
:param search_public:
:param search_private:
:param parser_class:
:return: Tuple (tld, domain, subdomain)
:rtype: tuple
"""
try:
obj = get_tld(
url,
fail_silently=fail_silently,
as_object=True,
fix_protocol=fix_protocol,
search_public=search_public,
search_private=search_private,
parser_class=parser_class
)
if obj is None:
return None, None, None
return obj.tld, obj.domain, obj.subdomain # type: ignore
except (
TldBadUrl,
TldDomainNotFound,
TldImproperlyConfigured,
TldIOError
):
pass
return None, None, None
def is_tld(
value: str,
search_public: bool = True,
search_private: bool = True,
parser_class: Type[BaseTLDSourceParser] = MozillaTLDSourceParser
) -> bool:
"""Check if given URL is tld.
:param value: URL to get top level domain from.
:param search_public: If set to True, search in public domains.
:param search_private: If set to True, search in private domains.
:param parser_class:
:type value: str
:type search_public: bool
:type search_private: bool
:return:
:rtype: bool
"""
_tld = get_tld(
url=value,
fail_silently=True,
fix_protocol=True,
search_public=search_public,
search_private=search_private,
parser_class=parser_class
)
return value == _tld
def reset_tld_names(tld_names_local_path: str = None) -> None:
"""Reset the ``tld_names`` to empty value.
If ``tld_names_local_path`` is given, removes specified
entry from ``tld_names`` instead.
:param tld_names_local_path:
:type tld_names_local_path: str
:return:
"""
if tld_names_local_path:
pop_tld_names_container(tld_names_local_path)
else:
global tld_names
tld_names = {}
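# Rough walk-through of the lookup above, assuming the bundled Mozilla suffix list
# is available: process_url() returns the split hostname plus the index where the
# registered domain starts, and get_tld()/get_fld() just slice from there.
from tld.utils import process_url

parts, idx, split_result = process_url('http://forum.google.co.uk')
# parts == ['forum', 'google', 'co', 'uk'], idx == 2
".".join(parts[idx:])       # 'co.uk'        -> what get_tld() returns
".".join(parts[idx - 1:])   # 'google.co.uk' -> what get_fld() returns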

@ -8,6 +8,7 @@ bottle-fdsend=0.1.1
chardet=3.0.4
dogpile.cache=0.6.5
enzyme=0.4.1
ffsubsync=2020-08-04
Flask=1.1.1
gevent-websocker=0.10.1
gitpython=2.1.9

Binary files not shown: 24 images removed (sizes 86 KiB–2.8 MiB) and 1 image added (855 KiB).

@ -111,18 +111,20 @@ a.link:hover, a.link:focus {
color: #e1e2e3;
font-size: 14px; }
.sidebar-nav ul li a.active, .sidebar-nav ul li a:hover {
color: #911f93;
cursor: pointer; }
color: #ffffff;
cursor: pointer;
background: #911f93; }
.sidebar-nav ul li a.active {
font-weight: 400;
color: #911f93; }
color: #ffffff;
background: #252833; }
.sidebar-nav ul li ul {
padding-left: 60px;
padding-left: 0px;
padding-top: 0;
transition-delay: inherit;
border-left: 3px solid #911f93; }
.sidebar-nav ul li ul li a {
padding: 15px 35px 10px 15px; }
padding-left: 60px; }
.sidebar-nav ul li ul ul {
padding-left: 35px; }
.sidebar-nav ul li.nav-small-cap {
@ -134,17 +136,17 @@ a.link:hover, a.link:focus {
.sidebar-nav > ul > li {
margin-bottom: 0px; }
.sidebar-nav > ul > li.active > a {
color: #911f93;
color: #ffffff;
font-weight: 400;
border-left: 3px solid #911f93;
background: #252833;
padding: 8px 35px 8px 37px;
box-shadow: 0 0 0 rgba(0, 0, 0, 0.2); }
.sidebar-nav > ul > li.active > a i {
color: #911f93;
color: #ffffff;
font-size: 14px; }
.sidebar-nav > ul > li > a.active i, .sidebar-nav > ul > li > a:hover i {
color: #911f93; }
color: #ffffff; }
.sidebar-nav > ul > li > a i {
width: 27px;
height: 27px;
@ -157,8 +159,8 @@ a.link:hover, a.link:focus {
margin-top: 6px; }
.sidebar-nav > ul > li > a.active {
font-weight: 400;
background: #242933;
color: #911f93; }
background: #252833;
color: #ffffff; }
@media (min-width: 768px) {
.mini-sidebar .sidebar-nav #sidebarnav li {
@ -170,7 +172,7 @@ a.link:hover, a.link:focus {
top: 45px;
width: 200px;
z-index: 1001;
background: #f2f6f8;
background: #3a3f51;
display: none;
padding-left: 1px; }
@ -229,7 +231,9 @@ a.link:hover, a.link:focus {
.mini-sidebar .sidebar-nav #sidebarnav > li.active > a {
border-color: transparent; }
.mini-sidebar .sidebar-nav #sidebarnav > li.active > a.active {
border-color: #911f93; } }
border-color: #911f93; }
.mini-sidebar .sidebar-nav ul li ul li a {
padding-left: 15px; } }
/*******************
/*Buttons
*******************/

File diff suppressed because one or more lines are too long

@ -157,24 +157,27 @@ a.link {
&.active,
&:hover {
color: $topbar;
color: $white;
cursor: pointer;
background: $topbar;
}
&.active {
font-weight: 400;
color: $topbar;
color: $white;
background: $themecolor-alt;
}
}
ul {
padding-left: 60px;
padding-left: 0px;
padding-top: 0;
transition-delay: inherit;
border-left: 3px solid $topbar;
li a {
padding: 15px 35px 10px 15px;
//padding: 15px 35px 10px 15px;
padding-left: 60px;
}
ul {
@ -197,7 +200,7 @@ a.link {
margin-bottom: 0px;
&.active > a {
color: $topbar;
color: $white;
font-weight: 400;
border-left: 3px solid $topbar;
background: $themecolor-alt;
@ -205,7 +208,7 @@ a.link {
box-shadow: 0 0 0 rgba(0, 0, 0, 0.2);
i {
color: $topbar;
color: $white;
font-size: 14px;
}
}
@ -217,7 +220,7 @@ a.link {
&:hover {
i {
color: $topbar;
color: $white;
}
}
@ -237,8 +240,8 @@ a.link {
&.active {
font-weight: 400;
background: #242933;
color: $topbar;
background: $themecolor-alt;
color: $white;
}
}
}
@ -253,7 +256,7 @@ a.link {
top: 45px;
width: 200px;
z-index: 1001;
background: $sidebar-alt;
background: $sidebar;
display: none;
padding-left: 1px;
}
@ -342,6 +345,9 @@ a.link {
border-color: $topbar;
}
}
.sidebar-nav ul li ul li a {
padding-left: 15px;
}
}
}

@ -93,6 +93,7 @@
#add_path_sonarr, #add_path_radarr, #add_path_bazarr {
margin-bottom: 0.5em;
}
</style>
{% endblock head %}
@ -243,16 +244,23 @@
</ul>
</li>
<li><a href="#"><i class="fas fa-exclamation-triangle"></i><span
class="hide-menu"> Wanted</span></a>
<li><a href="#"><i class="fas fa-exclamation-triangle"></i>
<span class="hide-menu"> Wanted </span>
{% if settings.general.getboolean('use_sonarr') %}
<span class="wanted_series_badge"></span>
{% endif %}
{% if settings.general.getboolean('use_radarr') %}
<span class="wanted_movies_badge"></span>
{% endif %}
</a>
<ul aria-expanded="false" class="collapse">
{% if settings.general.getboolean('use_sonarr') %}
<li>
<a href="{{ url_for('wantedseries') }}"> Series <span id="wanted_series_badge"></span></a>
<a href="{{ url_for('wantedseries') }}"> Series <span class="wanted_series_badge"></span></a>
</li>
{% endif %}
{% if settings.general.getboolean('use_radarr') %}
<li><a href="{{ url_for('wantedmovies') }}"> Movies <span id="wanted_movies_badge"></span></a></li>
<li><a href="{{ url_for('wantedmovies') }}"> Movies <span class="wanted_movies_badge"></span></a></li>
{% endif %}
</ul>
</li>
@ -272,11 +280,11 @@
</li>
<li><a href="#"><i
class="fas fa-laptop"></i><span class="hide-menu"> System</span></a>
class="fas fa-laptop"></i><span class="hide-menu"> System </span><span class="throttled_providers_count"></span></a>
<ul aria-expanded="false" class="collapse">
<li><a href="{{ url_for('systemtasks') }}"> Tasks</a></li>
<li><a href="{{ url_for('systemlogs') }}"> Logs</a></li>
<li><a href="{{ url_for('systemproviders') }}"> Providers <span id="throttled_providers_count"></span></a></li>
<li><a href="{{ url_for('systemproviders') }}"> Providers <span class="throttled_providers_count"></span></a></li>
<li><a href="{{ url_for('systemstatus') }}"> Status</a></li>
<li><a href="{{ url_for('systemreleases') }}"> Releases</a></li>
</ul>
@ -366,7 +374,9 @@
<script>
{% if not request.endpoint == 'login_page' %}
$(document).ready(function () {
BadgesAjax();
BadgesAjax('series');
BadgesAjax('movies');
BadgesAjax('providers');
$(window).on( 'ready orientationchange resize', function() {
adjustResponsiveUI();
@ -404,30 +414,43 @@
events.on('event', function (event) {
var event_json_badges = JSON.parse(event);
if (event_json_badges.type === 'badges') {
BadgesAjax();
if (event_json_badges.type === 'badges_series') {
BadgesAjax('series');
} else if (event_json_badges.type === 'badges_movies') {
BadgesAjax('movies');
} else if (event_json_badges.type === 'badges_providers') {
BadgesAjax('providers');
}
});
function BadgesAjax() {
function BadgesAjax(type) {
if (type=='series') {
var url = "{{url_for('api.badgesseries')}}";
} else if (type=='movies') {
var url = "{{url_for('api.badgesmovies')}}";
} else if (type=='providers') {
var url = "{{url_for('api.badgesproviders')}}";
} else {
return;
}
$.ajax({
url: "{{url_for('api.badges')}}",
url: url,
async: true,
success: function (data) {
if (data['throttled_providers']) {
$('#throttled_providers_count').html('<span class="badge badge-secondary">' + data['throttled_providers'] + '</span>');
} else {
$('#throttled_providers_count').html('');
if (data['throttled_providers'] == 0) {
$('.throttled_providers_count').html('');
} else if (data['throttled_providers'] > 0) {
$('.throttled_providers_count').html('<span class="badge badge-secondary">' + data['throttled_providers'] + '</span>');
}
if (data['missing_episodes']) {
$('#wanted_series_badge').html('<span class="badge badge-secondary">' + data['missing_episodes'] + '</span>');
} else {
$('#wanted_series_badge').html('');
if (data['missing_episodes'] == 0) {
$('.wanted_series_badge').html('');
} else if (data['missing_episodes'] > 0) {
$('.wanted_series_badge').html('<span class="badge badge-secondary">' + data['missing_episodes'] + '</span>');
}
if (data['missing_movies']) {
$('#wanted_movies_badge').html('<span class="badge badge-secondary">' + data['missing_movies'] + '</span>');
} else {
$('#wanted_movies_badge').html('');
if (data['missing_movies'] == 0) {
$('.wanted_movies_badge').html('');
} else if (data['missing_movies'] > 0) {
$('.wanted_movies_badge').html('<span class="badge badge-secondary">' + data['missing_movies'] + '</span>');
}
},

@ -40,6 +40,10 @@
white-space: nowrap;
vertical-align: middle;
}
.dtrg-start {
cursor: pointer;
}
</style>
{% endblock page_head %}
@ -206,7 +210,7 @@
<input type="hidden" id="upload_audioLanguage" name="audioLanguage" value=""/>
</div>
<div class="modal-footer">
<button type="submit" id="upload_save_button" class="btn btn-info">Save</button>
<span id="upload_save_button_span"><button type="submit" id="upload_save_button" class="btn btn-info">Save</button></span>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
</div>
</form>
@ -271,7 +275,7 @@
<input type="hidden" id="edit_sonarrSeriesId" name="sonarrSeriesId" value=""/>
</div>
<div class="modal-footer">
<button type="submit" id="edit_save_button" class="btn btn-info">Save</button>
<button type="submit" id="edit_save_button" class="btn btn-info"><span id="edit_save_button_span">Save</span></button>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
</div>
</form>
@ -327,7 +331,7 @@
<tr>
<th style="text-align: left;">Language:</th>
<th style="text-align: left;">Filename:</th>
<th style="text-align: left;">Sync:</th>
<th style="text-align: left;">Tools:</th>
</tr>
</thead>
</table>
@ -339,6 +343,154 @@
</div>
</div>
</div>
<div id="episodeSubtitleModColorModal" class="modal" tabindex="-1" role="dialog">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Chose Color</h5><br>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">&times;</span>
</button>
</div>
<form class="form" name="subtitles_mod_color_form" id="subtitles_mod_color_form">
<div class="modal-body">
<div class="container-fluid">
<div class="row">
<div class="col text-right">
<b>Color Name</b>
</div>
<div class="form-group col">
<select class="form-control selectpicker" id="subzero_color_name">
<option value="white">White</option>
<option value="light-grey">Light Grey</option>
<option value="red">Red</option>
<option value="green">Green</option>
<option value="yellow">Yellow</option>
<option value="blue">Blue</option>
<option value="magenta">Magenta</option>
<option value="cyan">Cyan</option>
<option value="black">Black</option>
<option value="dark-red">Dark Red</option>
<option value="dark-green">Dark Green</option>
<option value="dark-yellow">Dark Yellow</option>
<option value="dark-blue">Dark Blue</option>
<option value="dark-magenta">Dark Magenta</option>
<option value="dark-cyan">Dark Cyan</option>
<option value="dark-grey">Dark Grey</option>
</select>
</div>
<input type="hidden" id="subzero_color_data_language" value="" />
<input type="hidden" id="subzero_color_data_path" value="" />
</div>
</div>
</div>
<div class="modal-footer">
<span id="subtitles_mod_color_save_button_span"><button type="submit" id="subtitles_mod_color_save_button" class="btn btn-info">Save</button></span>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</form>
</div>
</div>
</div>
<div id="episodeSubtitleModFpsModal" class="modal" tabindex="-1" role="dialog">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Convert frame rate of subtitle</h5><br>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">&times;</span>
</button>
</div>
<form class="form" name="subtitles_mod_fps_form" id="subtitles_mod_fps_form">
<div class="modal-body">
<div class="container-fluid">
<div class="row">
<div class="col text-right">
<b>From frame rate</b>
</div>
<div class="form-group col">
<input type="text" class="form-control" list="default_frame_rates" id="subzero_fps_from" minlength="2" maxlength="6" required autocomplete>
<datalist id="default_frame_rates">
<option value="23.976">23.976</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="29.97">29.97</option>
<option value="30">30</option>
</datalist>
</div>
</div>
<div class="row">
<div class="col text-right">
<b>To frame rate</b>
</div>
<div class="form-group col">
<input type="text" class="form-control" list="default_frame_rates" id="subzero_fps_to" minlength="2" maxlength="6" required autocomplete>
</div>
</div>
<input type="hidden" id="subzero_fps_data_language" value="" />
<input type="hidden" id="subzero_fps_data_path" value="" />
</div>
</div>
<div class="modal-footer">
<span id="subtitles_mod_fps_save_button_span"><button type="submit" id="subtitles_mod_fps_save_button" class="btn btn-info">Save</button></span>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</form>
</div>
</div>
</div>
<div id="episodeSubtitleModOffsetModal" class="modal" tabindex="-1" role="dialog">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Adjust All Times</h5><br>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">&times;</span>
</button>
</div>
<form class="form" name="subtitles_mod_offset_form" id="subtitles_mod_offset_form">
<div class="modal-body">
<div class="container-fluid">
<div class="row">
<div class="col text-right">
<b>Hour:min:sec:ms</b>
</div>
<div class="form-group col">
<input type="text" class="form-control" id="subzero_offset_time" minlength="12" maxlength="12" required value="00:00:00.100">
</div>
</div>
<div class="row justify-content-center">
<div class="form-group col-sm-7">
<label class="custom-control custom-radio">
<input type="radio" class="custom-control-input" id="subzero_offset_show_earlier" name="subzero_offset_dir" checked value="0">
<span class="custom-control-label" for="subzero_offset_show_earlier">Show earlier</span>
</label>
</div>
</div>
<div class="row justify-content-center">
<div class="form-group col-sm-7">
<label class="custom-control custom-radio">
<input type="radio" class="custom-control-input" id="subzero_offset_show_later" name="subzero_offset_dir" value="1">
<span class="custom-control-label" for="subzero_offset_show_later">Show later</span>
</label>
</div>
</div>
<input type="hidden" id="subzero_offset_data_language" value="" />
<input type="hidden" id="subzero_offset_data_path" value="" />
</div>
</div>
<div class="modal-footer">
<span id="subtitles_mod_offset_save_button_span"><button type="submit" id="subtitles_mod_offset_save_button" class="btn btn-info">Save</button></span>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</form>
</div>
</div>
</div>
{% endblock body %}
{% block tail %}
@ -391,8 +543,15 @@
r.style.display = collapsed ? 'none' : '';
});
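// Reflect the season group's collapsed state with a right or down chevron in its header row.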
if (collapsed) {
var chevron_icon = '<i class="fas fa-chevron-circle-right"></i>';
} else {
var chevron_icon = '<i class="fas fa-chevron-circle-down"></i>';
}
return $('<tr/>')
.append('<td colspan=' + rows.columns()[0].length + '>Season ' + group + '</td>')
.append('<td colspan=' + rows.columns()[0].length + '>Season ' + group + ' ' + chevron_icon + '</td>')
.attr('data-name', group)
.toggleClass('collapsed', collapsed);
}
@ -818,15 +977,21 @@
processData: false,
contentType: false,
type: 'POST',
beforeSend: function () {
$('#edit_save_button_span').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
success: function () {
seriesDetailsRefresh();
$('#editModal').modal('hide');
$('#edit_save_button_span').html('Save');
}
});
});
$('#uploadModal').on('hidden.bs.modal', function () {
$(this).find('form')[0].reset();
$(this).find('form').trigger('reset');
$('.custom-file-label').text('Choose file')
$('#upload_save_button_span').html('<button type="submit" id="upload_save_button" class="btn btn-info">Save</button>');
});
events.on('event', function (event) {
@ -1008,7 +1173,20 @@
{
data: null,
"render": function (data) {
return '<a href="" class="subtitles_sync badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-videopath="' + data.videopath + '"><i class="far fa-play-circle"></i></a>';
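// Build one tool link per action (sync plus each subzero mod); the subtitle language and path travel in data attributes.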
var tools = '';
tools += '<a href="" class="subtitles_sync badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-videopath="' + data.videopath + '" data-toggle="tooltip" data-placement="right" title="Sync"><i class="far fa-play-circle"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="remove_HI" data-toggle="tooltip" data-placement="right" title="Remove HI-tags"><i class="fa fa-deaf"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="remove_tags" data-toggle="tooltip" data-placement="right" title="Remove style tags"><i class="fa fa-code"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="OCR_fixes" data-toggle="tooltip" data-placement="right" title="OCR Fixes"><i class="fa fa-image"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="common" data-toggle="tooltip" data-placement="right" title="Common Fixes"><i class="fas fa-magic"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="fix_uppercase" data-toggle="tooltip" data-placement="right" title="Fix Uppercase"><i class="fa fa-text-height"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="reverse_rtl" data-toggle="tooltip" data-placement="right" title="Reverse RTL"><i class="fa fa-exchange-alt"></i></a> ';
tools += '<a href="" class="subtitles_mod_color badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-toggle="tooltip" data-placement="right" title="Adds color to your subtitles"><i class="fa fa-paint-brush"></i></a> ';
tools += '<a href="" class="subtitles_mod_fps badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-toggle="tooltip" data-placement="right" title="Change Frame Rate"><i class="fa fa-film"></i></a> ';
tools += '<a href="" class="subtitles_mod_offset badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-toggle="tooltip" data-placement="right" title="Adjust all times (show earlier/later)"><i class="fa fa-clock"></i></a> ';
return tools;
}
}
]
@ -1037,6 +1215,7 @@
dataType: "json",
data: values,
beforeSend: function () {
$('.subtitles_sync').tooltip('hide')
cell.html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
complete: function () {
@ -1044,6 +1223,163 @@
}
});
});
$('#episode_tools_result').on('click', '.subtitles_mod', function (e) {
e.preventDefault();
const values = {
language: $(this).attr("data-language"),
subtitlesPath: $(this).attr("data-path"),
mod: $(this).attr("data-mod"),
};
var cell = $(this).parent();
$.ajax({
url: "{{ url_for('api.submods') }}",
type: "POST",
dataType: "json",
data: values,
beforeSend: function () {
$('.subtitles_mod').tooltip('hide')
cell.html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
complete: function () {
$('#episodeToolsModal').modal('hide');
}
});
});
$('#episode_tools_result').on('click', '.subtitles_mod_color', function (e) {
e.preventDefault();
$('#subzero_color_data_language').val($(this).attr("data-language"))
$('#subzero_color_data_path').val($(this).attr("data-path"))
$('#episodeToolsModal').modal('hide');
$('#episodeSubtitleModColorModal')
.modal({
focus: false
});
});
$('#subtitles_mod_color_form').on('submit', function (e) {
e.preventDefault();
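// Wrap the selected colour into a subzero color(name=...) mod before posting it to the API.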
const values = {
language: $('#subzero_color_data_language').val(),
subtitlesPath: $('#subzero_color_data_path').val(),
mod: 'color(name=' + $('#subzero_color_name').val() + ')',
};
$.ajax({
url: "{{ url_for('api.submods') }}",
type: "POST",
dataType: "json",
data: values,
beforeSend: function () {
$('#subtitles_mod_color_save_button').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
complete: function () {
$('#episodeSubtitleModColorModal').modal('hide');
}
});
});
$('#episodeSubtitleModColorModal').on('hidden.bs.modal', function (e) {
$('#subtitles_mod_color_save_button_span').html('<button type="submit" id="subtitles_mod_color_save_button" class="btn btn-info">Save</button>');
});
$('#episode_tools_result').on('click', '.subtitles_mod_fps', function (e) {
e.preventDefault();
$('#subzero_fps_data_language').val($(this).attr("data-language"))
$('#subzero_fps_data_path').val($(this).attr("data-path"))
$('#episodeToolsModal').modal('hide');
$('#episodeSubtitleModFpsModal')
.modal({
focus: false
});
});
$('#subtitles_mod_fps_form').on('submit', function (e) {
e.preventDefault();
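// Build a subzero change_FPS mod from the source and target frame rates.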
const values = {
language: $('#subzero_fps_data_language').val(),
subtitlesPath: $('#subzero_fps_data_path').val(),
mod: 'change_FPS(from=' + $('#subzero_fps_from').val() + ',to=' + $('#subzero_fps_to').val() + ')',
};
$.ajax({
url: "{{ url_for('api.submods') }}",
type: "POST",
dataType: "json",
data: values,
beforeSend: function () {
$('#subtitles_mod_fps_save_button').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
complete: function () {
$('#episodeSubtitleModFpsModal').modal('hide');
}
});
});
$('#episodeSubtitleModFpsModal').on('hidden.bs.modal', function (e) {
$('#subtitles_mod_fps_save_button_span').html('<button type="submit" id="subtitles_mod_fps_save_button" class="btn btn-info">Save</button>');
});
$('#episode_tools_result').on('click', '.subtitles_mod_offset', function (e) {
e.preventDefault();
$('#subzero_offset_data_language').val($(this).attr("data-language"))
$('#subzero_offset_data_path').val($(this).attr("data-path"))
$('#episodeToolsModal').modal('hide');
$('#episodeSubtitleModOffsetModal')
.modal({
focus: false
});
});
$('#subtitles_mod_offset_form').on('submit', function (e) {
e.preventDefault();
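// The offset must be hh:mm:ss.mmm; '.', ',' or ':' are all accepted before the milliseconds, and "Show earlier" makes every component negative.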
let times = $('#subzero_offset_time').val().match(/(\d\d):(\d\d):(\d\d)[\.,:](\d\d\d)/);
if (times == null || times.length != 5) return false;
let sign = '';
if ($('input[name="subzero_offset_dir"]:checked').val() == "0") {
sign = '-';
}
const values = {
language: $('#subzero_offset_data_language').val(),
subtitlesPath: $('#subzero_offset_data_path').val(),
mod: 'shift_offset(h='+sign+times[1]+',m='+sign+times[2]+',s='+sign+times[3]+',ms='+sign+times[4]+')',
};
$.ajax({
url: "{{ url_for('api.submods') }}",
type: "POST",
dataType: "json",
data: values,
beforeSend: function () {
$('#subtitles_mod_offset_save_button').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
complete: function () {
$('#episodeSubtitleModOffsetModal').modal('hide');
}
});
});
$('#episodeSubtitleModOffsetModal').on('hidden.bs.modal', function (e) {
$('#subtitles_mod_offset_save_button_span').html('<button type="submit" id="subtitles_mod_offset_save_button" class="btn btn-info">Save</button>');
});
});
function seriesDetailsRefresh() {

@ -214,7 +214,7 @@
<input type="hidden" id="upload_audioLanguage" name="audioLanguage" value="" />
</div>
<div class="modal-footer">
<button type="submit" id="upload_save_button" class="btn btn-info">Save</button>
<span id="upload_save_button_span"><button type="submit" id="upload_save_button" class="btn btn-info">Save</button></span>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
</div>
</form>
@ -278,7 +278,7 @@
<input type="hidden" id="edit_radarrId" name="radarrId" value="" />
</div>
<div class="modal-footer">
<button type="submit" id="edit_save_button" class="btn btn-info">Save</button>
<button type="submit" id="edit_save_button" class="btn btn-info"><span id="edit_save_button_span">Save</span></button>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
</div>
</form>
@ -334,7 +334,7 @@
<tr>
<th style="text-align: left;">Language:</th>
<th style="text-align: left;">Filename:</th>
<th style="text-align: left;">Sync:</th>
<th style="text-align: left;">Tools:</th>
</tr>
</thead>
</table>
@ -346,6 +346,154 @@
</div>
</div>
</div>
<div id="movieSubtitleModColorModal" class="modal" tabindex="-1" role="dialog">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Chose Color</h5><br>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">&times;</span>
</button>
</div>
<form class="form" name="subtitles_mod_color_form" id="subtitles_mod_color_form">
<div class="modal-body">
<div class="container-fluid">
<div class="row">
<div class="col text-right">
<b>Color Name</b>
</div>
<div class="form-group col">
<select class="form-control selectpicker" id="subzero_color_name">
<option value="white">White</option>
<option value="light-grey">Light Grey</option>
<option value="red">Red</option>
<option value="green">Green</option>
<option value="yellow">Yellow</option>
<option value="blue">Blue</option>
<option value="magenta">Magenta</option>
<option value="cyan">Cyan</option>
<option value="black">Black</option>
<option value="dark-red">Dark Red</option>
<option value="dark-green">Dark Green</option>
<option value="dark-yellow">Dark Yellow</option>
<option value="dark-blue">Dark Blue</option>
<option value="dark-magenta">Dark Magenta</option>
<option value="dark-cyan">Dark Cyan</option>
<option value="dark-grey">Dark Grey</option>
</select>
</div>
<input type="hidden" id="subzero_color_data_language" value="" />
<input type="hidden" id="subzero_color_data_path" value="" />
</div>
</div>
</div>
<div class="modal-footer">
<span id="subtitles_mod_color_save_button_span"><button type="submit" id="subtitles_mod_color_save_button" class="btn btn-info">Save</button></span>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</form>
</div>
</div>
</div>
<div id="movieSubtitleModFpsModal" class="modal" tabindex="-1" role="dialog">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Convert frame rate of subtitle</h5><br>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">&times;</span>
</button>
</div>
<form class="form" name="subtitles_mod_fps_form" id="subtitles_mod_fps_form">
<div class="modal-body">
<div class="container-fluid">
<div class="row">
<div class="col text-right">
<b>From frame rate</b>
</div>
<div class="form-group col">
<input type="text" class="form-control" list="default_frame_rates" id="subzero_fps_from" minlength="2" maxlength="6" required autocomplete>
<datalist id="default_frame_rates">
<option value="23.976">23.976</option>
<option value="24">24</option>
<option value="25">25</option>
<option value="29.97">29.97</option>
<option value="30">30</option>
</datalist>
</div>
</div>
<div class="row">
<div class="col text-right">
<b>To frame rate</b>
</div>
<div class="form-group col">
<input type="text" class="form-control" list="default_frame_rates" id="subzero_fps_to" minlength="2" maxlength="6" required autocomplete>
</div>
</div>
<input type="hidden" id="subzero_fps_data_language" value="" />
<input type="hidden" id="subzero_fps_data_path" value="" />
</div>
</div>
<div class="modal-footer">
<span id="subtitles_mod_fps_save_button_span"><button type="submit" id="subtitles_mod_fps_save_button" class="btn btn-info">Save</button></span>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</form>
</div>
</div>
</div>
<div id="movieSubtitleModOffsetModal" class="modal" tabindex="-1" role="dialog">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Adjust All Times</h5><br>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">&times;</span>
</button>
</div>
<form class="form" name="subtitles_mod_offset_form" id="subtitles_mod_offset_form">
<div class="modal-body">
<div class="container-fluid">
<div class="row">
<div class="col text-right">
<b>Hour:min:sec:ms</b>
</div>
<div class="form-group col">
<input type="text" class="form-control" id="subzero_offset_time" minlength="12" maxlength="12" required value="00:00:00.100">
</div>
</div>
<div class="row justify-content-center">
<div class="form-group col-sm-7">
<label class="custom-control custom-radio">
<input type="radio" class="custom-control-input" id="subzero_offset_show_earlier" name="subzero_offset_dir" checked value="0">
<span class="custom-control-label" for="subzero_offset_show_earlier">Show earlier</span>
</label>
</div>
</div>
<div class="row justify-content-center">
<div class="form-group col-sm-7">
<label class="custom-control custom-radio">
<input type="radio" class="custom-control-input" id="subzero_offset_show_later" name="subzero_offset_dir" value="1">
<span class="custom-control-label" for="subzero_offset_show_later">Show later</span>
</label>
</div>
</div>
<input type="hidden" id="subzero_offset_data_language" value="" />
<input type="hidden" id="subzero_offset_data_path" value="" />
</div>
</div>
<div class="modal-footer">
<span id="subtitles_mod_offset_save_button_span"><button type="submit" id="subtitles_mod_offset_save_button" class="btn btn-info">Save</button></span>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</form>
</div>
</div>
</div>
{% endblock body %}
{% block tail %}
@ -678,15 +826,21 @@
processData: false,
contentType: false,
type: 'POST',
beforeSend: function () {
$('#edit_save_button_span').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
success: function(){
movieDetailsRefresh();
$('#editModal').modal('hide');
$('#edit_save_button_span').html('Save');
}
});
});
$('#uploadModal').on('hidden.bs.modal', function () {
$(this).find('form')[0].reset();
$(this).find('form').trigger('reset');
$('.custom-file-label').text('Choose file')
$('#upload_save_button_span').html('<button type="submit" id="upload_save_button" class="btn btn-info">Save</button>');
});
events.on('event', function(event) {
@ -827,7 +981,20 @@
{
data: null,
"render": function (data) {
return '<a href="" class="subtitles_sync badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-videopath="' + data.videopath + '"><i class="far fa-play-circle"></i></a>';
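// Same tool strip as on the episode page: one link per action, with language and path carried in data attributes.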
var tools = '';
tools += '<a href="" class="subtitles_sync badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-videopath="' + data.videopath + '" data-toggle="tooltip" data-placement="right" title="Sync"><i class="far fa-play-circle"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="remove_HI" data-toggle="tooltip" data-placement="right" title="Remove HI-tags"><i class="fa fa-deaf"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="remove_tags" data-toggle="tooltip" data-placement="right" title="Remove style tags"><i class="fa fa-code"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="OCR_fixes" data-toggle="tooltip" data-placement="right" title="OCR Fixes"><i class="fa fa-image"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="common" data-toggle="tooltip" data-placement="right" title="Common Fixes"><i class="fas fa-magic"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="fix_uppercase" data-toggle="tooltip" data-placement="right" title="Fix Uppercase"><i class="fa fa-text-height"></i></a> ';
tools += '<a href="" class="subtitles_mod badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-mod="reverse_rtl" data-toggle="tooltip" data-placement="right" title="Reverse RTL"><i class="fa fa-exchange-alt"></i></a> ';
tools += '<a href="" class="subtitles_mod_color badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-toggle="tooltip" data-placement="right" title="Adds color to your subtitles"><i class="fa fa-paint-brush"></i></a> ';
tools += '<a href="" class="subtitles_mod_fps badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-toggle="tooltip" data-placement="right" title="Change Frame Rate"><i class="fa fa-film"></i></a> ';
tools += '<a href="" class="subtitles_mod_offset badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-toggle="tooltip" data-placement="right" title="Adjust all times (show earlier/later)"><i class="fa fa-clock"></i></a> ';
return tools;
}
}
]
@ -855,6 +1022,7 @@
dataType: "json",
data: values,
beforeSend: function () {
$('.subtitles_sync').tooltip('hide')
cell.html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
complete: function (data) {
@ -862,6 +1030,163 @@
}
});
});
$('#movie_tools_result').on('click', '.subtitles_mod', function (e) {
e.preventDefault();
const values = {
language: $(this).attr("data-language"),
subtitlesPath: $(this).attr("data-path"),
mod: $(this).attr("data-mod"),
};
var cell = $(this).parent();
$.ajax({
url: "{{ url_for('api.submods') }}",
type: "POST",
dataType: "json",
data: values,
beforeSend: function () {
$('.subtitles_mod').tooltip('hide')
cell.html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
complete: function () {
$('#movieToolsModal').modal('hide');
}
});
});
$('#movie_tools_result').on('click', '.subtitles_mod_color', function (e) {
e.preventDefault();
$('#subzero_color_data_language').val($(this).attr("data-language"))
$('#subzero_color_data_path').val($(this).attr("data-path"))
$('#movieToolsModal').modal('hide');
$('#movieSubtitleModColorModal')
.modal({
focus: false
});
});
$('#subtitles_mod_color_form').on('submit', function (e) {
e.preventDefault();
const values = {
language: $('#subzero_color_data_language').val(),
subtitlesPath: $('#subzero_color_data_path').val(),
mod: 'color(name=' + $('#subzero_color_name').val() + ')',
};
$.ajax({
url: "{{ url_for('api.submods') }}",
type: "POST",
dataType: "json",
data: values,
beforeSend: function () {
$('#subtitles_mod_color_save_button').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
complete: function () {
$('#movieSubtitleModColorModal').modal('hide');
}
});
});
$('#movieSubtitleModColorModal').on('hidden.bs.modal', function (e) {
$('#subtitles_mod_color_save_button_span').html('<button type="submit" id="subtitles_mod_color_save_button" class="btn btn-info">Save</button>');
});
$('#movie_tools_result').on('click', '.subtitles_mod_fps', function (e) {
e.preventDefault();
$('#subzero_fps_data_language').val($(this).attr("data-language"))
$('#subzero_fps_data_path').val($(this).attr("data-path"))
$('#movieToolsModal').modal('hide');
$('#movieSubtitleModFpsModal')
.modal({
focus: false
});
});
$('#subtitles_mod_fps_form').on('submit', function (e) {
e.preventDefault();
const values = {
language: $('#subzero_fps_data_language').val(),
subtitlesPath: $('#subzero_fps_data_path').val(),
mod: 'change_FPS(from=' + $('#subzero_fps_from').val() + ',to=' + $('#subzero_fps_to').val() + ')',
};
$.ajax({
url: "{{ url_for('api.submods') }}",
type: "POST",
dataType: "json",
data: values,
beforeSend: function () {
$('#subtitles_mod_fps_save_button').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
complete: function () {
$('#movieSubtitleModFpsModal').modal('hide');
}
});
});
$('#movieSubtitleModFpsModal').on('hidden.bs.modal', function (e) {
$('#subtitles_mod_fps_save_button_span').html('<button type="submit" id="subtitles_mod_fps_save_button" class="btn btn-info">Save</button>');
});
$('#movie_tools_result').on('click', '.subtitles_mod_offset', function (e) {
e.preventDefault();
$('#subzero_offset_data_language').val($(this).attr("data-language"))
$('#subzero_offset_data_path').val($(this).attr("data-path"))
$('#movieToolsModal').modal('hide');
$('#movieSubtitleModOffsetModal')
.modal({
focus: false
});
});
$('#subtitles_mod_offset_form').on('submit', function (e) {
e.preventDefault();
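// Validate the hh:mm:ss.mmm offset and flip the sign when "Show earlier" is selected.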
let times = $('#subzero_offset_time').val().match(/(\d\d):(\d\d):(\d\d)[\.,:](\d\d\d)/);
if (times == null || times.length != 5) return false;
let sign = '';
if ($('input[name="subzero_offset_dir"]:checked').val() == "0") {
sign = '-';
}
const values = {
language: $('#subzero_offset_data_language').val(),
subtitlesPath: $('#subzero_offset_data_path').val(),
mod: 'shift_offset(h='+sign+times[1]+',m='+sign+times[2]+',s='+sign+times[3]+',ms='+sign+times[4]+')',
};
$.ajax({
url: "{{ url_for('api.submods') }}",
type: "POST",
dataType: "json",
data: values,
beforeSend: function () {
$('#subtitles_mod_offset_save_button').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
complete: function () {
$('#movieSubtitleModOffsetModal').modal('hide');
}
});
});
$('#movieSubtitleModOffsetModal').on('hidden.bs.modal', function (e) {
$('#subtitles_mod_offset_save_button_span').html('<button type="submit" id="subtitles_mod_offset_save_button" class="btn btn-info">Save</button>');
});
});
function movieDetailsRefresh() {

@ -27,6 +27,7 @@
<th>Subtitles Languages</th>
<th>Hearing-Impaired</th>
<th>Forced</th>
<th>Missing Subtitles</th>
<th></th>
</tr>
</thead>
@ -89,7 +90,7 @@
<input type="hidden" id="edit_radarrId" name="radarrId" value=""/>
</div>
<div class="modal-footer">
<button type="submit" id="edit_save_button" class="btn btn-info">Save</button>
<button type="submit" id="edit_save_button" class="btn btn-info"><span id="edit_save_button_span">Save</span></button>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
</div>
</form>
@ -209,6 +210,22 @@
data: "forced",
className: "dt-center"
},
{
data: "missing_subtitles",
render: function (data) {
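// Render one badge per missing language, with the full language name exposed as a tooltip.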
if (data && data !== 'None') {
var languages = '';
data.forEach(appendFunc);
return languages;
} else {
return null
}
function appendFunc(value) {
languages = languages + '<span class="badge badge-secondary" data-toggle="tooltip" data-placement="right" title="' + value.name + '">' + value.code2 + '</span> ';
}
}
},
{
data: null,
render: function (data) {
@ -261,8 +278,12 @@
processData: false,
contentType: false,
type: 'POST',
beforeSend: function () {
$('#edit_save_button_span').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
success: function () {
$('#editModal').modal('hide');
$('#edit_save_button_span').html('Save');
}
});
});

@ -89,7 +89,7 @@
<input type="hidden" id="edit_sonarrSeriesId" name="sonarrSeriesId" value=""/>
</div>
<div class="modal-footer">
<button type="submit" id="edit_save_button" class="btn btn-info">Save</button>
<button type="submit" id="edit_save_button" class="btn btn-info"><span id="edit_save_button_span">Save</span></button>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
</div>
</form>
@ -268,8 +268,12 @@
processData: false,
contentType: false,
type: 'POST',
beforeSend: function () {
$('#edit_save_button_span').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
},
success: function () {
$('#editModal').modal('hide');
$('#edit_save_button_span').html('Save');
}
});
});

@ -48,6 +48,22 @@
</table>
</div>
</div>
<br>
<h4>Options</h4>
<hr/>
<div class="row">
<div class="col-sm-3 text-right">
<b>Do not notify for manual actions</b>
</div>
<div class="form-group col-sm-8">
<label class="custom-control custom-checkbox">
<input type="checkbox" class="custom-control-input" id="settings-general-dont_notify_manual_actions"
name="settings-general-dont_notify_manual_actions">
<span class="custom-control-label" for="settings-general-dont_notify_manual_actions"></span>
</label>
<label>Suppress notifications when manually downloading or uploading subtitles.</label>
</div>
</div>
</form>
</div>
@ -120,6 +136,9 @@
$('#save_button_checkmark').hide();
$('#save_button').prop('disabled', true).css('cursor', 'not-allowed');
// Set Checkbox input values
$('#settings-general-dont_notify_manual_actions').prop('checked', {{'true' if settings.general.getboolean('dont_notify_manual_actions') else 'false'}});
var table = $('#notification_providers').DataTable({
select: {
style: 'single'
@ -192,6 +211,11 @@
$('#save_button').on('click', function() {
var formdata = new FormData(document.getElementById("settings_form"));
// Make sure all checkbox input are sent with true/false value
$('input[type=checkbox]').each(function () {
formdata.set($(this).prop('id'), $(this).prop('checked'));
});
formdata.append('notification_providers', JSON.stringify(table.rows().data().toArray()));
$.ajax({

@ -272,6 +272,117 @@
<label>Re-encode downloaded subtitles to UTF-8. Should be left enabled in most cases.</label>
</div>
</div>
<div class="row">
<div class="col-sm-3 text-right">
<b>Hearing Impaired</b>
</div>
<div class="form-group col-sm-8">
<label class="custom-control custom-checkbox">
<input type="checkbox" class="custom-control-input subzero_mods" id="remove_HI">
<span class="custom-control-label"></span>
</label>
<label>Removes tags, text, and characters from subtitles that are meant for hearing-impaired people.</label>
</div>
</div>
<div class="row">
<div class="col-sm-3 text-right">
<b>Remove Tags</b>
</div>
<div class="form-group col-sm-8">
<label class="custom-control custom-checkbox">
<input type="checkbox" class="custom-control-input subzero_mods" id="remove_tags">
<span class="custom-control-label"></span>
</label>
<label>Removes all possible style tags from the subtitle, such as font, bold, color, etc.</label>
</div>
</div>
<div class="row">
<div class="col-sm-3 text-right">
<b>OCR Fixes</b>
</div>
<div class="form-group col-sm-8">
<label class="custom-control custom-checkbox">
<input type="checkbox" class="custom-control-input subzero_mods" id="OCR_fixes">
<span class="custom-control-label"></span>
</label>
<label>Fix issues that happen when a subtitle gets converted from bitmap to text through OCR.</label>
</div>
</div>
<div class="row">
<div class="col-sm-3 text-right">
<b>Common Fixes</b>
</div>
<div class="form-group col-sm-8">
<label class="custom-control custom-checkbox">
<input type="checkbox" class="custom-control-input subzero_mods" id="common">
<span class="custom-control-label"></span>
</label>
<label>Fix common mistakes and whitespace/punctuation issues in subtitles.</label>
</div>
</div>
<div class="row">
<div class="col-sm-3 text-right">
<b>Fix Uppercase</b>
</div>
<div class="form-group col-sm-8">
<label class="custom-control custom-checkbox">
<input type="checkbox" class="custom-control-input subzero_mods" id="fix_uppercase">
<span class="custom-control-label"></span>
</label>
<label>Tries to make subtitles that are completely uppercase readable.</label>
</div>
</div>
<div class="row">
<div class="col-sm-3 text-right">
<b>Color</b>
</div>
<div class="form-group col-sm-8">
<label class="custom-control custom-checkbox">
<input type="checkbox" class="custom-control-input subzero_mods" id="subzero_color">
<span class="custom-control-label"></span>
</label>
<label>Adds color to your subtitles (for playback devices/software that don't ship their own color modes; only works for players that support color tags).</label>
</div>
</div>
<div id="subzero_color_div">
<div class="row">
<div class="col-sm-4 text-right">
<b>Color Name</b>
</div>
<div class="form-group col-sm-5">
<select class="form-control selectpicker" id="subzero_color_name">
<option value="white">White</option>
<option value="light-grey">Light Grey</option>
<option value="red">Red</option>
<option value="green">Green</option>
<option value="yellow">Yellow</option>
<option value="blue">Blue</option>
<option value="magenta">Magenta</option>
<option value="cyan">Cyan</option>
<option value="black">Black</option>
<option value="dark-red">Dark Red</option>
<option value="dark-green">Dark Green</option>
<option value="dark-yellow">Dark Yellow</option>
<option value="dark-blue">Dark Blue</option>
<option value="dark-magenta">Dark Magenta</option>
<option value="dark-cyan">Dark Cyan</option>
<option value="dark-grey">Dark Grey</option>
</select>
</div>
</div>
</div>
<div class="row">
<div class="col-sm-3 text-right">
<b>Reverse RTL</b>
</div>
<div class="form-group col-sm-8">
<label class="custom-control custom-checkbox">
<input type="checkbox" class="custom-control-input subzero_mods" id="reverse_rtl">
<span class="custom-control-label"></span>
</label>
<label>Reverses the punctuation in right-to-left subtitles for problematic playback devices.</label>
</div>
</div>
{% if not os.startswith('win') %}
<div class="row">
<div class="col-sm-3 text-right">
@ -519,6 +630,14 @@
}
});
$('#subzero_color').on('change', function () {
if ($(this).prop('checked')) {
$('#subzero_color_div').show();
} else {
$('#subzero_color_div').hide();
}
});
$('#settings-general-chmod_enabled').on('change', function () {
if ($(this).prop('checked')) {
$('#chmod_div').show();
@ -566,9 +685,35 @@
$('#settings-general-use_postprocessing_threshold').prop('checked', {{'true' if settings.general.getboolean('use_postprocessing_threshold') else 'false'}}).trigger('change');
$('#settings-general-use_postprocessing_threshold_movie').prop('checked', {{'true' if settings.general.getboolean('use_postprocessing_threshold_movie') else 'false'}}).trigger('change');
$('.subzero_mods').prop('checked', false).trigger('change');
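// Restore saved mods: a color(name=...) entry maps back to the Color checkbox and its colour picker.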
{% if settings.general.subzero_mods %}
$('{{settings.general.subzero_mods}}'.split(',')).each( function(i, item) {
if (item.startsWith('color'))
{
var color_name = 'white';
var m = item.match(/color\(name=(.*)\)/);
if (m != null && m.length > 1) color_name = m[1];
$('#subzero_color_name').val(color_name).trigger('change');
item = 'subzero_color';
}
$("[id=" + item + "]").prop('checked', true).trigger('change');
})
{% endif %}
$('#save_button').on('click', function () {
var formdata = new FormData(document.getElementById("settings_form"));
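// Collect the enabled mods into a comma-separated list; the Color checkbox is serialized as color(name=<selected colour>).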
var enabled_subzero_mods = $(".subzero_mods").map(function () {
if ($(this).prop('checked')) {
if ($(this).attr('id') == 'subzero_color')
{
return 'color(name=' + $('#subzero_color_name').val() + ')';
}
else return $(this).attr('id');
}
}).get().join(',');
formdata.append('settings-general-subzero_mods', enabled_subzero_mods)
// Make sure all checkbox input are sent with true/false value
$('input[type=checkbox]').each(function () {
formdata.set($(this).prop('id'), $(this).prop('checked'));
