Merge branch 'development' of https://github.com/morpheus65535/bazarr into development
commit
d875dc7733
@ -0,0 +1,30 @@
|
||||
# coding=utf-8
|
||||
|
||||
# only primitive types can be specified here
# for other derived values, use constants.py

# bazarr environment variable names
ENV_STOPFILE = 'STOPFILE'
ENV_RESTARTFILE = 'RESTARTFILE'
ENV_BAZARR_ROOT_DIR = 'BAZARR_ROOT'

# bazarr subdirectories (relative to the bazarr root directory)
DIR_BACKUP = 'backup'
DIR_CACHE = 'cache'
DIR_CONFIG = 'config'
DIR_DB = 'db'
DIR_LOG = 'log'
DIR_RESTORE = 'restore'

# bazarr special files
FILE_LOG = 'bazarr.log'
FILE_RESTART = 'bazarr.restart'
FILE_STOP = 'bazarr.stop'

# bazarr exit codes (negative values signal abnormal termination)
EXIT_NORMAL = 0
EXIT_INTERRUPT = -100
EXIT_VALIDATION_ERROR = -101
EXIT_CONFIG_CREATE_ERROR = -102
EXIT_PYTHON_UPGRADE_NEEDED = -103
EXIT_REQUIREMENTS_ERROR = -104
|
@ -0,0 +1,49 @@
|
||||
# coding=utf-8
|
||||
|
||||
# only methods can be specified here that do not cause other modules to be loaded
|
||||
# for other methods that use settings, etc., use utilities/helper.py
|
||||
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from literals import *
|
||||
|
||||
def get_bazarr_dir(sub_dir):
    """Return the path of `sub_dir` below the bazarr root directory.

    The root directory is read from the BAZARR_ROOT environment variable;
    raises KeyError if that variable is not set.
    """
    return os.path.join(os.environ[ENV_BAZARR_ROOT_DIR], sub_dir)
|
||||
|
||||
def make_bazarr_dir(sub_dir):
    """Create `sub_dir` below the bazarr root directory if it does not exist.

    Uses os.makedirs(..., exist_ok=True) instead of an exists()/mkdir() pair:
    this avoids the check-then-create race and also creates any missing
    parent directories.
    """
    os.makedirs(get_bazarr_dir(sub_dir), exist_ok=True)
|
||||
|
||||
def get_log_file_path():
    """Return the full path of the bazarr log file (log dir + bazarr.log)."""
    return os.path.join(get_bazarr_dir(DIR_LOG), FILE_LOG)
|
||||
|
||||
def get_stop_file_path():
    """Return the stop-file path from the STOPFILE environment variable.

    Raises KeyError if the variable is not set.
    """
    return os.environ[ENV_STOPFILE]
|
||||
|
||||
def get_restart_file_path():
    """Return the restart-file path from the RESTARTFILE environment variable.

    Raises KeyError if the variable is not set.
    """
    return os.environ[ENV_RESTARTFILE]
|
||||
|
||||
def stop_bazarr(status_code=EXIT_NORMAL, exit_main=True):
    """Request a bazarr shutdown by writing `status_code` to the stop file.

    :param int status_code: exit code the watching process should use.
    :param bool exit_main: when True, also raise SystemExit(status_code)
        to terminate the current (main) process.
    """
    try:
        # write out status code for final exit; the context manager closes
        # the file, so no explicit close() is needed
        with open(get_stop_file_path(), 'w', encoding='UTF-8') as file:
            file.write(f'{status_code}\n')
    except Exception as e:
        # best-effort: log and continue shutting down even if the file
        # could not be created
        logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
    logging.info('Bazarr is being shutdown...')
    if exit_main:
        raise SystemExit(status_code)
|
||||
|
||||
def restart_bazarr():
    """Request a bazarr restart by touching the restart file, then exit.

    Always raises SystemExit(EXIT_NORMAL), even if the restart file could
    not be created (the failure is only logged).
    """
    try:
        Path(get_restart_file_path()).touch()
    except Exception as e:
        logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
    logging.info('Bazarr is being restarted...')
    raise SystemExit(EXIT_NORMAL)
|
||||
|
@ -0,0 +1,19 @@
|
||||
# Bazarr dependencies
|
||||
subliminal_patch
|
||||
subzero
|
||||
py-pretty==1 # modified version to support Python 3
|
||||
|
||||
# Bazarr modified dependencies
|
||||
signalr-client-threads==0.0.12 # Modified to work with Sonarr v3. Not used anymore with v4
|
||||
Flask-Compress==1.14 # modified to import brotli only if required
|
||||
|
||||
# Required-by: signalr-client-threads
|
||||
sseclient==0.0.27 # Modified to work with Sonarr v3
|
||||
|
||||
# Required-by: subliminal_patch
|
||||
deathbycaptcha # unknown version, only found on gist
|
||||
git+https://github.com/pannal/libfilebot#egg=libfilebot
|
||||
git+https://github.com/RobinDavid/pyADS.git@28a2f6dbfb357f85b2c2f49add770b336e88840d#egg=pyads
|
||||
py7zr==0.7.0 # modified to prevent importing of modules that can't be vendored
|
||||
subscene-api==1.0.0 # modified specifically for Bazarr
|
||||
subliminal==2.1.0 # modified specifically for Bazarr
|
@ -0,0 +1 @@
|
||||
# Version string of this library; read by the packaging/update machinery.
__version__ = "1.14"
|
@ -0,0 +1,27 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from babelfish import LanguageReverseConverter
|
||||
|
||||
from ..exceptions import ConfigurationError
|
||||
|
||||
|
||||
class LegendasTVConverter(LanguageReverseConverter):
    """Convert between babelfish language tuples and LegendasTV numeric codes."""

    def __init__(self):
        # LegendasTV numeric code -> (alpha3,) or (alpha3, country) tuple
        self.from_legendastv = {1: ('por', 'BR'), 2: ('eng',), 3: ('spa',), 4: ('fra',), 5: ('deu',), 6: ('jpn',),
                                7: ('dan',), 8: ('nor',), 9: ('swe',), 10: ('por',), 11: ('ara',), 12: ('ces',),
                                13: ('zho',), 14: ('kor',), 15: ('bul',), 16: ('ita',), 17: ('pol',)}
        # inverse mapping for convert()
        self.to_legendastv = {v: k for k, v in self.from_legendastv.items()}
        self.codes = set(self.from_legendastv.keys())

    def convert(self, alpha3, country=None, script=None):
        """Return the LegendasTV code for (`alpha3`, `country`).

        Falls back to the country-less entry when no exact match exists.

        :raises ConfigurationError: if the language is not supported.
        """
        if (alpha3, country) in self.to_legendastv:
            return self.to_legendastv[(alpha3, country)]
        if (alpha3,) in self.to_legendastv:
            return self.to_legendastv[(alpha3,)]

        raise ConfigurationError('Unsupported language code for legendastv: %s, %s, %s' % (alpha3, country, script))

    def reverse(self, legendastv):
        """Return the language tuple for a LegendasTV numeric code.

        :raises ConfigurationError: if the code is unknown.
        """
        if legendastv in self.from_legendastv:
            return self.from_legendastv[legendastv]

        raise ConfigurationError('Unsupported language number for legendastv: %s' % legendastv)
|
@ -0,0 +1,229 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from rebulk.loose import ensure_list
|
||||
|
||||
from .score import get_equivalent_release_groups, score_keys
|
||||
from .video import Episode, Movie
|
||||
from .utils import sanitize, sanitize_release_group
|
||||
|
||||
|
||||
def series_matches(video, title=None, **kwargs):
    """Whether the `video` matches the series title.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param str title: the series name.
    :return: whether there's a match (None for non-Episode videos).
    :rtype: bool

    """
    if isinstance(video, Episode):
        # compare against the main series name and all known alternatives
        return video.series and sanitize(title) in (
            sanitize(name) for name in [video.series] + video.alternative_series
        )
|
||||
|
||||
|
||||
def title_matches(video, title=None, episode_title=None, **kwargs):
    """Whether the movie matches the movie `title` or the series matches the `episode_title`.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param str title: the movie title.
    :param str episode_title: the series episode title.
    :return: whether there's a match (None for other video types).
    :rtype: bool

    """
    if isinstance(video, Episode):
        return video.title and sanitize(episode_title) == sanitize(video.title)
    if isinstance(video, Movie):
        return video.title and sanitize(title) == sanitize(video.title)
|
||||
|
||||
|
||||
def season_matches(video, season=None, **kwargs):
    """Whether the episode matches the `season`.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param int season: the episode season.
    :return: whether there's a match (None for non-Episode videos).
    :rtype: bool

    """
    if isinstance(video, Episode):
        return video.season and season == video.season
|
||||
|
||||
|
||||
def episode_matches(video, episode=None, **kwargs):
    """Whether the episode matches the `episode`.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param episode: the episode number(s).
    :type episode: list of int or int
    :return: whether there's a match (None for non-Episode videos).
    :rtype: bool

    """
    if isinstance(video, Episode):
        # guessit may return a single int or a list; normalize before comparing
        return video.episodes and ensure_list(episode) == video.episodes
|
||||
|
||||
|
||||
def year_matches(video, year=None, partial=False, **kwargs):
    """Whether the video matches the `year`.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param int year: the video year.
    :param bool partial: whether or not the guess is partial.
    :return: whether there's a match
    :rtype: bool

    """
    if video.year and year == video.year:
        return True
    if isinstance(video, Episode):
        # count "no year" as an information for original (non-partial) series
        return not partial and video.original_series and not year
|
||||
|
||||
|
||||
def country_matches(video, country=None, partial=False, **kwargs):
    """Whether the video matches the `country`.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param country: the video country.
    :type country: :class:`~babelfish.country.Country`
    :param bool partial: whether or not the guess is partial.
    :return: whether there's a match
    :rtype: bool

    """
    if video.country and country == video.country:
        return True

    if isinstance(video, Episode):
        # count "no country" as an information for original (non-partial) series
        return not partial and video.original_series and not country

    if isinstance(video, Movie):
        # count "no country" as an information
        return not video.country and not country
|
||||
|
||||
|
||||
def release_group_matches(video, release_group=None, **kwargs):
    """Whether the video matches the `release_group`.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param str release_group: the video release group.
    :return: whether there's a match
    :rtype: bool

    """
    # compare sanitized forms, accepting any known-equivalent release group
    return (video.release_group and release_group and
            any(r in sanitize_release_group(release_group)
                for r in get_equivalent_release_groups(sanitize_release_group(video.release_group))))
|
||||
|
||||
|
||||
def streaming_service_matches(video, streaming_service=None, **kwargs):
    """Whether the video matches the `streaming_service`.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param str streaming_service: the video streaming service
    :return: whether there's a match
    :rtype: bool

    """
    return video.streaming_service and streaming_service == video.streaming_service
|
||||
|
||||
|
||||
def resolution_matches(video, screen_size=None, **kwargs):
    """Whether the video matches the `screen_size` (resolution).

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param str screen_size: the video resolution
    :return: whether there's a match
    :rtype: bool

    """
    return video.resolution and screen_size == video.resolution
|
||||
|
||||
|
||||
def source_matches(video, source=None, **kwargs):
    """Whether the video matches the `source`.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param str source: the video source
    :return: whether there's a match
    :rtype: bool

    """
    return video.source and source == video.source
|
||||
|
||||
|
||||
def video_codec_matches(video, video_codec=None, **kwargs):
    """Whether the video matches the `video_codec`.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param str video_codec: the video codec
    :return: whether there's a match
    :rtype: bool

    """
    return video.video_codec and video_codec == video.video_codec
|
||||
|
||||
|
||||
def audio_codec_matches(video, audio_codec=None, **kwargs):
    """Whether the video matches the `audio_codec`.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param str audio_codec: the video audio codec
    :return: whether there's a match
    :rtype: bool

    """
    return video.audio_codec and audio_codec == video.audio_codec
|
||||
|
||||
|
||||
#: Available matches functions, keyed by the score key they decide
matches_manager = {
    'series': series_matches,
    'title': title_matches,
    'season': season_matches,
    'episode': episode_matches,
    'year': year_matches,
    'country': country_matches,
    'release_group': release_group_matches,
    'streaming_service': streaming_service_matches,
    'resolution': resolution_matches,
    'source': source_matches,
    'video_codec': video_codec_matches,
    'audio_codec': audio_codec_matches
}
|
||||
|
||||
|
||||
def guess_matches(video, guess, partial=False):
    """Get matches between a `video` and a `guess`.

    If a guess is `partial`, the absence information won't be counted as a match.

    :param video: the video.
    :type video: :class:`~subliminal.video.Video`
    :param guess: the guess.
    :type guess: dict
    :param bool partial: whether or not the guess is partial.
    :return: matches between the `video` and the `guess`.
    :rtype: set

    """
    # run every scoreable key that has a registered matcher against the guess
    matches = set()
    for key in score_keys:
        if key in matches_manager and matches_manager[key](video, partial=partial, **guess):
            matches.add(key)

    return matches
|
@ -0,0 +1,135 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
from zipfile import ZipFile
|
||||
|
||||
from babelfish import Language
|
||||
from guessit import guessit
|
||||
from requests import Session
|
||||
from six.moves import urllib
|
||||
|
||||
from . import Provider
|
||||
from ..cache import EPISODE_EXPIRATION_TIME, region
|
||||
from ..exceptions import ProviderError
|
||||
from ..matches import guess_matches
|
||||
from ..subtitle import Subtitle, fix_line_ending
|
||||
from ..video import Episode
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ArgenteamSubtitle(Subtitle):
    """Subtitle found on argenteam.net."""

    provider_name = 'argenteam'

    def __init__(self, language, download_link, series, season, episode, release, version):
        super(ArgenteamSubtitle, self).__init__(language, download_link)
        self.download_link = download_link
        self.series = series
        self.season = season
        self.episode = episode
        self.release = release
        self.version = version

    @property
    def id(self):
        # the download link uniquely identifies the subtitle
        return self.download_link

    @property
    def info(self):
        # human-readable name: the decoded last path segment of the link
        return urllib.parse.unquote(self.download_link.rsplit('/')[-1])

    def get_matches(self, video):
        """Return the set of score keys this subtitle matches for `video`."""
        matches = guess_matches(video, {
            'title': self.series,
            'season': self.season,
            'episode': self.episode,
            'release_group': self.version
        })

        # resolution: matched by substring of the release version string
        if video.resolution and self.version and video.resolution in self.version.lower():
            matches.add('resolution')

        # refine with a guessit parse of the version string (partial guess)
        matches |= guess_matches(video, guessit(self.version, {'type': 'episode'}), partial=True)
        return matches
|
||||
|
||||
|
||||
class ArgenteamProvider(Provider):
    """Provider for argenteam.net (Spanish subtitles, episodes only)."""

    provider_name = 'argenteam'
    language = Language.fromalpha2('es')
    languages = {language}
    video_types = (Episode,)
    server_url = "http://argenteam.net/api/v1/"
    subtitle_class = ArgenteamSubtitle

    def __init__(self):
        self.session = None

    def initialize(self):
        self.session = Session()
        self.session.headers['User-Agent'] = self.user_agent

    def terminate(self):
        self.session.close()

    @region.cache_on_arguments(expiration_time=EPISODE_EXPIRATION_TIME, should_cache_fn=lambda value: value)
    def search_episode_id(self, series, season, episode):
        """Search the episode id from the `series`, `season` and `episode`.

        :param str series: series of the episode.
        :param int season: season of the episode.
        :param int episode: episode number.
        :return: the episode id, if any.
        :rtype: int or None

        """
        # make the search
        query = '%s S%#02dE%#02d' % (series, season, episode)
        logger.info('Searching episode id for %r', query)
        r = self.session.get(self.server_url + 'search', params={'q': query}, timeout=10)
        r.raise_for_status()
        results = json.loads(r.text)
        # only accept an unambiguous (single) result
        if results['total'] == 1:
            return results['results'][0]['id']

        logger.error('No episode id found for %r', series)

    def query(self, series, season, episode):
        """Return all subtitles for one episode (empty list if not found)."""
        episode_id = self.search_episode_id(series, season, episode)
        if episode_id is None:
            return []

        response = self.session.get(self.server_url + 'episode', params={'id': episode_id}, timeout=10)
        response.raise_for_status()
        content = json.loads(response.text)
        subtitles = []
        for r in content['releases']:
            for s in r['subtitles']:
                subtitle = self.subtitle_class(self.language, s['uri'], series, season, episode, r['team'], r['tags'])
                logger.debug('Found subtitle %r', subtitle)
                subtitles.append(subtitle)

        return subtitles

    def list_subtitles(self, video, languages):
        # try the main series title first, then alternatives; stop at the
        # first title that yields results
        titles = [video.series] + video.alternative_series
        for title in titles:
            subs = self.query(title, video.season, video.episode)
            if subs:
                return subs

        return []

    def download_subtitle(self, subtitle):
        # download as a zip
        logger.info('Downloading subtitle %r', subtitle)
        r = self.session.get(subtitle.download_link, timeout=10)
        r.raise_for_status()

        # open the zip; expect exactly one subtitle file inside
        with ZipFile(io.BytesIO(r.content)) as zf:
            if len(zf.namelist()) > 1:
                raise ProviderError('More than one file to unzip')

            subtitle.content = fix_line_ending(zf.read(zf.namelist()[0]))
|
@ -0,0 +1,514 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
from babelfish import Language, language_converters
|
||||
from datetime import datetime, timedelta
|
||||
from dogpile.cache.api import NO_VALUE
|
||||
from guessit import guessit
|
||||
import pytz
|
||||
import rarfile
|
||||
from rarfile import RarFile, is_rarfile
|
||||
from rebulk.loose import ensure_list
|
||||
from requests import Session
|
||||
from zipfile import ZipFile, is_zipfile
|
||||
|
||||
from . import ParserBeautifulSoup, Provider
|
||||
from ..cache import SHOW_EXPIRATION_TIME, region
|
||||
from ..exceptions import AuthenticationError, ConfigurationError, ProviderError, ServiceUnavailable
|
||||
from ..matches import guess_matches
|
||||
from ..subtitle import SUBTITLE_EXTENSIONS, Subtitle, fix_line_ending
|
||||
from ..utils import sanitize
|
||||
from ..video import Episode, Movie
|
||||
|
||||
logger = logging.getLogger(__name__)

# register the LegendasTV language converter with babelfish
language_converters.register('legendastv = subliminal.converters.legendastv:LegendasTVConverter')

# Configure :mod:`rarfile` to use the same path separator as :mod:`zipfile`
rarfile.PATH_SEP = '/'

#: Conversion map for types (site letter -> video type)
type_map = {'M': 'movie', 'S': 'episode', 'C': 'episode'}

#: BR title season parsing regex
season_re = re.compile(r' - (?P<season>\d+)(\xaa|a|st|nd|rd|th) (temporada|season)', re.IGNORECASE)

#: Downloads parsing regex
downloads_re = re.compile(r'(?P<downloads>\d+) downloads')

#: Rating parsing regex
rating_re = re.compile(r'nota (?P<rating>\d+)')

#: Timestamp parsing regex (dd/mm/yyyy - hh:mm)
timestamp_re = re.compile(r'(?P<day>\d+)/(?P<month>\d+)/(?P<year>\d+) - (?P<hour>\d+):(?P<minute>\d+)')

#: Title with year/country regex
title_re = re.compile(r'^(?P<series>.*?)(?: \((?:(?P<year>\d{4})|(?P<country>[A-Z]{2}))\))?$')

#: Cache key for releases
releases_key = __name__ + ':releases|{archive_id}|{archive_name}'
|
||||
|
||||
|
||||
class LegendasTVArchive(object):
    """LegendasTV Archive.

    :param str id: identifier.
    :param str name: name.
    :param bool pack: contains subtitles for multiple episodes.
    :param bool featured: whether the archive is featured.
    :param str link: link.
    :param int downloads: download count.
    :param int rating: rating (0-10).
    :param timestamp: timestamp.
    :type timestamp: datetime.datetime
    """

    def __init__(self, id, name, pack, featured, link, downloads=0, rating=0, timestamp=None):
        #: Identifier
        self.id = id

        #: Name
        self.name = name

        #: Pack
        self.pack = pack

        #: Featured
        self.featured = featured

        #: Link
        self.link = link

        #: Download count
        self.downloads = downloads

        #: Rating (0-10)
        self.rating = rating

        #: Timestamp
        self.timestamp = timestamp

        #: Compressed content as :class:`rarfile.RarFile` or :class:`zipfile.ZipFile`
        self.content = None

    def __repr__(self):
        return '<%s [%s] %r>' % (self.__class__.__name__, self.id, self.name)
|
||||
|
||||
|
||||
class LegendasTVSubtitle(Subtitle):
    """LegendasTV Subtitle."""

    provider_name = 'legendastv'

    def __init__(self, language, type, title, year, imdb_id, season, archive, name):
        super(LegendasTVSubtitle, self).__init__(language, page_link=archive.link)
        self.type = type
        self.title = title
        self.year = year
        self.imdb_id = imdb_id
        self.season = season
        self.archive = archive
        self.name = name

    @property
    def id(self):
        # unique per archive + file name inside the archive
        return '%s-%s' % (self.archive.id, self.name.lower())

    @property
    def info(self):
        return self.name

    def get_matches(self, video, hearing_impaired=False):
        """Return the set of score keys this subtitle matches for `video`."""
        matches = guess_matches(video, {
            'title': self.title,
            'year': self.year
        })

        # episode
        if isinstance(video, Episode) and self.type == 'episode':
            # imdb_id
            if video.series_imdb_id and self.imdb_id == video.series_imdb_id:
                matches.add('series_imdb_id')

        # movie
        elif isinstance(video, Movie) and self.type == 'movie':
            # imdb_id
            if video.imdb_id and self.imdb_id == video.imdb_id:
                matches.add('imdb_id')

        # refine with a guessit parse of the release name
        matches |= guess_matches(video, guessit(self.name, {'type': self.type}))

        return matches
|
||||
|
||||
|
||||
class LegendasTVProvider(Provider):
|
||||
"""LegendasTV Provider.
|
||||
|
||||
:param str username: username.
|
||||
:param str password: password.
|
||||
"""
|
||||
|
||||
languages = {Language.fromlegendastv(l) for l in language_converters['legendastv'].codes}
|
||||
server_url = 'http://legendas.tv/'
|
||||
subtitle_class = LegendasTVSubtitle
|
||||
|
||||
def __init__(self, username=None, password=None):
|
||||
|
||||
# Provider needs UNRAR installed. If not available raise ConfigurationError
|
||||
try:
|
||||
rarfile.custom_check([rarfile.UNRAR_TOOL], True)
|
||||
except rarfile.RarExecError:
|
||||
raise ConfigurationError('UNRAR tool not available')
|
||||
|
||||
if any((username, password)) and not all((username, password)):
|
||||
raise ConfigurationError('Username and password must be specified')
|
||||
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.logged_in = False
|
||||
self.session = None
|
||||
|
||||
def initialize(self):
|
||||
self.session = Session()
|
||||
self.session.headers['User-Agent'] = self.user_agent
|
||||
|
||||
# login
|
||||
if self.username and self.password:
|
||||
logger.info('Logging in')
|
||||
data = {'_method': 'POST', 'data[User][username]': self.username, 'data[User][password]': self.password}
|
||||
r = self.session.post(self.server_url + 'login', data, allow_redirects=False, timeout=10)
|
||||
raise_for_status(r)
|
||||
|
||||
soup = ParserBeautifulSoup(r.content, ['html.parser'])
|
||||
if soup.find('div', {'class': 'alert-error'}, string=re.compile(u'Usuário ou senha inválidos')):
|
||||
raise AuthenticationError(self.username)
|
||||
|
||||
logger.debug('Logged in')
|
||||
self.logged_in = True
|
||||
|
||||
def terminate(self):
|
||||
# logout
|
||||
if self.logged_in:
|
||||
logger.info('Logging out')
|
||||
r = self.session.get(self.server_url + 'users/logout', allow_redirects=False, timeout=10)
|
||||
raise_for_status(r)
|
||||
logger.debug('Logged out')
|
||||
self.logged_in = False
|
||||
|
||||
self.session.close()
|
||||
|
||||
@staticmethod
|
||||
def is_valid_title(title, title_id, sanitized_title, season, year):
|
||||
"""Check if is a valid title."""
|
||||
sanitized_result = sanitize(title['title'])
|
||||
if sanitized_result != sanitized_title:
|
||||
logger.debug("Mismatched title, discarding title %d (%s)",
|
||||
title_id, sanitized_result)
|
||||
return
|
||||
|
||||
# episode type
|
||||
if season:
|
||||
# discard mismatches on type
|
||||
if title['type'] != 'episode':
|
||||
logger.debug("Mismatched 'episode' type, discarding title %d (%s)", title_id, sanitized_result)
|
||||
return
|
||||
|
||||
# discard mismatches on season
|
||||
if 'season' not in title or title['season'] != season:
|
||||
logger.debug('Mismatched season %s, discarding title %d (%s)',
|
||||
title.get('season'), title_id, sanitized_result)
|
||||
return
|
||||
# movie type
|
||||
else:
|
||||
# discard mismatches on type
|
||||
if title['type'] != 'movie':
|
||||
logger.debug("Mismatched 'movie' type, discarding title %d (%s)", title_id, sanitized_result)
|
||||
return
|
||||
|
||||
# discard mismatches on year
|
||||
if year is not None and 'year' in title and title['year'] != year:
|
||||
logger.debug("Mismatched movie year, discarding title %d (%s)", title_id, sanitized_result)
|
||||
return
|
||||
return True
|
||||
|
||||
@region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, should_cache_fn=lambda value: value)
|
||||
def search_titles(self, title, season, title_year):
|
||||
"""Search for titles matching the `title`.
|
||||
|
||||
For episodes, each season has it own title
|
||||
:param str title: the title to search for.
|
||||
:param int season: season of the title
|
||||
:param int title_year: year of the title
|
||||
:return: found titles.
|
||||
:rtype: dict
|
||||
"""
|
||||
titles = {}
|
||||
sanitized_titles = [sanitize(title)]
|
||||
ignore_characters = {'\'', '.'}
|
||||
if any(c in title for c in ignore_characters):
|
||||
sanitized_titles.append(sanitize(title, ignore_characters=ignore_characters))
|
||||
|
||||
for sanitized_title in sanitized_titles:
|
||||
# make the query
|
||||
if season:
|
||||
logger.info('Searching episode title %r for season %r', sanitized_title, season)
|
||||
else:
|
||||
logger.info('Searching movie title %r', sanitized_title)
|
||||
|
||||
r = self.session.get(self.server_url + 'legenda/sugestao/{}'.format(sanitized_title), timeout=10)
|
||||
raise_for_status(r)
|
||||
results = json.loads(r.text)
|
||||
|
||||
# loop over results
|
||||
for result in results:
|
||||
source = result['_source']
|
||||
|
||||
# extract id
|
||||
title_id = int(source['id_filme'])
|
||||
|
||||
# extract type
|
||||
title = {'type': type_map[source['tipo']]}
|
||||
|
||||
# extract title, year and country
|
||||
name, year, country = title_re.match(source['dsc_nome']).groups()
|
||||
title['title'] = name
|
||||
|
||||
# extract imdb_id
|
||||
if source['id_imdb'] != '0':
|
||||
if not source['id_imdb'].startswith('tt'):
|
||||
title['imdb_id'] = 'tt' + source['id_imdb'].zfill(7)
|
||||
else:
|
||||
title['imdb_id'] = source['id_imdb']
|
||||
|
||||
# extract season
|
||||
if title['type'] == 'episode':
|
||||
if source['temporada'] and source['temporada'].isdigit():
|
||||
title['season'] = int(source['temporada'])
|
||||
else:
|
||||
match = season_re.search(source['dsc_nome_br'])
|
||||
if match:
|
||||
title['season'] = int(match.group('season'))
|
||||
else:
|
||||
logger.debug('No season detected for title %d (%s)', title_id, name)
|
||||
|
||||
# extract year
|
||||
if year:
|
||||
title['year'] = int(year)
|
||||
elif source['dsc_data_lancamento'] and source['dsc_data_lancamento'].isdigit():
|
||||
# year is based on season air date hence the adjustment
|
||||
title['year'] = int(source['dsc_data_lancamento']) - title.get('season', 1) + 1
|
||||
|
||||
# add title only if is valid
|
||||
# Check against title without ignored chars
|
||||
if self.is_valid_title(title, title_id, sanitized_titles[0], season, title_year):
|
||||
titles[title_id] = title
|
||||
|
||||
logger.debug('Found %d titles', len(titles))
|
||||
|
||||
return titles
|
||||
|
||||
@region.cache_on_arguments(expiration_time=timedelta(minutes=15).total_seconds())
|
||||
def get_archives(self, title_id, language_code, title_type, season, episodes):
|
||||
"""Get the archive list from a given `title_id`, `language_code`, `title_type`, `season` and `episode`.
|
||||
|
||||
:param int title_id: title id.
|
||||
:param int language_code: language code.
|
||||
:param str title_type: episode or movie
|
||||
:param int season: season
|
||||
:param list episodes: episodes
|
||||
:return: the archives.
|
||||
:rtype: list of :class:`LegendasTVArchive`
|
||||
|
||||
"""
|
||||
archives = []
|
||||
page = 0
|
||||
while True:
|
||||
# get the archive page
|
||||
url = self.server_url + 'legenda/busca/-/{language}/-/{page}/{title}'.format(
|
||||
language=language_code, page=page, title=title_id)
|
||||
r = self.session.get(url)
|
||||
raise_for_status(r)
|
||||
|
||||
# parse the results
|
||||
soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
|
||||
for archive_soup in soup.select('div.list_element > article > div > div.f_left'):
|
||||
# create archive
|
||||
archive = LegendasTVArchive(archive_soup.a['href'].split('/')[2],
|
||||
archive_soup.a.text,
|
||||
'pack' in archive_soup.parent['class'],
|
||||
'destaque' in archive_soup.parent['class'],
|
||||
self.server_url + archive_soup.a['href'][1:])
|
||||
# clean name of path separators and pack flags
|
||||
clean_name = archive.name.replace('/', '-')
|
||||
if archive.pack and clean_name.startswith('(p)'):
|
||||
clean_name = clean_name[3:]
|
||||
|
||||
# guess from name
|
||||
guess = guessit(clean_name, {'type': title_type})
|
||||
|
||||
# episode
|
||||
if season and episodes:
|
||||
# discard mismatches on episode in non-pack archives
|
||||
|
||||
# Guessit may return int for single episode or list for multi-episode
|
||||
# Check if archive name has multiple episodes releases on it
|
||||
if not archive.pack and 'episode' in guess:
|
||||
wanted_episode = set(episodes)
|
||||
archive_episode = set(ensure_list(guess['episode']))
|
||||
|
||||
if not wanted_episode.intersection(archive_episode):
|
||||
logger.debug('Mismatched episode %s, discarding archive: %s', guess['episode'], clean_name)
|
||||
continue
|
||||
|
||||
# extract text containing downloads, rating and timestamp
|
||||
data_text = archive_soup.find('p', class_='data').text
|
||||
|
||||
# match downloads
|
||||
archive.downloads = int(downloads_re.search(data_text).group('downloads'))
|
||||
|
||||
# match rating
|
||||
match = rating_re.search(data_text)
|
||||
if match:
|
||||
archive.rating = int(match.group('rating'))
|
||||
|
||||
# match timestamp and validate it
|
||||
time_data = {k: int(v) for k, v in timestamp_re.search(data_text).groupdict().items()}
|
||||
archive.timestamp = pytz.timezone('America/Sao_Paulo').localize(datetime(**time_data))
|
||||
if archive.timestamp > datetime.utcnow().replace(tzinfo=pytz.utc):
|
||||
raise ProviderError('Archive timestamp is in the future')
|
||||
|
||||
# add archive
|
||||
logger.info('Found archive for title %d and language %d at page %s: %s',
|
||||
title_id, language_code, page, archive)
|
||||
archives.append(archive)
|
||||
|
||||
# stop on last page
|
||||
if soup.find('a', attrs={'class': 'load_more'}, string='carregar mais') is None:
|
||||
break
|
||||
|
||||
# increment page count
|
||||
page += 1
|
||||
|
||||
logger.debug('Found %d archives', len(archives))
|
||||
|
||||
return archives
|
||||
|
||||
def download_archive(self, archive):
    """Download an archive's :attr:`~LegendasTVArchive.content`.

    :param archive: the archive to download :attr:`~LegendasTVArchive.content` of.
    :type archive: :class:`LegendasTVArchive`

    """
    logger.info('Downloading archive %s', archive.id)
    response = self.session.get(self.server_url + 'downloadarquivo/{}'.format(archive.id))
    raise_for_status(response)

    # wrap the payload in a seekable stream for the archive readers
    stream = io.BytesIO(response.content)

    # probe for rar first, then zip; reject anything else
    if is_rarfile(stream):
        logger.debug('Identified rar archive')
        opener = RarFile
    elif is_zipfile(stream):
        logger.debug('Identified zip archive')
        opener = ZipFile
    else:
        raise ValueError('Not a valid archive')

    archive.content = opener(stream)
|
||||
|
||||
def query(self, language, title, season=None, episodes=None, year=None):
    """Query the provider, returning all matching subtitles.

    :param language: language of the wanted subtitles.
    :param title: title to search for.
    :param season: season number, for episodes.
    :param episodes: episode numbers, for episodes.
    :param year: release year of the title.
    :return: the found subtitles.

    """
    # search for titles
    found_titles = self.search_titles(title, season, year)

    subtitles = []
    # iterate over titles
    for title_id, title_info in found_titles.items():

        logger.info('Getting archives for title %d and language %d', title_id, language.legendastv)
        archives = self.get_archives(title_id, language.legendastv, title_info['type'], season, episodes or [])
        if not archives:
            logger.info('No archives found for title %d and language %d', title_id, language.legendastv)

        # iterate over title's archives
        for archive in archives:

            # cache entries expire once they are older than the archive itself
            expiration_time = (datetime.utcnow().replace(tzinfo=pytz.utc) - archive.timestamp).total_seconds()

            # attempt to get the releases from the cache
            cache_key = releases_key.format(archive_id=archive.id, archive_name=archive.name)
            releases = region.get(cache_key, expiration_time=expiration_time)

            # the releases are not in cache or cache is expired
            if releases == NO_VALUE:
                logger.info('Releases not found in cache')

                # download archive
                self.download_archive(archive)

                # extract the releases, skipping the legendastv marker file,
                # hidden files and anything that is not a subtitle
                releases = [
                    name for name in archive.content.namelist()
                    if not name.startswith('Legendas.tv')
                    and not os.path.split(name)[-1].startswith('.')
                    and name.lower().endswith(SUBTITLE_EXTENSIONS)
                ]

                # cache the releases
                region.set(cache_key, releases)

            # build one subtitle per release
            for release in releases:
                subtitle = self.subtitle_class(language, title_info['type'], title_info['title'],
                                               title_info.get('year'), title_info.get('imdb_id'),
                                               title_info.get('season'), archive, release)
                logger.debug('Found subtitle %r', subtitle)
                subtitles.append(subtitle)

    return subtitles
|
||||
|
||||
def list_subtitles(self, video, languages):
    """List subtitles for *video*, trying each candidate title until one yields results."""
    # episodes carry a season/episodes context; movies do not
    if isinstance(video, Episode):
        candidate_titles = [video.series] + video.alternative_series
        season = video.season
        episodes = video.episodes
    else:
        candidate_titles = [video.title] + video.alternative_titles
        season = None
        episodes = []

    # stop at the first title that produces any subtitle
    for candidate in candidate_titles:
        found = []
        for language in languages:
            found.extend(self.query(language, candidate, season=season, episodes=episodes, year=video.year))
        if found:
            return found

    return []
|
||||
|
||||
def download_subtitle(self, subtitle):
    """Fetch *subtitle*'s content, downloading its archive first when needed."""
    archive = subtitle.archive

    # a releases-cache hit means the archive itself was never fetched
    if archive.content is None:
        self.download_archive(archive)

    # extract subtitle's content from the archive
    raw = archive.content.read(subtitle.name)
    subtitle.content = fix_line_ending(raw)
|
||||
|
||||
|
||||
def raise_for_status(r):
    """Raise for HTTP errors, including the site's maintenance page.

    The site serves its maintenance page with HTTP status 200, so it must
    be detected from the response body rather than the status code.

    :param r: the response to check.
    :raises ServiceUnavailable: when the site is under maintenance.

    """
    if 'Em breve estaremos de volta' in r.text:
        raise ServiceUnavailable
    r.raise_for_status()
|
@ -0,0 +1,44 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import logging
|
||||
|
||||
from ..extensions import provider_manager, default_providers
|
||||
from ..utils import hash_napiprojekt, hash_opensubtitles, hash_shooter, hash_thesubdb
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Map each provider name to the hash function its service expects.
# Both opensubtitles entries share the same hashing scheme.
hash_functions = {
    'napiprojekt': hash_napiprojekt,
    'opensubtitles': hash_opensubtitles,
    'opensubtitlesvip': hash_opensubtitles,
    'shooter': hash_shooter,
    'thesubdb': hash_thesubdb
}
|
||||
|
||||
|
||||
def refine(video, providers=None, languages=None, **kwargs):
    """Refine a video computing required hashes for the given providers.

    The following :class:`~subliminal.video.Video` attribute can be found:

    * :attr:`~subliminal.video.Video.hashes`

    """
    # undersized files are not worth hashing
    if video.size <= 10485760:
        logger.warning('Size is lower than 10MB: hashes not computed')
        return

    logger.debug('Computing hashes for %r', video.name)
    for provider_name in providers or default_providers:
        plugin = provider_manager[provider_name].plugin

        # skip providers that do not use hashes or cannot handle this video/languages
        if (provider_name not in hash_functions
                or not plugin.check_types(video)
                or (languages and not plugin.check_languages(languages))):
            continue

        video.hashes[provider_name] = hash_functions[provider_name](video.name)

    logger.debug('Computed hashes %r', video.hashes)
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue