@@ -25,9 +25,10 @@ _CLEAN_TITLE_RES = [
     (r"´|`", "'"),
     (r" {2,}", " "),
 ]
 
 _SPANISH_RE = re.compile(r"españa|ib[eé]rico|castellano|gallego|castilla")
 _YEAR_RE = re.compile(r"(\(\d{4}\))")
+_SERIES_RE = re.compile(r"\(?\d{4}\)?|[sS]\d{2}([eE]\d{2})?")
 
 logger = logging.getLogger(__name__)
 
@@ -47,11 +48,13 @@ class SubdivxSubtitle(Subtitle):
         self.download_url = download_url
         self.uploader = uploader
 
-        self.release_info = str(title)
-        self.description = str(description).strip()
+        self._title = str(title).strip()
+        self._description = str(description).strip()
+
+        self.release_info = self._title
 
-        if self.description:
-            self.release_info += " | " + self.description
+        if self._description:
+            self.release_info += " | " + self._description
 
     @property
     def id(self):
@@ -62,20 +65,18 @@ class SubdivxSubtitle(Subtitle):
         # episode
         if isinstance(video, Episode):
-            # already matched in search query
+            # TODO: avoid false positive with some short/common titles
+            # already matched within provider
             matches.update(["title", "series", "season", "episode", "year"])
 
         # movie
         elif isinstance(video, Movie):
-            # already matched in search query
+            # already matched within provider
             matches.update(["title", "year"])
 
-        update_matches(matches, video, self.description)
+        update_matches(matches, video, self._description)
 
         # Don't lowercase; otherwise it will match a lot of false positives
-        if video.release_group and video.release_group in self.description:
+        if video.release_group and video.release_group in self._description:
             matches.add("release_group")
 
         return matches
@@ -108,7 +109,7 @@ class SubdivxSubtitlesProvider(Provider):
         subtitles = []
 
         if isinstance(video, Episode):
-            # TODO: cache pack queries (TV SHOW S01 / TV SHOW 2022 S01).
+            # TODO: cache pack queries (TV SHOW S01).
             # Too many redundant server calls.
 
             for query in (
@@ -117,14 +118,10 @@ class SubdivxSubtitlesProvider(Provider):
             ):
                 subtitles += self._handle_multi_page_search(query, video)
 
-            # Try with year
-            if len(subtitles) <= 5 and video.year:
-                logger.debug("Few results. Trying with year")
-                for query in (
-                    f"{video.series} {video.year} S{video.season:02}E{video.episode:02}",
-                    f"{video.series} {video.year} S{video.season:02}",
-                ):
-                    subtitles += self._handle_multi_page_search(query, video)
+            # Try only with series title
+            if len(subtitles) <= 5:
+                subtitles += self._handle_multi_page_search(video.series, video, 1)
 
         else:
             for query in (video.title, f"{video.title} ({video.year})"):
                 subtitles += self._handle_multi_page_search(query, video)
@@ -149,20 +146,25 @@ class SubdivxSubtitlesProvider(Provider):
         max_loops_not_met = True
 
         while max_loops_not_met:
-            loops += 1
             max_loops_not_met = loops < max_loops
 
-            page_subtitles = self._get_page_subtitles(params, video)
+            page_subtitles, last_page = self._get_page_subtitles(params, video)
 
-            logger.debug("Yielding %d subtitles", len(page_subtitles))
+            logger.debug("Yielding %d subtitles [loop #%d]", len(page_subtitles), loops)
             yield from page_subtitles
 
-            if len(page_subtitles) < 100:
-                break  # this is the last page
+            if last_page:
+                logger.debug("Last page for '%s' query. Breaking loop", query)
+                break
+
+            loops += 1
 
             params["pg"] += 1  # search next page
             time.sleep(self.multi_result_throttle)
 
         if not max_loops_not_met:
             logger.debug("Max loops limit exceeded")
 
     def _get_page_subtitles(self, params, video):
         search_link = f"{_SERVER_URL}/index.php"
         response = self.session.get(
@@ -170,19 +172,19 @@ class SubdivxSubtitlesProvider(Provider):
         )
 
         try:
-            page_subtitles = self._parse_subtitles_page(video, response)
+            page_subtitles, last_page = self._parse_subtitles_page(video, response)
         except Exception as error:
             logger.error(f"Error parsing subtitles list: {error}")
             return []
 
-        return page_subtitles
+        return page_subtitles, last_page
 
     def list_subtitles(self, video, languages):
         return self.query(video, languages)
 
     def download_subtitle(self, subtitle):
         # download the subtitle
-        logger.info("Downloading subtitle %r", subtitle)
+        logger.debug("Downloading subtitle %r", subtitle)
 
         # download zip / rar file with the subtitle
         response = self.session.get(
@@ -212,7 +214,8 @@ class SubdivxSubtitlesProvider(Provider):
         )
         title_soups = page_soup.find_all("div", {"id": "menu_detalle_buscador"})
         body_soups = page_soup.find_all("div", {"id": "buscador_detalle"})
-        episode = isinstance(video, Episode)
+
+        title_checker = _check_episode if isinstance(video, Episode) else _check_movie
 
         for subtitle in range(0, len(title_soups)):
             title_soup, body_soup = title_soups[subtitle], body_soups[subtitle]
@@ -224,8 +227,7 @@ class SubdivxSubtitlesProvider(Provider):
                 logger.debug("Skipping forced subtitles: %s", title)
                 continue
 
-            # Check movie title (if the video is a movie)
-            if not episode and not _check_movie(video, title):
+            if not title_checker(video, title):
                 continue
 
             # Data
@@ -257,7 +259,7 @@ class SubdivxSubtitlesProvider(Provider):
             logger.debug("Found subtitle %r", subtitle)
             subtitles.append(subtitle)
 
-        return subtitles
+        return subtitles, len(title_soups) < 100
 
 
 def _clean_title(title):
@@ -282,6 +284,24 @@ def _get_download_url(data):
     return None
 
 
+def _check_episode(video, title):
+    series_title = _SERIES_RE.sub("", title).strip()
+
+    distance = abs(len(series_title) - len(video.series))
+
+    series_matched = distance < 4
+
+    logger.debug(
+        "Series matched? %s [%s -> %s] [distance: %d]",
+        series_matched,
+        video.series,
+        series_title,
+        distance,
+    )
+
+    return series_matched
+
+
 def _check_movie(video, title):
     if str(video.year) not in title:
         return False