Optimizes Greek providers (#489)

* Optimizes providers

* Adds episode id in subtitle page link

* Adds episode id in subtitle page link
Branch: pull/494/head
Authored by Panagiotis Koutsias, committed by morpheus65535
Parent: 0f765db613
Commit: 0fcef4d674

@@ -8,7 +8,6 @@ import rarfile
 from subzero.language import Language
 from guessit import guessit
 from requests import Session
-from six import text_type
 from subliminal import __short_version__
 from subliminal.providers import ParserBeautifulSoup, Provider
@@ -75,7 +74,7 @@ class GreekSubtitlesProvider(Provider):
         logger.debug('Searching subtitles %r', params)
         subtitles = []
-        search_link = self.server_url + text_type(self.search_url).format(params)
+        search_link = self.server_url + self.search_url.format(params)
         while True:
             r = self.session.get(search_link, timeout=30)
             r.raise_for_status()
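
The change in this provider simply drops the Python 2 `six.text_type` shim: under Python 3 the URL template is already a `str`, so `format` can be called on it directly. A minimal sketch of the simplified pattern (the template and search value below are illustrative, not the provider's real ones):

    # illustrative values only
    server_url = 'https://example.org'
    search_url = '/search.php?name={}'
    params = 'some title'
    # before: server_url + text_type(search_url).format(params)
    search_link = server_url + search_url.format(params)  # str.format is enough on Python 3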

@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-# encoding=utf8
 import io
 import logging
 import os
@@ -12,7 +11,6 @@ import zipfile
 from subzero.language import Language
 from guessit import guessit
 from requests import Session
-from six import text_type
 from subliminal.providers import ParserBeautifulSoup, Provider
 from subliminal import __short_version__
@@ -75,6 +73,9 @@ class Subs4FreeProvider(Provider):
     server_url = 'https://www.sf4-industry.com'
     download_url = '/getSub.html'
     search_url = '/search_report.php?search={}&searchType=1'
+    anti_block_1 = 'https://images.subs4free.info/favicon.ico'
+    anti_block_2 = 'https://www.subs4series.com/includes/anti-block-layover.php?launch=1'
+    anti_block_3 = 'https://www.subs4series.com/includes/anti-block.php'
     subtitle_class = Subs4FreeSubtitle

     def __init__(self):
@@ -87,62 +88,51 @@ class Subs4FreeProvider(Provider):
     def terminate(self):
         self.session.close()

-    def get_show_ids(self, title, year=None):
-        """Get the best matching show id for `series` and `year`.
+    def get_show_links(self, title, year=None):
+        """Get the matching show links for `title` and `year`.

         First search in the result of :meth:`_get_show_suggestions`.

         :param title: show title.
         :param year: year of the show, if any.
         :type year: int
-        :return: the show id, if found.
-        :rtype: str
+        :return: the show links, if found.
+        :rtype: list of str

         """
-        title_sanitized = sanitize(title).lower()
-        show_ids = self._get_suggestions(title)
-
-        matched_show_ids = []
-        for show in show_ids:
-            show_id = None
-            show_title = sanitize(show['title'])
-            # attempt with year
-            if not show_id and year:
-                logger.debug('Getting show id with year')
-                show_id = show['link'].split('?p=')[-1] if show_title == '{title} {year:d}'.format(
-                    title=title_sanitized, year=year) else None
-
-            # attempt clean
-            if not show_id:
-                logger.debug('Getting show id')
-                show_id = show['link'].split('?p=')[-1] if show_title == title_sanitized else None
-
-            if show_id:
-                matched_show_ids.append(show_id)
-
-        return matched_show_ids
-
-    @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, to_str=text_type,
-                               should_cache_fn=lambda value: value)
+        title = sanitize(title)
+        suggestions = self._get_suggestions(title)
+
+        show_links = []
+        for suggestion in suggestions:
+            show_title = sanitize(suggestion['title'])
+
+            if show_title == title or (year and show_title == '{title} {year:d}'.format(title=title, year=year)):
+                logger.debug('Getting show id')
+                show_links.append(suggestion['link'].split('?p=')[-1])
+
+        return show_links
+
+    @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, should_cache_fn=lambda value: value)
     def _get_suggestions(self, title):
         """Search the show or movie id from the `title` and `year`.

         :param str title: title of the show.
         :return: the show suggestions found.
-        :rtype: dict
+        :rtype: list of dict

         """
         # make the search
         logger.info('Searching show ids with %r', title)
-        r = self.session.get(self.server_url + text_type(self.search_url).format(title),
+        r = self.session.get(self.server_url + self.search_url.format(title),
                              headers={'Referer': self.server_url}, timeout=10)
         r.raise_for_status()

         if not r.content:
             logger.debug('No data returned from provider')
-            return {}
+            return []

-        soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
+        soup = ParserBeautifulSoup(r.content, ['html.parser'])
         suggestions = [{'link': l.attrs['value'], 'title': l.text}
                        for l in soup.select('select[name="Mov_sel"] > option[value]')]
         logger.debug('Found suggestions: %r', suggestions)
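
The rewritten `get_show_links` collapses the old multi-step id lookup into a single comparison: a suggestion matches when its sanitized title equals the sanitized query title, or the query title followed by the release year. A rough standalone sketch of that rule (the sample titles are made up, and it assumes subliminal's `sanitize` helper, which lowercases and strips punctuation):

    from subliminal.utils import sanitize

    def title_matches(suggestion_title, title, year=None):
        # the same comparison the new get_show_links applies to each suggestion
        show_title = sanitize(suggestion_title)
        title = sanitize(title)
        return show_title == title or (
            year is not None and show_title == '{title} {year:d}'.format(title=title, year=year))

    title_matches('The Wire', 'the wire')                  # True
    title_matches('The Office 2005', 'The Office', 2005)   # True, year-qualified suggestion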
@@ -155,7 +145,7 @@ class Subs4FreeProvider(Provider):
         if movie_id:
             page_link = self.server_url + '/' + movie_id
         else:
-            page_link = self.server_url + text_type(self.search_url).format(' '.join([title, str(year)]))
+            page_link = self.server_url + self.search_url.format(' '.join([title, str(year)]))
         r = self.session.get(page_link, timeout=10)
         r.raise_for_status()
@@ -166,26 +156,26 @@ class Subs4FreeProvider(Provider):
         soup = ParserBeautifulSoup(r.content, ['html.parser'])

-        year_num = None
+        year = None
         year_element = soup.select_one('td#dates_header > table div')
         matches = False
         if year_element:
             matches = year_re.match(str(year_element.contents[2]).strip())
         if matches:
-            year_num = int(matches.group(1))
+            year = int(matches.group(1))

-        title_element = soup.select_one('td#dates_header > table u')
-        show_title = str(title_element.contents[0]).strip() if title_element else None
+        title_tag = soup.select_one('td#dates_header > table u')
+        show_title = str(title_tag.contents[0]).strip() if title_tag else None

         subtitles = []
         # loop over episode rows
-        for subtitle in soup.select('table.table_border div[align="center"] > div'):
+        for subs_tag in soup.select('table .seeDark,.seeMedium'):
             # read common info
-            version = subtitle.find('b').text
-            download_link = self.server_url + subtitle.find('a')['href']
-            language = Language.fromalpha2(subtitle.find('img')['src'].split('/')[-1].split('.')[0])
-            subtitle = self.subtitle_class(language, page_link, show_title, year_num, version, download_link)
+            version = subs_tag.find('b').text
+            download_link = self.server_url + subs_tag.find('a')['href']
+            language = Language.fromalpha2(subs_tag.find('img')['src'].split('/')[-1].split('.')[0])
+            subtitle = self.subtitle_class(language, page_link, show_title, year, version, download_link)

             logger.debug('Found subtitle {!r}'.format(subtitle))
             subtitles.append(subtitle)
@@ -196,19 +186,19 @@ class Subs4FreeProvider(Provider):
         # lookup show_id
         titles = [video.title] + video.alternative_titles if isinstance(video, Movie) else []

-        show_ids = None
+        show_links = None
         for title in titles:
-            show_ids = self.get_show_ids(title, video.year)
-            if show_ids and len(show_ids) > 0:
+            show_links = self.get_show_links(title, video.year)
+            if show_links:
                 break

         subtitles = []
         # query for subtitles with the show_id
-        if show_ids and len(show_ids) > 0:
-            for show_id in show_ids:
-                subtitles += [s for s in self.query(show_id, video.title, video.year) if s.language in languages]
+        if show_links:
+            for show_link in show_links:
+                subtitles += [s for s in self.query(show_link, video.title, video.year) if s.language in languages]
         else:
-            subtitles += [s for s in self.query(None, video.title, video.year) if s.language in languages]
+            subtitles += [s for s in self.query(None, sanitize(video.title), video.year) if s.language in languages]

         return subtitles
@@ -234,8 +224,10 @@ class Subs4FreeProvider(Provider):
             logger.debug('Unable to download subtitle. No download link found')
             return

+        self.apply_anti_block(subtitle)
+
         download_url = self.server_url + self.download_url
-        r = self.session.post(download_url, data={'utf8': 1, 'id': subtitle_id, 'x': random.randint(0, width),
+        r = self.session.post(download_url, data={'id': subtitle_id, 'x': random.randint(0, width),
                                                   'y': random.randint(0, height)},
                               headers={'Referer': subtitle.download_link}, timeout=10)
         r.raise_for_status()
@@ -253,6 +245,11 @@ class Subs4FreeProvider(Provider):
         else:
             logger.debug('Could not extract subtitle from %r', archive)

+    def apply_anti_block(self, subtitle):
+        self.session.get(self.anti_block_1, headers={'Referer': subtitle.download_link}, timeout=10)
+        self.session.get(self.anti_block_2, headers={'Referer': subtitle.download_link}, timeout=10)
+        self.session.get(self.anti_block_3, headers={'Referer': subtitle.download_link}, timeout=10)
+
     def _get_archive(content):
         # open the archive
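
For context, the new `apply_anti_block` step is nothing more than a set of warm-up GET requests issued through the same `requests.Session`, with the subtitle page as Referer, before the actual download request. Roughly equivalent, as a standalone sketch (URLs copied from the class attributes added above; the referer page is a placeholder):

    import requests

    session = requests.Session()
    referer = 'https://www.sf4-industry.com/some-subtitle-page'  # placeholder
    for url in ('https://images.subs4free.info/favicon.ico',
                'https://www.subs4series.com/includes/anti-block-layover.php?launch=1',
                'https://www.subs4series.com/includes/anti-block.php'):
        # mimic what a browser loads before the POST to /getSub.html is accepted
        session.get(url, headers={'Referer': referer}, timeout=10)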

@@ -10,7 +10,6 @@ import zipfile
 from subzero.language import Language
 from guessit import guessit
 from requests import Session
-from six import text_type
 from subliminal.providers import ParserBeautifulSoup, Provider
 from subliminal import __short_version__
@@ -73,6 +72,8 @@ class Subs4SeriesProvider(Provider):
     server_url = 'https://www.subs4series.com'
     search_url = '/search_report.php?search={}&searchType=1'
     episode_link = '/tv-series/{show_id}/season-{season:d}/episode-{episode:d}'
+    anti_block_1 = '/includes/anti-block-layover.php?launch=1'
+    anti_block_2 = '/includes/anti-block.php'
     subtitle_class = Subs4SeriesSubtitle

     def __init__(self):
@@ -85,62 +86,51 @@ class Subs4SeriesProvider(Provider):
     def terminate(self):
         self.session.close()

-    def get_show_ids(self, title, year=None):
-        """Get the best matching show id for `series` and `year`.
+    def get_show_links(self, title, year=None):
+        """Get the matching show links for `title` and `year`.

         First search in the result of :meth:`_get_show_suggestions`.

         :param title: show title.
         :param year: year of the show, if any.
         :type year: int
-        :return: the show id, if found.
-        :rtype: str
+        :return: the show links, if found.
+        :rtype: list of str

         """
-        title_sanitized = sanitize(title).lower()
-        show_ids = self._get_suggestions(title)
-
-        matched_show_ids = []
-        for show in show_ids:
-            show_id = None
-            show_title = sanitize(show['title'])
-            # attempt with year
-            if not show_id and year:
-                logger.debug('Getting show id with year')
-                show_id = '/'.join(show['link'].rsplit('/', 2)[1:]) if show_title == '{title} {year:d}'.format(
-                    title=title_sanitized, year=year) else None
-
-            # attempt clean
-            if not show_id:
-                logger.debug('Getting show id')
-                show_id = '/'.join(show['link'].rsplit('/', 2)[1:]) if show_title == title_sanitized else None
-
-            if show_id:
-                matched_show_ids.append(show_id)
-
-        return matched_show_ids
-
-    @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, to_str=text_type,
-                               should_cache_fn=lambda value: value)
+        title = sanitize(title)
+        suggestions = self._get_suggestions(title)
+
+        show_links = []
+        for suggestion in suggestions:
+            show_title = sanitize(suggestion['title'])
+
+            if show_title == title or (year and show_title == '{title} {year:d}'.format(title=title, year=year)):
+                logger.debug('Getting show link')
+                show_links.append('/'.join(suggestion['link'].rsplit('/', 2)[1:]))
+
+        return show_links
+
+    @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, should_cache_fn=lambda value: value)
     def _get_suggestions(self, title):
         """Search the show or movie id from the `title` and `year`.

         :param str title: title of the show.
         :return: the show suggestions found.
-        :rtype: dict
+        :rtype: list of dict

         """
         # make the search
         logger.info('Searching show ids with %r', title)
-        r = self.session.get(self.server_url + text_type(self.search_url).format(title),
+        r = self.session.get(self.server_url + self.search_url.format(title),
                              headers={'Referer': self.server_url}, timeout=10)
         r.raise_for_status()

         if not r.content:
             logger.debug('No data returned from provider')
-            return {}
+            return []

-        soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
+        soup = ParserBeautifulSoup(r.content, ['html.parser'])
         series = [{'link': l.attrs['value'], 'title': l.text}
                   for l in soup.select('select[name="Mov_sel"] > option[value]')]
         logger.debug('Found suggestions: %r', series)
@@ -164,21 +154,21 @@ class Subs4SeriesProvider(Provider):
         soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])

-        year_num = None
+        year = None
         matches = year_re.match(str(soup.select_one('#dates_header_br > table div').contents[2]).strip())
         if matches:
-            year_num = int(matches.group(1))
+            year = int(matches.group(1))

-        show_title = str(soup.select_one('#dates_header_br > table u').contents[0]).strip()
+        show_title = str(soup.select_one('#dates_header_br > table div u').string).strip()

         subtitles = []
         # loop over episode rows
-        for subtitle in soup.select('table.table_border div[align="center"] > div'):
+        for subs_tag in soup.select('table .seeDark,.seeMedium'):
             # read common info
-            version = subtitle.find('b').text
-            download_link = self.server_url + subtitle.find('a')['href']
-            language = Language.fromalpha2(subtitle.find('img')['src'].split('/')[-1].split('.')[0])
-            subtitle = self.subtitle_class(language, page_link, show_title, year_num, version, download_link)
+            version = subs_tag.find('b').text
+            download_link = self.server_url + subs_tag.find('a')['href']
+            language = Language.fromalpha2(subs_tag.find('img')['src'].split('/')[-1].split('.')[0])
+            subtitle = self.subtitle_class(language, page_link, show_title, year, version, download_link)

             logger.debug('Found subtitle %r', subtitle)
             subtitles.append(subtitle)
@@ -189,16 +179,16 @@ class Subs4SeriesProvider(Provider):
         # lookup show_id
         titles = [video.series] + video.alternative_series if isinstance(video, Episode) else []

-        show_ids = None
+        show_links = None
         for title in titles:
-            show_ids = self.get_show_ids(title, video.year)
-            if show_ids and len(show_ids) > 0:
+            show_links = self.get_show_links(title, video.year)
+            if show_links:
                 break

         subtitles = []
         # query for subtitles with the show_id
-        for show_id in show_ids:
-            subtitles += [s for s in self.query(show_id, video.series, video.season, video.episode, video.title)
+        for show_link in show_links:
+            subtitles += [s for s in self.query(show_link, video.series, video.season, video.episode, video.title)
                           if s.language in languages]

         return subtitles
@@ -226,6 +216,8 @@ class Subs4SeriesProvider(Provider):
             logger.debug('Unable to download subtitle. No download link found')
             return

+        self.apply_anti_block(subtitle)
+
         download_url = self.server_url + target
         r = self.session.get(download_url, headers={'Referer': subtitle.download_link}, timeout=10)
         r.raise_for_status()
@@ -242,6 +234,10 @@ class Subs4SeriesProvider(Provider):
         else:
             logger.debug('Could not extract subtitle from %r', archive)

+    def apply_anti_block(self, subtitle):
+        self.session.get(self.server_url + self.anti_block_1, headers={'Referer': subtitle.download_link}, timeout=10)
+        self.session.get(self.server_url + self.anti_block_2, headers={'Referer': subtitle.download_link}, timeout=10)
+
     def _get_archive(content):
         # open the archive

@@ -11,7 +11,6 @@ import zipfile
 from subzero.language import Language
 from guessit import guessit
 from requests import Session
-from six import text_type
 from subliminal.providers import ParserBeautifulSoup, Provider
 from subliminal import __short_version__
@@ -113,8 +112,8 @@ class SubzProvider(Provider):
     def terminate(self):
         self.session.close()

-    def get_show_ids(self, title, year=None, is_episode=True, country_code=None):
-        """Get the best matching show id for `series`, `year` and `country_code`.
+    def get_show_links(self, title, year=None, is_episode=True):
+        """Get the matching show links for `title` and `year`.

         First search in the result of :meth:`_get_show_suggestions`.
@@ -123,61 +122,41 @@ class SubzProvider(Provider):
         :type year: int
         :param is_episode: if the search is for episode.
         :type is_episode: bool
-        :param country_code: country code of the show, if any.
-        :type country_code: str
-        :return: the show id, if found.
-        :rtype: str
+        :return: the show links, if found.
+        :rtype: list of str

         """
-        title_sanitized = sanitize(title).lower()
-        show_ids = self._get_suggestions(title, is_episode)
-
-        matched_show_ids = []
-        for show in show_ids:
-            show_id = None
-            # attempt with country
-            if not show_id and country_code:
-                logger.debug('Getting show id with country')
-                if sanitize(show['title']) == text_type('{title} {country}').format(title=title_sanitized,
-                                                                                    country=country_code.lower()):
-                    show_id = show['link'].split('/')[-1]
-
-            # attempt with year
-            if not show_id and year:
-                logger.debug('Getting show id with year')
-                if sanitize(show['title']) == text_type('{title} {year}').format(title=title_sanitized, year=year):
-                    show_id = show['link'].split('/')[-1]
-
-            # attempt clean
-            if not show_id:
-                logger.debug('Getting show id')
-                show_id = show['link'].split('/')[-1] if sanitize(show['title']) == title_sanitized else None
-
-            if show_id:
-                matched_show_ids.append(show_id)
-
-        return matched_show_ids
-
-    @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, to_str=text_type,
-                               should_cache_fn=lambda value: value)
+        title = sanitize(title)
+        suggestions = self._get_suggestions(title, is_episode)
+
+        show_links = []
+        for suggestion in suggestions:
+            if sanitize(suggestion['title']) == title or \
+                    (year and sanitize(suggestion['title']) == '{title} {year}'.format(title=title, year=year)):
+                logger.debug('Getting show id')
+                show_links.append(suggestion['link'].split('/')[-1])
+
+        return show_links
+
+    @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, should_cache_fn=lambda value: value)
     def _get_suggestions(self, title, is_episode=True):
-        """Search the show or movie id from the `title` and `year`.
+        """Search the show or movie id from the `title`.

         :param str title: title of the show.
         :param is_episode: if the search is for episode.
         :type is_episode: bool
         :return: the show suggestions found.
-        :rtype: dict
+        :rtype: list of dict

         """
         # make the search
         logger.info('Searching show ids with %r', title)
-        r = self.session.get(self.server_url + text_type(self.search_url).format(title), timeout=10)
+        r = self.session.get(self.server_url + self.search_url.format(title), timeout=10)
         r.raise_for_status()

         if not r.content:
             logger.debug('No data returned from provider')
-            return {}
+            return []

         show_type = 'series' if is_episode else 'movie'
         parsed_suggestions = [s for s in json.loads(r.text) if 'type' in s and s['type'] == show_type]
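
Unlike the other providers, the Subz suggestion endpoint returns JSON rather than an HTML `<select>`, and the hunk above keeps only the entries whose `type` matches the kind of search. A small isolated sketch of that filter (the payload is invented for illustration):

    import json

    payload = ('[{"title": "Example Show", "link": "/series/123", "type": "series"},'
               ' {"title": "Example Movie", "link": "/movies/456", "type": "movie"}]')  # illustrative
    is_episode = True
    show_type = 'series' if is_episode else 'movie'
    parsed_suggestions = [s for s in json.loads(payload) if 'type' in s and s['type'] == show_type]
    # only the "series" entry survives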
@@ -198,6 +177,9 @@ class SubzProvider(Provider):
             return []

         r = self.session.get(page_link, timeout=10)
+        if r.status_code == 404:
+            return []
+
         r.raise_for_status()

         if not r.content:
@@ -206,36 +188,37 @@ class SubzProvider(Provider):
         soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])

-        year_num = None
+        year = None
         if not is_episode:
-            year_num = int(soup.select_one('span.year').text)
-
-        show_title = str(soup.select_one('#summary-wrapper > div.summary h1').contents[0]).strip()
+            year = int(soup.select_one('span.year').text)

         subtitles = []
         # loop over episode rows
-        for subtitle in soup.select('div[id="subtitles"] tr[data-id]'):
+        for subs_tag in soup.select('div[id="subtitles"] tr[data-id]'):
             # read common info
-            version = subtitle.find('td', {'class': 'name'}).text
-            download_link = subtitle.find('a', {'class': 'btn-success'})['href'].strip('\'')
+            version = subs_tag.find('td', {'class': 'name'}).text
+            download_link = subs_tag.find('a', {'class': 'btn-success'})['href'].strip('\'')

             # read the episode info
             if is_episode:
                 episode_numbers = soup.select_one('#summary-wrapper > div.container.summary span.main-title-sxe').text
-                season_num = None
-                episode_num = None
+                season = None
+                episode = None
                 matches = episode_re.match(episode_numbers.strip())
                 if matches:
-                    season_num = int(matches.group(1))
-                    episode_num = int(matches.group(2))
+                    season = int(matches.group(1))
+                    episode = int(matches.group(2))

-                episode_title = soup.select_one('#summary-wrapper > div.container.summary span.main-title').text
-                subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, show_title, season_num,
-                                               episode_num, episode_title, year_num, version, download_link)
+                series = soup.select_one('#summary-wrapper > div.summary h2 > a').string.strip()
+                title = soup.select_one('#summary-wrapper > div.container.summary span.main-title').text
+                subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, series, season, episode, title,
+                                               year, version, download_link)
             # read the movie info
             else:
-                subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, None, None, None, show_title,
-                                               year_num, version, download_link)
+                title = str(soup.select_one('#summary-wrapper > div.summary h1').contents[0]).strip()
+                subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, None, None, None, title, year,
+                                               version, download_link)

             logger.debug('Found subtitle %r', subtitle)
             subtitles.append(subtitle)
@@ -251,20 +234,20 @@ class SubzProvider(Provider):
         else:
             titles = []

-        show_ids = None
+        show_links = None
         for title in titles:
-            show_ids = self.get_show_ids(title, video.year, isinstance(video, Episode))
-            if show_ids is not None and len(show_ids) > 0:
+            show_links = self.get_show_links(title, video.year, isinstance(video, Episode))
+            if show_links is not None and len(show_links) > 0:
                 break

         subtitles = []
         # query for subtitles with the show_id
-        for show_id in show_ids:
+        for show_links in show_links:
             if isinstance(video, Episode):
-                subtitles += [s for s in self.query(show_id, video.series, video.season, video.episode, video.title)
+                subtitles += [s for s in self.query(show_links, video.series, video.season, video.episode, video.title)
                               if s.language in languages and s.season == video.season and s.episode == video.episode]
             elif isinstance(video, Movie):
-                subtitles += [s for s in self.query(show_id, None, None, None, video.title)
+                subtitles += [s for s in self.query(show_links, None, None, None, video.title)
                               if s.language in languages and s.year == video.year]

         return subtitles

@@ -17,6 +17,7 @@ from subliminal.video import Episode
 logger = logging.getLogger(__name__)

 article_re = re.compile(r'^([A-Za-z]{1,3}) (.*)$')
+episode_re = re.compile(r'^(\d+)(-(\d+))*$')

 class XSubsSubtitle(Subtitle):
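
The new `episode_re` exists to handle multi-episode entries such as `12-13`: group 1 captures the first episode number and group 3, when present, the last one, which is exactly how the query hunk further down builds its `episodes` list. A quick standalone check (sample strings are illustrative):

    import re

    episode_re = re.compile(r'^(\d+)(-(\d+))*$')

    for number in ('7', '12-13'):
        episode_match = episode_re.match(number)
        episodes = [int(e) for e in [episode_match.group(1), episode_match.group(3)] if e]
        # '7' -> [7]; '12-13' -> [12, 13]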
@@ -80,7 +81,7 @@ class XSubsProvider(Provider):
     all_series_url = '/series/all.xml'
     series_url = '/series/{:d}/main.xml'
     season_url = '/series/{show_id:d}/{season:d}.xml'
-    page_link = '/ice/xsw.xml?srsid={show_id:d}#{season_id:d};{season:d}'
+    page_link = '/ice/xsw.xml?srsid={show_id:d}#{season_id:d};{season:d};{episode:d}'
     download_link = '/xthru/getsub/{:d}'
     subtitle_class = XSubsSubtitle
@@ -147,7 +148,7 @@ class XSubsProvider(Provider):
         return show_ids

-    def get_show_id(self, series_names, year=None, country_code=None):
+    def get_show_id(self, series_names, year=None):
         series_sanitized_names = []
         for name in series_names:
             sanitized_name = sanitize(name)
@@ -160,14 +161,8 @@ class XSubsProvider(Provider):
         show_id = None
         for series_sanitized in series_sanitized_names:
-            # attempt with country
-            if not show_id and country_code:
-                logger.debug('Getting show id with country')
-                show_id = show_ids.get('{series} {country}'.format(series=series_sanitized,
-                                                                    country=country_code.lower()))
-
             # attempt with year
-            if not show_id and year:
+            if year:
                 logger.debug('Getting show id with year')
                 show_id = show_ids.get('{series} {year:d}'.format(series=series_sanitized, year=year))
@@ -198,7 +193,7 @@ class XSubsProvider(Provider):
         soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])

-        series_title = soup.find('name').text
+        series = soup.find('name').text

         # loop over season rows
         seasons = soup.findAll('series_group')
@@ -230,35 +225,40 @@ class XSubsProvider(Provider):
         subtitles = []
         # loop over episode rows
-        for episode in soup.findAll('subg'):
+        for subtitle_group in soup.findAll('subg'):
             # read the episode info
-            etitle = episode.find('etitle')
-            if etitle is None:
+            episode_info = subtitle_group.find('etitle')
+            if episode_info is None:
                 continue

-            episode_num = int(etitle['number'].split('-')[0])
+            episodes = []
+            episode_match = episode_re.match(episode_info['number'])
+            if episode_match:
+                episodes = [int(e) for e in [episode_match.group(1), episode_match.group(3)] if e]

-            sgt = episode.find('sgt')
-            if sgt is None:
+            subtitle_info = subtitle_group.find('sgt')
+            if subtitle_info is None:
                 continue

-            season_num = int(sgt['ssnnum'])
+            season = int(subtitle_info['ssnnum'])
+            episode_id = int(subtitle_info['epsid'])

             # filter out unreleased subtitles
-            for subtitle in episode.findAll('sr'):
-                if subtitle['published_on'] == '':
+            for subs_tag in subtitle_group.findAll('sr'):
+                if subs_tag['published_on'] == '':
                     continue

                 page_link = self.server_url + self.page_link.format(show_id=show_id, season_id=season_id,
-                                                                     season=season_num)
-                episode_title = etitle['title']
-                version = subtitle.fmt.text + ' ' + subtitle.team.text
-                download_link = self.server_url + self.download_link.format(int(subtitle['rlsid']))
-                subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, series_title, season_num,
-                                               episode_num, year, episode_title, version, download_link)
-                logger.debug('Found subtitle %r', subtitle)
-                subtitles.append(subtitle)
+                                                                     season=season, episode=episode_id)
+                title = episode_info['title']
+                version = subs_tag.fmt.text + ' ' + subs_tag.team.text
+                download_link = self.server_url + self.download_link.format(int(subs_tag['rlsid']))
+                for episode in episodes:
+                    subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, series, season, episode, year,
+                                                   title, version, download_link)
+                    logger.debug('Found subtitle %r', subtitle)
+                    subtitles.append(subtitle)

         return subtitles
