Fixed Zimuku provider to bypass yunsuo protection

pull/1936/head v1.1.0
VisoTC 2 years ago committed by GitHub
parent ced1736ed5
commit b9ba99e189
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -88,7 +88,7 @@ class ZimukuProvider(Provider):
logger.info(str(supported_languages))
server_url = "http://zimuku.org"
search_url = "/search?q={}"
search_url = "/search?q={}&vertoken={}"
download_url = "http://zimuku.org/"
subtitle_class = ZimukuSubtitle
@ -96,6 +96,39 @@ class ZimukuProvider(Provider):
def __init__(self):
    """Create the provider without an HTTP session.

    The session is created lazily in ``initialize()`` (which also sets a
    random User-Agent), so construction itself performs no I/O.
    """
    self.session = None
def stringToHex(self, s):
    """Return the concatenated lowercase hex codepoints of *s*.

    Each character becomes ``hex(ord(ch))`` without the ``0x`` prefix,
    e.g. ``"ab"`` -> ``"6162"``.  This mirrors the browser-side
    ``stringToHex()`` that the site's yunsuo anti-bot JavaScript applies
    to the current URL and the screen size before setting its
    verification cookies (see ``yunsuo_bypass``).

    :param str s: text to encode (empty string yields ``""``).
    :return: hex-encoded string.
    :rtype: str
    """
    # ''.join over a generator avoids the quadratic cost of the
    # original repeated string concatenation; format(..., 'x') is the
    # prefix-free equivalent of hex(...)[2:].
    return "".join(format(ord(ch), "x") for ch in s)
# Cached anti-bot form token, scraped from a page's hidden
# <input name="vertoken"> by yunsuo_bypass(); it is interpolated into
# the search URL when querying.
vertoken = ""
# Matches the JavaScript redirect embedded in the yunsuo protection's
# 404 guard page:
#     self.location = "<verify-path>" + stringToHex(screendate)
# Group 1 captures the verification path that must be requested (with a
# hex-encoded screen size appended) to obtain the security cookie.
location_re = re.compile(
    r'self\.location = "(.*)" \+ stringToHex\(screendate\)')
def yunsuo_bypass(self, url, *args, **kwargs):
    """GET *url*, transparently solving the site's yunsuo anti-bot check.

    The protection serves a 404 page containing a JavaScript redirect
    (see ``location_re``).  When that is detected, this method mimics
    what the browser-side script would do: it stores the hex-encoded
    original URL in the ``srcurl`` cookie, requests the verification
    endpoint with a fake screen size, and retries the original URL.
    On a successful (non-guarded) response it also opportunistically
    refreshes ``self.vertoken`` from the page's hidden
    ``<input name="vertoken">`` field, if present.

    :param str url: URL to fetch via ``self.session``.
    :param args: positional arguments forwarded to ``Session.get``.
    :param kwargs: keyword arguments forwarded to ``Session.get``.
    :return: the final :class:`requests.Response` once past the guard.

    NOTE(review): the retry loop is unbounded — if the guard page keeps
    being served, this never returns; consider a retry cap.
    """
    i = -1  # retry counter; incremented each attempt but otherwise unused
    while True:
        i += 1
        r = self.session.get(url, *args, **kwargs)
        if(r.status_code == 404):
            # Guard page: extract the verification path from the
            # embedded JS redirect.
            tr = self.location_re.findall(r.text)
            # The real JS sets srcurl to the hex-encoded current URL so
            # the server knows where to send the client afterwards.
            self.session.cookies.set("srcurl", self.stringToHex(r.url))
            if(tr):
                # Request the verification endpoint with a fabricated
                # screen size ("1080,1920"), without following the
                # redirect, so the server issues the security cookie.
                verify_resp = self.session.get(
                    self.server_url+tr[0]+self.stringToHex("1080,1920"), allow_redirects=False)
                if(verify_resp.status_code == 302 and self.session.cookies.get("security_session_verify") != None):
                    # Cookie acquired; the retry below should now pass.
                    pass
            continue  # retry the original URL with the new cookies
        if len(self.location_re.findall(r.text)) == 0:
            # No redirect script left: this is the real page.  Refresh
            # the cached vertoken if the page carries one.
            if(r.headers.get("Content-Type") == "text/html; charset=utf-8"):
                v = ParserBeautifulSoup(
                    r.content.decode("utf-8", "ignore"), ["html.parser"]
                ).find(
                    "input", attrs={'name': 'vertoken'})
                if(v):
                    self.vertoken = v.get("value")
            return r
def initialize(self):
self.session = Session()
self.session.headers["User-Agent"] = AGENT_LIST[randint(0, len(AGENT_LIST) - 1)]
@ -104,7 +137,7 @@ class ZimukuProvider(Provider):
self.session.close()
def _parse_episode_page(self, link, year):
r = self.session.get(link)
r = self.yunsuo_bypass(link)
bs_obj = ParserBeautifulSoup(
r.content.decode("utf-8", "ignore"), ["html.parser"]
)
@ -144,6 +177,8 @@ class ZimukuProvider(Provider):
return subs
def query(self, keyword, season=None, episode=None, year=None):
if self.vertoken == "":
self.yunsuo_bypass(self.server_url + '/')
params = keyword
if season:
params += ".S{season:02d}".format(season=season)
@ -152,9 +187,9 @@ class ZimukuProvider(Provider):
logger.debug("Searching subtitles %r", params)
subtitles = []
search_link = self.server_url + text_type(self.search_url).format(params)
search_link = self.server_url + text_type(self.search_url).format(params, self.vertoken)
r = self.session.get(search_link, timeout=30)
r = self.yunsuo_bypass(search_link, timeout=30)
r.raise_for_status()
if not r.content:
@ -169,7 +204,7 @@ class ZimukuProvider(Provider):
while parts:
parts.reverse()
redirect_url = urljoin(self.server_url, "".join(parts))
r = self.session.get(redirect_url, timeout=30)
r = self.query_resp(redirect_url, timeout=30)
html = r.content.decode("utf-8", "ignore")
parts = re.findall(pattern, html)
logger.debug("search url located: " + redirect_url)
@ -238,14 +273,14 @@ class ZimukuProvider(Provider):
return subtitles
def download_subtitle(self, subtitle):
def _get_archive_dowload_link(session, sub_page_link):
r = session.get(sub_page_link)
def _get_archive_dowload_link(yunsuopass, sub_page_link):
r = yunsuopass(sub_page_link)
bs_obj = ParserBeautifulSoup(
r.content.decode("utf-8", "ignore"), ["html.parser"]
)
down_page_link = bs_obj.find("a", {"id": "down1"}).attrs["href"]
down_page_link = urljoin(sub_page_link, down_page_link)
r = session.get(down_page_link)
r = yunsuopass(down_page_link)
bs_obj = ParserBeautifulSoup(
r.content.decode("utf-8", "ignore"), ["html.parser"]
)
@ -257,8 +292,8 @@ class ZimukuProvider(Provider):
# download the subtitle
logger.info("Downloading subtitle %r", subtitle)
self.session = subtitle.session
download_link = _get_archive_dowload_link(self.session, subtitle.page_link)
r = self.session.get(download_link, headers={'Referer': subtitle.page_link}, timeout=30)
download_link = _get_archive_dowload_link(self.yunsuo_bypass, subtitle.page_link)
r = self.yunsuo_bypass(download_link, headers={'Referer': subtitle.page_link}, timeout=30)
r.raise_for_status()
try:
filename = r.headers["Content-Disposition"]

Loading…
Cancel
Save