Merge remote-tracking branch 'pr/fix/timeout-requests'

pull/442/head^2
Christopher K. Hoadley 5 years ago
commit 09d3a1b042

3
.gitignore vendored

@ -22,5 +22,8 @@ src/
# Comma-Separated Values (CSV) Reports # Comma-Separated Values (CSV) Reports
*.csv *.csv
# Excluded sites list
tests/.excluded_sites
# MacOS Folder Metadata File # MacOS Folder Metadata File
.DS_Store .DS_Store

@ -197,6 +197,10 @@ Note that we do currently have 100% test coverage. Unfortunately, some of
the sites that Sherlock checks are not always reliable, so it is common the sites that Sherlock checks are not always reliable, so it is common
to get response errors. to get response errors.
If some sites are failing due to connection problems (site is down, in maintenance, etc.)
you can exclude them from tests by creating a `tests/.excluded_sites` file with a
list of sites to ignore (one site name per line).
## Stargazers over time ## Stargazers over time
[![Stargazers over time](https://starcharts.herokuapp.com/TheYahya/sherlock.svg)](https://starcharts.herokuapp.com/TheYahya/sherlock) [![Stargazers over time](https://starcharts.herokuapp.com/TheYahya/sherlock.svg)](https://starcharts.herokuapp.com/TheYahya/sherlock)

@ -557,7 +557,7 @@
"url": "https://flipboard.com/@{}", "url": "https://flipboard.com/@{}",
"urlMain": "https://flipboard.com/", "urlMain": "https://flipboard.com/",
"username_claimed": "blue", "username_claimed": "blue",
"username_unclaimed": "xgtrq" "username_unclaimed": "noonewould"
}, },
"FortniteTracker": { "FortniteTracker": {
"errorType": "status_code", "errorType": "status_code",

@ -30,9 +30,10 @@ __version__ = "0.9.12"
amount = 0 amount = 0
global proxy_list global proxy_list, timeout_limit
proxy_list = [] proxy_list = []
timeout_limit = 10
class ElapsedFuturesSession(FuturesSession): class ElapsedFuturesSession(FuturesSession):
""" """
@ -246,11 +247,13 @@ def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, pr
proxies = {"http": proxy, "https": proxy} proxies = {"http": proxy, "https": proxy}
future = request_method(url=url_probe, headers=headers, future = request_method(url=url_probe, headers=headers,
proxies=proxies, proxies=proxies,
allow_redirects=allow_redirects allow_redirects=allow_redirects,
timeout=timeout_limit
) )
else: else:
future = request_method(url=url_probe, headers=headers, future = request_method(url=url_probe, headers=headers,
allow_redirects=allow_redirects allow_redirects=allow_redirects,
timeout=timeout_limit
) )
# Store future in data for access later # Store future in data for access later

@ -32,6 +32,14 @@ class SherlockBaseTest(unittest.TestCase):
with open(data_file_path, "r", encoding="utf-8") as raw: with open(data_file_path, "r", encoding="utf-8") as raw:
self.site_data_all = json.load(raw) self.site_data_all = json.load(raw)
# Load excluded sites list, if any
excluded_sites_path = os.path.join(os.path.dirname(os.path.realpath(sherlock.__file__)), "tests/.excluded_sites")
try:
with open(excluded_sites_path, "r", encoding="utf-8") as excluded_sites_file:
self.excluded_sites = excluded_sites_file.read().splitlines()
except FileNotFoundError:
self.excluded_sites = []
self.verbose=False self.verbose=False
self.tor=False self.tor=False
self.unique_tor=False self.unique_tor=False
@ -134,6 +142,7 @@ class SherlockBaseTest(unittest.TestCase):
for site, site_data in self.site_data_all.items(): for site, site_data in self.site_data_all.items():
if ( if (
(site in self.excluded_sites) or
(site_data["errorType"] != detect_type) or (site_data["errorType"] != detect_type) or
(site_data.get("username_claimed") is None) or (site_data.get("username_claimed") is None) or
(site_data.get("username_unclaimed") is None) (site_data.get("username_unclaimed") is None)

Loading…
Cancel
Save