diff --git a/sherlock/sherlock.py b/sherlock/sherlock.py
index fa9f4d9..e29a56a 100644
--- a/sherlock/sherlock.py
+++ b/sherlock/sherlock.py
@@ -74,19 +74,19 @@ class SherlockFuturesSession(FuturesSession):
         # Make sure that the time measurement hook is first, so we will not
         # track any later hook's execution time.
         try:
-            if isinstance(hooks['response'], list):
-                hooks['response'].insert(0, response_time)
-            elif isinstance(hooks['response'], tuple):
+            if isinstance(hooks["response"], list):
+                hooks["response"].insert(0, response_time)
+            elif isinstance(hooks["response"], tuple):
                 # Convert tuple to list and insert time measurement hook first.
-                hooks['response'] = list(hooks['response'])
-                hooks['response'].insert(0, response_time)
+                hooks["response"] = list(hooks["response"])
+                hooks["response"].insert(0, response_time)
             else:
                 # Must have previously contained a single hook function,
                 # so convert to list.
-                hooks['response'] = [response_time, hooks['response']]
+                hooks["response"] = [response_time, hooks["response"]]
         except KeyError:
             # No response hook was already defined, so install it ourselves.
-            hooks['response'] = [response_time]
+            hooks["response"] = [response_time]
 
         return super(SherlockFuturesSession, self).request(method,
                                                            url,
@@ -209,12 +209,12 @@ def sherlock(username, site_data, query_notify,
         results_site = {}
 
         # Record URL of main site
-        results_site['url_main'] = net_info.get("urlMain")
+        results_site["url_main"] = net_info.get("urlMain")
 
         # A user agent is needed because some sites don't return the correct
         # information since they think that we are bots (Which we actually are...)
         headers = {
-            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:55.0) Gecko/20100101 Firefox/55.0',
+            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:55.0) Gecko/20100101 Firefox/55.0",
         }
 
         if "headers" in net_info:
@@ -228,14 +228,14 @@
         regex_check = net_info.get("regexCheck")
         if regex_check and re.search(regex_check, username) is None:
             # No need to do the check at the site: this user name is not allowed.
-            results_site['status'] = QueryResult(username,
+            results_site["status"] = QueryResult(username,
                                                  social_network,
                                                  url,
                                                  QueryStatus.ILLEGAL)
             results_site["url_user"] = ""
-            results_site['http_status'] = ""
-            results_site['response_text'] = ""
-            query_notify.update(results_site['status'])
+            results_site["http_status"] = ""
+            results_site["response_text"] = ""
+            query_notify.update(results_site["status"])
         else:
             # URL of user on site (if it exists)
             results_site["url_user"] = url
@@ -268,7 +268,7 @@ def sherlock(username, site_data, query_notify,
                 url_probe = interpolate_string(url_probe, username)
 
             if request is None:
-                if net_info["errorType"] == 'status_code':
+                if net_info["errorType"] == "status_code":
                     # In most cases when we are detecting by status code,
                     # it is not necessary to get the entire body: we can
                     # detect fine with just the HEAD response.
@@ -436,11 +436,11 @@ def sherlock(username, site_data, query_notify,
         query_notify.update(result)
 
         # Save status of request
-        results_site['status'] = result
+        results_site["status"] = result
 
         # Save results from request
-        results_site['http_status'] = http_status
-        results_site['response_text'] = response_text
+        results_site["http_status"] = http_status
+        results_site["response_text"] = response_text
 
         # Add this site's results into final dictionary with all of the other results.
         results_total[social_network] = results_site
@@ -510,11 +510,11 @@ def main():
                         help="Create Comma-Separated Values (CSV) File."
                         )
     parser.add_argument("--site",
-                        action="append", metavar='SITE_NAME',
+                        action="append", metavar="SITE_NAME",
                         dest="site_list", default=None,
                         help="Limit analysis to just the listed sites. Add multiple options to specify more than one site."
                         )
-    parser.add_argument("--proxy", "-p", metavar='PROXY_URL',
+    parser.add_argument("--proxy", "-p", metavar="PROXY_URL",
                         action="store", dest="proxy", default=None,
                         help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080"
                         )
@@ -522,7 +522,7 @@ def main():
                         dest="json_file", default=None,
                         help="Load data from a JSON file or an online, valid, JSON file.")
     parser.add_argument("--timeout",
-                        action="store", metavar='TIMEOUT',
+                        action="store", metavar="TIMEOUT",
                         dest="timeout", type=timeout_check, default=None,
                         help="Time (in seconds) to wait for response to requests. "
                              "Default timeout is infinity. "
@@ -542,7 +542,7 @@ def main():
                         help="Don't color terminal output"
                         )
     parser.add_argument("username",
-                        nargs='+', metavar='USERNAMES',
+                        nargs="+", metavar="USERNAMES",
                         action="store",
                         help="One or more usernames to check with social networks."
                         )
@@ -598,7 +598,7 @@ def main():
     # Create object with all information about sites we are aware of.
     try:
         if args.local:
-            sites = SitesInformation(os.path.join(os.path.dirname(__file__), 'resources/data.json'))
+            sites = SitesInformation(os.path.join(os.path.dirname(__file__), "resources/data.json"))
         else:
             sites = SitesInformation(args.json_file)
     except Exception as error:
@@ -682,25 +682,25 @@ def main():
             with open(result_file, "w", newline='', encoding="utf-8") as csv_report:
                 writer = csv.writer(csv_report)
-                writer.writerow(['username',
-                                 'name',
-                                 'url_main',
-                                 'url_user',
-                                 'exists',
-                                 'http_status',
-                                 'response_time_s'
+                writer.writerow(["username",
+                                 "name",
+                                 "url_main",
+                                 "url_user",
+                                 "exists",
+                                 "http_status",
+                                 "response_time_s"
                                  ]
                                 )
                 for site in results:
-                    response_time_s = results[site]['status'].query_time
+                    response_time_s = results[site]["status"].query_time
                     if response_time_s is None:
                         response_time_s = ""
                     writer.writerow([username,
                                      site,
-                                     results[site]['url_main'],
-                                     results[site]['url_user'],
-                                     str(results[site]['status'].status),
-                                     results[site]['http_status'],
+                                     results[site]["url_main"],
+                                     results[site]["url_user"],
+                                     str(results[site]["status"].status),
+                                     results[site]["http_status"],
                                      response_time_s
                                      ]
                                     )
diff --git a/sherlock/tests/all.py b/sherlock/tests/all.py
index 3299c49..dc4e9d5 100644
--- a/sherlock/tests/all.py
+++ b/sherlock/tests/all.py
@@ -21,7 +21,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
 
-        site = 'BinarySearch'
+        site = "BinarySearch"
         site_data = self.site_data_all[site]
 
         #Ensure that the site's detection method has not changed.
@@ -48,7 +48,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
 
-        site = 'BinarySearch'
+        site = "BinarySearch"
         site_data = self.site_data_all[site]
 
         #Ensure that the site's detection method has not changed.
@@ -75,7 +75,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
 
-        site = 'Pinterest'
+        site = "Pinterest"
         site_data = self.site_data_all[site]
 
         #Ensure that the site's detection method has not changed.
@@ -102,7 +102,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
 
-        site = 'Pinterest'
+        site = "Pinterest"
         site_data = self.site_data_all[site]
 
         #Ensure that the site's detection method has not changed.
@@ -129,7 +129,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
 
-        site = 'VK'
+        site = "VK"
         site_data = self.site_data_all[site]
 
         #Ensure that the site's detection method has not changed.
@@ -156,7 +156,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
 
-        site = 'VK'
+        site = "VK"
         site_data = self.site_data_all[site]
 
         #Ensure that the site's detection method has not changed.
diff --git a/sherlock/tests/base.py b/sherlock/tests/base.py
index 20605d2..6637249 100644
--- a/sherlock/tests/base.py
+++ b/sherlock/tests/base.py
@@ -68,7 +68,7 @@ class SherlockBaseTest(unittest.TestCase):
                       should be filtered.
 
         Return Value:
-        Dictionary containing sub-set of site data specified by 'site_list'.
+        Dictionary containing sub-set of site data specified by "site_list".
         """
 
         # Create new dictionary that has filtered site data based on input.
@@ -126,7 +126,7 @@ class SherlockBaseTest(unittest.TestCase):
                                 ):
                 if (
                      (self.skip_error_sites == True) and
-                     (result['status'].status == QueryStatus.UNKNOWN)
+                     (result["status"].status == QueryStatus.UNKNOWN)
                    ):
                     #Some error connecting to site.
                     self.skipTest(f"Skipping Username '{username}' "
@@ -135,7 +135,7 @@ class SherlockBaseTest(unittest.TestCase):
                                  )
 
                 self.assertEqual(exist_result_desired,
-                                 result['status'].status)
+                                 result["status"].status)
 
         return
diff --git a/site_list.py b/site_list.py
index a23824a..339f729 100644
--- a/site_list.py
+++ b/site_list.py
@@ -12,19 +12,19 @@ with open("sherlock/resources/data.json", "r", encoding="utf-8") as data_file:
 
 with open("sites.md", "w") as site_file:
     data_length = len(data)
-    site_file.write(f'## List Of Supported Sites ({data_length} Sites In Total!)\n')
+    site_file.write(f"## List Of Supported Sites ({data_length} Sites In Total!)\n")
 
     for social_network in data:
         url_main = data.get(social_network).get("urlMain")
         pool.append((social_network, url_main))
 
     for social_network, url_main in pool:
-        site_file.write(f'1. [{social_network}]({url_main})\n')
+        site_file.write(f"1. [{social_network}]({url_main})\n")
 
 sorted_json_data = json.dumps(data, indent=2, sort_keys=True)
 
 with open("sherlock/resources/data.json", "w") as data_file:
     data_file.write(sorted_json_data)
-    data_file.write('\n')
+    data_file.write("\n")
 
 print("Finished updating supported site listing!")
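Every hunk in this patch is a single-quote to double-quote conversion; no behaviour changes. For reviewers skimming the first hunk, the surrounding logic in `SherlockFuturesSession.request` normalizes `hooks["response"]` so the timing hook always runs before any caller-supplied `requests` response hooks. Below is a minimal standalone sketch of that pattern; the helper name `ensure_timing_hook_first` is invented for illustration and does not exist in the codebase.

```python
# Illustrative sketch only -- not part of the patch. It mirrors the hook
# normalization in SherlockFuturesSession.request: hooks["response"] may be
# a list, a tuple, a single callable, or absent, and the timing hook must
# end up first in all cases.
def ensure_timing_hook_first(hooks, response_time):
    try:
        if isinstance(hooks["response"], list):
            hooks["response"].insert(0, response_time)
        elif isinstance(hooks["response"], tuple):
            # Tuples are immutable, so rebuild as a list with the timing hook first.
            hooks["response"] = [response_time, *hooks["response"]]
        else:
            # A single hook function was supplied; wrap it in a list.
            hooks["response"] = [response_time, hooks["response"]]
    except KeyError:
        # No response hook was registered at all.
        hooks["response"] = [response_time]
    return hooks


# Example usage with dummy hooks:
user_hook = lambda resp, *args, **kwargs: resp
timing_hook = lambda resp, *args, **kwargs: resp
hooks = {"response": user_hook}
assert ensure_timing_hook_first(hooks, timing_hook)["response"] == [timing_hook, user_hook]
```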