Merge pull request #1212 from SethFalco/consistent-quotes

chore: make quotes in repo consistent
Siddharth Dushantha committed 3 years ago via GitHub
commit b5d0541c7e

@@ -74,19 +74,19 @@ class SherlockFuturesSession(FuturesSession):
         # Make sure that the time measurement hook is first, so we will not
         # track any later hook's execution time.
         try:
-            if isinstance(hooks['response'], list):
-                hooks['response'].insert(0, response_time)
-            elif isinstance(hooks['response'], tuple):
+            if isinstance(hooks["response"], list):
+                hooks["response"].insert(0, response_time)
+            elif isinstance(hooks["response"], tuple):
                 # Convert tuple to list and insert time measurement hook first.
-                hooks['response'] = list(hooks['response'])
-                hooks['response'].insert(0, response_time)
+                hooks["response"] = list(hooks["response"])
+                hooks["response"].insert(0, response_time)
             else:
                 # Must have previously contained a single hook function,
                 # so convert to list.
-                hooks['response'] = [response_time, hooks['response']]
+                hooks["response"] = [response_time, hooks["response"]]
         except KeyError:
             # No response hook was already defined, so install it ourselves.
-            hooks['response'] = [response_time]
+            hooks["response"] = [response_time]

         return super(SherlockFuturesSession, self).request(method,
                                                            url,
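
For anyone unfamiliar with the hook machinery this hunk touches: requests invokes every callable registered under the "response" key of the hooks dict once a response arrives, which is why the code above normalizes hooks["response"] to a list before prepending its timing hook. A minimal standalone sketch of that mechanism, using plain requests and a hypothetical print_time hook of our own:

import requests

def print_time(resp, *args, **kwargs):
    # Every response hook receives the Response object; returning None
    # leaves the response unchanged for any later hooks.
    print(f"{resp.url} took {resp.elapsed.total_seconds():.3f}s")

# hooks["response"] may arrive as a single callable, a list, or a tuple;
# these are exactly the cases the try/except above normalizes.
requests.get("https://example.com", hooks={"response": [print_time]})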
@@ -209,12 +209,12 @@ def sherlock(username, site_data, query_notify,
         results_site = {}

         # Record URL of main site
-        results_site['url_main'] = net_info.get("urlMain")
+        results_site["url_main"] = net_info.get("urlMain")

         # A user agent is needed because some sites don't return the correct
         # information since they think that we are bots (Which we actually are...)
         headers = {
-            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:55.0) Gecko/20100101 Firefox/55.0',
+            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:55.0) Gecko/20100101 Firefox/55.0",
         }

         if "headers" in net_info:
@@ -228,14 +228,14 @@ def sherlock(username, site_data, query_notify,
         regex_check = net_info.get("regexCheck")
         if regex_check and re.search(regex_check, username) is None:
             # No need to do the check at the site: this user name is not allowed.
-            results_site['status'] = QueryResult(username,
+            results_site["status"] = QueryResult(username,
                                                  social_network,
                                                  url,
                                                  QueryStatus.ILLEGAL)
             results_site["url_user"] = ""
-            results_site['http_status'] = ""
-            results_site['response_text'] = ""
-            query_notify.update(results_site['status'])
+            results_site["http_status"] = ""
+            results_site["response_text"] = ""
+            query_notify.update(results_site["status"])
         else:
             # URL of user on site (if it exists)
             results_site["url_user"] = url
@@ -268,7 +268,7 @@ def sherlock(username, site_data, query_notify,
                 url_probe = interpolate_string(url_probe, username)

             if request is None:
-                if net_info["errorType"] == 'status_code':
+                if net_info["errorType"] == "status_code":
                     # In most cases when we are detecting by status code,
                     # it is not necessary to get the entire body: we can
                     # detect fine with just the HEAD response.
@@ -436,11 +436,11 @@ def sherlock(username, site_data, query_notify,
         query_notify.update(result)

         # Save status of request
-        results_site['status'] = result
+        results_site["status"] = result

         # Save results from request
-        results_site['http_status'] = http_status
-        results_site['response_text'] = response_text
+        results_site["http_status"] = http_status
+        results_site["response_text"] = response_text

         # Add this site's results into final dictionary with all of the other results.
         results_total[social_network] = results_site
@@ -510,11 +510,11 @@ def main():
                         help="Create Comma-Separated Values (CSV) File."
                         )
     parser.add_argument("--site",
-                        action="append", metavar='SITE_NAME',
+                        action="append", metavar="SITE_NAME",
                         dest="site_list", default=None,
                         help="Limit analysis to just the listed sites. Add multiple options to specify more than one site."
                         )
-    parser.add_argument("--proxy", "-p", metavar='PROXY_URL',
+    parser.add_argument("--proxy", "-p", metavar="PROXY_URL",
                         action="store", dest="proxy", default=None,
                         help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080"
                         )
@@ -522,7 +522,7 @@ def main():
                         dest="json_file", default=None,
                         help="Load data from a JSON file or an online, valid, JSON file.")
     parser.add_argument("--timeout",
-                        action="store", metavar='TIMEOUT',
+                        action="store", metavar="TIMEOUT",
                         dest="timeout", type=timeout_check, default=None,
                         help="Time (in seconds) to wait for response to requests. "
                              "Default timeout is infinity. "
@@ -542,7 +542,7 @@ def main():
                         help="Don't color terminal output"
                         )
     parser.add_argument("username",
-                        nargs='+', metavar='USERNAMES',
+                        nargs="+", metavar="USERNAMES",
                         action="store",
                         help="One or more usernames to check with social networks."
                         )
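
A side note on the nargs="+" change above: argparse collects one or more positional values into a list, so several usernames can be checked in one run. A tiny self-contained sketch (this parser is a stand-in, not Sherlock's real one):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("username",
                    nargs="+", metavar="USERNAMES",
                    action="store",
                    help="One or more usernames to check with social networks.")

# nargs="+" requires at least one value and always stores a list.
args = parser.parse_args(["alice", "bob"])
print(args.username)  # ['alice', 'bob']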
@@ -598,7 +598,7 @@ def main():
     # Create object with all information about sites we are aware of.
     try:
         if args.local:
-            sites = SitesInformation(os.path.join(os.path.dirname(__file__), 'resources/data.json'))
+            sites = SitesInformation(os.path.join(os.path.dirname(__file__), "resources/data.json"))
         else:
             sites = SitesInformation(args.json_file)
     except Exception as error:
@@ -682,25 +682,25 @@ def main():
             with open(result_file, "w", newline='', encoding="utf-8") as csv_report:
                 writer = csv.writer(csv_report)
-                writer.writerow(['username',
-                                 'name',
-                                 'url_main',
-                                 'url_user',
-                                 'exists',
-                                 'http_status',
-                                 'response_time_s'
+                writer.writerow(["username",
+                                 "name",
+                                 "url_main",
+                                 "url_user",
+                                 "exists",
+                                 "http_status",
+                                 "response_time_s"
                                  ]
                                 )
                 for site in results:
-                    response_time_s = results[site]['status'].query_time
+                    response_time_s = results[site]["status"].query_time
                     if response_time_s is None:
                         response_time_s = ""
                     writer.writerow([username,
                                      site,
-                                     results[site]['url_main'],
-                                     results[site]['url_user'],
-                                     str(results[site]['status'].status),
-                                     results[site]['http_status'],
+                                     results[site]["url_main"],
+                                     results[site]["url_user"],
+                                     str(results[site]["status"].status),
+                                     results[site]["http_status"],
                                      response_time_s
                                      ]
                                     )
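
For completeness, the CSV written above round-trips cleanly with the standard library. A short usage sketch, assuming the report was saved as alice.csv (the filename is illustrative):

import csv

with open("alice.csv", newline="", encoding="utf-8") as f:
    for row in csv.DictReader(f):
        # Keys match the header row written by the code above.
        print(row["username"], row["url_user"], row["exists"], row["http_status"])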

@@ -21,7 +21,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
-        site = 'BinarySearch'
+        site = "BinarySearch"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
@@ -48,7 +48,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
-        site = 'BinarySearch'
+        site = "BinarySearch"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
@@ -75,7 +75,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
-        site = 'Pinterest'
+        site = "Pinterest"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
@@ -102,7 +102,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
-        site = 'Pinterest'
+        site = "Pinterest"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
@@ -129,7 +129,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
-        site = 'VK'
+        site = "VK"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
@@ -156,7 +156,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
-        site = 'VK'
+        site = "VK"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.

@@ -68,7 +68,7 @@ class SherlockBaseTest(unittest.TestCase):
                           should be filtered.

         Return Value:
-        Dictionary containing sub-set of site data specified by 'site_list'.
+        Dictionary containing sub-set of site data specified by "site_list".
         """

         # Create new dictionary that has filtered site data based on input.
@@ -126,7 +126,7 @@ class SherlockBaseTest(unittest.TestCase):
                                   ):
                 if (
                         (self.skip_error_sites == True) and
-                        (result['status'].status == QueryStatus.UNKNOWN)
+                        (result["status"].status == QueryStatus.UNKNOWN)
                 ):
                     #Some error connecting to site.
                     self.skipTest(f"Skipping Username '{username}' "
@@ -135,7 +135,7 @@ class SherlockBaseTest(unittest.TestCase):
                                   )

                 self.assertEqual(exist_result_desired,
-                                 result['status'].status)
+                                 result["status"].status)

         return

@@ -12,19 +12,19 @@ with open("sherlock/resources/data.json", "r", encoding="utf-8") as data_file:
 with open("sites.md", "w") as site_file:
     data_length = len(data)
-    site_file.write(f'## List Of Supported Sites ({data_length} Sites In Total!)\n')
+    site_file.write(f"## List Of Supported Sites ({data_length} Sites In Total!)\n")

     for social_network in data:
         url_main = data.get(social_network).get("urlMain")
         pool.append((social_network, url_main))

     for social_network, url_main in pool:
-        site_file.write(f'1. [{social_network}]({url_main})\n')
+        site_file.write(f"1. [{social_network}]({url_main})\n")

 sorted_json_data = json.dumps(data, indent=2, sort_keys=True)

 with open("sherlock/resources/data.json", "w") as data_file:
     data_file.write(sorted_json_data)
-    data_file.write('\n')
+    data_file.write("\n")

 print("Finished updating supported site listing!")
