pull/1874/merge
AgentXMan 8 months ago committed by GitHub
commit f0d4017713
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -40,7 +40,7 @@ $ python3 -m pip install -r requirements.txt
```console
$ python3 sherlock --help
usage: sherlock [-h] [--version] [--verbose] [--folderoutput FOLDEROUTPUT]
[--output OUTPUT] [--tor] [--unique-tor] [--csv]
[--output OUTPUT] [--tor] [--unique-tor] [--csv] [--txt]
[--site SITE_NAME] [--proxy PROXY_URL] [--json JSON_FILE]
[--timeout TIMEOUT] [--print-all] [--print-found] [--no-color]
[--browse] [--local] [--nsfw]
@ -71,6 +71,7 @@ optional arguments:
--csv Create Comma-Separated Values (CSV) File.
--xlsx Create the standard file for the modern Microsoft Excel
                        spreadsheet (xlsx).
  --txt                Create .txt file.
--site SITE_NAME Limit analysis to just the listed sites. Add multiple options to
specify more than one site.
--proxy PROXY_URL, -p PROXY_URL

@ -517,8 +517,12 @@ def main():
)
parser.add_argument("--xlsx",
action="store_true", dest="xlsx", default=False,
help="Create the standard file for the modern Microsoft Excel spreadsheet (xslx)."
)
help="Create the standard file for the modern Microsoft Excel spreadsheet (xslx).")
parser.add_argument("--txt",
action="store_true", dest="txt", default=False,
help="Create .txt file.")
parser.add_argument("--site",
action="append", metavar="SITE_NAME",
dest="site_list", default=None,
@ -694,15 +698,19 @@ def main():
else:
result_file = f"{username}.txt"
with open(result_file, "w", encoding="utf-8") as file:
exists_counter = 0
for website_name in results:
dictionary = results[website_name]
if dictionary.get("status").status == QueryStatus.CLAIMED:
exists_counter += 1
file.write(dictionary["url_user"] + "\n")
file.write(
f"Total Websites Username Detected On : {exists_counter}\n")
if args.txt:
try:
with open(result_file, "w", encoding="utf-8") as file:
exists_counter = 0
for website_name in results:
dictionary = results[website_name]
if dictionary.get("status").status == QueryStatus.CLAIMED:
exists_counter += 1
file.write(dictionary["url_user"] + "\n")
file.write(
f"Total Websites Username Detected On : {exists_counter}\n")
except IOError as e:
print("An error occured. File could not be made or written.", e)
if args.csv:
result_file = f"{username}.csv"
@ -763,7 +771,9 @@ def main():
exists.append(str(results[site]["status"].status))
http_status.append(results[site]["http_status"])
DataFrame = pd.DataFrame({"username": usernames, "name": names, "url_main": url_main, "url_user": url_user, "exists": exists, "http_status": http_status, "response_time_s": response_time_s})
DataFrame = pd.DataFrame(
{"username": usernames, "name": names, "url_main": url_main, "url_user": url_user, "exists": exists,
"http_status": http_status, "response_time_s": response_time_s})
DataFrame.to_excel(f'{username}.xlsx', sheet_name='sheet1', index=False)
print()

Loading…
Cancel
Save