#! /usr/bin/env python3

"""
Sherlock: Find Usernames Across Social Networks Module

This module contains the main logic to search for usernames at social
networks.
"""

import sys

try:
    from sherlock.__init__ import import_error_test_var  # noqa: F401
except ImportError:
    print("Did you run Sherlock with `python3 sherlock/sherlock.py ...`?")
    print("This is an outdated method. Please see https://sherlockproject.xyz/installation for up to date instructions.")
    sys.exit(1)
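
# Supported invocation after a package install (illustrative; see the URL
# above for up-to-date instructions). For example, installing via
# `pipx install sherlock-project` provides a `sherlock` console script:
#     sherlock <username>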

import csv
import signal
import pandas as pd
import os
import re
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from time import monotonic

import requests

# Removing __version__ here will trigger update message for users
# Do not remove until ready to trigger that message
# When removed, also remove all the noqa: E402 comments for linting
__version__ = "0.14.4"
del __version__

from sherlock_project.__init__ import (  # noqa: E402
    __longname__,
    __version__
)

from requests_futures.sessions import FuturesSession  # noqa: E402
from torrequest import TorRequest  # noqa: E402
from sherlock_project.result import QueryStatus  # noqa: E402
from sherlock_project.result import QueryResult  # noqa: E402
from sherlock_project.notify import QueryNotify  # noqa: E402
from sherlock_project.notify import QueryNotifyPrint  # noqa: E402
from sherlock_project.sites import SitesInformation  # noqa: E402
from colorama import init  # noqa: E402
from argparse import ArgumentTypeError  # noqa: E402


class SherlockFuturesSession(FuturesSession):
    def request(self, method, url, hooks=None, *args, **kwargs):
        """Request URL.

        This extends the FuturesSession request method to attach a response
        time metric to each request.

        It is taken (almost) directly from the requests-futures documentation:
        https://github.com/ross/requests-futures#working-in-the-background

        Keyword Arguments:
        self                   -- This object.
        method                 -- String containing method desired for request.
        url                    -- String containing URL for request.
        hooks                  -- Dictionary containing hooks to execute after
                                  request finishes.
        args                   -- Arguments.
        kwargs                 -- Keyword arguments.

        Return Value:
        Future object representing the pending request.
        """
        # Record the start time for the request.
        if hooks is None:
            hooks = {}
        start = monotonic()

        def response_time(resp, *args, **kwargs):
            """Response Time Hook.

            Keyword Arguments:
            resp                   -- Response object.
            args                   -- Arguments.
            kwargs                 -- Keyword arguments.

            Return Value:
            Nothing.
            """
            resp.elapsed = monotonic() - start

            return

        # Install hook to execute when response completes.
        # Make sure that the time measurement hook is first, so we will not
        # track any later hook's execution time.
        try:
            if isinstance(hooks["response"], list):
                hooks["response"].insert(0, response_time)
            elif isinstance(hooks["response"], tuple):
                # Convert tuple to list and insert time measurement hook first.
                hooks["response"] = list(hooks["response"])
                hooks["response"].insert(0, response_time)
            else:
                # Must have previously contained a single hook function,
                # so convert to list.
                hooks["response"] = [response_time, hooks["response"]]
        except KeyError:
            # No response hook was already defined, so install it ourselves.
            hooks["response"] = [response_time]

        return super(SherlockFuturesSession, self).request(
            method, url, hooks=hooks, *args, **kwargs
        )
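

# Illustrative sketch (not executed): how the response-time hook surfaces to
# callers. SherlockFuturesSession is a drop-in replacement for FuturesSession;
# the hook installed in request() above overwrites `resp.elapsed` with the
# measured monotonic seconds. The URL below is a placeholder.
#
#     session = SherlockFuturesSession(max_workers=1)
#     resp = session.get("https://example.com").result()
#     print(resp.elapsed)  # response time recorded by the hook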


def get_response(request_future, error_type, social_network):
    # Default for Response object if some failure occurs.
    response = None

    error_context = "General Unknown Error"
    exception_text = None
    try:
        response = request_future.result()
        if response.status_code:
            # Status code exists in response object
            error_context = None
    except requests.exceptions.HTTPError as errh:
        error_context = "HTTP Error"
        exception_text = str(errh)
    except requests.exceptions.ProxyError as errp:
        error_context = "Proxy Error"
        exception_text = str(errp)
    except requests.exceptions.ConnectionError as errc:
        error_context = "Error Connecting"
        exception_text = str(errc)
    except requests.exceptions.Timeout as errt:
        error_context = "Timeout Error"
        exception_text = str(errt)
    except requests.exceptions.RequestException as err:
        error_context = "Unknown Error"
        exception_text = str(err)

    return response, error_context, exception_text
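

# Illustrative sketch (not executed): consuming get_response(). The `session`
# and URL are placeholders; error_type and social_network echo how sherlock()
# calls it below.
#
#     future = session.get(url="https://example.com", timeout=60)
#     resp, error_context, exception_text = get_response(
#         request_future=future,
#         error_type="status_code",
#         social_network="ExampleSite",
#     )
#     if error_context is None:
#         print(resp.status_code)  # request completed; inspect the response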


def interpolate_string(input_object, username):
    if isinstance(input_object, str):
        return input_object.replace("{}", username)
    elif isinstance(input_object, dict):
        return {k: interpolate_string(v, username) for k, v in input_object.items()}
    elif isinstance(input_object, list):
        return [interpolate_string(i, username) for i in input_object]
    return input_object
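

# Examples (illustrative, not executed):
#     interpolate_string("https://example.com/{}", "alice")
#         -> "https://example.com/alice"
#     interpolate_string({"user": "{}"}, "alice")   -> {"user": "alice"}
#     interpolate_string(["{}", "static"], "alice") -> ["alice", "static"]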


def check_for_parameter(username):
    """Check if "{?}" occurs in the username; if it does, Sherlock should
    expand it into multiple username variants."""
    return "{?}" in username
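
# Example (illustrative, not executed):
#     check_for_parameter("john{?}doe") -> True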


# Symbols substituted for the "{?}" parameter in usernames.
checksymbols = ["_", "-", "."]


def multiple_usernames(username):
    """Replace the "{?}" parameter with each symbol in checksymbols and
    return the resulting list of usernames."""
    allUsernames = []
    for i in checksymbols:
        allUsernames.append(username.replace("{?}", i))
    return allUsernames
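
# Example (illustrative, not executed):
#     multiple_usernames("john{?}doe")
#         -> ["john_doe", "john-doe", "john.doe"]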


def sherlock(
    username,
    site_data,
    query_notify: QueryNotify,
    tor: bool = False,
    unique_tor: bool = False,
    dump_response: bool = False,
    proxy=None,
    timeout=60,
):
    """Run Sherlock Analysis.

    Checks for existence of username on various social media sites.

    Keyword Arguments:
    username               -- String indicating username that report
                              should be created against.
    site_data              -- Dictionary containing all of the site data.
    query_notify           -- Object with base type of QueryNotify().
                              This will be used to notify the caller about
                              query results.
    tor                    -- Boolean indicating whether to use a Tor circuit for the requests.
    unique_tor             -- Boolean indicating whether to use a new Tor circuit for each request.
    dump_response          -- Boolean indicating whether to dump the HTTP
                              response to stdout for targeted debugging.
    proxy                  -- String indicating the proxy URL.
    timeout                -- Time in seconds to wait before timing out request.
                              Default is 60 seconds.

    Return Value:
    Dictionary containing results from report. Key of dictionary is the name
    of the social network site, and the value is another dictionary with
    the following keys:
        url_main:      URL of main site.
        url_user:      URL of user on site (if account exists).
        status:        QueryResult() object indicating results of test for
                       account existence.
        http_status:   HTTP status code of query which checked for existence on
                       site.
        response_text: Text that came back from request. May be None if
                       there was an HTTP error when checking for existence.
    """

    # Notify caller that we are starting the query.
    query_notify.start(username)

    # Create session based on request methodology
    if tor or unique_tor:
        # Requests using Tor obfuscation
        try:
            underlying_request = TorRequest()
        except OSError:
            print("Tor not found in system path. Unable to continue.\n")
            sys.exit(query_notify.finish())

        underlying_session = underlying_request.session
    else:
        # Normal requests
        underlying_session = requests.session()
        underlying_request = requests.Request()

    # Limit number of workers to 20.
    # This is probably vastly overkill.
    if len(site_data) >= 20:
        max_workers = 20
    else:
        max_workers = len(site_data)

    # Create multi-threaded session for all requests.
    session = SherlockFuturesSession(
        max_workers=max_workers, session=underlying_session
    )

    # Results from analysis of all sites
    results_total = {}

    # First create futures for all requests. This allows the requests to run in parallel.
    for social_network, net_info in site_data.items():
        # Results from analysis of this specific site,
        # including the URL of the main site.
        results_site = {"url_main": net_info.get("urlMain")}

        # A user agent is needed because some sites don't return the correct
        # information since they think that we are bots (which we actually are...)
        headers = {
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/116.0",
        }

        if "headers" in net_info:
            # Override/append any extra headers required by a given site.
            headers.update(net_info["headers"])

        # URL of user on site (if it exists)
        url = interpolate_string(net_info["url"], username.replace(" ", "%20"))

        # Don't make request if username is invalid for the site
        regex_check = net_info.get("regexCheck")
        if regex_check and re.search(regex_check, username) is None:
            # No need to do the check at the site: this username is not allowed.
            results_site["status"] = QueryResult(
                username, social_network, url, QueryStatus.ILLEGAL
            )
            results_site["url_user"] = ""
            results_site["http_status"] = ""
            results_site["response_text"] = ""
            query_notify.update(results_site["status"])
        else:
            # URL of user on site (if it exists)
            results_site["url_user"] = url
            url_probe = net_info.get("urlProbe")
            request_method = net_info.get("request_method")
            request_payload = net_info.get("request_payload")
            request = None

            if request_method is not None:
                if request_method == "GET":
                    request = session.get
                elif request_method == "HEAD":
                    request = session.head
                elif request_method == "POST":
                    request = session.post
                elif request_method == "PUT":
                    request = session.put
                else:
                    raise RuntimeError(f"Unsupported request_method for {url}")

            if request_payload is not None:
                request_payload = interpolate_string(request_payload, username)

            if url_probe is None:
                # Probe URL is normal one seen by people out on the web.
                url_probe = url
            else:
                # There is a special URL for probing existence separate
                # from where the user profile normally can be found.
                url_probe = interpolate_string(url_probe, username)

            if request is None:
                if net_info["errorType"] == "status_code":
                    # In most cases when we are detecting by status code,
                    # it is not necessary to get the entire body: we can
                    # detect fine with just the HEAD response.
                    request = session.head
                else:
                    # Either this detect method needs the content associated
                    # with the GET response, or this specific website will
                    # not respond properly unless we request the whole page.
                    request = session.get

            if net_info["errorType"] == "response_url":
                # Site forwards request to a different URL if username not
                # found. Disallow the redirect so we can capture the
                # http status from the original URL request.
                allow_redirects = False
            else:
                # Allow whatever redirect that the site wants to do.
                # The final result of the request will be what is available.
                allow_redirects = True

            # This future starts running the request in a new thread and
            # doesn't block the main thread.
            if proxy is not None:
                proxies = {"http": proxy, "https": proxy}
                future = request(
                    url=url_probe,
                    headers=headers,
                    proxies=proxies,
                    allow_redirects=allow_redirects,
                    timeout=timeout,
                    json=request_payload,
                )
            else:
                future = request(
                    url=url_probe,
                    headers=headers,
                    allow_redirects=allow_redirects,
                    timeout=timeout,
                    json=request_payload,
                )

            # Store future in data for access later
            net_info["request_future"] = future

            # Reset identity for tor (if needed)
            if unique_tor:
                underlying_request.reset_identity()

        # Add this site's results into final dictionary with all the other results.
        results_total[social_network] = results_site

    # Core logic: if making Tor requests, make them here. If multi-threaded
    # requests, wait for the responses.
    for social_network, net_info in site_data.items():
        # Retrieve results again
        results_site = results_total.get(social_network)

        # Retrieve other site information again
        url = results_site.get("url_user")
        status = results_site.get("status")
        if status is not None:
            # We have already determined the user doesn't exist here
            continue

        # Get the expected error type
        error_type = net_info["errorType"]

        # Retrieve future and ensure it has finished
        future = net_info["request_future"]
        r, error_text, exception_text = get_response(
            request_future=future, error_type=error_type, social_network=social_network
        )

        # Get response time for response of our request.
        try:
            response_time = r.elapsed
        except AttributeError:
            response_time = None

        # Attempt to get request information
        try:
            http_status = r.status_code
        except Exception:
            http_status = "?"
        try:
            response_text = r.text.encode(r.encoding or "UTF-8")
        except Exception:
            response_text = ""

        query_status = QueryStatus.UNKNOWN
        error_context = None

        # As WAFs advance and evolve, they will occasionally block Sherlock and
        # lead to false positives and negatives. Fingerprints should be added
        # here to filter results that fail to bypass WAFs. Fingerprints should
        # be highly targeted. Comment at the end of each fingerprint to
        # indicate target and date fingerprinted.
        WAFHitMsgs = [
            '.loading-spinner{visibility:hidden}body.no-js .challenge-running{display:none}body.dark{background-color:#222;color:#d9d9d9}body.dark a{color:#fff}body.dark a:hover{color:#ee730a;text-decoration:underline}body.dark .lds-ring div{border-color:#999 transparent transparent}body.dark .font-red{color:#b20f03}body.dark',  # 2024-05-13 Cloudflare
            '{return l.onPageView}}),Object.defineProperty(r,"perimeterxIdentifiers",{enumerable:'  # 2024-04-09 PerimeterX / Human Security
        ]

        if error_text is not None:
            error_context = error_text

        elif any(hitMsg in r.text for hitMsg in WAFHitMsgs):
            query_status = QueryStatus.WAF

        elif error_type == "message":
            # error_flag True denotes no error found in the HTML
            # error_flag False denotes error found in the HTML
            error_flag = True
            errors = net_info.get("errorMsg")
            # errors holds the error message, which can be a string or a
            # list; detect which with isinstance. Handle a string as the
            # normal case, and iterate the messages if it is a list.
            if isinstance(errors, str):
                # Check if the error message is in the HTML;
                # if it is present, set the flag to False.
                if errors in r.text:
                    error_flag = False
            else:
                # If it's a list, iterate over all the error messages.
                for error in errors:
                    if error in r.text:
                        error_flag = False
                        break
            if error_flag:
                query_status = QueryStatus.CLAIMED
            else:
                query_status = QueryStatus.AVAILABLE
        elif error_type == "status_code":
            error_codes = net_info.get("errorCode")
            query_status = QueryStatus.CLAIMED

            # Type consistency, allowing for both single values and lists in manifest
            if isinstance(error_codes, int):
                error_codes = [error_codes]

            if error_codes is not None and r.status_code in error_codes:
                query_status = QueryStatus.AVAILABLE
            elif r.status_code >= 300 or r.status_code < 200:
                query_status = QueryStatus.AVAILABLE
        elif error_type == "response_url":
            # For this detection method, we have turned off the redirect.
            # So, there is no need to check the response URL: it will always
            # match the request. Instead, we will ensure that the response
            # code indicates that the request was successful (i.e. no 404, or
            # forward to some odd redirect).
            if 200 <= r.status_code < 300:
                query_status = QueryStatus.CLAIMED
            else:
                query_status = QueryStatus.AVAILABLE
        else:
            # It should be impossible to ever get here...
            raise ValueError(
                f"Unknown Error Type '{error_type}' for site '{social_network}'"
            )

        if dump_response:
            print("+++++++++++++++++++++")
            print(f"TARGET NAME   : {social_network}")
            print(f"USERNAME      : {username}")
            print(f"TARGET URL    : {url}")
            print(f"TEST METHOD   : {error_type}")
            try:
                print(f"STATUS CODES  : {net_info['errorCode']}")
            except KeyError:
                pass
            print("Results...")
            try:
                print(f"RESPONSE CODE : {r.status_code}")
            except Exception:
                pass
            try:
                print(f"ERROR TEXT    : {net_info['errorMsg']}")
            except KeyError:
                pass
            print(">>>>> BEGIN RESPONSE TEXT")
            try:
                print(r.text)
            except Exception:
                pass
            print("<<<<< END RESPONSE TEXT")
            print("VERDICT : " + str(query_status))
            print("+++++++++++++++++++++")

        # Notify caller about results of query.
        result = QueryResult(
            username=username,
            site_name=social_network,
            site_url_user=url,
            status=query_status,
            query_time=response_time,
            context=error_context,
        )
        query_notify.update(result)

        # Save status of request
        results_site["status"] = result

        # Save results from request
        results_site["http_status"] = http_status
        results_site["response_text"] = response_text

        # Add this site's results into final dictionary with all of the other results.
        results_total[social_network] = results_site

    return results_total
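

# Illustrative sketch (not executed): minimal programmatic use of sherlock().
# SitesInformation, QueryNotifyPrint, and sherlock are all defined/imported in
# this module; "alice" is a placeholder username.
#
#     sites = SitesInformation()
#     site_data = {site.name: site.information for site in sites}
#     query_notify = QueryNotifyPrint(
#         result=None, verbose=False, print_all=False, browse=False
#     )
#     results = sherlock("alice", site_data, query_notify, timeout=60)
#     for site, info in results.items():
#         print(site, info["status"].status)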


def timeout_check(value):
    """Check Timeout Argument.

    Checks timeout for validity.

    Keyword Arguments:
    value                  -- Time in seconds to wait before timing out request.

    Return Value:
    Floating point number representing the time (in seconds) that should be
    used for the timeout.

    NOTE: Will raise an exception if the timeout is invalid.
    """

    float_value = float(value)

    if float_value <= 0:
        raise ArgumentTypeError(
            f"Invalid timeout value: {value}. Timeout must be a positive number."
        )

    return float_value
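

# Examples (illustrative, not executed):
#     timeout_check("10")  -> 10.0
#     timeout_check("0")   raises ArgumentTypeError
#     timeout_check("abc") raises ValueError (from float())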


def handler(signal_received, frame):
    """Exit gracefully without throwing errors.

    Source: https://www.devdungeon.com/content/python-catch-sigint-ctrl-c
    """
    sys.exit(0)


def main():
    parser = ArgumentParser(
        formatter_class=RawDescriptionHelpFormatter,
        description=f"{__longname__} (Version {__version__})",
    )
    parser.add_argument(
        "--version",
        action="version",
        version=f"Sherlock v{__version__}",
        help="Display version information and dependencies.",
    )
    parser.add_argument(
        "--verbose",
        "-v",
        "-d",
        "--debug",
        action="store_true",
        dest="verbose",
        default=False,
        help="Display extra debugging information and metrics.",
    )
    parser.add_argument(
        "--folderoutput",
        "-fo",
        dest="folderoutput",
        help="If using multiple usernames, the output of the results will be saved to this folder.",
    )
    parser.add_argument(
        "--output",
        "-o",
        dest="output",
        help="If using a single username, the output of the result will be saved to this file.",
    )
    parser.add_argument(
        "--tor",
        "-t",
        action="store_true",
        dest="tor",
        default=False,
        help="Make requests over Tor; increases runtime; requires Tor to be installed and in system path.",
    )
    parser.add_argument(
        "--unique-tor",
        "-u",
        action="store_true",
        dest="unique_tor",
        default=False,
        help="Make requests over Tor with new Tor circuit after each request; increases runtime; requires Tor to be installed and in system path.",
    )
    parser.add_argument(
        "--csv",
        action="store_true",
        dest="csv",
        default=False,
        help="Create Comma-Separated Values (CSV) File.",
    )
    parser.add_argument(
        "--xlsx",
        action="store_true",
        dest="xlsx",
        default=False,
        help="Create the standard file for the modern Microsoft Excel spreadsheet (xlsx).",
    )
    parser.add_argument(
        "--site",
        action="append",
        metavar="SITE_NAME",
        dest="site_list",
        default=[],
        help="Limit analysis to just the listed sites. Add multiple options to specify more than one site.",
    )
    parser.add_argument(
        "--proxy",
        "-p",
        metavar="PROXY_URL",
        action="store",
        dest="proxy",
        default=None,
        help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080",
    )
    parser.add_argument(
        "--dump-response",
        action="store_true",
        dest="dump_response",
        default=False,
        help="Dump the HTTP response to stdout for targeted debugging.",
    )
    parser.add_argument(
        "--json",
        "-j",
        metavar="JSON_FILE",
        dest="json_file",
        default=None,
        help="Load site data from a local JSON file or a remote (online) JSON file.",
    )
    parser.add_argument(
        "--timeout",
        action="store",
        metavar="TIMEOUT",
        dest="timeout",
        type=timeout_check,
        default=60,
        help="Time (in seconds) to wait for response to requests (Default: 60)",
    )
    parser.add_argument(
        "--print-all",
        action="store_true",
        dest="print_all",
        default=False,
        help="Output sites where the username was not found.",
    )
    parser.add_argument(
        "--print-found",
        action="store_true",
        dest="print_found",
        default=True,
        help="Output sites where the username was found (also if exported as file).",
    )
    parser.add_argument(
        "--no-color",
        action="store_true",
        dest="no_color",
        default=False,
        help="Don't color terminal output.",
    )
    parser.add_argument(
        "username",
        nargs="+",
        metavar="USERNAMES",
        action="store",
        help="One or more usernames to check with social networks. Check similar usernames using {?} (replaced with '_', '-', '.').",
    )
    parser.add_argument(
        "--browse",
        "-b",
        action="store_true",
        dest="browse",
        default=False,
        help="Browse to all results on default browser.",
    )

    parser.add_argument(
        "--local",
        "-l",
        action="store_true",
        default=False,
        help="Force the use of the local data.json file.",
    )

    parser.add_argument(
        "--nsfw",
        action="store_true",
        default=False,
        help="Include checking of NSFW sites from default list.",
    )

    args = parser.parse_args()

    # If the user presses CTRL-C, exit gracefully without throwing errors
    signal.signal(signal.SIGINT, handler)

    # Check for newer version of Sherlock. If it exists, let the user know about it
    try:
        r = requests.get(
            "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/__init__.py"
        )

        remote_version = str(re.findall('__version__ *= *"(.*)"', r.text)[0])
        local_version = __version__

        if remote_version != local_version:
            print(
                "Update Available!\n"
                + f"You are running version {local_version}. Version {remote_version} is available at https://github.com/sherlock-project/sherlock"
            )

    except Exception as error:
        print(f"A problem occurred while checking for an update: {error}")

    # Argument check
    # TODO regex check on args.proxy
    if args.tor and (args.proxy is not None):
        raise Exception("Tor and Proxy cannot be set at the same time.")

    # Make prompts
    if args.proxy is not None:
        print("Using the proxy: " + args.proxy)

    if args.tor or args.unique_tor:
        print("Using Tor to make requests")

        print(
            "Warning: some websites might refuse connections over Tor, so using this option might increase connection errors."
        )

    if args.no_color:
        # Disable color output.
        init(strip=True, convert=False)
    else:
        # Enable color output.
        init(autoreset=True)

    # Check if both output methods are entered as input.
    if args.output is not None and args.folderoutput is not None:
        print("You can only use one of the output methods.")
        sys.exit(1)

    # Check validity for single username output.
    if args.output is not None and len(args.username) != 1:
        print("You can only use --output with a single username")
        sys.exit(1)

    # Create object with all information about sites we are aware of.
    try:
        if args.local:
            sites = SitesInformation(
                os.path.join(os.path.dirname(__file__), "resources/data.json")
            )
        else:
            sites = SitesInformation(args.json_file)
    except Exception as error:
        print(f"ERROR: {error}")
        sys.exit(1)

    if not args.nsfw:
        sites.remove_nsfw_sites(do_not_remove=args.site_list)

    # Create original dictionary from SitesInformation() object.
    # Eventually, the rest of the code will be updated to use the new object
    # directly, but this will glue the two pieces together.
    site_data_all = {site.name: site.information for site in sites}
    if not args.site_list:
        # Not desired to look at a sub-set of sites
        site_data = site_data_all
    else:
        # User desires to selectively run queries on a sub-set of the site list.
        # Make sure that the sites are supported & build up pruned site database.
        site_data = {}
        site_missing = []
        for site in args.site_list:
            counter = 0
            for existing_site in site_data_all:
                if site.lower() == existing_site.lower():
                    site_data[existing_site] = site_data_all[existing_site]
                    counter += 1
            if counter == 0:
                # Build up list of sites not supported for future error message.
                site_missing.append(f"'{site}'")

        if site_missing:
            print(f"Error: Desired sites not found: {', '.join(site_missing)}.")

        if not site_data:
            sys.exit(1)

    # Create notify object for query results.
    query_notify = QueryNotifyPrint(
        result=None, verbose=args.verbose, print_all=args.print_all, browse=args.browse
    )

    # Run report on all specified users.
    all_usernames = []
    for username in args.username:
        if check_for_parameter(username):
            for name in multiple_usernames(username):
                all_usernames.append(name)
        else:
            all_usernames.append(username)
    for username in all_usernames:
        results = sherlock(
            username,
            site_data,
            query_notify,
            tor=args.tor,
            unique_tor=args.unique_tor,
            dump_response=args.dump_response,
            proxy=args.proxy,
            timeout=args.timeout,
        )

        if args.output:
            result_file = args.output
        elif args.folderoutput:
            # The usernames results should be stored in a targeted folder.
            # If the folder doesn't exist, create it first.
            os.makedirs(args.folderoutput, exist_ok=True)
            result_file = os.path.join(args.folderoutput, f"{username}.txt")
        else:
            result_file = f"{username}.txt"

        with open(result_file, "w", encoding="utf-8") as file:
            exists_counter = 0
            for website_name in results:
                dictionary = results[website_name]
                if dictionary.get("status").status == QueryStatus.CLAIMED:
                    exists_counter += 1
                    file.write(dictionary["url_user"] + "\n")
            file.write(f"Total Websites Username Detected On : {exists_counter}\n")

        if args.csv:
            result_file = f"{username}.csv"
            if args.folderoutput:
                # The usernames results should be stored in a targeted folder.
                # If the folder doesn't exist, create it first.
                os.makedirs(args.folderoutput, exist_ok=True)
                result_file = os.path.join(args.folderoutput, result_file)

            with open(result_file, "w", newline="", encoding="utf-8") as csv_report:
                writer = csv.writer(csv_report)
                writer.writerow(
                    [
                        "username",
                        "name",
                        "url_main",
                        "url_user",
                        "exists",
                        "http_status",
                        "response_time_s",
                    ]
                )
                for site in results:
                    if (
                        args.print_found
                        and not args.print_all
                        and results[site]["status"].status != QueryStatus.CLAIMED
                    ):
                        continue

                    response_time_s = results[site]["status"].query_time
                    if response_time_s is None:
                        response_time_s = ""
                    writer.writerow(
                        [
                            username,
                            site,
                            results[site]["url_main"],
                            results[site]["url_user"],
                            str(results[site]["status"].status),
                            results[site]["http_status"],
                            response_time_s,
                        ]
                    )

        if args.xlsx:
            usernames = []
            names = []
            url_main = []
            url_user = []
            exists = []
            http_status = []
            response_time_s = []

            for site in results:
                if (
                    args.print_found
                    and not args.print_all
                    and results[site]["status"].status != QueryStatus.CLAIMED
                ):
                    continue

                # Record "" instead of None so empty cells stay empty in the
                # spreadsheet.
                response_time = results[site]["status"].query_time
                if response_time is None:
                    response_time_s.append("")
                else:
                    response_time_s.append(response_time)
                usernames.append(username)
                names.append(site)
                url_main.append(results[site]["url_main"])
                url_user.append(results[site]["url_user"])
                exists.append(str(results[site]["status"].status))
                http_status.append(results[site]["http_status"])

            df = pd.DataFrame(
                {
                    "username": usernames,
                    "name": names,
                    "url_main": url_main,
                    "url_user": url_user,
                    "exists": exists,
                    "http_status": http_status,
                    "response_time_s": response_time_s,
                }
            )
            df.to_excel(f"{username}.xlsx", sheet_name="sheet1", index=False)

    print()
    query_notify.finish()


if __name__ == "__main__":
    main()
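

# CLI usage examples (illustrative; usernames and site names are placeholders,
# flags are defined in main() above):
#     sherlock alice
#     sherlock --csv --timeout 30 alice bob
#     sherlock --site GitHub --site Reddit "john{?}doe"
#     sherlock --output results.txt alice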