pull/1898/merge
Ripunjay Singh authored 8 months ago, committed by GitHub
commit 342dd6f7d3

sherlock/sherlock.py

@@ -8,24 +8,24 @@ networks.
 """
 
 import csv
-import signal
-import pandas as pd
 import os
 import platform
 import re
+import signal
 import sys
+import threading
+import time
 from argparse import ArgumentParser, RawDescriptionHelpFormatter
 from time import monotonic
 
+import pandas as pd
 import requests
-from requests_futures.sessions import FuturesSession
-from torrequest import TorRequest
-from result import QueryStatus
-from result import QueryResult
+from colorama import Fore, Style, init
 from notify import QueryNotifyPrint
+from requests_futures.sessions import FuturesSession
+from result import QueryResult, QueryStatus
 from sites import SitesInformation
-from colorama import init
+from torrequest import TorRequest
 
 module_name = "Sherlock: Find Usernames Across Social Networks"
 __version__ = "0.14.3"
@@ -482,6 +482,17 @@ def handler(signal_received, frame):
     """
     sys.exit(0)
 
 
+def animate_loading(message):
+    global done  # Define done as a global variable
+    done = threading.Event()
+    animation_chars = "|/-\\"
+    i = 0
+    while not done.is_set():
+        sys.stdout.write(f"\r{message} {animation_chars[i % len(animation_chars)]}")
+        sys.stdout.flush()
+        time.sleep(0.1)
+        i += 1
+
 def main():
     version_string = f"%(prog)s {__version__}\n" + \
@@ -571,34 +582,47 @@ def main():
     signal.signal(signal.SIGINT, handler)
 
     # Check for newer version of Sherlock. If it exists, let the user know about it
+    global done
+    done = threading.Event()
     try:
-        r = requests.get(
-            "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/sherlock.py")
-        remote_version = str(re.findall('__version__ = "(.*)"', r.text)[0])
+        loading_thread = threading.Thread(target=animate_loading, args=(Fore.YELLOW +"Checking for updates...",))
+        loading_thread.start()
+        remote_version = None
         local_version = __version__
-        if remote_version != local_version:
-            print("Update Available!\n" +
-                  f"You are running version {local_version}. Version {remote_version} is available at https://github.com/sherlock-project/sherlock")
+        r = requests.get(
+            "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/sherlock.py",
+            stream=True, timeout=10)
+        done.set()  # Stop the loading animation thread
+        if r.status_code == 200:
+            remote_version = str(re.findall('__version__ = "(.*)"', r.text)[0])
+            if remote_version != local_version:
+                sys.stdout.write(Fore.YELLOW +"\rUpdate Available!\n")
+                sys.stdout.write(Fore.YELLOW + f"You are running version {local_version}. Version {remote_version} is available at https://github.com/sherlock-project/sherlock\n")
+            else:
+                sys.stdout.write(Fore.YELLOW +"\rYou are running the latest version.\n")
+        else:
+            sys.stdout.write(Fore.YELLOW + f"\rFailed to retrieve data. Status code: {r.status_code}\n")
+    except requests.exceptions.Timeout:
+        done.set()  # Stop the loading animation thread
+        sys.stdout.write(Fore.RED +"\rConnection timed out. Please check your internet connection.")
+        sys.stdout.flush()
     except Exception as error:
-        print(f"A problem occurred while checking for an update: {error}")
+        done.set()  # Stop the loading animation thread
+        sys.stdout.write(Fore.RED +f"\r {error}".replace("\n", ""))
+        sys.stdout.flush()
 
     # Argument check
     # TODO regex check on args.proxy
     if args.tor and (args.proxy is not None):
         raise Exception("Tor and Proxy cannot be set at the same time.")
+    sys.stdout.flush()
+    sys.stdout.write('\r' + ' ' * 100 + '\r')
 
     # Make prompts
     if args.proxy is not None:
         print("Using the proxy: " + args.proxy)
 
     if args.tor or args.unique_tor:
         print("Using Tor to make requests")
 
         print(
             "Warning: some websites might refuse connecting over Tor, so note that using this option might increase connection errors.")
 
     if args.no_color:
         # Disable color output.
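For context, the new update check above is a spinner pattern: a background thread animates a status message while a blocking requests.get() runs, and a threading.Event tells the thread when to stop. A minimal standalone sketch of that pattern, assuming nothing beyond the standard library and requests (the spin/fetch_with_spinner names and the message text are illustrative, not part of the PR):

import sys
import threading
import time

import requests


def spin(message, stop_event):
    # Cycle spinner characters until the caller sets stop_event, then clear the line.
    chars = "|/-\\"
    i = 0
    while not stop_event.is_set():
        sys.stdout.write(f"\r{message} {chars[i % len(chars)]}")
        sys.stdout.flush()
        time.sleep(0.1)
        i += 1
    sys.stdout.write("\r" + " " * (len(message) + 2) + "\r")
    sys.stdout.flush()


def fetch_with_spinner(url, message="Checking for updates..."):
    # Run a blocking HTTP GET while a background thread animates the message.
    stop_event = threading.Event()
    spinner = threading.Thread(target=spin, args=(message, stop_event))
    spinner.start()
    try:
        return requests.get(url, timeout=10)
    finally:
        stop_event.set()   # stop the animation even if the request raised
        spinner.join()     # wait for the spinner to clear its output line


if __name__ == "__main__":
    r = fetch_with_spinner("https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/sherlock.py")
    print(f"HTTP {r.status_code}")

Passing the Event into the worker (instead of having the thread re-create a module-level global, as the diff does) and joining it in a finally block is one way to guarantee the spinner is stopped and its line cleared before normal output resumes.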

sherlock/sites.py

@@ -4,8 +4,14 @@ This module supports storing information about websites.
 This is the raw data that will be used to search for usernames.
 """
 import json
-import requests
 import secrets
+import sys
+
+import requests
+from colorama import Fore, Style, init
+from requests.exceptions import Timeout
+from tqdm import tqdm
 
 
 class SiteInformation:
     def __init__(self, name, url_home, url_username_format, username_claimed,
@@ -105,56 +111,50 @@ class SitesInformation:
         Return Value:
         Nothing.
         """
-        if not data_file_path:
-            # The default data file is the live data.json which is in the GitHub repo. The reason why we are using
-            # this instead of the local one is so that the user has the most up-to-date data. This prevents
-            # users from creating issue about false positives which has already been fixed or having outdated data
-            data_file_path = "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/resources/data.json"
-
-        # Ensure that specified data file has correct extension.
-        if not data_file_path.lower().endswith(".json"):
-            raise FileNotFoundError(f"Incorrect JSON file extension for data file '{data_file_path}'.")
-
-        # if "http://" == data_file_path[:7].lower() or "https://" == data_file_path[:8].lower():
-        if data_file_path.lower().startswith("http"):
-            # Reference is to a URL.
-            try:
-                response = requests.get(url=data_file_path)
-            except Exception as error:
-                raise FileNotFoundError(
-                    f"Problem while attempting to access data file URL '{data_file_path}': {error}"
-                )
-
-            if response.status_code != 200:
-                raise FileNotFoundError(f"Bad response while accessing "
-                                        f"data file URL '{data_file_path}'."
-                                        )
-            try:
-                site_data = response.json()
-            except Exception as error:
-                raise ValueError(
-                    f"Problem parsing json contents at '{data_file_path}': {error}."
-                )
-
-        else:
-            # Reference is to a file.
-            try:
-                with open(data_file_path, "r", encoding="utf-8") as file:
-                    try:
-                        site_data = json.load(file)
-                    except Exception as error:
-                        raise ValueError(
-                            f"Problem parsing json contents at '{data_file_path}': {error}."
-                        )
-            except FileNotFoundError:
-                raise FileNotFoundError(f"Problem while attempting to access "
-                                        f"data file '{data_file_path}'."
-                                        )
+        init(autoreset=True)
+        # sys.stdout.write("Loading...")
+        # sys.stdout.flush()
+        data_file_url = data_file_path if data_file_path else "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/resources/data.json"
+        try:
+            response = None
+            # Attempt to fetch data from the specified URL
+            if data_file_url.lower().startswith("http"):
+                sys.stdout.write(Fore.YELLOW + "\rEstablishing connection to data file URL...")
+                sys.stdout.flush()
+                try:
+                    response = requests.get(url=data_file_url, timeout=10)
+                    response.raise_for_status()  # Raise an exception for non-200 responses
+                except Timeout:
+                    sys.stdout.write(Fore.RED + "\rConnection timed out. Please check your internet connection.")
+                    sys.stdout.flush()
+                except requests.exceptions.RequestException as error:
+                    sys.stdout.write(Fore.RED + "\rAn error occurred while fetching data from URL: " + str(error))
+                    sys.stdout.flush()
+            if response and response.status_code == 200:
+                site_data = response.json()
+            else:
+                sys.stdout.write(Fore.YELLOW + "\rFalling back to the local data file...")
+                sys.stdout.flush()
+                data_file_path = "sherlock/resources/data.json"
+                with open(data_file_path, "r", encoding="utf-8") as file:
+                    site_data = json.load(file)
+        except Exception as error:
+            sys.stdout.write(Fore.RED + "\rAn error occurred while loading data: " + str(error))
+            sys.stdout.flush()
+            site_data = None
+
+        if not site_data:
+            raise ValueError("Failed to load site data.")
+
+        # Clear the previous message by overwriting it with spaces
+        sys.stdout.write('\r' + ' ' * 100 + '\r')
+        sys.stdout.flush()
 
         self.sites = {}
 
         # Add all site information from the json file to internal site list.
         for site_name in site_data:
             try:
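The rewritten SitesInformation.__init__ above is a remote-first, local-fallback load: try the hosted data.json so site definitions stay current, and fall back to the bundled copy when the request fails. A minimal sketch of that pattern, with the load_site_data name and the paths as illustrative assumptions rather than the PR's exact code:

import json

import requests


def load_site_data(remote_url, local_path):
    # Prefer the hosted data file so the site list is up to date;
    # fall back to the local copy if the request fails or returns bad JSON.
    try:
        response = requests.get(remote_url, timeout=10)
        response.raise_for_status()
        return response.json()
    except (requests.exceptions.RequestException, ValueError):
        with open(local_path, "r", encoding="utf-8") as file:
            return json.load(file)


if __name__ == "__main__":
    data = load_site_data(
        "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/resources/data.json",
        "sherlock/resources/data.json",
    )
    print(f"Loaded {len(data)} sites")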
