Merge branch 'sherlock-project:master' into keder

pull/1494/head
Keder Nath mallick 2 years ago committed by GitHub
commit 7f0957d8b9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -26,11 +26,6 @@ the site may be removed from the list. The
file contains sites that were included at one time in Sherlock, but had to be removed for
one reason or another.
In regard to adult sites (e.g. Pornhub), we have agreed to not include them in Sherlock.
However, we do understand that some users desire this support. The data.json file is easy to add to,
so users will be able to maintain their own forks to have this support. This is not ideal.
Maybe there could be another repo with an adult data.json? That would avoid forks getting out of date.
## Adding New Functionality
Please ensure that the content on your branch passes all tests before submitting a pull request.

@ -55,7 +55,7 @@ usage: sherlock [-h] [--version] [--verbose] [--folderoutput FOLDEROUTPUT]
[--browse] [--local]
USERNAMES [USERNAMES ...]
Sherlock: Find Usernames Across Social Networks (Version 0.14.0)
Sherlock: Find Usernames Across Social Networks (Version 0.14.2)
positional arguments:
USERNAMES One or more usernames to check with social networks.
@ -66,30 +66,26 @@ optional arguments:
--verbose, -v, -d, --debug
Display extra debugging information and metrics.
--folderoutput FOLDEROUTPUT, -fo FOLDEROUTPUT
If using multiple usernames, the output of the results
will be saved to this folder.
If using multiple usernames, the output of the results will be
saved to this folder.
--output OUTPUT, -o OUTPUT
If using single username, the output of the result
will be saved to this file.
--tor, -t Make requests over Tor; increases runtime; requires
Tor to be installed and in system path.
--unique-tor, -u Make requests over Tor with new Tor circuit after each
request; increases runtime; requires Tor to be
If using single username, the output of the result will be saved
to this file.
--tor, -t Make requests over Tor; increases runtime; requires Tor to be
installed and in system path.
--unique-tor, -u Make requests over Tor with new Tor circuit after each request;
increases runtime; requires Tor to be installed and in system
path.
--csv Create Comma-Separated Values (CSV) File.
--site SITE_NAME Limit analysis to just the listed sites. Add multiple
options to specify more than one site.
--xlsx Create the standard file for the modern Microsoft Excel
spreadsheet (xslx).
--site SITE_NAME Limit analysis to just the listed sites. Add multiple options to
specify more than one site.
--proxy PROXY_URL, -p PROXY_URL
Make requests over a proxy. e.g.
socks5://127.0.0.1:1080
Make requests over a proxy. e.g. socks5://127.0.0.1:1080
--json JSON_FILE, -j JSON_FILE
Load data from a JSON file or an online, valid, JSON
file.
--timeout TIMEOUT Time (in seconds) to wait for response to requests.
Default timeout is infinity. A longer timeout will be
more likely to get results from slow sites. On the
other hand, this may cause a long delay to gather all
results.
Load data from a JSON file or an online, valid, JSON file.
--timeout TIMEOUT Time (in seconds) to wait for response to requests (Default: 60)
--print-all Output sites where the username was not found.
--print-found Output sites where the username was found.
--no-color Don't color terminal output

@ -185,7 +185,7 @@ class QueryNotifyPrint(QueryNotify):
# return
def countResults(self):
"""This function counts the number of results. Every time the fuction is called,
"""This function counts the number of results. Every time the function is called,
the number of results is incremented.
Keyword Arguments:
@ -304,4 +304,4 @@ class QueryNotifyPrint(QueryNotify):
Return Value:
Nicely formatted string to get information about this object.
"""
return str(self.result)
return str(self.result)

@ -111,6 +111,14 @@
"username_claimed": "blue",
"username_unclaimed": "noonewould"
},
"Archive of Our Own": {
"errorMsg": "Sorry, could not find this user.",
"errorType": "message",
"url": "https://archiveofourown.org/users/{}",
"urlMain": "https://archiveofourown.org/",
"username_claimed": "test",
"username_unclaimed": "noonewouldeverusethis123123123123123123"
},
"Arduino": {
"errorType": "status_code",
"regexCheck": "^(?![_-])[A-Za-z0-9_-]{3,}$",
@ -286,7 +294,8 @@
"url": "https://pt.bongacams.com/profile/{}",
"urlMain": "https://pt.bongacams.com",
"username_claimed": "asuna-black",
"username_unclaimed": "noonewouldeverusethis77777"
"username_unclaimed": "noonewouldeverusethis77777",
"isNSFW": true
},
"Bookcrossing": {
"errorType": "status_code",
@ -361,7 +370,8 @@
"url": "https://chaturbate.com/{}",
"urlMain": "https://chaturbate.com",
"username_claimed": "cute18cute",
"username_unclaimed": "noonewouldeverusethis77777"
"username_unclaimed": "noonewouldeverusethis77777",
"isNSFW": true
},
"Chess": {
"errorMsg": "Username is valid",
@ -930,6 +940,13 @@
"username_claimed": "satznova",
"username_unclaimed": "noonewouldeverusethis7"
},
"Harvard Scholar": {
"errorType": "status_code",
"url": "https://scholar.harvard.edu/{}",
"urlMain": "https://scholar.harvard.edu/",
"username_claimed": "ousmanekane",
"username_unclaimed": "noonewouldeverusethis77"
},
"Houzz": {
"errorMsg": "The page you requested was not found.",
"errorType": "message",
@ -953,6 +970,13 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"Hashnode": {
"errorType": "status_code",
"url": "https://hashnode.com/@{}",
"urlMain": "https://hashnode.com",
"username_claimed": "blue",
"username_unclaimed": "noneownsthisusername"
},
"ICQ": {
"errorType": "status_code",
"url": "https://icq.im/{}/en",
@ -1363,7 +1387,8 @@
"urlMain": "https://onlyfans.com/",
"urlProbe": "https://onlyfans.com/api2/v2/users/{}",
"username_claimed": "theemilylynne",
"username_unclaimed": "noonewouldeverusethis7"
"username_unclaimed": "noonewouldeverusethis7",
"isNSFW": true
},
"OpenStreetMap": {
"errorType": "status_code",
@ -1489,7 +1514,8 @@
"url": "https://pornhub.com/users/{}",
"urlMain": "https://pornhub.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
"username_unclaimed": "noonewouldeverusethis7",
"isNSFW": true
},
"ProductHunt": {
"errorMsg": "We seem to have lost this page",
@ -2095,8 +2121,8 @@
"url": "https://en.wikipedia.org/wiki/Special:CentralAuth/{}?uselang=qqx",
"urlMain": "https://www.wikipedia.org/",
"username_claimed": "Hoadlck",
"username_unclaimed": "noonewouldeverusethis8"
},
"username_unclaimed": "noonewouldeverusethis8"
},
"Windy": {
"errorType": "status_code",
"url": "https://community.windy.com/user/{}",
@ -2156,7 +2182,8 @@
"url": "https://xvideos.com/profiles/{}",
"urlMain": "https://xvideos.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
"username_unclaimed": "noonewouldeverusethis7",
"isNSFW": true
},
"YouNow": {
"errorMsg": "No users found",
@ -2179,7 +2206,8 @@
"url": "https://youporn.com/uservids/{}",
"urlMain": "https://youporn.com",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis77777"
"username_unclaimed": "noonewouldeverusethis77777",
"isNSFW": true
},
"Zhihu": {
"errorMsg": "404",
@ -2683,4 +2711,4 @@
"username_claimed": "kossher",
"username_unclaimed": "noonewouldeverusethis7"
}
}
}

@ -28,7 +28,7 @@ from sites import SitesInformation
from colorama import init
module_name = "Sherlock: Find Usernames Across Social Networks"
__version__ = "0.14.1"
__version__ = "0.14.2"
class SherlockFuturesSession(FuturesSession):
@ -533,11 +533,8 @@ def main():
help="Load data from a JSON file or an online, valid, JSON file.")
parser.add_argument("--timeout",
action="store", metavar="TIMEOUT",
dest="timeout", type=timeout_check, default=None,
help="Time (in seconds) to wait for response to requests. "
"Default timeout is infinity. "
"A longer timeout will be more likely to get results from slow sites. "
"On the other hand, this may cause a long delay to gather all results."
dest="timeout", type=timeout_check, default=60,
help="Time (in seconds) to wait for response to requests (Default: 60)"
)
parser.add_argument("--print-all",
action="store_true", dest="print_all",
@ -564,6 +561,10 @@ def main():
action="store_true", default=False,
help="Force the use of the local data.json file.")
parser.add_argument("--nsfw",
action="store_true", default=False,
help="Include checking of NSFW sites from default list. Default False")
args = parser.parse_args()
# If the user presses CTRL-C, exit gracefully without throwing errors
@ -627,6 +628,9 @@ def main():
print(f"ERROR: {error}")
sys.exit(1)
if not args.nsfw:
sites.remove_nsfw_sites()
# Create original dictionary from SitesInformation() object.
# Eventually, the rest of the code will be updated to use the new object
# directly, but this will glue the two pieces together.

@ -8,7 +8,7 @@ import requests
class SiteInformation:
def __init__(self, name, url_home, url_username_format, username_claimed,
username_unclaimed, information):
username_unclaimed, information, is_nsfw):
"""Create Site Information Object.
Contains information about a specific website.
@ -52,6 +52,7 @@ class SiteInformation:
self.username_claimed = username_claimed
self.username_unclaimed = username_unclaimed
self.information = information
self.is_nsfw = is_nsfw
return
@ -162,7 +163,9 @@ class SitesInformation:
site_data[site_name]["url"],
site_data[site_name]["username_claimed"],
site_data[site_name]["username_unclaimed"],
site_data[site_name]
site_data[site_name],
site_data[site_name].get("isNSFW",False)
)
except KeyError as error:
raise ValueError(
@ -171,6 +174,23 @@ class SitesInformation:
return
def remove_nsfw_sites(self):
    """Remove NSFW Sites.

    Drop every site whose ``is_nsfw`` flag is true from this object's
    site collection, keeping all safe-for-work sites.

    Keyword Arguments:
    self                   -- This object.

    Return Value:
    None
    """
    # Rebuild the mapping with a comprehension instead of a manual
    # loop + continue; behavior is identical (NSFW entries filtered out).
    self.sites = {
        name: site
        for name, site in self.sites.items()
        if not site.is_nsfw
    }
def site_name_list(self):
"""Get Site Name List.

Loading…
Cancel
Save