diff --git a/README.md b/README.md
index c6fc11fa..57068d30 100644
--- a/README.md
+++ b/README.md
@@ -161,7 +161,6 @@ Here are some things we would appreciate your help on:
- Addition of new site support ¹
- Bringing back site support of [sites that have been removed](removed_sites.md) in the past due to false positives
-
[1] Please look at the Wiki entry on [adding new sites](https://github.com/sherlock-project/sherlock/wiki/Adding-Sites-To-Sherlock)
to understand the issues.
diff --git a/removed_sites.json b/removed_sites.json
index 4baefbd1..1070391c 100644
--- a/removed_sites.json
+++ b/removed_sites.json
@@ -519,14 +519,6 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
- "Ebay": {
- "errorMsg": "eBay Profile - error",
- "errorType": "message",
- "url": "https://www.ebay.com/usr/{}",
- "urlMain": "https://www.ebay.com/",
- "username_claimed": "blue",
- "username_unclaimed": "noonewouldeverusethis7"
- },
"GDProfiles": {
"errorType": "status_code",
"url": "https://gdprofiles.com/{}",
diff --git a/removed_sites.md b/removed_sites.md
index b8398eb4..20283dc1 100644
--- a/removed_sites.md
+++ b/removed_sites.md
@@ -1027,20 +1027,6 @@ As of 2021-01-13, Travellerspoint returns false positives
},
```
-## Ebay
-
-As of 2021-01-15, Ebay seems to be very laggy and take too long to return a response.
-```
- "Ebay": {
- "errorMsg": "eBay Profile - error",
- "errorType": "message",
- "url": "https://www.ebay.com/usr/{}",
- "urlMain": "https://www.ebay.com/",
- "username_claimed": "blue",
- "username_unclaimed": "noonewouldeverusethis7"
- },
-```
-
## GDProfiles
As of 2021-06-27, GDProfiles takes way too long to respond. Must be an issue on their side.
diff --git a/sherlock/__init__.py b/sherlock/__init__.py
index b0894c8b..219dcaec 100644
--- a/sherlock/__init__.py
+++ b/sherlock/__init__.py
@@ -1,5 +1,6 @@
-"""Sherlock Module
+""" Sherlock Module
This module contains the main logic to search for usernames at social
networks.
+
"""
diff --git a/sherlock/__main__.py b/sherlock/__main__.py
index 6ac4fe73..52645a49 100644
--- a/sherlock/__main__.py
+++ b/sherlock/__main__.py
@@ -20,9 +20,8 @@ if __name__ == "__main__":
major = sys.version_info[0]
minor = sys.version_info[1]
- python_version = str(major) + "." + str(minor) + "." + str(sys.version_info[2])
-
if major != 3 or major == 3 and minor < 6:
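+ # Build the version string only when it is needed for the error message below.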
+ python_version = str(major) + "." + str(minor) + "." + str(sys.version_info[2])
print("Sherlock requires Python 3.6+\nYou are using Python %s, which is not supported by Sherlock" % (python_version))
sys.exit(1)
diff --git a/sherlock/notify.py b/sherlock/notify.py
index fffe801b..bb467914 100644
--- a/sherlock/notify.py
+++ b/sherlock/notify.py
@@ -33,7 +33,7 @@ class QueryNotify:
self.result = result
- return
+ # return
def start(self, message=None):
"""Notify Start.
@@ -52,7 +52,7 @@ class QueryNotify:
Nothing.
"""
- return
+ # return
def update(self, result):
"""Notify Update.
@@ -71,7 +71,7 @@ class QueryNotify:
self.result = result
- return
+ # return
def finish(self, message=None):
"""Notify Finish.
@@ -90,7 +90,7 @@ class QueryNotify:
Nothing.
"""
- return
+ # return
def __str__(self):
"""Convert Object To String.
@@ -101,9 +101,7 @@ class QueryNotify:
Return Value:
Nicely formatted string to get information about this object.
"""
- result = str(self.result)
-
- return result
+ return str(self.result)
class QueryNotifyPrint(QueryNotify):
@@ -150,14 +148,43 @@ class QueryNotifyPrint(QueryNotify):
"""
title = "Checking username"
+
print(Style.BRIGHT + Fore.GREEN + "[" +
Fore.YELLOW + "*" +
Fore.GREEN + f"] {title}" +
Fore.WHITE + f" {message}" +
Fore.GREEN + " on:")
+ # An empty line between the first line and the results (for clearer output)
+ print('\r')
return
+ def finish(self, message="The processing has been finished."):
+ """Notify Finish.
+
+ Will print the last line to the standard output.
+
+ Keyword Arguments:
+ self -- This object.
+ message -- The final message to display.
+
+ Return Value:
+ Nothing.
+ """
+
+ title = "End"
+
+ print('\r')  # An empty line between the last line of the main output and this final line (for clearer output)
+ print(Style.BRIGHT + Fore.GREEN + "[" +
+ Fore.YELLOW + "!" +
+ Fore.GREEN + f"] {title}" +
+ Fore.GREEN + ": " +
+ Fore.WHITE + f" {message}" )
+
+ # return
+
def update(self, result):
"""Notify Update.
@@ -176,7 +203,7 @@ class QueryNotifyPrint(QueryNotify):
response_time_text = ""
if self.result.query_time is not None and self.verbose == True:
response_time_text = f" [{round(self.result.query_time * 1000)}ms]"
-
+
# Output to the terminal is desired.
if result.status == QueryStatus.CLAIMED:
print(Style.BRIGHT + Fore.WHITE + "[" +
@@ -217,8 +244,9 @@ class QueryNotifyPrint(QueryNotify):
else:
# It should be impossible to ever get here...
- raise ValueError(f"Unknown Query Status '{str(result.status)}' for "
- f"site '{self.result.site_name}'")
+ raise ValueError(
+ f"Unknown Query Status '{result.status}' for site '{self.result.site_name}'"
+ )
return
@@ -231,6 +259,4 @@ class QueryNotifyPrint(QueryNotify):
Return Value:
Nicely formatted string to get information about this object.
"""
- result = str(self.result)
-
- return result
+ return str(self.result)
diff --git a/sherlock/resources/data.json b/sherlock/resources/data.json
index 6808e24d..531cc3c6 100644
--- a/sherlock/resources/data.json
+++ b/sherlock/resources/data.json
@@ -43,6 +43,13 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Airbit": {
+ "errorType": "status_code",
+ "url": "https://airbit.com/{}",
+ "urlMain": "https://airbit.com/",
+ "username_claimed": "airbit",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"Airliners": {
"errorType": "status_code",
"url": "https://www.airliners.net/user/{}/profile/photos",
@@ -158,6 +165,13 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Autofrage": {
+ "errorType": "status_code",
+ "url": "https://www.autofrage.net/nutzer/{}",
+ "urlMain": "https://www.autofrage.net/",
+ "username_claimed": "autofrage",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"Avizo": {
"errorType": "response_url",
"errorUrl": "https://www.avizo.cz/",
@@ -204,6 +218,20 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Bezuzyteczna": {
+ "errorType": "status_code",
+ "url": "https://bezuzyteczna.pl/uzytkownicy/{}",
+ "urlMain": "https://bezuzyteczna.pl",
+ "username_claimed": "Jackson",
+ "username_unclaimed": "ktobysietaknazwalnawb69"
+ },
+ "Bikemap": {
+ "errorType": "status_code",
+ "url": "https://www.bikemap.net/en/u/{}/routes/created/",
+ "urlMain": "https://www.bikemap.net/",
+ "username_claimed": "bikemap",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"BinarySearch": {
"errorMsg": "{}",
"errorType": "message",
@@ -214,6 +242,13 @@
"username_claimed": "Eyes_Wide_Shut",
"username_unclaimed": "hihowareyou101"
},
+ "BioHacking": {
+ "errorType": "status_code",
+ "url": "https://forum.dangerousthings.com/u/{}",
+ "urlMain": "https://forum.dangerousthings.com/",
+ "username_claimed": "blue",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"BitBucket": {
"errorType": "status_code",
"regexCheck": "^[a-zA-Z0-9-_]{1,30}$",
@@ -363,6 +398,13 @@
"username_claimed": "green",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Clubhouse": {
+ "errorType": "status_code",
+ "url": "https://www.clubhouse.com/@{}",
+ "urlMain": "https://www.clubhouse.com",
+ "username_claimed": "waniathar",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"Codecademy": {
"errorMsg": "This profile could not be found",
"errorType": "message",
@@ -379,6 +421,14 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Codeforces": {
+ "errorType": "response_url",
+ "errorUrl": "https://codeforces.com/",
+ "url": "https://codeforces.com/profile/{}",
+ "urlMain": "https://www.codeforces.com/",
+ "username_claimed": "tourist",
+ "username_unclaimed": "noonewouldeverusethis789"
+ },
"Codepen": {
"errorType": "status_code",
"url": "https://codepen.io/{}",
@@ -463,6 +513,13 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Cryptomator Forum": {
+ "errorType": "status_code",
+ "url": "https://community.cryptomator.org/u/{}",
+ "urlMain": "https://community.cryptomator.org/",
+ "username_claimed": "michael",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"DEV Community": {
"errorType": "status_code",
"regexCheck": "^[a-zA-Z][a-zA-Z0-9_-]*$",
@@ -548,6 +605,13 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Envato Forum": {
+ "errorType": "status_code",
+ "url": "https://forums.envato.com/u/{}",
+ "urlMain": "https://forums.envato.com/",
+ "username_claimed": "enabled",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"Etsy": {
"errorType": "status_code",
"url": "https://www.etsy.com/shop/{}",
@@ -586,6 +650,13 @@
"username_claimed": "hackerman",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Fameswap": {
+ "errorType": "status_code",
+ "url": "https://fameswap.com/user/{}",
+ "urlMain": "https://fameswap.com/",
+ "username_claimed": "fameswap",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"FanCentro": {
"errorMsg": "var environment",
"errorType": "message",
@@ -601,6 +672,13 @@
"username_claimed": "Jungypoo",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Finanzfrage": {
+ "errorType": "status_code",
+ "url": "https://www.finanzfrage.net/nutzer/{}",
+ "urlMain": "https://www.finanzfrage.net/",
+ "username_claimed": "finanzfrage",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"Fiverr": {
"errorMsg": "\"status\":\"success\"",
"errorType": "message",
@@ -686,6 +764,14 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis"
},
+ "G2G": {
+ "errorType": "response_url",
+ "errorUrl": "https://www.g2g.com/{}",
+ "url": "https://www.g2g.com/{}",
+ "urlMain": "https://www.g2g.com/",
+ "username_claimed": "user",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"GNOME VCS": {
"errorType": "response_url",
"errorUrl": "https://gitlab.gnome.org/{}",
@@ -695,6 +781,14 @@
"username_claimed": "adam",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Gab": {
+ "errorMsg": "The page you are looking for isn't here.",
+ "errorType": "message",
+ "url": "https://gab.com/{}",
+ "urlMain": "https://gab.com",
+ "username_claimed": "a",
+ "username_unclaimed": "noonewouldeverusethis"
+ },
"GaiaOnline": {
"errorMsg": "No user ID specified or user does not exist",
"errorType": "message",
@@ -717,6 +811,27 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis"
},
+ "Genius (Artists)": {
+ "errorType": "status_code",
+ "url": "https://genius.com/artists/{}",
+ "urlMain": "https://genius.com/",
+ "username_claimed": "genius",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
+ "Genius (Users)": {
+ "errorType": "status_code",
+ "url": "https://genius.com/{}",
+ "urlMain": "https://genius.com/",
+ "username_claimed": "genius",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
+ "Gesundheitsfrage": {
+ "errorType": "status_code",
+ "url": "https://www.gesundheitsfrage.net/nutzer/{}",
+ "urlMain": "https://www.gesundheitsfrage.net/",
+ "username_claimed": "gutefrage",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"GetMyUni": {
"errorType": "status_code",
"url": "https://www.getmyuni.com/user/{}",
@@ -724,6 +839,14 @@
"username_claimed": "Upneet.Grover17",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Ghost": {
+ "errorMsg": "Domain Error",
+ "errorType": "message",
+ "url": "https://{}.ghost.io/",
+ "urlMain": "https://ghost.org/",
+ "username_claimed": "troyhunt",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"Giphy": {
"errorType": "status_code",
"url": "https://giphy.com/{}",
@@ -731,6 +854,13 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "GitBook": {
+ "errorType": "status_code",
+ "url": "https://{}.gitbook.io/",
+ "urlMain": "https://gitbook.com/",
+ "username_claimed": "gitbook",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"GitHub": {
"errorType": "status_code",
"regexCheck": "^[a-zA-Z0-9](?:[a-zA-Z0-9]|-(?=[a-zA-Z0-9])){0,38}$",
@@ -778,6 +908,14 @@
"username_claimed": "jetbrains",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Grailed": {
+ "errorType": "response_url",
+ "errorUrl": "https://www.grailed.com/{}",
+ "url": "https://www.grailed.com/{}",
+ "urlMain": "https://www.grailed.com/",
+ "username_claimed": "blue",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"Gravatar": {
"errorType": "status_code",
"regexCheck": "^((?!\\.).)*$",
@@ -787,7 +925,7 @@
"username_unclaimed": "noonewouldeverusethis7"
},
"Gumroad": {
- "errorMsg": "Page not found.",
+ "errorMsg": "Page not found (404) - Gumroad",
"errorType": "message",
"url": "https://www.gumroad.com/{}",
"urlMain": "https://www.gumroad.com/",
@@ -808,6 +946,13 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Gutefrage": {
+ "errorType": "status_code",
+ "url": "https://www.gutefrage.net/nutzer/{}",
+ "urlMain": "https://www.gutefrage.net/",
+ "username_claimed": "gutefrage",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"HEXRPG": {
"errorMsg": "Error : User ",
"errorType": "message",
@@ -934,6 +1079,13 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Ionic Forum": {
+ "errorType": "status_code",
+ "url": "https://forum.ionicframework.com/u/{}",
+ "urlMain": "https://forum.ionicframework.com/",
+ "username_claimed": "theblue222",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"Issuu": {
"errorType": "status_code",
"url": "https://issuu.com/{}",
@@ -972,6 +1124,20 @@
"username_claimed": "jenny",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Joplin Forum": {
+ "errorType": "status_code",
+ "url": "https://discourse.joplinapp.org/u/{}",
+ "urlMain": "https://discourse.joplinapp.org/",
+ "username_claimed": "laurent",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
+ "KEAKR": {
+ "errorType": "status_code",
+ "url": "https://www.keakr.com/en/profile/{}",
+ "urlMain": "https://www.keakr.com/",
+ "username_claimed": "beats",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"Kaggle": {
"errorType": "status_code",
"url": "https://www.kaggle.com/{}",
@@ -1025,6 +1191,13 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "LessWrong": {
+ "errorType": "status_code",
+ "url": "https://www.lesswrong.com/users/@{}",
+ "urlMain": "https://www.lesswrong.com/",
+ "username_claimed": "blue",
+ "username_unclaimed": "noonewouldeverusethis7why"
+ },
"Letterboxd": {
"errorMsg": "Sorry, we can\u2019t find the page you\u2019ve requested.",
"errorType": "message",
@@ -1048,6 +1221,14 @@
"username_claimed": "anne",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Listed": {
+ "errorType": "response_url",
+ "errorUrl": "https://listed.to/@{}",
+ "url": "https://listed.to/@{}",
+ "urlMain": "https://listed.to/",
+ "username_claimed": "listed",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"LiveJournal": {
"errorType": "status_code",
"regexCheck": "^[a-zA-Z][a-zA-Z0-9_-]*$",
@@ -1072,6 +1253,21 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "LottieFiles": {
+ "errorType": "status_code",
+ "url": "https://lottiefiles.com/{}",
+ "urlMain": "https://lottiefiles.com/",
+ "username_claimed": "lottiefiles",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
+ "Mapify": {
+ "errorType": "response_url",
+ "errorUrl": "https://mapify.travel/{}",
+ "url": "https://mapify.travel/{}",
+ "urlMain": "https://mapify.travel/",
+ "username_claimed": "mapify",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"Medium": {
"errorMsg": "",
"errorType": "message",
@@ -1724,6 +1999,13 @@
"username_claimed": "red",
"username_unclaimed": "impossibleusername"
},
+ "ThemeForest": {
+ "errorType": "status_code",
+ "url": "https://themeforest.net/user/{}",
+ "urlMain": "https://themeforest.net/",
+ "username_claimed": "user",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"TikTok": {
"errorType": "status_code",
"url": "https://tiktok.com/@{}",
@@ -1744,6 +2026,7 @@
},
"TradingView": {
"errorType": "status_code",
+ "request_method": "GET",
"url": "https://www.tradingview.com/u/{}/",
"urlMain": "https://www.tradingview.com/",
"username_claimed": "blue",
@@ -1994,6 +2277,13 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "Wykop": {
+ "errorType": "status_code",
+ "url": "https://www.wykop.pl/ludzie/{}",
+ "urlMain": "https://www.wykop.pl",
+ "username_claimed": "blue",
+ "username_unclaimed": "bumpewastaken"
+ },
"Xbox Gamertag": {
"errorType": "status_code",
"url": "https://xboxgamertag.com/search/{}",
@@ -2126,6 +2416,22 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "eBay.com": {
+ "errorMsg": "The User ID you entered was not found. Please check the User ID and try again.",
+ "errorType": "message",
+ "url": "https://www.ebay.com/usr/{}",
+ "urlMain": "https://www.ebay.com/",
+ "username_claimed": "blue",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
+ "eBay.de": {
+ "errorMsg": "Der eingegebene Nutzername wurde nicht gefunden. Bitte pr\u00fcfen Sie den Nutzernamen und versuchen Sie es erneut.",
+ "errorType": "message",
+ "url": "https://www.ebay.de/usr/{}",
+ "urlMain": "https://www.ebay.de/",
+ "username_claimed": "blue",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"eGPU": {
"errorType": "status_code",
"url": "https://egpu.io/forums/profile/{}/",
@@ -2443,6 +2749,13 @@
"username_claimed": "red",
"username_unclaimed": "noonewouldeverusethis7"
},
+ "skyrock": {
+ "errorType": "status_code",
+ "url": "https://{}.skyrock.com/",
+ "urlMain": "https://skyrock.com/",
+ "username_claimed": "red",
+ "username_unclaimed": "noonewouldeverusethis7"
+ },
"social.tchncs.de": {
"errorType": "status_code",
"url": "https://social.tchncs.de/@{}",
@@ -2492,6 +2805,13 @@
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis77777"
},
+ "znanylekarz.pl": {
+ "errorType": "status_code",
+ "url": "https://www.znanylekarz.pl/{}",
+ "urlMain": "https://znanylekarz.pl",
+ "username_claimed": "janusz-nowak",
+ "username_unclaimed": "kto-by-sie-tak-nazwal-69"
+ },
"zoomit": {
"errorMsg": "\u0645\u062a\u0627\u0633\u0641\u0627\u0646\u0647 \u0635\u0641\u062d\u0647 \u06cc\u0627\u0641\u062a \u0646\u0634\u062f",
"errorType": "message",
diff --git a/sherlock/sherlock.py b/sherlock/sherlock.py
index 647f0222..0e9becc3 100644
--- a/sherlock/sherlock.py
+++ b/sherlock/sherlock.py
@@ -22,17 +22,15 @@ from torrequest import TorRequest
from result import QueryStatus
from result import QueryResult
from notify import QueryNotifyPrint
-from sites import SitesInformation
+from sites import SitesInformation
from colorama import init
module_name = "Sherlock: Find Usernames Across Social Networks"
__version__ = "0.14.0"
-
-
class SherlockFuturesSession(FuturesSession):
- def request(self, method, url, hooks={}, *args, **kwargs):
+ def request(self, method, url, hooks=None, *args, **kwargs):
"""Request URL.
This extends the FuturesSession request method to calculate a response
@@ -54,6 +52,8 @@ class SherlockFuturesSession(FuturesSession):
Request object.
"""
# Record the start time for the request.
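+ # Use None as the default and create a fresh dict here so calls never share a mutable default argument.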
+ if hooks is None:
+ hooks = {}
start = monotonic()
def response_time(resp, *args, **kwargs):
@@ -96,12 +96,11 @@ class SherlockFuturesSession(FuturesSession):
def get_response(request_future, error_type, social_network):
-
# Default for Response object if some failure occurs.
response = None
error_context = "General Unknown Error"
- expection_text = None
+ exception_text = None
try:
response = request_future.result()
if response.status_code:
@@ -109,21 +108,21 @@ def get_response(request_future, error_type, social_network):
error_context = None
except requests.exceptions.HTTPError as errh:
error_context = "HTTP Error"
- expection_text = str(errh)
+ exception_text = str(errh)
except requests.exceptions.ProxyError as errp:
error_context = "Proxy Error"
- expection_text = str(errp)
+ exception_text = str(errp)
except requests.exceptions.ConnectionError as errc:
error_context = "Error Connecting"
- expection_text = str(errc)
+ exception_text = str(errc)
except requests.exceptions.Timeout as errt:
error_context = "Timeout Error"
- expection_text = str(errt)
+ exception_text = str(errt)
except requests.exceptions.RequestException as err:
error_context = "Unknown Error"
- expection_text = str(err)
+ exception_text = str(err)
- return response, error_context, expection_text
+ return response, error_context, exception_text
def interpolate_string(object, username):
@@ -191,15 +190,14 @@ def sherlock(username, site_data, query_notify,
# Limit number of workers to 20.
# This is probably vastly overkill.
if len(site_data) >= 20:
- max_workers=20
+ max_workers = 20
else:
- max_workers=len(site_data)
+ max_workers = len(site_data)
# Create multi-threaded session for all requests.
session = SherlockFuturesSession(max_workers=max_workers,
session=underlying_session)
-
# Results from analysis of all sites
results_total = {}
@@ -207,10 +205,9 @@ def sherlock(username, site_data, query_notify,
for social_network, net_info in site_data.items():
# Results from analysis of this specific site
- results_site = {}
+ results_site = {"url_main": net_info.get("urlMain")}
# Record URL of main site
- results_site["url_main"] = net_info.get("urlMain")
# A user agent is needed because some sites don't return the correct
# information since they think that we are bots (Which we actually are...)
@@ -228,7 +225,7 @@ def sherlock(username, site_data, query_notify,
# Don't make request if username is invalid for the site
regex_check = net_info.get("regexCheck")
if regex_check and re.search(regex_check, username) is None:
- # No need to do the check at the site: this user name is not allowed.
+ # No need to do the check at the site: this username is not allowed.
results_site["status"] = QueryResult(username,
social_network,
url,
@@ -255,7 +252,7 @@ def sherlock(username, site_data, query_notify,
elif request_method == "PUT":
request = session.put
else:
- raise RuntimeError( f"Unsupported request_method for {url}")
+ raise RuntimeError(f"Unsupported request_method for {url}")
if request_payload is not None:
request_payload = interpolate_string(request_payload, username)
@@ -301,10 +298,10 @@ def sherlock(username, site_data, query_notify,
)
else:
future = request(url=url_probe, headers=headers,
- allow_redirects=allow_redirects,
- timeout=timeout,
- json=request_payload
- )
+ allow_redirects=allow_redirects,
+ timeout=timeout,
+ json=request_payload
+ )
# Store future in data for access later
net_info["request_future"] = future
@@ -313,7 +310,7 @@ def sherlock(username, site_data, query_notify,
if unique_tor:
underlying_request.reset_identity()
- # Add this site's results into final dictionary with all of the other results.
+ # Add this site's results into the final dictionary with all the other results.
results_total[social_network] = results_site
# Open the file containing account links
@@ -336,7 +333,7 @@ def sherlock(username, site_data, query_notify,
# Retrieve future and ensure it has finished
future = net_info["request_future"]
- r, error_text, expection_text = get_response(request_future=future,
+ r, error_text, exception_text = get_response(request_future=future,
error_type=error_type,
social_network=social_network)
@@ -366,13 +363,13 @@ def sherlock(username, site_data, query_notify,
# error_flag True denotes no error found in the HTML
# error_flag False denotes error found in the HTML
error_flag = True
- errors=net_info.get("errorMsg")
+ errors = net_info.get("errorMsg")
# errors will hold the error message
# it can be string or list
- # by insinstance method we can detect that
+ # by isinstance method we can detect that
# and handle the case for strings as normal procedure
# and if its list we can iterate the errors
- if isinstance(errors,str):
+ if isinstance(errors, str):
# Checks if the error message is in the HTML
# if error is present we will set flag to False
if errors in r.text:
@@ -411,7 +408,6 @@ def sherlock(username, site_data, query_notify,
raise ValueError(f"Unknown Error Type '{error_type}' for "
f"site '{social_network}'")
-
# Notify caller about results of query.
result = QueryResult(username=username,
site_name=social_network,
@@ -458,13 +454,13 @@ def timeout_check(value):
except:
raise ArgumentTypeError(f"Timeout '{value}' must be a number.")
if timeout <= 0:
- raise ArgumentTypeError(f"Timeout '{value}' must be greater than 0.0s.")
+ raise ArgumentTypeError(
+ f"Timeout '{value}' must be greater than 0.0s.")
return timeout
def main():
-
- version_string = f"%(prog)s {__version__}\n" + \
+ version_string = f"%(prog)s {__version__}\n" + \
f"{requests.__description__}: {requests.__version__}\n" + \
f"Python: {platform.python_version()}"
@@ -472,11 +468,11 @@ def main():
description=f"{module_name} (Version {__version__})"
)
parser.add_argument("--version",
- action="version", version=version_string,
+ action="version", version=version_string,
help="Display version information and dependencies."
)
parser.add_argument("--verbose", "-v", "-d", "--debug",
- action="store_true", dest="verbose", default=False,
+ action="store_true", dest="verbose", default=False,
help="Display extra debugging information and metrics."
)
parser.add_argument("--folderoutput", "-fo", dest="folderoutput",
@@ -492,7 +488,7 @@ def main():
action="store_true", dest="unique_tor", default=False,
help="Make requests over Tor with new Tor circuit after each request; increases runtime; requires Tor to be installed and in system path.")
parser.add_argument("--csv",
- action="store_true", dest="csv", default=False,
+ action="store_true", dest="csv", default=False,
help="Create Comma-Separated Values (CSV) File."
)
parser.add_argument("--site",
@@ -514,15 +510,15 @@ def main():
"Default timeout is infinity. "
"A longer timeout will be more likely to get results from slow sites. "
"On the other hand, this may cause a long delay to gather all results."
- )
+ )
parser.add_argument("--print-all",
action="store_true", dest="print_all",
help="Output sites where the username was not found."
- )
+ )
parser.add_argument("--print-found",
action="store_false", dest="print_all", default=False,
help="Output sites where the username was found."
- )
+ )
parser.add_argument("--no-color",
action="store_true", dest="no_color", default=False,
help="Don't color terminal output"
@@ -544,7 +540,8 @@ def main():
# Check for newer version of Sherlock. If it exists, let the user know about it
try:
- r = requests.get("https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/sherlock.py")
+ r = requests.get(
+ "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/sherlock.py")
remote_version = str(re.findall('__version__ = "(.*)"', r.text)[0])
local_version = __version__
@@ -556,7 +553,6 @@ def main():
except Exception as error:
print(f"A problem occurred while checking for an update: {error}")
-
# Argument check
# TODO regex check on args.proxy
if args.tor and (args.proxy is not None):
@@ -568,15 +564,16 @@ def main():
if args.tor or args.unique_tor:
print("Using Tor to make requests")
- print("Warning: some websites might refuse connecting over Tor, so note that using this option might increase connection errors.")
-
+ print(
+ "Warning: some websites might refuse connecting over Tor, so note that using this option might increase connection errors.")
+
if args.no_color:
# Disable color output.
init(strip=True, convert=False)
else:
# Enable color output.
init(autoreset=True)
-
+
# Check if both output methods are entered as input.
if args.output is not None and args.folderoutput is not None:
print("You can only use one of the output methods.")
@@ -587,11 +584,11 @@ def main():
print("You can only use --output with a single username")
sys.exit(1)
-
# Create object with all information about sites we are aware of.
try:
if args.local:
- sites = SitesInformation(os.path.join(os.path.dirname(__file__), "resources/data.json"))
+ sites = SitesInformation(os.path.join(
+ os.path.dirname(__file__), "resources/data.json"))
else:
sites = SitesInformation(args.json_file)
except Exception as error:
@@ -601,10 +598,7 @@ def main():
# Create original dictionary from SitesInformation() object.
# Eventually, the rest of the code will be updated to use the new object
# directly, but this will glue the two pieces together.
- site_data_all = {}
- for site in sites:
- site_data_all[site.name] = site.information
-
+ site_data_all = {site.name: site.information for site in sites}
if args.site_list is None:
# Not desired to look at a sub-set of sites
site_data = site_data_all
@@ -625,7 +619,8 @@ def main():
site_missing.append(f"'{site}'")
if site_missing:
- print(f"Error: Desired sites not found: {', '.join(site_missing)}.")
+ print(
+ f"Error: Desired sites not found: {', '.join(site_missing)}.")
if not site_data:
sys.exit(1)
@@ -662,7 +657,8 @@ def main():
if dictionary.get("status").status == QueryStatus.CLAIMED:
exists_counter += 1
file.write(dictionary["url_user"] + "\n")
- file.write(f"Total Websites Username Detected On : {exists_counter}\n")
+ file.write(
+ f"Total Websites Username Detected On : {exists_counter}\n")
if args.csv:
result_file = f"{username}.csv"
diff --git a/sherlock/sites.py b/sherlock/sites.py
index dfa14bd6..f417e47c 100644
--- a/sherlock/sites.py
+++ b/sherlock/sites.py
@@ -1,18 +1,18 @@
"""Sherlock Sites Information Module
-This module supports storing information about web sites.
+This module supports storing information about websites.
This is the raw data that will be used to search for usernames.
"""
import json
import requests
-class SiteInformation():
+class SiteInformation:
def __init__(self, name, url_home, url_username_format, username_claimed,
username_unclaimed, information):
"""Create Site Information Object.
- Contains information about a specific web site.
+ Contains information about a specific website.
Keyword Arguments:
self -- This object.
@@ -27,13 +27,13 @@ class SiteInformation():
indicates that the individual
usernames would show up under the
"https://somesite.com/users/" area of
- the web site.
+ the website.
username_claimed -- String containing username which is known
- to be claimed on web site.
+ to be claimed on the website.
username_unclaimed -- String containing username which is known
- to be unclaimed on web site.
+ to be unclaimed on the website.
information -- Dictionary containing all known information
- about web site.
+ about the website.
NOTE: Custom information about how to
actually detect the existence of the
username will be included in this
@@ -46,13 +46,13 @@ class SiteInformation():
Nothing.
"""
- self.name = name
- self.url_home = url_home
+ self.name = name
+ self.url_home = url_home
self.url_username_format = url_username_format
- self.username_claimed = username_claimed
- self.username_unclaimed = username_unclaimed
- self.information = information
+ self.username_claimed = username_claimed
+ self.username_unclaimed = username_unclaimed
+ self.information = information
return
@@ -65,15 +65,15 @@ class SiteInformation():
Return Value:
Nicely formatted string to get information about this object.
"""
-
+
return f"{self.name} ({self.url_home})"
-class SitesInformation():
+class SitesInformation:
def __init__(self, data_file_path=None):
"""Create Sites Information Object.
- Contains information about all supported web sites.
+ Contains information about all supported websites.
Keyword Arguments:
self -- This object.
@@ -106,7 +106,7 @@ class SitesInformation():
if data_file_path is None:
# The default data file is the live data.json which is in the GitHub repo. The reason why we are using
- # this instead of the local one is so that the user has the most up to date data. This prevents
+ # this instead of the local one is so that the user has the most up-to-date data. This prevents
# users from creating issue about false positives which has already been fixed or having outdated data
data_file_path = "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/resources/data.json"
@@ -119,21 +119,21 @@ class SitesInformation():
try:
response = requests.get(url=data_file_path)
except Exception as error:
- raise FileNotFoundError(f"Problem while attempting to access "
- f"data file URL '{data_file_path}': "
- f"{str(error)}"
- )
- if response.status_code == 200:
- try:
- site_data = response.json()
- except Exception as error:
- raise ValueError(f"Problem parsing json contents at "
- f"'{data_file_path}': {str(error)}."
- )
- else:
+ raise FileNotFoundError(
+ f"Problem while attempting to access data file URL '{data_file_path}': {error}"
+ )
+
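+ # Fail fast on a bad HTTP status before attempting to parse the body as JSON.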
+ if response.status_code != 200:
raise FileNotFoundError(f"Bad response while accessing "
f"data file URL '{data_file_path}'."
- )
+ )
+ try:
+ site_data = response.json()
+ except Exception as error:
+ raise ValueError(
+ f"Problem parsing json contents at '{data_file_path}': {error}."
+ )
+
else:
# Reference is to a file.
try:
@@ -141,17 +141,18 @@ class SitesInformation():
try:
site_data = json.load(file)
except Exception as error:
- raise ValueError(f"Problem parsing json contents at "
- f"'{data_file_path}': {str(error)}."
- )
- except FileNotFoundError as error:
+ raise ValueError(
+ f"Problem parsing json contents at '{data_file_path}': {error}."
+ )
+
+ except FileNotFoundError:
raise FileNotFoundError(f"Problem while attempting to access "
f"data file '{data_file_path}'."
- )
+ )
self.sites = {}
- # Add all of site information from the json file to internal site list.
+ # Add all site information from the JSON file to the internal site list.
for site_name in site_data:
try:
@@ -162,12 +163,11 @@ class SitesInformation():
site_data[site_name]["username_claimed"],
site_data[site_name]["username_unclaimed"],
site_data[site_name]
- )
+ )
except KeyError as error:
- raise ValueError(f"Problem parsing json contents at "
- f"'{data_file_path}': "
- f"Missing attribute {str(error)}."
- )
+ raise ValueError(
+ f"Problem parsing json contents at '{data_file_path}': Missing attribute {error}."
+ )
return
@@ -181,9 +181,7 @@ class SitesInformation():
List of strings containing names of sites.
"""
- site_names = sorted([site.name for site in self], key=str.lower)
-
- return site_names
+ return sorted([site.name for site in self], key=str.lower)
def __iter__(self):
"""Iterator For Object.
diff --git a/site_list.py b/site_list.py
index 339f7298..7ba267a4 100644
--- a/site_list.py
+++ b/site_list.py
@@ -3,9 +3,10 @@ This module generates the listing of supported sites
which can be found in sites.md
It also organizes all the sites in alphanumeric order
"""
+
import json
-pool = list()
+pool = []
with open("sherlock/resources/data.json", "r", encoding="utf-8") as data_file:
data = json.load(data_file)
diff --git a/sites.md b/sites.md
index 47efb761..1ca8c061 100644
--- a/sites.md
+++ b/sites.md
@@ -1,10 +1,11 @@
-## List Of Supported Sites (323 Sites In Total!)
+## List Of Supported Sites (368 Sites In Total!)
1. [2Dimensions](https://2Dimensions.com/)
1. [3dnews](http://forum.3dnews.ru/)
1. [7Cups](https://www.7cups.com/)
1. [9GAG](https://www.9gag.com/)
1. [About.me](https://about.me/)
1. [Academia.edu](https://www.academia.edu/)
+1. [Airbit](https://airbit.com/)
1. [Airliners](https://www.airliners.net/)
1. [Alik.cz](https://www.alik.cz/)
1. [AllMyLinks](https://allmylinks.com/)
@@ -19,13 +20,17 @@
1. [AskFM](https://ask.fm/)
1. [Atom Discussions](https://discuss.atom.io)
1. [Audiojungle](https://audiojungle.net/)
+1. [Autofrage](https://www.autofrage.net/)
1. [Avizo](https://www.avizo.cz/)
1. [BLIP.fm](https://blip.fm/)
1. [BOOTH](https://booth.pm/)
1. [Bandcamp](https://www.bandcamp.com/)
1. [Bazar.cz](https://www.bazar.cz/)
1. [Behance](https://www.behance.net/)
+1. [Bezuzyteczna](https://bezuzyteczna.pl)
+1. [Bikemap](https://www.bikemap.net/)
1. [BinarySearch](https://binarysearch.io/)
+1. [BioHacking](https://forum.dangerousthings.com/)
1. [BitBucket](https://bitbucket.org/)
1. [BitCoinForum](https://bitcoinforum.com)
1. [Bitwarden Forum](https://bitwarden.com/)
@@ -45,8 +50,10 @@
1. [Chess](https://www.chess.com/)
1. [CloudflareCommunity](https://community.cloudflare.com/)
1. [Clozemaster](https://www.clozemaster.com)
+1. [Clubhouse](https://www.clubhouse.com)
1. [Codecademy](https://www.codecademy.com/)
1. [Codechef](https://www.codechef.com/)
+1. [Codeforces](https://www.codeforces.com/)
1. [Codepen](https://codepen.io/)
1. [Codewars](https://www.codewars.com)
1. [Coil](https://coil.com/)
@@ -57,6 +64,7 @@
1. [Cracked](https://www.cracked.com/)
1. [Crevado](https://crevado.com/)
1. [Crowdin](https://crowdin.com/)
+1. [Cryptomator Forum](https://community.cryptomator.org/)
1. [DEV Community](https://dev.to/)
1. [DailyMotion](https://www.dailymotion.com/)
1. [Designspiration](https://www.designspiration.net/)
@@ -68,14 +76,16 @@
1. [Dribbble](https://dribbble.com/)
1. [Duolingo](https://duolingo.com/)
1. [Ello](https://ello.co/)
+1. [Envato Forum](https://forums.envato.com/)
1. [Etsy](https://www.etsy.com/)
1. [Euw](https://euw.op.gg/)
1. [EyeEm](https://www.eyeem.com/)
1. [F3.cool](https://f3.cool/)
1. [Facebook](https://www.facebook.com/)
-1. [Facenama](https://facenama.com/)
+1. [Fameswap](https://fameswap.com/)
1. [FanCentro](https://fancentro.com/)
1. [Fandom](https://www.fandom.com/)
+1. [Finanzfrage](https://www.finanzfrage.net/)
1. [Fiverr](https://www.fiverr.com/)
1. [Flickr](https://www.flickr.com/)
1. [Flightradar24](https://www.flightradar24.com/)
@@ -86,22 +96,31 @@
1. [Freelance.habr](https://freelance.habr.com/)
1. [Freelancer](https://www.freelancer.com/)
1. [Freesound](https://freesound.org/)
+1. [G2G](https://www.g2g.com/)
1. [GNOME VCS](https://gitlab.gnome.org/)
+1. [Gab](https://gab.com)
1. [GaiaOnline](https://www.gaiaonline.com/)
1. [Gam1ng](https://gam1ng.com.br)
1. [Gamespot](https://www.gamespot.com/)
+1. [Genius (Artists)](https://genius.com/)
+1. [Genius (Users)](https://genius.com/)
+1. [Gesundheitsfrage](https://www.gesundheitsfrage.net/)
1. [GetMyUni](https://getmyuni.com/)
+1. [Ghost](https://ghost.org/)
1. [Giphy](https://giphy.com/)
+1. [GitBook](https://gitbook.com/)
1. [GitHub](https://www.github.com/)
1. [GitHub Support Community](https://github.community)
1. [GitLab](https://gitlab.com/)
1. [Gitee](https://gitee.com/)
1. [GoodReads](https://www.goodreads.com/)
1. [Gradle](https://gradle.org/)
+1. [Grailed](https://www.grailed.com/)
1. [Gravatar](http://en.gravatar.com/)
1. [Gumroad](https://www.gumroad.com/)
1. [GunsAndAmmo](https://gunsandammo.com/)
1. [GuruShots](https://gurushots.com/)
+1. [Gutefrage](https://www.gutefrage.net/)
1. [HEXRPG](https://www.hexrpg.com/)
1. [HackTheBox](https://forum.hackthebox.eu/)
1. [Hackaday](https://hackaday.io/)
@@ -118,11 +137,14 @@
1. [Imgur](https://imgur.com/)
1. [Instagram](https://www.instagram.com/)
1. [Instructables](https://www.instructables.com/)
+1. [Ionic Forum](https://forum.ionicframework.com/)
1. [Issuu](https://issuu.com/)
1. [Itch.io](https://itch.io/)
1. [Itemfix](https://www.itemfix.com/)
1. [Jellyfin Weblate](https://translate.jellyfin.org/)
1. [Jimdo](https://jimdosite.com/)
+1. [Joplin Forum](https://discourse.joplinapp.org/)
+1. [KEAKR](https://www.keakr.com/)
1. [Kaggle](https://www.kaggle.com/)
1. [Keybase](https://keybase.io/)
1. [Kik](http://kik.me/)
@@ -130,15 +152,21 @@
1. [LOR](https://linux.org.ru/)
1. [Launchpad](https://launchpad.net/)
1. [LeetCode](https://leetcode.com/)
+1. [LessWrong](https://www.lesswrong.com/)
1. [Letterboxd](https://letterboxd.com/)
1. [Lichess](https://lichess.org)
1. [Linktree](https://linktr.ee/)
+1. [Listed](https://listed.to/)
1. [LiveJournal](https://www.livejournal.com/)
1. [Lobsters](https://lobste.rs/)
1. [Lolchess](https://lolchess.gg/)
+1. [LottieFiles](https://lottiefiles.com/)
+1. [Mapify](https://mapify.travel/)
1. [Medium](https://medium.com/)
1. [Memrise](https://www.memrise.com/)
+1. [Minecraft](https://minecraft.net/)
1. [MixCloud](https://www.mixcloud.com/)
+1. [Motorradfrage](https://www.motorradfrage.net/)
1. [Munzee](https://www.munzee.com/)
1. [MyAnimeList](https://myanimelist.net/)
1. [MyMiniFactory](https://www.myminifactory.com/)
@@ -147,12 +175,14 @@
1. [NationStates Nation](https://nationstates.net)
1. [NationStates Region](https://nationstates.net)
1. [Naver](https://naver.com)
+1. [Needrom](https://www.needrom.com/)
1. [Newgrounds](https://newgrounds.com)
1. [Nextcloud Forum](https://nextcloud.com/)
1. [Nightbot](https://nightbot.tv/)
1. [Ninja Kiwi](https://ninjakiwi.com/)
1. [NotABug.org](https://notabug.org/)
1. [Nyaa.si](https://nyaa.si/)
+1. [OGUsers](https://ogusers.com/)
1. [OK](https://ok.ru/)
1. [OnlyFans](https://onlyfans.com/)
1. [OpenStreetMap](https://www.openstreetmap.org/)
@@ -173,6 +203,7 @@
1. [Pokemon Showdown](https://pokemonshowdown.com)
1. [Polarsteps](https://polarsteps.com/)
1. [Polygon](https://www.polygon.com/)
+1. [Polymart](https://polymart.org/)
1. [Pornhub](https://pornhub.com/)
1. [ProductHunt](https://www.producthunt.com/)
1. [PromoDJ](http://promodj.com/)
@@ -181,8 +212,10 @@
1. [Raidforums](https://raidforums.com/)
1. [Rajce.net](https://www.rajce.idnes.cz/)
1. [Rate Your Music](https://rateyourmusic.com/)
+1. [Rclone Forum](https://forum.rclone.org/)
1. [Redbubble](https://www.redbubble.com/)
1. [Reddit](https://www.reddit.com/)
+1. [Reisefrage](https://www.reisefrage.net/)
1. [Replit.com](https://replit.com/)
1. [ResearchGate](https://www.researchgate.net/)
1. [ReverbNation](https://www.reverbnation.com/)
@@ -190,10 +223,12 @@
1. [RoyalCams](https://royalcams.com)
1. [RubyGems](https://rubygems.org/)
1. [RuneScape](https://www.runescape.com/)
+1. [SWAPD](https://swapd.co/)
1. [Sbazar.cz](https://www.sbazar.cz/)
1. [Scratch](https://scratch.mit.edu/)
1. [Scribd](https://www.scribd.com/)
1. [ShitpostBot5000](https://www.shitpostbot.com/)
+1. [Shpock](https://www.shpock.com/)
1. [Signal](https://community.signalusers.org)
1. [Slack](https://slack.com)
1. [Slant](https://www.slant.co/)
@@ -206,8 +241,10 @@
1. [SourceForge](https://sourceforge.net/)
1. [SoylentNews](https://soylentnews.org)
1. [Speedrun.com](https://speedrun.com/)
+1. [Splice](https://splice.com/)
1. [Splits.io](https://splits.io)
1. [Sporcle](https://www.sporcle.com/)
+1. [Sportlerfrage](https://www.sportlerfrage.net/)
1. [SportsRU](https://www.sports.ru/)
1. [Spotify](https://open.spotify.com/)
1. [Star Citizen](https://robertsspaceindustries.com/)
@@ -217,9 +254,11 @@
1. [SublimeForum](https://forum.sublimetext.com/)
1. [TETR.IO](https://tetr.io)
1. [TLDR Legal](https://tldrlegal.com/)
+1. [TRAKTRAIN](https://traktrain.com/)
1. [Telegram](https://t.me/)
1. [Tellonym.me](https://tellonym.me/)
1. [Tenor](https://tenor.com/)
+1. [ThemeForest](https://themeforest.net/)
1. [TikTok](https://tiktok.com/)
1. [Tinder](https://tinder.com/)
1. [TradingView](https://www.tradingview.com/)
@@ -254,6 +293,7 @@
1. [WordPress](https://wordpress.com)
1. [WordPressOrg](https://wordpress.org/)
1. [Wordnik](https://www.wordnik.com/)
+1. [Wykop](https://www.wykop.pl)
1. [Xbox Gamertag](https://xboxgamertag.com/)
1. [Xvideos](https://xvideos.com/)
1. [YouNow](https://www.younow.com/)
@@ -272,6 +312,8 @@
1. [datingRU](http://dating.ru)
1. [devRant](https://devrant.com/)
1. [drive2](https://www.drive2.ru/)
+1. [eBay.com](https://www.ebay.com/)
+1. [eBay.de](https://www.ebay.de/)
1. [eGPU](https://egpu.io/)
1. [eintracht](https://eintracht.de)
1. [fixya](https://www.fixya.com)
@@ -315,6 +357,7 @@
1. [prog.hu](https://prog.hu/)
1. [radio_echo_msk](https://echo.msk.ru/)
1. [satsisRU](https://satsis.info/)
+1. [skyrock](https://skyrock.com/)
1. [social.tchncs.de](https://social.tchncs.de/)
1. [spletnik](https://spletnik.ru/)
1. [svidbook](https://www.svidbook.ru/)
@@ -322,4 +365,5 @@
1. [uid](https://uid.me/)
1. [wiki.vg](https://wiki.vg/)
1. [xHamster](https://xhamster.com)
+1. [znanylekarz.pl](https://znanylekarz.pl)
1. [zoomit](https://www.zoomit.ir)