Merge branch 'master' into add-linkedin-search

Commit a74938343e (pull/571/head) by Siddharth Dushantha, committed via GitHub

@ -4,3 +4,5 @@ screenshot/
tests/ tests/
*.txt *.txt
!/requirements.txt !/requirements.txt
venv/

@ -3,12 +3,9 @@ name: Tests
on: on:
push: push:
branches: [ master ] branches: [ master ]
pull_request:
branches: [ master ]
jobs: jobs:
build: build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy: strategy:
matrix: matrix:
@ -25,11 +22,10 @@ jobs:
python -m pip install --upgrade pip python -m pip install --upgrade pip
pip install flake8 pytest pip install flake8 pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Lint With flake8 - name: Lint with flake8
run: | run: |
# stop the build if there are Python syntax errors or undefined names # stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Sherlock Site Detect Tests - name: Sherlock Site Detect Tests

@ -0,0 +1,27 @@
name: Nightly
on:
schedule:
# Run Nightly Tests At 3AM (The Hour Of The Wolf) Every Day
- cron: '0 3 * * *'
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Sherlock Site Coverage Tests
run: |
cd sherlock && python -m unittest tests.all.SherlockSiteCoverageTests --verbose

@ -0,0 +1,48 @@
name: Pull Request Action
on:
pull_request:
branches: [ master ]
jobs:
getchange:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.changes.outputs.matrix }}
steps:
- id: changes
run: |
URL="https://api.github.com/repos/sherlock-project/sherlock/pulls/${{ github.event.pull_request.number }}/files"
FILES=$(curl -s -X GET -G $URL | jq -r '.[] | .filename')
if echo $FILES | grep -q ".json"; then
echo "::set-output name=matrix::{\"include\":[{\"python\":\"3.8\"}]}"
else
echo "::set-output name=matrix::{\"include\":[{\"python\":\"3.6\" },{\"python\":\"3.7\"},{\"python\":\"3.8\"}]}"
fi
build:
needs: [getchange]
runs-on: ubuntu-latest
strategy:
matrix: ${{ fromJson(needs.getchange.outputs.matrix) }}
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python }}
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install flake8 pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Lint With flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Sherlock Site Detect Tests
run: |
cd sherlock && python -m unittest tests.all.SherlockDetectTests --verbose
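The `getchange` job above publishes a JSON string that the `build` job expands with `fromJson` into its test matrix: pull requests that touch a `.json` file (the site data) are tested only on Python 3.8, while everything else runs on 3.6, 3.7, and 3.8. A rough Python sketch of that decision, assuming the list of changed filenames has already been fetched from the GitHub API (the shell step above does this with `curl` and `jq`):

```python
import json

def build_matrix(changed_files):
    """Roughly mirror the shell logic above: site-data-only changes are
    tested on one Python version, code changes on all supported versions."""
    if any(".json" in name for name in changed_files):
        versions = ["3.8"]
    else:
        versions = ["3.6", "3.7", "3.8"]
    return json.dumps({"include": [{"python": v} for v in versions]})

# Example: a PR that only edits the site data
print(build_matrix(["sherlock/resources/data.json"]))
# -> {"include": [{"python": "3.8"}]}
```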

.gitignore

@ -27,3 +27,6 @@ tests/.excluded_sites
# MacOS Folder Metadata File # MacOS Folder Metadata File
.DS_Store .DS_Store
# Vim swap files
*.swp

@ -0,0 +1,2 @@
language = "python3"
run = ""

@ -23,5 +23,6 @@ COPY . /opt/sherlock/
RUN pip3 install -r requirements.txt -f /wheels \ RUN pip3 install -r requirements.txt -f /wheels \
&& rm -rf /wheels \ && rm -rf /wheels \
&& rm -rf /root/.cache/pip/* && rm -rf /root/.cache/pip/*
WORKDIR /opt/sherlock/sherlock
ENTRYPOINT ["python", "sherlock.py"] ENTRYPOINT ["python", "sherlock.py"]

@ -3,72 +3,60 @@
<img src="https://user-images.githubusercontent.com/27065646/53551960-ae4dff80-3b3a-11e9-9075-cef786c69364.png"/> <img src="https://user-images.githubusercontent.com/27065646/53551960-ae4dff80-3b3a-11e9-9075-cef786c69364.png"/>
<br> <br>
<span>Hunt down social media accounts by username across <a href="https://github.com/theyahya/sherlock/blob/master/sites.md">social networks</a></span> <span>Hunt down social media accounts by username across <a href="https://github.com/sherlock-project/sherlock/blob/master/sites.md">social networks</a></span>
<br> <br>
<a target="_blank" href="https://www.python.org/downloads/" title="Python version"><img src="https://img.shields.io/badge/python-%3E=_3.6-green.svg"></a> <a target="_blank" href="https://www.python.org/downloads/" title="Python version"><img src="https://img.shields.io/badge/python-%3E=_3.6-green.svg"></a>
<a target="_blank" href="LICENSE" title="License: MIT"><img src="https://img.shields.io/badge/License-MIT-blue.svg"></a> <a target="_blank" href="LICENSE" title="License: MIT"><img src="https://img.shields.io/badge/License-MIT-blue.svg"></a>
<a target="_blank" href="https://github.com/sherlock-project/sherlock/actions" title="Test Status"><img src="https://github.com/sherlock-project/sherlock/workflows/Tests/badge.svg?branch=master"></a> <a target="_blank" href="https://github.com/sherlock-project/sherlock/actions" title="Test Status"><img src="https://github.com/sherlock-project/sherlock/workflows/Tests/badge.svg?branch=master"></a>
<a target="_blank" href="https://twitter.com/intent/tweet?text=%F0%9F%94%8E%20Find%20usernames%20across%20social%20networks%20&url=https://github.com/sherlock-project/sherlock&hashtags=hacking,%20osint,%20bugbounty,%20reconnaissance" title="Share on Tweeter"><img src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social"></a> <a target="_blank" href="https://github.com/sherlock-project/sherlock/actions" title="Nightly Tests"><img src="https://github.com/sherlock-project/sherlock/workflows/Nightly/badge.svg?branch=master"></a>
<a target="_blank" href="https://twitter.com/intent/tweet?text=%F0%9F%94%8E%20Find%20usernames%20across%20social%20networks%20&url=https://github.com/sherlock-project/sherlock&hashtags=hacking,%20osint,%20bugbounty,%20reconnaissance" title="Share on Twitter"><img src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social"></a>
<a target="_blank" href="http://sherlock-project.github.io/"><img alt="Website" src="https://img.shields.io/website-up-down-green-red/http/sherlock-project.github.io/..svg"></a> <a target="_blank" href="http://sherlock-project.github.io/"><img alt="Website" src="https://img.shields.io/website-up-down-green-red/http/sherlock-project.github.io/..svg"></a>
<a target="_blank" href="https://microbadger.com/images/theyahya/sherlock"><img alt="docker image" src="https://images.microbadger.com/badges/version/theyahya/sherlock.svg"></a> <a target="_blank" href="https://microbadger.com/images/theyahya/sherlock"><img alt="docker image" src="https://images.microbadger.com/badges/version/theyahya/sherlock.svg"></a>
</p> </p>
<p align="center"> <p align="center">
<a href="#demo">Demo</a>
&nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp;
<a href="#installation">Installation</a> <a href="#installation">Installation</a>
&nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp; &nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp;
<a href="#usage">Usage</a> <a href="#usage">Usage</a>
&nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp; &nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp;
<a href="#docker-notes">Docker Notes</a> <a href="#docker-notes">Docker Notes</a>
&nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp; &nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp;
<a href="#adding-new-sites">Adding New Sites</a> <a href="#contributing">Contributing</a>
</p> </p>
<p align="center"> <p align="center">
<a href="https://asciinema.org/a/223115"> <a href="https://asciinema.org/a/223115">
<img src="./images/sherlock_preview.gif"/> <img src="./images/sherlock_demo.gif"/>
</a> </a>
</p> </p>
## Demo
Use this link to test Sherlock directly in your browser:
https://elody.com/scenario/plan/16/
## Installation ## Installation
**NOTE**: Python 3.6 or higher is required. ```console
```bash
# clone the repo # clone the repo
$ git clone https://github.com/sherlock-project/sherlock.git $ git clone https://github.com/sherlock-project/sherlock.git
# change the working directory to sherlock # change the working directory to sherlock
$ cd sherlock $ cd sherlock
# install python3 and python3-pip if they are not installed
# install the requirements # install the requirements
$ python3 -m pip install -r requirements.txt $ python3 -m pip install -r requirements.txt
``` ```
[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.png)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/sherlock-project/sherlock&tutorial=README.md)
[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.png)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/sherlock-project/sherlock&tutorial=README.md) [![Run on Repl.it](https://user-images.githubusercontent.com/27065646/91954718-7bbe2d80-ed02-11ea-9a4e-fd11c5e68148.png)](https://repl.it/github/sherlock-project/sherlock)
## Usage ## Usage
```bash ```console
$ python3 sherlock --help $ python3 sherlock --help
usage: sherlock [-h] [--version] [--verbose] [--rank] usage: sherlock [-h] [--version] [--verbose] [--folderoutput FOLDEROUTPUT] [--output OUTPUT]
[--folderoutput FOLDEROUTPUT] [--output OUTPUT] [--tor] [--tor] [--unique-tor] [--csv] [--site SITE_NAME] [--proxy PROXY_URL]
[--unique-tor] [--csv] [--site SITE_NAME] [--proxy PROXY_URL] [--json JSON_FILE] [--timeout TIMEOUT] [--print-found] [--no-color] [--browse]
[--json JSON_FILE] [--timeout TIMEOUT] [--print-found]
[--no-color] [--browse]
USERNAMES [USERNAMES ...] USERNAMES [USERNAMES ...]
Sherlock: Find Usernames Across Social Networks (Version 0.12.0) Sherlock: Find Usernames Across Social Networks (Version 0.12.7)
positional arguments: positional arguments:
USERNAMES One or more usernames to check with social networks. USERNAMES One or more usernames to check with social networks.
@ -78,36 +66,30 @@ optional arguments:
--version Display version information and dependencies. --version Display version information and dependencies.
--verbose, -v, -d, --debug --verbose, -v, -d, --debug
Display extra debugging information and metrics. Display extra debugging information and metrics.
--rank, -r Present websites ordered by their Alexa.com global
rank in popularity.
--folderoutput FOLDEROUTPUT, -fo FOLDEROUTPUT --folderoutput FOLDEROUTPUT, -fo FOLDEROUTPUT
If using multiple usernames, the output of the results If using multiple usernames, the output of the results will be saved to
will be saved to this folder. this folder.
--output OUTPUT, -o OUTPUT --output OUTPUT, -o OUTPUT
If using single username, the output of the result If using single username, the output of the result will be saved to this
will be saved to this file. file.
--tor, -t Make requests over Tor; increases runtime; requires --tor, -t Make requests over Tor; increases runtime; requires Tor to be installed and
Tor to be installed and in system path. in system path.
--unique-tor, -u Make requests over Tor with new Tor circuit after each --unique-tor, -u Make requests over Tor with new Tor circuit after each request; increases
request; increases runtime; requires Tor to be runtime; requires Tor to be installed and in system path.
installed and in system path.
--csv Create Comma-Separated Values (CSV) File. --csv Create Comma-Separated Values (CSV) File.
--site SITE_NAME Limit analysis to just the listed sites. Add multiple --site SITE_NAME Limit analysis to just the listed sites. Add multiple options to specify
options to specify more than one site. more than one site.
--proxy PROXY_URL, -p PROXY_URL --proxy PROXY_URL, -p PROXY_URL
Make requests over a proxy. e.g. Make requests over a proxy. e.g. socks5://127.0.0.1:1080
socks5://127.0.0.1:1080
--json JSON_FILE, -j JSON_FILE --json JSON_FILE, -j JSON_FILE
Load data from a JSON file or an online, valid, JSON Load data from a JSON file or an online, valid, JSON file.
file. --timeout TIMEOUT Time (in seconds) to wait for response to requests. Default timeout of
--timeout TIMEOUT Time (in seconds) to wait for response to requests. 60.0s.A longer timeout will be more likely to get results from slow
Default timeout of 60.0s.A longer timeout will be more sites.On the other hand, this may cause a long delay to gather all results.
likely to get results from slow sites.On the other
hand, this may cause a long delay to gather all
results.
--print-found Do not output sites where the username was not found. --print-found Do not output sites where the username was not found.
--no-color Don't color terminal output --no-color Don't color terminal output
--browse, -b Browse to all results on default bowser. --browse, -b Browse to all results on default browser.
--local, -l Force the use of the local data.json file.
``` ```
To search for only one user: To search for only one user:
@ -123,9 +105,11 @@ python3 sherlock user1 user2 user3
Accounts found will be stored in an individual text file with the corresponding username (e.g ```user123.txt```). Accounts found will be stored in an individual text file with the corresponding username (e.g ```user123.txt```).
## Anaconda (Windows) Notes ## Anaconda (Windows) Notes
If you are using Anaconda in Windows, using 'python3' might not work. Use 'python' instead. If you are using Anaconda in Windows, using 'python3' might not work. Use 'python' instead.
## Docker Notes ## Docker Notes
If docker is installed you can build an image and run this as a container. If docker is installed you can build an image and run this as a container.
``` ```
@ -165,15 +149,19 @@ You can use the `docker-compose.yml` file from the repository and use this comma
docker-compose run sherlock -o /opt/sherlock/results/text.txt user123 docker-compose run sherlock -o /opt/sherlock/results/text.txt user123
``` ```
## Adding New Sites ## Contributing
We would love to have you help us on the development of Sherlock. Each and every contribution is greatly valued!
Please look at the Wiki entry on Here are some things we would appreciate your help on:
[adding new sites](https://github.com/TheYahya/sherlock/wiki/Adding-Sites-To-Sherlock) - Addition of new site support ¹
to understand the issues. - Bringing back site support of [sites that have been removed](removed_sites.md) in the past due to false positives
**NOTE**: Sherlock is not accepting adult sites in the standard list.
[1] Please look at the Wiki entry on [adding new sites](https://github.com/sherlock-project/sherlock/wiki/Adding-Sites-To-Sherlock)
to understand the issues.
## Tests ## Tests
Thank you for contributing to Sherlock! Thank you for contributing to Sherlock!
Before creating a pull request with new development, please run the tests Before creating a pull request with new development, please run the tests
@ -186,7 +174,7 @@ Sherlock. This invocation hides the progress text that Sherlock normally
outputs, and instead shows the verbose output of the tests. outputs, and instead shows the verbose output of the tests.
``` ```
$ cd sherlock $ cd sherlock/sherlock
$ python3 -m unittest tests.all --verbose $ python3 -m unittest tests.all --verbose
``` ```

@ -3,6 +3,5 @@ version: '2'
services: services:
sherlock: sherlock:
build: . build: .
image: theyahya/sherlock
volumes: volumes:
- "./results:/opt/sherlock/results" - "./results:/opt/sherlock/results"

Binary image files changed: one added (After: 2.5 MiB), one removed (Before: 1.3 MiB).

@ -10,7 +10,7 @@
"AngelList": { "AngelList": {
"errorType": "status_code", "errorType": "status_code",
"rank": 5767, "rank": 5767,
"url": "https://angel.co/{}", "url": "https://angel.co/u/{}",
"urlMain": "https://angel.co/", "urlMain": "https://angel.co/",
"username_claimed": "blue", "username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7" "username_unclaimed": "noonewouldeverusethis7"
@ -31,15 +31,6 @@
"url": "http://blackplanet.com/{}", "url": "http://blackplanet.com/{}",
"urlMain": "http://blackplanet.com/" "urlMain": "http://blackplanet.com/"
}, },
"CapFriendly": {
"errorMsg": "No User Found",
"errorType": "message",
"rank": 64100,
"url": "https://www.capfriendly.com/users/{}",
"urlMain": "https://www.capfriendly.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis"
},
"Canva": { "Canva": {
"errorType": "response_url", "errorType": "response_url",
"errorUrl": "https://www.canva.com/{}", "errorUrl": "https://www.canva.com/{}",
@ -65,6 +56,14 @@
"username_claimed": "blue", "username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7" "username_unclaimed": "noonewouldeverusethis7"
}, },
"ColourLovers": {
"errorType": "status_code",
"rank": 21271,
"url": "https://www.colourlovers.com/lover/{}",
"urlMain": "https://www.colourlovers.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"EVE Online": { "EVE Online": {
"errorType": "response_url", "errorType": "response_url",
"errorUrl": "https://eveonline.com", "errorUrl": "https://eveonline.com",
@ -106,6 +105,14 @@
"username_claimed": "blue", "username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis777777" "username_unclaimed": "noonewouldeverusethis777777"
}, },
"gpodder.net": {
"errorType": "status_code",
"rank": 2013984,
"url": "https://gpodder.net/user/{}",
"urlMain": "https://gpodder.net/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"Imgur": { "Imgur": {
"errorType": "status_code", "errorType": "status_code",
"rank": 74, "rank": 74,
@ -114,6 +121,14 @@
"username_claimed": "blue", "username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7" "username_unclaimed": "noonewouldeverusethis7"
}, },
"Investing.com": {
"errorType": "status_code",
"rank": 196,
"url": "https://www.investing.com/traders/{}",
"urlMain": "https://www.investing.com/",
"username_claimed": "jenny",
"username_unclaimed": "noonewouldeverusethis7"
},
"Khan Academy": { "Khan Academy": {
"errorType": "status_code", "errorType": "status_code",
"rank": 377, "rank": 377,
@ -140,15 +155,6 @@
"username_claimed": "alex", "username_claimed": "alex",
"username_unclaimed": "noonewouldeverusethis7" "username_unclaimed": "noonewouldeverusethis7"
}, },
"PayPal": {
"errorType": "response_url",
"errorUrl": "https://www.paypal.com/paypalme2/404",
"rank": 18441,
"url": "https://www.paypal.com/paypalme2/{}",
"urlMain": "https://www.paypal.me/",
"username_claimed": "blue",
"username_unclaimed": "noneownsthisusername"
},
"Pexels": { "Pexels": {
"errorType": "status_code", "errorType": "status_code",
"rank": 745, "rank": 745,
@ -173,6 +179,15 @@
"username_claimed": "powershellteam", "username_claimed": "powershellteam",
"username_unclaimed": "noonewouldeverusethis7" "username_unclaimed": "noonewouldeverusethis7"
}, },
"Raidforums": {
"errorMsg": "The member you specified is either invalid or doesn't exist.",
"errorType": "message",
"rank": 32435,
"url": "https://raidforums.com/User-{}",
"urlMain": "https://raidforums.com/",
"username_claimed": "red",
"username_unclaimed": "noonewouldeverusethis7"
},
"RamblerDating": { "RamblerDating": {
"errorType": "response_url", "errorType": "response_url",
"errorUrl": "https://dating.rambler.ru/page/{}", "errorUrl": "https://dating.rambler.ru/page/{}",
@ -208,6 +223,14 @@
"username_claimed": "red", "username_claimed": "red",
"username_unclaimed": "noonewouldeverusethis7" "username_unclaimed": "noonewouldeverusethis7"
}, },
"TikTok": {
"errorType": "status_code",
"rank": 260,
"url": "https://www.tiktok.com/@{}",
"urlMain": "https://www.tiktok.com/",
"username_claimed": "red",
"username_unclaimed": "noonewouldeverusethis7"
},
"YandexMarket": { "YandexMarket": {
"errorMsg": "\u0422\u0443\u0442 \u043d\u0438\u0447\u0435\u0433\u043e \u043d\u0435\u0442", "errorMsg": "\u0422\u0443\u0442 \u043d\u0438\u0447\u0435\u0433\u043e \u043d\u0435\u0442",
"errorType": "message", "errorType": "message",
@ -251,15 +274,6 @@
"username_claimed": "admin", "username_claimed": "admin",
"username_unclaimed": "noonewouldeverusethis7" "username_unclaimed": "noonewouldeverusethis7"
}, },
"Telegram": {
"errorType": "response_url",
"errorUrl": "https://telegram.org",
"rank": 385,
"url": "https://t.me/{}",
"urlMain": "https://t.me/",
"username_claimed": "saman",
"username_unclaimed": "i_do_not_believe_this_account_exists_at_all"
},
"elwoRU": { "elwoRU": {
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d", "errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
"errorType": "message", "errorType": "message",
@ -277,6 +291,197 @@
"urlMain": "http://ingvarr.net.ru/", "urlMain": "http://ingvarr.net.ru/",
"username_claimed": "red", "username_claimed": "red",
"username_unclaimed": "noonewouldeverusethis7" "username_unclaimed": "noonewouldeverusethis7"
},
"Redsun.tf": {
"errorMsg": "The specified member cannot be found",
"errorType": "message",
"rank": 3796657,
"url": "https://forum.redsun.tf/members/?username={}",
"urlMain": "https://redsun.tf/",
"username_claimed": "dan",
"username_unclaimed": "noonewouldeverusethis"
},
"CreativeMarket": {
"errorType": "status_code",
"rank": 1896,
"url": "https://creativemarket.com/users/{}",
"urlMain": "https://creativemarket.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"pvpru": {
"errorType": "status_code",
"rank": 405547,
"url": "https://pvpru.com/board/member.php?username={}&tab=aboutme#aboutme",
"urlMain": "https://pvpru.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"easyen": {
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
"errorType": "message",
"rank": 11564,
"url": "https://easyen.ru/index/8-0-{}",
"urlMain": "https://easyen.ru/",
"username_claimed": "wd",
"username_unclaimed": "noonewouldeverusethis7"
},
"pedsovet": {
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
"errorType": "message",
"rank": 6776,
"url": "http://pedsovet.su/index/8-0-{}",
"urlMain": "http://pedsovet.su/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"radioskot": {
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
"errorType": "message",
"rank": 105878,
"url": "https://radioskot.ru/index/8-0-{}",
"urlMain": "https://radioskot.ru/",
"username_claimed": "red",
"username_unclaimed": "noonewouldeverusethis7"
},
"Coderwall": {
"errorMsg": "404! Our feels when that url is used",
"errorType": "message",
"rank": 11256,
"url": "https://coderwall.com/{}",
"urlMain": "https://coderwall.com/",
"username_claimed": "jenny",
"username_unclaimed": "noonewouldeverusethis7"
},
"TamTam": {
"errorType": "response_url",
"errorUrl": "https://tamtam.chat/",
"rank": 87903,
"url": "https://tamtam.chat/{}",
"urlMain": "https://tamtam.chat/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"Zomato": {
"errorType": "status_code",
"headers": {
"Accept-Language": "en-US,en;q=0.9"
},
"rank": 1920,
"url": "https://www.zomato.com/pl/{}/foodjourney",
"urlMain": "https://www.zomato.com/",
"username_claimed": "deepigoyal",
"username_unclaimed": "noonewouldeverusethis7"
},
"mixer.com": {
"errorType": "status_code",
"rank": 1544,
"url": "https://mixer.com/{}",
"urlMain": "https://mixer.com/",
"urlProbe": "https://mixer.com/api/v1/channels/{}",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"KanoWorld": {
"errorType": "status_code",
"rank": 181933,
"url": "https://api.kano.me/progress/user/{}",
"urlMain": "https://world.kano.me/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"YandexCollection": {
"errorType": "status_code",
"url": "https://yandex.ru/collections/user/{}/",
"urlMain": "https://yandex.ru/collections/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"500px": {
"errorMsg": "Oops! This page doesn\u2019t exist.",
"errorType": "message",
"regexCheck": "^[a-z0-9_]+$",
"url": "https://500px.com/{}",
"urlMain": "https://500px.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"PayPal": {
"errorMsg": "<meta name=\"twitter:title\" content=\"Get your very own PayPal.Me link\" />",
"errorType": "message",
"url": "https://www.paypal.com/paypalme/{}",
"headers": {
"User-Agent": ""
},
"urlMain": "https://www.paypal.me/",
"username_claimed": "blue",
"username_unclaimed": "noneownsthisusername7"
},
"Fiverr": {
"errorType": "response_url",
"errorUrl": "https://www.fiverr.com/",
"url": "https://www.fiverr.com/{}",
"urlMain": "https://www.fiverr.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis"
},
"ImageShack": {
"errorType": "response_url",
"errorUrl": "https://imageshack.us/",
"url": "https://imageshack.us/user/{}",
"urlMain": "https://imageshack.us/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"Aptoide": {
"errorType": "status_code",
"url": "https://{}.en.aptoide.com/",
"urlMain": "https://en.aptoide.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"Crunchyroll": {
"errorType": "status_code",
"url": "https://www.crunchyroll.com/user/{}",
"urlMain": "https://www.crunchyroll.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"T-MobileSupport": {
"errorType": "status_code",
"url": "https://support.t-mobile.com/people/{}",
"urlMain": "https://support.t-mobile.com",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
"OpenCollective": {
"errorType": "status_code",
"url": "https://opencollective.com/{}",
"urlMain": "https://opencollective.com/",
"username_claimed": "sindresorhus",
"username_unclaimed": "noonewouldeverusethis7"
},
"SegmentFault": {
"errorType": "status_code",
"url": "https://segmentfault.com/u/{}",
"urlMain": "https://segmentfault.com/",
"username_claimed": "bule",
"username_unclaimed": "noonewouldeverusethis7"
},
"Viadeo": {
"errorType": "status_code",
"url": "http://fr.viadeo.com/en/profile/{}",
"urlMain": "http://fr.viadeo.com/en/",
"username_claimed": "franck.patissier",
"username_unclaimed": "noonewouldeverusethis"
},
"MeetMe": {
"errorType": "response_url",
"errorUrl": "https://www.meetme.com/",
"url": "https://www.meetme.com/{}",
"urlMain": "https://www.meetme.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
} }
} }

@ -6,6 +6,96 @@ They are listed here in the hope that things may change in the future
so they may be re-included. so they may be re-included.
## gpodder.net
As of 2020-05-25, all usernames are reported as available.
The server is returning an HTTP Status 500 (Internal Server Error)
for all queries.
```
"gpodder.net": {
"errorType": "status_code",
"rank": 2013984,
"url": "https://gpodder.net/user/{}",
"urlMain": "https://gpodder.net/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
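For context, an `"errorType"` of `"status_code"` means a username is treated as claimed when its profile URL answers with a non-error HTTP status. A simplified sketch of that style of check (not Sherlock's actual implementation) shows why a server answering every query with 500 makes every username look available:

```python
import requests

def claimed_by_status_code(url_template, username):
    """Simplified status_code check: an HTTP error status is read as
    'no such user'. A site that returns 500 for every profile URL
    therefore reports every username as unclaimed/available."""
    response = requests.get(url_template.format(username))
    return response.status_code < 400

# Using the gpodder.net entry shown above
print(claimed_by_status_code("https://gpodder.net/user/{}", "blue"))
```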
## Raidforums
As of 2020-05-25, all usernames are reported as claimed.
The site will not let the query proceed unless JavaScript is enabled.
```
"Raidforums": {
"errorMsg": "The member you specified is either invalid or doesn't exist.",
"errorType": "message",
"rank": 32435,
"url": "https://raidforums.com/User-{}",
"urlMain": "https://raidforums.com/",
"username_claimed": "red",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## Investing.com
As of 2020-05-25, all usernames are reported as claimed.
Any query against a user seems to redirect to a general
information page at https://www.investing.com/brokers/. Access
probably requires a login.
```
"Investing.com": {
"errorType": "status_code",
"rank": 196,
"url": "https://www.investing.com/traders/{}",
"urlMain": "https://www.investing.com/",
"username_claimed": "jenny",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## TikTok
As of 2020-05-25, all usernames are reported as claimed.
```
"TikTok": {
"errorType": "status_code",
"rank": 260,
"url": "https://www.tiktok.com/@{}",
"urlMain": "https://www.tiktok.com/",
"username_claimed": "red",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## ColourLovers
As of 2020-05-24, all usernames are reported as claimed.
There is an API available (https://www.colourlovers.com/api/), but when
there is no match it returns an empty file, so changes would have to
happen before the lack of a response could be used for detection.
```
"ColourLovers": {
"errorType": "status_code",
"rank": 21271,
"url": "https://www.colourlovers.com/lover/{}",
"urlMain": "https://www.colourlovers.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
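If the API were used instead, the check would have to key off an empty response body rather than an HTTP status. A hypothetical sketch, assuming a per-user endpoint under the API base URL above (the exact path is an assumption, not taken from the entry):

```python
import requests

def colourlovers_claimed(username):
    # Hypothetical endpoint layout; the real API path may differ.
    url = f"https://www.colourlovers.com/api/lover/{username}"
    response = requests.get(url)
    # The API reportedly returns an empty body when there is no match,
    # so "claimed" has to be inferred from a non-empty response.
    return bool(response.text.strip())

print(colourlovers_claimed("blue"))
```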
## AdobeForums ## AdobeForums
As of 2020-04-12, all usernames are reported as available. As of 2020-04-12, all usernames are reported as available.
@ -33,7 +123,6 @@ This can be detected, but it requires a different detection method.
As of 2020-02-23, all usernames are reported as not existing. As of 2020-02-23, all usernames are reported as not existing.
Why was this ever added? It does not look like a social network.
``` ```
"Basecamp": { "Basecamp": {
@ -236,21 +325,6 @@ Usernames that don't exist are detected. First noticed 2019-10-25.
}, },
``` ```
## PayPal
Usernames that don't exist are detected.
```
"PayPal": {
"errorType": "response_url",
"errorUrl": "https://www.paypal.com/paypalme2/404",
"rank": 18441,
"url": "https://www.paypal.com/paypalme2/{}",
"urlMain": "https://www.paypal.me/",
"username_claimed": "blue",
"username_unclaimed": "noneownsthisusername"
},
```
## EVE Online ## EVE Online
@ -270,13 +344,13 @@ Usernames that exist are not detected.
## AngelList ## AngelList
Usernames that exist are not detected. Usernames that exist are not detected. Requests return a 403 Forbidden error.
``` ```
"AngelList": { "AngelList": {
"errorType": "status_code", "errorType": "status_code",
"rank": 5767, "rank": 5767,
"url": "https://angel.co/{}", "url": "https://angel.co/u/{}",
"urlMain": "https://angel.co/", "urlMain": "https://angel.co/",
"username_claimed": "blue", "username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7" "username_unclaimed": "noonewouldeverusethis7"
@ -411,22 +485,6 @@ Good-bye [Google Plus](https://en.wikipedia.org/wiki/Google%2B)...
}, },
``` ```
## CapFriendly
As of 2020-02-17, CapFriendly returns fake profile pages for non-existing users, what seems to distinguish between the pages is the Sign-up date, for non-existing users, the web application returns a date before 2000-01-01.
```
"CapFriendly": {
"errorMsg": "No User Found",
"errorType": "message",
"rank": 64100,
"url": "https://www.capfriendly.com/users/{}",
"urlMain": "https://www.capfriendly.com/",
"username_claimed": "user",
"username_unclaimed": "noonewouldeverusethis"
},
```
## Furaffinity ## Furaffinity
@ -512,22 +570,6 @@ As of 2020-04-02, boingboing.net requires a login to check if a user exits or no
}, },
``` ```
## Telegram
As of 2020-04-02 Telegram always returns true even though an username is non-existant
´´´
"Telegram": {
"errorType": "response_url",
"errorUrl": "https://telegram.org",
"rank": 385,
"url": "https://t.me/{}",
"urlMain": "https://t.me/",
"username_claimed": "saman",
"username_unclaimed": "i_do_not_believe_this_account_exists_at_all"
},
´´´
## elwoRU ## elwoRU
As of 2020-04-04, elwoRu does not exist anymore. I confirmed using As of 2020-04-04, elwoRu does not exist anymore. I confirmed using
downforeveryoneorjustme.com that the website is down. downforeveryoneorjustme.com that the website is down.
@ -561,3 +603,354 @@ downforeveryoneorjustme.com that the website is down.
}, },
``` ```
## Redsun.tf
As of 2020-06-20, Redsun.tf seems to add random digits to the end of usernames, which makes it
practically impossible for Sherlock to check usernames on this site.
```
"Redsun.tf": {
"errorMsg": "The specified member cannot be found",
"errorType": "message",
"rank": 3796657,
"url": "https://forum.redsun.tf/members/?username={}",
"urlMain": "https://redsun.tf/",
"username_claimed": "dan",
"username_unclaimed": "noonewouldeverusethis"
},
```
## Creative Market
As of 2020-06-20, Creative Market uses a CAPTCHA, so Sherlock cannot check usernames on this site:
every request returns a page asking us to prove that we are not a robot.
```
"CreativeMarket": {
"errorType": "status_code",
"rank": 1896,
"url": "https://creativemarket.com/users/{}",
"urlMain": "https://creativemarket.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## pvpru
As of 2020-06-20, pvpru uses Cloudflare, and because of this we get an "Access denied" error whenever
we try to check for a username.
```
"pvpru": {
"errorType": "status_code",
"rank": 405547,
"url": "https://pvpru.com/board/member.php?username={}&tab=aboutme#aboutme",
"urlMain": "https://pvpru.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## easyen
As of 2020-06-21, easyen returns false positives when using a username which contains
a period. Since we could not determine the site's username criteria, it will be removed.
```
"easyen": {
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
"errorType": "message",
"rank": 11564,
"url": "https://easyen.ru/index/8-0-{}",
"urlMain": "https://easyen.ru/",
"username_claimed": "wd",
"username_unclaimed": "noonewouldeverusethis7"
},
```
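One way this class of false positive could be avoided is a `regexCheck` entry, as the 500px entry elsewhere in data.json uses, so that usernames the site would never accept are rejected before any request is made. A small sketch of that pre-filter, assuming the same pattern as 500px (the actual username criteria for easyen are unknown):

```python
import re

# Pattern borrowed from the 500px entry; it rejects usernames that
# contain a period (or any other disallowed character) up front.
REGEX_CHECK = re.compile(r"^[a-z0-9_]+$")

def username_is_queryable(username):
    return REGEX_CHECK.match(username) is not None

print(username_is_queryable("plain_name"))     # True
print(username_is_queryable("name.with.dot"))  # False
```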
## pedsovet
As of 2020-06-21, pedsovet returns false positives when using a username which contains
a period. Since we could not determine the site's username criteria, it will be removed.
```
"pedsovet": {
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
"errorType": "message",
"rank": 6776,
"url": "http://pedsovet.su/index/8-0-{}",
"urlMain": "http://pedsovet.su/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## radioskot
As of 2020-06-21, radioskot returns false positives when using a username which contains
a period. Since we could not determine the site's username criteria, it will be removed.
```
"radioskot": {
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
"errorType": "message",
"rank": 105878,
"url": "https://radioskot.ru/index/8-0-{}",
"urlMain": "https://radioskot.ru/",
"username_claimed": "red",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## Coderwall
As of 2020-07-06, Coderwall returns false positives when checking for a username which contains a period.
I have tried to find out Coderwall's criteria for a valid username, but unfortunately I have not been able to
find them, and because of this the best thing we can do for now is to remove it.
```
"Coderwall": {
"errorMsg": "404! Our feels when that url is used",
"errorType": "message",
"rank": 11256,
"url": "https://coderwall.com/{}",
"urlMain": "https://coderwall.com/",
"username_claimed": "jenny",
"username_unclaimed": "noonewouldeverusethis7"
}
```
## TamTam
As of 2020-07-06, TamTam returns false positives when given a username which contains a period.
```
"TamTam": {
"errorType": "response_url",
"errorUrl": "https://tamtam.chat/",
"rank": 87903,
"url": "https://tamtam.chat/{}",
"urlMain": "https://tamtam.chat/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## Zomato
As of 2020-07-24, Zomato seems to be unstable. The majority of the time, it takes a very long time to respond.
```
"Zomato": {
"errorType": "status_code",
"headers": {
"Accept-Language": "en-US,en;q=0.9"
},
"rank": 1920,
"url": "https://www.zomato.com/pl/{}/foodjourney",
"urlMain": "https://www.zomato.com/",
"username_claimed": "deepigoyal",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## Mixer
As of 2020-07-22, the Mixer service has closed down.
```
"mixer.com": {
"errorType": "status_code",
"rank": 1544,
"url": "https://mixer.com/{}",
"urlMain": "https://mixer.com/",
"urlProbe": "https://mixer.com/api/v1/channels/{}",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## KanoWorld
As of 2020-07-22, KanoWorld's api.kano.me subdomain no longer exists, which makes it impossible for us to check for usernames.
If an alternative way to check for usernames is found, it will be added.
```
"KanoWorld": {
"errorType": "status_code",
"rank": 181933,
"url": "https://api.kano.me/progress/user/{}",
"urlMain": "https://world.kano.me/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## YandexCollection
As of 2020-08-11, YandexCollection presents us with a reCAPTCHA, which prevents us from checking for usernames.
```
"YandexCollection": {
"errorType": "status_code",
"url": "https://yandex.ru/collections/user/{}/",
"urlMain": "https://yandex.ru/collections/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## 500px
As of 2020-08-24, 500px now returns false positives, which was found when running the tests. It will most likely be added
again in the near future once we find a better error-detection method.
```
"500px": {
"errorMsg": "Oops! This page doesn\u2019t exist.",
"errorType": "message",
"regexCheck": "^[a-z0-9_]+$",
"url": "https://500px.com/{}",
"urlMain": "https://500px.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## PayPal
As of 2020-08-24, PayPal now returns false positives, which was found when running the tests. It will most likely be added
again in the near future once we find a better error-detection method.
```
"PayPal": {
"errorMsg": "<meta name=\"twitter:title\" content=\"Get your very own PayPal.Me link\" />",
"errorType": "message",
"url": "https://www.paypal.com/paypalme/{}",
"headers": {
"User-Agent": ""
},
"urlMain": "https://www.paypal.me/",
"username_claimed": "blue",
"username_unclaimed": "noneownsthisusername7"
},
```
## Fiverr
As of 2020-08-24, Fiverr now returns false positives, which was found when running the tests. It will most likely be added
again in the near future once we find a better error-detection method.
```
"Fiverr": {
"errorType": "response_url",
"errorUrl": "https://www.fiverr.com/",
"url": "https://www.fiverr.com/{}",
"urlMain": "https://www.fiverr.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis"
},
```
## ImageShack
As of 2020-08-24, ImageShack now returns false positives, which was found when running the tests. It will most likely be added again in the near future once we find a better error-detection method.
```
"ImageShack": {
"errorType": "response_url",
"errorUrl": "https://imageshack.us/",
"url": "https://imageshack.us/user/{}",
"urlMain": "https://imageshack.us/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## Aptoide
As of 2020-08-24, Aptoide now returns false positives, which was found when running the tests. It will most likely be added
again in the near future once we find a better error-detection method.
```
"Aptoide": {
"errorType": "status_code",
"url": "https://{}.en.aptoide.com/",
"urlMain": "https://en.aptoide.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## Crunchyroll
As of 2020-08-24, Crunchyroll now returns false positives, which was found when running the tests. It will most likely be added again in the near future once we find a better error-detection method.
```
"Crunchyroll": {
"errorType": "status_code",
"url": "https://www.crunchyroll.com/user/{}",
"urlMain": "https://www.crunchyroll.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## T-MobileSupport
As of 2020-08-24, T-MobileSupport now returns false positives, which was found when running the tests. It will most likely be added again in the near future once we find a better error-detection method.
```
"T-MobileSupport": {
"errorType": "status_code",
"url": "https://support.t-mobile.com/people/{}",
"urlMain": "https://support.t-mobile.com",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## OpenCollective
As of 2020-08-24, OpenCollective now returns false positives, which was found when running the tests. It will most likely be added again in the near future once we find a better error-detection method.
```
"OpenCollective": {
"errorType": "status_code",
"url": "https://opencollective.com/{}",
"urlMain": "https://opencollective.com/",
"username_claimed": "sindresorhus",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## SegmentFault
As of 2020-08-24, SegmentFault now returns false positives, which was found when running the tests. It will most likely be added again in the near future once we find a better error-detection method.
```
"SegmentFault": {
"errorType": "status_code",
"url": "https://segmentfault.com/u/{}",
"urlMain": "https://segmentfault.com/",
"username_claimed": "bule",
"username_unclaimed": "noonewouldeverusethis7"
},
```
## Viadeo
As of 2020-08-24, Viadeo now returns false positives, which was found when running the tests. It will most likely be added again in the near future once we find a fix for this.
```
"Viadeo": {
"errorType": "status_code",
"url": "http://fr.viadeo.com/en/profile/{}",
"urlMain": "http://fr.viadeo.com/en/",
"username_claimed": "franck.patissier",
"username_unclaimed": "noonewouldeverusethis"
},
```
## MeetMe
As of 2020-09-02, MeetMe returns false positives.
```
"MeetMe": {
"errorType": "response_url",
"errorUrl": "https://www.meetme.com/",
"url": "https://www.meetme.com/{}",
"urlMain": "https://www.meetme.com/",
"username_claimed": "blue",
"username_unclaimed": "noonewouldeverusethis7"
},
```

@ -7,8 +7,27 @@ This module contains the main logic to search for usernames at social
networks. networks.
""" """
import sherlock import sys
if __name__ == "__main__": if __name__ == "__main__":
# Checking if the user is using the correct version of Python
# Reference:
# If Python version is 3.6.5
# major --^
# minor ----^
# micro ------^
major = sys.version_info[0]
minor = sys.version_info[1]
python_version = str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])
if major != 3:
print("Sherlock requires Python 3.6+\nYou are using Python %s, which is not supported by Sherlock" % (python_version))
sys.exit(1)
if minor < 6:
print("Sherlock requires Python 3.6+\nYou are using Python %s, which is not supported by Sherlock" % (python_version))
sys.exit(1)
import sherlock
sherlock.main() sherlock.main()

@ -193,7 +193,9 @@ class QueryNotifyPrint(QueryNotify):
Fore.WHITE + "]" + Fore.WHITE + "]" +
response_time_text + response_time_text +
Fore.GREEN + Fore.GREEN +
f" {self.result.site_name}: {self.result.site_url_user}")) f" {self.result.site_name}: " +
Style.RESET_ALL +
f"{self.result.site_url_user}"))
else: else:
print(f"[+]{response_time_text} {self.result.site_name}: {self.result.site_url_user}") print(f"[+]{response_time_text} {self.result.site_name}: {self.result.site_url_user}")
elif result.status == QueryStatus.AVAILABLE: elif result.status == QueryStatus.AVAILABLE:
@ -218,7 +220,7 @@ class QueryNotifyPrint(QueryNotify):
else: else:
print(f"[-] {self.result.site_name}: {self.result.context} ") print(f"[-] {self.result.site_name}: {self.result.context} ")
elif result.status == QueryStatus.ILLEGAL: elif result.status == QueryStatus.ILLEGAL:
if self.print_found_only == False: if not self.print_found_only:
msg = "Illegal Username Format For This Site!" msg = "Illegal Username Format For This Site!"
if self.color: if self.color:
print((Style.BRIGHT + Fore.WHITE + "[" + print((Style.BRIGHT + Fore.WHITE + "[" +

File diff suppressed because it is too large.

@ -8,16 +8,12 @@ networks.
""" """
import csv import csv
import json
import os import os
import platform import platform
import re import re
import sys import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter from argparse import ArgumentParser, RawDescriptionHelpFormatter
from time import monotonic from time import monotonic
from concurrent.futures import ThreadPoolExecutor
from time import time
import webbrowser
import requests import requests
@ -25,12 +21,12 @@ from requests_futures.sessions import FuturesSession
from torrequest import TorRequest from torrequest import TorRequest
from result import QueryStatus from result import QueryStatus
from result import QueryResult from result import QueryResult
from notify import QueryNotify
from notify import QueryNotifyPrint from notify import QueryNotifyPrint
from sites import SitesInformation from sites import SitesInformation
module_name = "Sherlock: Find Usernames Across Social Networks" module_name = "Sherlock: Find Usernames Across Social Networks"
__version__ = "0.12.0" __version__ = "0.12.7"
@ -260,7 +256,7 @@ def sherlock(username, site_data, query_notify,
allow_redirects = True allow_redirects = True
# This future starts running the request in a new thread, doesn't block the main thread # This future starts running the request in a new thread, doesn't block the main thread
if proxy != None: if proxy is not None:
proxies = {"http": proxy, "https": proxy} proxies = {"http": proxy, "https": proxy}
future = request_method(url=url_probe, headers=headers, future = request_method(url=url_probe, headers=headers,
proxies=proxies, proxies=proxies,
@ -443,9 +439,6 @@ def main():
action="store_true", dest="verbose", default=False, action="store_true", dest="verbose", default=False,
help="Display extra debugging information and metrics." help="Display extra debugging information and metrics."
) )
parser.add_argument("--rank", "-r",
action="store_true", dest="rank", default=False,
help="Present websites ordered by their Alexa.com global rank in popularity.")
parser.add_argument("--folderoutput", "-fo", dest="folderoutput", parser.add_argument("--folderoutput", "-fo", dest="folderoutput",
help="If using multiple usernames, the output of the results will be saved to this folder." help="If using multiple usernames, the output of the results will be saved to this folder."
) )
@ -497,18 +490,37 @@ def main():
) )
parser.add_argument("--browse", "-b", parser.add_argument("--browse", "-b",
action="store_true", dest="browse", default=False, action="store_true", dest="browse", default=False,
help="Browse to all results on default bowser.") help="Browse to all results on default browser.")
parser.add_argument("--local", "-l",
action="store_true", default=False,
help="Force the use of the local data.json file.")
args = parser.parse_args() args = parser.parse_args()
# Check for newer version of Sherlock. If it exists, let the user know about it
try:
r = requests.get("https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/sherlock.py")
remote_version = str(re.findall('__version__ = "(.*)"', r.text)[0])
local_version = __version__
if remote_version != local_version:
print("Update Available!\n" +
f"You are running version {local_version}. Version {remote_version} is available at https://git.io/sherlock")
except Exception as error:
print(f"A problem occured while checking for an update: {error}")
# Argument check # Argument check
# TODO regex check on args.proxy # TODO regex check on args.proxy
if args.tor and (args.proxy != None): if args.tor and (args.proxy is not None):
raise Exception("Tor and Proxy cannot be set at the same time.") raise Exception("Tor and Proxy cannot be set at the same time.")
# Make prompts # Make prompts
if args.proxy != None: if args.proxy is not None:
print("Using the proxy: " + args.proxy) print("Using the proxy: " + args.proxy)
if args.tor or args.unique_tor: if args.tor or args.unique_tor:
@ -528,7 +540,10 @@ def main():
#Create object with all information about sites we are aware of. #Create object with all information about sites we are aware of.
try: try:
sites = SitesInformation(args.json_file) if args.local:
sites = SitesInformation(os.path.join(os.path.dirname(__file__), 'resources/data.json'))
else:
sites = SitesInformation(args.json_file)
except Exception as error: except Exception as error:
print(f"ERROR: {error}") print(f"ERROR: {error}")
sys.exit(1) sys.exit(1)
@ -550,26 +565,20 @@ def main():
site_data = {} site_data = {}
site_missing = [] site_missing = []
for site in args.site_list: for site in args.site_list:
counter = 0
for existing_site in site_data_all: for existing_site in site_data_all:
if site.lower() == existing_site.lower(): if site.lower() == existing_site.lower():
site_data[existing_site] = site_data_all[existing_site] site_data[existing_site] = site_data_all[existing_site]
if not site_data: counter += 1
if counter == 0:
# Build up list of sites not supported for future error message. # Build up list of sites not supported for future error message.
site_missing.append(f"'{site}'") site_missing.append(f"'{site}'")
if site_missing: if site_missing:
print( print(f"Error: Desired sites not found: {', '.join(site_missing)}.")
f"Error: Desired sites not found: {', '.join(site_missing)}.")
sys.exit(1)
if args.rank:
# Sort data by rank
site_dataCpy = dict(site_data)
ranked_sites = sorted(site_data, key=lambda k: ("rank" not in k, site_data[k].get("rank", sys.maxsize)))
site_data = {}
for site in ranked_sites:
site_data[site] = site_dataCpy.get(site)
if not site_data:
sys.exit(1)
#Create notify object for query results. #Create notify object for query results.
query_notify = QueryNotifyPrint(result=None, query_notify = QueryNotifyPrint(result=None,
@ -606,9 +615,9 @@ def main():
if dictionary.get("status").status == QueryStatus.CLAIMED: if dictionary.get("status").status == QueryStatus.CLAIMED:
exists_counter += 1 exists_counter += 1
file.write(dictionary["url_user"] + "\n") file.write(dictionary["url_user"] + "\n")
file.write(f"Total Websites Username Detected On : {exists_counter}") file.write(f"Total Websites Username Detected On : {exists_counter}\n")
if args.csv == True: if args.csv:
with open(username + ".csv", "w", newline='', encoding="utf-8") as csv_report: with open(username + ".csv", "w", newline='', encoding="utf-8") as csv_report:
writer = csv.writer(csv_report) writer = csv.writer(csv_report)
writer.writerow(['username', writer.writerow(['username',

@ -3,7 +3,6 @@
This module supports storing information about web sites. This module supports storing information about web sites.
This is the raw data that will be used to search for usernames. This is the raw data that will be used to search for usernames.
""" """
import logging
import os import os
import json import json
import operator import operator
@ -12,9 +11,8 @@ import sys
class SiteInformation(): class SiteInformation():
def __init__(self, name, url_home, url_username_format, popularity_rank, def __init__(self, name, url_home, url_username_format, username_claimed,
username_claimed, username_unclaimed, username_unclaimed, information):
information):
"""Create Site Information Object. """Create Site Information Object.
Contains information about a specific web site. Contains information about a specific web site.
@ -33,10 +31,6 @@ class SiteInformation():
usernames would show up under the usernames would show up under the
"https://somesite.com/users/" area of "https://somesite.com/users/" area of
the web site. the web site.
popularity_rank -- Integer indicating popularity of site.
In general, smaller numbers mean more
popular ("0" or None means ranking
information not available).
username_claimed -- String containing username which is known username_claimed -- String containing username which is known
to be claimed on web site. to be claimed on web site.
username_unclaimed -- String containing username which is known username_unclaimed -- String containing username which is known
@ -59,11 +53,6 @@ class SiteInformation():
self.url_home = url_home self.url_home = url_home
self.url_username_format = url_username_format self.url_username_format = url_username_format
if (popularity_rank is None) or (popularity_rank == 0):
#We do not know the popularity, so make site go to bottom of list.
popularity_rank = sys.maxsize
self.popularity_rank = popularity_rank
self.username_claimed = username_claimed self.username_claimed = username_claimed
self.username_unclaimed = username_unclaimed self.username_unclaimed = username_unclaimed
self.information = information self.information = information
@ -119,22 +108,19 @@ class SitesInformation():
""" """
if data_file_path is None: if data_file_path is None:
#Use internal default. # The default data file is the live data.json which is in the GitHub repo. The reason why we are using
data_file_path = \ # this instead of the local one is so that the user has the most up to date data. This prevents
os.path.join(os.path.dirname(os.path.realpath(__file__)), # users from creating issue about false positives which has already been fixed or having outdated data
"resources/data.json" data_file_path = "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/resources/data.json"
)
# Ensure that specified data file has correct extension.
#Ensure that specified data file has correct extension. if not data_file_path.lower().endswith(".json"):
if ".json" != data_file_path[-5:].lower():
raise FileNotFoundError(f"Incorrect JSON file extension for " raise FileNotFoundError(f"Incorrect JSON file extension for "
f"data file '{data_file_path}'." f"data file '{data_file_path}'."
) )
if ( ("http://" == data_file_path[:7].lower()) or if "http://" == data_file_path[:7].lower() or "https://" == data_file_path[:8].lower():
("https://" == data_file_path[:8].lower()) # Reference is to a URL.
):
#Reference is to a URL.
try: try:
response = requests.get(url=data_file_path) response = requests.get(url=data_file_path)
except Exception as error: except Exception as error:
@ -173,14 +159,11 @@ class SitesInformation():
#Add all of site information from the json file to internal site list. #Add all of site information from the json file to internal site list.
for site_name in site_data: for site_name in site_data:
try: try:
#If popularity unknown, make site be at bottom of list.
popularity_rank = site_data[site_name].get("rank", sys.maxsize)
self.sites[site_name] = \ self.sites[site_name] = \
SiteInformation(site_name, SiteInformation(site_name,
site_data[site_name]["urlMain"], site_data[site_name]["urlMain"],
site_data[site_name]["url"], site_data[site_name]["url"],
popularity_rank,
site_data[site_name]["username_claimed"], site_data[site_name]["username_claimed"],
site_data[site_name]["username_unclaimed"], site_data[site_name]["username_unclaimed"],
site_data[site_name] site_data[site_name]
@ -193,32 +176,17 @@ class SitesInformation():
return return
def site_name_list(self, popularity_rank=False): def site_name_list(self):
"""Get Site Name List. """Get Site Name List.
Keyword Arguments: Keyword Arguments:
self -- This object. self -- This object.
popularity_rank -- Boolean indicating if list should be sorted
by popularity rank.
Default value is False.
NOTE: List is sorted in ascending
alphabetical order is popularity rank
is not requested.
Return Value: Return Value:
List of strings containing names of sites. List of strings containing names of sites.
""" """
if popularity_rank == True: site_names = sorted([site.name for site in self], key=str.lower)
#Sort in ascending popularity rank order.
site_rank_name = \
sorted([(site.popularity_rank,site.name) for site in self],
key=operator.itemgetter(0)
)
site_names = [name for _,name in site_rank_name]
else:
#Sort in ascending alphabetical order.
site_names = sorted([site.name for site in self], key=str.lower)
return site_names return site_names

@@ -75,7 +75,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
-        site = 'Facebook'
+        site = 'Pinterest'
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
@@ -102,7 +102,7 @@ class SherlockDetectTests(SherlockBaseTest):
         Will trigger an assert if detection mechanism did not work as expected.
         """
-        site = 'Facebook'
+        site = 'Pinterest'
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.

@@ -2,7 +2,6 @@
 This module contains various utilities for running tests.
 """
-import json
 import os
 import os.path
 import unittest

@@ -1,40 +1,12 @@
 """Sherlock: Supported Site Listing
-This module generates the listing of supported sites.
+This module generates the listing of supported sites
+which can be found in sites.md
+It also organizes all the sites in alphanumeric order
 """
 import json
-import sys
-import requests
-import threading
-import xml.etree.ElementTree as ET
-from datetime import datetime
-from argparse import ArgumentParser, RawDescriptionHelpFormatter

 pool = list()

-def get_rank(domain_to_query, dest):
-
-    #Retrieve ranking data via alexa API
-    url = f"http://data.alexa.com/data?cli=10&url={domain_to_query}"
-    xml_data = requests.get(url).text
-    root = ET.fromstring(xml_data)
-
-    try:
-        #Get ranking for this site.
-        dest['rank'] = int(root.find(".//REACH").attrib["RANK"])
-    except:
-        #We did not find the rank for some reason.
-        print(f"Error retrieving rank information for '{domain_to_query}'")
-        print(f"    Returned XML is |{xml_data}|")
-
-    return
-
-parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter
-                       )
-parser.add_argument("--rank","-r",
-                    action="store_true", dest="rank", default=False,
-                    help="Update all website ranks (not recommended)."
-                   )
-args = parser.parse_args()

 with open("sherlock/resources/data.json", "r", encoding="utf-8") as data_file:
     data = json.load(data_file)
@@ -44,30 +16,17 @@ with open("sites.md", "w") as site_file:
     for social_network in data:
         url_main = data.get(social_network).get("urlMain")
-        data.get(social_network)["rank"] = 0
-        if args.rank:
-            th = threading.Thread(target=get_rank, args=(url_main, data.get(social_network)))
-        else:
-            th = None
-        pool.append((social_network, url_main, th))
-        if args.rank:
-            th.start()
+        pool.append((social_network, url_main))

     index = 1
-    for social_network, url_main, th in pool:
-        if args.rank:
-            th.join()
+    for social_network, url_main in pool:
         site_file.write(f'{index}. [{social_network}]({url_main})\n')
-        sys.stdout.write("\r{0}".format(f"Updated {index} out of {data_length} entries"))
-        sys.stdout.flush()
         index = index + 1

-    if args.rank:
-        site_file.write(f'\nAlexa.com rank data fetched at ({datetime.utcnow()} UTC)\n')
-
 sorted_json_data = json.dumps(data, indent=2, sort_keys=True)

 with open("sherlock/resources/data.json", "w") as data_file:
     data_file.write(sorted_json_data)

-print("\nFinished updating supported site listing!")
+print("Finished updating supported site listing!")

@@ -1,308 +1,299 @@
-## List Of Supported Sites (305 Sites In Total!)
+## List Of Supported Sites (298 Sites In Total!)
1. [2Dimensions](https://2Dimensions.com/) 1. [2Dimensions](https://2Dimensions.com/)
2. [3dnews](http://forum.3dnews.ru/) 2. [3dnews](http://forum.3dnews.ru/)
3. [4pda](https://4pda.ru/) 3. [7Cups](https://www.7cups.com/)
4. [500px](https://500px.com/) 4. [9GAG](https://www.9gag.com/)
5. [7Cups](https://www.7cups.com/) 5. [About.me](https://about.me/)
6. [9GAG](https://9gag.com/) 6. [Academia.edu](https://www.academia.edu/)
7. [About.me](https://about.me/) 7. [Alik.cz](https://www.alik.cz/)
8. [Academia.edu](https://www.academia.edu/) 8. [AllTrails](https://www.alltrails.com/)
9. [Alik.cz](https://www.alik.cz/) 9. [Anobii](https://www.anobii.com/)
10. [AllTrails](https://www.alltrails.com/) 10. [Archive.org](https://archive.org)
11. [Anobii](https://www.anobii.com/) 11. [Asciinema](https://asciinema.org)
12. [Aptoide](https://en.aptoide.com/) 12. [Ask Fedora](https://ask.fedoraproject.org/)
13. [Archive.org](https://archive.org) 13. [AskFM](https://ask.fm/)
14. [Asciinema](https://asciinema.org) 14. [Audiojungle](https://audiojungle.net/)
15. [Ask Fedora](https://ask.fedoraproject.org/) 15. [Avizo](https://www.avizo.cz/)
16. [AskFM](https://ask.fm/) 16. [BLIP.fm](https://blip.fm/)
17. [Audiojungle](https://audiojungle.net/) 17. [BOOTH](https://booth.pm/)
18. [Avizo](https://www.avizo.cz/) 18. [Badoo](https://badoo.com/)
19. [BLIP.fm](https://blip.fm/) 19. [Bandcamp](https://www.bandcamp.com/)
20. [BOOTH](https://booth.pm/) 20. [Bazar.cz](https://www.bazar.cz/)
21. [Badoo](https://badoo.com/) 21. [Behance](https://www.behance.net/)
22. [Bandcamp](https://www.bandcamp.com/) 22. [BinarySearch](https://binarysearch.io/)
23. [Bazar.cz](https://www.bazar.cz/) 23. [BitBucket](https://bitbucket.org/)
24. [Behance](https://www.behance.net/) 24. [BitCoinForum](https://bitcoinforum.com)
25. [BitBucket](https://bitbucket.org/) 25. [Blogger](https://www.blogger.com/)
26. [BitCoinForum](https://bitcoinforum.com) 26. [BodyBuilding](https://bodyspace.bodybuilding.com/)
27. [Blogger](https://www.blogger.com/) 27. [Bookcrossing](https://www.bookcrossing.com/)
28. [BodyBuilding](https://bodyspace.bodybuilding.com/) 28. [BuyMeACoffee](https://www.buymeacoffee.com/)
29. [Bookcrossing](https://www.bookcrossing.com/) 29. [BuzzFeed](https://buzzfeed.com/)
30. [BuyMeACoffee](https://www.buymeacoffee.com/) 30. [CNET](https://www.cnet.com/)
31. [BuzzFeed](https://buzzfeed.com/) 31. [CapFriendly](https://www.capfriendly.com/)
32. [CNET](https://www.cnet.com/) 32. [Carbonmade](https://carbonmade.com/)
33. [Carbonmade](https://carbonmade.com/) 33. [Career.habr](https://career.habr.com/)
34. [Career.habr](https://career.habr.com/) 34. [CashMe](https://cash.me/)
35. [CashMe](https://cash.me/) 35. [Cent](https://cent.co/)
36. [Cent](https://cent.co/) 36. [Championat](https://www.championat.com/)
37. [Championat](https://www.championat.com/) 37. [Chatujme.cz](https://chatujme.cz/)
38. [Chatujme.cz](https://chatujme.cz/) 38. [Chess](https://www.chess.com/)
39. [Chess](https://www.chess.com/ru/) 39. [Cloob](https://www.cloob.com/)
40. [Cloob](https://www.cloob.com/) 40. [CloudflareCommunity](https://community.cloudflare.com/)
41. [CloudflareCommunity](https://community.cloudflare.com/) 41. [Clozemaster](https://www.clozemaster.com)
42. [Clozemaster](https://www.clozemaster.com) 42. [Codecademy](https://www.codecademy.com/)
43. [Codecademy](https://www.codecademy.com/) 43. [Codechef](https://www.codechef.com/)
44. [Codechef](https://www.codechef.com/) 44. [Codewars](https://www.codewars.com)
45. [Coderwall](https://coderwall.com/) 45. [Contently](https://contently.com/)
46. [Codewars](https://www.codewars.com) 46. [Coroflot](https://coroflot.com/)
47. [ColourLovers](https://www.colourlovers.com/) 47. [Countable](https://www.countable.us/)
48. [Contently](https://contently.com/) 48. [Cracked](https://www.cracked.com/)
49. [Coroflot](https://coroflot.com/) 49. [Crevado](https://crevado.com/)
50. [Cracked](https://www.cracked.com/) 50. [DEV Community](https://dev.to/)
51. [CreativeMarket](https://creativemarket.com/) 51. [DailyMotion](https://www.dailymotion.com/)
52. [Crevado](https://crevado.com/) 52. [Designspiration](https://www.designspiration.net/)
53. [Crunchyroll](https://www.crunchyroll.com/) 53. [DeviantART](https://deviantart.com)
54. [DEV Community](https://dev.to/) 54. [Discogs](https://www.discogs.com/)
55. [DailyMotion](https://www.dailymotion.com/) 55. [Discuss.Elastic.co](https://discuss.elastic.co/)
56. [Designspiration](https://www.designspiration.net/) 56. [Disqus](https://disqus.com/)
57. [DeviantART](https://deviantart.com) 57. [Docker Hub](https://hub.docker.com/)
58. [Discogs](https://www.discogs.com/) 58. [Dribbble](https://dribbble.com/)
59. [Discuss.Elastic.co](https://discuss.elastic.co/) 59. [Duolingo](https://duolingo.com/)
60. [Disqus](https://disqus.com/) 60. [Ebay](https://www.ebay.com/)
61. [Docker Hub](https://hub.docker.com/) 61. [Ello](https://ello.co/)
62. [Dribbble](https://dribbble.com/) 62. [Etsy](https://www.etsy.com/)
63. [Duolingo](https://duolingo.com/) 63. [Euw](https://euw.op.gg/)
64. [Ebay](https://www.ebay.com/) 64. [EyeEm](https://www.eyeem.com/)
65. [Ello](https://ello.co/) 65. [F3.cool](https://f3.cool/)
66. [Etsy](https://www.etsy.com/) 66. [Facebook](https://www.facebook.com/)
67. [Euw](https://euw.op.gg/) 67. [Facenama](https://facenama.com/)
68. [EyeEm](https://www.eyeem.com/) 68. [Fandom](https://www.fandom.com/)
69. [F3.cool](https://f3.cool/) 69. [Filmogs](https://www.filmo.gs/)
70. [Facebook](https://www.facebook.com/) 70. [Flickr](https://www.flickr.com/)
71. [Facenama](https://facenama.com/) 71. [Flightradar24](https://www.flightradar24.com/)
72. [Fandom](https://www.fandom.com/) 72. [Flipboard](https://flipboard.com/)
73. [Filmogs](https://www.filmo.gs/) 73. [Football](https://www.rusfootball.info/)
74. [Fiverr](https://www.fiverr.com/) 74. [FortniteTracker](https://fortnitetracker.com/challenges)
75. [Flickr](https://www.flickr.com/) 75. [Freelance.habr](https://freelance.habr.com/)
76. [Flightradar24](https://www.flightradar24.com/) 76. [Freelancer.com](https://www.freelancer.com/)
77. [Flipboard](https://flipboard.com/) 77. [Freesound](https://freesound.org/)
78. [Football](https://www.rusfootball.info/) 78. [GDProfiles](https://gdprofiles.com/)
79. [FortniteTracker](https://fortnitetracker.com/challenges) 79. [Gamespot](https://www.gamespot.com/)
80. [Freelance.habr](https://freelance.habr.com/) 80. [GetMyUni](https://getmyuni.com/)
81. [Freesound](https://freesound.org/) 81. [Giphy](https://giphy.com/)
82. [GDProfiles](https://gdprofiles.com/) 82. [GitHub](https://www.github.com/)
83. [Gamespot](https://www.gamespot.com/) 83. [GitLab](https://gitlab.com/)
84. [Giphy](https://giphy.com/) 84. [Gitee](https://gitee.com/)
85. [GitHub](https://www.github.com/) 85. [GoodReads](https://www.goodreads.com/)
86. [GitLab](https://gitlab.com/) 86. [Gravatar](http://en.gravatar.com/)
87. [Gitee](https://gitee.com/) 87. [Gumroad](https://www.gumroad.com/)
88. [GoodReads](https://www.goodreads.com/) 88. [GunsAndAmmo](https://gunsandammo.com/)
89. [Gravatar](http://en.gravatar.com/) 89. [GuruShots](https://gurushots.com/)
90. [Gumroad](https://www.gumroad.com/) 90. [Hackaday](https://hackaday.io/)
91. [GunsAndAmmo](https://gunsandammo.com/) 91. [HackTheBox](https://forum.hackthebox.eu/)
92. [GuruShots](https://gurushots.com/) 92. [HackerNews](https://news.ycombinator.com/)
93. [HackTheBox](https://forum.hackthebox.eu/) 93. [HackerOne](https://hackerone.com/)
94. [HackerNews](https://news.ycombinator.com/) 94. [HackerRank](https://hackerrank.com/)
95. [HackerOne](https://hackerone.com/) 95. [House-Mixes.com](https://www.house-mixes.com/)
96. [HackerRank](https://hackerrank.com/) 96. [Houzz](https://houzz.com/)
97. [House-Mixes.com](https://www.house-mixes.com/) 97. [HubPages](https://hubpages.com/)
98. [Houzz](https://houzz.com/) 98. [Hubski](https://hubski.com/)
99. [HubPages](https://hubpages.com/) 99. [IFTTT](https://www.ifttt.com/)
100. [Hubski](https://hubski.com/) 100. [ImgUp.cz](https://imgup.cz/)
101. [IFTTT](https://www.ifttt.com/) 101. [Instagram](https://www.instagram.com/)
102. [ImageShack](https://imageshack.us/) 102. [Instructables](https://www.instructables.com/)
103. [ImgUp.cz](https://imgup.cz/) 103. [Issuu](https://issuu.com/)
104. [Instagram](https://www.instagram.com/) 104. [Itch.io](https://itch.io/)
105. [Instructables](https://www.instructables.com/) 105. [Jimdo](https://jimdosite.com/)
106. [Investing.com](https://www.investing.com/) 106. [Kaggle](https://www.kaggle.com/)
107. [Issuu](https://issuu.com/) 107. [Kali community](https://forums.kali.org/)
108. [Itch.io](https://itch.io/) 108. [Keybase](https://keybase.io/)
109. [Jimdo](https://jimdosite.com/) 109. [Kik](http://kik.me/)
110. [Kaggle](https://www.kaggle.com/) 110. [Kongregate](https://www.kongregate.com/)
111. [Kali community](https://forums.kali.org/) 111. [LOR](https://linux.org.ru/)
112. [KanoWorld](https://world.kano.me/) 112. [Launchpad](https://launchpad.net/)
113. [Keybase](https://keybase.io/) 113. [LeetCode](https://leetcode.com/)
114. [Kik](http://kik.me/) 114. [Letterboxd](https://letterboxd.com/)
115. [Kongregate](https://www.kongregate.com/) 115. [Lichess](https://lichess.org)
116. [LOR](https://linux.org.ru/) 116. [LiveJournal](https://www.livejournal.com/)
117. [Launchpad](https://launchpad.net/) 117. [LiveLeak](https://www.liveleak.com/)
118. [LeetCode](https://leetcode.com/) 118. [Lobsters](https://lobste.rs/)
119. [Letterboxd](https://letterboxd.com/) 119. [Lolchess](https://lolchess.gg/)
120. [Lichess](https://lichess.org) 120. [Medium](https://medium.com/)
121. [LiveJournal](https://www.livejournal.com/) 121. [Memrise](https://www.memrise.com/)
122. [LiveLeak](https://www.liveleak.com/) 122. [MixCloud](https://www.mixcloud.com/)
123. [Lobsters](https://lobste.rs/) 123. [Munzee](https://www.munzee.com/)
124. [Lolchess](https://lolchess.gg/) 124. [MyAnimeList](https://myanimelist.net/)
125. [Medium](https://medium.com/) 125. [MyMiniFactory](https://www.myminifactory.com/)
126. [MeetMe](https://www.meetme.com/) 126. [Myspace](https://myspace.com/)
127. [Memrise](https://www.memrise.com/) 127. [NICommunityForum](https://www.native-instruments.com/forum/)
128. [MixCloud](https://www.mixcloud.com/) 128. [NPM](https://www.npmjs.com/)
129. [MyAnimeList](https://myanimelist.net/) 129. [NPM-Package](https://www.npmjs.com/)
130. [Myspace](https://myspace.com/) 130. [NameMC (Minecraft.net skins)](https://namemc.com/)
131. [NICommunityForum](https://www.native-instruments.com/forum/) 131. [NationStates Nation](https://nationstates.net)
132. [NPM](https://www.npmjs.com/) 132. [NationStates Region](https://nationstates.net)
133. [NPM-Package](https://www.npmjs.com/) 133. [Naver](https://naver.com)
134. [NameMC (Minecraft.net skins)](https://namemc.com/) 134. [Newgrounds](https://newgrounds.com)
135. [NationStates Nation](https://nationstates.net) 135. [Nightbot](https://nightbot.tv/)
136. [NationStates Region](https://nationstates.net) 136. [NotABug.org](https://notabug.org/)
137. [Newgrounds](https://newgrounds.com) 137. [OK](https://ok.ru/)
138. [OK](https://ok.ru/) 138. [OpenStreetMap](https://www.openstreetmap.org/)
139. [OpenCollective](https://opencollective.com/) 139. [Opensource](https://opensource.com/)
140. [OpenStreetMap](https://www.openstreetmap.org/) 140. [Oracle Community](https://community.oracle.com)
141. [Oracle Community](https://community.oracle.com) 141. [Otzovik](https://otzovik.com/)
142. [Otzovik](https://otzovik.com/) 142. [OurDJTalk](https://ourdjtalk.com/)
143. [OurDJTalk](https://ourdjtalk.com/) 143. [PCGamer](https://pcgamer.com)
144. [PCGamer](https://pcgamer.com) 144. [PCPartPicker](https://pcpartpicker.com)
145. [PCPartPicker](https://pcpartpicker.com) 145. [PSNProfiles.com](https://psnprofiles.com/)
146. [PSNProfiles.com](https://psnprofiles.com/) 146. [Packagist](https://packagist.org/)
147. [Packagist](https://packagist.org/) 147. [Pastebin](https://pastebin.com/)
148. [Pastebin](https://pastebin.com/) 148. [Patreon](https://www.patreon.com/)
149. [Patreon](https://www.patreon.com/) 149. [Periscope](https://www.periscope.tv/)
150. [Periscope](https://www.periscope.tv/) 150. [Photobucket](https://photobucket.com/)
151. [Photobucket](https://photobucket.com/) 151. [Pinkbike](https://www.pinkbike.com/)
152. [Pinkbike](https://www.pinkbike.com/) 152. [Pinterest](https://www.pinterest.com/)
153. [Pinterest](https://www.pinterest.com/) 153. [PlayStore](https://play.google.com/store)
154. [PlayStore](https://play.google.com/store) 154. [Pling](https://www.pling.com/)
155. [Pling](https://www.pling.com/) 155. [Plug.DJ](https://plug.dj/)
156. [Plug.DJ](https://plug.dj/) 156. [Pokemon Showdown](https://pokemonshowdown.com)
157. [Pokemon Showdown](https://pokemonshowdown.com) 157. [PokerStrategy](http://www.pokerstrategy.net)
158. [PokerStrategy](http://www.pokerstrategy.net) 158. [Polarsteps](https://polarsteps.com/)
159. [Polygon](https://www.polygon.com/) 159. [Polygon](https://www.polygon.com/)
160. [ProductHunt](https://www.producthunt.com/) 160. [ProductHunt](https://www.producthunt.com/)
161. [PromoDJ](http://promodj.com/) 161. [PromoDJ](http://promodj.com/)
162. [Quora](https://www.quora.com/) 162. [Quora](https://www.quora.com/)
163. [Raidforums](https://raidforums.com/) 163. [Rajce.net](https://www.rajce.idnes.cz/)
164. [Rajce.net](https://www.rajce.idnes.cz/) 164. [Rate Your Music](https://rateyourmusic.com/)
165. [Rate Your Music](https://rateyourmusic.com/) 165. [Realmeye](https://www.realmeye.com/)
166. [Realmeye](https://www.realmeye.com/) 166. [Redbubble](https://www.redbubble.com/)
167. [Redbubble](https://www.redbubble.com/) 167. [Reddit](https://www.reddit.com/)
168. [Reddit](https://www.reddit.com/) 168. [Repl.it](https://repl.it/)
169. [Redsun.tf](https://redsun.tf/) 169. [ResearchGate](https://www.researchgate.net/)
170. [Repl.it](https://repl.it/) 170. [ReverbNation](https://www.reverbnation.com/)
171. [ResearchGate](https://www.researchgate.net/) 171. [Roblox](https://www.roblox.com/)
172. [ReverbNation](https://www.reverbnation.com/) 172. [RubyGems](https://rubygems.org/)
173. [Roblox](https://www.roblox.com/) 173. [Sbazar.cz](https://www.sbazar.cz/)
174. [RubyGems](https://rubygems.org/) 174. [Scratch](https://scratch.mit.edu/)
175. [Sbazar.cz](https://www.sbazar.cz/) 175. [Scribd](https://www.scribd.com/)
176. [Scratch](https://scratch.mit.edu/) 176. [ShitpostBot5000](https://www.shitpostbot.com/)
177. [Scribd](https://www.scribd.com/) 177. [Signal](https://community.signalusers.org)
178. [ShitpostBot5000](https://www.shitpostbot.com/) 178. [Slack](https://slack.com)
179. [Signal](https://community.signalusers.org) 179. [Slashdot](https://slashdot.org)
180. [Slack](https://slack.com) 180. [SlideShare](https://slideshare.net/)
181. [SlideShare](https://slideshare.net/) 181. [Smashcast](https://www.smashcast.tv/)
182. [Smashcast](https://www.smashcast.tv/) 182. [Smule](https://www.smule.com/)
183. [Smule](https://www.smule.com/) 183. [SoundCloud](https://soundcloud.com/)
184. [SoundCloud](https://soundcloud.com/) 184. [SourceForge](https://sourceforge.net/)
185. [SourceForge](https://sourceforge.net/) 185. [SoylentNews](https://soylentnews.org)
186. [Speedrun.com](https://speedrun.com/) 186. [SparkPeople](https://www.sparkpeople.com)
187. [Splits.io](https://splits.io) 187. [Speedrun.com](https://speedrun.com/)
188. [Sporcle](https://www.sporcle.com/) 188. [Splits.io](https://splits.io)
189. [SportsRU](https://www.sports.ru/) 189. [Sporcle](https://www.sporcle.com/)
190. [Spotify](https://open.spotify.com/) 190. [SportsRU](https://www.sports.ru/)
191. [Star Citizen](https://robertsspaceindustries.com/) 191. [Spotify](https://open.spotify.com/)
192. [Steam](https://steamcommunity.com/) 192. [Star Citizen](https://robertsspaceindustries.com/)
193. [SteamGroup](https://steamcommunity.com/) 193. [Steam](https://steamcommunity.com/)
194. [Steamid](https://steamid.uk/) 194. [SteamGroup](https://steamcommunity.com/)
195. [SublimeForum](https://forum.sublimetext.com/) 195. [Steamid](https://steamid.uk/)
196. [T-MobileSupport](https://support.t-mobile.com) 196. [Strava](https://www.strava.com/)
197. [TamTam](https://tamtam.chat/) 197. [SublimeForum](https://forum.sublimetext.com/)
198. [Taringa](https://taringa.net/) 198. [Taringa](https://taringa.net/)
199. [Tellonym.me](https://tellonym.me/) 199. [Telegram](https://t.me/)
200. [TikTok](https://www.tiktok.com/) 200. [Tellonym.me](https://tellonym.me/)
201. [Tinder](https://tinder.com/) 201. [Tinder](https://tinder.com/)
202. [TrackmaniaLadder](http://en.tm-ladder.com/index.php) 202. [TrackmaniaLadder](http://en.tm-ladder.com/index.php)
203. [TradingView](https://www.tradingview.com/) 203. [TradingView](https://www.tradingview.com/)
204. [Trakt](https://www.trakt.tv/) 204. [Trakt](https://www.trakt.tv/)
205. [TrashboxRU](https://trashbox.ru/) 205. [TrashboxRU](https://trashbox.ru/)
206. [Trello](https://trello.com/) 206. [Travellerspoint](https://www.travellerspoint.com)
207. [TripAdvisor](https://tripadvisor.com/) 207. [Trello](https://trello.com/)
208. [Twitch](https://www.twitch.tv/) 208. [TripAdvisor](https://tripadvisor.com/)
209. [Twitter](https://www.twitter.com/) 209. [TryHackMe](https://tryhackme.com/)
210. [Typeracer](https://typeracer.com) 210. [Twitch](https://www.twitch.tv/)
211. [Ultimate-Guitar](https://ultimate-guitar.com/) 211. [Twitter](https://mobile.twitter.com/)
212. [Unsplash](https://unsplash.com/) 212. [Typeracer](https://typeracer.com)
213. [VK](https://vk.com/) 213. [Ultimate-Guitar](https://ultimate-guitar.com/)
214. [VSCO](https://vsco.co/) 214. [Unsplash](https://unsplash.com/)
215. [Velomania](https://forum.velomania.ru/) 215. [VK](https://vk.com/)
216. [Venmo](https://venmo.com/) 216. [VSCO](https://vsco.co/)
217. [Viadeo](http://fr.viadeo.com/en/) 217. [Velomania](https://forum.velomania.ru/)
218. [Vimeo](https://vimeo.com/) 218. [Venmo](https://venmo.com/)
219. [Virgool](https://virgool.io/) 219. [Vimeo](https://vimeo.com/)
220. [VirusTotal](https://www.virustotal.com/) 220. [Virgool](https://virgool.io/)
221. [Wattpad](https://www.wattpad.com/) 221. [VirusTotal](https://www.virustotal.com/)
222. [We Heart It](https://weheartit.com/) 222. [Warrior Forum](https://www.warriorforum.com/)
223. [WebNode](https://www.webnode.cz/) 223. [Wattpad](https://www.wattpad.com/)
224. [Whonix Forum](https://forums.whonix.org/) 224. [We Heart It](https://weheartit.com/)
225. [Wikidot](http://www.wikidot.com/) 225. [WebNode](https://www.webnode.cz/)
226. [Wikipedia](https://www.wikipedia.org/) 226. [Whonix Forum](https://forums.whonix.org/)
227. [Wix](https://wix.com/) 227. [Wikidot](http://www.wikidot.com/)
228. [WordPress](https://wordpress.com) 228. [Wikipedia](https://www.wikipedia.org/)
229. [WordPressOrg](https://wordpress.org/) 229. [Windy](https://windy.com/)
230. [YandexCollection](https://yandex.ru/collections/) 230. [Wix](https://wix.com/)
231. [YouNow](https://www.younow.com/) 231. [WordPress](https://wordpress.com)
232. [YouPic](https://youpic.com/) 232. [WordPressOrg](https://wordpress.org/)
233. [YouTube](https://www.youtube.com/) 233. [Xbox Gamertag](https://xboxgamertag.com/)
234. [Zhihu](https://www.zhihu.com/) 234. [YouNow](https://www.younow.com/)
235. [Zomato](https://www.zomato.com/) 235. [YouPic](https://youpic.com/)
236. [akniga](https://akniga.org/profile/blue/) 236. [YouTube](https://www.youtube.com/)
237. [allmylinks](https://allmylinks.com/) 237. [Zhihu](https://www.zhihu.com/)
238. [authorSTREAM](http://www.authorstream.com/) 238. [akniga](https://akniga.org/profile/blue/)
239. [babyRU](https://www.baby.ru/) 239. [allmylinks](https://allmylinks.com/)
240. [babyblogRU](https://www.babyblog.ru/) 240. [aminoapp](https://aminoapps.com/)
241. [chaos.social](https://chaos.social/) 241. [authorSTREAM](http://www.authorstream.com/)
242. [couchsurfing](https://www.couchsurfing.com/) 242. [babyRU](https://www.baby.ru/)
243. [d3RU](https://d3.ru/) 243. [babyblogRU](https://www.babyblog.ru/)
244. [dailykos](https://www.dailykos.com) 244. [chaos.social](https://chaos.social/)
245. [datingRU](http://dating.ru) 245. [couchsurfing](https://www.couchsurfing.com/)
246. [devRant](https://devrant.com/) 246. [d3RU](https://d3.ru/)
247. [drive2](https://www.drive2.ru/) 247. [dailykos](https://www.dailykos.com)
248. [eGPU](https://egpu.io/) 248. [datingRU](http://dating.ru)
249. [easyen](https://easyen.ru/) 249. [devRant](https://devrant.com/)
250. [eintracht](https://eintracht.de) 250. [drive2](https://www.drive2.ru/)
251. [fixya](https://www.fixya.com) 251. [eGPU](https://egpu.io/)
252. [fl](https://www.fl.ru/) 252. [eintracht](https://eintracht.de)
253. [forum_guns](https://forum.guns.ru/) 253. [fixya](https://www.fixya.com)
254. [forumhouseRU](https://www.forumhouse.ru/) 254. [fl](https://www.fl.ru/)
255. [geocaching](https://www.geocaching.com/) 255. [forum_guns](https://forum.guns.ru/)
256. [gfycat](https://gfycat.com/) 256. [forumhouseRU](https://www.forumhouse.ru/)
257. [gpodder.net](https://gpodder.net/) 257. [geocaching](https://www.geocaching.com/)
258. [habr](https://habr.com/) 258. [gfycat](https://gfycat.com/)
259. [hackster](https://www.hackster.io) 259. [habr](https://habr.com/)
260. [hunting](https://www.hunting.ru/forum/) 260. [hackster](https://www.hackster.io)
261. [iMGSRC.RU](https://imgsrc.ru/) 261. [hunting](https://www.hunting.ru/forum/)
262. [igromania](http://forum.igromania.ru/) 262. [iMGSRC.RU](https://imgsrc.ru/)
263. [interpals](https://www.interpals.net/) 263. [igromania](http://forum.igromania.ru/)
264. [irecommend](https://irecommend.ru/) 264. [interpals](https://www.interpals.net/)
265. [jeuxvideo](http://www.jeuxvideo.com) 265. [irecommend](https://irecommend.ru/)
266. [kwork](https://www.kwork.ru/) 266. [jeuxvideo](http://www.jeuxvideo.com)
267. [labpentestit](https://lab.pentestit.ru/) 267. [kofi](https://ko-fi.com)
268. [last.fm](https://last.fm/) 268. [kwork](https://www.kwork.ru/)
269. [leasehackr](https://forum.leasehackr.com/) 269. [labpentestit](https://lab.pentestit.ru/)
270. [livelib](https://www.livelib.ru/) 270. [last.fm](https://last.fm/)
271. [mastodon.cloud](https://mastodon.cloud/) 271. [leasehackr](https://forum.leasehackr.com/)
272. [mastodon.social](https://chaos.social/) 272. [livelib](https://www.livelib.ru/)
273. [mastodon.technology](https://mastodon.xyz/) 273. [mastodon.cloud](https://mastodon.cloud/)
274. [mastodon.xyz](https://mastodon.xyz/) 274. [mastodon.social](https://chaos.social/)
275. [metacritic](https://www.metacritic.com/) 275. [mastodon.technology](https://mastodon.xyz/)
276. [mixer.com](https://mixer.com/) 276. [mastodon.xyz](https://mastodon.xyz/)
277. [moikrug](https://moikrug.ru/) 277. [mercadolivre](https://www.mercadolivre.com.br)
278. [mstdn.io](https://mstdn.io/) 278. [metacritic](https://www.metacritic.com/)
279. [nightbot](https://nightbot.tv/) 279. [moikrug](https://moikrug.ru/)
280. [nnRU](https://https://www.nn.ru/) 280. [mstdn.io](https://mstdn.io/)
281. [notabug.org](https://notabug.org/) 281. [nairaland.com](https://www.nairaland.com/)
282. [note](https://note.com/) 282. [nnRU](https://https://www.nn.ru/)
283. [opennet](https://www.opennet.ru/) 283. [note](https://note.com/)
284. [opensource](https://opensource.com/) 284. [opennet](https://www.opennet.ru/)
285. [osu!](https://osu.ppy.sh/) 285. [osu!](https://osu.ppy.sh/)
286. [pedsovet](http://pedsovet.su/) 286. [phpRU](https://php.ru/forum/)
287. [phpRU](https://php.ru/forum/) 287. [pikabu](https://pikabu.ru/)
288. [pikabu](https://pikabu.ru/) 288. [pr0gramm](https://pr0gramm.com/)
289. [pr0gramm](https://pr0gramm.com/) 289. [prog.hu](https://prog.hu/)
290. [pvpru](https://pvpru.com/) 290. [radio_echo_msk](https://echo.msk.ru/)
291. [radio_echo_msk](https://echo.msk.ru/) 291. [satsisRU](https://satsis.info/)
292. [radioskot](https://radioskot.ru/) 292. [social.tchncs.de](https://social.tchncs.de/)
293. [satsisRU](https://satsis.info/) 293. [spletnik](https://spletnik.ru/)
294. [segmentfault](https://segmentfault.com/) 294. [svidbook](https://www.svidbook.ru/)
295. [social.tchncs.de](https://social.tchncs.de/) 295. [toster](https://www.toster.ru/)
296. [soylentnews](https://soylentnews.org) 296. [tracr.co](https://tracr.co/)
297. [sparkpeople](https://www.sparkpeople.com) 297. [uid](https://uid.me/)
298. [spletnik](https://spletnik.ru/) 298. [4pda](https://4pda.ru/)
299. [svidbook](https://www.svidbook.ru/)
300. [toster](https://www.toster.ru/)
301. [tracr.co](https://tracr.co/)
302. [travellerspoint](https://www.travellerspoint.com)
303. [uid](https://uid.me/)
304. [warriorforum](https://www.warriorforum.com/)
305. [windy](https://windy.com/)
Alexa.com rank data fetched at (2020-05-09 04:38:38.544762 UTC)
