@@ -1,26 +1,29 @@
 """Sherlock: Supported Site Listing
 This module generates the listing of supported sites.
 """
 import json
 import sys
 import requests
-from argparse import ArgumentParser, RawDescriptionHelpFormatter
+import threading
 from bs4 import BeautifulSoup as bs
+from datetime import datetime
+from argparse import ArgumentParser, RawDescriptionHelpFormatter
 
+pool = list()
 
-def get_rank(domain_to_query):
+def get_rank(domain_to_query, dest):
     result = -1
     url = "http://www.alexa.com/siteinfo/" + domain_to_query
     page = requests.get(url).text
     soup = bs(page, features="lxml")
     for span in soup.find_all('span'):
         if span.has_attr("class"):
             if "globleRank" in span["class"]:
                 for strong in span.find_all("strong"):
                     if strong.has_attr("class"):
                         if "metrics-data" in strong["class"]:
                             result = int(strong.text.strip().replace(',', ''))
-    return result
+    dest['rank'] = result
 
 parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter
                         )
@@ -31,26 +34,38 @@ parser.add_argument("--rank","-r",
 args = parser.parse_args()
 
 with open("data.json", "r", encoding="utf-8") as data_file:
     data = json.load(data_file)
 
 with open("sites.md", "w") as site_file:
     data_length = len(data)
     site_file.write(f'## List Of Supported Sites ({data_length} Sites In Total!)\n')
 
-    index = 1
     for social_network in data:
         url_main = data.get(social_network).get("urlMain")
+        data.get(social_network)["rank"] = 0
+        if args.rank:
+            th = threading.Thread(target=get_rank, args=(url_main, data.get(social_network)))
+        else:
+            th = None
+        pool.append((social_network, url_main, th))
+        if args.rank:
+            th.start()
+
+    index = 1
+    for social_network, url_main, th in pool:
+        if args.rank:
+            th.join()
         site_file.write(f'{index}. [{social_network}]({url_main})\n')
-        if args.rank == True:
-            data.get(social_network)["rank"] = get_rank(url_main)
         sys.stdout.write("\r{0}".format(f"Updated {index} out of {data_length} entries"))
         sys.stdout.flush()
         index = index + 1
 
+    if args.rank:
+        site_file.write(f'\nAlexa.com rank data fetched at ({datetime.utcnow()} UTC)\n')
+
 sorted_json_data = json.dumps(data, indent=2, sort_keys=True)
 
 with open("data.json", "w") as data_file:
     data_file.write(sorted_json_data)
 
-sys.stdout.write("\r{0}".format(f"Finished updating supported site listing!\n"))
-sys.stdout.flush()
+print("\nFinished updating supported site listing!")
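Note on the concurrency pattern this diff introduces: each worker thread receives the per-site dict as `dest` and writes its result into `dest['rank']`, so no locks or return-value plumbing are needed (every thread touches a distinct key), and the second loop joins the threads in their original order so the listing is written in a stable order while the network lookups overlap. Below is a minimal, runnable sketch of that pattern; `fetch_rank`, the `sites` dict, and the placeholder rank computation are hypothetical stand-ins for the real Alexa lookup.

import threading

# Hypothetical stand-in for get_rank(): each worker writes into the dict
# slot it was handed, so distinct threads never touch the same key.
def fetch_rank(domain, dest):
    dest["rank"] = len(domain)  # placeholder for the real HTTP lookup

sites = {
    "ExampleA": {"urlMain": "https://a.example.com"},
    "ExampleB": {"urlMain": "https://b.example.com"},
}

pool = []
for name, entry in sites.items():
    th = threading.Thread(target=fetch_rank, args=(entry["urlMain"], entry))
    th.start()                      # all lookups now run concurrently
    pool.append((name, th))

for name, th in pool:
    th.join()                       # wait for this site's worker to finish
    print(name, sites[name]["rank"])

Joining in submission order can make the progress counter stall on one slow lookup, but total wall time is bounded by the slowest single request rather than the sum of all of them.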