check.py
import random  # only needed for the commented-out sampling block below
from urllib.parse import urlparse
#import requests

# One CSV of URLs per team.
filenames = ["a.csv", "b.csv", "c.csv"]

# Distinct hostnames seen across all files.
global_list = set()

for team in filenames:
    with open(team) as f:
        data = f.read().split("\n")
    # Entries may be wrapped in double quotes; strip them.
    data = [entry.strip('"') for entry in data]

    netlocs = set()  # distinct hostnames in this file
    twitter_links = 0
    facebook_links = 0

    for url in data:
        if not url:
            # Skip blank lines (e.g. from the trailing newline).
            continue
        # urlparse only fills netloc when a scheme is present.
        if not url.startswith("http://") and not url.startswith("https://"):
            url = "http://" + url
        try:
            parsed = urlparse(url)
        except ValueError:
            continue
        if parsed.netloc == "facebook.com":
            facebook_links += 1
        if parsed.netloc == "twitter.com":
            twitter_links += 1
        netlocs.add(parsed.netloc)
        #print(parsed)
        global_list.add(parsed.netloc)

    print(team)
    print("sum:", len(netlocs))
    print("twitter_links:", twitter_links)
    print("facebook_links:", facebook_links)

    # Optional check: try fetching a random sample of 20 URLs.
    #count_get = 0
    #for url in random.sample(data, 20):
    #    if not url.startswith("http://") and not url.startswith("https://"):
    #        url = "http://" + url
    #    try:
    #        r = requests.get(url)
    #        count_get += 1
    #    except:
    #        continue
    #print("count get:", count_get)

    print()

#print()
#print("count global:", len(global_list))
#print()
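
# Note (assumption, not stated in the original file): each input CSV is expected to
# hold one URL per line, optionally wrapped in double quotes and possibly missing a
# scheme, which is why the script prepends "http://" before parsing. A hypothetical
# example of a.csv:
#   "https://twitter.com/some_team"
#   facebook.com/some_team
#   example.org/news/article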