page.py (forked from intchloe/don-t-touch-my-page-)
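# page.py checks whether Tor exit nodes tamper with a web page: it downloads the
# current list of exit fingerprints, fetches the target URL through each exit in
# turn, and diffs every response against a direct (non-Tor) fetch of the same page.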
import socket
import requests
import socks
import stem.process
import argparse
import os.path
import tempfile
from difflib import SequenceMatcher
from stem.descriptor.remote import DescriptorDownloader
from stem.util import term

SOCKS_PORT = 1339
TIMEOUT = 15

parser = argparse.ArgumentParser()
parser.add_argument("-u", "--url", dest="url", help="URL to be checked")
args = parser.parse_args()

url = args.url
if url is None:
    url = "http://www.reddit.com/"
    print("URL was not specified, defaulting to " + url)
if url.startswith("https://"):
    print(term.format("Detected an HTTPS URL; this check expects plaintext (HTTP)", term.Color.RED))


def get_clean():
    # Fetch a reference copy of the page over a direct (non-Tor) connection.
    global r1
    socket.socket = temp              # restore the original socket for the direct request
    r1 = requests.get(url)
    socket.socket = socks.socksocket  # route later requests through the SOCKS proxy again


def del_fp():
    # Remove any stale fingerprint list left over from a previous run.
    if os.path.isfile("fp.txt"):
        print("fp.txt exists. Deleting...")
        os.remove("fp.txt")


def get_fps():
    # Download fresh relay descriptors and record the fingerprint of every exit node.
    print("Downloading fresh fingerprints...")
    downloader = DescriptorDownloader(
        use_mirrors=True,
        timeout=20,
    )
    query = downloader.get_server_descriptors()
    with open("fp.txt", "a") as fp:
        for desc in query:
            if desc.exit_policy.is_exiting_allowed():
                fp.write('{}\n'.format(desc.fingerprint))


del_fp()
get_fps()

# Route all later requests through the local SOCKS proxy, keeping the original
# socket around so get_clean() can temporarily restore it.
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, '127.0.0.1', SOCKS_PORT)
temp = socket.socket
socket.socket = socks.socksocket


def main():
    with open("fp.txt", mode="r") as file:
        for line in file.readlines():
            line = line.strip()
            tor_process = None
            try:
                # Launch a Tor instance pinned to this exit node.
                tor_process = stem.process.launch_tor_with_config(
                    config={
                        'SocksPort': str(SOCKS_PORT),
                        'ExitNodes': str(line),
                        'DataDirectory': os.path.join(tempfile.gettempdir(), str(SOCKS_PORT)),
                    }, timeout=TIMEOUT)
                r2 = requests.get(url)  # fetch the page through the chosen exit node
                get_clean()             # fetch the same page directly for comparison
                # Should GZIP or plaintext be compared?
                # if r2.headers['Content-Encoding'] == "gzip":
                #     is_gzip()
                m = SequenceMatcher(None, r2.content, r1.content)
                ratio = m.ratio() * 100
                if ratio == 100:
                    print(term.format("100% matching for node " + line, term.Color.GREEN))
                else:
                    print(term.format("Ratio " + str(ratio) + " not matching fully for node " + line, term.Color.RED))
                    # Save the mismatching response, named after the exit fingerprint, for inspection.
                    with open(line, 'wb') as f:
                        f.write(r2.content)
            except Exception as e:
                print(term.format("Error: " + str(e) + " for " + line, term.Color.YELLOW))
            finally:
                if tor_process is not None:
                    tor_process.kill()


if __name__ == "__main__":
    main()
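A minimal usage sketch, assuming the tor binary and the requests, PySocks, and stem packages are installed (the URL below is only a placeholder): python page.py -u http://example.com/. With no -u flag the script falls back to http://www.reddit.com/.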