-
Notifications
You must be signed in to change notification settings - Fork 48
/
dorkScanner.py
128 lines (102 loc) · 5.01 KB
/
dorkScanner.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
import requests
import argparse
from functools import partial
from multiprocessing import Pool
from bs4 import BeautifulSoup as bsoup
# ANSI escape sequences: bold green (banner) and red (error messages).
GREEN, RED = '\033[1;32m', '\033[91m'
def get_arguments():
    """Parse the command-line flags and return the argparse namespace.

    Every option is optional; anything the user omits comes back as
    ``None`` so ``main()`` can fall back to interactive prompts.
    """
    parser = argparse.ArgumentParser()
    # (short flag, long flag, dest, help text) — one row per CLI option.
    option_specs = (
        ('-q', '--query', 'query', "Specify the Search Query within ''"),
        ('-e', '--engine', 'engine', 'Specify the Search Engine (Google/Bing)'),
        ('-p', '--pages', 'pages', 'Specify the Number of Pages (Default: 1)'),
        ('-P', '--processes', 'processes', 'Specify the Number of Processes (Default: 2)'),
    )
    for short_flag, long_flag, dest_name, help_text in option_specs:
        parser.add_argument(short_flag, long_flag, dest=dest_name, help=help_text)
    return parser.parse_args()
def google_search(query, page):
    """Scrape one page of Google web-search results for *query*.

    Parameters
    ----------
    query : str
        The search query (dork) to submit.
    page : int
        Zero-based page index; Google paginates 10 results per page,
        so the request sends ``start = page * 10``.

    Returns
    -------
    list[str]
        The organic result URLs found on that page. May be empty if
        Google served a captcha/consent page or changed its markup.
    """
    base_url = 'https://www.google.com/search'
    # Desktop-browser UA: without it Google returns a stripped page
    # that does not contain the 'yuRUbf' result containers.
    headers = { 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:71.0) Gecko/20100101 Firefox/71.0' }
    params = { 'q': query, 'start': page * 10 }
    # timeout so a stalled connection cannot hang a worker process forever
    resp = requests.get(base_url, params=params, headers=headers, timeout=10)
    soup = bsoup(resp.text, 'html.parser')
    # 'yuRUbf' is the <div> Google wraps each organic result link in;
    # NOTE(review): this class name changes periodically — re-verify if
    # the scraper starts returning empty lists.
    containers = soup.find_all("div", { "class" : "yuRUbf" })
    # Guard against containers with no <a> tag (markup drift) instead of
    # crashing with AttributeError on .get('href').
    anchors = (container.find('a') for container in containers)
    return [anchor.get('href') for anchor in anchors if anchor is not None]
def bing_search(query, page):
    """Scrape one page of Bing web-search results for *query*.

    Parameters
    ----------
    query : str
        The search query (dork) to submit.
    page : int
        Zero-based page index; Bing's ``first`` parameter is the
        1-based rank of the first result, hence ``page * 10 + 1``.

    Returns
    -------
    list[str]
        The text of every ``<cite>`` element on the page — Bing renders
        each result's display URL in a <cite> tag.
    """
    base_url = 'https://www.bing.com/search'
    # Desktop-browser UA so Bing serves the normal results markup.
    headers = { 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:71.0) Gecko/20100101 Firefox/71.0' }
    params = { 'q': query, 'first': page * 10 + 1 }
    # timeout so a stalled connection cannot hang a worker process forever
    resp = requests.get(base_url, params=params, headers=headers, timeout=10)
    soup = bsoup(resp.text, 'html.parser')
    return [cite.text for cite in soup.find_all('cite')]
def search_result(q, engine, pages, processes, result):
    """Pretty-print the pooled search results and a total URL count.

    Parameters
    ----------
    q : str
        The query that was searched.
    engine : str
        Which engine was used (for the header line only).
    pages, processes : int | str
        Echoed in the header line.
    result : list[list[str]]
        One inner list of URLs per page, as returned by ``Pool.map``.
    """
    separator = '-' * 70
    print(separator)
    print(f'Searching for: {q} in {pages} page(s) of {engine} with {processes} processes')
    print(separator)
    print()
    counter = 0
    # Fix: the original loop variable was named `range`, shadowing the
    # builtin inside this function.
    for page_urls in result:
        for url in page_urls:
            print('[+] ' + url)
            counter += 1
    print()
    print(separator)
    print(f'Number of urls: {counter}')
    print(separator)
# Parse CLI flags once at import time; main() reads this namespace to
# decide whether to prompt interactively for missing values.
options = get_arguments()
# ASCII-art startup banner, printed in green before the first run.
banner = '''
██████╗░░█████╗░██████╗░██╗░░██╗ ░██████╗░█████╗░░█████╗░███╗░░██╗███╗░░██╗███████╗██████╗░
██╔══██╗██╔══██╗██╔══██╗██║░██╔╝ ██╔════╝██╔══██╗██╔══██╗████╗░██║████╗░██║██╔════╝██╔══██╗
██║░░██║██║░░██║██████╔╝█████═╝░ ╚█████╗░██║░░╚═╝███████║██╔██╗██║██╔██╗██║█████╗░░██████╔╝
██║░░██║██║░░██║██╔══██╗██╔═██╗░ ░╚═══██╗██║░░██╗██╔══██║██║╚████║██║╚████║██╔══╝░░██╔══██╗
██████╔╝╚█████╔╝██║░░██║██║░╚██╗ ██████╔╝╚█████╔╝██║░░██║██║░╚███║██║░╚███║███████╗██║░░██║
╚═════╝░░╚════╝░╚═╝░░╚═╝╚═╝░░╚═╝ ╚═════╝░░╚════╝░╚═╝░░╚═╝╚═╝░░╚══╝╚═╝░░╚══╝╚══════╝╚═╝░░╚═╝
Made By: Madhav Mehndiratta (github.com/madhavmehndiratta)
'''
def main():
    """Run one scan: resolve settings, fan out page requests, print results.

    Each setting falls back to an interactive prompt (query, engine) or a
    default (pages=1, processes=2) when it was not given on the command line.
    """
    print()
    # CLI value wins; otherwise prompt the user.
    query = options.query or input('[?] Enter the Search Query: ')
    engine = options.engine or input('[?] Choose the Search Engine (Google/Bing): ')
    # Dispatch table instead of an if/elif chain.
    engine_funcs = {'google': google_search, 'bing': bing_search}
    search_func = engine_funcs.get(engine.lower())
    if search_func is None:
        print('[-] Invalid Option Entered!...Exiting the Program....')
        exit()
    target = partial(search_func, query)
    pages = options.pages or 1
    processes = options.processes or 2
    # Each worker maps one page index to its list of result URLs.
    with Pool(int(processes)) as worker_pool:
        page_results = worker_pool.map(target, range(int(pages)))
    search_result(query, engine, pages, processes, page_results)
print(GREEN + banner)
try:
    # Run once, then keep re-prompting interactively until the user gave
    # both --query and --engine on the command line (single-shot mode).
    main()
    while not (options.query and options.engine):
        main()
    exit()
except KeyboardInterrupt:
    # Ctrl-C is the normal way to leave the interactive loop.
    print('\nThanks For using!')
    exit()
except TimeoutError:
    print(RED + '\n[-] Too many requests, please try again later....')
    exit()