-
Notifications
You must be signed in to change notification settings - Fork 53
/
Hx_config.py
161 lines (131 loc) · 4.72 KB
/
Hx_config.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
import os
import queue
import shutil
import json
import random
'''
Configuration file.

Root_Path: absolute path of the project root directory (directory of this file).
'''
Root_Path = os.path.dirname(os.path.abspath(__file__))

# Server酱 (ServerChan) push key; empty string leaves notifications disabled.
SERVERKEY = ''

# ARL API token and service URL.
# The token is configured in ARL/docker/config-docker.yaml.
API_KEY = 'write_down_your_token'
arl_url_Path = 'https://127.0.0.1:5004'

# Extra arguments appended after the xray command line; None means no extras.
plugins = None

# Tool locations (Windows paths), for example:
#   Chrome_Path          C:\Program Files (x86)\Google\Chrome\Application\chrome.exe
#   Xray_Path            D:\Xray\xray.exe
#   crawlergo_Path       location of the crawlergo executable
#   OneForAll_Path       folder containing OneForAll
#   subDomainsBrute_Path folder containing subDomainsBrute
#   subfinder_Path       folder containing subfinder
Chrome_Path = 'D:\\HxnineTails_Script\\chrome-win\\chrome.exe'
Xray_Path = 'D:\\HxnineTails_Script\\xray\\xray.exe'
crawlergo_Path = 'D:\\HxnineTails_Script\\crawlergo.exe'
OneForAll_Path = 'D:\\HxnineTails_Script\\OneForAll-master\\'
subDomainsBrute_Path = 'D:\\HxnineTails_Script\\subDomainsBrute-master\\'
subfinder_Path = 'D:\\HxnineTails_Script\\subfinder_2.4.5_windows_amd64\\'

# Global scan settings:
#   portlist  default ports probed during C-segment detection
#   blacklist presumably keyword filters applied to discovered domains — TODO confirm against callers
#   ThreadNum number of worker threads
#   PYTHON    command used to invoke Python 3.8 (Windows "py" launcher)
portlist = ['80', '8080', '8000', '8081', '8001']
blacklist = ["spider", "org"]
ThreadNum = 5
PYTHON = "py -3.8"
'''
Report output paths:
every report is written beneath the save/ folder.
Xray_report_path  - HTML reports produced by Xray scans
CScan_report_path - txt files produced by C-segment scans
'''
Save_path = f"{Root_Path}\\save"
Xray_report_path = f"{Root_Path}\\save\\saveXray"
Xray_temp_report_path = f"{Root_Path}\\save\\saveTempXray"
CScan_report_path = f"{Root_Path}\\save\\saveCplus\\"
Sub_report_path = f"{Root_Path}\\save\\saveSub\\"
Temp_path = f"{Root_Path}\\save\\temp\\"
JS_report_path = f"{Root_Path}\\save\\saveJS\\"
ARL_save_path = f"{Root_Path}\\save\\saveARL\\"
Crawlergo_save_path = f"{Root_Path}\\save\\saveCrawlergo\\"

# Global queues shared by the scanner modules.
# sub_queue holds every sub-domain gathered by the enumeration tools.
sub_queue = queue.Queue()
target_queue = queue.Queue()
xray_queue = queue.Queue()
ppp_queue = queue.Queue()
'''
GetHeaders()
Returns a randomly chosen request header so requests are less likely to be
blocked for carrying Python's default User-Agent.
'''
def GetHeaders():
    """Return {'User-Agent': <random UA>} drawn from Useragent.json.

    Useragent.json is opened relative to the current working directory and
    must map 'browsers' -> {browser: [UA strings]} and
    'randomize' -> {index: browser name}.  Exits the whole program on any
    failure (missing/unreadable file, bad JSON, missing keys).
    """
    try:
        with open('Useragent.json', 'r') as f:
            data = json.load(f)
        browser = random.choice(list(data['randomize'].values()))
        headers = {'User-Agent': random.choice(data['browsers'][browser])}
        return headers
    except (OSError, ValueError, KeyError, IndexError) as e:
        # Narrowed from a bare `except Exception` and the cause is now
        # reported instead of being silently discarded.
        exit("[*]Hx_config.py : GetHeaders error! ({})".format(e))
'''
ANSI terminal colour codes used for console output.
'''
red = '\x1b[1;31m'
green = '\x1b[01;32m'
yellow = '\x1b[01;33m'
blue = '\x1b[01;34m'
white = '\x1b[01;37m'
end = '\x1b[0m'  # reset all attributes
def logo():
    """Print the coloured ASCII-art start-up banner (tool name and version)."""
    print(f'''
{blue}花溪九尾,懒人必备web扫描器!{green}
+-+-+-+-+-+-+-+-+-+-+-+
|{red}H|{yellow}X|n|{green}i|n|e|T|a|{yellow}i|l|{red}s|
{green}+-+-+-+-+-+-+-+-+-+-+-+{white}
v1.1{end}
''')
'''
Recursively delete the temporary results saved under the save/ folder.
'''
def delModel():
    """Wipe and recreate each per-tool output folder under <Root_Path>\\save."""
    saveFolderList = ['saveCplus', 'saveJS', 'saveSub', 'saveXray', 'saveARL', 'temp', 'saveTempXray', 'saveCrawlergo']
    for tempFolder in saveFolderList:
        folder = "{}\\save\\{}".format(Root_Path, tempFolder)
        # ignore_errors: a missing folder (fresh checkout, partial clean)
        # previously raised FileNotFoundError and aborted the loop.
        shutil.rmtree(folder, ignore_errors=True)
        # makedirs(exist_ok=True) also creates the parent save/ dir on first run,
        # where the original os.mkdir would fail.
        os.makedirs(folder, exist_ok=True)
    print(f"{green}clean end :){end}")
    return
'''
Help information
'''
def scanHelp():
    """Print the scan.py command-line usage text."""
    usage = (
        f'{green}scan.py [options]\n\t-a --attone <attack one url> example: scan.py -a https://www.baidu.com\n\t-s --attsrc <attack one src> example:{end}'
        f'{green}scan.py -s baidu.com\n\t-d --attdetail <attack one src detail> example: scan.py -d baidu.com\n\t-r --readppp <readfilename attack>{end}'
        f'{green}example: scan.py -r target.txt\n\t-t --thread number of threads\n\t-c --clean clean saveFiles\n\t-h --help output help information\n{end}'
    )
    print(usage)
def main():
    """When run directly, print the resolved project root as a sanity check."""
    print(Root_Path)


if __name__ == '__main__':
    main()