# TunFeatExtrARFFAllperPcap.py
# (GitHub page header removed from scraped copy; original listing: 243 lines, 207 loc, 13.1 KB)
# (line-number gutter from the scraped web page removed)
from scapy.all import *

import errno
import json
import logging
import os
# import csv

import arff

from PcapFeatures import PcapFeatures
from CapLibrary import CapLibrary
class TunnelFeatureExtractorJSON(object):
    """Extract per-pcap tunnelling features and write them to ARFF files.

    For each pcap in the CapLibrary corpus of a given protocol label, the
    requested feature vectors (DNS request lengths, IP packet lengths,
    encoded query names, HTTP request bytes) are pulled out via
    PcapFeatures and dumped as one .arff file per pcap under
    feature_base/ARFF/<protoLabel>/<featureName>/.
    """

    def __init__(self):
        logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        # Module logger is more verbose than the root configuration.
        self.logger.setLevel(logging.DEBUG)
        self.logger.debug("Testing debug message")
        self.capLib = CapLibrary()

    def make_sure_path_exists(self, path):
        """Create directory *path* (including parents); tolerate it already existing.

        Any OSError other than EEXIST is re-raised.
        """
        try:
            os.makedirs(path)
            print("Path Created: ", path)
        except OSError as exception:
            # EAFP: only the "already exists" case is benign.
            if exception.errno != errno.EEXIST:
                raise

    def test_feature_extraction(self):
        """Ad-hoc smoke test: plot DNS request lengths of one HTTPovDNS pcap."""
        path_list = self.capLib.get_paths_from_specific_lib_in_pcap_base('HTTPovDNS')
        self.logger.debug('First Path: %s ' % str(path_list[0]).strip())
        # NOTE(review): logs path_list[0] but analyses path_list[2], and passes
        # label 'HTTP' rather than 'HTTPovDNS' — kept as-is; confirm intent.
        pcap_feat = PcapFeatures(str(path_list[2]).strip(), 'HTTP')
        lens_seq = pcap_feat.getDnsReqLens()
        self.logger.debug("Packet Length List-len: %i" % len(lens_seq))
        self.logger.debug("First Pkt Length: %i" % lens_seq[0])
        self.logger.debug("Second Pkt Length: %i" % lens_seq[1])
        pcap_feat.doPlot(lens_seq, 'red', 'DNS Req Entropy', 'Pkt #', 'Entropy')

    def get_feature_vectors_and_write_to_file(self, protoLabel, featureName):
        """Extract *featureName* from every pcap labelled *protoLabel* and
        write one ARFF file per pcap.

        featureName is one of "DNS-Req-Lens", "IP-Req-Lens",
        "DNS-Req-Qnames-Enc-Comp-Hex", "HTTP-Req-Bytes-Hex", or the
        aggregates "All" / "All-HTTP".
        """
        # Make sure the output directories exist (the JSON directory is kept
        # because JSON output used to be written here and may be re-enabled).
        self.make_sure_path_exists("feature_base/JSON/" + protoLabel + "/" + featureName)
        self.make_sure_path_exists("feature_base/ARFF/" + protoLabel + "/" + featureName)
        curr_pcap_file_name = 'Not yet set.'
        try:
            # Fix: was None, which crashed the non-"All" ARFF path with a
            # TypeError when no feature branch matched featureName.
            feature_vect_list = []
            for count, single_file_path in enumerate(
                    self.capLib.get_paths_from_specific_lib_in_pcap_base(protoLabel)):
                self.logger.debug('-----------------------------')
                self.logger.debug("Pcap File Path #: %i" % count)
                curr_pcap_file_name = str(single_file_path).rsplit('/', 1)[1].strip()
                self.logger.debug("Current PCAP File name: %s" % curr_pcap_file_name)
                curr_feature_ARFF_filePath = ("feature_base/ARFF/" + protoLabel + "/"
                                              + featureName + '/' + curr_pcap_file_name + '.arff')
                pcap_feat = PcapFeatures(single_file_path, protoLabel)
                feature_dict_list = []
                # Collect the requested feature(s); "All"/"All-HTTP" select groups.
                if featureName == "DNS-Req-Lens" or featureName == "All":
                    feature_vect_list = pcap_feat.getDnsReqLens()
                    self.logger.debug("DNS-Req-Lens #: %i" % len(feature_vect_list))
                    feature_dict_list.append({'feature_name': "DNS-Req-Lens",
                                              'values': feature_vect_list})
                if featureName == "IP-Req-Lens" or featureName == "All":
                    feature_vect_list = pcap_feat.get_ip_pkt_lengths()
                    self.logger.debug("IP-Req-Lens #: %i" % len(feature_vect_list))
                    feature_dict_list.append({'feature_name': "IP-Req-Lens",
                                              'values': feature_vect_list})
                if featureName == "DNS-Req-Qnames-Enc-Comp-Hex" or featureName == "All":
                    feature_vect_list = pcap_feat.getDnsReqQnames_upstream()
                    self.logger.debug("DNS-Req-Qnames-Enc-Comp-Hex #: %i" % len(feature_vect_list))
                    feature_dict_list.append({'feature_name': "DNS-Req-Qnames-Enc-Comp-Hex",
                                              'values': feature_vect_list})
                # HTTP related features
                if featureName == "HTTP-Req-Bytes-Hex" or featureName == "All-HTTP":
                    feature_vect_list = pcap_feat.getHttpReqBytesHex()
                    self.logger.debug("Req Len seq len: %i" % len(feature_vect_list))
                self.logger.debug("Number of features being captured: %i" % len(feature_dict_list))
                if feature_dict_list:
                    # Fix: guard — this line raised IndexError for feature
                    # names that do not populate feature_dict_list.
                    self.logger.debug("First Feature: %s" % feature_dict_list[0]['feature_name'])
                self.logger.debug("Populating feature vector from PCAP [%s]" % (curr_pcap_file_name))
                if featureName == 'All':
                    # One attribute per extracted feature. All three "All"
                    # features were typed STRING in the original branching,
                    # so a single constant is behaviour-identical.
                    attrib_list = []
                    data_list_of_lists = []
                    for item in feature_dict_list:
                        attrib_list.append((item["feature_name"], "STRING"))
                        data_list_of_lists.append(item["values"])
                    arff_obj_str = {
                        'description': curr_pcap_file_name + '---' + protoLabel,
                        'relation': featureName + '---' + 'pcap-Md5-hash',
                        'attributes': attrib_list,
                        'data': [
                            # Valid .arff, though hard to read in some editors.
                            data_list_of_lists
                        ]
                    }
                else:
                    # Single-attribute relation; the whole vector is joined
                    # into one comma/newline-separated value.
                    arff_obj_str = {
                        'description': curr_pcap_file_name + '---' + protoLabel,
                        'relation': featureName + '---' + 'pcap-Md5-hash',
                        'attributes': [
                            (protoLabel + '---' + featureName, 'INTEGER')
                        ],
                        'data': [
                            [",\n".join(map(str, feature_vect_list))]
                        ]
                    }
                with open(curr_feature_ARFF_filePath, mode='w') as arff_feature_file:
                    arff.dump(arff_obj_str, arff_feature_file)
        except IOError:
            # Fix: log at error level with traceback instead of a silent
            # debug message so failed extractions are visible.
            self.logger.exception("File IOError ... with: %s : %s"
                                  % (featureName, curr_pcap_file_name))
def main():
    """Script entry point: extract the "All" feature set for HTTPovDNS.

    Other corpora/features that have been run successfully before:
      get_feature_vectors_and_write_to_file("HTTPovDNS", "DNS-Req-Lens")
      get_feature_vectors_and_write_to_file("HTTPovDNS", "IP-Req-Lens")
      get_feature_vectors_and_write_to_file("HTTPovDNS", "DNS-Req-Qnames-Enc-Comp-Hex")
      get_feature_vectors_and_write_to_file("HTTPovDNS-Static", ...)
      get_feature_vectors_and_write_to_file("HTTP-Plain", "HTTP-Req-Bytes-Hex")
    """
    featureExt = TunnelFeatureExtractorJSON()
    featureExt.get_feature_vectors_and_write_to_file("HTTPovDNS", "All")


# Fix: guard the entry point so importing this module no longer kicks off
# a full (slow, filesystem-writing) pcap extraction as a side effect.
if __name__ == "__main__":
    main()