"""
Copyright 2024 Universitat Politècnica de Catalunya
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from pickle import load

import tensorflow as tf


def load_partition(path):
    """Loads a partition of the dataset.

    Parameters
    ----------
    path : str
        Path to the partition directory.

    Returns
    -------
    tf.data.Dataset
        Loaded partition
    """
    # Shards are saved as numbered subdirectories ("0000", "0001", ...),
    # one tf.data.Dataset per shard.
    num_partitions = len(os.listdir(path))
    # Load the first shard, then append the remaining shards in order.
    ds = tf.data.Dataset.load(os.path.join(path, "0000"), compression="GZIP")
    for ii in range(1, num_partitions):
        ds = ds.concatenate(
            tf.data.Dataset.load(os.path.join(path, f"{ii:04d}"), compression="GZIP")
        )
    return ds
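# Example usage (a minimal sketch; the "split_ds/training" path is only an
# illustrative assumption, not a path this module defines):
#
#     train_ds = load_partition("split_ds/training")
#     train_ds = train_ds.shuffle(1000).prefetch(tf.data.AUTOTUNE)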
def load_z_scores(path):
    """Returns the mean and std of the training dataset to compute the z-scores.

    Parameters
    ----------
    path : str
        Path to the pickle file storing the normalization statistics.

    Returns
    -------
    (float, float, float, float)
        Returns the mean and std of the IPG, and the mean and std of the
        packet size, in that order.
    """
    # The statistics were pickled as a single tuple, so one load() call
    # recovers all four values.
    with open(path, "rb") as ff:
        return load(ff)
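# Example usage (a minimal sketch; the "z_scores.pkl" filename and the
# 4-tuple unpacking are assumptions based on the docstring above):
#
#     ipg_mean, ipg_std, size_mean, size_std = load_z_scores("z_scores.pkl")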
# One-off migration helpers, kept commented out for reference: fix_time_dist
# remapped the "flow_time_dist" field (replacing the value 3 with 2), and
# fix_data applied it to every saved shard, re-saving the results under a
# new directory.
# def fix_time_dist(x, y):
#     x["flow_time_dist"] = tf.where(
#         tf.math.equal(x["flow_time_dist"], 3),
#         tf.ones_like(x["flow_time_dist"]) * 2,
#         x["flow_time_dist"],
#     )
#     return x, y
#
# def fix_data(old_name, new_name):
#     num_partitions = len(os.listdir(old_name))
#     os.makedirs(new_name, exist_ok=True)
#     for ii in range(num_partitions):
#         ds = (
#             tf.data.Dataset.load(
#                 os.path.join(old_name, f"{ii:04d}"), compression="GZIP"
#             )
#             .prefetch(tf.data.experimental.AUTOTUNE)
#             .map(fix_time_dist)
#         )
#         ds.save(os.path.join(new_name, f"{ii:04d}"), compression="GZIP")
#
# fix_data("old_split_ds/training", "split_ds/training")
# fix_data("old_split_ds/validation", "split_ds/validation")
# fix_data("old_split_ds/test", "split_ds/test")
# fix_data("old_combined_ds/training", "combined_ds/training")
# fix_data("old_combined_ds/validation", "combined_ds/validation")
# fix_data("old_combined_ds/test", "combined_ds/test")