-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathrun_smallscale.py
92 lines (84 loc) · 2.41 KB
/
run_smallscale.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
import sys
import math
import numpy as np
from cbfssm.datasets import Actuator, Ballbeam, Drive, Furnace, Dryer
from cbfssm.training import Trainer
from cbfssm.outputs import Outputs
from cbfssm.outputs import OutputSummary
from cbfssm.model import CBFSSM
# ---------------------------------------------------------------------------
# Small-scale benchmark driver for the CBF-SSM model.
#
# Runs train + evaluate on up to five classic system-identification datasets.
# Usage: pass an integer task index as argv[1] to run a single dataset,
# otherwise all datasets are run in sequence.
#
# NOTE(review): indentation below was reconstructed — the extracted source
# had its leading whitespace stripped; loop/branch nesting is inferred from
# the statement semantics (per-task `summary` object implies `write_summary`
# belongs inside the task loop, after the iteration loop).
# ---------------------------------------------------------------------------

# Choose Tasks
# Each entry: (dataset class, output-dir name, loss-factor scale, k_factor).
datasets = [(Actuator, 'actuator', 0.5, 100),
            (Ballbeam, 'ballbeam', 0.05, 10),
            (Drive, 'drive', 0.5, 50),
            (Dryer, 'dryer', 0.15, 100),
            (Furnace, 'furnace', 0.15, 100)]
tasks = [int(sys.argv[1])] if len(sys.argv) > 1 else range(len(datasets))

# Execute Tasks
for task_nr in tasks:
    #
    # Config
    #
    root_dir = "run_output/smallscale/" + datasets[task_nr][1]
    iterations = 5  # independent training runs per dataset
    # dataset
    ds_sel = datasets[task_nr][0]
    seq_len = 50
    seq_stride = 1
    # model
    model_sel = CBFSSM
    dim_x = 4  # latent state dimensionality
    model_config = {
        # dataset
        'ds': ds_sel,
        'batch_size': 10,
        'shuffle': 10000,
        # method
        'dim_x': dim_x,
        'ind_pnt_num': 20,
        'samples': 50,
        'learning_rate': 0.1,
        # per-dataset loss scaling / k factor from the `datasets` table
        'loss_factors': np.asarray([1., 0.]) * datasets[task_nr][2],
        'k_factor': datasets[task_nr][3],
        'recog_len': 16,
        # variables init state
        'zeta_pos': 2.,
        'zeta_mean': 0.05 ** 2,
        'zeta_var': 0.01 ** 2,
        'var_x': np.asarray([0.002 ** 2] * dim_x),
        'var_y': np.asarray([1. ** 2] * dim_x),
        'gp_var': 0.5 ** 2,
        'gp_len': 2.
    }
    # training
    train = True
    train_iterations = 30000  # target number of gradient steps
    # evaluation
    output_sel = Outputs

    #
    # Run
    #
    summary = OutputSummary(root_dir)
    for it in range(iterations):
        # iteration config: use per-run subdirectories only when repeating
        if iterations != 1:
            print("\n=== Iteration %d ===\n" % it)
        out_dir = root_dir if iterations == 1 else root_dir + "/run_%d" % it
        # load
        outputs = output_sel(out_dir)
        ds = ds_sel(seq_len, seq_stride)
        outputs.set_ds(ds)
        model = model_sel(model_config)
        outputs.set_model(model, out_dir)
        # train
        if train:
            # convert the target step count into whole epochs over the
            # training batches (train_in_batch is presumably batched input
            # data — shape[0] = batches per epoch; confirm against dataset)
            epochs = math.ceil(train_iterations / ds.train_in_batch.shape[0])
            trainer = Trainer(model, out_dir)
            trainer.train(ds, epochs)
            outputs.set_trainer(trainer)
        # evaluate
        outputs.create_all()
        summary.add_outputs(outputs)

    #
    # Summarize: aggregate the per-run outputs for this dataset
    #
    summary.write_summary()