Skip to content

Commit 13c2a25

Browse files
committed
first commit
1 parent 7d94dcd commit 13c2a25

14 files changed

+1170
-0
lines changed

0207.csv

+500
Large diffs are not rendered by default.

0208.csv

+200
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,200 @@
1+
4,154,72,29,126,31.3,0.338,37,0
2+
0,121,66,30,165,34.3,0.203,33,1
3+
3,78,70,0,0,32.5,0.270,39,0
4+
2,130,96,0,0,22.6,0.268,21,0
5+
3,111,58,31,44,29.5,0.430,22,0
6+
2,98,60,17,120,34.7,0.198,22,0
7+
1,143,86,30,330,30.1,0.892,23,0
8+
1,119,44,47,63,35.5,0.280,25,0
9+
6,108,44,20,130,24.0,0.813,35,0
10+
2,118,80,0,0,42.9,0.693,21,1
11+
10,133,68,0,0,27.0,0.245,36,0
12+
2,197,70,99,0,34.7,0.575,62,1
13+
0,151,90,46,0,42.1,0.371,21,1
14+
6,109,60,27,0,25.0,0.206,27,0
15+
12,121,78,17,0,26.5,0.259,62,0
16+
8,100,76,0,0,38.7,0.190,42,0
17+
8,124,76,24,600,28.7,0.687,52,1
18+
1,93,56,11,0,22.5,0.417,22,0
19+
8,143,66,0,0,34.9,0.129,41,1
20+
6,103,66,0,0,24.3,0.249,29,0
21+
3,176,86,27,156,33.3,1.154,52,1
22+
0,73,0,0,0,21.1,0.342,25,0
23+
11,111,84,40,0,46.8,0.925,45,1
24+
2,112,78,50,140,39.4,0.175,24,0
25+
3,132,80,0,0,34.4,0.402,44,1
26+
2,82,52,22,115,28.5,1.699,25,0
27+
6,123,72,45,230,33.6,0.733,34,0
28+
0,188,82,14,185,32.0,0.682,22,1
29+
0,67,76,0,0,45.3,0.194,46,0
30+
1,89,24,19,25,27.8,0.559,21,0
31+
1,173,74,0,0,36.8,0.088,38,1
32+
1,109,38,18,120,23.1,0.407,26,0
33+
1,108,88,19,0,27.1,0.400,24,0
34+
6,96,0,0,0,23.7,0.190,28,0
35+
1,124,74,36,0,27.8,0.100,30,0
36+
7,150,78,29,126,35.2,0.692,54,1
37+
4,183,0,0,0,28.4,0.212,36,1
38+
1,124,60,32,0,35.8,0.514,21,0
39+
1,181,78,42,293,40.0,1.258,22,1
40+
1,92,62,25,41,19.5,0.482,25,0
41+
0,152,82,39,272,41.5,0.270,27,0
42+
1,111,62,13,182,24.0,0.138,23,0
43+
3,106,54,21,158,30.9,0.292,24,0
44+
3,174,58,22,194,32.9,0.593,36,1
45+
7,168,88,42,321,38.2,0.787,40,1
46+
6,105,80,28,0,32.5,0.878,26,0
47+
11,138,74,26,144,36.1,0.557,50,1
48+
3,106,72,0,0,25.8,0.207,27,0
49+
6,117,96,0,0,28.7,0.157,30,0
50+
2,68,62,13,15,20.1,0.257,23,0
51+
9,112,82,24,0,28.2,1.282,50,1
52+
0,119,0,0,0,32.4,0.141,24,1
53+
2,112,86,42,160,38.4,0.246,28,0
54+
2,92,76,20,0,24.2,1.698,28,0
55+
6,183,94,0,0,40.8,1.461,45,0
56+
0,94,70,27,115,43.5,0.347,21,0
57+
2,108,64,0,0,30.8,0.158,21,0
58+
4,90,88,47,54,37.7,0.362,29,0
59+
0,125,68,0,0,24.7,0.206,21,0
60+
0,132,78,0,0,32.4,0.393,21,0
61+
5,128,80,0,0,34.6,0.144,45,0
62+
4,94,65,22,0,24.7,0.148,21,0
63+
7,114,64,0,0,27.4,0.732,34,1
64+
0,102,78,40,90,34.5,0.238,24,0
65+
2,111,60,0,0,26.2,0.343,23,0
66+
1,128,82,17,183,27.5,0.115,22,0
67+
10,92,62,0,0,25.9,0.167,31,0
68+
13,104,72,0,0,31.2,0.465,38,1
69+
5,104,74,0,0,28.8,0.153,48,0
70+
2,94,76,18,66,31.6,0.649,23,0
71+
7,97,76,32,91,40.9,0.871,32,1
72+
1,100,74,12,46,19.5,0.149,28,0
73+
0,102,86,17,105,29.3,0.695,27,0
74+
4,128,70,0,0,34.3,0.303,24,0
75+
6,147,80,0,0,29.5,0.178,50,1
76+
4,90,0,0,0,28.0,0.610,31,0
77+
3,103,72,30,152,27.6,0.730,27,0
78+
2,157,74,35,440,39.4,0.134,30,0
79+
1,167,74,17,144,23.4,0.447,33,1
80+
0,179,50,36,159,37.8,0.455,22,1
81+
11,136,84,35,130,28.3,0.260,42,1
82+
0,107,60,25,0,26.4,0.133,23,0
83+
1,91,54,25,100,25.2,0.234,23,0
84+
1,117,60,23,106,33.8,0.466,27,0
85+
5,123,74,40,77,34.1,0.269,28,0
86+
2,120,54,0,0,26.8,0.455,27,0
87+
1,106,70,28,135,34.2,0.142,22,0
88+
2,155,52,27,540,38.7,0.240,25,1
89+
2,101,58,35,90,21.8,0.155,22,0
90+
1,120,80,48,200,38.9,1.162,41,0
91+
11,127,106,0,0,39.0,0.190,51,0
92+
3,80,82,31,70,34.2,1.292,27,1
93+
10,162,84,0,0,27.7,0.182,54,0
94+
1,199,76,43,0,42.9,1.394,22,1
95+
8,167,106,46,231,37.6,0.165,43,1
96+
9,145,80,46,130,37.9,0.637,40,1
97+
6,115,60,39,0,33.7,0.245,40,1
98+
1,112,80,45,132,34.8,0.217,24,0
99+
4,145,82,18,0,32.5,0.235,70,1
100+
10,111,70,27,0,27.5,0.141,40,1
101+
6,98,58,33,190,34.0,0.430,43,0
102+
9,154,78,30,100,30.9,0.164,45,0
103+
6,165,68,26,168,33.6,0.631,49,0
104+
1,99,58,10,0,25.4,0.551,21,0
105+
10,68,106,23,49,35.5,0.285,47,0
106+
3,123,100,35,240,57.3,0.880,22,0
107+
8,91,82,0,0,35.6,0.587,68,0
108+
6,195,70,0,0,30.9,0.328,31,1
109+
9,156,86,0,0,24.8,0.230,53,1
110+
0,93,60,0,0,35.3,0.263,25,0
111+
3,121,52,0,0,36.0,0.127,25,1
112+
2,101,58,17,265,24.2,0.614,23,0
113+
2,56,56,28,45,24.2,0.332,22,0
114+
0,162,76,36,0,49.6,0.364,26,1
115+
0,95,64,39,105,44.6,0.366,22,0
116+
4,125,80,0,0,32.3,0.536,27,1
117+
5,136,82,0,0,0.0,0.640,69,0
118+
2,129,74,26,205,33.2,0.591,25,0
119+
3,130,64,0,0,23.1,0.314,22,0
120+
1,107,50,19,0,28.3,0.181,29,0
121+
1,140,74,26,180,24.1,0.828,23,0
122+
1,144,82,46,180,46.1,0.335,46,1
123+
8,107,80,0,0,24.6,0.856,34,0
124+
13,158,114,0,0,42.3,0.257,44,1
125+
2,121,70,32,95,39.1,0.886,23,0
126+
7,129,68,49,125,38.5,0.439,43,1
127+
2,90,60,0,0,23.5,0.191,25,0
128+
7,142,90,24,480,30.4,0.128,43,1
129+
3,169,74,19,125,29.9,0.268,31,1
130+
0,99,0,0,0,25.0,0.253,22,0
131+
4,127,88,11,155,34.5,0.598,28,0
132+
4,118,70,0,0,44.5,0.904,26,0
133+
2,122,76,27,200,35.9,0.483,26,0
134+
6,125,78,31,0,27.6,0.565,49,1
135+
1,168,88,29,0,35.0,0.905,52,1
136+
2,129,0,0,0,38.5,0.304,41,0
137+
4,110,76,20,100,28.4,0.118,27,0
138+
6,80,80,36,0,39.8,0.177,28,0
139+
10,115,0,0,0,0.0,0.261,30,1
140+
2,127,46,21,335,34.4,0.176,22,0
141+
9,164,78,0,0,32.8,0.148,45,1
142+
2,93,64,32,160,38.0,0.674,23,1
143+
3,158,64,13,387,31.2,0.295,24,0
144+
5,126,78,27,22,29.6,0.439,40,0
145+
10,129,62,36,0,41.2,0.441,38,1
146+
0,134,58,20,291,26.4,0.352,21,0
147+
3,102,74,0,0,29.5,0.121,32,0
148+
7,187,50,33,392,33.9,0.826,34,1
149+
3,173,78,39,185,33.8,0.970,31,1
150+
10,94,72,18,0,23.1,0.595,56,0
151+
1,108,60,46,178,35.5,0.415,24,0
152+
5,97,76,27,0,35.6,0.378,52,1
153+
4,83,86,19,0,29.3,0.317,34,0
154+
1,114,66,36,200,38.1,0.289,21,0
155+
1,149,68,29,127,29.3,0.349,42,1
156+
5,117,86,30,105,39.1,0.251,42,0
157+
1,111,94,0,0,32.8,0.265,45,0
158+
4,112,78,40,0,39.4,0.236,38,0
159+
1,116,78,29,180,36.1,0.496,25,0
160+
0,141,84,26,0,32.4,0.433,22,0
161+
2,175,88,0,0,22.9,0.326,22,0
162+
2,92,52,0,0,30.1,0.141,22,0
163+
3,130,78,23,79,28.4,0.323,34,1
164+
8,120,86,0,0,28.4,0.259,22,1
165+
2,174,88,37,120,44.5,0.646,24,1
166+
2,106,56,27,165,29.0,0.426,22,0
167+
2,105,75,0,0,23.3,0.560,53,0
168+
4,95,60,32,0,35.4,0.284,28,0
169+
0,126,86,27,120,27.4,0.515,21,0
170+
8,65,72,23,0,32.0,0.600,42,0
171+
2,99,60,17,160,36.6,0.453,21,0
172+
1,102,74,0,0,39.5,0.293,42,1
173+
11,120,80,37,150,42.3,0.785,48,1
174+
3,102,44,20,94,30.8,0.400,26,0
175+
1,109,58,18,116,28.5,0.219,22,0
176+
9,140,94,0,0,32.7,0.734,45,1
177+
13,153,88,37,140,40.6,1.174,39,0
178+
12,100,84,33,105,30.0,0.488,46,0
179+
1,147,94,41,0,49.3,0.358,27,1
180+
1,81,74,41,57,46.3,1.096,32,0
181+
3,187,70,22,200,36.4,0.408,36,1
182+
6,162,62,0,0,24.3,0.178,50,1
183+
4,136,70,0,0,31.2,1.182,22,1
184+
1,121,78,39,74,39.0,0.261,28,0
185+
3,108,62,24,0,26.0,0.223,25,0
186+
0,181,88,44,510,43.3,0.222,26,1
187+
8,154,78,32,0,32.4,0.443,45,1
188+
1,128,88,39,110,36.5,1.057,37,1
189+
7,137,90,41,0,32.0,0.391,39,0
190+
0,123,72,0,0,36.3,0.258,52,1
191+
1,106,76,0,0,37.5,0.197,26,0
192+
6,190,92,0,0,35.5,0.278,66,1
193+
2,88,58,26,16,28.4,0.766,22,0
194+
9,170,74,31,0,44.0,0.403,43,1
195+
9,89,62,0,0,22.5,0.142,33,0
196+
10,101,76,48,180,32.9,0.171,63,0
197+
2,122,70,27,0,36.8,0.340,27,0
198+
5,121,72,23,112,26.2,0.245,30,0
199+
1,126,60,0,0,30.1,0.349,47,1
200+
1,93,70,31,0,30.4,0.315,23,0

deep.py

+33
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
"""Train a small Keras MLP on 0207.csv and persist the model to disk.

NOTE(review): written against the Keras 1.x API (``init=``, ``nb_epoch=``),
which matches keras_version 1.2.1 recorded in model.json — confirm before
running under Keras 2+.
"""
from keras.models import Sequential
from keras.layers import Dense
import numpy

# Training set: columns 0-7 are features, column 8 is the binary label.
train = numpy.loadtxt("0207.csv", delimiter=",")
X = train[:, 0:8]
Y = train[:, 8]

# Held-out set is loaded here for parity with run.py but not used below.
holdout = numpy.loadtxt("0208.csv", delimiter=",")
Z = holdout[:, 0:8]
Q = holdout[:, 8]

# One hidden layer of 15 softplus units feeding a single sigmoid output.
model = Sequential()
model.add(Dense(15, input_dim=8, init='uniform', activation='softplus'))
model.add(Dense(1, init='uniform', activation='sigmoid'))

model.compile(loss='mse', optimizer='Adam', metrics=['accuracy'])

history = model.fit(X, Y, nb_epoch=50, batch_size=10)

# Evaluate on the training data itself — an optimistic estimate.
loss, accuracy = model.evaluate(X, Y)
print("\nLoss: %.2f, Accuracy: %.2f%%" % (loss, accuracy*100))

# serialize model to JSON
with open("model.json", "w") as json_file:
    json_file.write(model.to_json())
# serialize weights to HDF5
model.save_weights("model.h5")
print("Saved model to disk")

model.h5

2.05 MB
Binary file not shown.

model.json

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
{"class_name": "Sequential", "keras_version": "1.2.1", "config": [{"class_name": "Dense", "config": {"W_constraint": null, "b_constraint": null, "name": "dense_1", "output_dim": 1024, "activity_regularizer": null, "trainable": true, "init": "uniform", "bias": true, "input_dtype": "float32", "input_dim": 8, "b_regularizer": null, "W_regularizer": null, "activation": "softplus", "batch_input_shape": [null, 8]}}, {"class_name": "Dropout", "config": {"p": 0.7342146978592597, "trainable": true, "name": "dropout_1"}}, {"class_name": "Dense", "config": {"W_constraint": null, "b_constraint": null, "name": "dense_2", "activity_regularizer": null, "trainable": true, "init": "glorot_uniform", "bias": true, "input_dim": 1024, "b_regularizer": null, "W_regularizer": null, "activation": "linear", "output_dim": 512}}, {"class_name": "Activation", "config": {"activation": "softplus", "trainable": true, "name": "activation_1"}}, {"class_name": "Dropout", "config": {"p": 0.692539034315719, "trainable": true, "name": "dropout_2"}}, {"class_name": "Dense", "config": {"W_constraint": null, "b_constraint": null, "name": "dense_3", "activity_regularizer": null, "trainable": true, "init": "uniform", "bias": true, "input_dim": 512, "b_regularizer": null, "W_regularizer": null, "activation": "sigmoid", "output_dim": 1}}]}

run.py

+27
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
"""Reload the trained model (model.json + model.h5) and score it on 0208.csv."""
from keras.models import Sequential
from keras.layers import Dense
from keras.models import model_from_json
import numpy

# Held-out data: columns 0-7 are features, column 8 is the binary label.
dataset2 = numpy.loadtxt("0208.csv", delimiter=",")
Z = dataset2[:, 0:8]
Q = dataset2[:, 8]

# load json and create model (with-statement replaces unclosed open/close pair)
with open('model.json', 'r') as json_file:
    loaded_model_json = json_file.read()
loaded_model = model_from_json(loaded_model_json)
# load weights into new model
loaded_model.load_weights("model.h5")
print("Loaded model from disk")

# test data — a compile is required before evaluate(); the optimizer choice
# only matters for further training, not for evaluation.
loaded_model.compile(loss='mse', optimizer='Adamax', metrics=['accuracy'])
score = loaded_model.evaluate(Z, Q, verbose=0)
# BUG FIX: this was a Python 2 `print` statement — a SyntaxError under
# Python 3 and inconsistent with the print() calls elsewhere in this file.
print("for test %s: %.2f%%" % (loaded_model.metrics_names[1], score[1]*100))

# prediction: threshold the sigmoid outputs at 0.5.
probabilities = loaded_model.predict(Z)
# numpy.round + flatten replaces round() over rows of a 2-D array, which is
# fragile on Python 3 / newer numpy (round() needs a scalar __round__).
predictions = numpy.round(probabilities).flatten()
accuracy = numpy.mean(predictions == Q)
print("Prediction Accuracy: %.2f%%" % (accuracy*100))

test1.py

+48
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
"""Grid-search batch size and epoch count for a Keras MLP via scikit-learn."""
import numpy
from sklearn.model_selection import GridSearchCV
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from keras.wrappers.scikit_learn import BaseWrapper
import copy


def custom_get_params(self, **params):
    """Replacement get_params that includes build_fn so sklearn can clone the wrapper."""
    merged = copy.deepcopy(self.sk_params)
    merged['build_fn'] = self.build_fn
    return merged

# Patch the Keras sklearn wrapper so GridSearchCV cloning works.
BaseWrapper.get_params = custom_get_params


# Function to create model, required for KerasClassifier
def create_model():
    """Build and compile a fixed 8-12-1 binary classifier."""
    net = Sequential()
    net.add(Dense(12, input_dim=8, activation='relu'))
    net.add(Dense(1, activation='sigmoid'))
    net.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
    return net


# fix random seed for reproducibility
numpy.random.seed(7)

# load dataset; split into input (X) and output (Y) variables
dataset = numpy.loadtxt("0207.csv", delimiter=",")
X, Y = dataset[:, 0:8], dataset[:, 8]

# create model
model = KerasClassifier(build_fn=create_model, verbose=0)

# define the grid search parameters: every combination below is tried
param_grid = {
    'batch_size': [10, 20, 40, 60, 80, 100],
    'nb_epoch': [10, 50, 100],
}
grid = GridSearchCV(estimator=model, param_grid=param_grid, n_jobs=-1)
grid_result = grid.fit(X, Y)

# summarize results
print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
cv = grid_result.cv_results_
for mean, stdev, param in zip(cv['mean_test_score'], cv['std_test_score'], cv['params']):
    print("%f (%f) with: %r" % (mean, stdev, param))

test2.py

+48
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
"""Grid-search the optimizer choice for a Keras MLP via scikit-learn."""
import numpy
from sklearn.model_selection import GridSearchCV
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier

from keras.wrappers.scikit_learn import BaseWrapper
import copy


def custom_get_params(self, **params):
    """Replacement get_params that includes build_fn so sklearn can clone the wrapper."""
    merged = copy.deepcopy(self.sk_params)
    merged['build_fn'] = self.build_fn
    return merged

# Patch the Keras sklearn wrapper so GridSearchCV cloning works.
BaseWrapper.get_params = custom_get_params


# Function to create model, required for KerasClassifier
def create_model(optimizer='adam'):
    """Build and compile an 8-12-1 binary classifier with the given optimizer."""
    net = Sequential()
    net.add(Dense(12, input_dim=8, activation='relu'))
    net.add(Dense(1, activation='sigmoid'))
    net.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    return net


# fix random seed for reproducibility
numpy.random.seed(7)

# load dataset; split into input (X) and output (Y) variables
dataset = numpy.loadtxt("0207.csv", delimiter=",")
X, Y = dataset[:, 0:8], dataset[:, 8]

# create model with fixed training schedule; only the optimizer is searched
model = KerasClassifier(build_fn=create_model, nb_epoch=50, batch_size=10, verbose=0)

# define the grid search parameters
param_grid = {
    'optimizer': ['SGD', 'RMSprop', 'Adagrad', 'Adadelta', 'Adam', 'Adamax', 'Nadam'],
}
grid = GridSearchCV(estimator=model, param_grid=param_grid, n_jobs=-1)
grid_result = grid.fit(X, Y)

# summarize results
print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
cv = grid_result.cv_results_
for mean, stdev, param in zip(cv['mean_test_score'], cv['std_test_score'], cv['params']):
    print("%f (%f) with: %r" % (mean, stdev, param))

0 commit comments

Comments
 (0)