train.py
from keras.applications import MobileNet
from keras.models import Sequential,Model
from keras.layers import Dense,Dropout,Activation,Flatten,GlobalAveragePooling2D
from keras.layers import Conv2D,MaxPooling2D,ZeroPadding2D
from keras.layers import BatchNormalization  # note: imported but not used below
from keras.preprocessing.image import ImageDataGenerator
# MobileNet is designed to work with 224 x 224 input images
img_rows,img_cols = 224,224
MobileNet = MobileNet(weights='imagenet',include_top=False,input_shape=(img_rows,img_cols,3))
# All MobileNet layers are kept trainable here, so the whole backbone is fine-tuned
# (layers are trainable by default anyway).
for layer in MobileNet.layers:
    layer.trainable = True
# Let's print our layers
for (i, layer) in enumerate(MobileNet.layers):
    print(str(i), layer.__class__.__name__, layer.trainable)
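# Optional alternative (a sketch, not part of the original script): partial fine-tuning,
# i.e. freezing every MobileNet layer except the last four. The flag below is hypothetical
# and defaults to False so the script's behaviour is unchanged.
FREEZE_ALL_BUT_LAST_4 = False  # assumption: illustration-only flag
if FREEZE_ALL_BUT_LAST_4:
    for layer in MobileNet.layers[:-4]:
        layer.trainable = False
    for layer in MobileNet.layers[-4:]:
        layer.trainable = True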
def addTopModelMobileNet(bottom_model, num_classes):
    """Creates the top or head of the model that will be
    placed on top of the bottom layers."""
    top_model = bottom_model.output
    top_model = GlobalAveragePooling2D()(top_model)
    top_model = Dense(1024, activation='relu')(top_model)
    top_model = Dense(1024, activation='relu')(top_model)
    top_model = Dense(512, activation='relu')(top_model)
    top_model = Dense(num_classes, activation='softmax')(top_model)
    return top_model
num_classes = 5
FC_Head = addTopModelMobileNet(MobileNet, num_classes)
model = Model(inputs = MobileNet.input, outputs = FC_Head)
print(model.summary())
train_data_dir = '/Users/durgeshthakur/Deep Learning Stuff/Emotion Classification/fer2013/train'
validation_data_dir = '/Users/durgeshthakur/Deep Learning Stuff/Emotion Classification/fer2013/validation'
train_datagen = ImageDataGenerator(
    rescale=1./255,
    rotation_range=30,
    width_shift_range=0.3,
    height_shift_range=0.3,
    horizontal_flip=True,
    fill_mode='nearest'
)
validation_datagen = ImageDataGenerator(rescale=1./255)
batch_size = 32
train_generator = train_datagen.flow_from_directory(
    train_data_dir,
    target_size=(img_rows, img_cols),
    batch_size=batch_size,
    class_mode='categorical'
)
validation_generator = validation_datagen.flow_from_directory(
    validation_data_dir,
    target_size=(img_rows, img_cols),
    batch_size=batch_size,
    class_mode='categorical'
)
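# Sanity check (added as a sketch, not in the original script): the generators infer
# classes from the sub-directory names, so this mapping should contain exactly
# num_classes entries and defines the order of the softmax outputs.
print(train_generator.class_indices)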
from keras.optimizers import RMSprop,Adam
from keras.callbacks import ModelCheckpoint,EarlyStopping,ReduceLROnPlateau
checkpoint = ModelCheckpoint(
    'emotion_face_mobilNet.h5',
    monitor='val_loss',
    mode='min',
    save_best_only=True,
    verbose=1)
earlystop = EarlyStopping(
    monitor='val_loss',
    min_delta=0,
    patience=10,
    verbose=1,
    restore_best_weights=True)
learning_rate_reduction = ReduceLROnPlateau(
    monitor='val_acc',  # use 'val_accuracy' on newer Keras/TensorFlow versions
    patience=5,
    verbose=1,
    factor=0.2,
    min_lr=0.0001)
callbacks = [earlystop,checkpoint,learning_rate_reduction]
model.compile(loss='categorical_crossentropy',
              optimizer=Adam(lr=0.001),  # newer Keras uses learning_rate= instead of lr=
              metrics=['accuracy'])
nb_train_samples = 24176
nb_validation_samples = 3006
epochs = 25
# fit_generator is deprecated on newer Keras, where model.fit accepts generators directly
history = model.fit_generator(
    train_generator,
    steps_per_epoch=nb_train_samples // batch_size,
    epochs=epochs,
    callbacks=callbacks,
    validation_data=validation_generator,
    validation_steps=nb_validation_samples // batch_size)
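# After training, the best weights (by val_loss) are in 'emotion_face_mobilNet.h5'.
# Below is a minimal inference sketch, not part of the original script, assuming a
# 224 x 224 RGB face crop at the hypothetical path 'some_face.jpg'.
import numpy as np
from keras.models import load_model
from keras.preprocessing import image

best_model = load_model('emotion_face_mobilNet.h5')
img = image.load_img('some_face.jpg', target_size=(img_rows, img_cols))
x = image.img_to_array(img) / 255.0  # same rescaling as the generators
x = np.expand_dims(x, axis=0)        # add the batch dimension
probs = best_model.predict(x)[0]
print('predicted class index:', np.argmax(probs))  # map back via train_generator.class_indices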