models.py
from tensorflow.keras.layers import (
    BatchNormalization, Concatenate, Conv3D, Dense, Dropout, Flatten,
    Input, LeakyReLU, Reshape,
)
from tensorflow.keras.models import Model
from tensorflow.keras.regularizers import l2

def get_encoder(imsize, args, n_intermed_features=1024, latent_dim=512,
                ft_bank_baseline=128):
    """Build the 3D convolutional encoder.

    Maps a single-channel volume of shape `imsize` to a flat feature
    vector of length `n_intermed_features`. `args` is accepted for
    signature compatibility with the training script but is unused here.
    """
    input_img = Input(shape=(imsize[0], imsize[1], imsize[2], 1))

    # Four stride-2 Conv3D blocks, each halving the spatial resolution.
    feature = Conv3D(ft_bank_baseline, kernel_size=3,
                     padding='same', strides=2)(input_img)
    feature = LeakyReLU(alpha=0.3)(feature)
    feature = BatchNormalization()(feature)

    feature = Conv3D(ft_bank_baseline * 2, kernel_size=3,
                     padding='same', strides=2)(feature)
    feature = LeakyReLU(alpha=0.3)(feature)
    feature = BatchNormalization()(feature)

    feature = Conv3D(ft_bank_baseline * 2, kernel_size=3,
                     padding='same', strides=2)(feature)
    feature = LeakyReLU(alpha=0.3)(feature)
    feature = BatchNormalization()(feature)

    feature = Conv3D(ft_bank_baseline * 2, kernel_size=3,
                     padding='same', strides=2)(feature)
    feature = LeakyReLU(alpha=0.3)(feature)
    feature = BatchNormalization()(feature)

    # Fully connected head that compresses the volume features.
    feature = Flatten()(feature)
    feature = Dense(latent_dim * 4)(feature)
    feature = LeakyReLU(alpha=0.3)(feature)
    feature = BatchNormalization()(feature)
    feature = Dense(latent_dim * 4)(feature)
    feature = LeakyReLU(alpha=0.3)(feature)
    feature = BatchNormalization()(feature)
    feature = Dense(n_intermed_features,
                    kernel_regularizer=l2(1e-4))(feature)

    # The tensor is already 2D here, so this Flatten is a no-op kept
    # for compatibility with the original graph.
    feature_dense = Flatten()(feature)

    encoder = Model(inputs=input_img, outputs=feature_dense)
    return encoder, input_img, feature_dense
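
# Shape walkthrough (illustrative, assuming imsize=(32, 32, 32) with the
# default ft_bank_baseline=128 and latent_dim=512):
#   (32, 32, 32, 1) -> four stride-2 Conv3D blocks -> (2, 2, 2, 256)
#   -> Flatten (2048) -> Dense(2048) x2 -> Dense(1024) = encoder output.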

def get_regressor(outsize, feature_dense, args, n_intermed_features=1024,
                  latent_dim=512):
    """Build the regressor head on top of the encoder features.

    Produces `outsize[1]` softmax distributions over `outsize[2]` classes,
    stacked into a (outsize[1], outsize[2]) output. `feature_dense` is
    accepted for signature compatibility but unused; Dropout replaces
    BatchNormalization when `args.nobatch` is set.
    """
    inputs_x = Input(shape=(n_intermed_features,))

    feature = Dense(latent_dim * 4, kernel_regularizer=l2(1e-4))(inputs_x)
    feature = LeakyReLU(alpha=0.3)(feature)
    if args.nobatch:
        feature = Dropout(0.3)(feature)
    else:
        feature = BatchNormalization()(feature)

    feature = Dense(latent_dim * 2)(feature)
    feature = LeakyReLU(alpha=0.3)(feature)
    if args.nobatch:
        feature = Dropout(0.3)(feature)
    else:
        feature = BatchNormalization()(feature)

    feature = Dense(latent_dim * 2, kernel_regularizer=l2(1e-4))(feature)
    feature = LeakyReLU(alpha=0.3)(feature)

    # One softmax head per output slot, each reshaped to (1, n_classes)
    # and stacked along axis 1 into a (outsize[1], outsize[2]) tensor.
    heads = []
    for _ in range(outsize[1]):
        head = Dense(outsize[2], activation='softmax',
                     kernel_regularizer=l2(1e-4))(feature)
        heads.append(Reshape((1, outsize[2]))(head))
    cf = Concatenate(axis=1)(heads)

    regressor = Model(inputs=inputs_x, outputs=cf)
    return regressor
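

# ----------------------------------------------------------------------
# Minimal usage sketch (an assumption, not part of the original training
# pipeline): build both networks and chain them end to end. The 32^3
# input size and the argparse-style `nobatch` flag are hypothetical
# placeholders chosen for illustration.
if __name__ == "__main__":
    from types import SimpleNamespace

    args = SimpleNamespace(nobatch=False)
    imsize = (32, 32, 32)  # spatial dims divisible by 16 (four stride-2 convs)
    outsize = (1, 5, 3)    # only outsize[1] (heads) and outsize[2] (classes) are used

    encoder, input_img, feature_dense = get_encoder(imsize, args)
    regressor = get_regressor(outsize, feature_dense, args)

    # Compose encoder -> regressor into a single trainable model.
    full_model = Model(inputs=input_img, outputs=regressor(feature_dense))
    full_model.summary()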