
Commit

cleanup
bruce-edelman committed Jul 20, 2023
1 parent bb1d967 commit 5a351b1
Showing 1 changed file with 0 additions and 73 deletions.
73 changes: 0 additions & 73 deletions diploshic/network.py
@@ -115,76 +115,3 @@ def construct_model(input_shape, domain_adaptation=False, da_weight=1):
    model = Model(inputs=[model_in], outputs=[output])
    model.compile(loss=masked_cce, optimizer="adam", metrics=[masked_categorical_accuracy])
    return model


def construct_model_v2(input_shape, domain_adaptation=False, da_weight=1):
    model_in = Input(input_shape)
    h = Conv2D(128, 3, activation="relu", padding="same", name="conv1_1")(
        model_in
    )
    h = Conv2D(64, 3, activation="relu", padding="same", name="conv1_2")(h)
    h = MaxPooling2D(pool_size=3, name="pool1", padding="same")(h)
    h = Dropout(0.15, name="drop1")(h)
    h = Flatten(name="flaten1")(h)

    dh = Conv2D(
        128,
        2,
        activation="relu",
        dilation_rate=[1, 3],
        padding="same",
        name="dconv1_1",
    )(model_in)
    dh = Conv2D(
        64,
        2,
        activation="relu",
        dilation_rate=[1, 3],
        padding="same",
        name="dconv1_2",
    )(dh)
    dh = MaxPooling2D(pool_size=2, name="dpool1")(dh)
    dh = Dropout(0.15, name="ddrop1")(dh)
    dh = Flatten(name="dflaten1")(dh)

    dh1 = Conv2D(
        128,
        2,
        activation="relu",
        dilation_rate=[1, 4],
        padding="same",
        name="dconv4_1",
    )(model_in)
    dh1 = Conv2D(
        64,
        2,
        activation="relu",
        dilation_rate=[1, 4],
        padding="same",
        name="dconv4_2",
    )(dh1)
    dh1 = MaxPooling2D(pool_size=2, name="d1pool1")(dh1)
    dh1 = Dropout(0.15, name="d1drop1")(dh1)
    dh1 = Flatten(name="d1flaten1")(dh1)

    h_concated = concatenate([h, dh, dh1])
    h = Dense(512, name="512dense", activation="relu")(h_concated)
    h = Dropout(0.2, name="drop7")(h)
    h = Dense(128, name="last_dense", activation="relu")(h)
    h = Dropout(0.1, name="drop8")(h)
    output = Dense(5, name="predictor", activation="softmax")(h)
    if domain_adaptation:
        da = GradReverse()(h_concated)
        da = Dense(512, name="DA512dense", activation="relu")(da)
        da = Dense(256, name="DA256dense", activation="relu")(da)
        da = Dense(128, name="DA128dense", activation="relu")(da)
        domain_output = Dense(1, name="discriminator", activation="sigmoid")(da)
        model = Model(inputs=[model_in], outputs=[output, domain_output])
        model.compile(
            optimizer="adam",
            loss={"predictor": masked_cce, "discriminator": masked_bce},
            loss_weights=[1, da_weight],  # weight the two task losses (equal when da_weight=1)
            metrics={"predictor": "accuracy", "discriminator": "accuracy"},
        )
    else:
        model = Model(inputs=[model_in], outputs=[output])
        model.compile(loss=masked_cce, optimizer="adam", metrics=["accuracy"])
    return model
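
For reference, a minimal sketch of how the removed construct_model_v2 builder could have been exercised before this cleanup; the input shape, dummy arrays, and training call below are illustrative assumptions, not values taken from diploSHIC.

# Sketch only: the feature-image shape and dummy data here are assumptions for
# illustration; diploSHIC's real feature dimensions may differ.
import numpy as np

input_shape = (11, 89, 1)  # hypothetical (subwindows, statistics, channels)
model = construct_model_v2(input_shape, domain_adaptation=False)

X = np.random.rand(32, *input_shape)              # dummy batch of feature images
y = np.eye(5)[np.random.randint(0, 5, size=32)]   # one-hot labels for the 5 output classes
model.fit(X, y, epochs=1, batch_size=16)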
