Commit 3400292: temp fix
jyaacoub committed Dec 15, 2023
1 parent 7917602 commit 3400292
Showing 1 changed file with 9 additions and 9 deletions.
18 changes: 9 additions & 9 deletions src/models/pro_mod.py
@@ -351,21 +351,21 @@ def forward_pro(self, data):
                                 self.edge_weight != 'binary') else None
 
         target_x = self.relu(target_x)
-        ei_drp, _, _ = dropout_node(ei, p=self.dropout_prot_p, num_nodes=target_x.shape[0],
-                                    training=self.training)
+        # ei_drp, _, _ = dropout_node(ei, p=self.dropout_prot_p, num_nodes=target_x.shape[0],
+        #                             training=self.training)
 
         # conv1
-        xt = self.pro_conv1(target_x, ei_drp, ew)
+        xt = self.pro_conv1(target_x, ei, ew)
         xt = self.relu(xt)
-        ei_drp, _, _ = dropout_node(ei, p=self.dropout_prot_p, num_nodes=target_x.shape[0],
-                                    training=self.training)
+        # ei_drp, _, _ = dropout_node(ei, p=self.dropout_prot_p, num_nodes=target_x.shape[0],
+        #                             training=self.training)
         # conv2
-        xt = self.pro_conv2(xt, ei_drp, ew)
+        xt = self.pro_conv2(xt, ei, ew)
         xt = self.relu(xt)
-        ei_drp, _, _ = dropout_node(ei, p=self.dropout_prot_p, num_nodes=target_x.shape[0],
-                                    training=self.training)
+        # ei_drp, _, _ = dropout_node(ei, p=self.dropout_prot_p, num_nodes=target_x.shape[0],
+        #                             training=self.training)
         # conv3
-        xt = self.pro_conv3(xt, ei_drp, ew)
+        xt = self.pro_conv3(xt, ei, ew)
         xt = self.relu(xt)
 
         # flatten/pool
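For context, this change comments out the torch_geometric.utils.dropout_node calls and passes the full edge index ei to pro_conv1/2/3 instead of the dropped ei_drp, so node dropout on the protein graph is disabled. Below is a minimal, self-contained sketch of the pattern that was disabled, assuming PyTorch Geometric >= 2.1, where dropout_node returns the pruned edge_index plus boolean edge and node masks. The ProtBranch class, the GCNConv layers, and the layer sizes are illustrative stand-ins, not the repository's actual pro_conv* definitions; the sketch also filters the per-edge weights with the returned edge mask so they stay aligned with the pruned edge_index.

import torch
from torch_geometric.nn import GCNConv
from torch_geometric.utils import dropout_node


class ProtBranch(torch.nn.Module):
    """Illustrative stand-in for the protein branch; not the repo's pro_mod.py."""

    def __init__(self, in_dim=54, hidden=128, dropout_prot_p=0.2):
        super().__init__()
        self.dropout_prot_p = dropout_prot_p
        self.conv1 = GCNConv(in_dim, hidden)
        self.conv2 = GCNConv(hidden, hidden)

    def forward(self, x, edge_index, edge_weight=None):
        # Resample a node-dropout mask before each conv; dropout_node returns
        # the pruned edge_index plus boolean masks over edges and nodes.
        ei_drp, e_mask, _ = dropout_node(edge_index, p=self.dropout_prot_p,
                                         num_nodes=x.shape[0],
                                         training=self.training)
        # Filter per-edge weights with the edge mask so they stay aligned
        # with the pruned edge_index (skipped when weights are None/binary).
        ew_drp = edge_weight[e_mask] if edge_weight is not None else None
        x = torch.relu(self.conv1(x, ei_drp, ew_drp))

        ei_drp, e_mask, _ = dropout_node(edge_index, p=self.dropout_prot_p,
                                         num_nodes=x.shape[0],
                                         training=self.training)
        ew_drp = edge_weight[e_mask] if edge_weight is not None else None
        x = torch.relu(self.conv2(x, ei_drp, ew_drp))
        return x


# Toy usage: a 4-node ring graph with random node features.
x = torch.randn(4, 54)
edge_index = torch.tensor([[0, 1, 2, 3], [1, 2, 3, 0]])
out = ProtBranch()(x, edge_index)
print(out.shape)  # torch.Size([4, 128])

Resampling the mask before each conv layer, as the pre-fix code did, means each layer sees an independently thinned graph during training, while evaluation (self.training == False) leaves the graph untouched.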
