Remove in_conv in ResNet.
chrischute committed Dec 9, 2018
1 parent 73922d0 commit 3374f19
Showing 1 changed file with 1 addition and 5 deletions.
models/resnet/resnet.py (1 addition, 5 deletions)
@@ -1,4 +1,3 @@
-import torch
 import torch.nn as nn
 import torch.nn.functional as F
 
@@ -18,8 +17,7 @@ class ResNet(nn.Module):
     """
     def __init__(self, in_channels, mid_channels, out_channels, num_blocks, kernel_size, padding):
         super(ResNet, self).__init__()
-        self.in_norm = nn.BatchNorm2d(in_channels, affine=False)
-        self.in_conv = WNConv2d(2 * in_channels, mid_channels, kernel_size, padding, bias=True)
+        self.in_conv = WNConv2d(in_channels, mid_channels, kernel_size, padding, bias=True)
         self.in_skip = WNConv2d(mid_channels, mid_channels, kernel_size=1, padding=0, bias=True)
 
         self.blocks = nn.ModuleList([ResidualBlock(mid_channels, mid_channels)
@@ -31,8 +29,6 @@ def __init__(self, in_channels, mid_channels, out_channels, num_blocks, kernel_size, padding):
         self.out_conv = WNConv2d(mid_channels, out_channels, kernel_size=1, padding=0, bias=True)
 
     def forward(self, x):
-        x = self.in_norm(x)
-        x = F.relu(torch.cat((x, -x), dim=1))
         x = self.in_conv(x)
         x_skip = self.in_skip(x)
 