block.py · 103 lines (88 loc) · 4.04 KB
import torch
import torch.nn as nn
import torch.nn.functional as F
from spectral_normalization import SpectralNorm

class ResBlock(nn.Module):
    """Residual block: two 3x3 convolutions with batch normalization and an
    identity skip connection (requires in_channels == out_channels)."""
    def __init__(self, in_channels, out_channels):
        super(ResBlock, self).__init__()
        self.main = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(out_channels, affine=True, track_running_stats=True),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(out_channels, affine=True, track_running_stats=True))

    def forward(self, x):
        return self.main(x) + x
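

# A minimal smoke test for ResBlock (an illustrative sketch, not part of the
# original module). The block preserves spatial shape, so the residual add is
# valid whenever in_channels == out_channels:
#
#     block = ResBlock(64, 64)
#     x = torch.randn(2, 64, 32, 32)
#     assert block(x).shape == x.shape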


class ConvBlock(nn.Module):
    """Conv + BatchNorm + LeakyReLU block, optionally spectrally normalized.
    With up=True, forward() bilinearly upsamples x1, pads it to match the
    skip tensor x2, and concatenates them before the convolution."""
    def __init__(self, dim_in, dim_out, spec_norm=False, LR=0.01, stride=1, up=False):
        super(ConvBlock, self).__init__()
        self.up = up
        if self.up:
            self.up_sample = nn.UpsamplingBilinear2d(scale_factor=2)
        else:
            self.up_sample = None
        if spec_norm:
            self.main = nn.Sequential(
                SpectralNorm(nn.Conv2d(dim_in, dim_out, kernel_size=3, stride=stride, padding=1, bias=False)),
                nn.BatchNorm2d(dim_out, affine=True, track_running_stats=True),
                nn.LeakyReLU(LR, inplace=False),
            )
        else:
            self.main = nn.Sequential(
                nn.Conv2d(dim_in, dim_out, kernel_size=3, stride=stride, padding=1, bias=False),
                nn.BatchNorm2d(dim_out, affine=True, track_running_stats=True),
                nn.LeakyReLU(LR, inplace=False),
            )

    def forward(self, x1, x2=None):
        if self.up_sample is not None:
            # x2 is the encoder skip tensor; it is required when up=True.
            x1 = self.up_sample(x1)
            # Tensors are NCHW: pad x1 so its spatial size matches x2,
            # then concatenate along the channel axis.
            diffY = x2.size()[2] - x1.size()[2]
            diffX = x2.size()[3] - x1.size()[3]
            x1 = F.pad(x1, [diffX // 2, diffX - diffX // 2,
                            diffY // 2, diffY - diffY // 2])
            # If you have padding issues, see
            # https://github.com/HaiyongJiang/U-Net-Pytorch-Unstructured-Buggy/commit/0e854509c2cea854e247a9c615f175f76fbb2e3a
            # https://github.com/xiaopeng-liao/Pytorch-UNet/commit/8ebac70e633bac59fc22bb5195e513d5832fb3bd
            x = torch.cat([x2, x1], dim=1)
            return self.main(x)
        else:
            return self.main(x1)
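

# Illustrative decoder-step sketch (assumed usage, not from the original file).
# With up=True the block concatenates the upsampled x1 with the skip tensor x2,
# so dim_in must equal x1.channels + x2.channels:
#
#     down = ConvBlock(3, 64, stride=2)      # 64x64 -> 32x32
#     up = ConvBlock(64 + 3, 32, up=True)    # 64 channels from x1, 3 from skip
#     x = torch.randn(1, 3, 64, 64)
#     out = up(down(x), x)                   # -> (1, 32, 64, 64)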
"""
class ConvUpBlock(nn.Module):
def __init__(self, dim_in, dim_out, spec_norm=False, LR=0.01, up=False):
super(ConvUpBlock, self).__init__()
self.up = up
if self.up:
self.up_smaple = nn.UpsamplingBilinear2d(scale_factor=2)
else:
self.up_smaple = None
if spec_norm:
self.main = nn.Sequential(
SpectralNorm(nn.Conv2d(dim_in, dim_out, kernel_size=3, stride=1, padding=1, bias=False)),
nn.BatchNorm2d(dim_out, affine=True, track_running_stats=True),
nn.LeakyReLU(LR, inplace=False),
)
else:
self.main = nn.Sequential(
nn.Conv2d(dim_in, dim_out, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(dim_out, affine=True, track_running_stats=True),
nn.LeakyReLU(LR, inplace=False),
)
def forward(self, x1, x2):
if self.up_smaple is not None:
x1 = self.up_smaple(x1)
# input is CHW
diffY = x2.size()[2] - x1.size()[2]
diffX = x2.size()[3] - x1.size()[3]
x1 = F.pad(x1, [diffX // 2, diffX - diffX // 2,
diffY // 2, diffY - diffY // 2])
# if you have padding issues, see
# https://github.com/HaiyongJiang/U-Net-Pytorch-Unstructured-Buggy/commit/0e854509c2cea854e247a9c615f175f76fbb2e3a
# https://github.com/xiaopeng-liao/Pytorch-UNet/commit/8ebac70e633bac59fc22bb5195e513d5832fb3bd
x = torch.cat([x2, x1], dim=1)
return self.main(x)
else:
return self.main(x1)
"""