-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathfdm_model.py
100 lines (82 loc) · 3.18 KB
/
fdm_model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
import torch
import torch.nn as nn
import torch.nn.functional as F
class HE(nn.Module):
    """One explicit (FTCS) finite-difference step of the 2-D heat equation.

    Computes u_{t+1} = u_t + (dt*c/h**2) * Laplacian(u_t), where the
    Laplacian is the 5-point stencil and replication padding imposes
    zero-flux (Neumann-like) boundary conditions.

    Args:
        dt: time-step size.
        c:  diffusion coefficient.
        h:  spatial grid spacing.
    """

    def __init__(self, dt, c, h):
        super().__init__()
        # Coefficient of the discrete Laplacian term.
        self.alpha = dt * c / (h ** 2)
        # 5-point Laplacian stencil, shape (1, 1, 3, 3) for conv2d.
        # Registered as a buffer so it follows the module across
        # .to()/.cpu()/.cuda() and appears in state_dict — the original
        # created it directly on cuda:0 as a plain attribute, which broke
        # moving the module between devices.
        self.register_buffer(
            "delta",
            torch.tensor([[[[0., 1., 0.], [1., -4., 1.], [0., 1., 0.]]]]),
        )
        self.pad = nn.ReplicationPad2d(1)

    def forward(self, x):
        """Advance the field x (assumed (N, 1, H, W) — confirm with caller) one step."""
        z = F.conv2d(self.pad(x), self.delta)
        return x + self.alpha * z
class FE(nn.Module):
    """One explicit finite-difference step of a Fisher-type reaction-diffusion PDE.

    Computes u_{t+1} = alpha*Laplacian(u) + (1 + r*dt)*u - r*dt*u**2,
    i.e. diffusion plus the logistic reaction term r*u*(1-u).

    Args:
        dt: time-step size.
        c:  diffusion coefficient.
        r:  reaction-rate coefficient.
        h:  spatial grid spacing.
    """

    def __init__(self, dt, c, r, h):
        super().__init__()
        # Diffusion and reaction coefficients of the explicit update.
        self.alpha = dt * c / (h ** 2)
        self.beta = r * dt
        # 5-point Laplacian stencil as a buffer so it moves with the
        # module (.to()/.cpu()) and is saved in state_dict — the original
        # pinned it to cuda:0 as a plain attribute.
        self.register_buffer(
            "delta",
            torch.tensor([[[[0., 1., 0.], [1., -4., 1.], [0., 1., 0.]]]]),
        )
        # Replication padding -> zero-flux boundary conditions.
        self.pad = nn.ReplicationPad2d(1)

    def forward(self, x):
        """Advance the field x (assumed (N, 1, H, W) — confirm with caller) one step."""
        z = F.conv2d(self.pad(x), self.delta)
        return self.alpha * z + (1 + self.beta) * x - self.beta * x ** 2
class AC(nn.Module):
    """One explicit finite-difference step of the Allen-Cahn equation.

    Computes u_{t+1} = alpha*Laplacian(u) + (1 + r*dt)*u - r*dt*u**3,
    i.e. diffusion plus the cubic reaction term r*(u - u**3).

    Args:
        dt: time-step size.
        c:  diffusion coefficient.
        r:  reaction-rate coefficient.
        h:  spatial grid spacing.
    """

    def __init__(self, dt, c, r, h):
        super().__init__()
        # Diffusion and reaction coefficients of the explicit update.
        self.alpha = dt * c / (h ** 2)
        self.beta = r * dt
        # 5-point Laplacian stencil as a buffer so it moves with the
        # module (.to()/.cpu()) and is saved in state_dict — the original
        # pinned it to cuda:0 as a plain attribute.
        self.register_buffer(
            "delta",
            torch.tensor([[[[0., 1., 0.], [1., -4., 1.], [0., 1., 0.]]]]),
        )
        # Replication padding -> zero-flux boundary conditions.
        self.pad = nn.ReplicationPad2d(1)

    def forward(self, x):
        """Advance the field x (assumed (N, 1, H, W) — confirm with caller) one step."""
        z = F.conv2d(self.pad(x), self.delta)
        return self.alpha * z + (1 + self.beta) * x - self.beta * x ** 3
class Tanh(nn.Module):
def __init__(self, dt, c, r, h):
super(Tanh, self).__init__()
self.alpha = dt * c / (h ** 2)
self.beta = r*dt
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
self.delta = torch.Tensor([[[[0., 1., 0.], [1., -4., 1], [0., 1., 0.]]]]).to(device)
self.pad = nn.ReplicationPad2d(1)
def forward(self, x):
u_pad = self.pad(x)
z = F.conv2d(u_pad, self.delta)
x = self.alpha * z + x + self.beta*torch.tanh(x)
return x
class Sine(nn.Module):
    """One explicit finite-difference step of a sine-Gordon-style reaction PDE.

    Computes u_{t+1} = alpha*Laplacian(u) + u - r*dt*sin(pi*u).

    Args:
        dt: time-step size.
        c:  diffusion coefficient.
        r:  reaction-rate coefficient.
        h:  spatial grid spacing.
    """

    def __init__(self, dt, c, r, h):
        super().__init__()
        # Diffusion and reaction coefficients of the explicit update.
        self.alpha = dt * c / (h ** 2)
        self.beta = r * dt
        # 5-point Laplacian stencil as a buffer so it moves with the
        # module (.to()/.cpu()) and is saved in state_dict — the original
        # pinned it to cuda:0 as a plain attribute.
        self.register_buffer(
            "delta",
            torch.tensor([[[[0., 1., 0.], [1., -4., 1.], [0., 1., 0.]]]]),
        )
        # Replication padding -> zero-flux boundary conditions.
        self.pad = nn.ReplicationPad2d(1)
        # pi via acos(0)*2, kept from the original to preserve the exact
        # float value used in forward (math.pi would differ in the last bits).
        self.pi = torch.acos(torch.zeros(1)).item() * 2

    def forward(self, x):
        """Advance the field x (assumed (N, 1, H, W) — confirm with caller) one step."""
        z = F.conv2d(self.pad(x), self.delta)
        return self.alpha * z + x - self.beta * torch.sin(self.pi * x)
def fdm(name="ac", dt=0, c=0, r=0, h=0):
    """Build the finite-difference stepper selected by *name* and move it
    to the best available device (cuda:0 if present, else CPU).

    Recognised names: "he", "fe", "tanh", "sine"; any other value
    (including the default "ac") yields the Allen-Cahn model.

    Args:
        name: model selector string.
        dt, c, r, h: PDE discretisation parameters forwarded to the model
            ("he" ignores r).
    """
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    print("Device Type: ", device)
    builders = {
        "he": lambda: HE(dt, c, h),
        "fe": lambda: FE(dt, c, r, h),
        "tanh": lambda: Tanh(dt, c, r, h),
        "sine": lambda: Sine(dt, c, r, h),
    }
    # Unrecognised names fall back to Allen-Cahn, matching the original
    # if/elif chain's final else branch.
    build = builders.get(name, lambda: AC(dt, c, r, h))
    return build().to(device)