ops.py
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers


class Sigmoid(layers.Layer):
    """Sigmoid activation wrapped as a Keras layer."""
    def __init__(self):
        super(Sigmoid, self).__init__()

    def call(self, inputs):
        return keras.activations.sigmoid(inputs)


class Tanh(layers.Layer):
    """Tanh activation wrapped as a Keras layer."""
    def __init__(self):
        super(Tanh, self).__init__()

    def call(self, inputs):
        return keras.activations.tanh(inputs)


class Conv2D(layers.Layer):
    """Strided 2-D convolution with truncated-normal weights and zero-initialized bias."""
    def __init__(self, filters, kernel_size, strides=2):
        super(Conv2D, self).__init__()
        self.conv_op = layers.Conv2D(filters=filters,
                                     kernel_size=kernel_size,
                                     strides=strides,
                                     padding='same',
                                     kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.02),
                                     use_bias=True,
                                     bias_initializer=keras.initializers.Constant(value=0.0))

    def call(self, inputs):
        return self.conv_op(inputs)


class BatchNorm(layers.Layer):
    """Batch normalization; `is_training=False` freezes the layer's variables."""
    def __init__(self, is_training=False):
        super(BatchNorm, self).__init__()
        self.bn = tf.keras.layers.BatchNormalization(epsilon=1e-5,
                                                     momentum=0.9,
                                                     scale=True,
                                                     trainable=is_training)

    def call(self, inputs, training):
        x = self.bn(inputs, training=training)
        return x


class DenseLayer(layers.Layer):
    """Fully connected layer; `is_input` is accepted but currently unused."""
    def __init__(self, hidden_n, is_input=False):
        super(DenseLayer, self).__init__()
        self.fc_op = layers.Dense(hidden_n,
                                  kernel_initializer=keras.initializers.RandomNormal(stddev=0.02),
                                  bias_initializer=keras.initializers.Constant(value=0.0))

    def call(self, inputs):
        x = self.fc_op(inputs)
        return x


class UpConv2D(layers.Layer):
    """Transposed (fractionally strided) convolution for upsampling."""
    def __init__(self, filters, kernel_size, strides):
        super(UpConv2D, self).__init__()
        self.up_conv_op = layers.Conv2DTranspose(filters,
                                                 kernel_size=kernel_size,
                                                 strides=strides,
                                                 padding='same',
                                                 kernel_initializer=keras.initializers.RandomNormal(stddev=0.02),
                                                 use_bias=True,
                                                 bias_initializer=keras.initializers.Constant(value=0.0))

    def call(self, inputs):
        x = self.up_conv_op(inputs)
        return x


def conv_cond_concat(x, y):
    """Concatenate conditioning vector on feature map axis."""
    # x: [batch, height, width, channels]; y: [batch, cond_dim].
    # y is broadcast to every spatial position and appended as extra channels.
    x_shapes = tf.shape(x)
    y_shapes = tf.shape(y)
    y = tf.reshape(y, [-1, 1, 1, y_shapes[1]])
    y_shapes = tf.shape(y)
    return tf.concat([x, y * tf.ones([x_shapes[0], x_shapes[1], x_shapes[2], y_shapes[3]])], 3)
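

# A minimal usage sketch, not part of the original file: the batch size, image
# shape, and 10-class one-hot labels below are assumptions chosen to illustrate
# how conv_cond_concat and the layer wrappers above might be composed in a
# conditional-GAN-style discriminator block.
if __name__ == '__main__':
    images = tf.random.normal([8, 28, 28, 1])            # hypothetical image batch
    labels = tf.one_hot(tf.range(8) % 10, depth=10)       # hypothetical conditioning vectors

    h = conv_cond_concat(images, labels)                   # -> [8, 28, 28, 11]
    h = Conv2D(filters=64, kernel_size=4, strides=2)(h)    # -> [8, 14, 14, 64]
    h = BatchNorm(is_training=True)(h, training=True)
    h = Sigmoid()(h)
    print(h.shape)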