ops.py
# TF 1.x building blocks shared by the model definitions in this repo.
import tensorflow as tf

relu = tf.nn.relu
lrelu = tf.nn.leaky_relu
flatten = tf.layers.flatten

def deconv2d(inputs, filters, kernel_size=5, strides=2, name=None):
    # Transposed convolution with 'SAME' padding and Glorot-normal
    # initialization; the bias is dropped because batch norm follows.
    return tf.layers.conv2d_transpose(inputs,
                                      filters,
                                      kernel_size,
                                      strides,
                                      'SAME',
                                      kernel_initializer=tf.glorot_normal_initializer(),
                                      use_bias=False,
                                      name=name)

def conv2d(inputs, filters, kernel_size=5, strides=2, name=None):
    # Strided convolution with 'SAME' padding and Glorot-normal
    # initialization; no bias, since batch norm usually follows.
    return tf.layers.conv2d(inputs,
                            filters,
                            kernel_size,
                            strides,
                            'SAME',
                            kernel_initializer=tf.glorot_normal_initializer(),
                            use_bias=False,
                            name=name)

'''
Do not use tf.layers.batch_normalization here; use
tf.contrib.layers.batch_norm instead. With updates_collections=None the
moving-average updates are applied in place, so no separate update ops
have to be run alongside the train op.
'''
def bat_norm(inputs, is_training, name=None):
    return tf.contrib.layers.batch_norm(inputs,
                                        decay=0.9,
                                        updates_collections=None,
                                        epsilon=1e-5,
                                        scale=True,
                                        is_training=is_training,
                                        scope=name)
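
# For contrast (a hypothetical snippet, not part of this file): with
# tf.layers.batch_normalization the moving-average update ops must be run
# explicitly, e.g.
#
#   update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
#   with tf.control_dependencies(update_ops):
#       train_op = optimizer.minimize(loss)
#
# updates_collections=None above is what makes that extra step unnecessary.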

def dense(inputs, units, activation=None, name=None):
    return tf.layers.dense(inputs,
                           units=units,
                           activation=activation,
                           kernel_initializer=tf.glorot_normal_initializer(),
                           use_bias=False,
                           name=name)

def deconv2d_layer(inputs, filters, is_training, name=None):
    # deconv -> batch norm -> ReLU. `name` must be provided, since it
    # prefixes the sub-op names.
    tensor = deconv2d(inputs, filters, name=name + "Deconv2d")
    tensor = bat_norm(tensor, is_training=is_training, name=name + "bn")
    tensor = relu(tensor, name=name + "relu")
    return tensor

def conv2d_layer(inputs, filters, is_training, batch_norm=True, name=None):
    # conv -> (optional) batch norm -> leaky ReLU. Batch norm is typically
    # disabled on the first discriminator layer.
    tensor = conv2d(inputs, filters, name=name + "conv2d")
    if batch_norm:
        tensor = bat_norm(tensor, is_training=is_training, name=name + "bn")
    tensor = lrelu(tensor, name=name + "lrelu")
    return tensor

def sigmoid_cross_entropy(logits, labels):
    # Mean sigmoid cross-entropy over the batch.
    return tf.reduce_mean(
        tf.nn.sigmoid_cross_entropy_with_logits(logits=logits,
                                                labels=labels))
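
# A minimal usage sketch (not from the original file): wiring these helpers
# into a small DCGAN-style discriminator. The input shape, layer widths, and
# placeholder names below are illustrative assumptions, not the repo's model.
if __name__ == "__main__":
    images = tf.placeholder(tf.float32, [None, 64, 64, 3], name="images")
    is_training = tf.placeholder(tf.bool, name="is_training")

    # First conv layer conventionally skips batch norm.
    h = conv2d_layer(images, 64, is_training, batch_norm=False, name="d1")  # -> 32x32x64
    h = conv2d_layer(h, 128, is_training, name="d2")                        # -> 16x16x128
    logits = dense(flatten(h), 1, name="d_logits")

    # Discriminator loss against "real" labels.
    loss = sigmoid_cross_entropy(logits, tf.ones_like(logits))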