forked from MU94W/TFCommon
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathActivation.py
38 lines (28 loc) · 1.34 KB
/
Activation.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
import tensorflow as tf
from TFCommon.Initializer import gaussian_initializer, random_orthogonal_initializer
class Maxout(object):
    """Maxout activator - arXiv:1302.4389v4 [stat.ML] 20 Sep 2013
    - Maxout Networks

    Computes an element-wise max over 2 affine pieces:
    ``max(x @ W_0 + b_0, x @ W_1 + b_1)``, implemented as a single matmul
    producing ``2 * units`` channels, a reshape to ``(batch, units, 2)``,
    and a ``reduce_max`` over the last axis.

    Args:
        units: int, number of output units of the activator.
    """
    def __init__(self, units):
        self._units = units

    @property
    def units(self):
        # Number of output units produced by __call__.
        return self._units

    def __call__(self, x, scope=None):
        """Apply maxout to ``x``.

        Args:
            x: rank-2 tensor of shape ``(batch, input_size)``; the static
               ``input_size`` must be known.
            scope: optional variable-scope name (defaults to the class name).

        Returns:
            Tensor of shape ``(batch, units)``.

        Raises:
            ValueError: if the static input size is unknown.
        """
        with tf.variable_scope(scope or type(self).__name__):
            # Check if the input size exists. Use as_list() so we get a plain
            # Python int or None: under TF1, indexing the TensorShape directly
            # returns a Dimension object, and Dimension(None) `is None` is
            # False — the guard below would never fire and the error would
            # instead surface as a confusing shape failure in get_variable.
            input_size = x.get_shape().with_rank(2).as_list()[1]
            if input_size is None:
                raise ValueError("Expecting input_size to be set.")
            maxout_Wo = tf.get_variable(name='Wo', shape=(input_size, 2*self._units),
                                        initializer=gaussian_initializer(mean=0.0, std=0.01))
            maxout_b = tf.get_variable(name='b', shape=(2*self._units,),
                                       initializer=tf.constant_initializer(0.0))
            # 1st. Compute on all the 2 channels and reshape.
            t = tf.matmul(x, maxout_Wo) + maxout_b
            t = tf.reshape(t, shape=(-1, self._units, 2))
            # 2nd. Do maxout op, now has shape: (None, self._units)
            maxout_t = tf.reduce_max(t, axis=-1)
            return maxout_t