def prelu(self):
    def _prelu(_x):
        # Flatten all non-batch dimensions so one alpha is learned per
        # flattened feature, then restore the original shape at the end.
        orig_shape = self.shape(_x)
        _x = tf.reshape(_x, [orig_shape[0], -1])
        with tf.variable_scope(self.generate_name(), reuse=self._reuse):
            alphas = tf.get_variable('prelu',
                                     _x.get_shape()[-1],
                                     initializer=tf.random_normal_initializer(mean=0.0, stddev=0.01),
                                     dtype=tf.float32)
        pos = tf.nn.relu(_x)                      # max(0, x)
        neg = alphas * (_x - tf.abs(_x)) * 0.5    # alpha * min(0, x)
        self.add_weights(alphas)
        return tf.reshape(pos + neg, orig_shape)
    return _prelu
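The neg line relies on the identity (x - |x|) / 2 = min(0, x), so pos + neg computes the PReLU formula f(x) = max(0, x) + alpha * min(0, x) with a learnable alpha per flattened feature. A minimal NumPy sketch of that identity, assuming a fixed alpha = 0.25 purely for illustration:

import numpy as np

x = np.array([-2.0, -0.5, 0.0, 1.5])
alpha = 0.25
pos = np.maximum(x, 0.0)                 # max(0, x)
neg = alpha * (x - np.abs(x)) * 0.5      # alpha * min(0, x)
print(pos + neg)                         # [-0.5   -0.125  0.     1.5  ]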