def tf_apply(self, x, update):
    """
    Applies the non-linearity selected by self.name to the input tensor x.
    The update flag is part of the layer interface and is not used here.
    """
    if self.name == 'elu':
        x = tf.nn.elu(features=x)
    elif self.name == 'none':
        x = tf.identity(input=x)
    elif self.name == 'relu':
        x = tf.nn.relu(features=x)
        if 'relu' in self.summary_labels:
            # Summarize the fraction of non-zero (non-saturated) ReLU activations.
            non_zero = tf.cast(x=tf.count_nonzero(input_tensor=x), dtype=tf.float32)
            size = tf.cast(x=tf.reduce_prod(input_tensor=tf.shape(input=x)), dtype=tf.float32)
            summary = tf.summary.scalar(name='relu', tensor=(non_zero / size))
            self.summaries.append(summary)
    elif self.name == 'selu':
        # Scaled exponential linear unit (Klambauer et al.),
        # see https://arxiv.org/pdf/1706.02515.pdf
        alpha = 1.6732632423543772848170429916717
        scale = 1.0507009873554804934193349852946
        negative = alpha * tf.nn.elu(features=x)
        x = scale * tf.where(condition=(x >= 0.0), x=x, y=negative)
    elif self.name == 'sigmoid':
        x = tf.sigmoid(x=x)
    elif self.name == 'softmax':
        x = tf.nn.softmax(logits=x)
    elif self.name == 'softplus':
        x = tf.nn.softplus(features=x)
    elif self.name == 'tanh':
        x = tf.nn.tanh(x=x)
    else:
        raise TensorForceError('Invalid non-linearity: {}'.format(self.name))
    return x
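
# --- Minimal standalone sketch (not part of the class above) ---
# The SELU branch builds the activation by hand from tf.nn.elu and tf.where.
# As a sanity check, the snippet below reproduces that construction with plain
# TensorFlow 1.x ops and compares it against the built-in tf.nn.selu; the
# TF 1.x session-style API is an assumption based on the code above.
import numpy as np
import tensorflow as tf

alpha = 1.6732632423543772848170429916717  # constants from the SELU paper
scale = 1.0507009873554804934193349852946

x = tf.placeholder(dtype=tf.float32, shape=(None,))
negative = alpha * tf.nn.elu(features=x)
manual_selu = scale * tf.where(condition=(x >= 0.0), x=x, y=negative)
builtin_selu = tf.nn.selu(features=x)

with tf.Session() as session:
    values = np.array([-2.0, -0.5, 0.0, 0.5, 2.0], dtype=np.float32)
    manual, builtin = session.run([manual_selu, builtin_selu], feed_dict={x: values})
    print(np.allclose(manual, builtin))  # expected: True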