def hard_sigmoid(x, name=None):
    """Hard sigmoid: a cheap piecewise-linear approximation of sigmoid.

    Computes ``clip(0.2 * x + 0.5, 0, 1)``, which matches the standard
    "hard sigmoid" used e.g. by Keras. Much faster than the true
    sigmoid since it avoids the exponential.

    Parameters
    ----------
    x : Tensor
        The tensor to apply the nonlinearity to. Any floating-point
        dtype is supported; the clip bounds are converted to ``x``'s
        dtype automatically.
    name : str, optional
        Variable scope to use.

    Returns
    -------
    Tensor
        Output of the nonlinearity, same shape and dtype as ``x``.
    """
    # NOTE(review): tf.op_scope is removed in TF >= 1.0 — migrate to
    # tf.name_scope(name, 'hard_sigmoid', [x]) when upgrading.
    with tf.op_scope([x], name, 'hard_sigmoid'):
        x = (0.2 * x) + 0.5
        # Plain Python scalars are converted to x.dtype by TF, so this
        # works for float16/float32/float64 inputs alike (the previous
        # explicit float32 casts broke non-float32 tensors).
        x = tf.clip_by_value(x, 0., 1.)
        return x