def to_function(self):
    # Resolve the configured nonlinearity name (case-insensitively) to an
    # activation wrapper instance.
    nonlinearity = self.nonlinearity.lower()
    if nonlinearity == "clipped_relu":
        return clipped_relu()
    if nonlinearity == "crelu":
        return crelu()
    if nonlinearity == "elu":
        return elu()
    if nonlinearity == "hard_sigmoid":
        return hard_sigmoid()
    if nonlinearity == "leaky_relu":
        return leaky_relu()
    if nonlinearity == "relu":
        return relu()
    if nonlinearity == "sigmoid":
        return sigmoid()
    if nonlinearity == "softmax":
        return softmax()
    if nonlinearity == "softplus":
        return softplus()
    if nonlinearity == "tanh":
        return tanh()
    if nonlinearity == "bst":
        return bst()
    raise NotImplementedError()
def to_function(self):
    # Same string dispatch as above, without the "bst" activation.
    nonlinearity = self.nonlinearity.lower()
    if nonlinearity == "clipped_relu":
        return clipped_relu()
    if nonlinearity == "crelu":
        return crelu()
    if nonlinearity == "elu":
        return elu()
    if nonlinearity == "hard_sigmoid":
        return hard_sigmoid()
    if nonlinearity == "leaky_relu":
        return leaky_relu()
    if nonlinearity == "relu":
        return relu()
    if nonlinearity == "sigmoid":
        return sigmoid()
    if nonlinearity == "softmax":
        return softmax()
    if nonlinearity == "softplus":
        return softplus()
    if nonlinearity == "tanh":
        return tanh()
    raise NotImplementedError()
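The to_function methods above are used roughly as follows; this is a minimal sketch with assumed names (the Activation holder class and the relu/hard_sigmoid stand-ins are illustrative, not taken from the original projects, where each wrapper is a thin callable around chainer.functions):

import chainer.functions as F

# Hypothetical stand-ins for the wrapper classes referenced above.
class relu(object):
    def __call__(self, x):
        return F.relu(x)

class hard_sigmoid(object):
    def __call__(self, x):
        return F.hard_sigmoid(x)

class Activation(object):
    # Stores a nonlinearity name and resolves it with the same kind of
    # string dispatch as to_function above.
    def __init__(self, nonlinearity):
        self.nonlinearity = nonlinearity

    def to_function(self):
        nonlinearity = self.nonlinearity.lower()
        if nonlinearity == "relu":
            return relu()
        if nonlinearity == "hard_sigmoid":
            return hard_sigmoid()
        raise NotImplementedError()

f = Activation("hard_sigmoid").to_function()
# f can now be applied to a chainer.Variable inside a model definition.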
def ramp_loss(z):
    """
    Ramp loss function.
        l(z) = 1            if z <= -1
        l(z) = (1 - z) / 2  if -1 < z <= 1
        l(z) = 0            if z > 1
    """
    # hard_sigmoid(x) = clip(0.2 * x + 0.5, 0, 1), so hard_sigmoid(-2.5 * z)
    # equals clip((1 - z) / 2, 0, 1), which is exactly the ramp loss above.
    return F.hard_sigmoid(-2.5 * z)
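A quick sanity check of that identity, as a NumPy-only sketch (hard_sigmoid_np mirrors the clip(0.2 * x + 0.5, 0, 1) formula behind Chainer's F.hard_sigmoid; the helper names are assumptions):

import numpy as np

def hard_sigmoid_np(x):
    # Same formula as Chainer's hard_sigmoid: clip(0.2 * x + 0.5, 0, 1).
    return np.minimum(1.0, np.maximum(0.0, 0.2 * x + 0.5))

def ramp_loss_reference(z):
    # Piecewise ramp loss from the docstring, written as a single clip.
    return np.clip((1.0 - z) / 2.0, 0.0, 1.0)

z = np.linspace(-3.0, 3.0, 13)
assert np.allclose(hard_sigmoid_np(-2.5 * z), ramp_loss_reference(z))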
def check_forward(self, x_data):
    x = chainer.Variable(x_data)
    y = functions.hard_sigmoid(x)
    self.assertIs(y.data.dtype, x_data.dtype)
    # Reference value computed with the same formula: clip(0.2 * x + 0.5, 0, 1).
    expect = numpy.minimum(1.0, numpy.maximum(0.0, self.x * 0.2 + 0.5))
    gradient_check.assert_allclose(
        y.data, expect, **self.check_forward_option)
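For context, check_forward relies on fixtures such as self.x and self.check_forward_option; a hypothetical minimal test class along these lines would exercise it (the class name, array shape, and tolerances are assumptions, and gradient_check.assert_allclose is the older Chainer API used by the snippet above):

import unittest
import numpy
import chainer
from chainer import functions, gradient_check

class TestHardSigmoid(unittest.TestCase):

    def setUp(self):
        # Fixtures referenced by check_forward above.
        self.x = numpy.random.uniform(-5, 5, (3, 4)).astype(numpy.float32)
        self.check_forward_option = {'atol': 1e-4, 'rtol': 1e-4}

    def check_forward(self, x_data):
        y = functions.hard_sigmoid(chainer.Variable(x_data))
        expect = numpy.minimum(1.0, numpy.maximum(0.0, self.x * 0.2 + 0.5))
        gradient_check.assert_allclose(y.data, expect, **self.check_forward_option)

    def test_forward_cpu(self):
        self.check_forward(self.x)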
def __init__(self):
    self._function = "hard_sigmoid"

def __call__(self, x):
    return F.hard_sigmoid(x)
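The __init__/__call__ pair above belongs to a small activation-wrapper class; a hypothetical standalone version and its usage might look like this (the class name hard_sigmoid and the sample input are assumptions):

import numpy as np
import chainer
import chainer.functions as F

class hard_sigmoid(object):
    # Thin callable wrapper: remembers its name and applies F.hard_sigmoid.
    def __init__(self):
        self._function = "hard_sigmoid"

    def __call__(self, x):
        return F.hard_sigmoid(x)

activation = hard_sigmoid()
x = chainer.Variable(np.array([-5.0, -1.0, 0.0, 1.0, 5.0], dtype=np.float32))
y = activation(x)
# y.data is approximately [0.0, 0.3, 0.5, 0.7, 1.0],
# since hard_sigmoid clips 0.2 * x + 0.5 to the range [0, 1].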