def get_output_for(self, input, deterministic=False, **kwargs):
"""
Parameters
----------
input : tensor
output from the previous layer
deterministic : bool
        If True, dropout and rescaling are disabled; see notes
"""
if deterministic or self.p == 0:
return input
else:
# Using theano constant to prevent upcasting
one = T.constant(1)
retain_prob = one - self.p
if self.rescale:
input /= retain_prob
            # Draw the dropout mask over the first two axes only
            # (batch and feature), so the same mask is reused across
            # all remaining (e.g. spatial) dimensions.
            mask = _srng.binomial(input.shape[:2], p=retain_prob,
                                  dtype=theano.config.floatX)
            # Insert broadcastable axes so the 2D mask multiplies
            # every position along the trailing dimensions of the input.
            axes = [0, 1] + (['x'] * (input.ndim - 2))
            mask = mask.dimshuffle(*axes)
            return input * mask
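For context, here is a minimal, self-contained sketch of the same mask-sharing idea outside the layer class. It assumes `_srng` is a Theano `RandomStreams` instance and a 4D input of shape (batch, channels, height, width); the names `x`, `p`, and `dropout_shared` are illustrative and not part of the original code.

```python
import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams

_srng = RandomStreams(seed=42)  # stand-in for the module-level _srng

def dropout_shared(x, p=0.5):
    """Dropout with one mask per (batch, feature), shared over trailing axes."""
    retain_prob = T.constant(1) - p        # Theano constant avoids upcasting
    x = x / retain_prob                    # inverted-dropout rescaling
    # Tuple of symbolic scalars, equivalent to input.shape[:2] above
    mask = _srng.binomial((x.shape[0], x.shape[1]), p=retain_prob,
                          dtype=theano.config.floatX)
    # Broadcast the 2D mask over the two spatial axes of the 4D input
    mask = mask.dimshuffle(0, 1, 'x', 'x')
    return x * mask

x = T.tensor4('x')                         # (batch, channels, height, width)
f = theano.function([x], dropout_shared(x, p=0.5))
```

Because the mask is broadcast rather than redrawn per position, entire feature maps are dropped together, which is the behaviour the `dimshuffle` trick in the layer above implements.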