import numpy
import theano
import theano.tensor as tensor


def shared_dropout_layer(shape, use_noise, trng, value, scaled=True):
    # `value` is the retain probability (the binomial p), i.e. 1 - dropout rate
    if scaled:
        # inverted dropout: re-scale by 1/value at training time,
        # so we don't need to re-scale at test time
        proj = tensor.switch(
            use_noise,
            trng.binomial(shape, p=value, n=1,
                          dtype='float32') / value,
            theano.shared(numpy.float32(1.)))
    else:
        # classic dropout: no re-scaling at training time; instead the
        # activations are scaled by `value` at test time
        proj = tensor.switch(
            use_noise,
            trng.binomial(shape, p=value, n=1,
                          dtype='float32'),
            theano.shared(numpy.float32(value)))
    return proj
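
For reference, a minimal usage sketch follows; it is an assumption based on how `tensor.switch` is used above, not code from this document. It treats `use_noise` as a shared scalar flipped to 1. for training and 0. for inference, multiplies the returned mask element-wise into the activations, and uses a hypothetical keep probability of 0.8.

from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams

trng = RandomStreams(seed=1234)
use_noise = theano.shared(numpy.float32(1.))  # 1. = training mode

x = tensor.matrix('x', dtype='float32')
# keep each unit with probability 0.8 (dropout rate 0.2)
mask = shared_dropout_layer(x.shape, use_noise, trng, 0.8)
y = x * mask

f = theano.function([x], y)
out_train = f(numpy.ones((2, 4), dtype='float32'))

use_noise.set_value(numpy.float32(0.))  # inference: mask becomes a constant
out_test = f(numpy.ones((2, 4), dtype='float32'))

Because `scaled=True` by default, surviving units in `out_train` are re-scaled by 1/0.8, while `out_test` equals the input exactly, so no test-time re-scaling is needed.
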
# feedforward layer: affine transformation + point-wise nonlinearity
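As a hedged sketch of what such a layer typically looks like in this Theano style (the function name `fflayer`, the `tparams` parameter dictionary, and the `prefix`-based naming scheme are illustrative assumptions, not taken from this document):

def fflayer(tparams, state_below, prefix='ff', activ=tensor.tanh):
    # affine transformation (state_below . W + b) followed by a
    # point-wise nonlinearity
    return activ(tensor.dot(state_below, tparams[prefix + '_W']) +
                 tparams[prefix + '_b'])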