def __init__(self, rng, input, n_in, n_out, dropout_rate, rescale,
             W=None, b=None, b_v=0., activation=None):
    """
    Hidden layer that applies dropout to its output.

    rescale: Boolean. Only meaningful when dropout is applied; when True,
        the input is divided by the keep probability (inverted dropout)
        so expected activations match at test time.
    """
    # NOTE(review): b_v is accepted but not forwarded to the base class
    # here — confirm whether the base __init__ consumes it elsewhere.
    if rescale:
        # Inverted-dropout scaling: divide by the retain (keep) probability.
        keep_prob = T.constant(1) - dropout_rate
        input = input / keep_prob
    # Build the underlying hidden layer on the (possibly rescaled) input.
    super(DropoutHiddenLayer, self).__init__(
        rng=rng, input=input, n_in=n_in, n_out=n_out,
        W=W, b=b, activation=activation)
    # Mask the layer's activations; skipped entirely when rate is zero.
    if dropout_rate > 0.:
        self.output = dropout_from_layer(rng, self.output, p=dropout_rate)