def __init__(self, rng, name, is_train, x, n_in, n_out, W=None, b=None, activation=ReLU, p=0.5):
    """Fully-connected layer with inverted dropout.

    Parameters
    ----------
    rng : numpy RandomState; seeds both the weight init and the dropout stream.
    name : identifier stored on the layer.
    is_train : symbolic int flag; nonzero selects the dropout (training) path.
    x : symbolic input minibatch.
    n_in, n_out : fan-in / fan-out of the layer.
    W, b : optional pre-built shared parameters; created here when None.
    activation : elementwise nonlinearity, or None for a purely linear layer.
    p : probability of KEEPING a unit (i.e. NOT dropping it out).
    """
    self.name = name
    self.x = x

    # Glorot/Bengio uniform initialisation range sqrt(6 / (fan_in + fan_out)).
    init_range = np.sqrt(6. / (n_in + n_out))

    if W is None:
        w_init = np.asarray(
            rng.uniform(low=-init_range, high=init_range, size=(n_in, n_out)),
            dtype=theano.config.floatX)
        # Sigmoid layers conventionally use a 4x wider init range.
        if activation == theano.tensor.nnet.sigmoid:
            w_init *= 4
        W = theano.shared(value=w_init, name='W', borrow=True)

    if b is None:
        # NOTE(review): biases start at +init_range rather than the usual
        # zeros — presumably to push ReLU units into their active region
        # at the start; confirm this is intentional.
        b_init = np.ones((n_out,), dtype=theano.config.floatX) * np.cast[theano.config.floatX](init_range)
        b = theano.shared(value=b_init, name='b', borrow=True)

    self.W = W
    self.b = b

    pre_activation = T.dot(x, self.W) + self.b
    clean_output = pre_activation if activation is None else activation(pre_activation)

    def drop(h, rng=rng, p=p):
        """Zero units of `h` independently; each survives with probability p."""
        stream = RandomStreams(rng.randint(999999))
        keep_mask = stream.binomial(n=1, p=p, size=h.shape, dtype=theano.config.floatX)
        return h * keep_mask

    # Inverted dropout: scale by 1/p at train time so the test-time path
    # needs no rescaling.
    dropped_output = drop(np.cast[theano.config.floatX](1. / p) * clean_output)
    self.output = T.switch(T.neq(is_train, 0), dropped_output, clean_output)

    self.params = [self.W, self.b]
# (removed scraped blog-page footer text: "评论列表" [comment list] / "文章目录" [article table of contents])