import numpy as np
import theano
import theano.tensor as T

# NOTE: the DARN import path below is an assumption; point it at wherever the
# DARN model class lives in your project.
from darn import DARN

floatX = theano.config.floatX


def sigmoid(x):
    """NumPy reference sigmoid used to check the Theano outputs."""
    return 1. / (1. + np.exp(-x))


def test_darn(dim_in=5, dim_h=3, dim_out=7, n_samples=13):
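    """Compare DARN's conditional output and autoregressive NLL to NumPy.

    The compiled Theano graph is checked step by step against a hand-written
    NumPy reference, and the sampler is compiled and run once at the end.
    """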
    darn = DARN(dim_in, dim_h, dim_out, 2,
                h_act='T.tanh', out_act='T.nnet.sigmoid')
    tparams = darn.set_tparams()
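    # Symbolic graph: C = darn(H) is the conditional contribution to the
    # autoregressive biases, and NLP is the negative log-probability of X
    # under the resulting autoregressive Bernoulli model.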
    X = T.matrix('X', dtype=floatX)
    H = T.matrix('H', dtype=floatX)
    C = darn(H)
    NLP = darn.neg_log_prob(X, C)
    f = theano.function([X, H], [C, NLP])
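    # Random binary data: x plays the role of the observed outputs and h is
    # the conditioning input.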
    x = np.random.randint(0, 2, size=(n_samples, dim_out)).astype(floatX)
    h = np.random.randint(0, 2, size=(n_samples, dim_in)).astype(floatX)
    c_t, nlp_t = f(x, h)
    print c_t.shape
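    # NumPy reference for the conditional part: one tanh hidden layer followed
    # by a linear readout (the sigmoid is only applied later, inside the NLL).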
    d_np = np.tanh(np.dot(h, darn.params['W0']) + darn.params['b0'])
    c_np = np.dot(d_np, darn.params['W1']) + darn.params['b1']
    assert np.allclose(c_t, c_np), (c_t, c_np)
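    # NumPy reference for the autoregressive logits: unit i gets the
    # autoregressive bias 'bar', the conditional term c, and contributions
    # from units j > i through the strictly triangular weights 'War'.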
    z_np = (np.zeros((n_samples, dim_out)).astype(floatX)
            + darn.params['bar'][None, :] + c_np)
    for i in xrange(dim_out):
        for j in xrange(i + 1, dim_out):
            z_np[:, i] += darn.params['War'][j, i] * x[:, j]
    p_np = sigmoid(z_np)
    p_np = np.clip(p_np, 1e-7, 1 - 1e-7)
    nlp_np = (- x * np.log(p_np) - (1 - x) * np.log(1 - p_np)).sum(axis=1)
    assert np.allclose(nlp_t, nlp_np), (nlp_t, nlp_np)
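    # Finally, compile the sampler and draw from it once to make sure the
    # sampling graph (with its random-stream updates) builds and runs.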
    samples, updates_s = darn.sample(C, n_samples=n_samples - 1)
    f_sample = theano.function([H], samples, updates=updates_s)
    print f_sample(h)
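

# For reference, a minimal NumPy sketch of the kind of autoregressive
# Bernoulli sampling DARN.sample is expected to perform, written to mirror
# the NLL reference in test_darn above.  The names (c, b_ar, W_ar) and the
# sampling order are assumptions, not DARN's actual implementation.
def _sample_darn_np(c, b_ar, W_ar, rng):
    """Hedged NumPy sketch of DARN-style autoregressive Bernoulli sampling."""
    n, dim = c.shape
    x = np.zeros((n, dim)).astype(floatX)
    # Unit i conditions on units j > i, so sample from the last dimension
    # backwards to the first.
    for i in xrange(dim - 1, -1, -1):
        z_i = b_ar[i] + c[:, i]
        for j in xrange(i + 1, dim):
            z_i += W_ar[j, i] * x[:, j]
        p_i = sigmoid(z_i)
        x[:, i] = (rng.uniform(size=n) < p_i).astype(floatX)
    return x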