def test_transpose_basic(self):
    # Assumes the module-level imports used by this test file:
    #   import theano
    #   import theano.tensor as T
    #   from theano import config, printing
    #   from nose.plugins.skip import SkipTest

    # this should be a transposed softmax: exp(c) is normalized
    # over axis=0 (columns) instead of axis=1 (rows)
    c = T.matrix()
    p_y = T.exp(c) / T.exp(c).sum(axis=0)

    # test that the compiled function contains softmax and no div.
    f = theano.function([c], p_y)
    # printing.debugprint(f)

    # test that the gradient graph also contains softmax and no div.
    backup = config.warn.sum_div_dimshuffle_bug
    config.warn.sum_div_dimshuffle_bug = False
    try:
        g = theano.function([c], T.grad(p_y.sum(), c))
    finally:
        # restore the warning flag whether or not compilation succeeded
        config.warn.sum_div_dimshuffle_bug = backup
    # printing.debugprint(g)
    raise SkipTest('Optimization not enabled for the moment')
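
# A minimal NumPy sketch (not part of the original test; the helper names are
# illustrative) showing why the expression above is a "transposed softmax":
# normalizing exp(c) over axis=0 is the same as applying a row-wise softmax
# to c.T and transposing the result back.
def _transposed_softmax_sketch():
    import numpy as np

    def softmax_rows(x):
        # numerically stable row-wise softmax
        e = np.exp(x - x.max(axis=1, keepdims=True))
        return e / e.sum(axis=1, keepdims=True)

    c = np.random.randn(3, 4)
    # column-wise normalization, as in the test expression
    p_y = np.exp(c) / np.exp(c).sum(axis=0)
    # equals a row-wise softmax over the transpose, transposed back
    assert np.allclose(p_y, softmax_rows(c.T).T)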