def test1(self):
    # Basic test that the optimization works when a broadcasted scalar
    # takes part in the elemwise expression: build exp(x + y + z)[0]
    # and check the Subtensor is lifted above the Elemwise.
    x = tensor.matrix('x')
    y = tensor.scalar('y')
    z = tensor.matrix('z')
    f = function([x, y, z], tensor.exp(x + y + z)[0], mode=mode_opt)

    # Check stacktrace was copied over correctly after opt was applied
    self.assertTrue(check_stack_trace(f, ops_to_check=[
        Subtensor, tensor.DimShuffle]))

    # After the lift, the optimized graph should be exactly:
    # Subtensor, DimShuffle, Subtensor, then a single fused
    # Composite{add,add} elemwise -- 4 nodes in total.
    prog = f.maker.fgraph.toposort()
    assert isinstance(prog[0].op, tensor.Subtensor)
    assert isinstance(prog[1].op, tensor.DimShuffle)
    assert isinstance(prog[2].op, tensor.Subtensor)
    assert isinstance(prog[3].op.scalar_op,
                      theano.scalar.Composite)  # Composite{add,add}
    assert len(prog) == 4

    # Execute once so DebugMode (if active) can verify the computation.
    f([[0, 1], [2, 3]], 4, [[4, 5], [6, 7]])