# Imports reconstructed from the context these snippets were extracted from
# (Theano's theano/tensor/tests/test_opt.py); module paths are best guesses
# and may need adjusting for your Theano version.
import copy

import numpy
from six.moves import xrange

import theano
import theano.tensor as T
from theano import compile, config, function, shared, tensor
from theano.compile import DeepCopyOp, deep_copy_op
from theano.gof.opt import check_stack_trace
from theano.tensor import tile
from theano.tensor.elemwise import DimShuffle
from theano.tensor.opt import Shape_i
from theano.tests import unittest_tools as utt

try:
    from nose.plugins.skip import SkipTest
except ImportError:
    from unittest import SkipTest


def setUp(self):
    # The optimization requires the shape feature, so we need to compile in
    # FAST_RUN mode.
    mode = theano.config.mode
    if mode == 'FAST_COMPILE':
        mode = 'FAST_RUN'
    self.mode = compile.mode.get_mode(mode)

def test_broadcasted_dims(self):
    # This tests a case that caused a crash during optimization.
    shp = (1, 1, 1, 1)
    rng = numpy.random.RandomState(utt.fetch_seed())
    a = shared(rng.rand(*shp).astype(config.floatX))
    out = self.max_pool_c01b(a, 1, 1, 1)
    # max_pool_c01b uses -inf, which would trigger a DebugMode error,
    # so disable the isfinite check.
    mode = copy.copy(theano.compile.get_default_mode())
    mode.check_isfinite = False
    f = theano.function([], out, mode=mode)
    f()

def test_no_shapeopt(self):
    # Test that a basic example works even when ShapeOpt is excluded.
    X = T.matrix()
    expr = X.shape[0]
    mode = theano.compile.get_default_mode().excluding('ShapeOpt')
    f = theano.function([X], expr, mode=mode)
    print(f([[1, 2], [2, 3]]))

def test_local_remove_all_assert1(self):
    # Remove assert conditions whose truth value is unknown.
    mode = theano.config.mode
    if mode == 'FAST_COMPILE':
        mode = 'FAST_RUN'
    mode = compile.mode.get_mode(mode).including('local_remove_all_assert')
    x = T.scalar()
    y = T.scalar()
    f = theano.function([x, y], theano.tensor.opt.assert_op(x, y),
                        mode=mode)
    if isinstance(mode, theano.compile.debugmode.DebugMode):
        # DebugMode will run the original version with the Assert.
        self.assertRaises(AssertionError, f, 1, 0)
    else:
        f(1, 0)  # Without the opt this would raise; the opt removed the Assert.
    topo = f.maker.fgraph.toposort()
    assert len(topo) == 1, topo
    assert topo[0].op == deep_copy_op, topo
    # When opts tagged 'unsafe' are excluded, the Assert must stay in the graph.
    mode = compile.mode.get_default_mode()
    a = theano.tensor.opt.assert_op(x, T.eq(x, 0).any())
    f = theano.function([x], a, mode=mode.excluding('unsafe'))
    topo = f.maker.fgraph.toposort()
    a_op = [n for n in topo if isinstance(n.op, T.opt.Assert)]
    assert len(a_op) == 1

def test_local_mul_specialize():
    mode = theano.config.mode
    if mode == 'FAST_COMPILE':
        mode = 'FAST_RUN'
    mode = compile.mode.get_mode(mode)
    mode = mode.excluding('fusion')
    v = T.vector()
    m = T.vector()
    # v * 1 is a no-op: only a DeepCopyOp should remain.
    f = function([v], v * 1, mode=mode)
    nodes = [node.op for node in f.maker.fgraph.toposort()]
    assert nodes == [deep_copy_op]
    # v * 0 becomes zeros of v's shape.
    f = function([v], v * 0, mode=mode)
    nodes = [node.op for node in f.maker.fgraph.toposort()]
    assert nodes == [Shape_i(0), T.alloc]
    # v * (-1) specializes to a negation.
    f = function([v], v * (-1), mode=mode)
    nodes = [node.op for node in f.maker.fgraph.toposort()]
    assert nodes == [T.neg]
    f = function([v, m], v * 1 * (-m), mode=mode)
    nodes = [node.op for node in f.maker.fgraph.toposort()]
    assert nodes == [T.mul]
    f = function([v, m], v * 0 * (-m), mode=mode)
    nodes = [node.op for node in f.maker.fgraph.toposort()]
    assert nodes == [Shape_i(0), T.alloc]
    f = function([v, m], v * (-1) * (-m), mode=mode)
    nodes = [node.op for node in f.maker.fgraph.toposort()]
    assert nodes == [T.mul]
    f = function([v, m], v * (-1) * m, mode=mode)
    nodes = [node.op for node in f.maker.fgraph.toposort()]
    assert nodes == [T.mul]

def test_local_useless_tile(self):
    v = T.vector()
    m = T.matrix()
    mode = None
    if theano.config.mode == "FAST_COMPILE":
        mode = "FAST_RUN"
    for var, data in [(v, [1, 2, 3]), (m, [[1, 2], [3, 4]])]:
        # When len(repeat pattern) <= var.ndim, everything is removed.
        # for ndim in range(1, var.ndim):
        for ndim in range(var.ndim + 1):
            f = theano.function([var], tile(var, (1,) * ndim), mode=mode)
            topo = f.maker.fgraph.toposort()
            assert len(topo) == 1
            assert isinstance(topo[0].op, compile.DeepCopyOp)
            f(data)
            # In this case the opt only removes nodes;
            # no need to check_stack_trace.
        # When len(repeat pattern) > var.ndim, only a dimshuffle should be
        # left, but there can be a DeepCopy as well.
        for ndim in range(var.ndim + 1, var.ndim + 3):
            f = theano.function([var], tile(var, (1,) * ndim), mode=mode)
            topo = f.maker.fgraph.toposort()
            assert len(topo) <= 2
            assert isinstance(topo[0].op, DimShuffle)
            assert check_stack_trace(f, ops_to_check=[DimShuffle])
            f(data)

def test_local_pow_specialize_device_more_aggressive_on_cpu():
    mode = theano.config.mode
    if mode == 'FAST_COMPILE':
        mode = 'FAST_RUN'
    mode = compile.mode.get_mode(mode)
    mode = mode.excluding('fusion').excluding('gpu')
    v = T.vector()
    val = numpy.arange(10, dtype=theano.config.floatX)
    val_no0 = numpy.arange(1, 10, dtype=theano.config.floatX)
    f = function([v], v ** 15, mode=mode)
    nodes = [node.op for node in f.maker.fgraph.toposort()]
    assert len(nodes) == 1
    assert len(f.maker.fgraph.toposort()[0].op.scalar_op.fgraph.apply_nodes) == 6
    assert isinstance(nodes[0].scalar_op, theano.scalar.Composite)
    utt.assert_allclose(f(val), val ** 15)
    f = function([v], v ** (-15), mode=mode)
    nodes = [node.op for node in f.maker.fgraph.toposort()]
    assert len(nodes) == 2
    assert len(f.maker.fgraph.toposort()[0].op.scalar_op.fgraph.apply_nodes) == 6
    assert isinstance(nodes[0].scalar_op, theano.scalar.Composite)
    assert isinstance(nodes[-1].scalar_op, theano.scalar.basic.Inv)
    utt.assert_allclose(f(val_no0), val_no0 ** (-15))
    f = function([v], v ** 16, mode=mode)
    nodes = [node.op for node in f.maker.fgraph.toposort()]
    assert len(nodes) == 1
    assert len(f.maker.fgraph.toposort()[0].op.scalar_op.fgraph.apply_nodes) == 4
    assert isinstance(nodes[0].scalar_op, theano.scalar.Composite)
    utt.assert_allclose(f(val), val ** 16)
    f = function([v], v ** (-16), mode=mode)
    nodes = [node.op for node in f.maker.fgraph.toposort()]
    assert len(nodes) == 2
    assert len(f.maker.fgraph.toposort()[0].op.scalar_op.fgraph.apply_nodes) == 4
    assert isinstance(nodes[0].scalar_op, theano.scalar.Composite)
    assert isinstance(nodes[-1].scalar_op, theano.scalar.basic.Inv)
    utt.assert_allclose(f(val_no0), val_no0 ** (-16))

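# Worked gloss (mine, not from the suite) on the Composite sizes asserted in
# the test above, assuming pow is expanded by binary square-and-multiply:
#   x**16 = (((x**2)**2)**2)**2      -> 4 squarings            -> 4 scalar ops
#   x**15 = x * x**2 * x**4 * x**8   -> 3 squarings + 3 muls   -> 6 scalar ops
# Negative exponents reuse the positive-power graph and add one Inv on top,
# which is why those cases assert two nodes.
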
def test_local_useless_rebroadcast(self):
    mode = theano.compile.get_default_mode().including('canonicalize')
    v1 = T.vector()
    v2 = T.vector()
    j = T.join(0, v1, v2)
    f = theano.function([v1, v2], j, mode=mode)
    f([1, 2], [3, 4, 5])
    e = f.maker.fgraph.toposort()
    assert len([n for n in e if isinstance(n.op, T.Rebroadcast)]) == 0
    assert check_stack_trace(f, ops_to_check='all')

def test_rebroadcast_rebroadcast(self):
    mode = theano.compile.get_default_mode().including('canonicalize')
    m = T.matrix()
    s = T.addbroadcast(m, 0, 1)
    v = T.unbroadcast(s, 1)
    f = theano.function([m], v, mode=mode)
    f([[76]])
    e = f.maker.fgraph.toposort()
    rebroadcast_nodes = [n for n in e if isinstance(n.op, T.Rebroadcast)]
    # The two Rebroadcasts are merged into one that only touches axis 0.
    assert len(rebroadcast_nodes) == 1
    assert rebroadcast_nodes[0].op.axis == {0: True}

def setUp(self):
    self.mode = theano.compile.get_default_mode().including(
        'canonicalize', 'local_fill_to_alloc')

def setUp(self):
    mode = theano.compile.get_default_mode()
    self.mode = mode.including('local_func_inv')

def test_constant_get_stabilized():
    """
    Currently Theano enables the constant_folding optimization before the
    stabilization optimizations, so some stabilizations never get applied
    and inf values can appear when they should not.

    .. note:: We can't simply move constant_folding to the specialize
        phase, as that breaks other optimizations. We would need to
        partially duplicate some canonicalize optimizations in specialize
        to fix this issue.
    """
    x2 = T.scalar()
    y2 = T.log(1 + T.exp(x2))
    mode = theano.compile.get_default_mode()
    mode.check_isfinite = False
    f2 = theano.function([x2], y2, mode=mode)
    try:
        assert len(f2.maker.fgraph.toposort()) == 1
        assert f2.maker.fgraph.toposort()[0].op == \
            theano.tensor.nnet.sigm.softplus
        assert f2(800) == 800
        x = T.as_tensor_variable(800)
        y = T.log(1 + T.exp(x))
        f = theano.function([], y, mode=mode)
        assert len(f.maker.fgraph.toposort()) == 0
        assert numpy.isinf(f())
        # When this error is fixed, the following line should be ok.
        assert f() == 800, f()
    except AssertionError:
        raise SkipTest('Theano optimizes constant before stabilization. '
                       'This breaks stabilization optimization in some '
                       'cases. See #504.')

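# A minimal numpy sketch (mine, not from the original suite) of the
# instability the softplus stabilization avoids: evaluating log(1 + exp(x))
# directly overflows for large x, while the algebraically equivalent
# stabilized form stays finite.
def softplus_overflow_demo(x=800.0):
    with numpy.errstate(over='ignore'):
        naive = numpy.log1p(numpy.exp(x))  # exp(800) overflows to inf
    # softplus(x) == max(x, 0) + log1p(exp(-|x|)), which never overflows.
    stable = numpy.maximum(x, 0.0) + numpy.log1p(numpy.exp(-abs(x)))
    return naive, stable  # (inf, 800.0)
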
def setUp(self):
    self.mode = theano.compile.mode.get_default_mode().including(
        'canonicalize', 'fast_run').excluding('gpu', 'fusion')
    self.mode._optimizer.position_cutoff = 1.50001
    if (theano.config.cxx == '' and
            not theano.scalar.basic_scipy.imported_scipy_special):
        raise SkipTest("erf needs a c++ compiler or scipy")

def setUp(self):
    self.mode = theano.compile.get_default_mode().including('canonicalize',
                                                            'specialize')

def setUp(self):
    self.mode = theano.compile.get_default_mode().including(
        'canonicalize',
        'specialize',
        'uncanonicalize', 'local_max_and_argmax')

def setUp(self):
    self.mode = theano.compile.get_default_mode().including('canonicalize')

def test_0(self):
    mode = theano.compile.get_default_mode().including(
        'local_useless_reshape')
    i = T.iscalar('i')
    m = theano.tensor.mgrid[0:i,]
    f = theano.function([i], m, mode=mode)
    topo = f.maker.fgraph.toposort()
    assert not any(isinstance(n.op, tensor.basic.Reshape) for n in topo)

def test_1(self):
    x = theano.tensor.matrix('x')
    r = x.reshape(x.shape)
    m0 = theano.compile.get_default_mode()
    m1 = m0.including('local_useless_reshape')
    f1 = theano.function([x], r, mode=m1)
    topo = f1.maker.fgraph.toposort()
    assert not any(isinstance(n.op, tensor.basic.Reshape) for n in topo)
    m2 = m1.excluding('ShapeOpt')
    f2 = theano.function([x], r, mode=m2)
    topo = f2.maker.fgraph.toposort()
    assert not any(isinstance(n.op, tensor.basic.Reshape) for n in topo)

def test_2(self):
    x = theano.tensor.matrix('x')
    r = x.reshape([Shape_i(i)(x) for i in xrange(x.ndim)])
    m0 = theano.compile.get_default_mode()
    m1 = m0.including('local_useless_reshape')
    f1 = theano.function([x], r, mode=m1)
    topo = f1.maker.fgraph.toposort()
    assert not any(isinstance(n.op, tensor.basic.Reshape) for n in topo)
    m2 = m1.excluding('ShapeOpt')
    f2 = theano.function([x], r, mode=m2)
    topo = f2.maker.fgraph.toposort()
    assert not any(isinstance(n.op, tensor.basic.Reshape) for n in topo)

def test_local_reshape_lift():
    x = tensor.tensor4()
    out = T.exp(x).reshape([x.size])
    assert out.ndim == 1
    mode = compile.mode.get_default_mode()
    mode = mode.including('local_reshape_lift')
    f = theano.function([x], out, mode=mode)
    f(numpy.random.rand(5, 4, 3, 2).astype(config.floatX))
    topo = f.maker.fgraph.toposort()
    assert isinstance(topo[-2].op, tensor.Reshape)
    assert isinstance(topo[-1].op, tensor.Elemwise)

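# Hedged illustration (mine, not from the original suite) of what
# local_reshape_lift does: it moves a Reshape above an Elemwise, rewriting
# exp(x).reshape(s) into exp(x.reshape(s)) -- hence the Reshape-before-Elemwise
# order asserted at the end of the test above.
def reshape_lift_by_hand(x):
    before = T.exp(x).reshape([x.size])  # graph as written by the user
    after = T.exp(x.reshape([x.size]))   # equivalent graph after the lift
    return before, after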