def foldr(fn, elems, initializer=None, name=None):
    """Reduce elems using fn to combine them from right to left.

    # Arguments
        fn: Callable that will be called upon each element in elems and an
            accumulator, for instance lambda acc, x: acc + x
        elems: tensor
        initializer: The first value used (elems[-1] in case of None)
        name: A string name for the foldr node in the graph

    # Returns
        Same type and shape as initializer
    """
    if initializer is None:
        # Seed the fold with the last element and fold over the rest.
        initializer, elems = elems[-1], elems[:-1]

    # Theano's foldr calls its callable as fn(element, accumulator), while
    # this API promises fn(accumulator, element) — flip the arguments.
    def swapped(x, acc):
        return fn(acc, x)

    return theano.foldr(swapped, elems, initializer, name=name)[0]
# Example source snippets of Python foldr() implementations (collected copies).
def foldr(fn, elems, initializer=None, name=None):
    """Reduce elems using fn to combine them from right to left.

    # Arguments
        fn: Callable that will be called upon each element in elems and an
            accumulator, for instance lambda acc, x: acc + x
        elems: tensor
        initializer: The first value used (elems[-1] in case of None)
        name: A string name for the foldr node in the graph

    # Returns
        Same type and shape as initializer
    """
    if initializer is None:
        initializer = elems[-1]
        elems = elems[:-1]

    # theano.foldr passes the current element first and the accumulator
    # second, which is the reverse of this function's contract.
    flipped = lambda elem, acc: fn(acc, elem)
    outputs = theano.foldr(flipped, elems, initializer, name=name)
    return outputs[0]
def foldr(fn, elems, initializer=None, name=None):
    """Reduce elems using fn to combine them from right to left.

    # Arguments
        fn: Callable that will be called upon each element in elems and an
            accumulator, for instance lambda acc, x: acc + x
        elems: tensor
        initializer: The first value used (elems[-1] in case of None)
        name: A string name for the foldr node in the graph

    # Returns
        Same type and shape as initializer
    """
    if initializer is None:
        # Default: last element seeds the accumulator, remainder is folded.
        initializer = elems[-1]
        elems = elems[:-1]

    def step(element, accumulated):
        # Restore the (accumulator, element) order this API promises;
        # Theano invokes the step callable as step(element, accumulator).
        return fn(accumulated, element)

    return theano.foldr(step, elems, initializer, name=name)[0]
def foldr(fn, elems, initializer=None, name=None):
    """Reduce elems using fn to combine them from right to left.

    # Arguments
        fn: Callable that will be called upon each element in elems and an
            accumulator, for instance lambda acc, x: acc + x
        elems: tensor
        initializer: The first value used (elems[-1] in case of None)
        name: A string name for the foldr node in the graph

    # Returns
        Same type and shape as initializer
    """
    seed = initializer
    body = elems
    if seed is None:
        # No explicit seed: use the rightmost element as the starting value.
        seed = elems[-1]
        body = elems[:-1]

    # Swap the argument order: theano hands the element first and the
    # accumulator second, whereas fn expects fn(accumulator, element).
    wrapper = lambda x, acc: fn(acc, x)
    return theano.foldr(wrapper, body, seed, name=name)[0]
def foldr(fn, elems, initializer=None, name=None):
    """Reduce elems using fn to combine them from right to left.

    # Arguments
        fn: Callable that will be called upon each element in elems and an
            accumulator, for instance lambda acc, x: acc + x
        elems: tensor
        initializer: The first value used (elems[-1] in case of None)
        name: A string name for the foldr node in the graph

    # Returns
        Same type and shape as initializer
    """
    if initializer is None:
        initializer, elems = elems[-1], elems[:-1]

    def inner(current, acc):
        # Theano supplies (element, accumulator); re-order for fn's
        # (accumulator, element) signature.
        return fn(acc, current)

    result = theano.foldr(inner, elems, initializer, name=name)
    # theano.foldr returns a sequence; the fold output is its first entry.
    return result[0]
def test_foldr_memory_consumption(self):
    """Check the buffer size Scan's SaveMem optimization keeps for foldr.

    Builds a right fold over a length-10 shared vector, compiles it,
    locates the Scan node in the optimized graph, and asserts on the
    shape of the Scan node's buffer input.  Finally verifies that the
    gradient of the fold w.r.t. x is a vector of ones (d(sum)/dx).
    """
    # Shared 10-element vector of uniform random values in the
    # backend's configured float precision.
    x = theano.shared(numpy.asarray(
        numpy.random.uniform(size=(10,)), dtype=theano.config.floatX))
    # Right fold computing a running sum; foldr returns (output, updates).
    o, _ = theano.foldr(lambda v, acc: acc + v,
                        x,
                        theano.tensor.constant(
                            numpy.asarray(0.,
                                          dtype=theano.config.floatX)))
    # Exclude in-place optimizations so the buffer-size behavior under
    # test is not masked by inplace rewrites.
    mode = theano.compile.mode.FAST_RUN
    mode = mode.excluding('inplace')
    f1 = theano.function([], o, mode=mode)
    # clone_optimized_graph / grab_scan_node are helpers defined
    # elsewhere in this file; they expose the Scan apply node of the
    # compiled, optimized graph.
    inputs, outputs = clone_optimized_graph(f1)
    scan_nodes = grab_scan_node(outputs[0])
    assert scan_nodes is not None
    scan_node = scan_nodes[0]
    # inputs[2] of the Scan node is the output buffer whose length
    # SaveMem shrinks; compile a function that returns it directly.
    f1 = theano.function(inputs, scan_node.inputs[2])
    # Originally, the shape would have been 1 due to the SaveMem
    # optimization reducing the size to the number of taps (in this case
    # 1) provided to the inner function. Now, because of the memory-reuse
    # feature in Scan it can be 2 because SaveMem needs to keep a
    # larger buffer to avoid aliasing between the inputs and the outputs.
    if theano.config.scan.allow_output_prealloc:
        assert f1().shape[0] == 2
    else:
        assert f1().shape[0] == 1
    # Gradient of a plain sum-fold w.r.t. its input is all ones.
    gx = theano.tensor.grad(o, x)
    f2 = theano.function([], gx)
    utt.assert_allclose(f2(), numpy.ones((10,)))