def neural_gpu_body(inputs, hparams, name=None):
  """The core Neural GPU."""
  with tf.variable_scope(name, "neural_gpu"):

    def step(state, inp):  # pylint: disable=missing-docstring
      # One time step: dropout followed by a stack of convolutional GRUs.
      hidden = tf.nn.dropout(state, 1.0 - hparams.dropout)
      for layer_idx in xrange(hparams.num_hidden_layers):
        hidden = common_layers.conv_gru(
            hidden,
            (hparams.kernel_height, hparams.kernel_width),
            hparams.hidden_size,
            name="cgru_%d" % layer_idx)
      # Padding input is zeroed-out in the modality, we check this by summing.
      inp_is_padding = tf.less(
          tf.reduce_sum(tf.abs(inp), axis=[1, 2]), 0.00001)
      # Where the input is padding, keep the previous state (no-op step).
      return tf.where(inp_is_padding, state, hidden)

    return tf.foldl(
        step,
        tf.transpose(inputs, [1, 0, 2, 3]),
        initializer=inputs,
        parallel_iterations=1,
        swap_memory=True)
Python `foldl()` usage examples (source snippets)
def foldl(fn, elems, initializer=None, name=None):
    """Fold `elems` from left to right, combining them with `fn`.

    # Arguments
        fn: Callable invoked with (accumulator, element) for each element,
            for instance `lambda acc, x: acc + x`.
        elems: tensor
        initializer: The first value used (`elems[0]` in case of None)
        name: A string name for the foldl node in the graph

    # Returns
        Same type and shape as initializer
    """
    result = tf.foldl(fn, elems, initializer=initializer, name=name)
    return result
Source file: tensorflow_backend.py
Project: deep-learning-keras-projects
Author: jasmeetsb
Project source · file source · views: 26 · bookmarks: 0 · likes: 0 · comments: 0
def foldl(fn, elems, initializer=None, name=None):
    """Left-to-right reduction of `elems` using `fn`.

    # Arguments
        fn: Callable that receives the running accumulator and the next
            element, e.g. `lambda acc, x: acc + x`.
        elems: tensor
        initializer: The first value used (`elems[0]` in case of None)
        name: A string name for the foldl node in the graph

    # Returns
        Same type and shape as initializer
    """
    return tf.foldl(fn, elems, initializer=initializer, name=name)
def foldl(fn, elems, initializer=None, name=None):
    """Reduce `elems` with `fn`, walking the elements left to right.

    # Arguments
        fn: Callable applied to (accumulator, element) pairs,
            for instance `lambda acc, x: acc + x`.
        elems: tensor
        initializer: The first value used (`elems[0]` in case of None)
        name: A string name for the foldl node in the graph

    # Returns
        Same type and shape as initializer
    """
    fold_kwargs = {"initializer": initializer, "name": name}
    return tf.foldl(fn, elems, **fold_kwargs)
def foldl(fn, elems, initializer=None, name=None):
    """Combine the elements of `elems` from left to right via `fn`.

    # Arguments
        fn: Callable taking the accumulator and one element,
            e.g. `lambda acc, x: acc + x`.
        elems: tensor
        initializer: The first value used (`elems[0]` in case of None)
        name: A string name for the foldl node in the graph

    # Returns
        Same type and shape as initializer
    """
    folded = tf.foldl(fn, elems, initializer=initializer, name=name)
    return folded
def foldl(fn, elems, initializer=None, name=None):
    """Perform a left fold over `elems` with the combiner `fn`.

    # Arguments
        fn: Callable called on each element together with the accumulator,
            for instance `lambda acc, x: acc + x`.
        elems: tensor
        initializer: The first value used (`elems[0]` in case of None)
        name: A string name for the foldl node in the graph

    # Returns
        Tensor with same type and shape as `initializer`.
    """
    return tf.foldl(fn, elems, initializer=initializer, name=name)
def _calculate_average_and_put(self, group_id, item, m):
    """Average each listed variable across the stored worker graphs and
    write the frozen result back to the key-value store under `group_id`.

    Args:
        group_id: store key under which the serialized, constant-folded
            GraphDef holding the averaged variables is saved via self.rc.
        item: dict with 'keys' (store keys of the per-worker graphs to
            fetch) and 'variables' (names of the variables to average).
        m: parent measurement handle passed to SimpleMeasurement timers.
    """
    keys = item['keys']
    # Start from an empty default graph so only the imported worker
    # graphs and the new average variables live in this session.
    tf.reset_default_graph()
    sess = tf.Session()
    new_vars = []
    m_cal_and_put = SimpleMeasurement('cal_and_put', m)
    m_init = SimpleMeasurement('init', m)
    # NOTE(review): the graph is empty here, so this initializer covers no
    # variables; the averages created below are initialized one by one
    # via new_var.initializer instead.
    init_op = tf.global_variables_initializer()
    sess.run(init_op)
    m_init.end_measure()
    for v in item['variables']:
        count = 0
        name = 'average_%s' % v
        ts = []
        for key in keys:
            raw = self.rc.get(key)
            # TODO: check raw is not None
            # Presumably imports the serialized graph under the `key`
            # name scope — confirm against util.restore_graph.
            util.restore_graph(key, raw)
            g = sess.graph
            t = g.get_tensor_by_name('%s/%s:0' % (key, v))
            ts.append(t)
            count += 1
        m_cal = SimpleMeasurement('cal', m)
        # Sum the per-worker tensors with a left fold, then divide by the
        # number of workers to get the element-wise mean.
        avg = tf.foldl(tf.add, ts) / count
        new_var = tf.Variable(avg, name=name)
        sess.run(new_var.initializer)
        sess.run(new_var)
        new_vars.append(name)
        m_cal.end_measure()
    g = sess.graph
    g_def = g.as_graph_def()
    # Freeze the averaged variables into constants so the whole result
    # can be serialized as a self-contained GraphDef.
    constants = graph_util.convert_variables_to_constants(
        sess, g_def, new_vars)
    s = constants.SerializeToString()
    self.rc.set(group_id, s)
    sess.close()
    m_cal_and_put.end_measure()