from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.util import nest


def _blinear(args, args2, output_size, bias, bias_start=0.0):
    """Apply the same _linear transformation to two parallel sets of inputs,
    sharing one set of weights between them."""
    if args is None or (nest.is_sequence(args) and not args):
        raise ValueError("`args` must be specified")
    if not nest.is_sequence(args):
        args = [args]
    if not nest.is_sequence(args2):
        # Normalize the second, parallel input the same way as the first.
        args2 = [args2]
    total_arg_size = 0
    shapes = [a.get_shape() for a in args]
    for shape in shapes:
        if shape.ndims != 2:
            raise ValueError("linear is expecting 2D arguments: %s" % shapes)
        if shape[1].value is None:
            raise ValueError(
                "linear expects shape[1] to be provided for shape %s, "
                "but saw %s" % (shape, shape[1]))
        else:
            total_arg_size += shape[1].value
    dtype = [a.dtype for a in args][0]
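    # Both inputs are projected through one shared weight matrix of shape
    # [total_arg_size, output_size // 2].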
    # Now the computation.
    scope = vs.get_variable_scope()
    with vs.variable_scope(scope) as outer_scope:
        weights = vs.get_variable(
            'weight', [total_arg_size, output_size // 2], dtype=dtype)
        # apply the shared weights to both sets of inputs
        if len(args) == 1:
            res = math_ops.matmul(args[0], weights)
            res2 = math_ops.matmul(args2[0], weights)
        else:
            # concat(values, axis) argument order for TF >= 1.0
            res = math_ops.matmul(array_ops.concat(args, 1), weights)
            res2 = math_ops.matmul(array_ops.concat(args2, 1), weights)
        if not bias:
            return res, res2
        # apply bias
        with vs.variable_scope(outer_scope) as inner_scope:
            inner_scope.set_partitioner(None)
            biases = vs.get_variable(
                'bias', [output_size // 2],
                dtype=dtype,
                initializer=init_ops.constant_initializer(
                    bias_start, dtype=dtype))
        return nn_ops.bias_add(res, biases), nn_ops.bias_add(res2, biases)
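
A minimal usage sketch of _blinear, assuming TensorFlow 1.x (tf.placeholder, tf.variable_scope); the input sizes, the scope name 'blinear_demo', and the output_size below are illustrative placeholders, not values taken from ln_lstm2.py:

import tensorflow as tf

# Two parallel 128-dimensional inputs that should share one projection.
x1 = tf.placeholder(tf.float32, [None, 128])
x2 = tf.placeholder(tf.float32, [None, 128])

with tf.variable_scope('blinear_demo'):
    out1, out2 = _blinear([x1], [x2], output_size=64, bias=True)

# Both outputs have shape [None, 32]: the shared 'weight' variable is
# [128, output_size // 2], and the same 'bias' variable is added to each.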
Source: ln_lstm2.py (Python)