import tensorflow as tf

# BATCHSIZE, TIMESTEP, LONGITUDE and WIDTH are module-level constants defined
# elsewhere in self_coded_defs.py.

def ms_error(labels, logits):
    # Per-element squared error; sequence_loss_by_example expects a callable
    # here, not a precomputed tensor.
    return tf.square(tf.subtract(labels, logits))

def regression_loss(outputs, target_y, batch_size=BATCHSIZE, n_steps=TIMESTEP):
    # Normalise both tensors to [batch * steps, LONGITUDE, WIDTH] before flattening.
    target_y = tf.reshape(target_y, [-1, LONGITUDE, WIDTH])
    outputs = tf.reshape(outputs, [-1, LONGITUDE, WIDTH])
    losses = tf.contrib.legacy_seq2seq.sequence_loss_by_example(
        [tf.reshape(outputs, [-1], name='reshape_pred')],
        [tf.reshape(target_y, [-1], name='reshape_target')],
        # One weight per flattened prediction element.
        [tf.ones([batch_size * n_steps * LONGITUDE * WIDTH], dtype=tf.float32)],
        average_across_timesteps=True,
        softmax_loss_function=ms_error,
        name='losses')
    with tf.name_scope('average_cost'):
        cost = tf.div(
            tf.reduce_sum(losses, name='losses_sum'),
            batch_size,
            name='average_cost')
    return cost
Source file: self_coded_defs.py
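Below is a minimal usage sketch of the loss in TF 1.x graph mode. The placeholder tensors `model_outputs` and `target_frames` are hypothetical names introduced only to illustrate the expected shapes; in the real model the predictions would come from the network, and BATCHSIZE, TIMESTEP, LONGITUDE and WIDTH are the constants assumed from self_coded_defs.py.

# Illustrative only: stand-ins for the network output and the ground truth.
model_outputs = tf.placeholder(
    tf.float32, [BATCHSIZE, TIMESTEP, LONGITUDE, WIDTH], name='model_outputs')
target_frames = tf.placeholder(
    tf.float32, [BATCHSIZE, TIMESTEP, LONGITUDE, WIDTH], name='target_frames')

cost = regression_loss(model_outputs, target_frames)
# The resulting scalar can then be minimized as usual, e.g.:
# train_op = tf.train.AdamOptimizer(1e-3).minimize(cost)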