# Method of a model class; assumes `import tensorflow as tf` at module level.
# Note: this uses the pre-1.0 TensorFlow API (tf.scalar_summary and
# tf.histogram_summary were later renamed tf.summary.scalar and
# tf.summary.histogram in TF 1.x).
def Train(self,
          loss,
          learning_rate,
          clip_value_min,
          clip_value_max,
          name='training'):
  # Log the loss value to TensorBoard under a name-spaced tag.
  tf.scalar_summary(':'.join([name, loss.op.name]), loss)
  optimizer = tf.train.AdagradOptimizer(learning_rate)
  # Compute gradients explicitly (rather than calling optimizer.minimize())
  # so they can be clipped before being applied.
  grads_and_vars = optimizer.compute_gradients(loss)
  # Clip each gradient; skip variables with no gradient (g is None), which
  # would otherwise crash tf.clip_by_value.
  clipped_grads_and_vars = [
      (tf.clip_by_value(g, clip_value_min, clip_value_max), v)
      for g, v in grads_and_vars
      if g is not None
  ]
  # Record histograms of each variable and of its clipped gradient.
  for g, v in clipped_grads_and_vars:
    _ = tf.histogram_summary(':'.join([name, v.name]), v)
    _ = tf.histogram_summary('%s: gradient for %s' % (name, v.name), g)
  # Applying the clipped gradients is the actual parameter-update op.
  train_op = optimizer.apply_gradients(clipped_grads_and_vars)
  return train_op
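
For context, here is a minimal sketch of how a method like this might be driven from a training loop, using the same pre-1.0 TensorFlow API. The `model` object, its `Loss` method, the placeholder shapes, and the log directory are hypothetical illustrations, not part of the original post.

import tensorflow as tf

# Hypothetical setup: `model` is an instance of the class defining Train().
inputs = tf.placeholder(tf.float32, shape=[None, 10])
labels = tf.placeholder(tf.float32, shape=[None, 1])
loss = model.Loss(inputs, labels)      # hypothetical loss-building method
train_op = model.Train(loss,
                       learning_rate=0.5,
                       clip_value_min=-1.0,
                       clip_value_max=1.0)
summary_op = tf.merge_all_summaries()  # pre-1.0 name for tf.summary.merge_all

with tf.Session() as sess:
  sess.run(tf.initialize_all_variables())  # pre-1.0 initializer
  writer = tf.train.SummaryWriter('/tmp/train_logs', sess.graph)
  for step in range(1000):
    # feed_dict omitted: supply real batches for `inputs` and `labels` here.
    _, summaries = sess.run([train_op, summary_op])
    writer.add_summary(summaries, step)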