def train(self):
    """Build the training op: exponentially decayed learning rate,
    Adam optimizer, and per-gradient norm clipping."""
    # decay the learning rate every decay_steps by decay_rate (staircase schedule)
    learning_rate = tf.train.exponential_decay(self.learning_rate,
                                               self.global_step, self.decay_steps,
                                               self.decay_rate, staircase=True)
    optimizer = tf.train.AdamOptimizer(learning_rate)
    grads_and_vars = optimizer.compute_gradients(self.loss_val)
    # clip each gradient's norm to grad_clip to keep exploding gradients in check
    for idx, (grad, var) in enumerate(grads_and_vars):
        if grad is not None:
            grads_and_vars[idx] = (tf.clip_by_norm(grad, self.grad_clip), var)
    # apply the clipped gradients and advance global_step
    train_op = optimizer.apply_gradients(grads_and_vars,
                                         global_step=self.global_step)
    return train_op
Source file: RCNNModelWithLSTM.py (Python)
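For reference, here is a minimal, self-contained sketch of the same decay-then-clip pattern applied to a toy least-squares loss. It is not part of RCNNModelWithLSTM.py; the names (toy_w, toy_loss), the clip value 5.0, and the decay settings are illustrative assumptions, but the optimizer calls mirror the method above (TensorFlow 1.x API).

# Illustrative sketch only: same exponential_decay + clip_by_norm pattern,
# applied to a toy least-squares problem. Values below are assumptions.
import numpy as np
import tensorflow as tf  # TensorFlow 1.x API, as in the snippet above

global_step = tf.Variable(0, trainable=False, name="global_step")
x = tf.placeholder(tf.float32, [None, 3])
y = tf.placeholder(tf.float32, [None, 1])
toy_w = tf.Variable(tf.random_normal([3, 1]))
toy_loss = tf.reduce_mean(tf.square(tf.matmul(x, toy_w) - y))

# learning rate halves every 100 steps (staircase schedule)
lr = tf.train.exponential_decay(0.01, global_step, 100, 0.5, staircase=True)
optimizer = tf.train.AdamOptimizer(lr)
grads_and_vars = optimizer.compute_gradients(toy_loss)
# clip each gradient's norm to at most 5.0 before applying it
clipped = [(tf.clip_by_norm(g, 5.0), v) for g, v in grads_and_vars if g is not None]
train_op = optimizer.apply_gradients(clipped, global_step=global_step)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    data_x = np.random.randn(32, 3).astype(np.float32)
    data_y = np.random.randn(32, 1).astype(np.float32)
    for _ in range(10):
        _, step, cur_loss = sess.run([train_op, global_step, toy_loss],
                                     feed_dict={x: data_x, y: data_y})
        print("step %d  loss %.4f" % (step, cur_loss))

One design note: like the original method, the sketch clips each gradient tensor independently with tf.clip_by_norm; clipping the global norm across all gradients (tf.clip_by_global_norm) is a common alternative.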