def _build_train_op(self):
    self.global_step = tf.Variable(0, trainable=False)
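    # CTC loss: `labels` must be a tf.SparseTensor of target indices;
    # the op returns one loss value per batch element.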
    self.loss = tf.nn.ctc_loss(labels=self.labels,
                               inputs=self.logits,
                               sequence_length=self.seq_len)
    self.cost = tf.reduce_mean(self.loss)
    tf.summary.scalar('cost', self.cost)
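    # Exponential learning-rate decay schedule; `staircase=True` drops the
    # rate in discrete steps every `decay_steps` global steps.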
    self.lrn_rate = tf.train.exponential_decay(FLAGS.initial_learning_rate,
                                               self.global_step,
                                               FLAGS.decay_steps,
                                               FLAGS.decay_rate,
                                               staircase=True)
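    # Alternative optimizers kept for reference; both minimize the mean
    # cost using the decayed learning rate above.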
    # self.optimizer = tf.train.RMSPropOptimizer(learning_rate=self.lrn_rate,
    #                                            momentum=FLAGS.momentum).minimize(self.cost,
    #                                                                              global_step=self.global_step)
    # self.optimizer = tf.train.MomentumOptimizer(learning_rate=self.lrn_rate,
    #                                             momentum=FLAGS.momentum,
    #                                             use_nesterov=True).minimize(self.cost,
    #                                                                         global_step=self.global_step)
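    # Note: Adam is fed the raw initial learning rate, so the decayed
    # self.lrn_rate above is effectively unused here. Minimizing the
    # unreduced per-example `self.loss` differentiates its sum, whereas
    # the alternatives above minimize the mean `self.cost`.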
    self.optimizer = tf.train.AdamOptimizer(learning_rate=FLAGS.initial_learning_rate,
                                            beta1=FLAGS.beta1,
                                            beta2=FLAGS.beta2).minimize(self.loss,
                                                                        global_step=self.global_step)
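    # Group the optimizer step with any extra train ops (e.g. batch-norm
    # moving-average updates collected in self._extra_train_ops).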
    train_ops = [self.optimizer] + self._extra_train_ops
    self.train_op = tf.group(*train_ops)
    # Two decoding options:
    #   tf.nn.ctc_greedy_decoder is faster but less accurate:
    # decoded, log_prob = tf.nn.ctc_greedy_decoder(self.logits, self.seq_len, merge_repeated=False)
    #   tf.nn.ctc_beam_search_decoder (used below) is slower but gives better results.
    self.decoded, self.log_prob = tf.nn.ctc_beam_search_decoder(self.logits,
                                                                self.seq_len,
                                                                merge_repeated=False)
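    # Convert the best (top) decoding from sparse to dense form, padding
    # with -1 so padding is distinguishable from real label indices.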
    self.dense_decoded = tf.sparse_tensor_to_dense(self.decoded[0], default_value=-1)
Source file: cnn_lstm_otc_ocr.py
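For context, here is a minimal sketch of how this training op might be driven in a TF1 session. The placeholder names (model.inputs, model.labels, model.seq_len), the sparse_tuple_from helper, and the feed shapes are assumptions for illustration, not part of the original file.

import numpy as np
import tensorflow as tf

def sparse_tuple_from(sequences):
    # Hypothetical helper: pack a batch of label sequences into the
    # (indices, values, shape) triple that a tf.sparse_placeholder
    # (and hence tf.nn.ctc_loss) can be fed with.
    indices, values = [], []
    for n, seq in enumerate(sequences):
        indices.extend(zip([n] * len(seq), range(len(seq))))
        values.extend(seq)
    indices = np.asarray(indices, dtype=np.int64)
    values = np.asarray(values, dtype=np.int32)
    shape = np.asarray([len(sequences), max(len(s) for s in sequences)],
                       dtype=np.int64)
    return indices, values, shape

# Assumed driver loop; `model` is an instance of the class above and
# batch_inputs / batch_labels / batch_seq_len come from a data pipeline.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    feed = {model.inputs: batch_inputs,
            model.labels: sparse_tuple_from(batch_labels),
            model.seq_len: batch_seq_len}
    _, cost, decoded = sess.run([model.train_op, model.cost, model.dense_decoded],
                                feed_dict=feed)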