def infer(self, output_maxlen=128):
    """Build the model graph for inference (batch size 1, greedy decoding)."""
    self.input_data = tf.placeholder(tf.int32, [1, None], name='input_data')
    self.input_lengths = None

    def infer_helper():
        # Greedy decoding: embed the previously predicted token and feed it
        # back in, starting from <sos> and stopping once <eos> is emitted.
        return seq2seq.GreedyEmbeddingHelper(
            self._output_onehot,
            start_tokens=tf.fill([1], self._output_sos_id),
            end_token=self._output_eos_id)

    # Batch size 1; keep the attention alignment history so the attention
    # weights can be inspected after decoding.
    self._build_model(1, infer_helper, decoder_maxiters=output_maxlen,
                      alignment_history=True)
# See also:
# https://groups.google.com/a/tensorflow.org/forum/#!topic/discuss/dw3Y2lnMAJc
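For context, here is a standalone sketch of how a GreedyEmbeddingHelper is typically wired into tf.contrib.seq2seq decoding, which is roughly what `self._build_model` presumably sets up internally. All sizes, the embedding variable, and the start/end token ids below are assumptions for illustration, not values taken from the original model:

import tensorflow as tf
from tensorflow.contrib import seq2seq

# Hypothetical sizes; the real model defines its own.
vocab_size, embed_dim, num_units = 1000, 64, 128
batch_size = 1

embedding = tf.get_variable('embedding', [vocab_size, embed_dim])
cell = tf.nn.rnn_cell.GRUCell(num_units)

# Greedy helper: at every step, embed the previously predicted token
# and feed it back in, from <sos> (assumed id 1) until <eos> (assumed id 2).
helper = seq2seq.GreedyEmbeddingHelper(
    embedding,
    start_tokens=tf.fill([batch_size], 1),
    end_token=2)

decoder = seq2seq.BasicDecoder(
    cell, helper,
    initial_state=cell.zero_state(batch_size, tf.float32),
    output_layer=tf.layers.Dense(vocab_size))

# maximum_iterations plays the same role as decoder_maxiters above:
# decoding stops at <eos> or after that many steps, whichever comes first.
outputs, final_state, lengths = seq2seq.dynamic_decode(
    decoder, maximum_iterations=128)
predicted_ids = outputs.sample_id  # [batch_size, <=128] int32 token ids

Swapping the helper is all it takes to change the decoding strategy, e.g. `seq2seq.SampleEmbeddingHelper` for sampled decoding, while training would normally use `seq2seq.TrainingHelper` with teacher forcing.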