import tensorflow as tf


def _shape(tensor):
  """Helper used below: the static shape of a tensor as a Python list."""
  return tensor.get_shape().as_list()


def ndlstm_base_dynamic(inputs, noutput, scope=None, reverse=False):
  """Run an LSTM, either forward or backward.

  This is a 1D LSTM implementation using dynamic_rnn and
  the TensorFlow LSTM op.

  Args:
    inputs: input sequence (length, batch_size, ninput)
    noutput: depth of output
    scope: optional scope name
    reverse: run LSTM in reverse

  Returns:
    Output sequence (length, batch_size, noutput)
  """
  with tf.variable_scope(scope, "SeqLstm", [inputs]):
    # TODO(tmb) make batch size, sequence_length dynamic
    # example: sequence_length = tf.shape(inputs)[0]
    _, batch_size, _ = _shape(inputs)
    lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(noutput, state_is_tuple=False)
    # Zero initial state; with state_is_tuple=False, c and h are concatenated,
    # so state_size == 2 * noutput.
    state = tf.zeros([batch_size, lstm_cell.state_size])
    # Every example in the batch uses the full static sequence length.
    sequence_length = int(inputs.get_shape()[0])
    sequence_lengths = tf.to_int64(tf.fill([batch_size], sequence_length))
    if reverse:
      # Flip along the time axis so the LSTM effectively runs backward.
      inputs = tf.reverse_v2(inputs, [0])
    outputs, _ = tf.nn.dynamic_rnn(
        lstm_cell, inputs, sequence_lengths, state, time_major=True)
    if reverse:
      # Flip the outputs back into the original time order.
      outputs = tf.reverse_v2(outputs, [0])
    return outputs
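

# Usage sketch (illustrative, not from the original source): the concrete
# shape values below are assumed; any time-major float tensor of shape
# (length, batch_size, ninput) with a fully defined static shape works,
# since the function reads the batch size and sequence length from it.
if __name__ == "__main__":
  x = tf.placeholder(tf.float32, [20, 4, 8])    # (length, batch_size, ninput)
  y = ndlstm_base_dynamic(x, 16, scope="demo")  # (length, batch_size, noutput)
  print(y.get_shape())                          # expected: (20, 4, 16)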