run.py source code

python

Project: returnn-benchmarks    Author: rwth-i6
# TensorFlow 0.x-style imports matching the rnn_cell / rnn.bidirectional_rnn API used below.
# Note: this snippet uses the pre-1.0 argument order, e.g. tf.split(axis, num_splits, value)
# and tf.concat(axis, values).
import tensorflow as tf
from tensorflow.python.ops import rnn, rnn_cell

# n_input, n_steps, n_hidden, MAX_LEN and BATCH_SIZE are module-level constants
# defined elsewhere in run.py.
def BiRNN(x, weights, biases):

    # Prepare data shape to match `bidirectional_rnn` function requirements
    # Current data input shape: (batch_size, n_steps, n_input)
    # Required shape: 'n_steps' tensors list of shape (batch_size, n_input)

    # Permuting batch_size and n_steps
    x = tf.transpose(x, [1, 0, 2])
    # Reshape to (n_steps*batch_size, n_input)
    x = tf.reshape(x, [-1, n_input])
    # Split to get a list of 'n_steps' tensors of shape (batch_size, n_input)
    x = tf.split(0, n_steps, x)
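    # e.g. with (batch_size, n_steps, n_input) = (32, 100, 39), assumed values for illustration:
    # transpose -> (100, 32, 39), reshape -> (3200, 39),
    # split -> a list of 100 tensors, each of shape (32, 39)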

    # Define lstm cells with tensorflow
    with tf.variable_scope("lstm1") as scope1:
        lstm_fw_cell_1 = rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0)
        lstm_bw_cell_1 = rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0)
        outputs_1, _, _ = rnn.bidirectional_rnn(lstm_fw_cell_1, lstm_bw_cell_1, x, dtype=tf.float32)

    with tf.variable_scope("lstm2") as scope2:
        lstm_fw_cell_2 = rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0)
        lstm_bw_cell_2 = rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0)
        outputs_2, _, _ = rnn.bidirectional_rnn(lstm_fw_cell_2, lstm_bw_cell_2, outputs_1, dtype=tf.float32)

    with tf.variable_scope("lstm3") as scope3:
        lstm_fw_cell_3 = rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0)
        lstm_bw_cell_3 = rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0)
        outputs_3, _, _ = rnn.bidirectional_rnn(lstm_fw_cell_3, lstm_bw_cell_3, outputs_2, dtype=tf.float32)

    # Stack the list of per-step outputs into a single 2-D tensor of shape
    # (MAX_LEN * BATCH_SIZE, 2 * n_hidden)
    outputs = tf.reshape(tf.concat(0, outputs_3), [MAX_LEN * BATCH_SIZE, n_hidden * 2])
    # Linear projection applied to the output of every time step
    return tf.matmul(outputs, weights['out']) + biases['out']
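
For context, here is a minimal sketch of how BiRNN might be wired up and called. Everything below (the hyperparameter values, the placeholder shape and the n_classes output dimension) is an assumption made for illustration; run.py defines its own constants, weights and input pipeline.

# Hypothetical setup for calling BiRNN; all values are illustrative, not taken from run.py.
MAX_LEN = 100                  # assumed maximum sequence length
BATCH_SIZE = 32                # assumed batch size
n_steps = MAX_LEN              # BiRNN unrolls over n_steps time steps
n_input = 39                   # assumed per-frame feature dimension
n_hidden = 512                 # assumed LSTM size per direction
n_classes = 61                 # assumed number of output classes

x = tf.placeholder(tf.float32, [BATCH_SIZE, n_steps, n_input])
weights = {'out': tf.Variable(tf.random_normal([2 * n_hidden, n_classes]))}
biases = {'out': tf.Variable(tf.random_normal([n_classes]))}

# Logits of shape (MAX_LEN * BATCH_SIZE, n_classes): one row per time step and sequence,
# which can then be fed into a per-frame loss such as softmax cross-entropy.
pred = BiRNN(x, weights, biases)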