import numpy as np

from keras.models import Sequential, Model, model_from_json
from keras.layers import wrappers, recurrent, Input


def test_Bidirectional():
    """Test the Bidirectional wrapper with 'sum' and 'concat' merge modes."""
    rnn = recurrent.SimpleRNN
    nb_sample = 2
    dim = 2
    timesteps = 2
    output_dim = 2
    for mode in ['sum', 'concat']:
        x = np.random.random((nb_sample, timesteps, dim))
        # 'concat' joins the forward and backward outputs, doubling the feature dimension
        target_dim = 2 * output_dim if mode == 'concat' else output_dim
        y = np.random.random((nb_sample, target_dim))

        # test with Sequential model
        model = Sequential()
        model.add(wrappers.Bidirectional(rnn(output_dim),
                                         merge_mode=mode,
                                         input_shape=(timesteps, dim)))
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, nb_epoch=1, batch_size=1)

        # test config round-trip and JSON serialization
        model.get_config()
        model = model_from_json(model.to_json())
        model.summary()

        # test stacked bidirectional layers
        model = Sequential()
        model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True),
                                         merge_mode=mode,
                                         input_shape=(timesteps, dim)))
        model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode))
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, nb_epoch=1, batch_size=1)

        # test with functional API
        input = Input((timesteps, dim))
        output = wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)(input)
        model = Model(input, output)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, nb_epoch=1, batch_size=1)

        # Bidirectional and stateful: a stateful RNN needs a fixed batch size,
        # so the Input layer is given a full batch_shape
        input = Input(batch_shape=(1, timesteps, dim))
        output = wrappers.Bidirectional(rnn(output_dim, stateful=True),
                                        merge_mode=mode)(input)
        model = Model(input, output)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, nb_epoch=1, batch_size=1)
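
# A minimal sketch of a direct entry point, assuming pytest is installed
# (the convention used by the Keras test modules this function comes from):
if __name__ == '__main__':
    import pytest
    pytest.main([__file__])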