import lasagne as las
from lasagne.layers import InputLayer, DenseLayer, ElemwiseSumLayer, SliceLayer
from lasagne.layers.recurrent import Gate
from lasagne.nonlinearities import tanh


def create_model(input_shape, input_var, mask_shape, mask_var, lstm_size=250, output_classes=26,
                 w_init=las.init.Orthogonal()):
    gate_parameters = Gate(
        W_in=w_init, W_hid=w_init,
        b=las.init.Constant(0.))
    cell_parameters = Gate(
        W_in=w_init, W_hid=w_init,
        # Setting W_cell to None denotes that no cell (peephole) connection will be used.
        W_cell=None, b=las.init.Constant(0.),
        # By convention, the cell nonlinearity is tanh in an LSTM.
        nonlinearity=tanh)

    l_in = InputLayer(input_shape, input_var, 'input')
    l_mask = InputLayer(mask_shape, mask_var, 'mask')

    # Build a bidirectional LSTM and merge the forward and backward passes element-wise.
    f_lstm, b_lstm = create_blstm(l_in, l_mask, lstm_size, cell_parameters, gate_parameters, 'lstm')
    l_sum = ElemwiseSumLayer([f_lstm, b_lstm], name='sum')

    # Keep only the output at the last time step (axis 1 is the time axis).
    l_forward_slice1 = SliceLayer(l_sum, -1, 1, name='slice1')

    # Now, we can apply feed-forward layers as usual.
    # We want the network to predict a single classification for the sequence,
    # so the output layer has one unit per class with a softmax nonlinearity.
    l_out = DenseLayer(
        l_forward_slice1, num_units=output_classes,
        nonlinearity=las.nonlinearities.softmax, name='output')
    return l_out
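
The helper create_blstm is not shown in this snippet. A minimal sketch of what it plausibly does, assuming it builds one forward and one backward LSTMLayer that share the same gate and cell initializer configuration (the learn_init flag and the layer names are assumptions, not from the original):

from lasagne.layers.recurrent import LSTMLayer

def create_blstm(l_incoming, l_mask, hidden_units, cell_parameters, gate_parameters, name):
    # Forward pass over the sequence. Gate objects only hold initializers,
    # so reusing them across layers does not tie any weights.
    f_lstm = LSTMLayer(
        l_incoming, hidden_units,
        ingate=gate_parameters, forgetgate=gate_parameters,
        cell=cell_parameters, outgate=gate_parameters,
        nonlinearity=tanh, mask_input=l_mask,
        learn_init=True, name='f_{}'.format(name))
    # Backward pass: identical configuration, reading the sequence in reverse.
    b_lstm = LSTMLayer(
        l_incoming, hidden_units,
        ingate=gate_parameters, forgetgate=gate_parameters,
        cell=cell_parameters, outgate=gate_parameters,
        nonlinearity=tanh, mask_input=l_mask,
        learn_init=True, backwards=True, name='b_{}'.format(name))
    return f_lstm, b_lstm

To wire the model up, you would pass symbolic Theano variables plus the matching shapes; the feature dimension (40 here) is a hypothetical placeholder:

import theano.tensor as T

input_var = T.tensor3('input')   # (batch, time, feature)
mask_var = T.matrix('mask')      # (batch, time)
net = create_model((None, None, 40), input_var, (None, None), mask_var)
prediction = las.layers.get_output(net)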