def Encoder(hidden_size, activation=None, return_sequences=True, bidirectional=False, use_gru=True):
    """Build a recurrent encoder as a reusable layer factory.

    Returns a closure that applies a GRU or LSTM (with a linear activation
    inside the cell) to its input and then applies an explicit activation
    layer, optionally running the RNN in both directions.

    Args:
        hidden_size: Total output width. In bidirectional mode each direction
            gets ``hidden_size // 2`` units, so for odd ``hidden_size`` the
            concatenated output is ``hidden_size - 1`` wide, not ``hidden_size``.
        activation: Keras activation layer applied after the RNN output;
            defaults to ``ELU()`` when ``None``.
        return_sequences: Passed through to the RNN layer unchanged.
        bidirectional: If True, run one forward and one backward RNN over the
            input and concatenate their outputs on the feature axis.
        use_gru: Select ``GRU`` when True, ``LSTM`` otherwise.

    Returns:
        A function ``x -> encoded`` suitable for functional-API composition.
    """
    if activation is None:
        activation = ELU()

    # GRU and LSTM take the identical keyword arguments here, so pick the
    # class once instead of duplicating the whole closure per cell type
    # (the original carried two byte-identical copies of _encoder).
    rnn_cls = GRU if use_gru else LSTM

    def _encoder(x):
        if bidirectional:
            forward = rnn_cls(hidden_size // 2, activation='linear',
                              return_sequences=return_sequences,
                              go_backwards=False)(x)
            # NOTE(review): with go_backwards=True Keras emits the output
            # sequence in reversed time order; the standard Bidirectional
            # wrapper re-reverses it before concatenation, which this code
            # does not do — confirm that is intended before relying on
            # per-timestep alignment of the two branches.
            backward = rnn_cls(hidden_size // 2, activation='linear',
                               return_sequences=return_sequences,
                               go_backwards=True)(x)
            x = concatenate([forward, backward])
        else:
            x = rnn_cls(hidden_size, activation='linear',
                        return_sequences=return_sequences)(x)
        # The cell itself is linear; the nonlinearity is applied here so the
        # same `activation` layer covers both the uni- and bidirectional paths.
        return activation(x)

    return _encoder
# NOTE(review): the trailing lines "评论列表" ("comment list") and "文章目录"
# ("article table of contents") are web-page navigation artifacts left over
# from scraping this snippet, not code; commented out so the module parses.