def __init__(self, output_dim, input_length=None, composer_activation='linear',
return_mode='last_output', weights=None, **kwargs):
'''
Arguments:
    output_dim (int): Dimensionality of the output (and, per Equation 2 in the paper, also the input)
    input_length (int): Length of the input sequences, if known
    composer_activation (str): Activation used in the composer MLP
    return_mode (str): One of "last_output", "all_outputs", "output_and_memory".
        This is analogous to the return_sequences flag in Keras' Recurrent.
        "last_output" returns only the last h_t.
        "all_outputs" returns the whole sequence of h_ts.
        "output_and_memory" returns the last output concatenated with the last memory
        (needed if this layer is followed by an MMA-NSE). See the usage sketch after this method.
    weights (list): Initial weights
'''
self.output_dim = output_dim
self.input_dim = output_dim # Equation 2 in the paper makes this assumption.
self.initial_weights = weights
self.input_spec = [InputSpec(ndim=3)]
self.input_length = input_length
self.composer_activation = composer_activation
super(NSE, self).__init__(**kwargs)
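# Reader LSTM: processes the input at each timestep to produce the read vector that
# attends over the memory (the read module in the NSE paper).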
self.reader = LSTM(self.output_dim, dropout_W=0.0, dropout_U=0.0, consume_less="gpu",
name="{}_reader".format(self.name))
# TODO: Let the writer use parameter dropout and any consume_less mode.
# Setting dropout to 0 here to eliminate the need for constants.
# Setting consume_less to gpu to eliminate the need for preprocessing.
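# Writer LSTM: produces the new hidden state h_t, which is written back into the memory
# (the write module in the NSE paper).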
self.writer = LSTM(self.output_dim, dropout_W=0.0, dropout_U=0.0, consume_less="gpu",
name="{}_writer".format(self.name))
self.composer = Dense(self.output_dim * 2, activation=self.composer_activation,
name="{}_composer".format(self.name))
if return_mode not in ["last_output", "all_outputs", "output_and_memory"]:
raise ValueError("Unrecognized return mode: %s" % return_mode)
self.return_mode = return_mode
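# A minimal usage sketch (an illustration, not from the original source; it assumes this
# layer is used with Keras 1's functional API and, for concreteness, input_length=20 and
# output_dim=300):
#
#     from keras.layers import Input
#     from keras.models import Model
#
#     sentence = Input(shape=(20, 300))
#     last_h = NSE(300, return_mode="last_output")(sentence)   # shape: (batch, 300)
#     all_h = NSE(300, return_mode="all_outputs")(sentence)    # shape: (batch, 20, 300)
#     out_mem = NSE(300, return_mode="output_and_memory")(sentence)  # last output concatenated
#                                                                    # with the final memory,
#                                                                    # e.g. to feed an MMA-NSE
#     model = Model(input=sentence, output=last_h)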