def __init__(self, num_units,
             factor_size,
             initializer=None,
             num_proj=None,
             forget_bias=1.0,
             activation=tanh):
    """
    Initializes parameters of the G-LSTM (factorized LSTM) cell.

    :param num_units: int, the number of units in the G-LSTM cell.
    :param factor_size: int, factorization size; must be strictly smaller
        than ``num_units`` (and ``num_proj``, when a projection is used).
    :param initializer: (optional) initializer to use for the weight and
        projection matrices.
    :param num_proj: (optional) int, the output dimensionality for the
        projection matrices. If None, no projection is performed.
    :param forget_bias: float, bias of the forget gate; defaults to 1.0
        to reduce the scale of forgetting at the beginning of training.
    :param activation: activation function of the inner states.
    :raises ValueError: if ``factor_size`` is not strictly smaller than
        ``num_units`` (or ``num_proj``, when a projection is used).
    """
    self._num_units = num_units
    self._initializer = initializer
    self._num_proj = num_proj
    self._forget_bias = forget_bias
    self._activation = activation
    self._factor_size = factor_size
    # Validate with explicit exceptions rather than `assert`, which is
    # silently stripped when Python runs with the -O flag.
    if self._num_units <= self._factor_size:
        raise ValueError(
            "factor_size %d must be strictly smaller than num_units %d"
            % (self._factor_size, self._num_units))
    if self._num_proj and self._num_proj <= self._factor_size:
        raise ValueError(
            "factor_size %d must be strictly smaller than num_proj %d"
            % (self._factor_size, self._num_proj))
    if num_proj:
        # With projection: cell state is num_units wide, output/hidden
        # state is projected down to num_proj.
        self._state_size = LSTMStateTuple(num_units, num_proj)
        self._output_size = num_proj
    else:
        self._state_size = LSTMStateTuple(num_units, num_units)
        self._output_size = num_units