def __init__(self, num_units, forget_bias=1.0, activation=tf.tanh, layer_norm=False, update_bias=1.0):
"""
Initialize the stack of Skip LSTM cells
:param num_units: list of int, the number of units in each LSTM cell
:param forget_bias: float, the bias added to forget gates
:param activation: activation function of the inner states
:param layer_norm: bool, whether to use layer normalization
:param update_bias: float, initial value for the bias added to the update state gate
"""
    if not isinstance(num_units, list):
        num_units = [num_units]
    self._num_units = num_units
    self._num_layers = len(self._num_units)
    self._forget_bias = forget_bias
    self._activation = activation
    self._layer_norm = layer_norm
    self._update_bias = update_bias
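A minimal usage sketch of this constructor, assuming the enclosing class in skip_rnn_cells.py is the stacked Skip LSTM cell and follows the TensorFlow 1.x RNNCell interface; the class name MultiSkipLSTMCell and the input shapes below are assumptions for illustration, not confirmed by the snippet itself.

import tensorflow as tf
from skip_rnn_cells import MultiSkipLSTMCell  # assumed name of the enclosing class

# Two stacked Skip LSTM layers with 128 and 64 units; a bare int such as 128
# would be wrapped into [128] by the constructor above.
cell = MultiSkipLSTMCell([128, 64], forget_bias=1.0, layer_norm=False, update_bias=1.0)

# Hypothetical input: batch of 32 sequences, 50 time steps, 10 features (TF 1.x graph mode).
inputs = tf.placeholder(tf.float32, [32, 50, 10])
outputs, final_state = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32)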