def __init__(self,
             num_units,
             memory,
             params,
             self_matching=False,
             memory_len=None,
             reuse=None,
             kernel_initializer=None,
             bias_initializer=None,
             is_training=True,
             use_SRU=False):
    super(gated_attention_Wrapper, self).__init__(_reuse=reuse)
    # Pick the underlying recurrent cell: SRU when use_SRU is set, otherwise GRU.
    cell = SRUCell if use_SRU else GRUCell
    self._cell = cell(num_units, is_training=is_training)
    self._num_units = num_units
    self._activation = math_ops.tanh
    self._kernel_initializer = kernel_initializer
    self._bias_initializer = bias_initializer
    # The memory attended over: the encoded question, or the passage itself
    # when self_matching=True (self-matching attention).
    self._attention = memory
    self._params = params
    self._self_matching = self_matching
    self._memory_len = memory_len
    self._is_training = is_training
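
For context, here is a minimal usage sketch, assuming TensorFlow 1.x and that gated_attention_Wrapper, SRUCell, and GRUCell are defined as in the surrounding code; the tensor shapes and the params list below are purely illustrative placeholders, not part of the original implementation.

import tensorflow as tf

batch, p_len, q_len, d, units = 32, 100, 60, 150, 75    # hypothetical sizes
passage = tf.placeholder(tf.float32, [batch, p_len, d])   # encoded passage
question = tf.placeholder(tf.float32, [batch, q_len, d])  # encoded question (the memory)
question_len = tf.placeholder(tf.int32, [batch])          # true lengths for attention masking

# Stand-in for whatever attention weights the surrounding code builds (hypothetical).
W_attn = tf.get_variable("W_attn", [d, units])
params = [W_attn]

cell = gated_attention_Wrapper(num_units=units,
                               memory=question,
                               params=params,
                               memory_len=question_len,
                               is_training=True,
                               use_SRU=False)  # GRUCell underneath

outputs, state = tf.nn.dynamic_rnn(cell, passage, dtype=tf.float32)

With use_SRU=True the same wrapper would run an SRU cell instead; everything else about the call stays the same.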