def __call__(self, inputs, state, scope=None):
    """Run one step of the attention-based GRU.

    The last column of `inputs` carries a precomputed attention gate
    `g_t`; it replaces the standard GRU update gate, so the cell blends
    the candidate state and the previous state by attention weight.

    Args:
        inputs: `[batch, num_units + 1]` tensor — cell input concatenated
            with the scalar attention gate `g_t` (assumed layout; confirm
            against the caller).
        state: `[batch, num_units]` previous hidden state.
        scope: optional variable scope name.

    Returns:
        A `(output, new_state)` pair; both are the same tensor.
    """
    with _checked_scope(
        self, scope or "attention_based_gru_cell", reuse=self._reuse):
        # Peel the attention gate off the input columns.
        x_t, g_t = array_ops.split(
            inputs, num_or_size_splits=[self._num_units, 1], axis=1)
        with vs.variable_scope("gates"):
            # Bias starts at 1.0 so the cell initially does not reset.
            r_t = sigmoid(_linear([x_t, state], self._num_units, True, 1.0))
        with vs.variable_scope("candidate"):
            h_tilde = self._activation(
                _linear([x_t, r_t * state], self._num_units, True))
        # Attention gate g_t plays the role of the GRU update gate.
        new_h = g_t * h_tilde + (1 - g_t) * state
    return new_h, new_h