def _step(self, x_, m_, h_, c_):
    # x_: input pre-activation, at least 6 * hidden_dim wide (slices 0-4 feed the
    #     gates, slice 5 is the raw carry input). m_: sequence mask for this step.
    #     h_, c_: previous hidden and cell states.
    preact = tensor.dot(h_, self.U) + _slice(x_, 0, self.hidden_dim * 5)
    # i: input. f: forget. o: output. t: transform.
    # j: input w/ non-linearity. k: input w/o non-linearity.
    i = tensor.nnet.sigmoid(_slice(preact, 0, self.hidden_dim))
    f = tensor.nnet.sigmoid(_slice(preact, 1, self.hidden_dim) + self.forget_bias)
    o = tensor.nnet.sigmoid(_slice(preact, 2, self.hidden_dim))
    t = tensor.nnet.sigmoid(_slice(preact, 3, self.hidden_dim))
    j = tensor.tanh(_slice(preact, 4, self.hidden_dim))
    k = _slice(x_, 5, self.hidden_dim)
    # Standard LSTM cell update, then keep the old cell state on masked (padded) steps.
    c = f * c_ + i * j
    c = m_[:, None] * c + (1. - m_)[:, None] * c_
    # Highway-style output: the transform gate t blends the gated LSTM output
    # with the raw (non-linearity-free) input k.
    h = t * o * tensor.tanh(c) + (1. - t) * k
    if self.recurrent_dropout_layer is not None:
        h = self.recurrent_dropout_layer.connect(h, self.is_train)
    h = m_[:, None] * h + (1. - m_)[:, None] * h_
    return h, c
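
The `_slice` helper used above is not shown in this excerpt. Below is a minimal sketch, assuming it follows the common Theano LSTM convention of picking out the n-th `dim`-wide block along the last axis of a concatenated pre-activation matrix; the name and behaviour are assumptions, not the original author's code.

def _slice(_x, n, dim):
    # Return the n-th dim-wide slice along the last axis.
    # Handles both 2-D (n_samples, k * dim) and 3-D (n_steps, n_samples, k * dim) inputs.
    if _x.ndim == 3:
        return _x[:, :, n * dim:(n + 1) * dim]
    return _x[:, n * dim:(n + 1) * dim]

In a typical Theano recurrent layer, `_step` would then be driven by `theano.scan`, with the precomputed input projection and the mask passed as `sequences` and zero-initialized `h`/`c` passed as `outputs_info`; the exact wiring depends on the surrounding class, which is not shown here.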