def get_layer(self, x_in, C_in, ty_i):
    """Build the symbolic classification layer of the network.

    Each row of ``C_in`` (one candidate per scan step) is scored against the
    input representation ``x_in`` through a one-hidden-layer scorer
    (sigmoid(x.U1 + c.U2 + b) . v0), then all scores are log-softmax
    normalized over the candidates.

    NOTE(review): assumes ``C_in.shape[0]`` is the number of candidates and
    that ``ty_i`` is the (symbolic) index of the gold candidate — confirm
    against the caller.

    Returns a triple:
      * log-probability of the gold candidate ``ty_i`` (training objective term),
      * argmax over the normalized scores (predicted candidate index),
      * a compiled debug function mapping (x_in, C_in, ty_i) to
        (raw scores, all log-probs, gold log-prob).
    """
    num_candidates = C_in.shape[0]

    def _logsumexp(v, axis=None):
        # Numerically stable log-sum-exp: subtract the max inside exp,
        # add it back outside the log.
        v_max_keep = v.max(axis=axis, keepdims=True)
        v_max = v.max(axis=axis)
        return v_max + T.log(T.exp(v - v_max_keep).sum(axis=axis))

    def _score_candidate(candidate, features):
        # Hidden layer over the concatenated (additive) projections of the
        # input features and the current candidate, then a linear readout.
        hidden = T.nnet.sigmoid(
            T.dot(features, self._params['U1'])
            + T.dot(candidate, self._params['U2'])
            + self._params['b'])
        return T.dot(hidden, self._params['v0']).flatten()

    raw_scores, _ = theano.scan(
        _score_candidate,
        sequences=C_in,
        non_sequences=x_in,
        name='classification_layer',
        n_steps=num_candidates)

    # Log-softmax over all candidate scores.
    log_probs = raw_scores.flatten() - _logsumexp(raw_scores)

    # Compiled debug hook: inspect raw scores, normalized log-probs, and
    # the gold candidate's log-prob for concrete inputs.
    f_lc_debug = theano.function(
        [x_in, C_in, ty_i], [raw_scores, log_probs, log_probs[ty_i]])

    return log_probs[ty_i], T.argmax(log_probs), f_lc_debug
# NOTE(review): stray web-scrape residue removed from executable position —
# these were bare identifiers ("评论列表" = "comment list", "文章目录" =
# "article table of contents") and would raise NameError at runtime.