def forward_cpu(self, inputs):
    x, t = inputs
    if chainer.is_debug():
        self._check_input_values(x, t)

    # Compute the log-softmax of the scores along the class axis.
    log_y = softmax_log(x, False)
    if self.cache_score:
        # Cache the softmax probabilities for reuse in the backward pass.
        self.y = numpy.exp(log_y)
    # Move the class axis to the front and flatten the remaining axes, so
    # each column corresponds to one (example, spatial position) pair.
    log_yd = numpy.rollaxis(log_y, 1)
    log_yd = log_yd.reshape(len(log_yd), -1)
    # Gather the log-probability of the target class at every position;
    # ignored targets are clamped to index 0 to keep the gather in range.
    log_p = log_yd[numpy.maximum(t.ravel(), 0), six.moves.range(t.size)]
    # deal with the case where the SoftmaxCrossEntropy is
    # unpickled from the old version
    if getattr(self, 'normalize', True):
        count = (t != self.ignore_label).sum()
    else:
        count = len(x)
    self._coeff = 1.0 / max(count, 1)
    # Mask out ignored positions and average the negative log-likelihood.
    y = (log_p * (t.ravel() != self.ignore_label)).sum(keepdims=True) \
        * (-self._coeff)
    return y.reshape(()),
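
For reference, the same computation can be reproduced outside the class with plain NumPy. This is only a minimal sketch: the toy batch, the `ignore_label` value of -1, and the helper name `log_softmax` below are assumptions for illustration, not part of the Chainer source.

import numpy

def log_softmax(x):
    # Numerically stable log-softmax over the class axis (axis 1);
    # a stand-in for Chainer's internal softmax_log helper.
    m = x.max(axis=1, keepdims=True)
    z = x - m
    return z - numpy.log(numpy.exp(z).sum(axis=1, keepdims=True))

# Toy batch: 3 examples, 4 classes; the last target uses ignore_label = -1.
x = numpy.array([[2.0, 1.0, 0.1, 0.0],
                 [0.5, 2.5, 0.2, 0.1],
                 [1.0, 1.0, 1.0, 1.0]], dtype=numpy.float32)
t = numpy.array([0, 1, -1], dtype=numpy.int32)
ignore_label = -1

log_y = log_softmax(x)
# Pick the log-probability of the target class for each example,
# clamping ignored targets to index 0 so the gather stays in range.
log_p = log_y[numpy.arange(t.size), numpy.maximum(t, 0)]
mask = (t != ignore_label)
count = max(mask.sum(), 1)
loss = -(log_p * mask).sum() / count
print(loss)  # mean cross entropy over the non-ignored examples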