def forward_cpu(self, inputs):
    """Compute the softmax cross entropy loss on CPU.

    Args:
        inputs: Tuple ``(x, t)`` where ``x`` holds raw class scores with
            the class axis at position 1 (assumes shape
            ``(batch, n_classes, ...)`` -- TODO confirm against caller)
            and ``t`` holds integer labels.  Entries of ``t`` equal to
            ``self.ignore_label`` are excluded from the loss.

    Returns:
        A one-element tuple containing a 0-dimensional array with the
        (optionally normalized) scalar loss.
    """
    x, t = inputs
    if chainer.is_debug():
        self._check_input_values(x, t)

    log_y = log_softmax._log_softmax(x, self.use_cudnn)
    if self.cache_score:
        # Cache the softmax output so the backward pass can reuse it.
        self.y = numpy.exp(log_y)
    if self.class_weight is not None:
        # Broadcast the per-class weight vector along axis 1 (the class
        # axis).  Keep the broadcast result in a LOCAL variable: the
        # original code assigned it back to ``self.class_weight``, which
        # cached a view tied to this particular ``x.shape`` and made a
        # later call with a different batch size fail inside ``reshape``.
        class_weight = self.class_weight
        if class_weight.shape != x.shape:
            # d iterates axis indices, so this builds e.g. (1, -1, 1, 1):
            # -1 at the class axis, 1 everywhere else.
            shape = [1 if d != 1 else -1 for d in six.moves.range(x.ndim)]
            class_weight = numpy.broadcast_to(
                class_weight.reshape(shape), x.shape)
        log_y *= class_weight
    # Bring the class axis to the front and flatten the remaining axes so
    # each sample's log-probability can be gathered with fancy indexing.
    log_yd = numpy.rollaxis(log_y, 1)
    log_yd = log_yd.reshape(len(log_yd), -1)
    # Clamp ignored labels (e.g. -1) to 0 so the index stays valid; their
    # contribution is masked out below.
    log_p = log_yd[numpy.maximum(t.ravel(), 0), numpy.arange(t.size)]

    # deal with the case where the SoftmaxCrossEntropy is
    # unpickled from the old version
    # NOTE(review): upstream guards this with
    # ``getattr(self, 'normalize', True)`` for that purpose; a plain
    # attribute read will raise on such old pickles -- verify.
    if self.normalize:
        count = (t != self.ignore_label).sum()
    else:
        count = len(x)
    # Guard against division by zero when every label is ignored.
    self._coeff = 1.0 / max(count, 1)

    y = (log_p * (t.ravel() != self.ignore_label)).sum(keepdims=True) \
        * (-self._coeff)
    return y.reshape(()),
# Scraped page footer (non-code), preserved as a comment:
# source listing of softmax_cross_entropy.py (Python);
# page counters: views 23, favorites 0, likes 0, comments 0;
# navigation: comment list, article table of contents.