def add_loss(self, net, task_layer_list, deploy):
    """Add the score/prob head and, unless deploying, the loss and error layers."""
    # Concatenate the per-task score layers in the order given by task_layer_list,
    # then compute element-wise sigmoid probabilities.
    lh.add_concat(net, bottom=task_layer_list, name='score' + self.postfix, axis=1)
    lh.add_sigmoid(net, bottom=net['score' + self.postfix], name='prob' + self.postfix,
                   in_place=False)
    if not deploy:
        if self.loss_layer == 'Sigmoid':
            # Sigmoid cross-entropy loss takes the raw scores (it applies its own sigmoid).
            lh.add_sigmoid_entropy_loss(net, bottom=[net['score' + self.postfix], net[self.label_names]],
                                        name='loss' + self.postfix, loss_weight=1.0, phase=caffe.TRAIN)
        elif self.loss_layer == 'Square':
            # Squared (Euclidean) loss compares the sigmoid probabilities with the labels.
            lh.add_euclidean_loss(net, bottom=[net['prob' + self.postfix], net[self.label_names]],
                                  name='loss' + self.postfix, loss_weight=1.0, phase=caffe.TRAIN)
        else:
            raise ValueError('The loss layer type {} is not recognized!'.format(self.loss_layer))
        # Multilabel error layer reports per-label error against the ground-truth labels.
        lh.add_multilabel_err_layer(net, bottom=[net['prob' + self.postfix], net[self.label_names]],
                                    name='error' + self.postfix)
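
# A rough standalone sketch of what the head built above might correspond to when written
# directly against pycaffe's NetSpec API. The lh.* helpers are not shown in this snippet,
# so the layer choices below (Concat, Sigmoid, SigmoidCrossEntropyLoss, EuclideanLoss) are
# an assumption about what they wrap; the multilabel error layer is omitted here because it
# is presumably a custom layer. `build_head_sketch`, `task_tops`, and `label_top` are
# hypothetical names used only for this illustration.
import caffe
from caffe import layers as L


def build_head_sketch(n, task_tops, label_top, loss_layer='Sigmoid', postfix=''):
    """Append a concat/sigmoid head plus a training loss to a caffe.NetSpec `n`."""
    # Concatenate the per-task tops along the channel axis, then squash to probabilities.
    n['score' + postfix] = L.Concat(*task_tops, axis=1)
    n['prob' + postfix] = L.Sigmoid(n['score' + postfix])
    if loss_layer == 'Sigmoid':
        # Cross-entropy loss takes the raw scores; it applies the sigmoid internally.
        n['loss' + postfix] = L.SigmoidCrossEntropyLoss(n['score' + postfix], label_top,
                                                        loss_weight=1.0)
    elif loss_layer == 'Square':
        # Squared loss compares the probabilities directly with the labels.
        n['loss' + postfix] = L.EuclideanLoss(n['prob' + postfix], label_top,
                                              loss_weight=1.0)
    else:
        raise ValueError('The loss layer type {} is not recognized!'.format(loss_layer))
    return n

# Example use (hypothetical top names):
#   n = caffe.NetSpec()
#   ... define data/label and the per-task score tops ...
#   build_head_sketch(n, [n.task_a, n.task_b], n.label, loss_layer='Sigmoid')
#   print(n.to_proto())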