def loss(self, class_scores, labels, images):
    """Build the weighted per-pixel, per-class classification loss.

    Each class is scored as an independent binary (absent/present)
    decision: the logits carry a trailing dimension of 2 per class and a
    sparse softmax cross-entropy is taken over that pair.

    Parameters
    ----------
    class_scores : tensor
        Raw logits; reshaped below to
        ``[batch_size, height, width, num_classes, 2]``.
    labels : tensor
        Integer ground-truth map; reshaped below to
        ``[batch_size, height, width, num_classes]`` (0/1 per class,
        presumably -- TODO confirm against the label producer).
    images : tensor
        Unused here; kept for interface compatibility with callers.

    Returns
    -------
    tuple
        ``(total_loss, softmax(class_scores)[0],
        labels_without_exclude[0], per_pixel_loss[0])`` -- the total loss
        plus first-batch-element tensors for visualization/debugging.

    Side effects
    ------------
    Adds the summed loss to the ``'losses'`` graph collection; the
    returned total is ``tf.add_n`` over that whole collection, so it
    includes any regularization losses registered elsewhere.
    """
    labels = tf.cast(labels, tf.int64)

    # Zero out pixels of the "exclude" class so they land in channel 0;
    # their loss contribution is masked to zero again further below.
    if self.exclude_class is not None:
        keep = tf.cast(
            tf.not_equal(labels, tf.cast(self.exclude_class, tf.int64)),
            tf.int64)
        labels_without_exclude = labels * keep
    else:
        labels_without_exclude = labels

    labels_without_exclude = tf.reshape(
        labels_without_exclude,
        [self.batch_size, self.height, self.width, self.num_classes])
    cls_scores = tf.reshape(
        class_scores,
        [self.batch_size, self.height, self.width, self.num_classes, 2])
    # Softmax over the trailing size-2 dim: one binary decision per class.
    loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=labels_without_exclude, logits=cls_scores, name="loss")

    # Positive pixels get their per-class label weight; everywhere the
    # weighted label is 0 (background / negatives) the per-class
    # background weight is used instead.
    weights = tf.tile([[[self.label_weights]]],
                      [self.batch_size, self.height, self.width, 1])
    weights = weights * tf.cast(labels_without_exclude, dtype=tf.float32)
    background_weights = tf.tile(
        [[[self.background_weights]]],
        [self.batch_size, self.height, self.width, 1])
    weights = tf.where(tf.equal(weights, 0),
                       tf.ones_like(weights) * background_weights,
                       weights)
    loss = loss * weights

    # Excluded-class pixels contribute nothing to the loss.
    if self.exclude_class is not None:
        loss = tf.where(
            tf.equal(labels, tf.cast(self.exclude_class, tf.int64)),
            tf.zeros_like(loss, dtype=tf.float32),
            loss)

    loss_sum = tf.reduce_sum(loss)
    tf.add_to_collection('losses', tf.identity(loss_sum, name="losses"))
    return (tf.add_n(tf.get_collection('losses'), name='total_loss'),
            tf.nn.softmax(cls_scores)[0],
            labels_without_exclude[0],
            loss[0])