def negativeLogLikelihoodWeighted(self, y, weightPerClass):
    """Class-weighted negative log-likelihood over a 5-D prediction tensor.

    Weighting the per-class cost counters class imbalance in the training data.

    Parameters
    ----------
    y : symbolic int tensor, shape (batch, x, y, z)
        Ground-truth class index per voxel. (assumed — TODO confirm against caller)
    weightPerClass : symbolic vector, length = number of classes
        Multiplicative weight applied to each class's log-probability.

    Returns
    -------
    Symbolic scalar: the mean weighted NLL, to be minimized.
    """
    # Smallest positive float32; added where probabilities are ~0 so log() stays finite.
    tiny = np.finfo(np.float32).tiny
    probs = self.p_y_given_x_train  # shape (batch, class, x, y, z) — presumed; verify upstream
    safe_log_probs = T.log(probs + T.lt(probs, 4 * tiny) * tiny)

    # Broadcast the class weights over the (batch, x, y, z) axes.
    weighted_log_probs = safe_log_probs * weightPerClass.dimshuffle('x', 0, 'x', 'x', 'x')

    # Build broadcastable index grids so that advanced indexing picks, for every
    # voxel, the log-probability of its ground-truth class (axis 1 indexed by y).
    shp = weighted_log_probs.shape
    batch_idx = T.arange(shp[0]).dimshuffle(0, 'x', 'x', 'x')
    x_idx = T.arange(shp[2]).dimshuffle('x', 0, 'x', 'x')
    y_idx = T.arange(shp[3]).dimshuffle('x', 'x', 0, 'x')
    z_idx = T.arange(shp[4]).dimshuffle('x', 'x', 'x', 0)

    return -T.mean(weighted_log_probs[batch_idx, y, x_idx, y_idx, z_idx])