# lifted_struct_loss.py
import numpy as np

import chainer.functions as F


def lifted_struct_loss(f_a, f_p, alpha=1.0):
"""Lifted struct loss function.
Args:
f_a (~chainer.Variable): Feature vectors as anchor examples.
All examples must be different classes each other.
f_p (~chainer.Variable): Positive examples corresponding to f_a.
Each example must be the same class for each example in f_a.
alpha (~float): The margin parameter.
Returns:
~chainer.Variable: Loss value.
See: `Deep Metric Learning via Lifted Structured Feature Embedding \
<http://www.cv-foundation.org/openaccess/content_cvpr_2016/papers/\
Song_Deep_Metric_Learning_CVPR_2016_paper.pdf>`_
"""
    assert f_a.shape == f_p.shape, 'f_a and f_p must have the same shape.'
    n = 2 * f_a.shape[0]  # use shape[0] because len(Variable) returns its total size

    # Stack anchors and positives into one batch of n examples and compute the
    # pairwise squared Euclidean distance matrix.
    f = F.vstack((f_a, f_p))
    D_sq = squared_distance_matrix(f)

    # Positive pairs: example i (anchor) is paired with example i + n // 2 (positive).
    pairs_p = np.arange(n).reshape(2, -1)  # indexes of positive pairs
    # Negative pairs: for each positive pair (i, j), every other example in the
    # batch acts as a negative for both i and j.
    row = []
    col = []
    for i, j in pairs_p.T:
        row.append([i] * (n - 2) + [j] * (n - 2))
        col.append(np.tile(np.delete(np.arange(n), (i, j)), 2))
    row = np.ravel(row)
    col = np.ravel(col)
    pairs_n = np.vstack((row, col))

    distances_p = F.sqrt(D_sq[pairs_p[0], pairs_p[1]])
    distances_n = F.sqrt(D_sq[pairs_n[0], pairs_n[1]])
    distances_n = distances_n.reshape((n // 2, -1))

    # J_ij = log(sum_k exp(alpha - D_ik) + sum_l exp(alpha - D_jl)) + D_ij,
    # accumulated as (1 / (2 * |P|)) * sum_ij max(0, J_ij)^2 with |P| = n // 2 pairs.
    loss_ij = F.logsumexp(alpha - distances_n, axis=1) + distances_p
    return F.sum(F.relu(loss_ij) ** 2) / n
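

# The squared_distance_matrix helper used above is not defined in this file.
# Below is a minimal sketch (an assumption, not the original implementation):
# it computes pairwise squared Euclidean distances ||f_i - f_j||^2 for the rows
# of a feature matrix via ||f_i||^2 + ||f_j||^2 - 2 * f_i . f_j, using
# chainer.functions so the result stays differentiable.
def squared_distance_matrix(f):
    # f: (n, d) Variable of feature vectors; returns an (n, n) Variable D_sq
    # with D_sq[i, j] = ||f_i - f_j||^2.
    n = f.shape[0]
    sq_norms = F.sum(f * f, axis=1)           # (n,) vector of ||f_i||^2
    dots = F.matmul(f, f, transb=True)        # (n, n) Gram matrix of inner products
    D_sq = (F.broadcast_to(F.reshape(sq_norms, (n, 1)), (n, n))
            + F.broadcast_to(F.reshape(sq_norms, (1, n)), (n, n))
            - 2 * dots)
    # Clamp tiny negative values caused by floating-point error.
    return F.relu(D_sq)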
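
if __name__ == '__main__':
    # Minimal usage sketch (shapes and values are illustrative assumptions).
    # Five positive pairs of 8-dimensional features: row i of f_a and row i of
    # f_p form a positive pair, and the five anchors come from different classes.
    import chainer

    rng = np.random.RandomState(0)
    f_a = chainer.Variable(rng.randn(5, 8).astype(np.float32))
    f_p = chainer.Variable(rng.randn(5, 8).astype(np.float32))
    loss = lifted_struct_loss(f_a, f_p, alpha=1.0)
    print(loss.data)  # scalar loss value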