def softmax(x, gap = False):
    """Return the column index of the per-row maximum of ``x``.

    Despite the name, this does NOT compute the softmax function: it is a
    row-wise argmax (the name follows R's ``nnet::softmax``, from which
    this code was evidently ported).

    Parameters
    ----------
    x : 2-D array-like, shape (n_samples, n_classes)
        Score matrix; one row per sample, one column per class.
    gap : bool, optional
        Reserved for additionally returning the gap between the winning
        score and the rest; not implemented.

    Returns
    -------
    dict
        ``{'pclass': ndarray of shape (n_samples, 1)}`` holding the
        integer index of each row's largest column.

    Raises
    ------
    ValueError
        If ``gap`` is True (not implemented).
    """
    # Local import keeps the fix self-contained: the legacy scipy.zeros /
    # scipy.integer / scipy.absolute aliases used previously were removed
    # from modern SciPy; NumPy is the correct namespace for them.
    import numpy as np

    # Fail fast: the original ran broken, shape-mismatched code before
    # raising, so the exception type was not even deterministic.
    if gap:
        raise ValueError('gap = True is not implemented yet')

    x = np.asarray(x)
    n_rows, n_cols = x.shape
    # .copy() is essential: x[:, 0] is a view, and writing into maxdist
    # below would otherwise silently mutate the caller's array
    # (a real bug in the original).
    maxdist = x[:, 0].copy()
    pclass = np.zeros((n_rows, 1), dtype=int)
    for col in range(1, n_cols):
        better = x[:, col] > maxdist  # rows where column `col` beats the best so far
        pclass[better] = col
        maxdist[better] = x[better, col]
    return {'pclass': pclass}
# end of softmax
# =========================================
# (scraped-page residue removed as code: "评论列表" = comment list, "文章目录" = article table of contents)