import numpy as np
from scipy.special import logsumexp  # scipy.misc.logsumexp was removed in newer SciPy


def outActivate(aValue, logScale=False):
    """
    Output activation function: softmax; returns an array with the
    same shape as aValue (log-probabilities when logScale is True).
    """
    if logScale:
        # log-softmax: log p_i = a_i - logsumexp(a), computed without overflow
        return aValue - logsumexp(aValue)
    else:
        # softmax: exponentiate and normalize over axis 0
        return np.exp(aValue) / np.sum(np.exp(aValue), axis=0)
## node a: pre-activation
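
# A minimal usage sketch (an assumed example, not from the original post):
# apply outActivate to a hypothetical pre-activation vector and check that
# the two scales agree, i.e. exp(log-softmax) equals softmax.
if __name__ == "__main__":
    a = np.array([1.0, 2.0, 3.0])             # hypothetical pre-activation values
    probs = outActivate(a)                     # softmax probabilities, sum to 1
    logProbs = outActivate(a, logScale=True)   # log-probabilities
    assert np.allclose(probs, np.exp(logProbs))
    print(probs)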