import numpy as np

def softmax_cost(out, y, theta3, filt1, filt2):
    ## theta3, filt1, filt2 are carried for a regularised loss but unused here
    eout = np.exp(out, dtype=np.float128)  # extended precision to limit overflow
    probs = eout / np.sum(eout)            # softmax probabilities
    p = np.sum(y * probs)                  # picks out the true-class probability (y is one-hot)
    cost = -np.log(p)                      ## (Only data loss. No regularised loss)
    return cost, probs
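
A minimal usage sketch (the 10-class shape and the column-vector layout are assumptions for illustration; `theta3`, `filt1`, `filt2` can be passed as `None` since this version ignores them):

out = np.random.randn(10, 1)                           # hypothetical raw scores from the final layer
y = np.zeros((10, 1)); y[3] = 1                        # one-hot label, assumed true class 3
cost, probs = softmax_cost(out, y, None, None, None)   # filters unused in the data-loss-only version
print(cost, np.sum(probs))                             # probs should sum to 1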
## Returns gradients for all the parameters in each iteration