def minibatch_update(self, x, y, lr, regularization):
    """Run one SGD step of forward + backward propagation on a minibatch.

    Args:
        x: input batch, shape (n_sample, n_features).
        y: integer class labels, shape (n_sample,).
        lr: learning rate.
        regularization: L2 weight-decay coefficient applied to the weights
            (not the biases).

    Returns:
        The negative log-likelihood loss of the batch (as computed by
        ``neg_log_likelihood`` before the backward pass).

    Side effects:
        Updates ``self.W[i]`` and ``self.b[i]`` in place for every layer.
    """
    n_sample = x.shape[0]

    # ---- Forward pass: cache each hidden layer's activation for backprop.
    info = x
    hidden_cache = []
    for i in range(self.n_hidden + 1):  # range (not xrange): Python-3 compatible
        if i == self.n_hidden:
            # Output layer: softmax over class scores.
            probs = softmax(info.dot(self.W[i]) + self.b[i])
        else:
            info = sigmoid(info.dot(self.W[i]) + self.b[i])
            hidden_cache.append(info)
    loss = neg_log_likelihood(probs, y)

    # ---- Backward pass.
    # Gradient of NLL w.r.t. softmax logits: probs - one_hot(y).
    # NOTE(review): this is the SUM over the batch, not the mean — lr must be
    # tuned relative to the batch size. Confirm this matches the caller's intent.
    probs[np.arange(n_sample), y] -= 1.0
    errors = probs
    for i in range(self.n_hidden, -1, -1):
        if i >= 1:
            hidden_out = hidden_cache[i - 1]
            # Propagate the error through W[i] BEFORE updating it.
            grad_hidden_out = errors.dot(self.W[i].T)
            # NOTE(review): the decay term is not scaled by lr (decoupled
            # weight decay); verify this is intended rather than
            # lr * (grad + regularization * W).
            self.W[i] -= lr * hidden_out.T.dot(errors) + regularization * self.W[i]
            self.b[i] -= lr * np.sum(errors, axis=0)  # biases: no weight decay
            # Sigmoid derivative: s * (1 - s), with s the cached activation.
            errors = hidden_out * (1 - hidden_out) * grad_hidden_out
        else:
            # First layer: the "previous activation" is the raw input x.
            hidden_out = x
            self.W[i] -= lr * hidden_out.T.dot(errors) + regularization * self.W[i]
            self.b[i] -= lr * np.sum(errors, axis=0)
    return loss
# (scraped-page residue, kept as a comment so the file parses:
#  "评论列表" = comment list, "文章目录" = article table of contents)