def __iadd__(self, gradient):
    """
    Add two gradients together (in-place accumulation)
    """
    # Tensor layer
    if gradient.dV is not None:  # Leaf nodes only depend on the softmax error, so no tensor gradient is set
        self.dV += gradient.dV  # Tensor of the RNTN layer
        self.dW += gradient.dW  # Regular term of the RNTN layer
        self.db += gradient.db  # Bias for the regular term of the RNTN layer
    # Softmax (computed in any case)
    self.dWs += gradient.dWs  # Softmax classifier
    self.dbs += gradient.dbs  # Bias of the softmax classifier
    # Words
    self.dL += gradient.dL  # Merge the two lists (the dL gradients are propagated up to the parent node)
    return self
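
# Usage sketch (hedged): a minimal stand-alone example showing how such an
# __iadd__ lets per-sample gradients accumulate over a minibatch via `+=`.
# The Gradient class name, field shapes (word_dim d, tensor dV of shape
# (d, 2d, 2d)), and the (word_id, delta) convention for dL are assumptions
# for illustration, not the original class definition.
import numpy as np

class Gradient:
    def __init__(self, word_dim=10, nb_classes=5, is_leaf=False):
        # Tensor layer gradients (None on leaf nodes, which only receive the softmax error)
        self.dV = None if is_leaf else np.zeros((word_dim, 2*word_dim, 2*word_dim))
        self.dW = None if is_leaf else np.zeros((word_dim, 2*word_dim))
        self.db = None if is_leaf else np.zeros(word_dim)
        # Softmax classifier gradients (always present)
        self.dWs = np.zeros((nb_classes, word_dim))
        self.dbs = np.zeros(nb_classes)
        # Word-vector gradients, collected as (word_id, delta) pairs
        self.dL = []

    def __iadd__(self, gradient):
        # Same logic as the method above
        if gradient.dV is not None:  # Skip the tensor part for leaf gradients
            self.dV += gradient.dV
            self.dW += gradient.dW
            self.db += gradient.db
        self.dWs += gradient.dWs
        self.dbs += gradient.dbs
        self.dL += gradient.dL  # List concatenation
        return self

# Accumulate per-sample gradients over a (stand-in) minibatch:
total = Gradient()
for _ in range(4):  # stand-in for a loop over training trees
    grad = Gradient()
    grad.dL.append((0, np.ones(10)))
    total += grad  # dispatches to Gradient.__iadd__
assert len(total.dL) == 4  # the four word-gradient lists were merged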