def contractive_penality(self, h, linear_hid, contraction_level=0.0,
                         batch_size=-1):
    """Compute the contractive penalty term for a contractive autoencoder.

    The penalty is the (approximate) squared Frobenius norm of the Jacobian
    of the hidden representation ``h`` with respect to the input, averaged
    over the mini-batch and scaled by ``contraction_level``
    (Rifai et al., 2011).

    Parameters
    ----------
    h : Theano symbolic variable
        Hidden-layer activations (post-nonlinearity).
    linear_hid : Theano symbolic variable
        Pre-activation (linear) hidden values that produced ``h``.
    contraction_level : float, optional
        Weight of the penalty in the overall cost (default 0.0).
    batch_size : int
        Number of examples in the mini-batch; must be a positive integer.

    Returns
    -------
    Theano symbolic scalar
        ``contraction_level`` times the mean squared Frobenius norm of
        the Jacobian.

    Raises
    ------
    ValueError
        If ``batch_size`` is not strictly positive (the default -1 sentinel
        means the caller forgot to supply it).
    """
    # Reject the unset sentinel (-1), zero, and any other non-positive
    # value: a negative batch_size would silently flip the penalty's sign.
    if batch_size <= 0:
        raise ValueError("invalid batch size.")

    # d h_j / d a_j for each hidden unit j (elementwise activation
    # derivative at the pre-activation values).
    grad = T.grad(h.sum(), linear_hid)

    # ||J||_F^2 per example = sum_j (dh_j/da_j)^2 * sum_i W_ij^2.
    # NOTE(fix): the sum-of-squares of each weight column is
    # T.sqr(W).sum(axis=0); the previous T.sqr(W.sum(axis=0)) computed
    # the square of the column sum instead.
    jacob = T.dot(T.sqr(grad), T.sqr(self.hidden.W).sum(axis=0))

    # Average over the mini-batch, then scale by the penalty coefficient.
    frob_norm_jacob = T.sum(jacob) / batch_size
    return contraction_level * frob_norm_jacob