def batch_normalization(self, input, name, is_training, activation_fn=None,
                        scale=True, eps=0.001):
    # is_training and scale are ignored: this is inference-only batch norm,
    # using the statistics and affine parameters stored in self.weights.
    # F is torch.nn.functional.
    output = F.batch_norm(input,
                          self.weights[name + '/moving_mean'],
                          self.weights[name + '/moving_variance'],
                          weight=self.weights[name + '/gamma'],
                          bias=self.weights[name + '/beta'],
                          eps=eps)
    if activation_fn is not None:
        if activation_fn == 'relu':
            output = F.relu(output)
        else:
            raise NotImplementedError
    return output
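
For reference, here is a minimal standalone sketch (not from the original code; all tensors are dummy stand-ins) showing that the F.batch_norm call used above, with training left at its default of False, matches the usual inference-time formula (x - moving_mean) / sqrt(moving_variance + eps) * gamma + beta:

import torch
import torch.nn.functional as F

C = 8
eps = 0.001
x = torch.randn(2, C, 4, 4)                      # NCHW input with C channels
mean = torch.zeros(C)                            # stand-in for .../moving_mean
var = torch.ones(C)                              # stand-in for .../moving_variance
gamma = torch.full((C,), 2.0)                    # stand-in for .../gamma
beta = torch.full((C,), 0.5)                     # stand-in for .../beta

# Same call the method makes (training defaults to False, i.e. inference mode).
y = F.batch_norm(x, mean, var, weight=gamma, bias=beta, eps=eps)

# Manual inference-mode batch norm for comparison.
shape = (1, C, 1, 1)
manual = (x - mean.view(shape)) / torch.sqrt(var.view(shape) + eps)
manual = manual * gamma.view(shape) + beta.view(shape)
print(torch.allclose(y, manual, atol=1e-6))      # True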