def differentiable_backward(self, g):
    # Manually propagate the gradient g back through this block,
    # in reverse order: activation -> normalization -> conv/linear layer.
    if self.normalize_input:
        raise NotImplementedError
    # Backward through the activation function.
    if self.activation is F.leaky_relu:
        g = backward_leaky_relu(self.x, g)
    elif self.activation is F.relu:
        g = backward_relu(self.x, g)
    elif self.activation is F.tanh:
        g = backward_tanh(self.x, g)
    elif self.activation is F.sigmoid:
        g = backward_sigmoid(self.x, g)
    elif self.activation is not None:
        raise NotImplementedError
    # Backward through the normalization layer (only layer norm is handled).
    if self.norm == 'ln':
        g = backward_layernormalization(self.nx, g, self.n)
    elif self.norm is not None:
        raise NotImplementedError
    # Backward through the main layer: convolution, linear, or deconvolution.
    if self.nn == 'down_conv' or self.nn == 'conv':
        g = backward_convolution(None, g, self.c)
    elif self.nn == 'linear':
        g = backward_linear(None, g, self.c)
    elif self.nn == 'up_deconv':
        g = backward_deconvolution(None, g, self.c)
    else:
        raise NotImplementedError
    return g
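
For reference, the backward_* helpers called above compute the gradient of each forward operation by hand. Below is a minimal sketch of what backward_leaky_relu and backward_sigmoid might look like, assuming a Chainer-style setup where self.x holds the input that was fed to the activation in the forward pass; this is an illustrative guess, not the project's actual implementation.

import chainer.functions as F

def backward_leaky_relu(x, g, slope=0.2):
    # Hypothetical sketch: d/dx leaky_relu(x) is 1 where x > 0 and `slope`
    # elsewhere (0.2 is Chainer's default slope).
    mask = (x.data > 0).astype(x.dtype)
    return g * (mask + slope * (1.0 - mask))

def backward_sigmoid(x, g):
    # Hypothetical sketch: d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x)).
    y = F.sigmoid(x)
    return g * y * (1.0 - y)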