def forward_one_step(self, x, test):
    """Run one forward pass through all hidden layers and the output layer.

    Args:
        x: Input variable fed to ``layer_0``.
        test (bool): True at inference time — passed to batchnorm as
            ``test=test`` and inverted for dropout's ``train`` flag.

    Returns:
        The activated output of ``layer_<n_hidden_layers>``.
    """
    f = activations[self.activation_function]
    h = x
    # Hidden layers: linear -> (optional batchnorm) -> activation -> (optional dropout)
    for i in range(self.n_hidden_layers):
        u = getattr(self, "layer_%i" % i)(h)
        # Batchnorm is skipped only for the first layer when
        # apply_batchnorm_to_input is exactly False (identity check kept
        # from the original — NOTE(review): presumably a plain bool; confirm).
        if self.apply_batchnorm and (i != 0 or self.apply_batchnorm_to_input is not False):
            u = getattr(self, "batchnorm_%i" % i)(u, test=test)
        h = f(u)
        if self.apply_dropout:
            h = F.dropout(h, train=not test)
    # Output layer: same pattern, but batchnorm (if enabled) is always applied
    # and there is no dropout.
    u = getattr(self, "layer_%i" % self.n_hidden_layers)(h)
    if self.apply_batchnorm:
        u = getattr(self, "batchnorm_%i" % self.n_hidden_layers)(u, test=test)
    return f(u)
# NOTE(review): removed stray web-page navigation text ("评论列表" / "文章目录",
# i.e. "comment list" / "article table of contents") — extraction residue,
# not part of the program; left as bare identifiers it would raise NameError.