def forward_one_step(self, x, test):
    """Run one forward pass through the network and return its output.

    Args:
        x: Input variable (batch of feature maps / images).
        test: Passed through to every batch-normalization layer; when
            True, batchnorm uses its accumulated population statistics
            instead of per-batch statistics.

    Returns:
        The final activation in the chain (network output variable).

    Raises:
        NotImplementedError: If ``self.projection_type`` is neither
            ``"fully_connection"`` nor ``"global_average_pooling"``.
    """
    f = activations[self.activation_function]
    chain = [x]

    # Hidden convolutional layers.
    for i in range(self.n_hidden_layers):
        u = getattr(self, "layer_%i" % i)(chain[-1])
        # Batchnorm applies to every hidden layer, except the input layer
        # when apply_batchnorm_to_input is explicitly False.
        # (The `is False` test preserves the original exact semantics.)
        if self.apply_batchnorm and not (
            i == 0 and self.apply_batchnorm_to_input is False
        ):
            u = getattr(self, "batchnorm_%i" % i)(u, test=test)
        chain.append(f(u))

    # Projection from the last hidden representation to the output.
    if self.projection_type == "fully_connection":
        u = self.projection_layer(chain[-1])
        if self.apply_batchnorm:
            u = self.projection_batchnorm(u, test=test)
        chain.append(f(u))
    elif self.projection_type == "global_average_pooling":
        # Collapse each feature map to one value, flatten to
        # (batch_size, n_maps), then project.
        batch_size = chain[-1].data.shape[0]
        n_maps = chain[-1].data[0].shape[0]
        chain.append(F.average_pooling_2d(chain[-1], self.top_filter_size))
        chain.append(F.reshape(chain[-1], (batch_size, n_maps)))
        u = self.projection_layer(chain[-1])
        if self.apply_batchnorm:
            u = self.projection_batchnorm(u, test=test)
        chain.append(f(u))
    else:
        raise NotImplementedError()

    return chain[-1]
# NOTE: removed stray page-scrape residue ("评论列表" / "文章目录" — blog
# navigation text, not code); it was a syntax error at module level.