def h(self, x, train, finetune):
    # First convolution layer.
    h = self[0](x)
    h = F.dropout(h, ratio=self.dropout, train=train)
    # Residual blocks.
    for i in range(1, len(self) - 2):
        h = self[i](h, train, finetune)
    # Batch normalization.
    h = self[-2](h, test=not train, finetune=finetune)
    h = F.relu(h)
    # Average pooling.
    h = F.average_pooling_2d(h, ksize=2, pad=0)
    # Prediction layer (5 output classes).
    h = self[-1](h)
    h = F.reshape(h, (h.data.shape[0], 5))
    return h
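
For context, here is a minimal sketch of the link layout this forward pass assumes, written against the Chainer v1-style API used above (F.dropout taking train=, BatchNormalization taking test=/finetune=). The indexing implies a chainer.ChainList laid out as [first conv, residual blocks..., BatchNormalization, Linear]. The class names ResBlock and ResNet, the channel counts, the block body, and the 32x32 input size are illustrative assumptions, not taken from the original post.

import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L


class ResBlock(chainer.Chain):
    """Illustrative residual block matching the (h, train, finetune) call signature."""

    def __init__(self, n_ch):
        super(ResBlock, self).__init__(
            conv1=L.Convolution2D(n_ch, n_ch, 3, pad=1),
            bn1=L.BatchNormalization(n_ch),
            conv2=L.Convolution2D(n_ch, n_ch, 3, pad=1),
            bn2=L.BatchNormalization(n_ch))

    def __call__(self, x, train, finetune):
        h = F.relu(self.bn1(self.conv1(x), test=not train, finetune=finetune))
        h = self.bn2(self.conv2(h), test=not train, finetune=finetune)
        return F.relu(h + x)  # identity shortcut


class ResNet(chainer.ChainList):
    """Layout assumed by h(): self[0] conv, self[1:-2] blocks, self[-2] BN, self[-1] Linear."""

    def __init__(self, n_blocks=3, n_ch=16, dropout=0.5):
        links = [L.Convolution2D(3, n_ch, 3, pad=1)]
        links += [ResBlock(n_ch) for _ in range(n_blocks)]
        links += [L.BatchNormalization(n_ch),
                  # A 32x32 input is halved to 16x16 by the 2x2 pooling in h().
                  L.Linear(n_ch * 16 * 16, 5)]
        super(ResNet, self).__init__(*links)
        self.dropout = dropout

    # Forward pass from the snippet above, unchanged.
    def h(self, x, train, finetune):
        h = self[0](x)
        h = F.dropout(h, ratio=self.dropout, train=train)
        for i in range(1, len(self) - 2):
            h = self[i](h, train, finetune)
        h = self[-2](h, test=not train, finetune=finetune)
        h = F.relu(h)
        h = F.average_pooling_2d(h, ksize=2, pad=0)
        h = self[-1](h)
        h = F.reshape(h, (h.data.shape[0], 5))
        return h


model = ResNet()
x = chainer.Variable(np.zeros((8, 3, 32, 32), dtype=np.float32))
y = model.h(x, train=False, finetune=False)
print(y.data.shape)  # (8, 5)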