def forward(self, x):
    """Predict per-joint 2-D coordinates with an AlexNet-style CNN.

    Args:
        x: input image batch; assumed (N, 3, 227, 227) so the final
           conv feature map flattens to 256*6*6 — TODO confirm against
           the conv/fc layer definitions in ``__init__``.

    Returns:
        Tensor of shape (N, self.Nj, 2): one (x, y) pair per joint.
    """
    # layer1: conv + ReLU + overlapping 3x3 max-pool, stride 2
    h = F.relu(self.conv1(x))
    h = F.max_pool2d(h, 3, stride=2)
    # layer2
    h = F.relu(self.conv2(h))
    h = F.max_pool2d(h, 3, stride=2)
    # layer3-5: three stacked convs, single pool at the end
    h = F.relu(self.conv3(h))
    h = F.relu(self.conv4(h))
    h = F.relu(self.conv5(h))
    h = F.max_pool2d(h, 3, stride=2)
    # Flatten per sample. Fixing the batch dimension to size(0) (instead
    # of the original view(-1, 256*6*6)) makes a wrongly-sized feature
    # map raise immediately rather than silently re-batching.
    h = h.view(h.size(0), -1)
    # layer6-8: fully-connected head; dropout (default p=0.5) is active
    # only while self.training is True, i.e. a no-op at eval time.
    h = F.dropout(F.relu(self.fc6(h)), training=self.training)
    h = F.dropout(F.relu(self.fc7(h)), training=self.training)
    h = self.fc8(h)
    return h.view(-1, self.Nj, 2)
# (scraping artifact from the source web page — "Comment list" / "Article contents"; not part of the code)