def forward(self, x):
    """Compute class log-probabilities for a batch of inputs.

    Args:
        x: input image batch; assumed to be (N, 1, 28, 28) so that the
           flattened conv features are 320-dimensional -- TODO confirm
           against the layer definitions in ``__init__``.

    Returns:
        Tensor of per-class log-softmax scores, one row per input
        (width determined by ``self.fc2``).
    """
    x = F.relu(F.max_pool2d(self.conv1(x), 2))
    # A different (control flow based) way to control dropout:
    # apply the conv-layer dropout only while training.
    if self.training:
        x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
    else:
        x = F.relu(F.max_pool2d(self.conv2(x), 2))
    x = x.view(-1, 320)
    x = F.relu(self.fc1(x))
    if self.training:
        # Guarded by self.training above, so training=True is correct here.
        x = F.dropout(x, training=True)
    x = self.fc2(x)
    # Sanity-check for NaNs and infinities with tensor ops: the previous
    # x.data.numpy() round-trip used the deprecated .data attribute and
    # raises on CUDA tensors (.numpy() requires a CPU tensor).
    nans = int(x.isnan().sum())
    infs = int(x.isinf().sum())
    if nans > 0:
        print("There are {} NaN values at the output layer".format(nans))
    if infs > 0:
        print("There are {} infinite values at the output layer".format(infs))
    # Explicit dim: implicit-dim log_softmax is deprecated and can pick
    # the wrong axis on newer PyTorch versions.
    return F.log_softmax(x, dim=1)
Comment list
Table of contents