def forward(self, embeddings, softmax):
    """Run the classifier head on ``embeddings``.

    Pipeline: fc1 -> batch-norm -> LeakyReLU(0.2) -> white noise
    (train-time only) -> dropout(p=0.1, train-time only) -> fc2.

    Args:
        embeddings: Input feature tensor fed to ``self.fc1``.
            (Presumably shape (batch, in_features) — confirm with caller.)
        softmax: If truthy, return softmax probabilities; otherwise raw logits.

    Returns:
        Logits from ``self.fc2``, or their softmax if ``softmax`` is truthy.
    """
    x = F.leaky_relu(self.fc1_bn(self.fc1(embeddings)),
                     negative_slope=0.2, inplace=True)
    # Noise and dropout are regularizers: both are no-ops at eval time.
    x = add_white_noise(x, 0.005, self.training)
    x = F.dropout(x, p=0.1, training=self.training)
    x = self.fc2(x)
    if softmax:
        # BUG FIX: F.softmax without `dim` is deprecated and guesses the
        # dimension. Normalize over the last (class/logit) dimension.
        return F.softmax(x, dim=-1)
    return x
# NOTE(review): the two lines below were stray page-navigation text
# ("评论列表" = comment list, "文章目录" = table of contents) copied in
# from a web page; commented out because they are not valid Python.
# 评论列表
# 文章目录