def forward(self, x):
    """Run the two-layer network with a spatial broadcast in between.

    Dimension legend: N is batch size; D_in is input dimension;
    H is hidden dimension; D_out is output dimension.

    Args:
        x: input tensor of shape (N, D_in).

    Returns:
        Output tensor of shape (N, D_out) from ``self.linear2``.

    Notes:
        ``h`` and ``w`` are free module-level names (spatial height/width
        to broadcast over) — assumed defined elsewhere in this file;
        TODO confirm.
    """
    # Hidden activation with ReLU expressed via clamp: (N, H)
    h_relu = self.linear1(x).clamp(min=0)
    # Reshape (N, H) -> (N, H, 1, 1) so it can broadcast spatially.
    h_relu = h_relu.unsqueeze(2).unsqueeze(3)
    # Expand to (N, H, h, w). Using -1 keeps the existing N and H dims
    # instead of the original hard-coded batch size 64, which broke for
    # any other batch size. expand() creates a view, so .contiguous()
    # is required before .view().
    h_expand = h_relu.expand(-1, -1, h, w).contiguous().view(x.size(0), -1)
    # Final projection: (N, H*h*w) -> (N, D_out)
    y_pred = self.linear2(h_expand)
    return y_pred
# (Removed non-code web-page residue: "评论列表" = comment list,
#  "文章目录" = article table of contents — scraping artifacts that
#  would be NameErrors if executed.)