def forward(self, means):
    """Apply the transform matrix ``self.R`` to a sequence of mean vectors.

    Args:
        means: 2-D tensor of shape ``(T, D)`` or 3-D tensor of shape
            ``(B, T, D)``.  ``D`` is either ``num_windows * static_dim``
            (raw means, which get regrouped here to match ``R``'s column
            layout) or ``static_dim`` when the caller has already done the
            regrouping — the two cases are distinguished by comparing ``T``
            against ``self.R.shape[0]``.

    Returns:
        Tensor of shape ``(T, static_dim)`` for 2-D input, or
        ``(B, T, static_dim)`` for 3-D input.

    Raises:
        ValueError: if ``means`` is neither 2-D nor 3-D.
    """
    if means.dim() not in (2, 3):
        raise ValueError(
            "means must be a 2-D or 3-D tensor, got {}-D".format(means.dim()))

    # Needed by the backward pass (gradient w.r.t. means) — do not remove.
    self.save_for_backward(means)
    T = self.R.shape[0]
    dim = means.dim()

    # Add a batch axis so both input ranks share one code path below.
    if dim == 2:
        T_, D = means.shape
        B = 1
        means = means.view(B, T_, D)
    else:
        B, T_, D = means.shape

    if T == T_:
        # Raw (T, num_windows * static_dim) means: regroup so that all T
        # frames of window 0 come first, then window 1, etc.
        # NOTE(review): assumes R's columns are laid out window-major —
        # confirm against wherever R is built.
        static_dim = means.shape[-1] // self.num_windows
        reshaped_means = means.contiguous().view(
            B, T, self.num_windows, -1).transpose(
            1, 2).contiguous().view(B, -1, static_dim)
    else:
        # Caller already reshaped the means to align with R; use as-is.
        static_dim = means.shape[-1]
        reshaped_means = means

    out = torch.matmul(self.R, reshaped_means)
    if dim == 2:
        # Drop the batch axis we added above.
        return out.view(-1, static_dim)
    return out
# NOTE: stray page-navigation text from the web source ("评论列表" /
# "文章目录", i.e. "comment list" / "article table of contents") was
# removed here — it was never part of the code.