def test_last_dim_softmax_handles_mask_correctly(self):
    """Softmax over the last dimension must zero masked options and renormalize the rest."""
    batch_size, length_1, length_2, num_options = 1, 4, 3, 5
    scores = [2, 4, 0, 1, 6]
    # Broadcast the same five option scores into every (i, j) cell of a
    # (batch, length_1, length_2, num_options) array.
    options_array = numpy.empty((batch_size, length_1, length_2, num_options))
    options_array[...] = scores
    # Mask off the last (and largest) option, so it must receive zero probability.
    mask = Variable(torch.IntTensor([[1, 1, 1, 1, 0]]))
    options_tensor = Variable(torch.from_numpy(options_array).float())
    softmax_tensor = util.last_dim_softmax(options_tensor, mask).data.numpy()
    assert softmax_tensor.shape == (batch_size, length_1, length_2, num_options)
    # Expected: softmax([2, 4, 0, 1]) over the unmasked options, 0.0 for the masked one.
    expected = [0.112457, 0.830953, 0.015219, 0.041371, 0.0]
    for row in range(length_1):
        for column in range(length_2):
            assert_almost_equal(softmax_tensor[0, row, column], expected, decimal=5)
# NOTE(review): removed non-code web-scrape residue ("评论列表" / "文章目录",
# i.e. blog "comment list" / "article table of contents" navigation text) —
# bare non-Python text here would be a syntax error.