def printnorm_backward(self, input_, output):
    """Backward hook that prints gradient diagnostics for a module.

    Intended for ``module.register_backward_hook``: ``self`` is the module
    the hook is attached to, ``input_`` is the tuple of gradients w.r.t.
    the module's inputs, and ``output`` is the tuple of gradients w.r.t.
    the module's outputs.  As a side effect, the first input gradient is
    stashed in the module-level global ``backward_grad`` as a NumPy array
    for later inspection.  Returns None.

    NOTE(review): entries of ``input_`` can be ``None`` for inputs that do
    not require gradients; this hook assumes ``input_[0]`` is a Tensor.
    """
    global backward_grad
    # input_ is a tuple of packed gradient Tensors
    # output is a tuple of gradient Tensors w.r.t. the module outputs
    print('Inside ' + self.__class__.__name__ + ' backward')
    print('')
    print('input: ', type(input_))
    print('input[0]: ', type(input_[0]))
    print('output: ', type(output))
    print('output[0]: ', type(output[0]))
    print('')
    print('input size:', input_[0].size())
    print('output size:', len(output))
    print('output[0] size:', output[0].size())
    print('output norm:', output[0].data.norm())
    # .detach().cpu() rather than .data: does not share autograd history
    # with the graph being backpropagated, and works for CUDA tensors
    # (.numpy() on a GPU tensor raises TypeError).
    backward_grad = input_[0].detach().cpu().numpy()
    # This can be useful for capturing the gradients flowing through a
    # pretrained network.  (For inference-only feature extraction, wrap the
    # forward pass in torch.no_grad(); the old volatile=True flag was
    # removed in PyTorch 0.4.)
# (removed stray web-page navigation text scraped with this snippet:
#  "评论列表" = comment list, "文章目录" = table of contents)