def propdown(self, hid):
""" This function propagates the hidden units activation downwords to the visible units
:param hid: Variable Matrix(batch_size, out_channels, image_height_out, image_width_out) - given h_sample
:return: Variable Matrix(batch_size, in_channels, image_height, image_width) - probability for each visible units to be v_j = 1
"""
    batch_size = hid.data.shape[0]
    if self.real == 0:
        # Binary visible units: flip the kernel spatially and swap the in/out channel
        # axes so that a full-padding convolution implements the transpose of the
        # upward convolution, then squash with a sigmoid.
        W_flipped = F.swapaxes(CF.flip(self.conv.W, axes=(2, 3)), axis1=0, axis2=1)
        pre_sigmoid_activation = F.convolution_2d(hid, W_flipped, self.conv.a, pad=self.ksize - 1)
        # Dense analogue: F.matmul(hid, self.l.W) + F.broadcast_to(self.l.a, (batch_size, self.n_visible))
        v_mean = F.sigmoid(pre_sigmoid_activation)
    else:
        # TODO: check
        # Gaussian (real-valued) visible units: the mean is the linear activation
        # itself, so no sigmoid is applied.
        W_flipped = F.swapaxes(CF.flip(self.conv.W, axes=(2, 3)), axis1=0, axis2=1)
        v_mean = F.convolution_2d(hid, W_flipped, self.conv.a, pad=self.ksize - 1)
    return v_mean
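
# A minimal alternative sketch (an assumption, not part of the original code): with
# stride 1, flipping the kernel spatially, swapping its channel axes, and convolving
# with full padding (ksize - 1) is equivalent to a transposed convolution. Assuming F
# is chainer.functions, the binary branch above could therefore also be expressed with
# F.deconvolution_2d, reusing self.conv.W and the visible bias self.conv.a directly.
def propdown_via_deconv(self, hid):
    """Hypothetical, untested equivalent of propdown for the binary case."""
    pre_sigmoid_activation = F.deconvolution_2d(hid, self.conv.W, b=self.conv.a)
    return F.sigmoid(pre_sigmoid_activation)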