def __call__(self, q_input, a_input, *args, **kwargs):
    """Forward pass of a shared (siamese) convolutional layer.

    Both inputs are convolved with the SAME filters ``self.W`` and bias
    ``self.b``, passed through the configured nonlinearity, then max-pooled.

    Parameters
    ----------
    q_input, a_input : 4D tensors
        Question / answer feature maps.  Presumably shaped
        (batch, channels, height, width) as Theano's conv2d expects —
        TODO confirm against the caller.

    Returns
    -------
    (q_output, a_output) : tuple of 4D tensors
        The pooled feature maps for each input.
    """
    # The bias is a 1D vector; reshape it once to (1, n_filters, 1, 1) so it
    # broadcasts across the mini-batch and the feature-map width & height.
    bias = self.b.dimshuffle('x', 0, 'x', 'x')

    def _conv_pool(x):
        # Shared per-input pipeline: convolve -> (optional nonlinearity) -> pool.
        conv_out = conv2d(
            input=x,
            filters=self.W,
            filter_shape=self.filter_shape
        )
        if self.non_linear == "tanh":
            conv_out = Tanh(conv_out + bias)
        elif self.non_linear == "relu":
            conv_out = ReLU(conv_out + bias)
        # NOTE: any other self.non_linear value pools the raw (bias-free)
        # convolution output, matching the original code's else-branch.
        return pool.pool_2d(input=conv_out, ws=self.pool_size, ignore_border=True)  # max pooling

    return _conv_pool(q_input), _conv_pool(a_input)
# NOTE(review): removed stray webpage-navigation text ("评论列表" / "文章目录",
# i.e. "comment list" / "article table of contents") left over from a blog
# scrape — it was not code and made the file a syntax error.