def forward_batch(self, x, mask):
"""
:param x: (batch, length, dim)
:param mask: (batch, length, )
:return: (batch, length, hidden_dim)
"""
# conv_after_length = length - kernel + 2 * padding_size + 1
new_x = x
if self.padding_size > 0:
            # (batch, padding_size + length + padding_size, dim)
new_x = temporal_padding_3d(x, (self.padding_size, self.padding_size))
# (batch, conv_after_length)
mask = temporal_padding_mask(mask, kernel_size=self.kernel_size, padding_size=self.padding_size)
elif self.padding_size == 0:
# (batch, conv_after_length)
mask = temporal_padding_mask(mask, kernel_size=self.kernel_size, padding_size=0)
        else:
            raise RuntimeError("padding_size must be >= 0")
        # Disabled guard for inputs shorter than the kernel: pad x up to
        # kernel_size and fall back to an all-ones mask.
        # safe_x = temporal_padding_3d(x, (0, self.kernel_size - x.shape[1]))
        # safe_mask = T.ones((x.shape[0],), dtype=theano.config.floatX).dimshuffle([0, 'x'])
        # convert safe_mask from a broadcastable column to a full matrix
        # safe_mask = T.unbroadcast(safe_mask, 1)
        # x, mask = ifelse(T.gt(self.kernel_size - x.shape[1], 0),
        #                  (safe_x, safe_mask),
        #                  (new_x, mask))
# (batch, conv_after_length, hidden_dim)
conv_result = self.forward_conv_batch(new_x)
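        # masked pooling over the time axis: (batch, conv_after_length, hidden_dim) -> (batch, hidden_dim)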
pooling_result = get_pooling_batch(conv_result, mask, self.pooling)
dropout_out = dropout_from_layer(pooling_result, self.dropout)
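        # add the bias, then apply the layer's nonlinearity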
return self.act.activate(dropout_out + self.b)
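For reference, here is a minimal numpy sketch of what the masked pooling step above is assumed to compute: a max or mean over the time axis that ignores positions where the mask is 0. The function name masked_pooling_sketch and the exact semantics are assumptions for illustration, not the original get_pooling_batch implementation.

import numpy as np

def masked_pooling_sketch(conv_result, mask, pooling="max"):
    # conv_result: (batch, conv_after_length, hidden_dim)
    # mask:        (batch, conv_after_length); 1 marks valid positions, 0 marks padding
    mask3 = mask[:, :, None].astype(conv_result.dtype)  # broadcast over hidden_dim
    if pooling == "max":
        # push masked positions to the smallest representable value so they never win the max
        lowest = np.finfo(conv_result.dtype).min
        return np.where(mask3 > 0, conv_result, lowest).max(axis=1)
    if pooling == "mean":
        # sum the valid positions and divide by how many there are
        total = (conv_result * mask3).sum(axis=1)
        count = np.maximum(mask3.sum(axis=1), 1.0)  # guard against all-masked rows
        return total / count
    raise ValueError("unsupported pooling: %s" % pooling)

Under this reading, the pooled features have shape (batch, hidden_dim), which is why the bias self.b can be added directly before the activation in forward_batch.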