def apply(self, is_train, x, mask=None):
    if self.map_layer is not None:
        x = self.map_layer.apply(is_train, x, mask)
    rank = len(x.shape) - 2
    if mask is not None:
        shape = tf.shape(x)
        # Turn the per-sequence lengths into a 0/1 mask over the time (second-to-last) axis
        mask = tf.sequence_mask(tf.reshape(mask, (-1,)), shape[-2])
        mask = tf.cast(tf.reshape(mask, (shape[0], shape[1], shape[2], 1)), tf.float32)
        # This min_val thing is kind of a hack; really we should do something like compute the
        # min value over the entire batch, or just pick a very negative value, or do something
        # a bit more finicky with tf.boolean_mask.
        # In practice it doesn't seem to be a problem, and some of the earlier models used this
        # scheme, so I have been sticking with it.
        if self.min_val == 0:
            x *= mask
        else:
            # Push padded positions down to min_val so they can never win the max
            x = x * mask + self.min_val * (1 - mask)
        return tf.maximum(tf.reduce_max(x, axis=rank),
                          tf.fill([1] * (len(x.shape) - 1), float(self.min_val)))
    else:
        return tf.reduce_max(x, axis=rank)
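
# The following is a minimal standalone sketch (not part of the original class) of the same
# masked-max trick on a rank-3 (batch, time, dim) tensor. The names masked_max_pool and
# min_val are illustrative assumptions, and it assumes TensorFlow 1.x graph mode like the
# snippet above: padded time steps are pushed down to min_val so they can never win the max,
# and the outer tf.maximum keeps fully-padded rows pinned at min_val.
import tensorflow as tf


def masked_max_pool(x, lengths, min_val=-1e30):
    # x: (batch, time, dim) float32, lengths: (batch,) int32 sequence lengths
    mask = tf.sequence_mask(lengths, tf.shape(x)[1])      # (batch, time) bool
    mask = tf.cast(tf.expand_dims(mask, 2), tf.float32)   # (batch, time, 1)
    # Keep real positions, replace padding with min_val
    x = x * mask + min_val * (1 - mask)
    # Clamp from below so an all-padding sequence yields min_val rather than garbage
    return tf.maximum(tf.reduce_max(x, axis=1), float(min_val))


# Example: the second sequence only has 2 valid steps, so its pooled vector
# ignores the padded positions entirely.
x = tf.random_normal([2, 4, 3])
lengths = tf.constant([4, 2], tf.int32)
pooled = masked_max_pool(x, lengths)  # shape (2, 3)
with tf.Session() as sess:
    print(sess.run(pooled))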