def _to_attention(self, raw_att, with_bias=True):
    # Convert the raw attention parameters into a bounding box (y, x, h, w).
    bbox = FixedStdAttention.attention_to_bbox(self, raw_att)
    us = bbox[..., :2]  # glimpse centre coordinates
    if with_bias:
        us += self.offset_bias

    # Stride between Gaussian filters, normalised by the glimpse size.
    ds = bbox[..., 2:4] / (self.glimpse_size[np.newaxis, :2] - 1)
    # Standard deviation of each Gaussian filter, derived from the stride.
    ss = self._stride_to_std(ds)

    # Pack (centres, stds, strides) into a single 6-dimensional attention vector.
    ap = tf.concat(axis=tf.rank(raw_att) - 1, values=(us, ss, ds), name='attention')
    ap.set_shape(raw_att.get_shape()[:-1].concatenate((6,)))
    return ap
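
For intuition, here is a minimal standalone sketch of the same packing step under stated assumptions: plain TensorFlow 1.x graph mode, toy values for `glimpse_size` and `offset_bias`, and a placeholder `stride_to_std` standing in for `self._stride_to_std` (whose real mapping lives in `FixedStdAttention`).

import numpy as np
import tensorflow as tf  # assumes TensorFlow 1.x (graph mode, tf.Session)

glimpse_size = np.asarray([28, 28], dtype=np.float32)  # toy glimpse height/width
offset_bias = tf.constant([0.5, 0.5])                  # hypothetical bias value

def stride_to_std(stride):
    # Placeholder for self._stride_to_std: a fixed linear stride-to-std mapping.
    return 0.5 * stride

bbox = tf.constant([[10., 20., 14., 14.]])  # one bounding box as (y, x, h, w)
us = bbox[..., :2] + offset_bias            # biased glimpse centre
ds = bbox[..., 2:4] / (glimpse_size[np.newaxis, :2] - 1)  # normalised stride
ss = stride_to_std(ds)                      # Gaussian std from stride

ap = tf.concat([us, ss, ds], axis=-1)       # 6-dim attention vector (us, ss, ds)

with tf.Session() as sess:
    print(sess.run(ap))  # -> [[10.5  20.5  0.259  0.259  0.519  0.519]]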