def bbox_to_attention(self, bbox):
    """Map bounding boxes to RATM attention parameters.

    Splits each box into its first two coordinates and its last two
    (presumably position and size — TODO confirm against callers) and
    normalizes them by ``self.inpt_size``:

    - ``us``: first two components divided by the input size,
    - ``ss``: half of the last two components divided by the input size,
    - ``ds``: last two components divided by ``input size - 1``.

    The three pairs are concatenated along the trailing axis, yielding a
    6-component attention vector per box.
    """
    with tf.variable_scope('ratm_bbox_to_attention'):
        size = self.inpt_size[np.newaxis, :2]
        position, extent = bbox[..., :2], bbox[..., 2:]
        us = position / size
        ss = 0.5 * extent / size
        ds = extent / (size - 1.)
        return tf.concat(axis=tf.rank(bbox) - 1, values=(us, ss, ds))
# 评论列表 (comment list) — scraping residue from the source web page, not code
# 文章目录 (article table of contents) — scraping residue from the source web page, not code