import tensorflow as tf
import tensorflow.contrib.slim as slim


def attention_weights(features, tau=10.0, num_hidden=512):
    """Computes soft attention weights over the N samples in each batch.

    Args:
        features: [B, N, F] tensor of per-sample features.
        tau: softmax temperature; larger values yield a flatter distribution.
        num_hidden: width of the hidden fully-connected layer.

    Returns:
        [B, N] tensor with a soft attention weight for each sample.
    """
    B, N, F = features.get_shape().as_list()
    with tf.variable_scope('attention'):
        # Score every sample independently with a small two-layer MLP.
        x = tf.reshape(features, [-1, F])
        x = slim.fully_connected(x, num_hidden, scope='fc0')
        x = slim.fully_connected(x, 1, activation_fn=None, scope='fc1')
        x = tf.reshape(x, [B, N])
        # Temperature-scaled softmax over the N samples of each batch element.
        alpha = slim.softmax(x / tau)
        return alpha
Source file: volleyball_train_stage_a.py
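Below is a minimal usage sketch, not part of the original file: it shows how attention_weights could feed an attention-pooling step under TF1 graph mode. The placeholder shape and the names player_features and pooled are illustrative assumptions.

import numpy as np

# Hypothetical input: batch of 8 scenes, 12 samples each, 4096-d features.
player_features = tf.placeholder(tf.float32, [8, 12, 4096])
alpha = attention_weights(player_features, tau=10.0)        # [8, 12]
# Weight each sample by its attention and sum over the sample axis.
pooled = tf.reduce_sum(player_features * tf.expand_dims(alpha, -1), axis=1)  # [8, 4096]

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    feats = np.random.rand(8, 12, 4096).astype(np.float32)
    out = sess.run(pooled, feed_dict={player_features: feats})
    print(out.shape)  # (8, 4096)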