import tensorflow as tf  # TF 1.x API (tf.contrib was removed in TF 2.x)


def imagenet(self, image_feat, reuse=False, skip=False):
    """Map image features to an L2-normalized 512-d embedding plus class logits."""
    if skip:
        return image_feat
    with tf.variable_scope('image_net', reuse=reuse):
        wd = tf.contrib.layers.l2_regularizer(self.weight_decay)
        # Fully connected layer (2048 units, ReLU by default) followed by dropout.
        image_fc1 = tf.nn.dropout(
            tf.contrib.layers.fully_connected(image_feat, 2048, weights_regularizer=wd, scope='i_fc1'),
            keep_prob=self.keep_prob)
        # logits1 = tf.contrib.layers.fully_connected(image_fc1, self.num_class, weights_regularizer=wd, scope='i_fc1_softmax')
        # Linear classifier head for the softmax loss.
        logits = tf.contrib.layers.fully_connected(image_fc1, self.num_class, activation_fn=None,
                                                   weights_regularizer=wd, scope='i_fc2_softmax')
        # drop_fc1 = tf.nn.dropout(image_fc1, self.keep_prob, name='drop_fc1')
        # Linear projection to the 512-d embedding space, then batch normalization.
        image_fc2 = tf.contrib.layers.fully_connected(image_fc1, 512, activation_fn=None,
                                                      weights_regularizer=wd, scope='i_fc2')
        image_fc2_bn = tf.contrib.layers.batch_norm(image_fc2, center=True, scale=True,
                                                    is_training=self.is_training, reuse=reuse,
                                                    decay=0.999, updates_collections=None,
                                                    scope='i_fc2_bn')
        # L2-normalize so embeddings lie on the unit hypersphere.
        embed = image_fc2_bn / tf.norm(image_fc2_bn, axis=-1, keep_dims=True)
        self.endpoint['image_fc1'] = image_fc1
        self.endpoint['image_fc2'] = embed
        # self.endpoint['logits1'] = logits1
        self.endpoint['logits'] = logits
        return embed, logits
Source file: Bidirectionnet_GMM9000feat_softmaxloss.py (Python)
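
For context, here is a minimal sketch of how this method might be wired into a TF 1.x graph and trained with a softmax loss. The stub class, all hyperparameter values, the 4096-d input size, and the label placeholder are illustrative assumptions, not taken from Bidirectionnet_GMM9000feat_softmaxloss.py; the sketch simply rebinds the module-level imagenet function shown above as a method.

class ImageModelStub(object):
    """Supplies the attributes that imagenet() reads from self (assumed values)."""
    def __init__(self):
        self.weight_decay = 0.0005                                 # assumed L2 weight-decay factor
        self.keep_prob = 0.5                                       # assumed dropout keep probability
        self.num_class = 1000                                      # assumed number of classes
        self.is_training = tf.placeholder(tf.bool, name='is_training')
        self.endpoint = {}

    imagenet = imagenet                                            # bind the function above as a method


model = ImageModelStub()
image_feat = tf.placeholder(tf.float32, [None, 4096], name='image_feat')  # assumed feature dimension
embed, logits = model.imagenet(image_feat)   # embed: [batch, 512] unit-norm, logits: [batch, num_class]

labels = tf.placeholder(tf.int64, [None], name='labels')   # assumed integer class labels
cls_loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
reg_loss = tf.losses.get_regularization_loss()              # sums the l2_regularizer terms added above
total_loss = cls_loss + reg_loss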