def build_summary(self):
    # Scalar summaries for the individual loss terms and the total loss.
    tf.summary.scalar('loss/reg_loss', tf.add_n(self.reg_loss))
    tf.summary.scalar('loss/softmax_loss', self.softmaxloss)
    tf.summary.scalar('loss/total_loss', self.total_loss)
    if self.is_skip:
        tf.summary.histogram('activation/image_fc2', self.image_fc2)
    if not self.is_TopKloss:
        # sign(relu(margin - similarity)) is 1 for pairs whose similarity falls
        # inside the margin and 0 otherwise, so these histograms track margin violations.
        tf.summary.histogram('data_similarity/imsim',
                             tf.sign(tf.nn.relu(self.image_margin - self.im_similarity)))
        tf.summary.histogram('data_similarity/sensim',
                             tf.sign(tf.nn.relu(self.sen_margin - self.sen_similarity)))
        tf.summary.scalar('msic/dneg', self.d_neg)
        tf.summary.scalar('msic/dpos', self.d_pos)
    # Activation histograms for every tracked endpoint.
    for name, tensor in self.endpoint.items():
        tf.summary.histogram('activation/' + name, tensor)
    # Weight histograms for the watched fully connected layers; the image
    # branch ('i_fc1', 'i_fc2') is only monitored when it is not skipped.
    t_var = tf.trainable_variables()
    watch_list = ['s_fc1', 's_fc2']
    if not self.is_skip:
        watch_list += ['i_fc1', 'i_fc2']
    for watch_scope in watch_list:
        watch_var = [var for var in t_var if watch_scope + '/weights' in var.name]
        tf.summary.histogram('weights/' + watch_scope, watch_var[0])
Source file: Bidirectionnet_GMM9000feat_softmaxloss.py
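A minimal sketch of how these summary ops might be consumed during training, assuming TF 1.x graph mode; the `model` instance, `train_op`, and the log directory are hypothetical placeholders for whatever the surrounding training script defines:

# Usage sketch (TF 1.x). `model` and `train_op` are hypothetical stand-ins.
import tensorflow as tf

model.build_summary()                 # registers the scalar/histogram ops above
merged = tf.summary.merge_all()       # single op that evaluates every registered summary

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    writer = tf.summary.FileWriter('./log', sess.graph)
    for step in range(1000):
        summary, _ = sess.run([merged, train_op])
        writer.add_summary(summary, step)   # points TensorBoard at './log' to view the curves
    writer.close()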