import re

import tensorflow as tf

import bbbc006


def tower_loss(scope, images, labels):
    """Calculate the total loss on a single tower running the BBBC006 model.

    Args:
        scope: unique prefix string identifying the BBBC006 tower, e.g. 'tower_0'
        images: Images. 4D tensor of shape [batch_size, height, width, 3].
        labels: Labels. 1D tensor of shape [batch_size].

    Returns:
        Tensor of shape [] containing the total loss for a batch of data.
    """
    # Build the inference Graph.
    c_fuse, s_fuse = bbbc006.inference(images)

    # Build the portion of the Graph calculating the losses. Note that we will
    # assemble the total_loss using a custom function below.
    _ = bbbc006.loss(c_fuse, s_fuse, labels)

    # Assemble all of the losses for the current tower only.
    losses = tf.get_collection('losses', scope)

    # Calculate the total loss for the current tower.
    total_loss = tf.add_n(losses, name='total_loss')

    # Attach a scalar summary to all individual losses and the total loss; do
    # the same for the averaged version of the losses.
    for l in losses + [total_loss]:
        # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU
        # training session. This helps the clarity of presentation on
        # TensorBoard.
        loss_name = re.sub('%s_[0-9]*/' % bbbc006.TOWER_NAME, '', l.op.name)
        tf.summary.scalar(loss_name, l)

    return total_loss
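For context, here is a minimal sketch of how tower_loss() is typically driven from a multi-GPU training loop, following the standard TensorFlow 1.x tower pattern: each GPU gets its own name scope so that tf.get_collection('losses', scope) picks up only that tower's losses, while variables are shared across towers. The input pipeline bbbc006.inputs() and FLAGS.num_gpus are assumptions for illustration, not part of the code above.

import tensorflow as tf

import bbbc006


def build_towers(num_gpus):
    """Sketch: build one loss/gradient pair per GPU using tower_loss()."""
    with tf.Graph().as_default(), tf.device('/cpu:0'):
        # Hypothetical input pipeline; replace with the project's real one.
        images, labels = bbbc006.inputs()

        opt = tf.train.AdamOptimizer(1e-4)
        tower_grads = []
        with tf.variable_scope(tf.get_variable_scope()):
            for i in range(num_gpus):
                with tf.device('/gpu:%d' % i):
                    with tf.name_scope('%s_%d' % (bbbc006.TOWER_NAME, i)) as scope:
                        # tower_loss() collects only the losses created under
                        # this tower's name scope.
                        loss = tower_loss(scope, images, labels)

                        # Reuse variables so all towers share the same weights.
                        tf.get_variable_scope().reuse_variables()

                        tower_grads.append(opt.compute_gradients(loss))
        return tower_grads

The per-tower gradients collected in tower_grads would then be averaged across GPUs and applied once, which is why tower_loss() only returns the tower's total loss rather than applying an optimizer itself.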