def run_evaluation(self, master):
    """Run continuous model evaluation on the master.

    In an endless loop, builds a fresh inference graph over the test
    split, collects each objective's test metrics, and hands them to
    ``slim.evaluation.evaluation_loop``, which evaluates every new
    checkpoint appearing in ``self.checkpoint_dir`` and writes summaries
    to ``self.eval_dir``.

    Args:
        master: Address of the TensorFlow master session target.
    """
    while True:
        with tf.Graph().as_default():
            # Build the test-split input pipeline and the inference net.
            features, timestamps, time_bounds, mmsis, count = (
                self._feature_data_reader(utility.TEST_SPLIT, False))
            objectives = self.model.build_inference_net(features, timestamps,
                                                        mmsis)

            aggregate_metric_maps = [o.build_test_metrics()
                                     for o in objectives]

            summary_ops = []
            update_ops = []
            for names_to_values, names_to_updates in aggregate_metric_maps:
                # .items() instead of the Python-2-only .iteritems(): works
                # identically here on both Python 2 and 3.
                for metric_name, metric_value in names_to_values.items():
                    op = tf.summary.scalar(metric_name, metric_value)
                    # Also print each metric value so progress shows in logs.
                    op = tf.Print(op, [metric_value], metric_name)
                    summary_ops.append(op)
                for update_op in names_to_updates.values():
                    update_ops.append(update_op)

            # Clamp the example count so evaluation is neither too noisy
            # (too few examples) nor too slow, then derive batch count.
            count = min(max(count, MIN_TEST_EXAMPLES), MAX_TEST_EXAMPLES)
            num_evals = math.ceil(count / float(self.model.batch_size))

            # Setup the global step.
            slim.get_or_create_global_step()

            merged_summary_ops = tf.summary.merge(summary_ops)

            try:
                slim.evaluation.evaluation_loop(
                    master,
                    self.checkpoint_dir,
                    self.eval_dir,
                    num_evals=num_evals,
                    eval_op=update_ops,
                    summary_op=merged_summary_ops,
                    eval_interval_secs=120,
                    timeout=20 * 60,
                    variables_to_restore=variables.get_variables_to_restore())
            except (tf.errors.CancelledError, tf.errors.AbortedError):
                # Cancellation/abort must propagate so the job can be torn
                # down or restarted cleanly by the cluster manager.
                logging.warning(
                    'Caught cancel/abort while running '
                    '`slim.evaluation.evaluation_loop`; reraising')
                raise
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit are not swallowed; other errors are best-effort
                # logged and evaluation retries on the next loop iteration.
                logging.exception(
                    'Error while running slim.evaluation.evaluation_loop, ignoring')
                continue
# Source: trainer.py (Python). Trailing blog-scrape metadata (view/like/comment
# counters and page navigation labels) removed — it was not part of the code.