def tf_model_eval_distance(sess, x, model1, model2, X_test):
    """
    Compute the per-example L1 distance between the predictions of two
    models (e.g. original vs. feature-squeezed input) over a test set.

    :param sess: TF session in which the graph is evaluated
    :param x: input placeholder fed with batches of X_test
    :param model1: output tensor with the original predictions
    :param model2: output tensor with the squeezed predictions
    :param X_test: numpy array of test inputs (first axis = examples)
    :return: numpy float vector of length len(X_test) with the L1
             distance between model1 and model2 for each example
    """
    # Symbolic per-example L1 distance between the two prediction tensors.
    # NOTE: tf.sub was removed in TF 1.0 (replaced by tf.subtract); since
    # this code already uses the TF-1.0 `axis=` keyword, tf.subtract is
    # the consistent API. (The original also built unused L2/L-inf nodes;
    # they were dead graph construction and have been dropped.)
    l1_diff = tf.reduce_sum(tf.abs(tf.subtract(model1, model2)), axis=1)

    l1_dist_vec = np.zeros(len(X_test))
    with sess.as_default():
        # Number of batches needed to cover all of X_test exactly once.
        nb_batches = int(math.ceil(float(len(X_test)) / FLAGS.batch_size))
        assert nb_batches * FLAGS.batch_size >= len(X_test)

        for batch in range(nb_batches):
            if batch % 100 == 0 and batch > 0:
                print("Batch " + str(batch))
            # Must not use the `batch_indices` function here, because it
            # repeats some examples.
            # It's acceptable to repeat during training, but not eval.
            start = batch * FLAGS.batch_size
            # Clamp the final batch so no example is evaluated twice.
            end = min(len(X_test), start + FLAGS.batch_size)
            # learning_phase=0 forces inference mode (no dropout/BN updates).
            l1_dist_vec[start:end] = l1_diff.eval(
                feed_dict={x: X_test[start:end],
                           keras.backend.learning_phase(): 0})

        assert end >= len(X_test)
    return l1_dist_vec