def testAdaptiveGradientClip(self):
    """Run one SGD step with adaptive gradient clipping.

    Checks that a single optimize_loss step (a) updates the model variable
    to the expected value, (b) increments the global step, and (c) creates
    exactly two state variables under "OptimizeLoss/AdaptiveMaxNorm"
    (presumably the clipper's moving-average state — confirm against
    adaptive_clipping_fn's implementation).
    """
    with self.test_session() as session:
        x, var, loss, global_step = _setup_model()
        # adaptive_clipping_fn() returns a callable that optimize_loss
        # applies to the gradients before the update.
        clip_gradients = tf.contrib.layers.adaptive_clipping_fn()
        train = tf.contrib.layers.optimize_loss(
            loss,
            global_step,
            learning_rate=0.1,
            optimizer="SGD",
            clip_gradients=clip_gradients)
        tf.global_variables_initializer().run()
        session.run(train, feed_dict={x: 5})
        var_value, global_step_value = session.run([var, global_step])
        # Expected value after one clipped SGD step from _setup_model's
        # starting point (golden value, checked to 4 decimal places).
        self.assertAlmostEqual(var_value, 9.8916, 4)
        self.assertEqual(global_step_value, 1)
        # Count the clipper's state variables. Use tf.global_variables()
        # (consistent with global_variables_initializer above) instead of
        # the deprecated tf.all_variables() alias; also avoid shadowing
        # the model's `var` with the loop variable.
        var_count = 0
        for v in tf.global_variables():
            if v.name.startswith("OptimizeLoss/AdaptiveMaxNorm"):
                var_count += 1
        self.assertEqual(2, var_count)