def testTwoSessions(self):
    optimizer = ctf.train.CplxAdamOptimizer()

    g = tf.Graph()
    with g.as_default():
        with tf.Session():
            var0 = tf.Variable(np.array([1.0+1.0j, 2.0+2.0j], dtype=np.complex64),
                               name="v0")
            grads0 = tf.constant(np.array([0.1+0.1j, 0.1+0.1j], dtype=np.complex64))
            optimizer.apply_gradients([(grads0, var0)])

    gg = tf.Graph()
    with gg.as_default():
        with tf.Session():
            var0 = tf.Variable(np.array([1.0+1.0j, 2.0+2.0j], dtype=np.complex64),
                               name="v0")
            grads0 = tf.constant(np.array([0.1+0.1j, 0.1+0.1j], dtype=np.complex64))

            # If the optimizer saves any state not keyed by graph, the following
            # line fails.
            optimizer.apply_gradients([(grads0, var0)])
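
For context, this method is written as a TensorFlow test case and would normally sit inside a `tf.test.TestCase` subclass and be run through the standard test runner. Below is a minimal harness sketch; the class name and the import alias for the package providing `CplxAdamOptimizer` are assumptions, not part of the original code.

import numpy as np
import tensorflow as tf
import complex_tf as ctf  # assumed import alias; the actual package name may differ

class CplxAdamOptimizerTest(tf.test.TestCase):  # hypothetical class name
    # testTwoSessions (shown above) would be defined here; it checks that
    # CplxAdamOptimizer keeps its optimizer state keyed per graph, so
    # apply_gradients can be called in two independent graphs/sessions.
    pass

if __name__ == "__main__":
    tf.test.main()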