import tensorflow as tf

# FLAGS is assumed to be defined elsewhere in the module, e.g. via
# FLAGS = tf.app.flags.FLAGS with a 'learning_beta1' flag registered.

def create_optimizers(gene_loss, gene_var_list,
                      disc_loss, disc_var_list):
    # Passing global_step to minimize() increments it automatically each time
    # the op runs, so no manual increment is needed. Since both minimize ops
    # below reference it, one full training iteration advances it by two.
    global_step = tf.Variable(0, dtype=tf.int64, trainable=False, name='global_step')
    learning_rate = tf.placeholder(dtype=tf.float32, name='learning_rate')

    # Separate Adam optimizers for the generator and the discriminator,
    # sharing a single learning-rate placeholder.
    gene_opti = tf.train.AdamOptimizer(learning_rate=learning_rate,
                                       beta1=FLAGS.learning_beta1,
                                       name='gene_optimizer')
    disc_opti = tf.train.AdamOptimizer(learning_rate=learning_rate,
                                       beta1=FLAGS.learning_beta1,
                                       name='disc_optimizer')

    gene_minimize = gene_opti.minimize(gene_loss, var_list=gene_var_list,
                                       name='gene_loss_minimize', global_step=global_step)
    disc_minimize = disc_opti.minimize(disc_loss, var_list=disc_var_list,
                                       name='disc_loss_minimize', global_step=global_step)

    return (global_step, learning_rate, gene_minimize, disc_minimize)
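
For context, here is a minimal sketch of how the returned handles might be driven in a TF1 training loop. This is an illustration under assumptions, not code from the original: gene_loss, gene_var_list, disc_loss, and disc_var_list are assumed to be built earlier by the model-construction code (with their inputs wired into the graph), and initial_lr and num_steps are hypothetical hyperparameters.

# Hypothetical usage sketch; assumes the loss tensors and variable lists
# were built earlier by the model-construction code.
(global_step, learning_rate,
 gene_minimize, disc_minimize) = create_optimizers(gene_loss, gene_var_list,
                                                   disc_loss, disc_var_list)

initial_lr = 0.0002   # hypothetical starting learning rate
num_steps = 10000     # hypothetical number of training iterations

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(num_steps):
        # Run both minimize ops with the same fed learning rate; each run
        # also bumps global_step, so it advances by two per iteration here.
        _, _, step = sess.run([gene_minimize, disc_minimize, global_step],
                              feed_dict={learning_rate: initial_lr})

Because learning_rate is a placeholder rather than a constant, the feed value can be decayed over the course of training without rebuilding the graph.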