def __init__(self, session, ob_dim=None, n_epochs=10, stepsize=1e-3):
    """Build the TF graph for a neural-network value function baseline.

    Constructs a 2-hidden-layer MLP mapping (preprocessed) observations
    to a scalar value prediction, trained with mean-squared error via
    Adam. The graph is built once here, so later calls to ``self.fit``
    reuse the same placeholders and ops through the stored session.

    Args:
        session: An active TF session used for later fit/predict calls.
        ob_dim: Raw observation dimension; required (no usable default).
            The observation placeholder is ``ob_dim + 1`` wide —
            presumably the preprocessing step appends one extra feature
            (e.g. a timestep), per the original author's note; confirm
            against the caller.
        n_epochs: Number of epochs per call to ``fit``.
        stepsize: Adam learning rate.

    Raises:
        ValueError: If ``ob_dim`` is not provided, rather than an opaque
            ``TypeError`` from ``ob_dim + 1`` during graph construction.
    """
    if ob_dim is None:
        raise ValueError("ob_dim is required to size the observation placeholder")
    self.n_epochs = n_epochs
    self.lrate = stepsize
    # Targets are per-state scalar values; observations carry the extra
    # preprocessing column (hence ob_dim + 1).
    self.sy_ytarg = tf.placeholder(shape=[None], name="nnvf_y", dtype=tf.float32)
    self.sy_ob_no = tf.placeholder(shape=[None, ob_dim+1], name="nnvf_ob", dtype=tf.float32)
    # Two 32-unit hidden layers. leak=0.0 makes lrelu a plain ReLU.
    self.sy_h1 = utils.lrelu(utils.dense(self.sy_ob_no, 32, "nnvf_h1", weight_init=utils.normc_initializer(1.0)), leak=0.0)
    self.sy_h2 = utils.lrelu(utils.dense(self.sy_h1, 32, "nnvf_h2", weight_init=utils.normc_initializer(1.0)), leak=0.0)
    # Linear output head, reshaped from (batch, 1) to a flat (batch,) vector
    # so it aligns with sy_ytarg in the loss.
    self.sy_final_n = utils.dense(self.sy_h2, 1, "nnvf_final", weight_init=utils.normc_initializer(1.0))
    self.sy_ypred = tf.reshape(self.sy_final_n, [-1])
    self.sy_l2_error = tf.reduce_mean(tf.square(self.sy_ypred - self.sy_ytarg))
    # Use the stored learning rate so there is a single source of truth.
    self.fit_op = tf.train.AdamOptimizer(self.lrate).minimize(self.sy_l2_error)
    self.sess = session
# NOTE(review): removed two lines of non-code web-scrape residue
# ("评论列表" = comment list, "文章目录" = article table of contents)
# that were not valid Python.