def construct(self, hidden_layer_size):
    """Build the computation graph: a one-hidden-layer MLP classifier.

    Creates image/label placeholders, a ReLU hidden layer of
    `hidden_layer_size` units, a linear output layer over `self.LABELS`
    classes, Adam training op, accuracy metric and TF summaries, then
    initializes variables and finalizes the graph.

    NOTE(review): written against the TensorFlow 0.x API
    (`tf.merge_summary`, `tf.scalar_summary`, `tf.initialize_all_variables`,
    contrib layers/losses/metrics) — confirm the installed TF version matches.
    """
    with self.session.graph.as_default():
        # Input placeholders; images are expected as
        # [batch, WIDTH, HEIGHT, 1] float maps, labels as int class ids.
        with tf.name_scope("inputs"):
            self.images = tf.placeholder(tf.float32, [None, self.WIDTH, self.HEIGHT, 1], name="images")
            self.labels = tf.placeholder(tf.int64, [None], name="labels")

        # Network: flatten -> ReLU hidden layer -> linear logits.
        flat_inputs = tf_layers.flatten(self.images, scope="preprocessing")
        hidden = tf_layers.fully_connected(flat_inputs, num_outputs=hidden_layer_size, activation_fn=tf.nn.relu, scope="hidden_layer")
        logits = tf_layers.fully_connected(hidden, num_outputs=self.LABELS, activation_fn=None, scope="output_layer")
        self.predictions = tf.argmax(logits, 1)

        # Loss and training op (Adam with default hyperparameters);
        # global_step is incremented once per training step.
        loss = tf_losses.sparse_softmax_cross_entropy(logits, self.labels, scope="loss")
        self.global_step = tf.Variable(0, dtype=tf.int64, trainable=False, name="global_step")
        self.training = tf.train.AdamOptimizer().minimize(loss, global_step=self.global_step)
        self.accuracy = tf_metrics.accuracy(self.predictions, self.labels)

        # Summaries: training gets loss + accuracy merged;
        # dev/test each get an accuracy-only summary.
        train_summary = tf.merge_summary([tf.scalar_summary("train/loss", loss),
                                          tf.scalar_summary("train/accuracy", self.accuracy)])
        self.summaries = {"training": train_summary}
        for dataset in ["dev", "test"]:
            self.summaries[dataset] = tf.scalar_summary(dataset + "/accuracy", self.accuracy)

        # Initialize all variables, then freeze the graph so later
        # accidental op creation raises instead of silently growing it.
        self.session.run(tf.initialize_all_variables())
        self.session.graph.finalize()

        # Optionally log the finished graph for TensorBoard.
        if self.summary_writer:
            self.summary_writer.add_graph(self.session.graph)
# (removed scraped-webpage navigation residue: "评论列表" / "文章目录" — not part of the source)