def add_layers(inputs, in_size, out_size, layer_name, keep_prob, activation_function=None):
    """Add one fully-connected layer with dropout and return its output tensor.

    Args:
        inputs: Input tensor; assumed shape (batch, in_size) — TODO confirm
            against callers.
        in_size: Number of input units (columns of the weight matrix's input).
        out_size: Number of output units.
        layer_name: Name prefix used for the histogram summary tag.
        keep_prob: Dropout keep probability; typically a placeholder fed via
            `feed_dict` in `sess.run` so train/eval can use different values.
        activation_function: Optional callable applied to the affine output;
            identity (no-op) when None.

    Returns:
        The layer's output tensor (shape (batch, out_size)).
    """
    weights = tf.Variable(tf.random_normal([in_size, out_size]))
    # Small positive bias init (0.1) rather than zeros.
    biases = tf.Variable(tf.zeros([1, out_size]) + 0.1)
    wx_plus_b = tf.matmul(inputs, weights) + biases
    # Dropout is applied to the pre-activation values; keep_prob is the
    # fraction of units KEPT (pre-TF-2 semantics), fed at run time.
    wx_plus_b = tf.nn.dropout(wx_plus_b, keep_prob)
    if activation_function is None:
        outputs = wx_plus_b
    else:
        outputs = activation_function(wx_plus_b)
    # NOTE(review): tf.histogram_summary is the pre-1.0 TF API (renamed to
    # tf.summary.histogram in TF >= 1.0); kept as-is since the rest of this
    # file uses the old API — confirm the project's TF version before upgrading.
    tf.histogram_summary(layer_name + '/outputs', outputs)
    return outputs
# (removed scraped-page residue: "评论列表" / "文章目录" — blog page chrome, not code)