import tensorflow as tf

def add_layer(inputs, in_size, out_size, activation_function=None):
    # add one more layer and return the output of this layer
    # one outer name scope for the whole layer, with sub-scopes for each part
    with tf.name_scope('layer'):
        # sub-components: weights, biases, and the linear step each get a scope
        with tf.name_scope('weights_1'):
            weights = tf.Variable(tf.random_normal([in_size, out_size]), name='W')
        with tf.name_scope('biases_1'):
            biases = tf.Variable(tf.zeros([1, out_size]) + 0.1, name='b')
        with tf.name_scope('wx_plus_b'):
            wx_plus_b = tf.add(tf.matmul(inputs, weights), biases)
        # here to dropout: drop a fraction of wx_plus_b; keep_prob is the fraction
        # that is kept (not dropped) and is fed in through sess.run
        # (see the sketch after this function)
        wx_plus_b = tf.nn.dropout(wx_plus_b, keep_prob)
        if activation_function is None:
            outputs = wx_plus_b
        else:
            outputs = activation_function(wx_plus_b)
        return outputs
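Because dropout is wired into add_layer through the keep_prob placeholder (defined in the next snippet), the keep rate can be switched at sess.run time: drop units while training, keep everything while evaluating. A minimal sketch of that feeding pattern, assuming a train_step op, a loss tensor, a sess session, and batch arrays X_train/y_train/X_test/y_test (all hypothetical names, not from the original):

# training step: keep only 50% of the units, drop the rest
sess.run(train_step, feed_dict={xs: X_train, ys: y_train, keep_prob: 0.5})
# evaluation: keep_prob = 1 disables dropout so the full network is used
sess.run(loss, feed_dict={xs: X_test, ys: y_test, keep_prob: 1.0})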
# define placeholders for the inputs to the network: x and y
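A minimal sketch of those placeholders, assuming 64-dimensional inputs and 10 output classes (the sizes are illustrative, not from the original):

keep_prob = tf.placeholder(tf.float32)       # fraction of units kept by dropout
xs = tf.placeholder(tf.float32, [None, 64])  # batch of input vectors x
ys = tf.placeholder(tf.float32, [None, 10])  # batch of target vectors y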