def multilayer_perceptron(x, weights, biases):
    """Build a 2-hidden-layer MLP graph with ReLU activations.

    Args:
        x: Input tensor; must be compatible with ``tf.matmul(x, weights['w1'])``
           (presumably shape (batch, n_input) — confirm against the caller).
        weights: Dict with keys ``'w1'``, ``'w2'``, ``'w3'`` mapping to weight
            variables for the two hidden layers and the output layer.
        biases: Dict with keys ``'b1'``, ``'b2'``, ``'b3'`` mapping to the
            corresponding bias variables.

    Returns:
        The output-layer tensor (pre-activation logits; no softmax is applied
        here — apply it in the loss, e.g. softmax_cross_entropy_with_logits).

    Side effects:
        Registers two histogram summaries ("relu1", "relu2") so the hidden
        activations can be inspected in TensorBoard.
    """
    # Hidden layer 1 with ReLU activation
    layer_1 = tf.add(tf.matmul(x, weights['w1']), biases['b1'])
    layer_1 = tf.nn.relu(layer_1)
    # Summary to visualize the first layer's ReLU activations in TensorBoard
    tf.summary.histogram("relu1", layer_1)

    # Hidden layer 2 with ReLU activation
    layer_2 = tf.add(tf.matmul(layer_1, weights['w2']), biases['b2'])
    layer_2 = tf.nn.relu(layer_2)
    # Summary to visualize the second layer's ReLU activations
    tf.summary.histogram("relu2", layer_2)

    # Output layer: linear (logits) — no activation applied here
    out_layer = tf.add(tf.matmul(layer_2, weights['w3']), biases['b3'])
    return out_layer
# Store layers weight & bias
# Source: tensorboard_advanced.py (Python, TensorFlow examples)
# Scraped blog-page metadata: 28 reads, 0 favorites, 0 likes, 0 comments