def fully_connected(input_layer, num_outputs):
    """Flatten ``input_layer`` and pass it through a dense (fully connected) layer.

    The input (e.g. a W x H 2-D array) is flattened to a 1-D vector of
    length W*H, multiplied by a freshly sampled [W*H, num_outputs] weight
    matrix, and shifted by a random bias.

    Args:
        input_layer: A tensor of any static/dynamic shape; it is reshaped
            to 1-D, so only the total element count matters.
        num_outputs: Number of output units of the dense layer.

    Returns:
        A 1-D tensor of shape [num_outputs].
    """
    # Flatten the whole input into a single 1-D vector of length W*H.
    flat_input = tf.reshape(input_layer, [-1])
    # Build the weight shape [W*H, num_outputs] dynamically from the input.
    # FIX: tf.pack was renamed tf.stack in TensorFlow 1.0 and later removed;
    # the original call breaks on any modern TF release.
    weight_shape = tf.squeeze(tf.stack([tf.shape(flat_input), [num_outputs]]))
    # NOTE(review): weight and bias are plain random tensors, not tf.Variable,
    # so they are re-sampled on every graph execution — fine for a shape demo,
    # but not trainable as written; confirm this is intentional.
    weight = tf.random_normal(weight_shape, stddev=0.1)
    bias = tf.random_normal(shape=[num_outputs])
    # Promote the flat vector to a 1 x (W*H) matrix so matmul is defined.
    input_2d = tf.expand_dims(flat_input, 0)
    # Affine transform: (1 x WH) @ (WH x num_outputs) + bias.
    full_output = tf.add(tf.matmul(input_2d, weight), bias)
    # Drop the leading singleton dimension -> shape [num_outputs].
    return tf.squeeze(full_output)
# Create Fully Connected Layer
# Source: implementing_different_layers.py (file source code, python)
# Page metadata from the original post: 26 reads, 0 favorites, 0 likes,
# 0 comments; sections: comment list, table of contents.