import tensorflow as tf
from tensorflow.contrib import layers


def build_fully_connected_layers_with_batch_norm(the_input, shape, mode,
                                                 num_previous_fully_connected_layers=0,
                                                 activation_summaries=None):
    """
    Builds fully connected layers with batch normalization onto the computational
    graph from the given specifications.

    shape has the format:
    [num_neurons_layer_1, num_neurons_layer_2, ..., num_neurons_layer_n]
    """
    # Avoid the mutable-default-argument pitfall by creating the list per call.
    if activation_summaries is None:
        activation_summaries = []
    for index, size in enumerate(shape):
        with tf.variable_scope("FC_" + str(num_previous_fully_connected_layers + index + 1)):
            # Bias is omitted because batch normalization's beta parameter makes it redundant.
            temp_pre_activation = tf.layers.dense(
                inputs=the_input,
                units=size,
                use_bias=False,
                kernel_initializer=layers.xavier_initializer(),
                name="layer")
            # Batch statistics are used only in TRAIN mode; at inference the
            # stored moving averages are applied instead.
            temp_batch_normalized = tf.layers.batch_normalization(
                temp_pre_activation,
                training=(mode == tf.estimator.ModeKeys.TRAIN),
                fused=True)
            temp_layer_output = tf.nn.relu(temp_batch_normalized)
            the_input = temp_layer_output
            activation_summaries.append(layers.summarize_activation(temp_layer_output))
    return the_input, activation_summaries
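
Below is a minimal usage sketch. The names `features`, `labels`, `logits`, and `loss` are illustrative placeholders, not part of the original code. One TF1 detail worth noting: `tf.layers.batch_normalization` registers its moving-average updates in `tf.GraphKeys.UPDATE_OPS`, so the training op should depend on them or the statistics used at inference will never be updated.

features = tf.placeholder(tf.float32, [None, 784], name="features")
labels = tf.placeholder(tf.int64, [None], name="labels")

# Two hidden layers of 512 and 256 units (sizes chosen only for illustration).
hidden, summaries = build_fully_connected_layers_with_batch_norm(
    features, shape=[512, 256], mode=tf.estimator.ModeKeys.TRAIN)

# A hypothetical final projection and loss on top of the built layers.
logits = tf.layers.dense(hidden, units=10, name="logits")
loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)

# Run the batch-norm moving-average updates alongside each training step.
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
    train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)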