def initilizae_layer(self, name_scope, row_size, col_size, activation_function, last_hidden):
    """Build one fully-connected layer and append its tensors to the network.

    Creates `weights` of shape [row_size, col_size] (truncated-normal init,
    stddev 1/sqrt(row_size)) and `biases` of shape [col_size] (zero init)
    under `name_scope`, records TensorBoard summaries for both, then computes
    `last_hidden @ weights + biases` and applies `activation_function` to it.

    NOTE(review): the method name `initilizae_layer` is misspelled, but it is
    the public interface — renaming would break callers.

    Args:
        name_scope: Variable/name scope for this layer's parameters.
        row_size: Input dimension (fan-in) of the layer.
        col_size: Output dimension (number of units) of the layer.
        activation_function: Callable applied to the pre-activation, or None
            for a linear layer.
        last_hidden: Output tensor of the previous layer; assumed shape
            [batch, row_size] — TODO confirm against callers.

    Returns:
        The layer's output tensor (post-activation, or the pre-activation
        itself when `activation_function` is None).

    Side effects:
        Appends to self.weights_all, self.biases_all, self.inputs,
        self.hidden, and emits variable summaries.
    """
    # Scale stddev by 1/sqrt(fan-in) to keep pre-activation variance stable.
    weights = get_scope_variable(name_scope=name_scope, var="weights",
                                 shape=[row_size, col_size],
                                 initializer=tf.truncated_normal_initializer(
                                     mean=0.0,
                                     stddev=1.0 / np.sqrt(float(row_size))))
    biases = get_scope_variable(name_scope=name_scope, var='biases', shape=[col_size],
                                initializer=tf.constant_initializer(0.0))
    self.weights_all.append(weights)
    self.biases_all.append(biases)
    variable_summaries(weights)
    variable_summaries(biases)
    with tf.variable_scope(name_scope) as scope:
        # Renamed from `input` to avoid shadowing the builtin.
        pre_activation = tf.matmul(last_hidden, weights) + biases
        # Identity comparison with None is the idiomatic (and safe) check.
        if activation_function is None:
            output = pre_activation
        else:
            output = activation_function(pre_activation, name='output')
        self.inputs.append(pre_activation)
        self.hidden.append(output)
    return output
# (removed stray webpage-scrape residue: "评论列表" / "文章目录" — not part of the source)