import tensorflow as tf
slim = tf.contrib.slim


def hidden_layers_on(self, layer, layer_sizes):
    """Stack fully connected ReLU hidden layers on top of `layer`."""
    # Accept either a list of ints or a comma-separated string, e.g. "128,64".
    if not isinstance(layer_sizes, list):
        layer_sizes = list(map(int, layer_sizes.split(",")))  # list() needed under Python 3, or len() below fails
    assert len(layer_sizes) > 0
    # One fully connected layer per requested size, scoped h0, h1, ...
    for i, size in enumerate(layer_sizes):
        layer = slim.fully_connected(scope="h%d" % i,
                                     inputs=layer,
                                     num_outputs=size,
                                     weights_regularizer=tf.contrib.layers.l2_regularizer(0.01),
                                     activation_fn=tf.nn.relu)
        # if opts.use_dropout:
        #     layer = slim.dropout(layer, is_training=IS_TRAINING, scope="do%d" % i)
    return layer
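
A minimal usage sketch, assuming TensorFlow 1.x (where tf.contrib still exists); the input shape and the "128,64" spec are illustrative, and the throwaway Model class exists only to give the function a `self` to bind to:

import tensorflow as tf

class Model(object):
    hidden_layers_on = hidden_layers_on  # bind the function above as a method

inputs = tf.placeholder(tf.float32, shape=[None, 32])
# "128,64" parses to [128, 64]: two stacked ReLU layers under scopes h0 and h1.
hidden = Model().hidden_layers_on(inputs, "128,64")
print(hidden.shape)  # (?, 64)

Passing sizes as a comma-separated string keeps the layer spec easy to set from a command-line flag, while plain lists still work when the sizes are built programmatically.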