import tensorflow as tf
import tensorflow.contrib.slim as slim


def vgg_16(inputs,
           variables_collections=None,
           scope='vgg_16',
           reuse=None):
"""
modification of vgg_16 in TF-slim
see original code in https://github.com/tensorflow/models/blob/master/slim/nets/vgg.py
"""
  with tf.variable_scope(scope, 'vgg_16', [inputs]) as sc:
    # Shared arg_scope for conv2d, fully_connected and max_pool2d
    # (kept from the original vgg_16; no arguments are overridden here).
    with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d]):
      conv1 = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1', biases_initializer=None,
                          variables_collections=variables_collections, reuse=reuse)
      pool1, argmax_1 = tf.nn.max_pool_with_argmax(conv1, [1, 2, 2, 1], [1, 2, 2, 1], padding='VALID', name='pool1')
      conv2 = slim.repeat(pool1, 2, slim.conv2d, 128, [3, 3], scope='conv2', biases_initializer=None,
                          variables_collections=variables_collections, reuse=reuse)
      pool2, argmax_2 = tf.nn.max_pool_with_argmax(conv2, [1, 2, 2, 1], [1, 2, 2, 1], padding='VALID', name='pool2')
      conv3 = slim.repeat(pool2, 3, slim.conv2d, 256, [3, 3], scope='conv3', biases_initializer=None,
                          variables_collections=variables_collections, reuse=reuse)
      pool3, argmax_3 = tf.nn.max_pool_with_argmax(conv3, [1, 2, 2, 1], [1, 2, 2, 1], padding='VALID', name='pool3')
      conv4 = slim.repeat(pool3, 3, slim.conv2d, 512, [3, 3], scope='conv4', biases_initializer=None,
                          variables_collections=variables_collections, reuse=reuse)
      pool4, argmax_4 = tf.nn.max_pool_with_argmax(conv4, [1, 2, 2, 1], [1, 2, 2, 1], padding='VALID', name='pool4')
      conv5 = slim.repeat(pool4, 3, slim.conv2d, 512, [3, 3], scope='conv5', biases_initializer=None,
                          variables_collections=variables_collections, reuse=reuse)
      pool5, argmax_5 = tf.nn.max_pool_with_argmax(conv5, [1, 2, 2, 1], [1, 2, 2, 1], padding='VALID', name='pool5')
      # Pooling argmax indices, e.g. for unpooling in a decoder.
      argmax = (argmax_1, argmax_2, argmax_3, argmax_4, argmax_5)
      # Intermediate feature maps before each pooling layer.
      features = (conv1, conv2, conv3, conv4, conv5)
      return pool5, argmax, features
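
For reference, a minimal sketch of how this function might be called. The placeholder and the 224x224 input size are assumptions for illustration, not part of the original code; the comments summarise what each returned value holds.

# Minimal usage sketch (assumed 224x224 RGB input; relies on the imports above).
images = tf.placeholder(tf.float32, [None, 224, 224, 3], name='images')
pool5, argmax, features = vgg_16(images)

# pool5    : output of the last pooling layer (7x7x512 for 224x224 inputs)
# argmax   : (argmax_1, ..., argmax_5), flattened indices of the max element in each
#            pooling window, typically used for unpooling in an encoder-decoder
#            model such as SegNet or DeconvNet
# features : (conv1, ..., conv5), the feature maps before each pooling layer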