from collections import OrderedDict
from contextlib import ExitStack

import tensorflow as tf
import tensorflow.contrib.slim as slim


def svhnnet(inputs, scope='svhnnet', is_training=True, reuse=False):
    """Convolutional SVHN classifier; returns logits and named intermediate activations."""
    layers = OrderedDict()
    net = inputs
    with tf.variable_scope(scope, reuse=reuse):
        with ExitStack() as stack:
            # ReLU activations and L2 weight decay on every conv/fc layer.
            stack.enter_context(
                slim.arg_scope(
                    [slim.fully_connected, slim.conv2d],
                    activation_fn=tf.nn.relu,
                    weights_regularizer=slim.l2_regularizer(2.5e-5)))
            # SAME padding on convs and pools; each pool halves spatial size.
            stack.enter_context(
                slim.arg_scope([slim.max_pool2d, slim.conv2d],
                               padding='SAME'))
            net = slim.conv2d(net, 64, 5, scope='conv1')
            net = slim.max_pool2d(net, 3, stride=2, scope='pool1')
            layers['pool1'] = net
            net = slim.conv2d(net, 64, 5, scope='conv2')
            net = slim.max_pool2d(net, 3, stride=2, scope='pool2')
            layers['pool2'] = net
            net = slim.conv2d(net, 128, 5, scope='conv3')
            layers['conv3'] = net
            net = tf.contrib.layers.flatten(net)
            net = slim.fully_connected(net, 3072, scope='fc4')
            layers['fc4'] = net
            net = slim.fully_connected(net, 2048, scope='fc5')
            layers['fc5'] = net
            # Final layer emits raw 10-class logits (no activation).
            net = slim.fully_connected(net, 10, activation_fn=None, scope='fc6')
            layers['fc6'] = net
    return net, layers
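
For reference, a minimal usage sketch follows. The input shape and placeholder setup are illustrative assumptions (SVHN images are typically 32x32 RGB), not part of the original snippet:

# Illustrative usage (assumed 32x32x3 inputs, TF1-style placeholder).
images = tf.placeholder(tf.float32, [None, 32, 32, 3])
logits, activations = svhnnet(images, is_training=True)
# `logits` has shape [None, 10]; `activations` maps layer names
# ('pool1', ..., 'fc6') to the corresponding tensors.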