# Imports assumed by this excerpt (tf.contrib-era TensorFlow; note the pre-1.0
# tf.concat(dim, values) argument order used below).
import tensorflow as tf
from tensorflow.contrib import layers
from tensorflow.contrib.framework import arg_scope


def _block_stem(net, endpoints, scope='Stem'):
    # Stem shared by inception-v4 and inception-resnet-v2 (inception-resnet-v1 uses the simpler _stem below)
    # NOTE: the endpoints of the first 3 layers are recorded as well
    with arg_scope([layers.conv2d, layers.max_pool2d, layers.avg_pool2d], padding='VALID'):
        with tf.variable_scope(scope):
            # 299 x 299 x 3
            net = layers.conv2d(net, 32, [3, 3], stride=2, scope='Conv1_3x3/2')
            endpoints[scope + '/Conv1'] = net
            # 149 x 149 x 32
            net = layers.conv2d(net, 32, [3, 3], scope='Conv2_3x3')
            endpoints[scope + '/Conv2'] = net
            # 147 x 147 x 32
            net = layers.conv2d(net, 64, [3, 3], padding='SAME', scope='Conv3_3x3')
            endpoints[scope + '/Conv3'] = net
            # 147 x 147 x 64
            with tf.variable_scope('Br1A_Pool'):
                br1a = layers.max_pool2d(net, [3, 3], stride=2, scope='Pool1_3x3/2')
            with tf.variable_scope('Br1B_3x3'):
                br1b = layers.conv2d(net, 96, [3, 3], stride=2, scope='Conv4_3x3/2')
            net = tf.concat(3, [br1a, br1b], name='Concat1')
            endpoints[scope + '/Concat1'] = net
            # 73 x 73 x 160
            with tf.variable_scope('Br2A_3x3'):
                br2a = layers.conv2d(net, 64, [1, 1], padding='SAME', scope='Conv5_1x1')
                br2a = layers.conv2d(br2a, 96, [3, 3], scope='Conv6_3x3')
            with tf.variable_scope('Br2B_7x7x3'):
                br2b = layers.conv2d(net, 64, [1, 1], padding='SAME', scope='Conv5_1x1')
                br2b = layers.conv2d(br2b, 64, [7, 1], padding='SAME', scope='Conv6_7x1')
                br2b = layers.conv2d(br2b, 64, [1, 7], padding='SAME', scope='Conv7_1x7')
                br2b = layers.conv2d(br2b, 96, [3, 3], scope='Conv8_3x3')
            net = tf.concat(3, [br2a, br2b], name='Concat2')
            endpoints[scope + '/Concat2'] = net
            # 71 x 71 x 192
            with tf.variable_scope('Br3A_3x3'):
                br3a = layers.conv2d(net, 192, [3, 3], stride=2, scope='Conv9_3x3/2')
            with tf.variable_scope('Br3B_Pool'):
                br3b = layers.max_pool2d(net, [3, 3], stride=2, scope='Pool2_3x3/2')
            net = tf.concat(3, [br3a, br3b], name='Concat3')
            endpoints[scope + '/Concat3'] = net
            print('%s output shape: %s' % (scope, net.get_shape()))
            # 35 x 35 x 384
            return net
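

# A minimal usage sketch, not part of the original file: the placeholder name,
# endpoints dict, and __main__ guard below are assumptions added purely to show
# how _block_stem is wired into a graph and how the endpoints dict is filled.
if __name__ == '__main__':
    _endpoints = {}
    _images = tf.placeholder(tf.float32, [None, 299, 299, 3], name='images')
    _stem_out = _block_stem(_images, _endpoints)
    # The stem output should be 35 x 35 x 384; _endpoints maps 'Stem/...' names
    # to each recorded intermediate activation.
    for _name, _tensor in sorted(_endpoints.items()):
        print(_name, _tensor.get_shape())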