def _build_inception_v4(
        inputs,
        stack_counts=None,
        dropout_keep_prob=0.8,
        num_classes=1000,
        is_training=True,
        scope=''):
    """Inception v4 from http://arxiv.org/abs/1602.07261.

    Args:
      inputs: a tensor of size [batch_size, height, width, channels].
      stack_counts: number of repeated blocks per scale as [A, B, C];
        defaults to [4, 7, 3] (the paper's configuration) when None.
      dropout_keep_prob: dropout keep_prob.
      num_classes: number of predicted classes.
      is_training: whether is training or not.
      scope: Optional scope for op_scope.

    Returns:
      a (logits, endpoints) tuple: the 'logits' Tensor and a dict of
      Endpoints (named intermediate activations).
    """
    # None-sentinel instead of a mutable list default: a shared default
    # list could be mutated by one caller and corrupt later calls.
    if stack_counts is None:
        stack_counts = [4, 7, 3]
    # endpoints collects relevant activations for external use,
    # for example summaries or losses.
    endpoints = {}
    name_scope_net = tf.name_scope(scope, 'Inception_v4', [inputs])
    # Batch-norm/dropout get the train/eval flag; all conv/pool ops share
    # stride 1 and SAME padding unless a block overrides them.
    arg_scope_train = arg_scope(
        [layers.batch_norm, layers.dropout], is_training=is_training)
    arg_scope_conv = arg_scope(
        [layers.conv2d, layers.max_pool2d, layers.avg_pool2d],
        stride=1, padding='SAME')
    with name_scope_net, arg_scope_train, arg_scope_conv:
        net = _block_stem(inputs, endpoints)
        # 35 x 35 x 384
        with tf.variable_scope('Scale1'):
            net = _stack(net, endpoints, fn=_block_a,
                         count=stack_counts[0], scope='BlockA')
            # 35 x 35 x 384
        with tf.variable_scope('Scale2'):
            net = _block_a_reduce(net, endpoints)
            # 17 x 17 x 1024
            net = _stack(net, endpoints, fn=_block_b,
                         count=stack_counts[1], scope='BlockB')
            # 17 x 17 x 1024
        with tf.variable_scope('Scale3'):
            net = _block_b_reduce(net, endpoints)
            # 8 x 8 x 1536
            net = _stack(net, endpoints, fn=_block_c,
                         count=stack_counts[2], scope='BlockC')
            # 8 x 8 x 1536
        logits = _block_output(net, endpoints, num_classes,
                               dropout_keep_prob, scope='Output')
        endpoints['Predictions'] = tf.nn.softmax(logits, name='Predictions')
    return logits, endpoints
# Source file: build_inception_v4.py