import tensorflow as tf
from tensorflow.contrib import layers


def aux_logit_layer(inputs, num_classes, is_training):
    # Auxiliary classifier head in the GoogLeNet/Inception style.
    with tf.variable_scope("pool2d"):
        # 5x5 average pooling with stride 3: a 14x14 feature map becomes 4x4.
        pooled = layers.avg_pool2d(inputs, [5, 5], stride=3)
    with tf.variable_scope("conv11"):
        # 1x1 convolution reduces the channel dimension to 128.
        conv11 = layers.conv2d(pooled, 128, [1, 1])
    with tf.variable_scope("flatten"):
        # 4 * 4 * 128 = 2048 features per example.
        flat = tf.reshape(conv11, [-1, 2048])
    with tf.variable_scope("fc"):
        fc = layers.fully_connected(flat, 1024, activation_fn=None)
    with tf.variable_scope("drop"):
        # keep_prob = 0.3, i.e. 70% of activations are dropped while training.
        drop = layers.dropout(fc, 0.3, is_training=is_training)
    with tf.variable_scope("linear"):
        linear = layers.fully_connected(drop, num_classes, activation_fn=None)
    with tf.variable_scope("soft"):
        soft = tf.nn.softmax(linear)
    return soft
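
For reference, a minimal sketch of how this auxiliary head could be attached in a TF 1.x graph. The 14x14x512 input shape and the placeholder names are assumptions for illustration (that is the shape GoogLeNet feeds its first auxiliary classifier); adapt them to the intermediate feature map of your own network.

# Hypothetical input: a batch of 14x14x512 intermediate feature maps (assumption).
features = tf.placeholder(tf.float32, [None, 14, 14, 512], name="aux_input")
is_training = tf.placeholder(tf.bool, name="is_training")

aux_probs = aux_logit_layer(features, num_classes=1000, is_training=is_training)
# aux_probs has shape [batch, 1000]; 5x5/3 pooling of a 14x14 map gives 4x4,
# and 4 * 4 * 128 = 2048 matches the reshape inside the function.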