def model(inputs, is_training=False, num_classes=4716, hidden_units=4096):
    """Build a multi-label classification network.

    Architecture: parallel multi-width convolutions (`multi_conv`) ->
    max-pool over axis 1 -> one fully connected hidden layer
    (dense -> batch norm -> relu -> dropout) -> logits layer.

    Args:
        inputs: Input tensor passed to `multi_conv` (assumed
            (batch, steps, features) — TODO confirm against `multi_conv`).
        is_training: Python bool; enables batch-norm training mode and
            dropout.
        num_classes: Width of the output logits layer. Defaults to 4716,
            the value previously hard-coded.
        hidden_units: Width of the hidden fully connected layer.
            Defaults to 4096, the value previously hard-coded.

    Returns:
        Raw logits tensor of shape (batch, num_classes); no activation is
        applied. A histogram summary of the sigmoid of the logits is
        recorded under 'summary/fc2'.
    """
    # Parallel convolutions with window sizes 1..8, 512 filters each.
    filter_widths = [1, 2, 3, 4, 5, 6, 7, 8]
    block1 = multi_conv(inputs, filter_widths, [512] * len(filter_widths),
                        name='block1', is_training=is_training)
    # Max over axis 1 collapses the per-position responses to one vector.
    net = tf.reduce_max(block1, axis=1, name='maxpool')
    # Hidden layer: dense -> batch norm -> relu -> dropout.
    net = tf.layers.dense(net, hidden_units,
                          kernel_regularizer=kernel_regularizer, name='fc1')
    # NOTE(review): tf.layers.batch_normalization requires the caller to run
    # the tf.GraphKeys.UPDATE_OPS collection during training, otherwise the
    # moving statistics never update — confirm the training loop does this.
    net = tf.layers.batch_normalization(net, training=is_training,
                                        name='fc1/batch_normalization')
    net = tf.nn.relu(net, name='fc1/relu')
    net = tf.layers.dropout(net, rate=0.5, training=is_training,
                            name='fc1/dropout')
    # Output layer producing raw logits (activation left to the loss/caller).
    net = tf.layers.dense(net, num_classes,
                          kernel_regularizer=kernel_regularizer, name='fc2')
    # TensorBoard histogram of per-class sigmoid probabilities.
    tf.summary.histogram('summary/fc2', tf.nn.sigmoid(net))
    return net
# (removed: scraped blog-page navigation text that is not part of this module)