def dense_layer_bn(bottom,
                   name,
                   training,
                   hidden_units=512,
                   activation=tf.nn.relu,
                   weight_init='he_normal'):
    '''
    Shortcut for a batch normalised dense (fully connected) layer
    '''
    # Linear transform without a bias term: batch norm's learned offset
    # (beta) makes a separate bias redundant.
    linact = dense_layer(bottom=bottom,
                         name=name,
                         hidden_units=hidden_units,
                         activation=tf.identity,
                         weight_init=weight_init,
                         add_bias=False)
    # Normalise the pre-activations, then apply the non-linearity.
    batchnorm = batch_normalisation_layer(linact, name + '_bn', training=training)
    act = activation(batchnorm)
    return act
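
# For illustration, a minimal sketch of the same dense -> batch norm ->
# activation pattern using stock TF1 ops. The names here (dense_bn_sketch,
# x, is_training) are illustrative assumptions; this does not use the
# module's own dense_layer / batch_normalisation_layer helpers.
def dense_bn_sketch(x, name, is_training, hidden_units=512, activation=tf.nn.relu):
    with tf.variable_scope(name):
        # Bias-free linear layer: batch norm's beta offset replaces the bias.
        linact = tf.layers.dense(x, hidden_units, use_bias=False)
        # `training` toggles batch statistics vs. stored moving averages.
        # NB: in TF1 the moving-average updates are collected in
        # tf.GraphKeys.UPDATE_OPS and must be run alongside the train op.
        bn = tf.layers.batch_normalization(linact, training=is_training)
        return activation(bn)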
### VARIABLE INITIALISERS ####################################################################################