def BN_ReLU(self, net):
    # Batch Normalization followed by ReLU.
    # scale=False drops the learnable gamma: because the op is immediately
    # followed by ReLU, any scaling can be absorbed by the next layer,
    # so gamma is redundant. (batch_norm is assumed to be
    # tf.contrib.layers.batch_norm from TF 1.x.)
    net = batch_norm(net,
                     center=True,
                     scale=False,
                     activation_fn=tf.nn.relu)
    # ReLU is already applied via activation_fn, so no separate
    # tf.nn.relu(net) call is needed here.
    # Record activation statistics for TensorBoard.
    self._activation_summary(net)
    return net
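For context, here is a minimal sketch of how a helper like this is typically called inside a conv stack. The `tf.contrib.layers.conv2d` call and the layer sizes are illustrative assumptions, not taken from the original model; the point is that the conv layer disables its own activation and delegates normalization and nonlinearity to `BN_ReLU`.

    # Hypothetical call site (assumed names and sizes):
    # a 3x3 conv with no built-in activation, then the shared BN+ReLU helper.
    net = tf.contrib.layers.conv2d(net,
                                   num_outputs=64,
                                   kernel_size=3,
                                   activation_fn=None)  # BN/ReLU handled below
    net = self.BN_ReLU(net)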