def BN_ReLU(self, net):
    """Batch Normalization followed by ReLU."""
    # 'gamma' (scale) is disabled because the next op is ReLU; per the
    # tf.contrib.layers.batch_norm docs, the scaling can be folded into
    # the following layer's weights.
    net = batch_norm(net,
                     center=True,
                     scale=False,
                     activation_fn=tf.nn.relu)
    self._activation_summary(net)
    return net
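The excerpt calls self._activation_summary but does not define it. A minimal sketch of what such a helper commonly looks like in TF 1.x code (an assumption; the actual implementation in basic_resnet.py is not shown here):

def _activation_summary(self, net):
    """Hypothetical helper: log the activation histogram and sparsity to TensorBoard."""
    name = net.op.name
    tf.summary.histogram(name + '/activations', net)
    tf.summary.scalar(name + '/sparsity', tf.nn.zero_fraction(net))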
def conv2d(self, net, num_ker, ker_size, stride):
    """1-D convolution, expressed as a 2-D convolution with a kernel of width 1."""
    net = convolution2d(
        net,
        num_outputs=num_ker,
        kernel_size=[ker_size, 1],
        stride=[stride, 1],
        padding='SAME',
        activation_fn=None,
        normalizer_fn=None,
        weights_initializer=variance_scaling_initializer(),
        weights_regularizer=l2_regularizer(self.weight_decay),
        biases_initializer=tf.zeros_initializer())
    return net
Source file: basic_resnet.py
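For context, a minimal sketch of how these two helpers could be composed into a pre-activation residual unit, assuming TF 1.x with tf.contrib.layers supplying batch_norm, convolution2d, variance_scaling_initializer, and l2_regularizer (the excerpt does not show its imports). The residual_unit function and its signature are illustrative, not taken from basic_resnet.py:

import tensorflow as tf
# Likely imports for the helpers above (TF 1.x contrib layers; assumed, not shown in the excerpt).
from tensorflow.contrib.layers import (batch_norm, convolution2d,
                                       l2_regularizer,
                                       variance_scaling_initializer)

def residual_unit(model, net, num_ker, ker_size):
    """Illustrative pre-activation residual unit: BN-ReLU-conv twice, plus an identity shortcut.

    `model` stands for any object exposing the BN_ReLU and conv2d methods above
    (e.g. the class these methods belong to in basic_resnet.py).
    """
    shortcut = net
    net = model.BN_ReLU(net)
    net = model.conv2d(net, num_ker, ker_size, stride=1)
    net = model.BN_ReLU(net)
    net = model.conv2d(net, num_ker, ker_size, stride=1)
    return net + shortcut  # assumes num_ker matches the input channel count

Stride 1 and a matching channel count keep the shortcut an identity; a projection (e.g. a 1x1 convolution) would be needed whenever either changes.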