# Assumed module-level imports for TF 1.x (conv2d and batch_norm come from tf.contrib.layers).
import tensorflow as tf
from tensorflow.contrib.layers import conv2d, batch_norm

def conv(self, inputs, num_outputs, activations, normalizer_fn=batch_norm,
         kernel_size=3, stride=1, scope=None):
    '''Creates a convolutional layer with default arguments.'''
    # Map the activation name to the corresponding TensorFlow op.
    if activations == 'relu':
        activation_fn = tf.nn.relu
    elif activations == 'softplus':
        activation_fn = tf.nn.softplus
    else:
        raise ValueError("Invalid activation function.")
    # SAME padding preserves spatial size when stride is 1; batch-norm statistics
    # are updated in place (updates_collections=None) with a decay of 0.9.
    return conv2d(inputs=inputs,
                  num_outputs=num_outputs,
                  kernel_size=kernel_size,
                  stride=stride,
                  padding='SAME',
                  activation_fn=activation_fn,
                  normalizer_fn=normalizer_fn,
                  normalizer_params={'is_training': self.is_training,
                                     'updates_collections': None,
                                     'decay': 0.9},
                  scope=scope)
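For context, here is a minimal usage sketch showing how this helper could be called when stacking layers inside the model. The surrounding names (`images`, the `net` variable, and the scope names) are hypothetical and not part of the original code.

# Hypothetical usage inside a model-building method:
# a 64-filter 3x3 ReLU conv, followed by a strided 128-filter softplus conv.
net = self.conv(images, num_outputs=64, activations='relu', scope='conv1')
net = self.conv(net, num_outputs=128, activations='softplus',
                stride=2, scope='conv2')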