def convBnrelu(inputs, filters, kernel_size = 1, strides = 1, pad = 'VALID', name = None):
    """
    Create a Convolutional Layer + Batch Normalization + ReLU Activation
    args :
        inputs : (tf.Tensor) input Tensor, NHWC layout (channels last)
        filters : (int) number of output filters
        kernel_size : (int) size of the (square) kernel
        strides : (int) value of stride applied to both spatial dims
        pad : ('VALID'/'SAME') padding mode for the convolution
        name : (str) name scope wrapping the layer's ops
    return :
        tf.Tensor
    """
    with tf.name_scope(name):
        # Xavier/Glorot-initialized kernel: [k, k, in_channels, out_channels].
        # in_channels is read from the static shape, so inputs must have a
        # known channel dimension.
        kernel = tf.Variable(
            tf.contrib.layers.xavier_initializer(uniform=False)(
                [kernel_size, kernel_size, inputs.get_shape().as_list()[3], filters]),
            name='weights')
        # pad was documented but previously hardcoded to 'VALID'; it is now a
        # real parameter (default preserves the old behavior).
        conv = tf.nn.conv2d(inputs, kernel, [1, strides, strides, 1], padding=pad, data_format='NHWC')
        # Batch norm (decay 0.9) with the ReLU fused in via activation_fn.
        norm = tf.contrib.layers.batch_norm(conv, 0.9, epsilon=1e-5, activation_fn=tf.nn.relu, scope='_bn_relu')
        # Summaries are pinned to CPU so they never occupy accelerator memory.
        with tf.device('/cpu:0'):
            tf.summary.histogram('weights_summary', kernel, collections=['train'])
        return norm
# NOTE(review): removed stray web-scrape residue ("评论列表" / "文章目录" —
# "comment list" / "article table of contents"); the bare text was a syntax error.