def conv2d(self, filter_size, output_channels, stride=1, padding='SAME', activation_fn=tf.nn.relu,
           b_value=0.0, s_value=1.0, bn=True, stoch=False):
    """
    :param filter_size: int. assumes square filter
    :param output_channels: int
    :param stride: int
    :param padding: 'VALID' or 'SAME'
    :param activation_fn: tf.nn function
    :param b_value: float
    :param s_value: float
    :param bn: bool. apply batch normalization after the convolution
    :param stoch: bool. add Gaussian noise after batch norm and record the noisy activation
    """
    self.count['conv'] += 1
    self._layer_count += 1
    scope = 'conv_' + str(self.count['conv'])

    # A stochastic layer uses the noisy batch-norm path; otherwise use the clean path.
    clean = not stoch

    with tf.variable_scope(scope):
        # Convolution filter: [filter_size, filter_size, input_channels, output_channels]
        input_channels = self.input.get_shape()[3]
        filter_shape = [filter_size, filter_size, input_channels, output_channels]
        w = self.weight_variable(name='weights', shape=filter_shape)
        self.input = tf.nn.conv2d(self.input, w, strides=[1, stride, stride, 1], padding=padding)

        # Optional batch normalization
        if bn:
            self.input = self.conv_batch_norm(self.input, clean=clean, count=self._layer_count)

        # Optional additive Gaussian noise; the noisy activation is stored per layer
        if stoch:
            self.input = tf.random_normal(tf.shape(self.input)) + self.input
            self._noisy_z_dict[self._layer_count] = self.input

        # Optional bias, scale, and nonlinearity
        if b_value is not None:
            b = self.const_variable(name='bias', shape=[output_channels], value=b_value)
            self.input = tf.add(self.input, b)
        if s_value is not None:
            s = self.const_variable(name='scale', shape=[output_channels], value=s_value)
            self.input = tf.multiply(self.input, s)
        if activation_fn is not None:
            self.input = activation_fn(self.input)

    self.print_log(scope + ' output: ' + str(self.input.get_shape()))
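
A minimal usage sketch, under stated assumptions: this method belongs to a layer-builder class (called Network below purely for illustration) that stores the running tensor in self.input and provides the weight_variable, const_variable, conv_batch_norm, and print_log helpers used above. The Network name and its constructor are assumptions, not part of the original code; only the conv2d calls reflect the method shown here.

x = tf.placeholder(tf.float32, [None, 32, 32, 3])            # NHWC input batch
net = Network(x)                                              # hypothetical wrapper: sets net.input = x
net.conv2d(filter_size=3, output_channels=64)                 # conv_1: conv -> BN -> bias/scale -> ReLU
net.conv2d(filter_size=3, output_channels=128, stride=2)      # conv_2: strided conv, downsamples by 2
net.conv2d(filter_size=3, output_channels=128, stoch=True)    # conv_3: noisy path, saved in _noisy_z_dict
features = net.input                                          # running tensor after the last layer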