vae_skipconn.py 文件源码

python
阅读 16 收藏 0 点赞 0 评论 0

项目:divcolor 作者: aditya12agd5 项目源码 文件源码
def __cond_encoder(self, scope, input_tensor, bn_is_training, keep_prob, in_nch=1, reuse=False):
        """Build the 4-layer strided convolutional conditioning encoder.

        Reshapes the flat ``input_tensor`` to NHWC, then applies four
        stride-2 conv + ReLU + batch-norm stages (1 -> 128 -> 256 -> 512 ->
        hidden_size channels) and returns every stage's normalized
        activation so the decoder can form skip connections.

        Args:
            scope: unused here; kept for interface compatibility with the
                sibling encoder/decoder builders.
            input_tensor: flat batch of grayscale images, reshaped to
                [batch_size, img_height, img_width, in_nch].
            bn_is_training: bool tensor/flag forwarded to the batch-norm
                wrapper (train vs. inference statistics).
            keep_prob: unused here (no dropout in this path); kept for
                interface compatibility.
            in_nch: number of input channels. Previously hard-coded to 1 in
                the reshape; the default preserves the old behavior.
            reuse: when True, re-fetch existing variables by name instead of
                creating them (shape args omitted on purpose).

        Returns:
            Tuple (conv1_norm, conv2_norm, conv3_norm, conv4_norm) of the
            batch-normalized activations of each stage.
        """
        lf = self.layer_factory
        input_tensor2d = tf.reshape(input_tensor, [self.flags.batch_size, \
            self.flags.img_height, self.flags.img_width, in_nch])
        # Static channel count recovered from the reshaped tensor; feeds the
        # first filter's input-depth dimension.
        nch = tensor_shape.as_dimension(input_tensor2d.get_shape()[3]).value

        if not reuse:
            # First creation: declare filter and bias shapes explicitly.
            W_conv1 = lf.weight_variable(name='W_conv1_cond', shape=[5, 5, nch, 128])
            W_conv2 = lf.weight_variable(name='W_conv2_cond', shape=[5, 5, 128, 256])
            W_conv3 = lf.weight_variable(name='W_conv3_cond', shape=[5, 5, 256, 512])
            W_conv4 = lf.weight_variable(name='W_conv4_cond', shape=[4, 4, 512, self.flags.hidden_size])

            b_conv1 = lf.bias_variable(name='b_conv1_cond', shape=[128])
            b_conv2 = lf.bias_variable(name='b_conv2_cond', shape=[256])
            b_conv3 = lf.bias_variable(name='b_conv3_cond', shape=[512])
            b_conv4 = lf.bias_variable(name='b_conv4_cond', shape=[self.flags.hidden_size])
        else:
            # Reuse: look variables up by name; shapes already registered.
            W_conv1 = lf.weight_variable(name='W_conv1_cond')
            W_conv2 = lf.weight_variable(name='W_conv2_cond')
            W_conv3 = lf.weight_variable(name='W_conv3_cond')
            W_conv4 = lf.weight_variable(name='W_conv4_cond')

            b_conv1 = lf.bias_variable(name='b_conv1_cond')
            b_conv2 = lf.bias_variable(name='b_conv2_cond')
            b_conv3 = lf.bias_variable(name='b_conv3_cond')
            b_conv4 = lf.bias_variable(name='b_conv4_cond')

        # Each stage halves the spatial resolution (stride=2) and is
        # batch-normalized; reuse_vars mirrors the variable-reuse flag.
        conv1 = tf.nn.relu(lf.conv2d(input_tensor2d, W_conv1, stride=2) + b_conv1)
        conv1_norm = lf.batch_norm_aiuiuc_wrapper(conv1, bn_is_training, \
                'BN1_cond', reuse_vars=reuse)

        conv2 = tf.nn.relu(lf.conv2d(conv1_norm, W_conv2, stride=2) + b_conv2)
        conv2_norm = lf.batch_norm_aiuiuc_wrapper(conv2, bn_is_training, \
                'BN2_cond', reuse_vars=reuse)

        conv3 = tf.nn.relu(lf.conv2d(conv2_norm, W_conv3, stride=2) + b_conv3)
        conv3_norm = lf.batch_norm_aiuiuc_wrapper(conv3, bn_is_training, \
                'BN3_cond', reuse_vars=reuse)

        conv4 = tf.nn.relu(lf.conv2d(conv3_norm, W_conv4, stride=2) + b_conv4)
        conv4_norm = lf.batch_norm_aiuiuc_wrapper(conv4, bn_is_training, \
                'BN4_cond', reuse_vars=reuse)

        # All four normalized activations are returned for skip connections.
        return conv1_norm, conv2_norm, conv3_norm, conv4_norm
评论列表
文章目录


问题


面经


文章

微信
公众号

扫码关注公众号