style_transfer.py source code


Project: neural_style    Author: metaflow-ai    Source file
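The snippet below (Keras 1.x functional API) builds an Inception-style block: four parallel branches, a 1x1 convolution and progressively deeper stacks of 3x3 convolutions, each batch-normalized and ReLU-activated, then concatenated along the channel axis and batch-normalized once more. A minimal usage sketch follows the function.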
# Imports needed by this snippet (Keras 1.x API: Convolution2D, dim_ordering, border_mode, merge)
from keras.layers import Convolution2D, BatchNormalization, Activation, merge


def inception_layer_fast(input, do, channel_axis, batchnorm_mode, nb_layers):
    # Branch 1: 1x1 convolution
    out = Convolution2D(int(nb_layers/4), 1, 1, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.5, gamma_init='he_normal')(out)
    out1 = Activation('relu')(out)

    # Branch 2: 1x1 followed by a 3x3 convolution
    out = Convolution2D(int(nb_layers/4), 1, 1, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = Convolution2D(int(nb_layers/4), 3, 3, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.5, gamma_init='he_normal')(out)
    out2 = Activation('relu')(out)

    # Branch 3: 1x1 followed by two 3x3 convolutions (5x5 receptive field)
    out = Convolution2D(int(nb_layers/4), 1, 1, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = Convolution2D(int(nb_layers/4), 3, 3, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = Convolution2D(int(nb_layers/4), 3, 3, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.5, gamma_init='he_normal')(out)
    out3 = Activation('relu')(out)

    # Branch 4: 1x1 followed by three 3x3 convolutions (7x7 receptive field)
    out = Convolution2D(int(nb_layers/4), 1, 1, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = Convolution2D(int(nb_layers/4), 3, 3, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = Convolution2D(int(nb_layers/4), 3, 3, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = Convolution2D(int(nb_layers/4), 3, 3, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.5, gamma_init='he_normal')(out)
    out4 = Activation('relu')(out)

    m = merge([out1, out2, out3, out4], mode='concat', concat_axis=channel_axis)  # concatenate branches -> nb_layers feature maps
    m = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.5, gamma_init='he_normal')(m)

    return m
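A minimal usage sketch, not part of the original file: the 256x256 RGB input shape, the Theano-style 'th' dim_ordering, nb_layers=64, and batchnorm_mode=2 are assumptions chosen for illustration; with 'th' ordering the channel axis is 1.

from keras.layers import Input
from keras.models import Model

# Hypothetical wiring (assumed shapes and values, Keras 1.x): a 3-channel 256x256 image in 'th' ordering
img = Input(shape=(3, 256, 256))
x = inception_layer_fast(img, 'th', channel_axis=1, batchnorm_mode=2, nb_layers=64)
x = inception_layer_fast(x, 'th', channel_axis=1, batchnorm_mode=2, nb_layers=64)
model = Model(input=img, output=x)
model.summary()

With these values each branch produces int(64/4) = 16 feature maps, so the concatenated output has 64 channels, matching nb_layers.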