style_transfer.py file source

python

Project: neural_style, author: metaflow-ai
# Keras 1.x API: Convolution2D, merge, and the dim_ordering/border_mode arguments
from keras.layers import Activation, BatchNormalization, Convolution2D, merge

def inception_layer(input, do, channel_axis, batchnorm_mode):
    # Four parallel branches over the same input, concatenated channel-wise
    # Branch 1: 1x1 convolution
    out = Convolution2D(32, 1, 1, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.9, gamma_init='he_normal')(out)
    out1 = Activation('relu')(out)

    # Branch 2: 1x1 reduction, then a 3x3 convolution
    out = Convolution2D(32, 1, 1, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = Activation('relu')(out)
    out = Convolution2D(32, 3, 3, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.9, gamma_init='he_normal')(out)
    out2 = Activation('relu')(out)

    # Branch 3: 1x1 reduction, then a 5x5 convolution
    out = Convolution2D(32, 1, 1, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = Activation('relu')(out)
    out = Convolution2D(32, 5, 5, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.9, gamma_init='he_normal')(out)
    out3 = Activation('relu')(out)

    # Branch 4: 1x1 reduction, then two stacked 3x3 convolutions (5x5 receptive field)
    out = Convolution2D(32, 1, 1, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = Activation('relu')(out)
    out = Convolution2D(32, 3, 3, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = Activation('relu')(out)
    out = Convolution2D(32, 3, 3, dim_ordering=do, 
        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.9, gamma_init='he_normal')(out)
    out4 = Activation('relu')(out)

    m = merge([out1, out2, out3, out4], mode='concat', concat_axis=channel_axis)  # 4 branches x 32 filters = 128 channels

    return m
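
A minimal usage sketch, assuming the Keras 1.x API this snippet targets; the input shape, the 'th' (channels-first) ordering, batchnorm mode 0, and the names img_input and model are illustrative assumptions, not taken from the repository:

python
from keras.layers import Input
from keras.models import Model

# Assumed channels-first input, e.g. a 3 x 256 x 256 image (illustrative only)
img_input = Input(shape=(3, 256, 256))

# dim_ordering='th' puts channels on axis 1, so channel_axis=1;
# batchnorm mode 0 is the standard feature-wise normalization in Keras 1.x
out = inception_layer(img_input, do='th', channel_axis=1, batchnorm_mode=0)

model = Model(input=img_input, output=out)
model.summary()  # the final concat yields 4 * 32 = 128 channels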