def inception_layer(input, do, channel_axis, batchnorm_mode):
    """Build a 4-branch Inception-style block (Keras 1.x functional API).

    Branches (all convs: 32 filters, stride 1, 'same' padding, he_normal init):
      1: 1x1 -> BN -> ReLU
      2: 1x1 -> ReLU -> 3x3 -> BN -> ReLU
      3: 1x1 -> ReLU -> 5x5 -> BN -> ReLU
      4: 1x1 -> ReLU -> 3x3 -> ReLU -> 3x3 -> BN -> ReLU
    The four branch outputs are concatenated along `channel_axis`.

    Args:
        input: input tensor (Keras 1.x symbolic tensor).
            NOTE(review): parameter name shadows the `input` builtin, but it is
            part of the public signature (keyword callers), so it is kept.
        do: `dim_ordering` string passed to every Convolution2D ('th' or 'tf').
        channel_axis: axis used for BatchNormalization and the concat merge.
        batchnorm_mode: `mode` argument forwarded to BatchNormalization.

    Returns:
        The merged tensor: 4 branches x 32 filters = 128 output channels.
    """
    def _conv(x, nb_row, nb_col):
        # Shared conv factory: 32 filters, linear activation (non-linearity
        # is applied separately so BN can sit between conv and ReLU).
        return Convolution2D(32, nb_row, nb_col, dim_ordering=do,
                             init='he_normal', subsample=(1, 1),
                             border_mode='same', activation='linear')(x)

    def _bn_relu(x):
        # Branch tail: batch-norm then ReLU.
        x = BatchNormalization(mode=batchnorm_mode, axis=channel_axis,
                               momentum=0.9, gamma_init='he_normal')(x)
        return Activation('relu')(x)

    # Branch 1: 1x1 conv only.
    out1 = _bn_relu(_conv(input, 1, 1))

    # Branch 2: 1x1 bottleneck -> 3x3.
    out2 = _bn_relu(_conv(Activation('relu')(_conv(input, 1, 1)), 3, 3))

    # Branch 3: 1x1 bottleneck -> 5x5.
    out3 = _bn_relu(_conv(Activation('relu')(_conv(input, 1, 1)), 5, 5))

    # Branch 4: 1x1 bottleneck -> two stacked 3x3 (5x5 receptive field).
    out = Activation('relu')(_conv(input, 1, 1))
    out = Activation('relu')(_conv(out, 3, 3))
    out4 = _bn_relu(out)

    # Concatenate branch outputs: 4 x 32 = 128 channels (original comment
    # claimed "16 layers", which did not match the code).
    return merge([out1, out2, out3, out4], mode='concat',
                 concat_axis=channel_axis)
# NOTE(review): removed web-scrape page residue that was pasted here
# ("评论列表" = comment list, "文章目录" = article table of contents) —
# those bare lines were syntax errors in a Python module.