# Keras 1.x functional API: Convolution2D / dim_ordering / border_mode / merge are the pre-Keras-2 names.
from keras.layers import Activation, BatchNormalization, Convolution2D, merge


def inception_layer_fast(input, do, channel_axis, batchnorm_mode, nb_layers):
    # Inception-style block: four parallel branches built from 1x1 and stacked 3x3
    # convolutions (effective receptive fields of 1x1, 3x3, 5x5 and 7x7), each with
    # nb_layers/4 filters, concatenated along the channel axis.
    # Branch 1: 1x1 convolution
    out = Convolution2D(int(nb_layers / 4), 1, 1, dim_ordering=do,
                        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.5, gamma_init='he_normal')(out)
    out1 = Activation('relu')(out)
    # Branch 2: 1x1 -> 3x3 (effective 3x3 receptive field)
    out = Convolution2D(int(nb_layers / 4), 1, 1, dim_ordering=do,
                        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = Convolution2D(int(nb_layers / 4), 3, 3, dim_ordering=do,
                        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.5, gamma_init='he_normal')(out)
    out2 = Activation('relu')(out)
    # Branch 3: 1x1 -> 3x3 -> 3x3 (effective 5x5 receptive field)
    out = Convolution2D(int(nb_layers / 4), 1, 1, dim_ordering=do,
                        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = Convolution2D(int(nb_layers / 4), 3, 3, dim_ordering=do,
                        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = Convolution2D(int(nb_layers / 4), 3, 3, dim_ordering=do,
                        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.5, gamma_init='he_normal')(out)
    out3 = Activation('relu')(out)
    # Branch 4: 1x1 -> 3x3 -> 3x3 -> 3x3 (effective 7x7 receptive field)
    out = Convolution2D(int(nb_layers / 4), 1, 1, dim_ordering=do,
                        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(input)
    out = Convolution2D(int(nb_layers / 4), 3, 3, dim_ordering=do,
                        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = Convolution2D(int(nb_layers / 4), 3, 3, dim_ordering=do,
                        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = Convolution2D(int(nb_layers / 4), 3, 3, dim_ordering=do,
                        init='he_normal', subsample=(1, 1), border_mode='same', activation='linear')(out)
    out = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.5, gamma_init='he_normal')(out)
    out4 = Activation('relu')(out)
    # Concatenate the four branches: 4 * (nb_layers/4) = nb_layers feature maps in total
    m = merge([out1, out2, out3, out4], mode='concat', concat_axis=channel_axis)
    m = BatchNormalization(mode=batchnorm_mode, axis=channel_axis, momentum=0.5, gamma_init='he_normal')(m)
    return m
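
For reference, a minimal usage sketch of the block inside a Keras 1.x functional model. The input shape, dim_ordering='th', batchnorm_mode=0 and nb_layers values below are illustrative assumptions, not values taken from the article.

from keras.layers import Input
from keras.models import Model

# Hypothetical wiring (channels-first 'th' ordering, hence channel_axis=1); all values illustrative.
inputs = Input(shape=(3, 64, 64))
x = inception_layer_fast(inputs, do='th', channel_axis=1, batchnorm_mode=0, nb_layers=16)
x = inception_layer_fast(x, do='th', channel_axis=1, batchnorm_mode=0, nb_layers=32)
model = Model(input=inputs, output=x)  # each block outputs nb_layers feature maps at the input resolution
model.summary()

Because every convolution uses border_mode='same' and subsample=(1, 1), the spatial size is preserved and only the channel count changes, which makes the block easy to stack.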