from tensorflow.keras.layers import Activation, BatchNormalization, Conv2D

# `init` is the weight initializer used throughout the post; 'he_normal' is a common choice.
init = 'he_normal'

def _bn_relu_conv(filters, kernel_size=(3, 3), strides=(1, 1)):
    """Pre-activation block: BatchNorm -> ReLU -> Conv2D."""
    def f(inputs):
        x = BatchNormalization()(inputs)
        x = Activation('relu')(x)
        x = Conv2D(filters, kernel_size, strides=strides,
                   kernel_initializer=init, padding='same')(x)
        return x
    return f
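The closure returned by `_bn_relu_conv` can be applied directly to a Keras tensor and stacked like any other layer. A minimal usage sketch, assuming tensorflow.keras as the backend (the input shape and filter counts here are placeholders, not values from the original post):

from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model

# Stack two pre-activation conv blocks on a dummy 32x32 RGB input.
inputs = Input(shape=(32, 32, 3))
x = _bn_relu_conv(filters=16)(inputs)               # BN -> ReLU -> 3x3 conv, 16 filters
x = _bn_relu_conv(filters=16, strides=(2, 2))(x)    # same block, downsampling by stride 2
model = Model(inputs, x)
model.summary()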