from keras.layers import Conv2D
from keras.regularizers import l2


def _conv_bn_relu(**conv_params):
    """Helper to build a conv -> BN -> relu block.

    This is the original ResNet v1 scheme in https://arxiv.org/abs/1512.03385
    """
    filters = conv_params["filters"]
    kernel_size = conv_params["kernel_size"]
    strides = conv_params.setdefault("strides", (1, 1))
    dilation_rate = conv_params.setdefault("dilation_rate", (1, 1))
    conv_name = conv_params.setdefault("conv_name", None)
    bn_name = conv_params.setdefault("bn_name", None)
    relu_name = conv_params.setdefault("relu_name", None)
    kernel_initializer = conv_params.setdefault("kernel_initializer", "he_normal")
    padding = conv_params.setdefault("padding", "same")
    kernel_regularizer = conv_params.setdefault("kernel_regularizer", l2(1.e-4))

    def f(x):
        # Convolution first, then batch normalization and ReLU via the
        # module-level _bn_relu helper.
        x = Conv2D(filters=filters, kernel_size=kernel_size,
                   strides=strides, padding=padding,
                   dilation_rate=dilation_rate,
                   kernel_initializer=kernel_initializer,
                   kernel_regularizer=kernel_regularizer,
                   name=conv_name)(x)
        return _bn_relu(x, bn_name=bn_name, relu_name=relu_name)

    return f
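

# Usage sketch (an assumption, not part of the original file): _bn_relu is the
# companion helper this module is expected to define elsewhere; a minimal
# channels-last version is included here so the example runs standalone.
from keras.layers import Activation, BatchNormalization, Input
from keras.models import Model


def _bn_relu(x, bn_name=None, relu_name=None):
    """Minimal BN -> relu sketch, assuming channels-last data format."""
    norm = BatchNormalization(axis=-1, name=bn_name)(x)
    return Activation("relu", name=relu_name)(norm)


if __name__ == "__main__":
    # Apply the ResNet stem (7x7 conv, stride 2 -> BN -> relu) to a
    # 224x224 RGB input; the output tensor is 112x112x64.
    inputs = Input(shape=(224, 224, 3))
    outputs = _conv_bn_relu(filters=64, kernel_size=(7, 7), strides=(2, 2))(inputs)
    Model(inputs, outputs).summary()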