def unit_2(in_layer, n1=64, n2=64, n3=256, p2=1, d2=1):
    """Bottleneck residual unit with an identity shortcut (ResNet-style).

    Applies a 1x1 -> 3x3 (zero-padded, optionally dilated) -> 1x1
    convolution stack, each followed by batch normalization, then adds
    the unmodified input back in (identity shortcut) and applies a
    final ReLU.

    :param in_layer: input tensor; NOTE(review): for the identity add to
        be valid its channel count must equal ``n3`` and its spatial
        dims must be preserved by the 3x3 conv (i.e. ``p2 == d2``) —
        callers are responsible for this, it is not checked here.
    :param n1: filters in the first (channel-reducing) 1x1 conv.
    :param n2: filters in the middle 3x3 conv.
    :param n3: filters in the last (channel-expanding) 1x1 conv.
    :param p2: symmetric zero-padding applied before the 3x3 conv.
    :param d2: dilation rate of the 3x3 conv.
    :return: output tensor with the same shape as ``in_layer``
        (assuming the shape constraints above hold).
    """
    # 1x1 reduce; bias omitted because BatchNormalization follows.
    x = Conv2D(n1, (1, 1), strides=(1, 1), padding='valid',
               kernel_initializer=he_uniform(), use_bias=False)(in_layer)
    x = BatchNormalization(momentum=0.95)(x)
    x = Activation('relu')(x)

    # Explicit zero-padding + 'valid' conv instead of padding='same' so
    # the pad amount can track the dilation rate (p2 should equal d2 to
    # keep spatial dims unchanged).
    x = ZeroPadding2D(padding=(p2, p2))(x)
    x = Conv2D(n2, (3, 3), strides=(1, 1), padding='valid',
               dilation_rate=(d2, d2),
               kernel_initializer=he_uniform(), use_bias=False)(x)
    x = BatchNormalization(momentum=0.95)(x)
    x = Activation('relu')(x)

    # 1x1 expand back to n3 channels; no ReLU before the shortcut add,
    # per the standard post-activation residual layout.
    x = Conv2D(n3, (1, 1), strides=(1, 1), padding='valid',
               kernel_initializer=he_uniform(), use_bias=False)(x)
    x = BatchNormalization(momentum=0.95)(x)

    # Identity shortcut, then the unit's final activation.
    x = add([in_layer, x])
    x = Activation('relu')(x)
    return x
# (removed non-code blog-page artifacts left over from a web scrape)