import keras
from keras import regularizers
from keras.initializers import he_normal
from keras.layers import (Activation, AveragePooling2D, BatchNormalization,
                          Conv2D, Dense, GlobalAveragePooling2D, concatenate)

# Global hyperparameters (assumed values -- the original post defines these
# outside this snippet; these are the usual DenseNet-BC settings for CIFAR):
growth_rate  = 12      # k: feature maps added by each dense layer
depth        = 100     # network depth; (depth - 4) must be divisible by 6
compression  = 0.5     # theta: channel compression factor in transitions
weight_decay = 1e-4    # L2 regularization strength

def densenet(img_input, classes_num):
    def bn_relu(x):
        # BN -> ReLU pre-activation, applied before every convolution
        x = BatchNormalization()(x)
        x = Activation('relu')(x)
        return x

    def bottleneck(x):
        # 1x1 bottleneck conv (4k channels), then 3x3 conv producing k channels
        channels = growth_rate * 4
        x = bn_relu(x)
        x = Conv2D(channels, kernel_size=(1, 1), strides=(1, 1), padding='same',
                   kernel_initializer=he_normal(),
                   kernel_regularizer=regularizers.l2(weight_decay), use_bias=False)(x)
        x = bn_relu(x)
        x = Conv2D(growth_rate, kernel_size=(3, 3), strides=(1, 1), padding='same',
                   kernel_initializer=he_normal(),
                   kernel_regularizer=regularizers.l2(weight_decay), use_bias=False)(x)
        return x

    def single(x):
        # Plain (non-bottleneck) 3x3 dense layer; defined but not used below
        x = bn_relu(x)
        x = Conv2D(growth_rate, kernel_size=(3, 3), strides=(1, 1), padding='same',
                   kernel_initializer=he_normal(),
                   kernel_regularizer=regularizers.l2(weight_decay), use_bias=False)(x)
        return x

    def transition(x, inchannels):
        # 1x1 conv compresses channels by `compression`, then 2x2 average pooling
        outchannels = int(inchannels * compression)
        x = bn_relu(x)
        x = Conv2D(outchannels, kernel_size=(1, 1), strides=(1, 1), padding='same',
                   kernel_initializer=he_normal(),
                   kernel_regularizer=regularizers.l2(weight_decay), use_bias=False)(x)
        x = AveragePooling2D((2, 2), strides=(2, 2))(x)
        return x, outchannels

    def dense_block(x, blocks, nchannels):
        # Each layer's output is concatenated with all preceding feature maps
        concat = x
        for i in range(blocks):
            x = bottleneck(concat)
            concat = concatenate([x, concat], axis=-1)
            nchannels += growth_rate
        return concat, nchannels

    def dense_layer(x):
        return Dense(classes_num, activation='softmax',
                     kernel_initializer=he_normal(),
                     kernel_regularizer=regularizers.l2(weight_decay))(x)

    # Three dense blocks, two convs per bottleneck layer: (depth - 4) / 6
    # layers per block.
    nblocks = (depth - 4) // 6
    nchannels = growth_rate * 2

    x = Conv2D(nchannels, kernel_size=(3, 3), strides=(1, 1), padding='same',
               kernel_initializer=he_normal(),
               kernel_regularizer=regularizers.l2(weight_decay), use_bias=False)(img_input)

    x, nchannels = dense_block(x, nblocks, nchannels)
    x, nchannels = transition(x, nchannels)
    x, nchannels = dense_block(x, nblocks, nchannels)
    x, nchannels = transition(x, nchannels)
    x, nchannels = dense_block(x, nblocks, nchannels)
    # Note: the DenseNet paper places transitions only between dense blocks;
    # this transition after the last block is kept as in the original code.
    x, nchannels = transition(x, nchannels)
    x = bn_relu(x)
    x = GlobalAveragePooling2D()(x)
    x = dense_layer(x)
    return x
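
The function above only builds the graph from an existing input tensor; it does not construct or compile a model. A minimal usage sketch, assuming CIFAR-10-sized inputs (32x32x3) and 10 classes -- these values are illustrative, not taken from the original post:

from keras.layers import Input
from keras.models import Model

# Illustrative input shape and class count (CIFAR-10).
img_input = Input(shape=(32, 32, 3))
output = densenet(img_input, classes_num=10)

model = Model(img_input, output)
model.compile(loss='categorical_crossentropy',
              optimizer='sgd',
              metrics=['accuracy'])
model.summary()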