from keras.layers import Activation, BatchNormalization, Convolution2D, Dropout, merge

def addLayer(previousLayer, nChannels, nOutChannels, dropRate, blockNum):
    # BN -> ReLU -> 3x3 Conv composite function of one dense-block layer (channels-first, axis=1)
    bn = BatchNormalization(name='denseb_BatchNorm_{}'.format(blockNum), axis=1)(previousLayer)
    relu = Activation('relu', name='denseb_relu_{}'.format(blockNum))(bn)
    conv = Convolution2D(nOutChannels, 3, 3, border_mode='same', name='denseb_conv_{}'.format(blockNum))(relu)
    if dropRate is not None:
        dp = Dropout(dropRate, name='denseb_dropout_{}'.format(blockNum))(conv)
        return merge([dp, previousLayer], mode='concat', concat_axis=1)
    else:
        return merge([conv, previousLayer], mode='concat', concat_axis=1)
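
As a rough usage sketch (the denseBlock helper, input shape, and hyperparameter values below are illustrative assumptions, not part of the original snippet), successive calls to addLayer can be chained so that each layer's concatenated output feeds the next call, which is how a DenseNet dense block grows its channel count:

from keras.layers import Input

def denseBlock(x, nLayers, nChannels, growthRate, dropRate):
    # Hypothetical dense block built by repeatedly applying addLayer.
    for i in range(nLayers):
        x = addLayer(x, nChannels, growthRate, dropRate, blockNum=i)
        nChannels += growthRate  # each call concatenates growthRate new feature maps
    return x

inputs = Input(shape=(16, 32, 32))  # channels-first layout, matching axis=1 above
outputs = denseBlock(inputs, nLayers=4, nChannels=16, growthRate=12, dropRate=0.2)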