from keras.layers import (BatchNormalization, Activation, Convolution2D,
                          Dropout, AveragePooling2D)


def addTransition(previousLayer, nChannels, nOutChannels, dropRate, blockNum):
    # Transition between dense blocks: BN -> ReLU -> 1x1 conv -> (optional dropout) -> 2x2 average pooling
    bn = BatchNormalization(name='tr_BatchNorm_{}'.format(blockNum), axis=1)(previousLayer)
    relu = Activation('relu', name='tr_relu_{}'.format(blockNum))(bn)
    # 1x1 convolution compresses the feature maps to nOutChannels channels
    conv = Convolution2D(nOutChannels, 1, 1, border_mode='same',
                         name='tr_conv_{}'.format(blockNum))(relu)
    if dropRate is not None:
        dp = Dropout(dropRate, name='tr_dropout_{}'.format(blockNum))(conv)
        avgPool = AveragePooling2D(pool_size=(2, 2))(dp)
    else:
        avgPool = AveragePooling2D(pool_size=(2, 2))(conv)
    return avgPool
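
For reference, a minimal usage sketch (the input tensor and the 64/32 channel counts here are hypothetical, assuming Theano-style channels-first ordering to match the axis=1 BatchNormalization above):

from keras.layers import Input

# Hypothetical example: compress 64 feature maps down to 32 between two dense blocks.
x = Input(shape=(64, 32, 32))  # (channels, height, width)
out = addTransition(x, nChannels=64, nOutChannels=32, dropRate=0.2, blockNum=1)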