def __init__(self, rng, inputVar, cfgParams, copyLayer=None, layerNum=None):
    """
    Allocate a PoolLayer with shared variable internal parameters.

    :type rng: numpy.random.RandomState
    :param rng: a random number generator used to initialize weights
        (unused here: pooling has no weights, kept for a uniform layer API)
    :type inputVar: theano.tensor.dtensor4
    :param inputVar: symbolic image tensor, of shape image_shape
        (must be 4D: batch x channels x height x width)
    :type cfgParams: PoolLayerParams
    :param cfgParams: layer configuration; fields read here are
        poolsize, inputDim, activation and poolType
    :param copyLayer: accepted for API symmetry with weighted layers;
        not used (a pooling layer has no parameters to copy)
    :param layerNum: index of this layer, used only to name the output
    """
    poolsize = cfgParams.poolsize
    inputDim = cfgParams.inputDim
    activation = cfgParams.activation
    poolType = cfgParams.poolType

    self.cfgParams = cfgParams
    self.layerNum = layerNum
    self.inputVar = inputVar

    if inputVar.type.ndim != 4:
        # Give a diagnosable message instead of a bare TypeError()
        raise TypeError("inputVar must be a 4D tensor, got ndim={}".format(inputVar.type.ndim))

    # Pooling has no trainable parameters.
    self.params = []
    self.weights = []

    # Downsample each feature map individually according to poolType.
    if poolType == 0:
        # max pooling, discarding any partial border window
        pooled_out = pool_2d(input=self.inputVar, ds=poolsize, ignore_border=True)
    elif poolType == 1:
        # average pooling: extract non-overlapping neighbourhoods, take
        # their mean, then reshape the flat result back to 4D
        pooled_out = theano.sandbox.neighbours.images2neibs(ten4=self.inputVar, neib_shape=poolsize, mode='ignore_borders').mean(axis=-1)
        new_shape = T.cast(T.join(0, self.inputVar.shape[:-2], T.as_tensor([self.inputVar.shape[2]//poolsize[0]]), T.as_tensor([self.inputVar.shape[3]//poolsize[1]])), 'int64')
        pooled_out = T.reshape(pooled_out, new_shape, ndim=4)
    elif poolType == 3:
        # plain subsampling: crop to a multiple of poolsize, then stride
        pooled_out = self.inputVar[:, :, :(inputDim[2]//poolsize[0])*poolsize[0], :(inputDim[3]//poolsize[1])*poolsize[1]][:, :, ::poolsize[0], ::poolsize[1]]
    elif poolType == -1:
        # pass-through, no pooling at all
        pooled_out = self.inputVar
    else:
        raise ValueError("Unknown pool type!")

    # Optional elementwise nonlinearity on the pooled maps.
    self.output = (pooled_out if activation is None
                   else activation(pooled_out))
    self.output.name = 'output_layer_{}'.format(self.layerNum)
# (removed page-scrape artifacts that were not part of the code: "评论列表"
#  ["comment list"] and "文章目录" ["article table of contents"] — blog UI text
#  that would otherwise raise NameError at import time)