def __init__(self, rng, inputVar, cfgParams, copyLayer=None, layerNum=None):
    """
    Allocate a PoolLayer that downsamples a 4D input along its last two
    (spatial) axes, optionally followed by an element-wise activation.

    :type rng: numpy.random.RandomState
    :param rng: random number generator, forwarded to the base layer
        (pooling itself has no weights to initialize)
    :type inputVar: theano.tensor.dtensor4
    :param inputVar: symbolic 4D image tensor; assumed layout is
        (batch, channels, height, width) -- confirm against callers
    :type cfgParams: PoolLayerParams
    :param cfgParams: layer configuration; fields read here are
        poolsize, inputDim, activation and poolType
    :param copyLayer: unused for pooling (kept for interface symmetry
        with weighted layers)
    :param layerNum: index of this layer; used only to name the output
    :raises TypeError: if inputVar is not a 4D tensor
    :raises NotImplementedError: for an unsupported poolType value
    """
    from theano.tensor.signal.pool import pool_2d

    super(PoolLayer, self).__init__(rng)

    poolsize = cfgParams.poolsize
    inputDim = cfgParams.inputDim
    activation = cfgParams.activation
    poolType = cfgParams.poolType

    self.cfgParams = cfgParams
    self.layerNum = layerNum
    self.inputVar = inputVar

    if inputVar.type.ndim != 4:
        raise TypeError("inputVar must be a 4D tensor, got ndim={}".format(inputVar.type.ndim))

    # Pooling has no trainable parameters.
    self.params = []
    self.weights = []

    # Downsample each feature map individually.
    # NOTE(review): poolType == 2 is intentionally absent; unknown values
    # fall through to NotImplementedError below.
    if poolType == 0:
        # max pooling
        pooled_out = pool_2d(input=self.inputVar, ds=poolsize, ignore_border=True, mode='max')
    elif poolType == 1:
        # average pooling (padding included in the average)
        pooled_out = pool_2d(input=self.inputVar, ds=poolsize, ignore_border=True, mode='average_inc_pad')
    elif poolType == 3:
        # plain subsampling: first crop each spatial axis down to a multiple
        # of the pool size (this is what "ignore border" means here), then
        # keep every poolsize-th element along each spatial axis
        pooled_out = self.inputVar[:, :,
                                   :(inputDim[2] // poolsize[0]) * poolsize[0],
                                   :(inputDim[3] // poolsize[1]) * poolsize[1]][:, :, ::poolsize[0], ::poolsize[1]]
    elif poolType == -1:
        # no pooling at all; pass the input through unchanged
        pooled_out = self.inputVar
    else:
        raise NotImplementedError("unsupported poolType: {}".format(poolType))

    # Keep the pre-activation output around for callers that need it.
    self.output_pre_act = pooled_out
    self.output = (pooled_out if activation is None
                   else activation(pooled_out))
    self.output.name = 'output_layer_{}'.format(self.layerNum)