def __init__(self, inputSize, kernelSize, kernelStride, outputSize, reduceSize, pool, useBatchNorm, reduceStride=None, padding=True):
    """Construct the parallel branches of an Inception block.

    Branches built, in order:
      * one branch per entry of ``kernelSize``: 1x1 reduction conv -> nxn conv;
      * one pooling branch: ``pool`` followed by an optional 1x1 conv
        (added only when ``reduceSize`` has a non-None entry for it);
      * an optional final branch: a plain 1x1 conv (channel-wise pooling),
        again gated on a non-None ``reduceSize`` entry.

    Args:
        inputSize: number of input channels fed to every branch.
        kernelSize: iterable of nxn kernel sizes, one per conv branch.
        kernelStride: strides matching ``kernelSize`` entry-for-entry.
        outputSize: output channel counts for the nxn convs; also stored
            on ``self.outputSize``.
        reduceSize: channel counts for the 1x1 reduction convs; may hold
            extra entries (or ``None``) for the pool / channel-pool branches.
        pool: pooling module placed at the head of the pooling branch.
        useBatchNorm: if True, insert ``BatchNorm`` after each conv.
        reduceStride: optional per-branch strides for the 1x1 convs;
            ``None`` means stride 1 everywhere.
        padding: if True, pad the nxn conv by floor(kernel/2) ("same"-style);
            if False, no padding.
    """
    super(Inception, self).__init__()
    self.outputSize = outputSize

    def _stride_at(idx):
        # 1x1 reduction-conv stride; defaults to 1 when no table was given.
        return 1 if reduceStride is None else reduceStride[idx]

    branches = []

    # Conv branches: 1x1 conv (reduce) -> nxn conv.
    for idx, ksize in enumerate(kernelSize):
        layers = OrderedDict()
        layers['1_conv'] = Conv2d(inputSize, reduceSize[idx], (1, 1),
                                  _stride_at(idx), (0, 0))
        if useBatchNorm:
            layers['2_bn'] = BatchNorm(reduceSize[idx])
        layers['3_relu'] = nn.ReLU()
        # nxn conv, optionally "same"-padded by floor(kernel/2).
        pad = int(numpy.floor(ksize / 2)) if padding else 0
        layers['4_conv'] = Conv2d(reduceSize[idx], outputSize[idx],
                                  ksize, kernelStride[idx], pad)
        if useBatchNorm:
            layers['5_bn'] = BatchNorm(outputSize[idx])
        layers['6_relu'] = nn.ReLU()
        branches.append(nn.Sequential(layers))

    # Pooling branch: pool -> optional 1x1 conv.
    idx = len(kernelSize)
    layers = OrderedDict()
    layers['1_pool'] = pool
    if idx < len(reduceSize) and reduceSize[idx] is not None:
        layers['2_conv'] = Conv2d(inputSize, reduceSize[idx], (1, 1),
                                  _stride_at(idx), (0, 0))
        if useBatchNorm:
            layers['3_bn'] = BatchNorm(reduceSize[idx])
        layers['4_relu'] = nn.ReLU()
    branches.append(nn.Sequential(layers))

    # Optional final branch: bare 1x1 conv acting as channel-wise pooling.
    idx += 1
    if idx < len(reduceSize) and reduceSize[idx] is not None:
        layers = OrderedDict()
        layers['1_conv'] = Conv2d(inputSize, reduceSize[idx], (1, 1),
                                  _stride_at(idx), (0, 0))
        if useBatchNorm:
            layers['2_bn'] = BatchNorm(reduceSize[idx])
        layers['3_relu'] = nn.ReLU()
        branches.append(nn.Sequential(layers))

    # Register the branches so their parameters are tracked by PyTorch.
    self.seq_list = nn.ModuleList(branches)