Python average_pooling_2d() usage examples (source code)
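Before the project snippets below, here is a minimal, self-contained sketch of what chainer.functions.average_pooling_2d does. The array sizes are illustrative only and are not taken from any of the projects quoted on this page.

import numpy as np
import chainer.functions as F

# A toy NCHW batch: 2 images, 3 channels, 8x8 pixels.
x = np.random.rand(2, 3, 8, 8).astype(np.float32)

# A 3x3 window with stride 2 and padding 1 halves the spatial resolution.
y = F.average_pooling_2d(x, ksize=3, stride=2, pad=1)
print(y.shape)  # (2, 3, 4, 4)

# With ksize equal to the full spatial extent this becomes global average
# pooling, the pattern used at the end of the GoogLeNet/ResNet snippets below.
g = F.average_pooling_2d(x, ksize=8)
print(g.shape)  # (2, 3, 1, 1)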

Source file: googlenetbn.py (project: chainer-deconv, author: germanRos)
def __call__(self, x, t):
        self.clear()
        test = not self.train

        h = F.max_pooling_2d(
            F.relu(self.norm1(self.conv1(x), test=test)),  3, stride=2, pad=1)
        h = F.max_pooling_2d(
            F.relu(self.norm2(self.conv2(h), test=test)), 3, stride=2, pad=1)

        h = self.inc3a(h)
        h = self.inc3b(h)
        h = self.inc3c(h)
        h = self.inc4a(h)

        a = F.average_pooling_2d(h, 5, stride=3)
        a = F.relu(self.norma(self.conva(a), test=test))
        a = F.relu(self.norma2(self.lina(a), test=test))
        a = self.outa(a)
        self.loss1 = F.softmax_cross_entropy(a, t)

        h = self.inc4b(h)
        h = self.inc4c(h)
        h = self.inc4d(h)

        b = F.average_pooling_2d(h, 5, stride=3)
        b = F.relu(self.normb(self.convb(b), test=test))
        b = F.relu(self.normb2(self.linb(b), test=test))
        b = self.outb(b)
        self.loss2 = F.softmax_cross_entropy(b, t)

        h = self.inc4e(h)
        h = self.inc5a(h)
        h = F.average_pooling_2d(self.inc5b(h), 7)
        h = self.out(h)
        self.loss3 = F.softmax_cross_entropy(h, t)

        self.loss = 0.3 * (self.loss1 + self.loss2) + self.loss3
        self.accuracy = F.accuracy(h, t)
        return self.loss
Source file: test_average_pooling_2d.py (project: chainer-deconv, author: germanRos)
def forward(self):
        x = chainer.Variable(self.x)
        return functions.average_pooling_2d(
            x, 3, stride=2, pad=1, use_cudnn=self.use_cudnn)
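As a side note (not part of the original test file), the spatial output size of the call above can be checked with chainer.utils.conv.get_conv_outsize, which performs the same output-size arithmetic the pooling function applies for a 3x3 window with stride 2 and pad 1:

from chainer.utils import conv

# An input of height 7 maps to (7 + 2*1 - 3) // 2 + 1 = 4 output rows.
print(conv.get_conv_outsize(7, 3, 2, 1))  # 4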
Source file: modified_googlenet.py (project: deep_metric_learning, author: ronekko)
def __call__(self, x, subtract_mean=True):
        if subtract_mean:
            x = x - self._image_mean
#        h = super(ModifiedGoogLeNet, self).__call__(
#            x, layers=['pool5'], train=train)['pool5']
#        h = self.bn_fc(h, test=not train)
#        y = self.fc(h)
#        return y
        h = F.relu(self.conv1(x))
        h = F.max_pooling_2d(h, 3, stride=2)
        h = F.local_response_normalization(h, n=5, k=1, alpha=1e-4/5)
        h = F.relu(self.conv2_reduce(h))
        h = F.relu(self.conv2(h))
        h = F.local_response_normalization(h, n=5, k=1, alpha=1e-4/5)
        h = F.max_pooling_2d(h, 3, stride=2)
        h = self.inc3a(h)
        h = self.inc3b(h)
        h = F.max_pooling_2d(h, 3, stride=2)
        h = self.inc4a(h)
        h = self.inc4b(h)
        h = self.inc4c(h)
        h = self.inc4d(h)
        h = self.inc4e(h)
        h = F.max_pooling_2d(h, 3, stride=2)
        h = self.inc5a(h)
        h = self.inc5b(h)
        h = F.average_pooling_2d(h, 7, stride=1)
        h = self.bn_fc(h)
        y = self.fc(h)
        if self.normalize_output:
            y = F.normalize(y)
        return y
Source file: net3.py (project: GUINNESS, author: HirokiNakahara)
def __call__(self, x, train):
        h = bst.bst(self.b0(self.conv0(x)))
        h = bst.bst(self.b1(self.conv1(h)))
        h = bst.bst(self.b2(self.conv2(h)))
        h = F.max_pooling_2d(h, 2)
        h = F.average_pooling_2d(h, 32)
        h = self.b3(self.fc0(h))
        return h
Source file: net2.py (project: GUINNESS, author: HirokiNakahara)
def __call__(self, x, train):
        h = bst.bst(self.b0(self.conv0(x)))
        h = bst.bst(self.b1(self.conv1(h)))
        h = bst.bst(self.b2(self.conv2(h)))
        h = F.max_pooling_2d(h, 2)
        h = F.average_pooling_2d(h, 24)
        h = self.b3(self.fc0(h))
        return h
Source file: pyramidal_residual_networks.py (project: pyramidal_residual_networks, author: nutszebra)
def maybe_pooling(self, x):
        if 2 in self.strides:
            return F.average_pooling_2d(x, 1, 2, 0)
        return x
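A 1x1 window with stride 2, as in maybe_pooling above, simply keeps every other pixel, so the call is numerically equivalent to plain slicing. A small sketch with a toy input (not the project's data):

import numpy as np
import chainer.functions as F

x = np.arange(16, dtype=np.float32).reshape(1, 1, 4, 4)
pooled = F.average_pooling_2d(x, 1, 2, 0)   # 1x1 window, stride 2, no padding
sliced = x[:, :, ::2, ::2]                  # plain subsampling
print(np.allclose(pooled.data, sliced))     # True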
Source file: pyramidal_residual_networks.py (project: pyramidal_residual_networks, author: nutszebra)
def __call__(self, x, train=False):
        h = self.conv1(x, train=train)
        for i in six.moves.range(len(self.strides)):
            for ii in six.moves.range(len(self.strides[i])):
                name = 'res_block{}_{}'.format(i, ii)
                h = self[name](h, train=train)
        batch, channels, height, width = h.data.shape
        h = F.reshape(F.average_pooling_2d(h, (height, width)), (batch, channels, 1, 1))
        return F.reshape(self.linear(h, train=train), (batch, self.category_num))
Source file: function.py (project: ddnn, author: kunglab)
def __init__(self, ksize, stride=None, pad=0, use_cudnn=True):
        self._function = "average_pooling_2d"
        self.ksize = ksize
        self.stride = stride
        self.pad = pad
        self.use_cudnn = use_cudnn
Source file: function.py (project: ddnn, author: kunglab)
def __call__(self, x):
        return F.average_pooling_2d(x, self.ksize, self.stride, self.pad, self.use_cudnn)
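Taken together, the two ddnn excerpts above behave like a small callable pooling layer. A hypothetical reconstruction follows; the class name AveragePooling2D is assumed (it is not shown in the excerpts), and the use_cudnn argument is dropped in the call because recent Chainer versions no longer accept it:

import numpy as np
import chainer.functions as F

class AveragePooling2D(object):
    # Hypothetical container matching the __init__/__call__ excerpts above.
    def __init__(self, ksize, stride=None, pad=0, use_cudnn=True):
        self._function = "average_pooling_2d"
        self.ksize = ksize
        self.stride = stride
        self.pad = pad
        self.use_cudnn = use_cudnn

    def __call__(self, x):
        return F.average_pooling_2d(x, self.ksize, self.stride, self.pad)

pool = AveragePooling2D(ksize=2, stride=2)
y = pool(np.ones((1, 3, 8, 8), dtype=np.float32))
print(y.shape)  # (1, 3, 4, 4)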
Source file: net.py (project: chainer-cifar, author: dsanno)
def __call__(self, x):
        skip = False
        if chainer.config.train and self.skip_ratio > 0 and np.random.rand() < self.skip_ratio:
            skip = True
        sh, sw = self.conv1.stride
        c_out, c_in, kh, kw = self.conv1.W.data.shape
        b, c, hh, ww = x.data.shape
        if sh == 1 and sw == 1:
            shape_out = (b, c_out, hh, ww)
        else:
            hh = (hh + 2 - kh) // sh + 1
            ww = (ww + 2 - kw) // sw + 1
            shape_out = (b, c_out, hh, ww)
        h = x
        if x.data.shape != shape_out:
            xp = chainer.cuda.get_array_module(x.data)
            n, c, hh, ww = x.data.shape
            pad_c = shape_out[1] - c
            p = xp.zeros((n, pad_c, hh, ww), dtype=xp.float32)
            p = chainer.Variable(p)
            x = F.concat((p, x))
            if x.data.shape[2:] != shape_out[2:]:
                x = F.average_pooling_2d(x, 1, 2)
        if skip:
            return x
        h = self.bn1(self.conv1(h))
        if self.activation1 is not None:
            h = self.activation1(h)
        h = self.bn2(self.conv2(h))
        if not chainer.config.train:
            h = h * (1 - self.skip_ratio)
        if self.swapout:
            h = F.dropout(h) + F.dropout(x)
        else:
            h = h + x
        if self.activation2 is not None:
            return self.activation2(h)
        else:
            return h
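The shortcut handling in the block above (zero-pad the channel dimension, then downsample with a 1x1, stride-2 average pooling when the spatial sizes differ) can be isolated into a small helper. A sketch assuming CPU/NumPy NCHW inputs:

import numpy as np
import chainer.functions as F

def project_shortcut(x, out_channels, downsample):
    n, c, h, w = x.shape
    if out_channels > c:
        # Pad the missing channels with zeros so the widths match.
        pad = np.zeros((n, out_channels - c, h, w), dtype=np.float32)
        x = F.concat((pad, x), axis=1)
    if downsample:
        # A 1x1 window with stride 2 halves the resolution without parameters.
        x = F.average_pooling_2d(x, 1, 2)
    return x

y = project_shortcut(np.zeros((2, 16, 32, 32), dtype=np.float32), 32, True)
print(y.shape)  # (2, 32, 16, 16)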
Source file: net.py (project: chainer-cifar, author: dsanno)
def __call__(self, x):
        skip = False
        if chainer.config.train and self.skip_ratio > 0 and np.random.rand() < self.skip_ratio:
            skip = True
        sh, sw = self.conv1.stride
        c_out, c_in, kh, kw = self.conv1.W.data.shape
        b, c, hh, ww = x.data.shape
        if sh == 1 and sw == 1:
            shape_out = (b, c_out, hh, ww)
        else:
            hh = (hh + 2 - kh) // sh + 1
            ww = (ww + 2 - kw) // sw + 1
            shape_out = (b, c_out, hh, ww)
        h = x
        if x.data.shape != shape_out:
            xp = chainer.cuda.get_array_module(x.data)
            n, c, hh, ww = x.data.shape
            pad_c = shape_out[1] - c
            p = xp.zeros((n, pad_c, hh, ww), dtype=xp.float32)
            p = chainer.Variable(p)
            x = F.concat((p, x))
            if x.data.shape[2:] != shape_out[2:]:
                x = F.average_pooling_2d(x, 1, 2)
        if skip:
            return x
        h = self.bn1(h)
        if self.activation1 is not None:
            h = self.activation1(h)
        h = self.conv1(h)
        h = self.bn2(h)
        if self.activation2 is not None:
            h = self.activation2(h)
        h = self.conv2(h)
        if not chainer.config.train:
            h = h * (1 - self.skip_ratio)
        if self.swapout:
            return F.dropout(h) + F.dropout(x)
        else:
            return h + x
Source file: net.py (project: chainer-cifar, author: dsanno)
def __call__(self, x):
        xp = chainer.cuda.get_array_module(x.data)
        skip = False
        if chainer.config.train and self.skip_ratio > 0 and np.random.rand() < self.skip_ratio:
            skip = True
        sh, sw = self.conv1.stride
        c_out, c_in, kh, kw = self.conv1.W.data.shape
        b, c, hh, ww = x.data.shape
        if sh == 1 and sw == 1:
            shape_out = (b, c_out, hh, ww)
        else:
            hh = (hh + 2 - kh) // sh + 1
            ww = (ww + 2 - kw) // sw + 1
            shape_out = (b, c_out, hh, ww)
        h = x
        if x.data.shape[2:] != shape_out[2:]:
            x = F.average_pooling_2d(x, 1, 2)
        if x.data.shape[1] != c_out:
            n, c, hh, ww = x.data.shape
            pad_c = c_out - c
            p = xp.zeros((n, pad_c, hh, ww), dtype=xp.float32)
            p = chainer.Variable(p)
            x = F.concat((x, p), axis=1)
        if skip:
            return x
        h = self.bn1(h)
        h = self.conv1(h)
        h = self.bn2(h)
        if self.activation is not None:
            h = self.activation(h)
        h = self.conv2(h)
        h = self.bn3(h)
        if self.skip_ratio > 0 and not chainer.config.train:
            h = h * (1 - self.skip_ratio)
        return h + x
Source file: net.py (project: chainer-cifar, author: dsanno)
def __call__(self, x):
        h = self.bconv1_1(x)
        h = self.bconv1_2(h)
        h = F.dropout(F.max_pooling_2d(h, 2), 0.25)
        h = self.bconv2_1(h)
        h = self.bconv2_2(h)
        h = F.dropout(F.max_pooling_2d(h, 2), 0.25)
        h = self.bconv3_1(h)
        h = self.bconv3_2(h)
        h = self.bconv3_3(h)
        h = self.bconv3_4(h)
        h = F.dropout(F.max_pooling_2d(h, 2), 0.25)
        h = F.average_pooling_2d(h, 4, 1, 0)
        h = self.fc(F.dropout(h))
        return h
Source file: net.py (project: chainer-cifar, author: dsanno)
def __call__(self, x):
        h = self.bconv1_1(x)
        h = F.dropout(h, 0.25)
        h = self.bconv1_2(h)
        h = F.dropout(h, 0.25)
        h = self.bconv1_3(h)
        h = F.dropout(h, 0.25)
        h = self.bconv1_4(h)
        h = F.dropout(F.max_pooling_2d(h, 2), 0.25)
        h = self.bconv2_1(h)
        h = F.dropout(h, 0.25)
        h = self.bconv2_2(h)
        h = F.dropout(h, 0.25)
        h = self.bconv2_3(h)
        h = F.dropout(h, 0.25)
        h = self.bconv2_4(h)
        h = F.dropout(F.max_pooling_2d(h, 2), 0.25)
        h = self.bconv3_1(h)
        h = F.dropout(h, 0.25)
        h = self.bconv3_2(h)
        h = F.dropout(h, 0.25)
        h = self.bconv3_3(h)
        h = F.dropout(h, 0.25)
        h = self.bconv3_4(h)
        h = F.dropout(h, 0.25)
        h = self.bconv3_5(h)
        h = F.dropout(h, 0.25)
        h = self.bconv3_6(h)
        h = F.dropout(h, 0.25)
        h = self.bconv3_7(h)
        h = F.dropout(h, 0.25)
        h = self.bconv3_8(h)
        h = F.dropout(F.max_pooling_2d(h, 2), 0.25)
        h = F.average_pooling_2d(h, 4, 1, 0)
        h = self.fc(F.dropout(h))
        return h
Source file: net.py (project: chainer-cifar, author: dsanno)
def __call__(self, x):
        sh, sw = self.conv1_1.stride
        c_out, c_in, kh, kw = self.conv1_1.W.data.shape
        b, c, hh, ww = x.data.shape
        if sh == 1 and sw == 1:
            shape_out = (b, c_out, hh, ww)
        else:
            hh = (hh + 2 - kh) // sh + 1
            ww = (ww + 2 - kw) // sw + 1
            shape_out = (b, c_out, hh, ww)
        h = x
        if x.data.shape != shape_out:
            xp = chainer.cuda.get_array_module(x.data)
            n, c, hh, ww = x.data.shape
            pad_c = shape_out[1] - c
            p = xp.zeros((n, pad_c, hh, ww), dtype=xp.float32)
            x = F.concat((p, x))
            if x.data.shape[2:] != shape_out[2:]:
                x = F.average_pooling_2d(x, 1, 2)
        h1 = self.bn1_1(self.conv1_1(h))
        h2 = self.bn2_1(self.conv2_1(h))
        if self.activation1 is not None:
            h1 = self.activation1(h1)
            h2 = self.activation1(h2)
        h1 = self.bn1_2(self.conv1_2(h1))
        h2 = self.bn2_2(self.conv2_2(h2))
        h = shake_shake(h1, h2) + x
        if self.activation2 is not None:
            return self.activation2(h)
        else:
            return h
Source file: nn.py (project: adversarial-autoencoder, author: musyoku)
def __call__(self, x):
        return functions.average_pooling_2d(x, self.ksize, self.stride, self.pad)
Source file: googlenet.py (project: deel, author: uei)
def forward(self, x):
        h = F.relu(self.conv1(x))
        h = F.local_response_normalization(
            F.max_pooling_2d(h, 3, stride=2), n=5)
        h = F.relu(self.conv2_reduce(h))
        h = F.relu(self.conv2(h))
        h = F.max_pooling_2d(
            F.local_response_normalization(h, n=5), 3, stride=2)

        h = self.inc3a(h)
        h = self.inc3b(h)
        h = F.max_pooling_2d(h, 3, stride=2)
        h = self.inc4a(h)

        l = F.average_pooling_2d(h, 5, stride=3)
        l = F.relu(self.loss1_conv(l))
        l = F.relu(self.loss1_fc1(l))
        l = self.loss1_fc2(l)
        loss1 = l

        h = self.inc4b(h)
        h = self.inc4c(h)
        h = self.inc4d(h)

        l = F.average_pooling_2d(h, 5, stride=3)
        l = F.relu(self.loss2_conv(l))
        l = F.relu(self.loss2_fc1(l))
        l = self.loss2_fc2(l)
        loss2 = l

        h = self.inc4e(h)
        h = F.max_pooling_2d(h, 3, stride=2)
        h = self.inc5a(h)
        h = self.inc5b(h)

        h = F.average_pooling_2d(h, 7, stride=1)
        h = self.loss3_fc(F.dropout(h, 0.4, train=self.train))
        loss3 = h

        return loss1, loss2, loss3
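The three values returned above are the two auxiliary classifiers and the main classifier of GoogLeNet. During training they are usually combined into a single loss; a sketch of that combination, with the 0.3 weighting taken from the googlenetbn.py snippet at the top of this page rather than from this file:

import numpy as np
import chainer.functions as F

def googlenet_loss(out1, out2, out3, t):
    # Cross-entropy on each head; the auxiliary heads are down-weighted by 0.3.
    l1 = F.softmax_cross_entropy(out1, t)
    l2 = F.softmax_cross_entropy(out2, t)
    l3 = F.softmax_cross_entropy(out3, t)
    return 0.3 * (l1 + l2) + l3

# Toy usage with random 10-class logits for a batch of 4.
logits = [np.random.randn(4, 10).astype(np.float32) for _ in range(3)]
labels = np.random.randint(0, 10, size=4).astype(np.int32)
print(googlenet_loss(logits[0], logits[1], logits[2], labels).data)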
Source file: resnet50.py (project: chainermn, author: chainer)
def __call__(self, x, t):
        h = self.bn1(self.conv1(x))
        h = F.max_pooling_2d(F.relu(h), 3, stride=2)
        h = self.res2(h)
        h = self.res3(h)
        h = self.res4(h)
        h = self.res5(h)
        h = F.average_pooling_2d(h, 7, stride=1)
        h = self.fc(h)

        loss = F.softmax_cross_entropy(h, t)
        chainer.report({'loss': loss, 'accuracy': F.accuracy(h, t)}, self)
        return loss
Source file: resnet50.py (project: chainermn, author: chainer)
def __call__(self, x, t):
        self.clear()
        h = self.bn1(self.conv1(x), test=not self.train)
        h = F.max_pooling_2d(F.relu(h), 3, stride=2)
        h = self.res2(h, self.train)
        h = self.res3(h, self.train)
        h = self.res4(h, self.train)
        h = self.res5(h, self.train)
        h = F.average_pooling_2d(h, 7, stride=1)
        h = self.fc(h)

        loss = F.softmax_cross_entropy(h, t)
        chainer.report({'loss': loss, 'accuracy': F.accuracy(h, t)}, self)
        return loss
Source file: net.py (project: chainer-pix2pix, author: pfnet-research)
def __call__(self, x_0, x_1):
        h = F.concat([self.c0_0(x_0), self.c0_1(x_1)])
        h = self.c1(h)
        h = self.c2(h)
        h = self.c3(h)
        h = self.c4(h)
        #h = F.average_pooling_2d(h, h.data.shape[2], 1, 0)
        return h

