python类constant()的实例源码

l2norm.py 文件源码 项目:ssd_pytorch 作者: miraclebiu 项目源码 文件源码 阅读 21 收藏 0 点赞 0 评论 0
def reset_parameters(self):
    """Fill the learnable L2Norm scale parameter with its initial gamma value."""
    initial_scale = self.gamma
    init.constant(self.weight, initial_scale)
test_nn.py 文件源码 项目:pytorch-coriander 作者: hughperkins 项目源码 文件源码 阅读 19 收藏 0 点赞 0 评论 0
def test_constant(self):
    """init.constant must fill tensors of rank 1, 2 and 4 with the requested value."""
    for as_variable in [True, False]:
        for dims in [1, 2, 4]:
            tensor = self._create_random_nd_tensor(dims, size_min=1, size_max=5, as_variable=as_variable)
            fill_value = self._random_float(1, 10)
            init.constant(tensor, fill_value)
            if as_variable:
                tensor = tensor.data
            reference = tensor.clone().fill_(fill_value)
            self.assertEqual(tensor, reference)
l2norm.py 文件源码 项目:yolov2 作者: zhangkaij 项目源码 文件源码 阅读 19 收藏 0 点赞 0 评论 0
def reset_parameters(self):
    """Reset the scale weight to the configured gamma constant."""
    init.constant(self.weight, self.gamma)
Classifier.py 文件源码 项目:MatchingNetworks 作者: gitabcworld 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def weights_init(self, module):
    """Initialise every submodule of `module`: Conv2d gets xavier-uniform
    weights (gain sqrt(2)) and zero bias; BatchNorm2d gets unit weight and
    zero bias. Other layer types are left untouched."""
    for layer in module.modules():
        if isinstance(layer, nn.Conv2d):
            init.xavier_uniform(layer.weight, gain=np.sqrt(2))
            init.constant(layer.bias, 0)
        elif isinstance(layer, nn.BatchNorm2d):
            layer.weight.data.fill_(1)
            layer.bias.data.zero_()
msra_init.py 文件源码 项目:DeepLab 作者: 2prime 项目源码 文件源码 阅读 22 收藏 0 点赞 0 评论 0
def msra_init(net):
    """MSRA (He/Kaiming) initialisation for all layers of `net`.

    Conv2d: kaiming-normal weights, zero bias.
    BatchNorm2d: unit weight, zero bias.
    Linear: N(0, 1e-3) weights, zero bias.
    """
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            init.kaiming_normal(m.weight)
            # Bug fix: `if m.bias:` truth-tested the tensor, which raises
            # for multi-element biases; test for presence explicitly
            # (matches the sibling xavier_init below).
            if m.bias is not None:
                init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant(m.weight, 1)
            init.constant(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant(m.bias, 0)
init.py 文件源码 项目:DeepLab 作者: 2prime 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def xavier_init(net):
    """Xavier-initialise every layer of `net`.

    Conv2d weights: xavier-normal; Linear weights: N(0, 1e-3);
    BatchNorm2d: unit weight; all biases (when present) are zeroed.
    """
    for layer in net.modules():
        if isinstance(layer, nn.Conv2d):
            init.xavier_normal(layer.weight)
            if layer.bias is not None:
                init.constant(layer.bias, 0)
        elif isinstance(layer, nn.BatchNorm2d):
            init.constant(layer.weight, 1)
            init.constant(layer.bias, 0)
        elif isinstance(layer, nn.Linear):
            init.normal(layer.weight, std=1e-3)
            if layer.bias is not None:
                init.constant(layer.bias, 0)
init.py 文件源码 项目:DeepLab 作者: 2prime 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def gauss_init(net):
    """Gaussian initialisation for all layers of `net`.

    Conv2d weights: N(0, 0.01); Linear weights: N(0, 1e-3);
    BatchNorm2d: unit weight; all biases (when present) are zeroed.
    """
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            # Bug fix: the weight tensor was missing — the original called
            # init.normal(0.0, 0.01), i.e. on the float 0.0, which raises.
            init.normal(m.weight, 0.0, 0.01)
            if m.bias is not None:
                init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant(m.weight, 1)
            init.constant(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant(m.bias, 0)
# --- End
test_nn.py 文件源码 项目:pytorch 作者: ezyang 项目源码 文件源码 阅读 22 收藏 0 点赞 0 评论 0
def test_constant(self):
    """Verify init.constant fills random tensors of several ranks with a value."""
    for as_variable in [True, False]:
        for dims in [1, 2, 4]:
            target = self._create_random_nd_tensor(dims, size_min=1, size_max=5, as_variable=as_variable)
            value = self._random_float(1, 10)
            init.constant(target, value)
            if as_variable:
                target = target.data
            self.assertEqual(target, target.clone().fill_(value))
networks.py 文件源码 项目:pytorch-CycleGAN-and-pix2pix 作者: junyanz 项目源码 文件源码 阅读 22 收藏 0 点赞 0 评论 0
def weights_init_normal(m):
    """Gaussian init: Conv/Linear weights ~ N(0, 0.02); BatchNorm2d
    weight ~ N(1, 0.02) with zero bias."""
    layer_type = m.__class__.__name__
    if 'Conv' in layer_type:
        init.normal(m.weight.data, 0.0, 0.02)
    elif 'Linear' in layer_type:
        init.normal(m.weight.data, 0.0, 0.02)
    elif 'BatchNorm2d' in layer_type:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
networks.py 文件源码 项目:pytorch-CycleGAN-and-pix2pix 作者: junyanz 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
def weights_init_xavier(m):
    """Xavier init: Conv/Linear weights xavier-normal (gain 0.02);
    BatchNorm2d weight ~ N(1, 0.02) with zero bias."""
    kind = m.__class__.__name__
    if kind.find('Conv') != -1:
        init.xavier_normal(m.weight.data, gain=0.02)
    elif kind.find('Linear') != -1:
        init.xavier_normal(m.weight.data, gain=0.02)
    elif kind.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
networks.py 文件源码 项目:pytorch-CycleGAN-and-pix2pix 作者: junyanz 项目源码 文件源码 阅读 32 收藏 0 点赞 0 评论 0
def weights_init_kaiming(m):
    """Kaiming init (fan-in): Conv/Linear weights kaiming-normal;
    BatchNorm2d weight ~ N(1, 0.02) with zero bias."""
    layer_name = m.__class__.__name__
    if 'Conv' in layer_name:
        init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
    elif 'Linear' in layer_name:
        init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
    elif 'BatchNorm2d' in layer_name:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
networks.py 文件源码 项目:pytorch-CycleGAN-and-pix2pix 作者: junyanz 项目源码 文件源码 阅读 18 收藏 0 点赞 0 评论 0
def weights_init_orthogonal(m):
    """Orthogonal init for Conv/Linear weights (gain 1);
    BatchNorm2d weight ~ N(1, 0.02) with zero bias."""
    classname = m.__class__.__name__
    # print(classname)  # debug leftover, disabled for consistency with the
    # other weights_init_* variants which keep it commented out
    if classname.find('Conv') != -1:
        init.orthogonal(m.weight.data, gain=1)
    elif classname.find('Linear') != -1:
        init.orthogonal(m.weight.data, gain=1)
    elif classname.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
urnn.py 文件源码 项目:URNN-PyTorch 作者: jingli9111 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def reset_parameters(self):
    """Draw the rotation parameters uniformly from [-0.1, 0.1] and zero the bias."""
    bound = 0.1
    init.uniform(self.thetaA, a=-bound, b=bound)
    init.uniform(self.thetaB, a=-bound, b=bound)
    init.uniform(self.U, a=-bound, b=bound)
    init.constant(self.bias.data, val=0)
goru.py 文件源码 项目:URNN-PyTorch 作者: jingli9111 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def reset_parameters(self):
        """
        Initialize parameters  TO DO
        """
        init.uniform(self.thetaA, a=-0.1, b=0.1)
        init.uniform(self.thetaB, a=-0.1, b=0.1)
        init.uniform(self.U, a=-0.1, b=0.1)
        init.orthogonal(self.gate_U.data)

        gate_W_data = torch.eye(self.hidden_size)
        gate_W_data = gate_W_data.repeat(1, 2)
        self.gate_W.data.set_(gate_W_data)

        init.constant(self.bias.data, val=0)
        init.constant(self.gate_bias.data, val=0)
model.py 文件源码 项目:NoisyNet-A3C 作者: Kaixhin 项目源码 文件源码 阅读 16 收藏 0 点赞 0 评论 0
def reset_parameters(self):
    """Initialise NoisyLinear parameters: weight/bias means uniform in
    [-sqrt(3/in_features), sqrt(3/in_features)], sigmas set to sigma_init.

    Skipped until all parameters exist (otherwise super().__init__() fails).
    """
    if not hasattr(self, 'sigma_weight'):
        return
    bound = math.sqrt(3 / self.in_features)
    init.uniform(self.weight, -bound, bound)
    init.uniform(self.bias, -bound, bound)
    init.constant(self.sigma_weight, self.sigma_init)
    init.constant(self.sigma_bias, self.sigma_init)
test_nn.py 文件源码 项目:pytorch 作者: pytorch 项目源码 文件源码 阅读 19 收藏 0 点赞 0 评论 0
def test_constant(self):
    """After init.constant, every element equals the chosen fill value."""
    for as_variable in [True, False]:
        for dims in [1, 2, 4]:
            t = self._create_random_nd_tensor(dims, size_min=1, size_max=5, as_variable=as_variable)
            v = self._random_float(1, 10)
            init.constant(t, v)
            if as_variable:
                t = t.data
            expected = t.clone().fill_(v)
            self.assertEqual(t, expected)
weight_init.py 文件源码 项目:generative_models 作者: j-min 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def weights_init_normal(m):
    """Per-module Gaussian init, meant for net.apply(): Conv/Linear weights
    ~ N(0, 0.02); BatchNorm2d weight ~ N(1, 0.02), bias zeroed."""
    name = m.__class__.__name__
    if name.find('Conv') != -1 or name.find('Linear') != -1:
        init.normal(m.weight.data, 0.0, 0.02)
    elif name.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
weight_init.py 文件源码 项目:generative_models 作者: j-min 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def weights_init_xavier(m):
    """Per-module Xavier init, meant for net.apply(): Conv/Linear weights
    xavier-normal (gain 0.02); BatchNorm2d weight ~ N(1, 0.02), bias zeroed."""
    name = m.__class__.__name__
    if name.find('Conv') != -1 or name.find('Linear') != -1:
        init.xavier_normal(m.weight.data, gain=0.02)
    elif name.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
weight_init.py 文件源码 项目:generative_models 作者: j-min 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def weights_init_kaiming(m):
    """Per-module Kaiming init (fan-in), meant for net.apply(): Conv/Linear
    weights kaiming-normal; BatchNorm2d weight ~ N(1, 0.02), bias zeroed."""
    name = m.__class__.__name__
    if name.find('Conv') != -1 or name.find('Linear') != -1:
        init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
    elif name.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
models.py 文件源码 项目:SRU 作者: akuzeee 项目源码 文件源码 阅读 17 收藏 0 点赞 0 评论 0
def initWeight(self, init_forget_bias=1):
    """Xavier-init all weight parameters and set the GRU update-gate biases
    (b_iz, b_hz) to init_forget_bias; every other bias starts at zero.

    See the GRU parameter layout in
    https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/rnn.py
    """
    for name, params in self.named_parameters():
        if 'weight' in name:
            # All weight matrices get xavier initialisation.
            init.xavier_uniform(params)
        elif 'gru.bias_ih_l' in name:
            # chunk() returns views, so writing the middle chunk (b_iz)
            # updates the underlying bias parameter in place.
            _, b_iz, _ = params.chunk(3, 0)
            init.constant(b_iz, init_forget_bias)
        elif 'gru.bias_hh_l' in name:
            _, b_hz, _ = params.chunk(3, 0)
            init.constant(b_hz, init_forget_bias)
        else:
            # Remaining biases are zeroed.
            init.constant(params, 0)


问题


面经


文章

微信
公众号

扫码关注公众号