Python normal() usage examples
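The snippets below collect real-world uses of normal() from torch.nn.init for weight initialization. Note that in current PyTorch the non-underscore initializers (init.normal, init.kaiming_normal, init.constant, ...) are deprecated in favor of their in-place counterparts (init.normal_ and friends); the snippets here predate that change. As a minimal, self-contained sketch of the common pattern (layer sizes are arbitrary):

import torch.nn as nn
from torch.nn import init

layer = nn.Linear(128, 10)
init.normal(layer.weight, mean=0, std=0.01)  # draw weights in place from N(0, 0.01^2)
if layer.bias is not None:
    init.constant(layer.bias, 0)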

treelstm.py (project: unsupervised-treelstm, author: jihunchoi)
def reset_parameters(self):
        if self.use_leaf_rnn:
            init.kaiming_normal(self.leaf_rnn_cell.weight_ih.data)
            init.orthogonal(self.leaf_rnn_cell.weight_hh.data)
            init.constant(self.leaf_rnn_cell.bias_ih.data, val=0)
            init.constant(self.leaf_rnn_cell.bias_hh.data, val=0)
            # Set the forget-gate bias to 1 (PyTorch packs LSTM gate parameters in the order i, f, g, o)
            self.leaf_rnn_cell.bias_ih.data.chunk(4)[1].fill_(1)
            if self.bidirectional:
                init.kaiming_normal(self.leaf_rnn_cell_bw.weight_ih.data)
                init.orthogonal(self.leaf_rnn_cell_bw.weight_hh.data)
                init.constant(self.leaf_rnn_cell_bw.bias_ih.data, val=0)
                init.constant(self.leaf_rnn_cell_bw.bias_hh.data, val=0)
                # Set the forget-gate bias to 1 (PyTorch packs LSTM gate parameters in the order i, f, g, o)
                self.leaf_rnn_cell_bw.bias_ih.data.chunk(4)[1].fill_(1)
        else:
            init.kaiming_normal(self.word_linear.weight.data)
            init.constant(self.word_linear.bias.data, val=0)
        self.treelstm_layer.reset_parameters()
        init.normal(self.comp_query.data, mean=0, std=0.01)
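For context, the chunk(4)[1] indexing above works because PyTorch lays out an LSTM cell's biases as four consecutive gate slices ordered (input, forget, cell, output), so the second chunk is the forget gate. A standalone sketch of the same trick (sizes are arbitrary):

import torch.nn as nn

cell = nn.LSTMCell(input_size=300, hidden_size=300)
# bias_ih has shape (4 * hidden_size,); the second of four chunks is the forget gate
cell.bias_ih.data.chunk(4)[1].fill_(1)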
init.py (project: DeepLab, author: 2prime)
def msra_init(net):
    '''Init layer parameters.'''
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            init.kaiming_normal(m.weight)
            # Modified by lzh @ 201707251408:
            # <<< Old:
            # if m.bias:
            # >>> New:
            if m.bias is not None:
                init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant(m.weight, 1)
            init.constant(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal(m.weight, std=1e-3)
            # Modified by lzh @ 201707241734:
            # <<< Old:
            # if m.bias:
            # >>> New:
            if m.bias is not None:
            # --- End
                init.constant(m.bias, 0)

networks.py (project: pytorch-CycleGAN-and-pix2pix, author: junyanz)
def define_D(input_nc, ndf, which_model_netD,
             n_layers_D=3, norm='batch', use_sigmoid=False, init_type='normal', gpu_ids=[]):
    netD = None
    use_gpu = len(gpu_ids) > 0
    norm_layer = get_norm_layer(norm_type=norm)

    if use_gpu:
        assert(torch.cuda.is_available())
    if which_model_netD == 'basic':
        netD = NLayerDiscriminator(input_nc, ndf, n_layers=3, norm_layer=norm_layer, use_sigmoid=use_sigmoid, gpu_ids=gpu_ids)
    elif which_model_netD == 'n_layers':
        netD = NLayerDiscriminator(input_nc, ndf, n_layers_D, norm_layer=norm_layer, use_sigmoid=use_sigmoid, gpu_ids=gpu_ids)
    elif which_model_netD == 'pixel':
        netD = PixelDiscriminator(input_nc, ndf, norm_layer=norm_layer, use_sigmoid=use_sigmoid, gpu_ids=gpu_ids)
    else:
        raise NotImplementedError('Discriminator model name [%s] is not recognized' %
                                  which_model_netD)
    if use_gpu:
        netD.cuda(gpu_ids[0])
    init_weights(netD, init_type=init_type)
    return netD
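A typical call, using only the parameters shown in the signature above (the argument values are illustrative):

netD = define_D(input_nc=3, ndf=64, which_model_netD='basic',
                norm='instance', use_sigmoid=True, gpu_ids=[])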
utils.py (project: seqmod, author: emanjavacas)
def make_lm_hook(d, seed_texts=None, max_seq_len=25, gpu=False,
                 method='sample', temperature=1, width=5,
                 early_stopping=None, validate=True):
    """
    Make a generator hook for a normal language model
    """

    def hook(trainer, epoch, batch_num, checkpoint):
        trainer.log("info", "Checking training...")
        if validate:
            loss = sum(trainer.validate_model().pack())
            trainer.log("info", "Valid loss: {:g}".format(loss))
            trainer.log("info", "Registering early stopping loss...")
            if early_stopping is not None:
                early_stopping.add_checkpoint(loss)
        trainer.log("info", "Generating text...")
        scores, hyps = trainer.model.generate(
            d, seed_texts=seed_texts, max_seq_len=max_seq_len, gpu=gpu,
            method=method, temperature=temperature, width=width)
        hyps = [format_hyp(score, hyp, hyp_num + 1, d)
                for hyp_num, (score, hyp) in enumerate(zip(scores, hyps))]
        trainer.log("info", '\n***' + ''.join(hyps) + "\n***")

    return hook
generator.py (project: seqGAN, author: suragnair)
def __init__(self, embedding_dim, hidden_dim, vocab_size, max_seq_len, gpu=False, oracle_init=False):
        super(Generator, self).__init__()
        self.hidden_dim = hidden_dim
        self.embedding_dim = embedding_dim
        self.max_seq_len = max_seq_len
        self.vocab_size = vocab_size
        self.gpu = gpu

        self.embeddings = nn.Embedding(vocab_size, embedding_dim)
        self.gru = nn.GRU(embedding_dim, hidden_dim)
        self.gru2out = nn.Linear(hidden_dim, vocab_size)

        # Initialise the oracle network with N(0, 1); otherwise the default
        # initialisation has very small variance, which gives a high NLL even
        # for data sampled from the same model.
        if oracle_init:
            for p in self.parameters():
                init.normal(p, 0, 1)
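Constructed with oracle_init=True, every parameter of the generator is redrawn from N(0, 1), per the comment above. A usage sketch with illustrative sizes:

oracle = Generator(embedding_dim=32, hidden_dim=32, vocab_size=5000,
                   max_seq_len=20, oracle_init=True)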
utils.py (project: YellowFin_Pytorch, author: JianGoForIt)
def init_params(net):
    '''Init layer parameters.'''
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            init.kaiming_normal(m.weight, mode='fan_out')
            if m.bias is not None:
                init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant(m.weight, 1)
            init.constant(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant(m.bias, 0)
model.py (project: pytorch-skipthoughts, author: kaniblu)
def reset_parameters(self):
        I.normal(self.embeddings.weight.data, mean=0, std=0.01)
        I.xavier_normal(self.W_i.weight.data)
        I.xavier_normal(self.W_o.weight.data)

        init_rnn_cell(self.encoder)

        for i in range(self.n_decoders):
            decoder = getattr(self, "decoder{}".format(i))
            init_rnn_cell(decoder)
resnet.py (project: open-reid, author: Cysu)
def reset_params(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                init.kaiming_normal(m.weight, mode='fan_out')
                if m.bias is not None:
                    init.constant(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                init.constant(m.weight, 1)
                init.constant(m.bias, 0)
            elif isinstance(m, nn.Linear):
                init.normal(m.weight, std=0.001)
                if m.bias is not None:
                    init.constant(m.bias, 0)
inception.py (project: open-reid, author: Cysu)
def reset_params(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                init.kaiming_normal(m.weight, mode='fan_out')
                if m.bias is not None:
                    init.constant(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                init.constant(m.weight, 1)
                init.constant(m.bias, 0)
            elif isinstance(m, nn.Linear):
                init.normal(m.weight, std=0.001)
                if m.bias is not None:
                    init.constant(m.bias, 0)
utils.py (project: pytorch-cifar, author: kuangliu)
def init_params(net):
    '''Init layer parameters.'''
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            init.kaiming_normal(m.weight, mode='fan_out')
            if m.bias is not None:
                init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant(m.weight, 1)
            init.constant(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant(m.bias, 0)
model.py (project: ShuffleNet, author: jaxony)
def init_params(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                init.kaiming_normal(m.weight, mode='fan_out')
                if m.bias is not None:
                    init.constant(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                init.constant(m.weight, 1)
                init.constant(m.bias, 0)
            elif isinstance(m, nn.Linear):
                init.normal(m.weight, std=0.001)
                if m.bias is not None:
                    init.constant(m.bias, 0)
test_nn.py (project: pytorch, author: tylergenter)
def test_normal(self):
        for as_variable in [True, False]:
            for dims in [1, 2, 4]:
                input_tensor = self._create_random_nd_tensor(dims, size_min=30, size_max=50, as_variable=as_variable)
                mean = self._random_float(-3, 3)
                std = self._random_float(1, 5)
                init.normal(input_tensor, mean=mean, std=std)

                assert self._is_normal(input_tensor, mean, std)
init.py (project: covfefe, author: deepnn)
def normal(w, mean=0, std=1):
    return nn.init.normal(w, mean=mean, std=std)
model.py (project: unsupervised-treelstm, author: jihunchoi)
def reset_parameters(self):
        init.normal(self.word_embedding.weight.data, mean=0, std=0.01)
        self.encoder.reset_parameters()
        self.classifier.reset_parameters()
test_nn.py (project: pytorch-coriander, author: hughperkins)
def test_normal(self):
        for as_variable in [True, False]:
            for dims in [1, 2, 4]:
                input_tensor = self._create_random_nd_tensor(dims, size_min=30, size_max=50, as_variable=as_variable)
                mean = self._random_float(-3, 3)
                std = self._random_float(1, 5)
                init.normal(input_tensor, mean=mean, std=std)

                assert self._is_normal(input_tensor, mean, std)
msra_init.py (project: DeepLab, author: 2prime)
def msra_init(net):
    '''Init layer parameters.'''
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            init.kaiming_normal(m.weight)
            if m.bias is not None:
                init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant(m.weight, 1)
            init.constant(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant(m.bias, 0)
init.py (project: DeepLab, author: 2prime)
def xavier_init(net):
    '''Init layer parameters.'''
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            init.xavier_normal(m.weight)
            if m.bias is not None:
                init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant(m.weight, 1)
            init.constant(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant(m.bias, 0)
init.py (project: DeepLab, author: 2prime)
def gauss_init(net):
    '''Init layer parameters.'''
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            init.normal(m.weight, 0.0, 0.01)
            if m.bias is not None:
                init.constant(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant(m.weight, 1)
            init.constant(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant(m.bias, 0)
utils.py (project: Efficient-Dynamic-Batching, author: jsuarez5341)
def initWeights(net, scheme='orthogonal'):
   print('Initializing weights. Warning: may overwrite sensitive bias parameters (e.g. batchnorm)')
   for e in net.parameters():
      if scheme == 'orthogonal':
         if len(e.size()) >= 2:
            init.orthogonal(e)
      elif scheme == 'normal':
         init.normal(e, std=1e-2)
      elif scheme == 'xavier':
         init.xavier_normal(e)
test_nn.py (project: pytorch, author: ezyang)
def test_normal(self):
        for as_variable in [True, False]:
            for dims in [1, 2, 4]:
                input_tensor = self._create_random_nd_tensor(dims, size_min=30, size_max=50, as_variable=as_variable)
                mean = self._random_float(-3, 3)
                std = self._random_float(1, 5)
                init.normal(input_tensor, mean=mean, std=std)

                assert self._is_normal(input_tensor, mean, std)
networks.py (project: pytorch-CycleGAN-and-pix2pix, author: junyanz)
def weights_init_normal(m):
    classname = m.__class__.__name__
    # print(classname)
    if classname.find('Conv') != -1:
        init.normal(m.weight.data, 0.0, 0.02)
    elif classname.find('Linear') != -1:
        init.normal(m.weight.data, 0.0, 0.02)
    elif classname.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
networks.py (project: pytorch-CycleGAN-and-pix2pix, author: junyanz)
def weights_init_xavier(m):
    classname = m.__class__.__name__
    # print(classname)
    if classname.find('Conv') != -1:
        init.xavier_normal(m.weight.data, gain=0.02)
    elif classname.find('Linear') != -1:
        init.xavier_normal(m.weight.data, gain=0.02)
    elif classname.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
networks.py (project: pytorch-CycleGAN-and-pix2pix, author: junyanz)
def weights_init_kaiming(m):
    classname = m.__class__.__name__
    # print(classname)
    if classname.find('Conv') != -1:
        init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
    elif classname.find('Linear') != -1:
        init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
    elif classname.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
networks.py (project: pytorch-CycleGAN-and-pix2pix, author: junyanz)
def weights_init_orthogonal(m):
    classname = m.__class__.__name__
    print(classname)
    if classname.find('Conv') != -1:
        init.orthogonal(m.weight.data, gain=1)
    elif classname.find('Linear') != -1:
        init.orthogonal(m.weight.data, gain=1)
    elif classname.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
networks.py (project: pytorch-CycleGAN-and-pix2pix, author: junyanz)
def init_weights(net, init_type='normal'):
    print('initialization method [%s]' % init_type)
    if init_type == 'normal':
        net.apply(weights_init_normal)
    elif init_type == 'xavier':
        net.apply(weights_init_xavier)
    elif init_type == 'kaiming':
        net.apply(weights_init_kaiming)
    elif init_type == 'orthogonal':
        net.apply(weights_init_orthogonal)
    else:
        raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
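Because net.apply traverses submodules recursively, a single call re-initializes the whole network. A minimal usage sketch (the toy model here is arbitrary):

import torch.nn as nn

net = nn.Sequential(nn.Conv2d(3, 16, 3), nn.BatchNorm2d(16))
init_weights(net, init_type='kaiming')  # applies weights_init_kaiming to every submodule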
utils.py (project: seqmod, author: emanjavacas)
def make_mlm_hook(d, seed_texts=None, max_seq_len=25, gpu=False,
                  method='sample', temperature=1, width=5,
                  early_stopping=None, validate=True):
    """
    Make a generator hook for a multi-head language model
    """

    def hook(trainer, epoch, batch_num, checkpoint):
        trainer.log("info", "Checking training...")
        if validate:
            loss = sum(trainer.validate_model().pack())
            trainer.log("info", "Valid loss: {:g}".format(loss))
            trainer.log("info", "Registering early stopping loss...")
            if early_stopping is not None:
                early_stopping.add_checkpoint(loss)
        trainer.log("info", "Generating text...")
        for head in trainer.model.project:
            trainer.log("info", "Head: {}".format(head))
            scores, hyps = trainer.model.generate(
                d, head=head, seed_texts=seed_texts, max_seq_len=max_seq_len,
                gpu=gpu, method=method, temperature=temperature, width=width)
            hyps = [format_hyp(score, hyp, hyp_num + 1, d)
                    for hyp_num, (score, hyp) in enumerate(zip(scores, hyps))]
            trainer.log("info", '\n***' + ''.join(hyps) + "\n***")

    return hook
test_nn.py (project: pytorch, author: pytorch)
def test_normal(self):
        for as_variable in [True, False]:
            for dims in [1, 2, 4]:
                input_tensor = self._create_random_nd_tensor(dims, size_min=30, size_max=50, as_variable=as_variable)
                mean = self._random_float(-3, 3)
                std = self._random_float(1, 5)
                init.normal(input_tensor, mean=mean, std=std)

                assert self._is_normal(input_tensor, mean, std)
weight_init.py (project: generative_models, author: j-min)
def weights_init_normal(m):
    classname = m.__class__.__name__
    # print(classname)
    if classname.find('Conv') != -1:
        init.normal(m.weight.data, 0.0, 0.02)
    elif classname.find('Linear') != -1:
        init.normal(m.weight.data, 0.0, 0.02)
    elif classname.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)
weight_init.py (project: generative_models, author: j-min)
def weights_init_xavier(m):
    classname = m.__class__.__name__
    # print(classname)
    if classname.find('Conv') != -1:
        init.xavier_normal(m.weight.data, gain=0.02)
    elif classname.find('Linear') != -1:
        init.xavier_normal(m.weight.data, gain=0.02)
    elif classname.find('BatchNorm2d') != -1:
        init.normal(m.weight.data, 1.0, 0.02)
        init.constant(m.bias.data, 0.0)

