python类NesterovAG()的实例源码

optimizers.py 文件源码 项目:chainer-speech-recognition 作者: musyoku 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
def decay_learning_rate(opt, factor, final_value):
    """Multiply the optimizer's learning rate by *factor*, clamped at *final_value*.

    Returns *final_value* once the rate has already reached it, otherwise None.
    Adam keeps its rate in ``alpha``; the other supported optimizers use ``lr``.
    Raises NotImplementedError for unsupported optimizer types.
    """
    if isinstance(opt, (optimizers.NesterovAG, optimizers.SGD, optimizers.MomentumSGD)):
        rate_attr = "lr"
    elif isinstance(opt, optimizers.Adam):
        rate_attr = "alpha"
    else:
        raise NotImplementedError()
    current = getattr(opt, rate_attr)
    if current <= final_value:
        return final_value
    setattr(opt, rate_attr, current * factor)
optim.py 文件源码 项目:chainer-qrnn 作者: musyoku 项目源码 文件源码 阅读 29 收藏 0 点赞 0 评论 0
def decay_learning_rate(opt, factor, final_value):
    """Decay the optimizer's learning rate geometrically, never below *final_value*.

    Returns *final_value* when the rate is already at or below the floor,
    otherwise scales it by *factor* and returns None. Adam stores its rate
    as ``alpha``; the lr-based optimizers store it as ``lr``.
    """
    lr_based = (optimizers.NesterovAG, optimizers.SGD, optimizers.MomentumSGD)
    if isinstance(opt, lr_based):
        if opt.lr <= final_value:
            return final_value
        opt.lr *= factor
    elif isinstance(opt, optimizers.Adam):
        if opt.alpha <= final_value:
            return final_value
        opt.alpha *= factor
    else:
        raise NotImplementedError()
chain.py 文件源码 项目:ddnn 作者: kunglab 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def get_optimizer(self, name, lr, momentum=0.9):
        """Build a chainer optimizer selected by (case-insensitive) name.

        Args:
            name: optimizer identifier, e.g. "adam", "nesterov", "sgd".
            lr: learning rate (passed as ``alpha`` for Adam).
            momentum: momentum-like coefficient; reused as ``beta1`` (Adam),
                ``rho`` (AdaDelta) or ``alpha`` (RMSprop).

        Returns None for an unrecognized name (original behavior preserved).
        """
        name = name.lower()  # hoisted: original called .lower() per branch
        if name == "adam":
            return optimizers.Adam(alpha=lr, beta1=momentum)
        if name == "smorms3":
            return optimizers.SMORMS3(lr=lr)
        if name == "adagrad":
            return optimizers.AdaGrad(lr=lr)
        if name == "adadelta":
            return optimizers.AdaDelta(rho=momentum)
        if name in ("nesterov", "nesterovag"):
            return optimizers.NesterovAG(lr=lr, momentum=momentum)
        if name == "rmsprop":
            return optimizers.RMSprop(lr=lr, alpha=momentum)
        if name == "momentumsgd":
            # fixed: was `mommentum=mommentum`, a NameError (undefined name)
            # and a misspelled keyword argument
            return optimizers.MomentumSGD(lr=lr, momentum=momentum)
        if name == "sgd":
            return optimizers.SGD(lr=lr)
optim.py 文件源码 项目:adversarial-autoencoder 作者: musyoku 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def decrease_learning_rate(opt, factor, final_value):
    """Shrink the optimizer's learning rate by *factor* down to *final_value*.

    Returns *final_value* if the rate has already reached the floor; otherwise
    multiplies the rate in place and returns None. Unsupported optimizer
    types raise NotImplementedError.
    """
    if isinstance(opt, optimizers.Adam):
        attr = "alpha"  # Adam names its step size `alpha`
    elif isinstance(opt, (optimizers.NesterovAG, optimizers.SGD, optimizers.MomentumSGD)):
        attr = "lr"
    else:
        raise NotImplementedError()
    value = getattr(opt, attr)
    if value <= final_value:
        return final_value
    setattr(opt, attr, value * factor)
train.py 文件源码 项目:chainer-glu 作者: musyoku 项目源码 文件源码 阅读 22 收藏 0 点赞 0 评论 0
def decay_learning_rate(opt, factor, final_value):
    """Multiply the optimizer's learning rate by *factor*, stopping at *final_value*.

    No-op once the rate is at or below *final_value*. Adam stores its rate as
    ``alpha``; NesterovAG and SGD store it as ``lr``.

    Raises:
        NotImplementedError: for optimizer types not handled here.
    """
    if isinstance(opt, optimizers.NesterovAG):
        if opt.lr <= final_value:
            return
        opt.lr *= factor
        return
    if isinstance(opt, optimizers.SGD):
        if opt.lr <= final_value:
            return
        opt.lr *= factor
        return
    if isinstance(opt, optimizers.Adam):
        if opt.alpha <= final_value:
            return
        opt.alpha *= factor
        return
    # fixed: `NotImplementationError` is not a builtin and raised NameError
    raise NotImplementedError()
chain.py 文件源码 项目:unrolled-gan 作者: musyoku 项目源码 文件源码 阅读 18 收藏 0 点赞 0 评论 0
def get_optimizer(name, lr, momentum=0.9):
    """Construct an optimizer by (case-insensitive) name.

    Args:
        name: optimizer identifier, e.g. "adam", "eve", "nesterov", "sgd".
        lr: learning rate (``alpha`` for Adam/Eve).
        momentum: momentum-like coefficient; reused as ``beta1`` (Adam/Eve),
            ``rho`` (AdaDelta) or ``alpha`` (RMSprop).

    Returns None for an unrecognized name (original behavior preserved).
    """
    name = name.lower()  # hoisted: original called .lower() in every branch
    if name == "adam":
        return optimizers.Adam(alpha=lr, beta1=momentum)
    if name == "eve":
        return Eve(alpha=lr, beta1=momentum)
    if name == "adagrad":
        return optimizers.AdaGrad(lr=lr)
    if name == "adadelta":
        return optimizers.AdaDelta(rho=momentum)
    if name in ("nesterov", "nesterovag"):
        return optimizers.NesterovAG(lr=lr, momentum=momentum)
    if name == "rmsprop":
        return optimizers.RMSprop(lr=lr, alpha=momentum)
    if name == "momentumsgd":
        # fixed: was `mommentum=mommentum` — an undefined name (NameError)
        # and a misspelled keyword argument
        return optimizers.MomentumSGD(lr=lr, momentum=momentum)
    if name == "sgd":
        return optimizers.SGD(lr=lr)
chain.py 文件源码 项目:unrolled-gan 作者: musyoku 项目源码 文件源码 阅读 18 收藏 0 点赞 0 评论 0
def update_momentum(self, momentum):
        """Propagate a new momentum value to the wrapped optimizer.

        Each optimizer family stores its momentum-like coefficient under a
        different attribute: ``beta1`` (Adam/Eve), ``rho`` (AdaDelta),
        ``momentum`` (NesterovAG/MomentumSGD), ``alpha`` (RMSprop).
        Unknown optimizer types are silently ignored (original behavior).
        """
        if isinstance(self.optimizer, optimizers.Adam):
            self.optimizer.beta1 = momentum
            return
        if isinstance(self.optimizer, Eve):
            self.optimizer.beta1 = momentum
            return
        if isinstance(self.optimizer, optimizers.AdaDelta):
            self.optimizer.rho = momentum
            return
        if isinstance(self.optimizer, optimizers.NesterovAG):
            self.optimizer.momentum = momentum
            return
        if isinstance(self.optimizer, optimizers.RMSprop):
            self.optimizer.alpha = momentum
            return
        if isinstance(self.optimizer, optimizers.MomentumSGD):
            # fixed: was `mommentum`, which silently set a useless attribute
            # and left the real momentum unchanged
            self.optimizer.momentum = momentum
            return
wavenet.py 文件源码 项目:wavenet 作者: musyoku 项目源码 文件源码 阅读 22 收藏 0 点赞 0 评论 0
def get_optimizer(name, lr, momentum=0.9):
    """Construct a chainer optimizer by (case-insensitive) name.

    Args:
        name: optimizer identifier, e.g. "adam", "eve", "nesterov", "sgd".
        lr: learning rate (``alpha`` for Adam/Eve).
        momentum: momentum-like coefficient; reused as ``beta1`` (Adam/Eve),
            ``rho`` (AdaDelta) or ``alpha`` (RMSprop).

    Raises:
        Exception: for an unrecognized name (original behavior preserved).
    """
    name = name.lower()  # hoisted: original called .lower() in every branch
    if name == "adam":
        return chainer.optimizers.Adam(alpha=lr, beta1=momentum)
    if name == "eve":
        return Eve(alpha=lr, beta1=momentum)
    if name == "adagrad":
        return chainer.optimizers.AdaGrad(lr=lr)
    if name == "adadelta":
        return chainer.optimizers.AdaDelta(rho=momentum)
    if name in ("nesterov", "nesterovag"):
        return chainer.optimizers.NesterovAG(lr=lr, momentum=momentum)
    if name == "rmsprop":
        return chainer.optimizers.RMSprop(lr=lr, alpha=momentum)
    if name == "momentumsgd":
        # fixed: was `mommentum=mommentum` — an undefined name (NameError)
        # and a misspelled keyword argument
        return chainer.optimizers.MomentumSGD(lr=lr, momentum=momentum)
    if name == "sgd":
        return chainer.optimizers.SGD(lr=lr)
    raise Exception()
wavenet.py 文件源码 项目:wavenet 作者: musyoku 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
def update_momentum(self, momentum):
        """Set the momentum-like coefficient on the wrapped optimizer.

        The attribute name depends on the optimizer family: ``beta1``
        (Adam/Eve), ``rho`` (AdaDelta), ``momentum`` (NesterovAG/MomentumSGD),
        ``alpha`` (RMSprop). Unknown types are silently ignored
        (original behavior).
        """
        if isinstance(self.optimizer, optimizers.Adam):
            self.optimizer.beta1 = momentum
            return
        if isinstance(self.optimizer, Eve):
            self.optimizer.beta1 = momentum
            return
        if isinstance(self.optimizer, optimizers.AdaDelta):
            self.optimizer.rho = momentum
            return
        if isinstance(self.optimizer, optimizers.NesterovAG):
            self.optimizer.momentum = momentum
            return
        if isinstance(self.optimizer, optimizers.RMSprop):
            self.optimizer.alpha = momentum
            return
        if isinstance(self.optimizer, optimizers.MomentumSGD):
            # fixed: was `mommentum`, which silently set a useless attribute
            # and left the real momentum unchanged
            self.optimizer.momentum = momentum
            return
chain.py 文件源码 项目:LSGAN 作者: musyoku 项目源码 文件源码 阅读 20 收藏 0 点赞 0 评论 0
def update_momentum(self, momentum):
        """Set the momentum-like coefficient on the wrapped optimizer.

        The attribute name depends on the optimizer family: ``beta1``
        (Adam/Eve), ``rho`` (AdaDelta), ``momentum`` (NesterovAG/MomentumSGD),
        ``alpha`` (RMSprop). Unknown types are silently ignored
        (original behavior).
        """
        if isinstance(self._optimizer, optimizers.Adam):
            self._optimizer.beta1 = momentum
            return
        if isinstance(self._optimizer, Eve):
            self._optimizer.beta1 = momentum
            return
        if isinstance(self._optimizer, optimizers.AdaDelta):
            self._optimizer.rho = momentum
            return
        if isinstance(self._optimizer, optimizers.NesterovAG):
            self._optimizer.momentum = momentum
            return
        if isinstance(self._optimizer, optimizers.RMSprop):
            self._optimizer.alpha = momentum
            return
        if isinstance(self._optimizer, optimizers.MomentumSGD):
            # fixed: was `mommentum`, which silently set a useless attribute
            # and left the real momentum unchanged
            self._optimizer.momentum = momentum
            return
chain.py 文件源码 项目:adgm 作者: musyoku 项目源码 文件源码 阅读 21 收藏 0 点赞 0 评论 0
def get_optimizer(name, lr, momentum=0.9):
    """Construct an optimizer by (case-insensitive) name.

    Args:
        name: optimizer identifier, e.g. "adam", "eve", "nesterov", "sgd".
        lr: learning rate (``alpha`` for Adam/Eve).
        momentum: momentum-like coefficient; reused as ``beta1`` (Adam/Eve),
            ``rho`` (AdaDelta) or ``alpha`` (RMSprop).

    Returns None for an unrecognized name (original behavior preserved).
    """
    name = name.lower()  # hoisted: original called .lower() in every branch
    if name == "adam":
        return optimizers.Adam(alpha=lr, beta1=momentum)
    if name == "eve":
        return Eve(alpha=lr, beta1=momentum)
    if name == "adagrad":
        return optimizers.AdaGrad(lr=lr)
    if name == "adadelta":
        return optimizers.AdaDelta(rho=momentum)
    if name in ("nesterov", "nesterovag"):
        return optimizers.NesterovAG(lr=lr, momentum=momentum)
    if name == "rmsprop":
        return optimizers.RMSprop(lr=lr, alpha=momentum)
    if name == "momentumsgd":
        # fixed: was `mommentum=mommentum` — an undefined name (NameError)
        # and a misspelled keyword argument
        return optimizers.MomentumSGD(lr=lr, momentum=momentum)
    if name == "sgd":
        return optimizers.SGD(lr=lr)
optimizers.py 文件源码 项目:chainer-speech-recognition 作者: musyoku 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def get_learning_rate(opt):
    """Return the optimizer's current learning rate.

    Adam exposes it as ``alpha``; NesterovAG, MomentumSGD and SGD use ``lr``.
    Raises NotImplementedError for any other optimizer type.
    """
    if isinstance(opt, optimizers.Adam):
        return opt.alpha
    if isinstance(opt, (optimizers.NesterovAG, optimizers.MomentumSGD, optimizers.SGD)):
        return opt.lr
    raise NotImplementedError()
optimizers.py 文件源码 项目:chainer-speech-recognition 作者: musyoku 项目源码 文件源码 阅读 21 收藏 0 点赞 0 评论 0
def set_learning_rate(opt, lr):
    """Assign a new learning rate to the optimizer.

    Adam stores it as ``alpha``; NesterovAG, MomentumSGD and SGD store it
    as ``lr``. Raises NotImplementedError for any other optimizer type.
    """
    lr_based = (optimizers.NesterovAG, optimizers.MomentumSGD, optimizers.SGD)
    if isinstance(opt, lr_based):
        opt.lr = lr
    elif isinstance(opt, optimizers.Adam):
        opt.alpha = lr
    else:
        raise NotImplementedError()
optimizers.py 文件源码 项目:chainer-speech-recognition 作者: musyoku 项目源码 文件源码 阅读 32 收藏 0 点赞 0 评论 0
def set_momentum(opt, momentum):
    """Set the momentum-like coefficient of the optimizer.

    NesterovAG/MomentumSGD use ``momentum``, Adam uses ``beta1``, and plain
    SGD has no momentum term (no-op). Raises NotImplementedError otherwise.
    """
    if isinstance(opt, (optimizers.NesterovAG, optimizers.MomentumSGD)):
        opt.momentum = momentum
    elif isinstance(opt, optimizers.SGD):
        pass  # plain SGD has no momentum term; deliberately a no-op
    elif isinstance(opt, optimizers.Adam):
        opt.beta1 = momentum
    else:
        raise NotImplementedError()
optimizers.py 文件源码 项目:chainer-speech-recognition 作者: musyoku 项目源码 文件源码 阅读 22 收藏 0 点赞 0 评论 0
def get_optimizer(name, lr, momentum):
    """Build an optimizer by exact name: "sgd", "msgd", "nesterov" or "adam".

    ``lr`` becomes ``alpha`` and ``momentum`` becomes ``beta1`` for Adam.
    Raises NotImplementedError for any other name.
    """
    factories = {
        "sgd": lambda: optimizers.SGD(lr=lr),
        "msgd": lambda: optimizers.MomentumSGD(lr=lr, momentum=momentum),
        "nesterov": lambda: optimizers.NesterovAG(lr=lr, momentum=momentum),
        "adam": lambda: optimizers.Adam(alpha=lr, beta1=momentum),
    }
    if name not in factories:
        raise NotImplementedError()
    return factories[name]()
optim.py 文件源码 项目:chainer-qrnn 作者: musyoku 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
def get_current_learning_rate(opt):
    """Read the optimizer's current learning rate.

    Adam keeps it in ``alpha``; the lr-based optimizers keep it in ``lr``.
    Raises NotImplementedError for unsupported optimizer types.
    """
    rate_attrs = (
        (optimizers.Adam, "alpha"),
        (optimizers.NesterovAG, "lr"),
        (optimizers.MomentumSGD, "lr"),
        (optimizers.SGD, "lr"),
    )
    for cls, attr in rate_attrs:
        if isinstance(opt, cls):
            return getattr(opt, attr)
    raise NotImplementedError()
optim.py 文件源码 项目:chainer-qrnn 作者: musyoku 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def get_optimizer(name, lr, momentum):
    """Build an optimizer by exact name: "sgd", "msgd", "nesterov" or "adam".

    For Adam, ``lr`` is passed as ``alpha`` and ``momentum`` as ``beta1``.
    Raises NotImplementedError for any other name.
    """
    if name == "adam":
        return optimizers.Adam(alpha=lr, beta1=momentum)
    if name == "nesterov":
        return optimizers.NesterovAG(lr=lr, momentum=momentum)
    if name == "msgd":
        return optimizers.MomentumSGD(lr=lr, momentum=momentum)
    if name == "sgd":
        return optimizers.SGD(lr=lr)
    raise NotImplementedError()
test_optimizers_by_linear_model.py 文件源码 项目:chainer-deconv 作者: germanRos 项目源码 文件源码 阅读 15 收藏 0 点赞 0 评论 0
def create(self):
        """Return a fresh NesterovAG optimizer with learning rate 0.1."""
        return optimizers.NesterovAG(0.1)
optim.py 文件源码 项目:adversarial-autoencoder 作者: musyoku 项目源码 文件源码 阅读 32 收藏 0 点赞 0 评论 0
def get_current_learning_rate(opt):
    """Return the optimizer's current learning rate.

    Adam exposes its step size as ``alpha``; NesterovAG, MomentumSGD and SGD
    expose it as ``lr``. Raises NotImplementedError for other types.
    """
    if isinstance(opt, optimizers.Adam):
        return opt.alpha
    if isinstance(opt, (optimizers.NesterovAG, optimizers.MomentumSGD, optimizers.SGD)):
        return opt.lr
    raise NotImplementedError()
optim.py 文件源码 项目:adversarial-autoencoder 作者: musyoku 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
def set_learning_rate(opt, lr):
    """Overwrite the optimizer's learning rate in place.

    The attribute is ``alpha`` for Adam and ``lr`` for NesterovAG,
    MomentumSGD and SGD. Raises NotImplementedError for other types.
    """
    if isinstance(opt, optimizers.Adam):
        attr = "alpha"
    elif isinstance(opt, (optimizers.NesterovAG, optimizers.MomentumSGD, optimizers.SGD)):
        attr = "lr"
    else:
        raise NotImplementedError()
    setattr(opt, attr, lr)
optim.py 文件源码 项目:adversarial-autoencoder 作者: musyoku 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def get_optimizer(name, lr, momentum):
    """Build an optimizer by case-insensitive name.

    Supported names: "sgd", "msgd", "nesterov", "adam". For Adam, ``lr``
    maps to ``alpha`` and ``momentum`` to ``beta1``. Raises
    NotImplementedError for any other name.
    """
    builders = {
        "sgd": lambda: optimizers.SGD(lr=lr),
        "msgd": lambda: optimizers.MomentumSGD(lr=lr, momentum=momentum),
        "nesterov": lambda: optimizers.NesterovAG(lr=lr, momentum=momentum),
        "adam": lambda: optimizers.Adam(alpha=lr, beta1=momentum),
    }
    key = name.lower()
    if key not in builders:
        raise NotImplementedError()
    return builders[key]()
train.py 文件源码 项目:chainer-glu 作者: musyoku 项目源码 文件源码 阅读 21 收藏 0 点赞 0 评论 0
def get_current_learning_rate(opt):
    """Return the optimizer's current learning rate.

    Adam exposes its step size as ``alpha``; NesterovAG and SGD use ``lr``.

    Raises:
        NotImplementedError: for optimizer types not handled here.
    """
    if isinstance(opt, optimizers.NesterovAG):
        return opt.lr
    if isinstance(opt, optimizers.Adam):
        return opt.alpha
    if isinstance(opt, optimizers.SGD):
        return opt.lr
    # fixed: `NotImplementationError` is not a builtin and raised NameError
    raise NotImplementedError()
train.py 文件源码 项目:chainer-glu 作者: musyoku 项目源码 文件源码 阅读 19 收藏 0 点赞 0 评论 0
def get_optimizer(name, lr, momentum):
    """Build an optimizer by exact name: "nesterov", "adam" or "sgd".

    For Adam, ``lr`` is passed as ``alpha`` and ``momentum`` as ``beta1``.

    Raises:
        NotImplementedError: for any other name.
    """
    if name == "nesterov":
        return optimizers.NesterovAG(lr=lr, momentum=momentum)
    if name == "adam":
        return optimizers.Adam(alpha=lr, beta1=momentum)
    if name == "sgd":
        return optimizers.SGD(lr=lr)
    # fixed: `NotImplementationError` is not a builtin and raised NameError
    raise NotImplementedError()


问题


面经


文章

微信
公众号

扫码关注公众号