python类__version__()的实例源码

prepare_train.py 文件源码 项目:chainer-faster-rcnn 作者: mitmul 项目源码 文件源码 阅读 20 收藏 0 点赞 0 评论 0
def create_logger(args, result_dir):
    """Configure the root logger for DEBUG output to stdout and a log file.

    A handler writing to ``sys.stdout`` and one writing to
    ``<result_dir>/stdout.log`` are attached to the root logger, then the
    runtime environment (Python, chainer, CUDA/cuDNN) and the parsed
    arguments are logged for reproducibility.

    Args:
        args: Parsed command-line arguments; logged verbatim.
        result_dir (str): Directory in which ``stdout.log`` is created.
    """
    formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)

    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(formatter)
    root_logger.addHandler(stream_handler)

    file_handler = logging.FileHandler("{}/stdout.log".format(result_dir))
    file_handler.setFormatter(formatter)
    root_logger.addHandler(file_handler)

    # Record the environment so every run's log is self-describing.
    logging.info(sys.version_info)
    logging.info('chainer version: {}'.format(chainer.__version__))
    logging.info('cuda: {}, cudnn: {}'.format(
        chainer.cuda.available, chainer.cuda.cudnn_enabled))
    logging.info(args)
train_utils.py 文件源码 项目:chainer-segnet 作者: pfnet-research 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def create_logger(args, result_dir):
    """Set up DEBUG-level logging to stdout and ``<result_dir>/stdout.log``.

    After wiring up the handlers, the Python/chainer/CUDA environment and
    the parsed arguments are logged so the run can be reconstructed from
    the log alone.

    Args:
        args: Parsed command-line arguments; logged verbatim.
        result_dir (str): Directory that will receive ``stdout.log``.
    """
    fmt = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)

    console = logging.StreamHandler(sys.stdout)
    console.setLevel(logging.DEBUG)
    logfile = logging.FileHandler("{}/stdout.log".format(result_dir))

    # Both destinations share the same format.
    for handler in (console, logfile):
        handler.setFormatter(fmt)
        root.addHandler(handler)

    logging.info(sys.version_info)
    logging.info('chainer version: {}'.format(chainer.__version__))
    logging.info('cuda: {}, cudnn: {}'.format(
        chainer.cuda.available, chainer.cuda.cudnn_enabled))
    logging.info(args)
model.py 文件源码 项目:teras 作者: chantera 项目源码 文件源码 阅读 25 收藏 0 点赞 0 评论 0
def __init__(self, n_layers, in_size, out_size, dropout=0.5,
             initialW=None, initial_bias=None):
    """Initialize the stacked LSTM, adapting to the installed chainer API.

    Chainer >= 3 accepts ``initialW``/``initial_bias`` in the n-step RNN
    constructor; on older releases those arguments are not forwarded.

    Args:
        n_layers (int): Number of stacked LSTM layers.
        in_size (int): Dimensionality of the input vectors.
        out_size (int): Dimensionality of the hidden/output vectors.
        dropout (float): Dropout ratio applied between layers.
        initialW: Weight initializer (used on chainer >= 3 only).
        initial_bias: Bias initializer (used on chainer >= 3 only).
    """
    # Parse the full major component: ``chainer_version[0]`` reads only
    # the first character and would misparse versions >= 10 (e.g. "10.0").
    major = int(chainer_version.split('.')[0])
    if major > 2:
        super(LSTM, self).__init__(n_layers, in_size, out_size, dropout,
                                   initialW, initial_bias)
    else:
        super(LSTM, self).__init__(n_layers, in_size, out_size, dropout)
model.py 文件源码 项目:teras 作者: chantera 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def __init__(self, n_layers, in_size, out_size, dropout=0.5,
             initialW=None, initial_bias=None):
    """Initialize the stacked bidirectional LSTM for the installed chainer.

    Chainer >= 3 accepts ``initialW``/``initial_bias`` in the n-step RNN
    constructor; on older releases those arguments are not forwarded.

    Args:
        n_layers (int): Number of stacked BiLSTM layers.
        in_size (int): Dimensionality of the input vectors.
        out_size (int): Dimensionality of each direction's hidden vectors.
        dropout (float): Dropout ratio applied between layers.
        initialW: Weight initializer (used on chainer >= 3 only).
        initial_bias: Bias initializer (used on chainer >= 3 only).
    """
    # ``chainer_version[0]`` only looks at the first character, which
    # breaks for double-digit majors; parse the whole component instead.
    major = int(chainer_version.split('.')[0])
    if major > 2:
        super(BiLSTM, self).__init__(n_layers, in_size, out_size, dropout,
                                     initialW, initial_bias)
    else:
        super(BiLSTM, self).__init__(n_layers, in_size, out_size, dropout)
model.py 文件源码 项目:teras 作者: chantera 项目源码 文件源码 阅读 17 收藏 0 点赞 0 评论 0
def __init__(self, n_layers, in_size, out_size, dropout=0.5,
             initialW=None, initial_bias=None):
    """Initialize the stacked GRU, adapting to the installed chainer API.

    Chainer >= 3 accepts ``initialW``/``initial_bias`` in the n-step RNN
    constructor; on older releases those arguments are not forwarded.

    Args:
        n_layers (int): Number of stacked GRU layers.
        in_size (int): Dimensionality of the input vectors.
        out_size (int): Dimensionality of the hidden/output vectors.
        dropout (float): Dropout ratio applied between layers.
        initialW: Weight initializer (used on chainer >= 3 only).
        initial_bias: Bias initializer (used on chainer >= 3 only).
    """
    # Parse the full major component rather than the first character,
    # which would misread versions >= 10.
    major = int(chainer_version.split('.')[0])
    if major > 2:
        super(GRU, self).__init__(n_layers, in_size, out_size, dropout,
                                  initialW, initial_bias)
    else:
        super(GRU, self).__init__(n_layers, in_size, out_size, dropout)
model.py 文件源码 项目:teras 作者: chantera 项目源码 文件源码 阅读 19 收藏 0 点赞 0 评论 0
def __init__(self, n_layers, in_size, out_size, dropout=0.5,
             initialW=None, initial_bias=None):
    """Initialize the stacked bidirectional GRU for the installed chainer.

    Chainer >= 3 accepts ``initialW``/``initial_bias`` in the n-step RNN
    constructor; on older releases those arguments are not forwarded.

    Args:
        n_layers (int): Number of stacked BiGRU layers.
        in_size (int): Dimensionality of the input vectors.
        out_size (int): Dimensionality of each direction's hidden vectors.
        dropout (float): Dropout ratio applied between layers.
        initialW: Weight initializer (used on chainer >= 3 only).
        initial_bias: Bias initializer (used on chainer >= 3 only).
    """
    # ``chainer_version[0]`` reads a single character and so misparses
    # double-digit major versions; split out the full major instead.
    major = int(chainer_version.split('.')[0])
    if major > 2:
        super(BiGRU, self).__init__(n_layers, in_size, out_size, dropout,
                                    initialW, initial_bias)
    else:
        super(BiGRU, self).__init__(n_layers, in_size, out_size, dropout)


问题


面经


文章

微信
公众号

扫码关注公众号