def create_logger(args, result_dir):
    """Configure the root logger to write to stdout and to a log file.

    Also logs the interpreter version, the chainer version, CUDA/cuDNN
    availability, and the parsed ``args`` for experiment record-keeping.

    Args:
        args: Parsed command-line arguments; logged verbatim.
        result_dir: Directory in which ``stdout.log`` is created.
            Assumed to exist already -- TODO confirm the caller creates it.
    """
    import os

    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')

    # Console handler: mirror everything to stdout.
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)
    root.addHandler(ch)

    # File handler: os.path.join is portable, unlike "{}/stdout.log".
    file_handler = logging.FileHandler(os.path.join(result_dir, 'stdout.log'))
    file_handler.setFormatter(formatter)
    root.addHandler(file_handler)

    # Lazy %-style args defer formatting until the record is actually emitted.
    logging.info(sys.version_info)
    logging.info('chainer version: %s', chainer.__version__)
    logging.info('cuda: %s, cudnn: %s',
                 chainer.cuda.available, chainer.cuda.cudnn_enabled)
    logging.info(args)
# Example source code for the Python `__version__()` attribute
def create_logger(args, result_dir):
    """Set up root logging so every message reaches stdout and a file.

    The file handler writes to ``{result_dir}/stdout.log``.  After the
    handlers are installed, the interpreter version, chainer version,
    CUDA/cuDNN availability, and ``args`` are logged once.
    """
    fmt = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')

    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(logging.DEBUG)
    file_handler = logging.FileHandler("{}/stdout.log".format(result_dir))

    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    for handler in (stream_handler, file_handler):
        handler.setFormatter(fmt)
        root_logger.addHandler(handler)

    logging.info(sys.version_info)
    logging.info('chainer version: {}'.format(chainer.__version__))
    logging.info('cuda: {}, cudnn: {}'.format(
        chainer.cuda.available, chainer.cuda.cudnn_enabled))
    logging.info(args)
def __init__(self, n_layers, in_size, out_size, dropout=0.5,
             initialW=None, initial_bias=None):
    """Build the stacked LSTM, forwarding initializers when supported.

    On newer chainer (major version > 2) the parent constructor also takes
    ``initialW``/``initial_bias`` -- presumably added in chainer v3; older
    versions only receive the arguments they understand.
    NOTE(review): ``int(chainer_version[0])`` reads a single character, which
    would misparse a two-digit major version -- confirm `chainer_version`'s
    format.
    """
    extra_args = ()
    if int(chainer_version[0]) > 2:
        extra_args = (initialW, initial_bias)
    super(LSTM, self).__init__(n_layers, in_size, out_size, dropout,
                               *extra_args)
def __init__(self, n_layers, in_size, out_size, dropout=0.5,
             initialW=None, initial_bias=None):
    """Build the bidirectional LSTM, forwarding initializers when supported.

    On newer chainer (major version > 2) the parent constructor also takes
    ``initialW``/``initial_bias`` -- presumably added in chainer v3; older
    versions only receive the arguments they understand.
    NOTE(review): ``int(chainer_version[0])`` reads a single character, which
    would misparse a two-digit major version -- confirm `chainer_version`'s
    format.
    """
    extra_args = ()
    if int(chainer_version[0]) > 2:
        extra_args = (initialW, initial_bias)
    super(BiLSTM, self).__init__(n_layers, in_size, out_size, dropout,
                                 *extra_args)
def __init__(self, n_layers, in_size, out_size, dropout=0.5,
             initialW=None, initial_bias=None):
    """Build the stacked GRU, forwarding initializers when supported.

    On newer chainer (major version > 2) the parent constructor also takes
    ``initialW``/``initial_bias`` -- presumably added in chainer v3; older
    versions only receive the arguments they understand.
    NOTE(review): ``int(chainer_version[0])`` reads a single character, which
    would misparse a two-digit major version -- confirm `chainer_version`'s
    format.
    """
    extra_args = ()
    if int(chainer_version[0]) > 2:
        extra_args = (initialW, initial_bias)
    super(GRU, self).__init__(n_layers, in_size, out_size, dropout,
                              *extra_args)
def __init__(self, n_layers, in_size, out_size, dropout=0.5,
             initialW=None, initial_bias=None):
    """Build the bidirectional GRU, forwarding initializers when supported.

    On newer chainer (major version > 2) the parent constructor also takes
    ``initialW``/``initial_bias`` -- presumably added in chainer v3; older
    versions only receive the arguments they understand.
    NOTE(review): ``int(chainer_version[0])`` reads a single character, which
    would misparse a two-digit major version -- confirm `chainer_version`'s
    format.
    """
    extra_args = ()
    if int(chainer_version[0]) > 2:
        extra_args = (initialW, initial_bias)
    super(BiGRU, self).__init__(n_layers, in_size, out_size, dropout,
                                *extra_args)