def __init__(self, level, log_path, log_name, when, interval, backupCount):
if not os.path.exists(log_path):
try:
os.makedirs(log_path)
except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info()
print(exc_type, exc_value, exc_traceback)
format = '%(asctime)s - %(levelname)s - %(message)s'
formatter = Formatter(format)
logging.basicConfig(level=level, format=format)
fileHandler = TimedRotatingFileHandler(filename=log_path + log_name, when=when, interval=interval, backupCount=backupCount)
fileHandler.setFormatter(formatter)
# logging.getLogger('') returns the root logger, so this file handler applies to every logger in the process
self.log = logging.getLogger('')
self.log.addHandler(fileHandler)
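# A standalone sketch (not taken from the class above) of the same timed-rotation setup,
# using os.path.join and makedirs(exist_ok=True) instead of concatenating log_path + log_name
# and the manual exists()/makedirs() handling; every name below is illustrative.
import logging
import os
from logging.handlers import TimedRotatingFileHandler

def build_rotating_logger(log_dir, log_name, level=logging.INFO,
                          when='midnight', interval=1, backup_count=7):
    os.makedirs(log_dir, exist_ok=True)  # idempotent; no separate exists() check needed
    handler = TimedRotatingFileHandler(os.path.join(log_dir, log_name),
                                       when=when, interval=interval,
                                       backupCount=backup_count)
    handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
    logger = logging.getLogger('rotating_example')
    logger.setLevel(level)
    logger.addHandler(handler)
    return logger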
def instantiate(p):
print("*** instantiate ***")
print(p)
with rlock:
global logger
logger = logging.getLogger("freepydius-logger")
logger.setLevel(logging.INFO)
handler = TimedRotatingFileHandler(_LOG_FILE,
when="midnight",
interval=1)
formatter = logging.Formatter("%(asctime)s %(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
log = Log("INSTANCE")
log.log(( ('Response', 'created'), ))
# return 0 for success or -1 for failure
return 0
def get_logger(logger_name):
app_conf = imp.load_source('app_conf', os.getenv('EAGLE_HOME', '..') + '/eagle_cfg.py')
_logger = logging.getLogger(logger_name)
file_formatter = Formatter(
'%(levelname)s | %(asctime)s | %(name)s | %(message)s | %(pathname)s:%(lineno)d'
)
time_rotating_handler = TimedRotatingFileHandler(
'{0}/{1}.log'.format(app_conf.LOG_PATH, logger_name), when="midnight", encoding='utf-8')
time_rotating_handler.suffix = "%Y-%m-%d"
time_rotating_handler.setFormatter(file_formatter)
stream_handler = StreamHandler(stream=sys.stdout)
echo_formatter = Formatter('[%(levelname)s][%(name)s][in %(filename)s:%(lineno)d] %(message)s')
stream_handler.setFormatter(echo_formatter)
_logger.addHandler(time_rotating_handler)
_logger.addHandler(stream_handler)
_logger.setLevel(logging.DEBUG)
return _logger
def get_logger(self):
"""?logger?????????????logger??????????
?????????????????????????????????
??????????????????????
"""
if not self.logger.handlers:  # avoid adding duplicate handlers
console_handler = logging.StreamHandler()
console_handler.setFormatter(self.formatter)
console_handler.setLevel(self.console_output_level)
self.logger.addHandler(console_handler)
# rotate to a new log file every day, keeping at most backup_count backups
file_handler = TimedRotatingFileHandler(filename=os.path.join(LOG_PATH, self.log_file_name),
when='D',
interval=1,
backupCount=self.backup_count,
delay=True,
encoding='utf-8'
)
file_handler.setFormatter(self.formatter)
file_handler.setLevel(self.file_output_level)
self.logger.addHandler(file_handler)
return self.logger
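# Note on the method above: delay=True defers opening the log file until the first record
# is emitted, so constructing the handler leaves no empty file behind; and when='D' rolls
# over a fixed 24-hour interval after the previous rollover (or the existing file's mtime),
# unlike when='midnight', which rolls over at day boundaries.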
def set_logfile(self, path=expanduser('~'), interval=5, bucket=None,
prefix=''):
self.data_logger = logging.getLogger('telemetry_file_logger')
self.data_logger.setLevel(logging.INFO)
if not bucket:
handler = TimedRotatingFileHandler(
path,
when='S',
interval=interval * 60,
backupCount=0
)
else:
handler = log.S3Batch(
path,
bucket,
prefix,
when='S',
interval=interval * 60)
handler.setFormatter('')
self.data_logger.addHandler(handler)
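# Note on the method above: logging.Handler.format() only uses self.formatter when it is
# truthy, so setFormatter('') effectively leaves the handler on logging's default formatter
# (plain '%(message)s' output). A hedged alternative, assuming the same handler object:
#
#     handler.setFormatter(logging.Formatter('%(asctime)s %(message)s'))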
def __setFileHandler__(self, level=None):
"""
set file handler
:param level:
:return:
"""
file_name = os.path.join(LOG_PATH, '{name}.log'.format(name=self.name))
# rotate the log file daily; old backups are removed automatically, keeping the last 15 days
file_handler = TimedRotatingFileHandler(filename=file_name, when='D', interval=1, backupCount=15)
file_handler.suffix = '%Y%m%d.log'
if not level:
file_handler.setLevel(self.level)
else:
file_handler.setLevel(level)
formatter = logging.Formatter('%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s')
file_handler.setFormatter(formatter)
self.addHandler(file_handler)
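# Note on the method above: rotated backups are named by appending time.strftime(suffix) to
# the base filename, but getFilesToDelete() only prunes files whose suffix matches the
# handler's extMatch regex (for when='D' the default expects '%Y-%m-%d'). With a custom
# suffix such as '%Y%m%d.log', backupCount cleanup can silently skip old files unless
# extMatch is updated as well, e.g. (details vary slightly across Python versions):
#
#     import re
#     file_handler.extMatch = re.compile(r"^\d{8}\.log$")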
def create_logger():
level = config.get('LOG_LEVEL', None)
level = LOG_LEVELS.get(level, logging.INFO)
log_dir = config.get('LOG_DIR', os.path.join(PROJECT_DIR, 'logs'))
log_path = os.path.join(log_dir, 'coco.log')
logger = logging.getLogger()
main_formatter = logging.Formatter(
fmt='%(asctime)s [%(module)s %(levelname)s] %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
console_handler = StreamHandler()
file_handler = TimedRotatingFileHandler(
filename=log_path, when='D', backupCount=10)
for handler in [console_handler, file_handler]:
handler.setFormatter(main_formatter)
logger.addHandler(handler)
logger.setLevel(level)
# def get_logger(name):
# return logging.getLogger('coco.%s' % name)
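# A hedged usage sketch (not part of the original project code): create_logger() configures
# the root logger, so per-module loggers like the commented-out get_logger() helper would
# simply propagate their records up to the root console and file handlers:
#
#     create_logger()
#     logging.getLogger('coco.app').info('service started')   # 'coco.app' is illustrative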
def __setFileHandler__(self, level=None):
"""
set file handler
:param level:
:return:
"""
file_name = os.path.join(LOG_PATH, '{name}.log'.format(name=self.name))
# rotate the log file daily; old backups are removed automatically, keeping the last 15 days
file_handler = TimedRotatingFileHandler(filename=file_name, when='D', interval=1, backupCount=15)
file_handler.suffix = '%Y%m%d.log'
if not level:
file_handler.setLevel(self.level)
else:
file_handler.setLevel(level)
formatter = logging.Formatter('%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s')
file_handler.setFormatter(formatter)
self.file_handler = file_handler
self.addHandler(file_handler)
def setup_logging():
dt_fmt = '%Y-%m-%d %H:%M:%S'
formatter = logging.Formatter('[{asctime}] [{levelname:<7}] {name}: {message}', dt_fmt, style='{')
rotating_handler = TimedRotatingFileHandler(
filename='logs/logging.log',
encoding='utf-8',
when='midnight',
backupCount=3,
utc=True,
)
rotating_handler.setFormatter(formatter)
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.addHandler(rotating_handler)
logger.addHandler(stream_handler)
return logger
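# Note on the function above: TimedRotatingFileHandler does not create missing directories,
# so 'logs/' must already exist before the handler is built. A minimal guard, assuming the
# same relative path (requires: import os):
#
#     os.makedirs('logs', exist_ok=True)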
def init_log(debug):
formatter = logging.Formatter('%(asctime)s - %(levelname)s - <%(filename)s-%(funcName)s:%(lineno)d> : %(message)s')
if debug:
level=logging.DEBUG
else:
level=logging.INFO
logbase = os.path.join(os.getcwd(), "log")
os.makedirs(logbase, exist_ok=True)
log_file = os.path.join(logbase, "deepcake.log")
logger = logging.getLogger("normal")
logger.setLevel(level)
file_handler = TimedRotatingFileHandler(log_file, when="midnight", interval=1, backupCount=30, delay=True)
console_handler = logging.StreamHandler()
file_handler.setFormatter(formatter)
console_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.addHandler(console_handler)
logger.info("init logger success {}".format(4))
return logger
def load_logger(self, log_dir, log_level, log_enabled, log_to_console):
path = log_dir + '/' + self.logname
self.logger = logging.getLogger()
formatter = logging.Formatter('%(asctime)s %(levelname)s (%(name)s): %(message)s',
'%d/%m/%Y %H:%M:%S')
file_handler = TimedRotatingFileHandler(path,
when='d',
interval=1,
backupCount=6)
file_handler.setFormatter(formatter)
self.logger.addHandler(file_handler)
if log_to_console:
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
self.logger.addHandler(console_handler)
self.set_loglevel(log_level)
self.log_enabled(log_enabled)
def setupLogger(path):
path += FILE
prepareLogfile(path)
logger = logging.getLogger('Backend')
logger.setLevel(logging.DEBUG)
handler = TimedRotatingFileHandler(
path,
when='midnight',
interval=1,
backupCount=0,
encoding=None,
delay=False,
utc=False)
formatter = logging.Formatter('[%(levelname)-8s][%(name)-8s][%(asctime)s] : %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.info("Logfile Created.")
def set_file_handler(self, level=None):
"""
set file handler
:param level:
:return:
"""
file_name = os.path.join(LOG_PATH, '{name}.log'.format(name=self.name))
# rotate the log file daily; old backups are removed automatically, keeping the last 15 days
file_handler = TimedRotatingFileHandler(
filename=file_name,
when='D',
interval=1,
backupCount=15,
encoding='utf-8'
)
file_handler.suffix = '%Y%m%d.log'
if not level:
file_handler.setLevel(self.level)
else:
file_handler.setLevel(level)
formatter = logging.Formatter('%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s')
file_handler.setFormatter(formatter)
self.file_handler = file_handler
self.addHandler(file_handler)
def add_file_handler(self, path, name, level=None):
levelname = logging.getLevelName(level) if level is not None \
else 'DEFAULT'
filename = '{path}/{name}.{level}.log'.format(
path=os.path.abspath(path), name=name,
level=levelname)
if filename not in self.file_handlers:
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(filename, when="midnight",
backupCount=7)
self.file_handlers[filename] = file_handler
if level is not None:
file_handler.setLevel(level)
self.add_handler(file_handler)
def ofchannel_log(self, flowmods):
if self.dp is not None:
if self.dp.ofchannel_log is not None:
if self.ofchannel_logger is None:
self.ofchannel_logger = logging.getLogger(
self.dp.ofchannel_log)
logger_handler = TimedRotatingFileHandler(
self.dp.ofchannel_log,
when='midnight')
log_fmt = '%(asctime)s %(name)-6s %(levelname)-8s %(message)s'
logger_handler.setFormatter(
logging.Formatter(log_fmt, '%b %d %H:%M:%S'))
self.ofchannel_logger.addHandler(logger_handler)
self.ofchannel_logger.propagate = 0
self.ofchannel_logger.setLevel(logging.DEBUG)
for flowmod in flowmods:
self.ofchannel_logger.debug(flowmod)
def get_logger(self):
"""?logger?????????????logger??????????"""
if not self.logger.handlers:  # avoid adding duplicate handlers
# if enabled, also log to the console
if self.console_output == 1:
console_handler = logging.StreamHandler()
console_handler.setFormatter(self.formatter)
console_handler.setLevel(self.console_output_level)
self.logger.addHandler(console_handler)
else:
pass
# if enabled, also write logs to a file
if self.file_output == 1:
# rotate to a new log file at midnight, keeping at most backup_count backups
# TODO: there is a known bug here related to this handler
file_handler = TimedRotatingFileHandler(self.log_path + self.log_file_name, when='midnight', interval=1, backupCount=self.backup_count)
file_handler.setFormatter(self.formatter)
file_handler.setLevel(self.file_output_level)
self.logger.addHandler(file_handler)
else:
pass
return self.logger
def __init__(self, filename, max_bytes=0, backup_count=0, encoding=None,
delay=0, when='h', interval=1, utc=False):
# If rotation/rollover is wanted, it doesn't make sense to use another
# mode. If for example 'w' were specified, then if there were multiple
# runs of the calling application, the logs from previous runs would be
# lost if the 'w' is respected, because the log file would be truncated
# on each run.
handlers.TimedRotatingFileHandler.__init__(
self, filename, when, interval, backup_count, encoding, delay, utc)
self.maxBytes = max_bytes
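# A minimal, self-contained sketch (not from the original source) of the size-or-time
# rollover idea this __init__ sets up: subclass TimedRotatingFileHandler and also roll over
# once maxBytes would be exceeded. The class name and the shouldRollover body are assumptions,
# following the commonly published recipe rather than this project's actual code.
import time
from logging import handlers

class SizedTimedRotatingFileHandler(handlers.TimedRotatingFileHandler):
    def __init__(self, filename, max_bytes=0, backup_count=0, encoding=None,
                 delay=False, when='h', interval=1, utc=False):
        handlers.TimedRotatingFileHandler.__init__(
            self, filename, when, interval, backup_count, encoding, delay, utc)
        self.maxBytes = max_bytes

    def shouldRollover(self, record):
        if self.stream is None:            # delay=True: the stream is not opened yet
            self.stream = self._open()
        if self.maxBytes > 0:              # size check, as in RotatingFileHandler
            msg = "%s\n" % self.format(record)
            self.stream.seek(0, 2)
            if self.stream.tell() + len(msg) >= self.maxBytes:
                return True
        return int(time.time()) >= self.rolloverAt   # usual time-based check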
# noinspection PyIncorrectDocstring
def activateOptions(self):
logging.handlers.RotatingFileHandler.__init__(self, **self.log4pyProps)
self.setLevel(self.threshold)
# TimedRotatingFileHandler is only available for 2.4 and up
def activateOptions(self):
logging.handlers.TimedRotatingFileHandler.__init__(self, **self.log4pyProps)
self.setLevel(self.threshold)
def configure_logging(app):
if app.config['ERROR_TO_FILE']:
error_log_name = app.config['ERROR_LOG_NAME']
file_handler = TimedRotatingFileHandler(
filename=create_path_if_not_exist(error_log_name),
when='d')
file_handler.setFormatter(Formatter(
'%(asctime)s %(levelname)s: %(message)s '
))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('logtoes startup')
def create_multiprocess_logger(logger_name, persist_logger_name, log_level, log_format, log_queue, log_file_path,
when_to_rotate, keep_log_days, log_suffix=None):
"""
Creates queue logger and persist logger.
Queue logger should be used to log into. It is Thread and Process safe.
Persist logger is logger which persist data to disk. LogCollector moves data from queue log into persist log.
"""
queue_log_formatter = logging.Formatter(log_format)
queue_log_handler = QueueHandler(log_queue, persist_logger_name)
queue_log_handler.setFormatter(queue_log_formatter)
queue_logger = logging.getLogger(logger_name)
queue_logger.setLevel(log_level)
queue_logger.handlers = []
queue_logger.addHandler(queue_log_handler)
queue_logger.propagate = False
persist_log_formatter = logging.Formatter('%(message)s')
persist_log_handler = TimedRotatingFileHandler(log_file_path, when=when_to_rotate, interval=1, backupCount=keep_log_days)
if log_suffix is not None:
persist_log_handler.suffix = log_suffix
persist_log_handler.setFormatter(queue_log_formatter)
persist_logger = logging.getLogger(persist_logger_name)
persist_logger.setLevel(log_level)
persist_logger.handlers = []
persist_logger.addHandler(persist_log_handler)
persist_logger.propagate = False
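# A standard-library sketch of the same queue-to-file pattern (this is not the project's
# QueueHandler/LogCollector, whose signatures differ): logging.handlers.QueueListener drains
# the queue in a background thread and hands each record to the rotating file handler.
import logging
import logging.handlers
import multiprocessing

def start_queue_logging(log_file_path, when_to_rotate='midnight', keep_log_days=7):
    log_queue = multiprocessing.Queue(-1)
    file_handler = logging.handlers.TimedRotatingFileHandler(
        log_file_path, when=when_to_rotate, interval=1, backupCount=keep_log_days)
    file_handler.setFormatter(logging.Formatter('%(message)s'))
    listener = logging.handlers.QueueListener(log_queue, file_handler)
    listener.start()

    queue_logger = logging.getLogger('queue_logger')
    queue_logger.setLevel(logging.INFO)
    queue_logger.addHandler(logging.handlers.QueueHandler(log_queue))
    return queue_logger, listener   # call listener.stop() on shutdown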
def get_log_handler(self):
"""Configure and return file logging handler."""
path = osp.join(gettempdir(), 'pywebhdfs.log')
level = lg.DEBUG
if 'configuration' in self.config:
configuration = self.config['configuration']
if 'logging' in configuration:
logging_config = configuration['logging']
if 'disable' in logging_config and logging_config['disable'] == True:
return NullHandler()
if 'path' in logging_config:
path = logging_config['path'] # Override default path.
if 'level' in logging_config:
level = getattr(lg, logging_config['level'].upper())
log_handler = TimedRotatingFileHandler(
path,
when='midnight', # Daily backups.
backupCount=5,
encoding='utf-8',
)
fmt = '%(asctime)s\t%(name)-16s\t%(levelname)-5s\t%(message)s'
log_handler.setFormatter(lg.Formatter(fmt))
log_handler.setLevel(level)
return log_handler
def setup_logger(name):
project_path = os.path.abspath(os.path.join(os.path.dirname(__file__)))
file_name = name + '.log'
complete_file_name = project_path + '/' + file_name
logging_level = logging.DEBUG
formatter = logging.Formatter('%(asctime)s - [%(levelname)s] - %(module)s - %(funcName)s:%(lineno)d - %(message)s','%Y-%m-%d %H:%M:%S')
formatter = logging.Formatter('%(message)s')  # note: this overrides the detailed formatter defined on the previous line
logging_handler = TimedRotatingFileHandler(complete_file_name, when='midnight')
logging_handler.setFormatter(formatter)
logger = logging.getLogger(name)
logger.setLevel(logging_level)
logger.addHandler(logging_handler)
return logger
def add_file_handler(logger):
"""
Add a log handler that writes logs to an automatically rotated file.
The file will be rotated every day.
:param logger:
:return:
"""
log_dir = 'log'
if not os.path.exists(log_dir):
os.mkdir(log_dir)
filename = os.path.join(log_dir, 'sigma.log')
handler = TimedRotatingFileHandler(filename, when='d', interval=1, encoding='utf-8', utc=True)
logger.add_handler(handler)
def __init__(self, config, debug_mode=False):
self.debug_mode = debug_mode
self.global_config=global_config(config)
self.logger = logging.getLogger(__name__)
self.logger.setLevel(logging.DEBUG)
self.logger.propagate = False
self.lst_yes= ['yes', 'Yes', 'y', 'Y']
formatter = logging.Formatter("%(asctime)s: [%(levelname)s] - %(filename)s (%(lineno)s): %(message)s", "%b %e %H:%M:%S")
if self.global_config.log_dest=='stdout' or self.debug_mode:
fh=logging.StreamHandler(sys.stdout)
elif self.global_config.log_dest=='file':
fh = TimedRotatingFileHandler(self.global_config.log_file, when="d",interval=1,backupCount=self.global_config.log_days_keep)
if self.global_config.log_level=='debug' or self.debug_mode:
fh.setLevel(logging.DEBUG)
elif self.global_config.log_level=='info':
fh.setLevel(logging.INFO)
fh.setFormatter(formatter)
self.logger.addHandler(fh)
self.my_eng=mysql_engine(self.global_config, self.logger)
self.pg_eng=pg_engine(self.global_config, self.my_eng.my_tables, self.logger, self.global_config.sql_dir)
self.pid_file=self.global_config.pid_file
self.exit_file=self.global_config.exit_file
self.email_alerts=email_alerts(self.global_config.email_config, self.logger)
self.sleep_loop=self.global_config.sleep_loop
def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None,
delay=0, when='h', interval=1, utc=False):
# If rotation/rollover is wanted, it doesn't make sense to use another
# mode. If for example 'w' were specified, then if there were multiple
# runs of the calling application, the logs from previous runs would be
# lost if the 'w' is respected, because the log file would be truncated
# on each run.
if maxBytes > 0:
mode = 'a'
handlers.TimedRotatingFileHandler.__init__(
self, filename, when, interval, backupCount, encoding, delay, utc)
self.maxBytes = maxBytes
def init_logger(cls):
log_dir = os.path.dirname(cls.config['log_file_path'])
if not os.path.isdir(log_dir):
try:
os.makedirs(log_dir, 0755)
except OSError as e:
# if the failure is not a permissions error, re-raise; otherwise fall back to a relative log path
if e.errno != errno.EACCES:
raise
cls.config['log_file_path'] = './logs/jimvc.log'
log_dir = os.path.dirname(cls.config['log_file_path'])
if not os.path.isdir(log_dir):
os.makedirs(log_dir, 0755)
print u'log file will be written to ' + cls.config['log_file_path']
_logger = logging.getLogger(cls.config['log_file_path'])
if cls.config['DEBUG']:
_logger.setLevel(logging.DEBUG)
else:
_logger.setLevel(logging.INFO)
fh = TimedRotatingFileHandler(cls.config['log_file_path'], when=cls.config['log_cycle'],
interval=1, backupCount=7)
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(funcName)s - %(lineno)s - %(message)s')
fh.setFormatter(formatter)
_logger.addHandler(fh)
return _logger
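# A Python 3 sketch of the same "fall back to a local log directory on EACCES" idea (the
# function above is Python 2 code: octal 0755 literals and a print statement); the paths,
# logger name and backupCount below are assumptions, not the project's actual values.
import errno
import logging
import os
from logging.handlers import TimedRotatingFileHandler

def init_logger(log_file_path='/var/log/jimvc/jimvc.log', when='midnight', debug=False):
    try:
        os.makedirs(os.path.dirname(log_file_path), mode=0o755, exist_ok=True)
    except OSError as e:
        if e.errno != errno.EACCES:
            raise
        log_file_path = './logs/jimvc.log'               # fall back to a writable location
        os.makedirs(os.path.dirname(log_file_path), exist_ok=True)
    logger = logging.getLogger(log_file_path)
    logger.setLevel(logging.DEBUG if debug else logging.INFO)
    fh = TimedRotatingFileHandler(log_file_path, when=when, interval=1, backupCount=7)
    fh.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(funcName)s - %(lineno)s - %(message)s'))
    logger.addHandler(fh)
    return logger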
def logToFile(filename=None):
if not filename:
filename = '/var/log/myDevices/cayenne.log'
handler = TimedRotatingFileHandler(filename, when="midnight", interval=1, backupCount=7)
handler.setFormatter(LOG_FORMATTER)
handler.rotator=rotator
handler.namer=namer
LOGGER.addHandler(handler)
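# A hedged sketch of what such namer/rotator callables can look like (these are NOT the
# project's actual rotator/namer used above; just the common gzip-on-rotate pattern the
# BaseRotatingHandler.rotator/namer hooks have supported since Python 3.3):
import gzip
import os
import shutil

def namer(default_name):
    # e.g. cayenne.log.2024-01-01 -> cayenne.log.2024-01-01.gz
    return default_name + '.gz'

def rotator(source, dest):
    # compress the freshly rotated file and drop the uncompressed original
    with open(source, 'rb') as f_in, gzip.open(dest, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)
    os.remove(source)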
def get_file_handler(self, level, format):
filehandler = TimedRotatingFileHandler(self.name, when='D', interval=1, backupCount=5)
filehandler.setLevel(level)
filehandler.suffix = '%Y%m%d.log'
formatter = logging.Formatter(fmt=format, datefmt='%Y-%m-%d %H:%M:%S')
filehandler.setFormatter(formatter)
return filehandler
def setup_logging(log_file):
# TODO: more advanced filters, logging info like when rooms go live to console
# https://docs.python.org/3/library/logging.config.html#logging-config-dictschema
log_backup_time = dt_time(tzinfo=TOKYO_TZ)
log_filter = logging.Filter(name="showroom")
file_log_handler = TimedRotatingFileHandler(log_file, encoding='utf8',
when='midnight', atTime=log_backup_time)
file_log_formatter = logging.Formatter(fmt='%(asctime)s %(name)-12s %(levelname)-8s %(threadName)s:\n%(message)s',
datefmt='%m-%d %H:%M:%S')
file_log_handler.setFormatter(file_log_formatter)
# leave this in local time?
file_log_handler.addFilter(log_filter)
file_log_handler.setLevel(logging.DEBUG)
console_handler = logging.StreamHandler()
console_formatter = logging.Formatter(fmt='%(asctime)s %(message)s', datefmt=HHMM_FMT)
console_formatter.converter = tokyotime
console_handler.setLevel(logging.INFO)
console_handler.setFormatter(console_formatter)
console_handler.addFilter(log_filter)
logger = logging.getLogger('showroom')
logger.setLevel(logging.DEBUG)
logger.propagate = False
# at this moment, there shouldn't be any handlers in the showroom logger
# however, i can't preclude the possibility of there ever being such handlers
for handler in (file_log_handler, console_handler):
if handler not in logger.handlers:
logger.addHandler(handler)
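# Note on the function above: atTime is only consulted for when='midnight' or weekly
# ('W0'-'W6') rollovers, and the handler reads just its hour/minute/second fields, so the
# tzinfo attached via TOKYO_TZ does not by itself shift the rollover moment; rollover is
# computed in local time (or UTC when utc=True).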