def setUp(self):
    BaseTest.setUp(self)
    for k, v in my_logging_levels.items():
        logging.addLevelName(k, v)
def __init__(self, name=None, level=logging.NOTSET):
    super(DataStoreLogger, self).__init__(name, level)
    logging.addLevelName(DataStore.LOG_LEVEL_JOURNAL, "JOURNAL")
def get_logger():
    '''
    Returns logger used by multiprocessing
    '''
    global _logger
    import logging, atexit

    logging._acquireLock()
    try:
        if not _logger:
            _logger = logging.getLogger(LOGGER_NAME)
            _logger.propagate = 0
            logging.addLevelName(SUBDEBUG, 'SUBDEBUG')
            logging.addLevelName(SUBWARNING, 'SUBWARNING')

            # XXX multiprocessing should cleanup before logging
            if hasattr(atexit, 'unregister'):
                atexit.unregister(_exit_function)
                atexit.register(_exit_function)
            else:
                atexit._exithandlers.remove((_exit_function, (), {}))
                atexit._exithandlers.append((_exit_function, (), {}))
    finally:
        logging._releaseLock()

    return _logger
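
The SUBDEBUG and SUBWARNING levels registered above follow the multiprocessing-style custom levels. A minimal standalone sketch of how such levels can be defined and emitted; the numeric values 5 and 25 mirror multiprocessing's defaults and are assumptions here, not taken from the snippet:

import logging

SUBDEBUG = 5      # assumed value, below DEBUG (10)
SUBWARNING = 25   # assumed value, between INFO (20) and WARNING (30)
logging.addLevelName(SUBDEBUG, 'SUBDEBUG')
logging.addLevelName(SUBWARNING, 'SUBWARNING')

logging.basicConfig(format='[%(levelname)s] %(message)s', level=SUBDEBUG)
logging.getLogger('mp.demo').log(SUBWARNING, 'worker exiting')   # -> [SUBWARNING] worker exiting
logging.getLogger('mp.demo').log(SUBDEBUG, 'semaphore created')  # -> [SUBDEBUG] semaphore created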
def setup(debug=False, statsd_host=None):
    level = 'DEBUG' if debug else 'INFO'
    dictConfig(dict(
        version=1,
        disable_existing_loggers=True,
        loggers={
            '': {
                'level': level,
                'handlers': ['console']
            },
        },
        handlers={
            'console': {
                'class': 'logging.StreamHandler',
                'formatter': 'standard',
                # Log to stderr so that click commands can make
                # use of stdout
                'stream': sys.stderr
            },
        },
        formatters={
            'standard': {
                'format': '[%(levelname)s]%(db)s%(message)s',
                '()': 'odooku.logger.DBFormatter'
            },
        }
    ))

    OdookuLogger._statsd_host = statsd_host
    logging.setLoggerClass(OdookuLogger)
    logging.addLevelName(25, 'INFO')

    # Prevent odoo from overriding log config
    import odoo.netsvc
    odoo.netsvc._logger_init = True
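
The addLevelName(25, 'INFO') call above makes records logged at level 25 render with the INFO label even though 25 sits above the stock INFO level. A small self-contained sketch of the same trick, independent of the Odooku setup:

import logging

logging.addLevelName(25, 'INFO')   # level 25 now formats as "INFO"
logging.basicConfig(format='[%(levelname)s] %(message)s', level=logging.DEBUG)
logging.getLogger(__name__).log(25, 'between INFO (20) and WARNING (30)')
# prints: [INFO] between INFO (20) and WARNING (30)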
def setup_log_colors():
    logging.addLevelName(logging.DEBUG, '\033[1;37m%s\033[1;0m' % logging.getLevelName(logging.DEBUG))
    logging.addLevelName(logging.INFO, '\033[1;36m%s\033[1;0m' % logging.getLevelName(logging.INFO))
    logging.addLevelName(logging.WARNING, '\033[1;31m%s\033[1;0m' % logging.getLevelName(logging.WARNING))
    logging.addLevelName(logging.ERROR, '\033[1;41m%s\033[1;0m' % logging.getLevelName(logging.ERROR))
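
Because %(levelname)s is resolved through the registered level name, the ANSI escape sequences added above end up directly in the formatted output. A rough usage sketch, assuming setup_log_colors() as defined above and an ANSI-capable terminal; note that calling it twice would wrap the names a second time, so it is typically invoked only once:

import logging

setup_log_colors()   # as defined above
logging.basicConfig(format='%(levelname)s %(message)s', level=logging.DEBUG)
logging.getLogger('demo').info('level name printed in cyan')
logging.getLogger('demo').error('level name printed on a red background')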
def monkeypatch_resp_logging_level():
    logging.RESP = const.LOGGING.RESP_STREAM_LEVEL
    logging.addLevelName(logging.RESP, "RESP")
    logging.Logger.resp = lambda inst, msg, *args, **kwargs: inst.log(logging.RESP, msg, *args, **kwargs)
    logging.resp = lambda msg, *args, **kwargs: logging.log(logging.RESP, msg, *args, **kwargs)
def inject_verbose_info(self):
    logging.VERBOSE = 15
    logging.verbose = lambda x: logging.log(logging.VERBOSE, x)
    logging.addLevelName(logging.VERBOSE, "VERBOSE")
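
A slightly more conventional variant of the VERBOSE injection above attaches a method to logging.Logger, so that lazy %-formatting, *args and exc_info keep working. The level number 15 follows the snippet; the method name verbose is an assumption:

import logging

VERBOSE = 15
logging.addLevelName(VERBOSE, 'VERBOSE')

def _verbose(self, msg, *args, **kwargs):
    # standard pattern for a custom level method
    if self.isEnabledFor(VERBOSE):
        self._log(VERBOSE, msg, args, **kwargs)

logging.Logger.verbose = _verbose

logging.basicConfig(level=VERBOSE)
logging.getLogger(__name__).verbose('sits between DEBUG (10) and INFO (20)')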
def initialize_logger():
    """sets up the logger including a console, file and qt handler
    """
    # initialize logger
    logging.basicConfig(format="%(message)s", level=logging.INFO)
    logging.addLevelName(logging.CRITICAL, 'critical')
    logging.addLevelName(logging.ERROR, 'error')
    logging.addLevelName(logging.WARNING, 'warning')
    logging.addLevelName(logging.INFO, 'info')
    logging.addLevelName(logging.DEBUG, 'debug')
    logging.addLevelName(logging.NOTSET, 'not set')
    logging.captureWarnings(True)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    # set level of stream handler which logs to stderr
    logger.handlers[0].setLevel(logging.WARNING)

    # add file logger
    rotating_file_handler = logging.handlers.RotatingFileHandler(
        'qudi.log', maxBytes=10*1024*1024, backupCount=5)
    rotating_file_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(levelname)s %(name)s %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S"))
    rotating_file_handler.doRollover()
    rotating_file_handler.setLevel(logging.DEBUG)
    logger.addHandler(rotating_file_handler)

    # add Qt log handler
    qt_log_handler = QtLogHandler()
    qt_log_handler.setLevel(logging.DEBUG)
    logging.getLogger().addHandler(qt_log_handler)
    for logger_name in ['core', 'gui', 'logic', 'hardware']:
        logging.getLogger(logger_name).setLevel(logging.DEBUG)

# global variables used by exception handler
def setup_logging(level=None):
    """Configure logging"""
    logging.addLevelName(DRYRUN_num, DRYRUN_name)
    logging.Logger.dryrun = _dryrun
    try:
        logging.config.dictConfig(_get_dict_config())
        logging.captureWarnings(True)
        silence_verbose_loggers()
        if level is not None:
            logging.getLogger().setLevel(level)
    except Exception:  # pylint:disable=broad-except
        logging.basicConfig(level=logging.WARN)
        logging.warning('Could not configure logging, using basicConfig', exc_info=True)
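
The _dryrun helper is referenced above but not shown. A plausible definition under the same custom-level pattern; the numeric value and name below are assumptions, not taken from the source:

DRYRUN_num, DRYRUN_name = 25, 'DRYRUN'   # assumed values

def _dryrun(self, msg, *args, **kwargs):
    # bound as logging.Logger.dryrun in setup_logging() above
    if self.isEnabledFor(DRYRUN_num):
        self._log(DRYRUN_num, msg, args, **kwargs)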
def setup(level):
    logging.addLevelName(DEBUG, 'DEBG')
    logging.addLevelName(INFO, 'INFO')
    logging.addLevelName(WARNING, 'WARN')
    logging.addLevelName(ERROR, 'ERR ')
    logging.addLevelName(CRITICAL, 'CRIT')

    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(Formatter(format))
    handler.addFilter(Filter())

    root = logging.getLogger()
    root.addHandler(handler)
    root.setLevel(level)
def __init__(self, name=None):
    if not Logger.console_handler or not Logger.file_handler:
        Logger.logger, Logger.console_handler, Logger.file_handler = self.create_logger(name)

    # Level names
    logging.addLevelName(Logger.EXTRA, "EXTRA")
    logging.addLevelName(Logger.FINER, "FINER")
    logging.addLevelName(Logger.VERBOSE, "VERBOSE")
def _start_logging(self, filename):
    # will get the logging instance going
    loglevel_config = 'logging.%s' % self.loglevel
    # i'd like the stdout to be under another logging name than 'con_out'
    console_log_level = 25  # between INFO and WARNING
    quiet = self.quiet
    logging.addLevelName(console_log_level, "CONSOLE")

    def con_out(self, message, *args, **kws):
        if not quiet:
            self._log(console_log_level, message, args, **kws)

    logging.Logger.con_out = con_out

    logging.basicConfig(filename=filename,
                        level=eval(loglevel_config),
                        format='%(asctime)s %(name)s %(levelname)s: %(message)s',
                        datefmt='%m-%d %H:%M:%S'
                        )
    if not self.quiet:  # pragma: no cover
        console = logging.StreamHandler(sys.stdout)
        formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s: %(message)s', '%m-%d %H:%M:%S')
        console.setFormatter(formatter)
        console.setLevel(console_log_level)

    self.logger = logging.getLogger(__name__)
    if not self.quiet:
        self.logger.addHandler(console)  # pragma: no cover
    self.logger.con_out("Log File Created at %s" % filename)
def setup_logging(log_level='INFO'):
    log_format = "%(message)s"
    logging.addLevelName(15, 'FINE')
    logging.basicConfig(format=log_format, level=log_level)
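
Because addLevelName(15, 'FINE') runs before basicConfig(), the string 'FINE' resolves as a level name and can be passed straight through. A short usage sketch, assuming setup_logging() as defined above and logging already imported:

setup_logging(log_level='FINE')                   # 'FINE' resolves because it was registered first
logging.log(15, 'finer-grained progress detail')  # passes the FINE threshold on the root logger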