def setup_logging(verbose=0, colors=False, name=None):
"""Configure console logging. Info and below go to stdout, others go to stderr.
    :param int verbose: Verbosity level. Values > 0 print debug statements; values > 1 are passed to sphinx-build.
    :param bool colors: Print colored text in non-verbose mode.
    :param str name: Name of the logger to attach handlers to. Used for testing.
"""
root_logger = logging.getLogger(name)
root_logger.setLevel(logging.DEBUG if verbose > 0 else logging.INFO)
formatter = ColorFormatter(verbose > 0, colors)
if colors:
colorclass.Windows.enable()
handler_stdout = logging.StreamHandler(sys.stdout)
handler_stdout.setFormatter(formatter)
handler_stdout.setLevel(logging.DEBUG)
handler_stdout.addFilter(type('', (logging.Filter,), {'filter': staticmethod(lambda r: r.levelno <= logging.INFO)}))
root_logger.addHandler(handler_stdout)
handler_stderr = logging.StreamHandler(sys.stderr)
handler_stderr.setFormatter(formatter)
handler_stderr.setLevel(logging.WARNING)
root_logger.addHandler(handler_stderr)
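# A minimal stdlib-only sketch of the same stdout/stderr split (illustrative;
# the ColorFormatter and colorclass pieces above belong to the surrounding
# project and are replaced here with a plain logging.Formatter):
import logging
import sys

def _demo_split_console_logging():
    logger = logging.getLogger('demo.split')
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(levelname)s %(message)s')
    handler_stdout = logging.StreamHandler(sys.stdout)
    handler_stdout.setLevel(logging.DEBUG)
    handler_stdout.setFormatter(formatter)
    # Only records at INFO and below pass; plain callables are valid filters in Python 3.2+.
    handler_stdout.addFilter(lambda record: record.levelno <= logging.INFO)
    handler_stderr = logging.StreamHandler(sys.stderr)
    handler_stderr.setLevel(logging.WARNING)
    handler_stderr.setFormatter(formatter)
    logger.addHandler(handler_stdout)
    logger.addHandler(handler_stderr)
    return logger

# _demo_split_console_logging().warning("goes to stderr, not stdout")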
def __init__(self, pool_names, max_restarts=0, options=None):
self.names = pool_names
self.queue = multiprocessing.Queue()
self.pool = dict()
self.max_restarts = max_restarts
self.options = options or dict()
self.dog_path = os.curdir
self.dog_handler = LiveReload(self)
# self.dog_observer = Observer()
# self.dog_observer.schedule(self.dog_handler, self.dog_path, recursive=True)
if multiprocessing.get_start_method() != 'fork': # pragma: no cover
root_logger = logging.getLogger()
self.log_listener = QueueListener(self.queue, *root_logger.handlers)
# TODO: Find out how to get the watchdog + livereload working on a later moment.
# self.dog_observer.start()
self._restarts = dict()
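# Minimal sketch of the queue-based logging pattern used above (names here are
# illustrative): when the start method is not 'fork', worker processes log
# through a QueueHandler and the parent drains the queue with a QueueListener
# that forwards records to its own handlers.
import logging
import multiprocessing
from logging.handlers import QueueHandler, QueueListener

def _demo_worker(queue):
    worker_logger = logging.getLogger('demo.worker')
    worker_logger.addHandler(QueueHandler(queue))
    worker_logger.setLevel(logging.INFO)
    worker_logger.info('hello from the worker process')

def _demo_queue_logging():
    logging.basicConfig(level=logging.INFO)
    queue = multiprocessing.Queue()
    listener = QueueListener(queue, *logging.getLogger().handlers)
    listener.start()
    proc = multiprocessing.Process(target=_demo_worker, args=(queue,))
    proc.start()
    proc.join()
    listener.stop()

# Guard the call so it also works with the 'spawn' start method:
# if __name__ == '__main__':
#     _demo_queue_logging()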
def common_logger_config(self, logger, config, incremental=False):
"""
Perform configuration which is common to root and non-root loggers.
"""
level = config.get('level', None)
if level is not None:
logger.setLevel(_checkLevel(level))
if not incremental:
# Remove any existing handlers
for h in logger.handlers[:]:
logger.removeHandler(h)
handlers = config.get('handlers', None)
if handlers:
self.add_handlers(logger, handlers)
filters = config.get('filters', None)
if filters:
self.add_filters(logger, filters)
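# For context: the level/handlers/filters keys handled above are the same keys
# a user supplies per logger to logging.config.dictConfig(); a minimal sketch:
import logging.config

_DEMO_DICT_CONFIG = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {'class': 'logging.StreamHandler', 'level': 'INFO'},
    },
    'loggers': {
        'demo.app': {'level': 'DEBUG', 'handlers': ['console']},
    },
}
# logging.config.dictConfig(_DEMO_DICT_CONFIG)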
def setup_logging():
'''Sets up internal logging. Run this once at startup.'''
logger = logging.getLogger()
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
if options.log.filename:
handler = logging.handlers.TimedRotatingFileHandler(filename=options.log.filename, when=options.log.when, backupCount=options.log.backup_count, utc=True)
handler.setFormatter(formatter)
logger.addHandler(handler)
if not options.main.daemon:
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)
level = getattr(logging, options.log.level.upper())
logger.setLevel(level)
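# Stdlib-only sketch of the time-based rotation configured above (the 'options'
# object comes from the surrounding project, so placeholder values are used):
import logging
import logging.handlers

def _demo_timed_rotating_log(path='app.log'):
    handler = logging.handlers.TimedRotatingFileHandler(
        filename=path, when='midnight', backupCount=7, utc=True)
    handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
    logging.getLogger().addHandler(handler)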
def common_logger_config(self, logger, config, incremental=False):
"""
Perform configuration which is common to root and non-root loggers.
"""
level = config.get('level', None)
if level is not None:
logger.setLevel(logging._checkLevel(level))
if not incremental:
#Remove any existing handlers
for h in logger.handlers[:]:
logger.removeHandler(h)
handlers = config.get('handlers', None)
if handlers:
self.add_handlers(logger, handlers)
filters = config.get('filters', None)
if filters:
self.add_filters(logger, filters)
def __init__(self, filename=None, directory=None, suffix=".log",
program_name=None, formatter=formatter.TEXT_FORMATTER,
level=None):
"""Log file output.
:param filename: The log file path to write to.
If directory is also specified, both will be combined.
:param directory: The log directory to write to.
If no filename is specified, the program name and suffix will be used
        to construct the full path relative to the directory.
        :param suffix: The log file name suffix.
            This will only be used if no filename has been provided.
:param program_name: Program name. Autodetected by default.
"""
logpath = _get_log_file_path(filename, directory,
program_name, suffix)
handler = logging.handlers.WatchedFileHandler(logpath)
super(File, self).__init__(handler, formatter, level)
def __init__(self, filename=None, directory=None, suffix='.log',
program_name=None, formatter=formatter.TEXT_FORMATTER,
level=None, max_size_bytes=0, backup_count=0):
"""Rotating log file output.
:param filename: The log file path to write to.
If directory is also specified, both will be combined.
:param directory: The log directory to write to.
If no filename is specified, the program name and suffix will be used
        to construct the full path relative to the directory.
        :param suffix: The log file name suffix.
            This will only be used if no filename has been provided.
:param program_name: Program name. Autodetected by default.
:param max_size_bytes: allow the file to rollover at a
predetermined size.
:param backup_count: the maximum number of files to rotate
logging output between.
"""
logpath = _get_log_file_path(filename, directory,
program_name, suffix)
handler = logging.handlers.RotatingFileHandler(
logpath, maxBytes=max_size_bytes, backupCount=backup_count)
super(RotatingFile, self).__init__(handler, formatter, level)
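# Stdlib-only sketch of size-based rotation (the _get_log_file_path helper and
# the wrapping output class belong to the surrounding library and are omitted):
import logging
import logging.handlers

def _demo_rotating_log(path='service.log'):
    handler = logging.handlers.RotatingFileHandler(
        path, maxBytes=10 * 1024 * 1024, backupCount=5)  # roll over at ~10 MiB, keep 5 backups
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logging.getLogger('demo.service').addHandler(handler)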
def configure_logging(debug=False, verbose=True, stderr=True):
config = copy.deepcopy(LOG_CONFIG)
for handler in config["handlers"].values():
if verbose:
handler["level"] = "INFO"
if debug:
handler["level"] = "DEBUG"
if verbose:
config["handlers"]["stderr"]["formatter"] = "verbose"
if debug:
config["handlers"]["stderr"]["formatter"] = "debug"
if stderr:
config["loggers"][LOG_NAMESPACE]["handlers"].append("stderr")
logging.config.dictConfig(config)
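# LOG_CONFIG and LOG_NAMESPACE are defined elsewhere in that project; a
# plausible (assumed) shape that configure_logging() could mutate looks like:
_DEMO_LOG_NAMESPACE = 'myapp'
_DEMO_LOG_CONFIG = {
    'version': 1,
    'formatters': {
        'verbose': {'format': '%(asctime)s %(levelname)s %(name)s %(message)s'},
        'debug': {'format': '%(asctime)s %(levelname)s %(name)s:%(lineno)d %(message)s'},
    },
    'handlers': {
        'stderr': {'class': 'logging.StreamHandler', 'level': 'WARNING'},
    },
    'loggers': {
        _DEMO_LOG_NAMESPACE: {'level': 'DEBUG', 'handlers': []},
    },
}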
def __syslog_handler_init(self):
"""
Initialize the syslog handler if it hasn't been
"""
if self.syslog_handler is None:
try:
# TODO: /dev/log is Linux-specific.
self.syslog_handler = logging.handlers.SysLogHandler(
'/dev/log', facility=self.facility)
self.syslog_handler.setFormatter(
logging.Formatter(
fmt='%(name)s %(levelname)-8s %(message)s'
)
)
self.logger.addHandler(self.syslog_handler)
        except Exception:
            self.__syslog_handler_deinit()
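# Stdlib sketch of the syslog wiring above; /dev/log is Linux-specific, so a
# UDP syslog address is used as a portable fallback (values are illustrative):
import logging
import logging.handlers

def _demo_syslog_logging():
    try:
        handler = logging.handlers.SysLogHandler(
            address='/dev/log', facility=logging.handlers.SysLogHandler.LOG_USER)
    except OSError:
        handler = logging.handlers.SysLogHandler(address=('localhost', 514))
    handler.setFormatter(logging.Formatter('%(name)s %(levelname)-8s %(message)s'))
    logging.getLogger('demo.syslog').addHandler(handler)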
def getLogger(self):
''' Initialize and load log handlers '''
logger = logging.getLogger(self.proc_name)
logger.setLevel(logging.INFO)
if "debug" in self.config['logging']:
if self.config['logging']['debug']:
logger.setLevel(logging.DEBUG)
# Load and add a handler for each logging mechanism
for loghandler in self.config['logging']['plugins'].keys():
            plugin = __import__("plugins.logging." + loghandler, globals(),
                                locals(), ['Logger'], 0)  # level 0 = absolute import (Python 3)
lh = plugin.Logger(config=self.config, proc_name=self.proc_name)
logger.addHandler(lh.setup())
return logger
def init_logging():
main_logger = logging.getLogger()
formatter = logging.Formatter(
fmt='%(asctime)s.%(msecs)03d %(levelname)-8s [%(name)s] %(message)s'
, datefmt='%Y-%m-%d %H:%M:%S')
handler_stream = logging.StreamHandler(sys.stdout)
handler_stream.setFormatter(formatter)
main_logger.addHandler(handler_stream)
if LOG_TO_FILE:
handler_file = logging.handlers.RotatingFileHandler("debug.log"
, maxBytes = 2**24
, backupCount = 10)
handler_file.setFormatter(formatter)
main_logger.addHandler(handler_file)
main_logger.setLevel(logging.DEBUG)
return main_logger
# ============================================================================
def setup_logging(app):
"""Setup logging."""
from logging.handlers import RotatingFileHandler
from logging import Formatter
log_file_path = app.config.get('LOG_FILE')
log_level = app.config.get('LOG_LEVEL', logging.WARN)
if log_file_path: # pragma: no cover
file_handler = RotatingFileHandler(log_file_path)
file_handler.setFormatter(Formatter(
'%(name)s:%(levelname)s:[%(asctime)s] %(message)s '
'[in %(pathname)s:%(lineno)d]'
))
file_handler.setLevel(log_level)
app.logger.addHandler(file_handler)
logger = logging.getLogger('pybossa')
logger.setLevel(log_level)
logger.addHandler(file_handler)
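# Hypothetical usage of the Flask helper above (the app object and config keys
# are assumptions, not part of this snippet):
# from flask import Flask
# app = Flask(__name__)
# app.config['LOG_FILE'] = '/var/log/pybossa/app.log'
# app.config['LOG_LEVEL'] = logging.INFO
# setup_logging(app)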
def make_logger(base_dir=None, log_name=None, log_fn=None,
level=logging.DEBUG, fmt=None,
datefmt=None):
# if log_name is None, get the root logger
logger = logging.getLogger(log_name)
logger.setLevel(level)
if base_dir is None:
base_dir = config.log_dir
# do not add 2 handlers to one logger by default
if len(logger.handlers) == 0:
if log_fn is None:
if log_name is None:
log_fn = get_root_log_fn()
else:
log_fn = log_name + '.' + log_suffix
logger.addHandler(make_file_handler(base_dir, log_fn,
fmt=fmt, datefmt=datefmt))
return logger
def create_logger(logger_name, handlers, level=logging.INFO,
formatter=LOG_FORMATTER):
"""??????
:param logger_name ????
:param filename ??????
:param level ????
:param formatter ??????
:param handlers ??????
"""
logger = logging.getLogger(logger_name)
logger.setLevel(level)
for handler in handlers:
        handler.setFormatter(logging.Formatter(formatter))
logger.addHandler(handler)
return logger
# GF_LOG = create_logger("girlfriend", (stdout_handler(),))
def Logger(name, **kargs):
""" Create and return logger """
path_dirs = PathDirs(**kargs)
logging.captureWarnings(True)
logger = logging.getLogger(name)
logger.setLevel(logging.INFO)
handler = logging.handlers.WatchedFileHandler(os.path.join(
path_dirs.meta_dir, "vent.log"))
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s:%(lineno)-4d - '
'%(levelname)s - %(message)s')
handler.setFormatter(formatter)
if not len(logger.handlers):
logger.addHandler(handler)
return logger
def _cli_log_message(msg, logger_name=None, level="INFO"):
"""
Log a single message to Flightlog. Intended for CLI usage. Calling this
function multiple times within the same process will configure duplicate
handlers and result in duplicate messages.
"""
logger = logging.getLogger(logger_name)
levelnum = logging.getLevelName(level.upper())
try:
int(levelnum)
except ValueError:
raise ValueError("level must be one of DEBUG, INFO, WARNING, ERROR, CRITICAL")
handler = FlightlogHandler(background=False)
logger.addHandler(handler)
logger.setLevel(levelnum)
if msg == "-":
msg = sys.stdin.read()
for line in msg.splitlines():
if line:
logger.log(levelnum, line)
exit_code = 0
return None, exit_code
def init_app(cls, app):
Config.init_app(app)
# email errors to the administrators
import logging
from logging.handlers import SMTPHandler
credentials = None
secure = None
if getattr(cls, 'MAIL_USERNAME', None) is not None:
credentials = (cls.MAIL_USERNAME, cls.MAIL_PASSWORD)
if getattr(cls, 'MAIL_USE_TLS', None):
secure = ()
mail_handler = SMTPHandler(
mailhost=(cls.MAIL_SERVER, cls.MAIL_PORT),
fromaddr=cls.CIRCULATE_MAIL_SENDER,
toaddrs=[cls.CIRCULATE_ADMIN],
subject=cls.CIRCULATE_MAIL_SUBJECT_PREFIX + ' Application Error',
credentials=credentials,
secure=secure)
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)
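# Stdlib-only sketch of the SMTPHandler wiring above (host, addresses and
# credentials are placeholders, not real settings):
import logging
from logging.handlers import SMTPHandler

def _demo_mail_on_error():
    mail_handler = SMTPHandler(
        mailhost=('smtp.example.com', 587),
        fromaddr='noreply@example.com',
        toaddrs=['admin@example.com'],
        subject='[demo] Application Error',
        credentials=('user', 'password'),
        secure=())  # an empty tuple upgrades the connection with STARTTLS
    mail_handler.setLevel(logging.ERROR)  # only ERROR and above are mailed
    logging.getLogger('demo.app').addHandler(mail_handler)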
def loggingInit(logname):
isExists = os.path.exists('../log')
if not isExists:
os.mkdir('../log')
LogExists = os.path.exists(logname)
if not LogExists:
f = open(logname, 'w')
f.close()
log = logging.getLogger(logname)
log.setLevel(logging.DEBUG)
    logHandler = logging.handlers.RotatingFileHandler(logname, maxBytes=10 * 1024 * 1024, backupCount=5)
logHandler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
logHandler.setFormatter(formatter)
log.addHandler(logHandler)
return log
def loglevel(level=logging.DEBUG, update_custom_handlers=False):
"""
Set the minimum loglevel for the default logger (`logzero.logger`).
    This reconfigures only the internal handlers of the default logger (e.g. stream and logfile).
You can also update the loglevel for custom handlers by using `update_custom_handlers=True`.
:arg int level: Minimum `logging-level <https://docs.python.org/2/library/logging.html#logging-levels>`_ to display (default: `logging.DEBUG`).
:arg bool update_custom_handlers: If you added custom handlers to this logger and want this to update them too, you need to set `update_custom_handlers` to `True`
"""
logger.setLevel(level)
# Reconfigure existing internal handlers
for handler in list(logger.handlers):
if hasattr(handler, LOGZERO_INTERNAL_LOGGER_ATTR) or update_custom_handlers:
# Don't update the loglevel if this handler uses a custom one
if hasattr(handler, LOGZERO_INTERNAL_HANDLER_IS_CUSTOM_LOGLEVEL):
continue
# Update the loglevel for all default handlers
handler.setLevel(level)
global _loglevel
_loglevel = level
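# Example usage of loglevel() via the logzero package that this snippet comes
# from (illustrative):
# import logging
# import logzero
# logzero.loglevel(logging.WARNING)   # hide DEBUG/INFO on logzero's default logger
# logzero.logger.info("suppressed")
# logzero.logger.error("still shown")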
def formatter(formatter, update_custom_handlers=False):
"""
Set the formatter for all handlers of the default logger (``logzero.logger``).
This reconfigures only the logzero internal handlers by default, but you can also
reconfigure custom handlers by using ``update_custom_handlers=True``.
Beware that setting a formatter which uses colors also may write the color codes
to logfiles.
:arg Formatter formatter: `Python logging Formatter object <https://docs.python.org/2/library/logging.html#formatter-objects>`_ (by default uses the internal LogFormatter).
:arg bool update_custom_handlers: If you added custom handlers to this logger and want this to update them too, you need to set ``update_custom_handlers`` to `True`
"""
for handler in list(logger.handlers):
if hasattr(handler, LOGZERO_INTERNAL_LOGGER_ATTR) or update_custom_handlers:
handler.setFormatter(formatter)
global _formatter
_formatter = formatter