def __init__(self, logger=None, mlogger=None):
    """Initialize the Settings container with default values for all CLI flags.

    @param logger: holds logger for where to log info/warnings/errors
        If None, a default logger will be created.
    @type logger: L{logging.Logger}
    @param mlogger: holds mlogger for where to log info/warnings/errors
        If None, a default mlogger will be created.
    @type mlogger: L{utils.MLogger}
    """
    super(Settings, self).__init__(logger, mlogger)
    # Boolean command-line flags; all default to "not requested".
    self.TArg = False
    self.EArg = False
    self.OArg = False
    self.DArg = False
    self.MArg = False
    self.NArg = False
    self.XArg = False  # -2, --twice
    # Values supplied later by the user/environment; None means "not set yet".
    self.PArg = None
    self.RArg = None  # -r read PIN
    self.AArg = None  # -R read passphrase
    self.SArg = False  # Safety check
    self.WArg = False  # Wipe plaintxt after encryption
    self.QArg = False  # noconfirm
    self.inputFiles = []  # list of input filenames
# Example source code using the Python Logger() class
def __init__(self, terminalMode=None, logger=None, qtextbrowser=None):
    """
    Get as many necessary parameters upfront as possible, so the user
    does not have to provide them later on each call.

    @param terminalMode: log only to terminal?
    @type terminalMode: C{bool}
    @param logger: holds logger for where to log info/warnings/errors
    @type logger: L{logging.Logger}
    @param qtextbrowser: holds GUI widget for where to log info/warnings/errors
    @type qtextbrowser: L{PyQt5.QtWidgets.QTextBrowser}
    """
    # Store the collaborators as-is; no defaulting happens here.
    self.terminalMode = terminalMode
    self.logger = logger
    self.qtextbrowser = qtextbrowser
    # qtextbrowser text will be created by assembling:
    # qtextheader + qtextContent + qtextTrailer
    self.qtextheader = u''
    self.qtextcontent = u''
    self.qtexttrailer = u''
def logger(self, logger):
    """Replace the logger used by this object.

    :param logger: the `~logging.Logger` instance to use from now on
    """
    self._logger = logger
def getLogger():
    """Return the process-wide logger, creating and configuring it on first use.

    The configured logger is cached on the Logger class, so repeated calls
    share one instance and handlers are attached only once. Output goes to
    a size-rotated file and, optionally, to the console.
    """
    if Logger.logger is not None:
        return Logger.logger

    log = logging.Logger(Logger.log_name)
    fmt = logging.Formatter(Logger.log_formatter,
                            datefmt=Logger.log_formatter_datefmt)

    # Console output is optional; the config flag is stored as the string "True".
    if Logger.log_print == "True":
        console = logging.StreamHandler()
        console.setFormatter(fmt)
        log.addHandler(console)

    # File output is always enabled, with size-based rotation.
    rotating = logging.handlers.RotatingFileHandler(
        filename=Logger.log_file,
        maxBytes=Logger.log_max_byte,
        backupCount=Logger.log_backup_count,
    )
    rotating.setFormatter(fmt)
    log.addHandler(rotating)

    log.setLevel(Logger.levels.get(Logger.log_level))
    Logger.logger = log
    return log
def getLogger():
    """Return the shared logger cached on the Logger class, building it once.

    Subsequent calls return the same instance, so handlers are only
    attached a single time.
    """
    # Reuse the already-configured logger if it exists.
    if Logger.logger is not None:
        return Logger.logger
    Logger.logger = logging.Logger(Logger.log_name)
    # Console output is optional; the config flag is stored as the string "True".
    if Logger.log_print == "True":
        print_handler = logging.StreamHandler()
        print_fmt = logging.Formatter(
            Logger.log_formatter,
            datefmt=Logger.log_formatter_datefmt
        )
        print_handler.setFormatter(print_fmt)
        Logger.logger.addHandler(print_handler)
    # File output is always enabled, with size-based rotation.
    file_handler = logging.handlers.RotatingFileHandler(
        filename = Logger.log_file,
        maxBytes = Logger.log_max_byte,
        backupCount = Logger.log_backup_count
    )
    file_fmt = logging.Formatter(
        Logger.log_formatter,
        datefmt=Logger.log_formatter_datefmt
    )
    file_handler.setFormatter(file_fmt)
    Logger.logger.addHandler(file_handler)
    # Logger.levels presumably maps level names to logging constants — confirm.
    Logger.logger.setLevel(Logger.levels.get(Logger.log_level))
    return Logger.logger
def __init__(self, embeddings, nbow, vocabulary_min=50, vocabulary_max=500,
             vocabulary_optimizer=None,
             verbosity=logging.INFO, main_loop_log_interval=60):
    """
    Initializes a new instance of WMD class.

    :param embeddings: The embeddings model, see WMD.embeddings.
    :param nbow: The nBOW model, see WMD.nbow.
    :param vocabulary_min: The minimum bag size, see \
        :py:attr:`~wmd.WMD.vocabulary_min`.
    :param vocabulary_max: The maximum bag size, see \
        :py:attr:`~wmd.WMD.vocabulary_max`.
    :param vocabulary_optimizer: The bag size reducer, see \
        :py:attr:`~wmd.WMD.vocabulary_optimizer`. Defaults to a fresh \
        :class:`TailVocabularyOptimizer` per instance.
    :param verbosity: The log verbosity level.
    :param main_loop_log_interval: Time frequency of logging updates, see \
        :py:attr:`~wmd.WMD.main_loop_log_interval`.
    :type embeddings: object with :meth:`~object.__getitem__`
    :type nbow: object with :meth:`~object.__iter__` and \
        :meth:`~object.__getitem__`
    :type vocabulary_min: int
    :type vocabulary_max: int
    :type vocabulary_optimizer: callable
    :type verbosity: int
    :type main_loop_log_interval: int
    :raises TypeError: if some of the arguments are invalid.
    :raises ValueError: if some of the arguments are invalid.
    """
    # Build the default optimizer per instance. The previous default of
    # TailVocabularyOptimizer() in the signature was evaluated once at
    # function-definition time, so a single optimizer instance was shared
    # by every WMD object — any state it carried leaked between instances.
    if vocabulary_optimizer is None:
        vocabulary_optimizer = TailVocabularyOptimizer()
    self._relax_cache = None
    self._exact_cache = None
    self._centroid_cache = None
    self.embeddings = embeddings
    self.nbow = nbow
    self.vocabulary_min = vocabulary_min
    self.vocabulary_max = vocabulary_max
    self.vocabulary_optimizer = vocabulary_optimizer
    self._log = logging.getLogger("WMD")
    # setLevel() accepts numeric levels and level-name strings alike, which
    # is exactly what the original `logging.Logger("", verbosity).level`
    # trick achieved via Logger's internal level check.
    self._log.setLevel(verbosity)
    self.main_loop_log_interval = main_loop_log_interval
def logger(self) -> logging.Logger:
    """Return this plugin's logger, creating it lazily on first access.

    :return: a logger named after the plugin manifest's ``name`` entry
    """
    if self._logger is None:
        plugin_name = self.manifest.get("name")
        self._logger = logging.getLogger(plugin_name)
    return self._logger
def logger(self):
    """Create and return a logger that logs to both console and
    a log file.

    Use :meth:`open_log` to open the log file in Console.

    :returns: an initialised :class:`~logging.Logger`
    """
    if self._logger:
        return self._logger

    # getLogger returns a shared instance, so attach handlers only once.
    workflow_logger = logging.getLogger('workflow')
    if not workflow_logger.handlers:
        formatter = logging.Formatter(
            '%(asctime)s %(filename)s:%(lineno)s'
            ' %(levelname)-8s %(message)s',
            datefmt='%H:%M:%S')
        # Rotate the file at 1 MiB, keeping a single backup.
        file_handler = logging.handlers.RotatingFileHandler(
            self.logfile,
            maxBytes=1024 * 1024,
            backupCount=1)
        file_handler.setFormatter(formatter)
        workflow_logger.addHandler(file_handler)

    workflow_logger.setLevel(logging.DEBUG)
    self._logger = workflow_logger
    return self._logger
def logger(self, logger):
    """Install *logger* as this object's active logger.

    :type logger: `~logging.Logger` instance
    """
    self._logger = logger
def logger(self):
    """Create and return a logger that logs to both console and
    a log file.

    Use :meth:`open_log` to open the log file in Console.

    :returns: an initialised :class:`~logging.Logger`
    """
    # Return the cached logger if one was already built.
    if self._logger:
        return self._logger
    # Initialise new logger and optionally handlers
    logger = logging.getLogger('workflow')
    if not len(logger.handlers):  # Only add one set of handlers
        fmt = logging.Formatter(
            '%(asctime)s %(filename)s:%(lineno)s'
            ' %(levelname)-8s %(message)s',
            datefmt='%H:%M:%S')
        # Rotate the log file at 1 MiB, keeping one backup.
        logfile = logging.handlers.RotatingFileHandler(
            self.logfile,
            maxBytes=1024*1024,
            backupCount=1)
        logfile.setFormatter(fmt)
        logger.addHandler(logfile)
        # console = logging.StreamHandler()
        # console.setFormatter(fmt)
        # logger.addHandler(console)
    logger.setLevel(logging.DEBUG)
    self._logger = logger
    return self._logger
def logger(self, logger):
    """Set a custom logger.

    :param logger: The logger to use
    :type logger: `~logging.Logger` instance
    """
    # Stored on the instance; later reads go through self._logger.
    self._logger = logger
def __init__(self, name = ''):
    """Build a logger writing to both a rotating file and a colorized console.

    File path, maximum size and the per-handler levels are read from the
    [LOGGER] section of the application configuration.

    :param name: name given to the underlying logging.Logger
    """
    conf = com_config.Config()
    self.config = conf.getconfig()
    # Logger level DEBUG lets the handlers do the actual filtering.
    self.logger = logging.Logger(name, logging.DEBUG)
    self.logger.name = name
    # Formatter
    formatterfile = logging.Formatter('%(asctime)s %(levelname)s : %(name)s - %(message)s', datefmt='%d/%m/%Y %H:%M:%S')
    formatterconsole = colorlog.ColoredFormatter('%(asctime)s %(log_color)s%(levelname)s : %(name)s - %(message)s', datefmt='%d/%m/%Y %H:%M:%S',
                                                 log_colors={'DEBUG': 'white', 'INFO': 'green',
                                                             'WARNING': 'bold_yellow', 'ERROR': 'bold_red',
                                                             'CRITICAL': 'bold_red'})
    # First logger (file); config levels are stored as strings, hence int().
    self.logger.setLevel(logging.DEBUG)
    file_handler = RotatingFileHandler(self.config['LOGGER']['logfile'], 'a', int(self.config['LOGGER']['logfilesize']), 1)
    file_handler.setLevel(int(self.config['LOGGER']['levelfile']))
    file_handler.setFormatter(formatterfile)
    self.logger.addHandler(file_handler)
    # second logger (console)
    steam_handler = logging.StreamHandler()
    steam_handler.setLevel(int(self.config['LOGGER']['levelconsole']))
    steam_handler.setFormatter(formatterconsole)
    self.logger.addHandler(steam_handler)
def setUp(self):
    """Prepare a private Logger and a fresh option parser for each test."""
    super(EnablePrettyLoggingTest, self).setUp()
    self.options = OptionParser()
    define_logging_options(self.options)
    # Constructing Logger directly bypasses getLogger's cache, so this
    # instance is private to the test; propagate=False keeps its records
    # out of the root logger's handlers.
    self.logger = logging.Logger('tornado.test.log_test.EnablePrettyLoggingTest')
    self.logger.propagate = False
def setUp(self):
    """Build a LogFormatter with faked color codes and capture output in a file."""
    self.formatter = LogFormatter(color=False)
    # Fake color support. We can't guarantee anything about the $TERM
    # variable when the tests are run, so just patch in some values
    # for testing. (testing with color off fails to expose some potential
    # encoding issues from the control characters)
    self.formatter._colors = {
        logging.ERROR: u("\u0001"),
    }
    self.formatter._normal = u("\u0002")
    # construct a Logger directly to bypass getLogger's caching
    self.logger = logging.Logger('LogFormatterTest')
    self.logger.propagate = False
    # Records go to a file in a fresh temp dir via self.make_handler.
    self.tempdir = tempfile.mkdtemp()
    self.filename = os.path.join(self.tempdir, 'log.out')
    self.handler = self.make_handler(self.filename)
    self.handler.setFormatter(self.formatter)
    self.logger.addHandler(self.handler)
def setUp(self):
    """Create per-test options and an isolated, non-propagating Logger."""
    super(EnablePrettyLoggingTest, self).setUp()
    self.options = OptionParser()
    define_logging_options(self.options)
    # Direct Logger construction avoids getLogger's shared registry;
    # propagate=False prevents test records reaching the root handlers.
    self.logger = logging.Logger('tornado.test.log_test.EnablePrettyLoggingTest')
    self.logger.propagate = False
def setUp(self):
    """Set up a LogFormatter with stubbed color escapes and a file-backed handler."""
    self.formatter = LogFormatter(color=False)
    # Fake color support. We can't guarantee anything about the $TERM
    # variable when the tests are run, so just patch in some values
    # for testing. (testing with color off fails to expose some potential
    # encoding issues from the control characters)
    self.formatter._colors = {
        logging.ERROR: u("\u0001"),
    }
    self.formatter._normal = u("\u0002")
    # construct a Logger directly to bypass getLogger's caching
    self.logger = logging.Logger('LogFormatterTest')
    self.logger.propagate = False
    # Formatted records are written to a temp file via self.make_handler.
    self.tempdir = tempfile.mkdtemp()
    self.filename = os.path.join(self.tempdir, 'log.out')
    self.handler = self.make_handler(self.filename)
    self.handler.setFormatter(self.formatter)
    self.logger.addHandler(self.handler)
def __call__(self, logger, **kwargs):
    """Coerce *logger* into a Logger before delegating to the parent factory.

    NOTE(review): load_item appears to resolve a logger spec/name into a
    Logger instance — confirm against its definition. A TypeError from it
    is deliberately ignored, so an unconvertible value is passed through
    unchanged.
    """
    try:
        logger = self.load_item(logger, Logger)
    except TypeError:  # pragma: no cover
        pass
    return super(LoggerPluginFactory, self).__call__(logger=logger, **kwargs)
def get_logger(self):
    """
    Override this method to designate the logger for the application

    :return: a :py:class:`logging.Logger` instance
    """
    # Configure Tornado's colored/pretty log output before handing back
    # the shared application logger.
    enable_pretty_logging()
    app_logger = logging.getLogger("tornado.application")
    return app_logger
# endregion
# region Can be called by user
def __init__(self, verbosity=5):
    """Set up the UDP transport, the IAX protocol helper and logging.

    :param verbosity: verbosity level forwarded to the Logger wrapper
    """
    self.udp_ts = UDPTransceiver()
    self.iax_proto = IAXProto()
    # Keep only the bound log method; the Logger wrapper itself is discarded.
    self.log = Logger(verbosity).log
    # Starts empty; presumably filled by scan/discovery methods not shown here.
    self.liveUsers = []
def __init__(self, verbosity=5):
    """Set up the UDP transport, the IAX protocol helper and logging.

    :param verbosity: verbosity level forwarded to the Logger wrapper
    """
    self.udp_ts = UDPTransceiver()
    self.iax_proto = IAXProto()
    # Keep only the bound log method; the Logger wrapper itself is discarded.
    self.log = Logger(verbosity).log
    # Starts empty; presumably filled by scan/discovery methods not shown here.
    self.liveHosts = []
def build_yt_api():
    """Build the YouTube API for future use"""
    # Guard: bail out early when no Google API key has been configured.
    keys = datatools.get_data()["discord"]["keys"]
    if "google_api_key" not in keys:
        logger.warning("No API key found with name 'google_api_key'")
        logger.info("Please add your Google API key with name 'google_api_key' "
                    "in data.json to use YouTube features of the music module")
        return False
    logger.debug("Building YouTube discovery API")
    try:
        global ytdiscoveryapi
        ytdiscoveryapi = googleapiclient.discovery.build(
            "youtube", "v3", developerKey=keys["google_api_key"])
        logger.debug("YouTube API build successful")
        return True
    except Exception as e:
        logger.exception(e)
        logger.warning("HTTP error connecting to YouTube API, YouTube won't be available")
        return False
def build_sc_api():
    """Build the SoundCloud API for future use.

    Returns True when the module-level `scclient` was created, False when
    the client id is missing or construction fails.
    """
    data = datatools.get_data()
    # Guard: a client id is required to talk to SoundCloud at all.
    if "soundcloud_client_id" not in data["discord"]["keys"]:
        logger.warning("No API key found with name 'soundcloud_client_id'")
        logger.info("Please add your SoundCloud client id with name 'soundcloud_client_id' "
                    "in data.json to use Soundcloud features of the music module")
        return False
    try:
        # The client is stored globally so other module functions can use it.
        global scclient
        scclient = soundcloud.Client(client_id=data["discord"]["keys"]["soundcloud_client_id"])
        logger.debug("SoundCloud build successful")
        return True
    except Exception as e:
        # Best-effort: log the failure and report it via the return value.
        logger.exception(e)
        return False
def build_spotify_api():
    """Build the Spotify API for future use.

    Requires both a client id and a client secret in the config; returns
    True when the module-level `spclient` was created, False otherwise.
    """
    data = datatools.get_data()
    # Guard: both credentials are needed for the client-credentials flow.
    if "spotify_client_id" not in data["discord"]["keys"]:
        logger.warning("No API key found with name 'spotify_client_id'")
        logger.info("Please add your Spotify client id with name 'spotify_client_id' "
                    "in data.json to use Spotify features of the music module")
        return False
    if "spotify_client_secret" not in data["discord"]["keys"]:
        logger.warning("No API key found with name 'spotify_client_secret'")
        logger.info("Please add your Spotify client secret with name 'spotify_client_secret' "
                    "in data.json to use Spotify features of the music module")
        return False
    try:
        # The client is stored globally so other module functions can use it.
        global spclient
        client_credentials_manager = SpotifyClientCredentials(
            data["discord"]["keys"]["spotify_client_id"],
            data["discord"]["keys"]["spotify_client_secret"])
        spclient = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
        logger.debug("Spotify build successful")
        return True
    except Exception as e:
        # Best-effort: log the failure and report it via the return value.
        logger.exception(e)
        return False
def get_sc_tracks(result):
    """Resolve a SoundCloud API result into a list of [stream_url, title] pairs.

    Handles "track", "user" and "playlist" results; any other kind yields
    None. User and playlist lookups fetch up to 50 tracks via scclient.
    """
    kind = result.kind
    if kind == "track":
        logger.debug("SoundCloud Track {}".format(result.title))
        return [[result.stream_url, result.title]]
    if kind == "user":
        logger.debug("SoundCloud User {}".format(result.username))
        user_tracks = scclient.get("/users/{}/tracks".format(result.id), limit=50)
        return [[t.stream_url, t.title] for t in user_tracks]
    if kind == "playlist":
        logger.debug("SoundCloud Playlist {}".format(result.title))
        playlist = scclient.get("/playlists/{}".format(result.id), limit=50)
        return [[t["stream_url"], t["title"]] for t in playlist.tracks]
    return None
def log_error(self, log_msg, sql='', err_msg= '', ex = None):
    """Format and record a framed error message, or raise it instead.

    :param log_msg: description of the operation that failed
    :param sql: the SQL statement involved, if any
    :param err_msg: pre-built error text; exception args are appended to it
    :param ex: optional exception whose args are folded into err_msg
    :raises Exception: when config['on_errors'] == 'throw'
    """
    if ex:
        # Fold every exception argument into the message text.
        for arg in ex.args:
            err_msg += str(arg)
    # Trim trailing newlines so the framed message stays compact.
    while err_msg.endswith('\n'):
        err_msg = err_msg[:-1]
    msg = """
<red>==========================================================================
ERROR tijdens : {}
SQL: {}
ERROR: {}
==========================================================================</>""".format(log_msg, sql, err_msg)
    if 'on_errors' in self.config and self.config['on_errors'] == 'throw':
        raise Exception(msg)
    else:
        # Log without the <red>...</> markup, but keep it for console output.
        self.logger.error(self.strip_formatting_tags(msg))
        self.errors.append(msg)
        if self.to_console:
            Logger.pprint(msg)
            # wait briefly so the log message does not get tangled up
            # with the error output
            sleep(0.1)
def log(request):
    """Return a logging.Logger child scoped to the current test session.

    The child name is the test node's name with any parametrize suffix
    ('[...]') stripped off.
    """
    return pytest.log.getChild(request.node.name.partition('[')[0])
def __init__(self, opsim_db, db_config=None, logger=None):
    """
    Constructor.

    Parameters
    ----------
    opsim_db : str
        sqlite3 db file containing observing plan.
    db_config : dict, optional
        Dictionary of database connection parameters. Parameters
        for connecting to fatboy.phys.washington.edu from a
        whitelisted machine will be used.
    logger : logging.Logger, optional
        Logger object. If None, an INFO-level logger writing to
        stdout is created.
    """
    self.gen = ObservationMetaDataGenerator(database=opsim_db,
                                            driver='sqlite')
    if db_config is not None:
        self.db_config = db_config
    else:
        # Default connection parameters for the UW catalog simulation DB.
        self.db_config = dict(database='LSSTCATSIM',
                              port=1433,
                              host='fatboy.phys.washington.edu',
                              driver='mssql+pymssql')
    if logger is None:
        # NOTE: basicConfig configures the root logger globally, and the
        # root logger is what gets returned here.
        logging.basicConfig(format="%(message)s", level=logging.INFO,
                            stream=sys.stdout)
        logger = logging.getLogger()
    self.logger = logger
def start_logger(filename, level):
    """Create (once) and return the module-wide SWAN logger.

    Repeated calls return the logger built on the first call; the
    filename/level arguments are ignored after that.

    :param filename: path of the log file to append to
    :param level: level name (e.g. "debug"), resolved via the logging module
    """
    global swan_logger
    if swan_logger is not None:
        return swan_logger

    numeric_level = getattr(logging, level.upper())

    handler = logging.FileHandler(filename=filename)
    handler.setLevel(numeric_level)

    swan_logger = logging.getLogger(LOGGERNAME)
    swan_logger.setLevel(numeric_level)
    swan_logger.addHandler(handler)
    return swan_logger
def get_logger():
    """Get the current logger object

    :returns: logger
    :rtype: logging.Logger
    """
    # Reading a module-level name requires no `global` declaration;
    # returns None until start_logger has been called.
    return swan_logger
def __init__(self):
    """Initialize run-state bookkeeping with empty/default values."""
    self.logger = None  # type: logging.Logger  (assigned later)
    self.start_time = datetime.now()  # type: datetime
    self.last_start_time = datetime.now()  # type: datetime
    self.errors = []  # type: List[str]  (collected error messages)
    self.to_console = True  # type: bool  (also echo output to the console)
    self.filename = ''  # type: str