def main(args):
    logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
    logging.info("LEDBAT TEST SOURCE starting. Target: {}".format(args.target_ip))

    loop = asyncio.get_event_loop()
    listen = loop.create_datagram_endpoint(lambda: PeerProtocol(args),
                                           local_addr=("0.0.0.0", 6778))
    transport, protocol = loop.run_until_complete(listen)

    # On Windows, schedule a periodic no-op callback so KeyboardInterrupt
    # can be delivered while the loop is idle.
    if os.name == 'nt':
        def wakeup():
            # Call again later
            loop.call_later(0.5, wakeup)
        loop.call_later(0.5, wakeup)

    try:
        loop.run_forever()
    except KeyboardInterrupt:
        pass
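# The snippet above assumes a PeerProtocol class implementing asyncio's
# datagram protocol interface. A minimal sketch of what such a class could
# look like (hypothetical; the real class would carry the LEDBAT test logic):
import asyncio
import logging

class PeerProtocol(asyncio.DatagramProtocol):
    def __init__(self, args):
        self.args = args
        self.transport = None

    def connection_made(self, transport):
        # Keep the transport so datagrams can be sent later.
        self.transport = transport

    def datagram_received(self, data, addr):
        logging.info("Received %d bytes from %s", len(data), addr)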
def setup(args, pipeline, runmod, injector):
    """Load configuration"""
    logging.basicConfig(
        format='[%(asctime)s] [%(levelname)s] %(name)s: %(message)s',
        level=logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S')

    _globals['gransk'] = gransk.api.API(injector)
    _globals['config'] = _globals['gransk'].config

    if pipeline:
        _globals['gransk'].pipeline = pipeline

    if _globals['gransk'].pipeline.get_service('related_entities'):
        _globals['gransk'].pipeline.get_service('related_entities').load_all(_globals['config'])

    if _globals['gransk'].pipeline.get_service('related_documents'):
        _globals['gransk'].pipeline.get_service('related_documents').load_all(_globals['config'])
def __init__(self, debug=False, logfile=None):
    logging.Logger.__init__(self, 'VirtualBMC')
    try:
        if logfile is not None:
            self.handler = logging.FileHandler(logfile)
        else:
            self.handler = logging.StreamHandler()

        formatter = logging.Formatter(DEFAULT_LOG_FORMAT)
        self.handler.setFormatter(formatter)
        self.addHandler(self.handler)

        if debug:
            self.setLevel(logging.DEBUG)
        else:
            self.setLevel(logging.INFO)
    except IOError as e:
        # Swallow only permission errors when opening the log file;
        # re-raise anything else.
        if e.errno == errno.EACCES:
            pass
        else:
            raise
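# Usage sketch (the class name below is hypothetical; only __init__ is shown
# above). Because the class subclasses logging.Logger, it is used like any
# other logger once constructed:
#
#     logger = VirtualBMCLogger(debug=True, logfile='/var/log/vbmc.log')
#     logger.debug('IPMI command received')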
def configure_logging(debug):
    '''Sets the data kennel logger to appropriate levels of chattiness.'''
    default_logger = logging.getLogger('')
    datadog_logger = logging.getLogger('datadog.api')
    requests_logger = logging.getLogger('requests')

    if debug:
        default_logger.setLevel(logging.DEBUG)
        datadog_logger.setLevel(logging.INFO)
        requests_logger.setLevel(logging.INFO)
    else:
        default_logger.setLevel(logging.INFO)
        datadog_logger.setLevel(logging.WARNING)
        requests_logger.setLevel(logging.WARNING)

    stream_handler = logging.StreamHandler(sys.__stdout__)
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s'))
    default_logger.addHandler(stream_handler)
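# Illustrative effect of the configuration above: with debug=False the root
# logger emits INFO and above, while the chatty 'datadog.api' and 'requests'
# loggers are capped at WARNING.
#
#     configure_logging(debug=False)
#     logging.getLogger('myapp').info('visible')        # printed
#     logging.getLogger('requests').info('suppressed')  # filtered out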
@contextlib.contextmanager
def hidden_cursor(file):
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)
@contextlib.contextmanager
def open_spinner(message):
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
def parse_args():
    """ Parse the command line arguments """
    parser = argparse.ArgumentParser(
        description="Integrate Hugo and PhotoSwipe")
    parser.add_argument('-v', '--verbose', help="Verbose mode",
                        action="store_const", dest="loglevel", const=logging.INFO,
                        default=logging.WARNING)
    parser.add_argument('-f', '--fast', action="store_true",
                        help='Fast mode (tries fewer potential crops)')
    parser.add_argument('command', choices=['new', 'update', 'clean', 'init'],
                        help="action to do")
    parser.add_argument('album', nargs='?',
                        help="album to apply the action to")
    args = parser.parse_args()

    logging.basicConfig(level=args.loglevel, datefmt="[%Y-%m-%d %H:%M:%S]",
                        format="%(asctime)s - %(message)s")
    settings.verbose = args.loglevel == logging.INFO
    settings.fast = args.fast
    return args.command, args.album
def getLogger(self):
    ''' Initialize and load log handlers '''
    logger = logging.getLogger(self.proc_name)
    logger.setLevel(logging.INFO)
    if "debug" in self.config['logging']:
        if self.config['logging']['debug']:
            logger.setLevel(logging.DEBUG)

    # Load and add a handler for each logging mechanism
    for loghandler in self.config['logging']['plugins'].keys():
        plugin = __import__("plugins.logging." + loghandler, globals(),
                            locals(), ['Logger'], 0)  # level 0: absolute import (Python 3)
        lh = plugin.Logger(config=self.config, proc_name=self.proc_name)
        logger.addHandler(lh.setup())

    return logger
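# Hypothetical shape of the config mapping getLogger() expects, inferred from
# the keys it reads above; actual plugin names depend on what lives under
# plugins/logging/.
example_config = {
    'logging': {
        'debug': False,
        'plugins': {
            'syslog': {},   # imported as plugins.logging.syslog
            'console': {},  # imported as plugins.logging.console
        },
    },
}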
def main(argv=None):
    args = parse_arguments(argv)

    if args['very_verbose']:
        logging.basicConfig(level=logging.DEBUG)
    elif args['verbose']:
        logging.basicConfig(level=logging.INFO)
    else:
        logging.basicConfig()
    del args['verbose']
    del args['very_verbose']

    sc = SparkContext(appName="MLR: data collection pipeline")
    # spark info logging is incredibly spammy. Use warn to have some hope of
    # human decipherable output
    sc.setLogLevel('WARN')
    sqlContext = HiveContext(sc)

    run_pipeline(sc, sqlContext, **args)
def parse_arguments(argv):
    parser = argparse.ArgumentParser(description='...')
    parser.add_argument(
        '-b', '--brokers', dest='brokers', required=True, type=lambda x: x.split(','),
        help='Kafka brokers to bootstrap from as a comma separated list of <host>:<port>')
    parser.add_argument(
        '-m', '--max-request-size', dest='max_request_size', type=int, default=4*1024*1024*10,
        help='Max size of requests sent to the kafka broker. '
             + 'Defaults to 40MB.')
    parser.add_argument(
        '-w', '--num-workers', dest='n_workers', type=int, default=5,
        help='Number of workers to issue elasticsearch queries in parallel. '
             + 'Defaults to 5.')
    parser.add_argument(
        '-v', '--verbose', dest='verbose', default=False, action='store_true',
        help='Increase logging to INFO')
    parser.add_argument(
        '-vv', '--very-verbose', dest='very_verbose', default=False, action='store_true',
        help='Increase logging to DEBUG')
    args = parser.parse_args(argv)
    return dict(vars(args))
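# Illustrative invocation: parse a broker list and enable INFO logging.
#
#     args = parse_arguments(['-b', 'broker1:9092,broker2:9092', '-v'])
#     assert args['brokers'] == ['broker1:9092', 'broker2:9092']
#     assert args['verbose'] is True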
def __init__(self, ip_address, user, password=None, key_filename=None):
    self.ip_address = ip_address
    self.user = user
    self.password = password
    self.key_filename = key_filename
    self.connected = False
    self.shell = None
    self.logger.setLevel(logging.INFO)

    self.ssh = paramiko.SSHClient()
    self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    self.util = Utilvnf()
    # The with-statement closes the file; no explicit close() is needed.
    with open(self.util.test_env_config_yaml) as file_fd:
        test_env_config_yaml = yaml.safe_load(file_fd)

    self.ssh_revieve_buff = test_env_config_yaml.get("general").get(
        "ssh_receive_buffer")
def filter(self, rec):
    """
    Filter a record, adding the colors automatically:

    * error: red
    * warning: yellow

    :param rec: message to record
    """
    rec.zone = rec.module
    if rec.levelno >= logging.INFO:
        return True

    m = re_log.match(rec.msg)
    if m:
        rec.zone = m.group(1)
        rec.msg = m.group(2)

    if zones:
        return getattr(rec, 'zone', '') in zones or '*' in zones
    elif not verbose > 2:
        return False
    return True
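# A filter like this is attached to a handler (or logger); minimal sketch,
# assuming the surrounding module defines re_log, zones, and verbose, and
# that log_filter is an instance of the class defining filter() above:
#
#     handler = logging.StreamHandler()
#     handler.addFilter(log_filter)
#     logging.getLogger().addHandler(handler)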
def main():
    args = parse_args()
    logging.basicConfig(level=(logging.WARN if args.quiet else logging.INFO))

    # Don't allow more than 10 concurrent requests to the wayback machine
    concurrency = min(args.concurrency, 10)

    # Scrape results are stored in a temporary folder if no folder is specified
    target_folder = args.target_folder if args.target_folder else tempfile.gettempdir()
    logger.info('Writing scrape results in the folder {target_folder}'.format(target_folder=target_folder))

    # Parse the period entered by the user (raises an exception if the dates
    # are not correctly formatted)
    from_date = datetime.strptime(args.from_date, CLI_DATE_FORMAT)
    to_date = datetime.strptime(args.to_date, CLI_DATE_FORMAT)

    # The scraper downloads the elements matching the given xpath expression
    # into the target folder
    scraper = Scraper(target_folder, args.xpath)

    # Launch the scraping using the scraper previously instantiated
    scrape_archives(args.website_url, scraper.scrape, from_date, to_date,
                    args.user_agent, timedelta(days=args.delta), concurrency)
def init_logging(logfile):
    formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(module)s: %(message)s',
                                  datefmt='%m/%d/%Y %H:%M:%S')
    fh = logging.FileHandler(logfile)
    # ch = logging.StreamHandler()
    fh.setFormatter(formatter)
    # ch.setFormatter(formatter)
    # fh.setLevel(logging.INFO)
    # ch.setLevel(logging.INFO)
    # logging.getLogger().addHandler(ch)
    logging.getLogger().addHandler(fh)
    logging.getLogger().setLevel(logging.INFO)
    return logging
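# Typical use (illustrative): route root-logger output at INFO and above to
# app.log. Note that init_logging() returns the logging module itself.
#
#     log = init_logging('app.log')
#     log.info('pipeline started')  # lands in app.log via the FileHandler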