def kerberos(args):  # noqa
    print(settings.HEADER)
    import airflow.security.kerberos

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("kerberos", args.pid, args.stdout, args.stderr, args.log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            airflow.security.kerberos.run()

        stdout.close()
        stderr.close()
    else:
        airflow.security.kerberos.run()
Python pidfile() example source code
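The snippets on this page share one core pattern: build a PID lock file, hand it to daemon.DaemonContext, and run the program's work inside the context. Here is a minimal, self-contained sketch of that pattern; the path and main_loop() are illustrative placeholders, not taken from any project shown here.

import daemon
import daemon.pidfile


def main_loop():
    pass  # placeholder for the program's real work


if __name__ == '__main__':
    # A negative acquire timeout (as in the Airflow snippets' TimeoutPIDLockFile(pid, -1))
    # makes the lock fail fast with lockfile.AlreadyLocked instead of blocking
    # when another instance already holds the PID file.
    pid_lock = daemon.pidfile.TimeoutPIDLockFile('/tmp/example.pid', -1)  # illustrative path
    with daemon.DaemonContext(pidfile=pid_lock):
        main_loop()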
def main():
    args = get_options()
    config = ConfigParser.ConfigParser()
    config.read(args.conffile)
    if config.has_option('default', 'pidfile'):
        pid_fn = os.path.expanduser(config.get('default', 'pidfile'))
    else:
        pid_fn = '/var/run/germqtt.pid'

    if args.foreground:
        _main(args, config)
    else:
        pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
        with daemon.DaemonContext(pidfile=pid):
            _main(args, config)
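The second argument to TimeoutPIDLockFile above is an acquire timeout in seconds: this daemon waits up to 10 seconds for the lock before giving up. A small sketch of that behavior in isolation (the path is illustrative):

import lockfile
from daemon.pidfile import TimeoutPIDLockFile

lock = TimeoutPIDLockFile('/tmp/germqtt-example.pid', 10)
try:
    lock.acquire()  # waits up to 10 seconds for the current holder to release
    lock.release()
except lockfile.LockTimeout:
    print('gave up waiting for the PID file lock')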
def main():
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
                                     description='A cmdline tool to enhance PL9K theme for ZSH\n')
    subparsers = parser.add_subparsers(help='Use {subcommand} -h for each subcommand\'s optional arguments details',
                                       dest='command')
    subparsers.add_parser('init', help='Init the settings in `~/.zshrc` file')
    subparsers.add_parser('polling', help='Start the polling daemon process')
    display_parser = subparsers.add_parser('display', help='Print the corresponding info on the terminal')
    display_parser.add_argument('widget', help='The widget to display, e.g. weather')
    args = parser.parse_args()

    if args.command == 'polling':
        with daemon.DaemonContext(pidfile=daemon.pidfile.PIDLockFile(constants.PID_PATH)):
            polling()
    elif args.command == 'display':
        widgets = sys.modules['thunderbolt100k.widgets']
        assert hasattr(widgets, args.widget), 'There is no widget called {0}'.format(args.widget)
        assert hasattr(getattr(widgets, args.widget),
                       'display'), 'The widget {0} must contain a `display` method'.format(args.widget)
        result = getattr(widgets, args.widget).display()
        if result:
            print(result)
    elif args.command == 'init':
        init_zshrc()
def scheduler(args):
    print(settings.HEADER)
    job = jobs.SchedulerJob(
        dag_id=args.dag_id,
        subdir=process_subdir(args.subdir),
        run_duration=args.run_duration,
        num_runs=args.num_runs,
        do_pickle=args.do_pickle)

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("scheduler", args.pid, args.stdout, args.stderr, args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            files_preserve=[handle],
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            job.run()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)
        signal.signal(signal.SIGQUIT, sigquit_handler)
        job.run()
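The files_preserve argument matters here: DaemonContext closes every open file descriptor when it detaches, so the logging handle returned by setup_logging would go dead without it. A sketch of the same idea with the standard logging module (the file name is illustrative):

import logging
import daemon

handler = logging.FileHandler('/tmp/scheduler-example.log')
logging.getLogger().addHandler(handler)

# Without files_preserve, DaemonContext would close the handler's file
# descriptor along with everything else when it forks and detaches.
with daemon.DaemonContext(files_preserve=[handler.stream]):
    logging.getLogger().warning('still writing to the preserved log file')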
def flower(args):
    broka = conf.get('celery', 'BROKER_URL')
    address = '--address={}'.format(args.hostname)
    port = '--port={}'.format(args.port)
    api = ''
    if args.broker_api:
        api = '--broker_api=' + args.broker_api

    flower_conf = ''
    if args.flower_conf:
        flower_conf = '--conf=' + args.flower_conf

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("flower", args.pid, args.stdout, args.stderr, args.log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            os.execvp("flower", ['flower', '-b', broka, address, port, api, flower_conf])

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)
        os.execvp("flower", ['flower', '-b', broka, address, port, api, flower_conf])
def main():
    parser = argparse.ArgumentParser(description=needl.__description__)
    parser.add_argument('--datadir', default=os.getcwd() + '/data', help='Data directory')
    parser.add_argument('-d', '--daemon', action='store_true', help='Run as a daemon')
    parser.add_argument('-v', '--verbose', action='store_true', help='Increase logging')
    parser.add_argument('--logfile', type=argparse.FileType('a'), default=sys.stdout, help='Log to this file. Default is stdout.')
    parser.add_argument('--pidfile', default='/tmp/needl.pid', help='Save process PID to this file. Default is /tmp/needl.pid. Only valid when running as a daemon.')
    args = parser.parse_args()

    if args.daemon and args.logfile is sys.stdout:
        args.logfile = open('/tmp/needl.log', 'a')

    needl.init(args)
    daemonize(args.logfile, args.pidfile) if args.daemon else start()
def daemonize(logfile, pidfile):
    needl.log.info('Daemonizing and logging to %s', logfile)
    with daemon.DaemonContext(working_directory=os.getcwd(),
                              stderr=logfile,
                              umask=0o002,
                              pidfile=daemon.pidfile.PIDLockFile(pidfile)):
        start()
def run():
    opts.load_options()

    if opts.VIZ_LUMA_MAP:
        import luma_vis
        luma_vis.visualize(models.LUMA_FILE)
    else:
        opts.print_config()
        models.load_luma_observations()
        models.load_luma_map(models.LUMA_FILE)

        if opts.RUN_AS_DAEMON:
            import os
            try:
                import daemon
                import daemon.pidfile
            except ImportError:
                print("MISSING DAEMON MODULE. PLEASE INSTALL PYTHON-DAEMON TO ENABLE DAEMON MODE")
                import sys
                sys.exit(1)

            uid = os.getuid()
            lock_file = os.path.join(os.path.sep, "tmp", "autolux.%s.pid" % uid)
            print("RUNNING IN DAEMON MODE")
            print("LOCKFILE", lock_file)
            with daemon.DaemonContext(pidfile=daemon.pidfile.PIDLockFile(lock_file)):
                monitor_luma()
        else:
            monitor_luma()
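Unlike TimeoutPIDLockFile, the plain PIDLockFile used here blocks indefinitely if the lock is already held. One way to fail fast instead, assuming the same per-user lock path convention (the path below is illustrative), is to check the lock before daemonizing:

from daemon.pidfile import PIDLockFile

lock = PIDLockFile('/tmp/autolux-example.pid')
if lock.is_locked():
    print('already running as pid %s' % lock.read_pid())
else:
    print('lock is free; safe to daemonize')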
def start(self):
    """
    If configured, converts to a daemon. Otherwise, starts connected to the current console.
    """
    run_as_daemon = self.config.getboolean('Runtime', 'daemon', fallback=False)
    uid = self.config.get('Runtime', 'daemon user', fallback=None)
    gid = self.config.get('Runtime', 'daemon group', fallback=None)
    pidfile = self.config.get('Runtime', 'daemon pid file', fallback=None)
    if uid:
        uid = getpwnam(uid).pw_uid
    if gid:
        gid = getgrnam(gid).gr_gid

    if run_as_daemon:
        LOGGER.info('Starting as a daemon process')
        import daemon
        import daemon.pidfile
        if pidfile:
            # We need to create the enclosing directory (if it does not exist) before
            # starting the daemon context, as we might lose the permissions to create
            # that directory afterwards.
            piddir = os.path.dirname(pidfile)
            if not os.path.isdir(piddir):
                os.makedirs(piddir, 0o0755)
                os.chown(piddir, uid, gid)
            # Convert the filename to the appropriate type for the daemon module.
            pidfile = daemon.pidfile.TimeoutPIDLockFile(pidfile)
        with daemon.DaemonContext(uid=uid, gid=gid, pidfile=pidfile):
            self._start()
    else:
        self._start()
def worker(args):
    env = os.environ.copy()
    env['AIRFLOW_HOME'] = settings.AIRFLOW_HOME

    # Celery worker
    from airflow.executors.celery_executor import app as celery_app
    from celery.bin import worker

    worker = worker.worker(app=celery_app)
    options = {
        'optimization': 'fair',
        'O': 'fair',
        'queues': args.queues,
        'concurrency': args.concurrency,
        'hostname': args.celery_hostname,
    }

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("worker", args.pid, args.stdout, args.stderr, args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            files_preserve=[handle],
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            sp = subprocess.Popen(['airflow', 'serve_logs'], env=env, close_fds=True)
            worker.run(**options)
            sp.kill()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)

        sp = subprocess.Popen(['airflow', 'serve_logs'], env=env, close_fds=True)
        worker.run(**options)
        sp.kill()
def worker(args):
    env = os.environ.copy()
    env['AIRFLOW_HOME'] = settings.AIRFLOW_HOME

    # Celery worker
    from airflow.executors.celery_executor import app as celery_app
    from celery.bin import worker

    worker = worker.worker(app=celery_app)
    options = {
        'optimization': 'fair',
        'O': 'fair',
        'queues': args.queues,
        'concurrency': args.concurrency,
    }

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("worker", args.pid, args.stdout, args.stderr, args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            files_preserve=[handle],
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            sp = subprocess.Popen(['airflow', 'serve_logs'], env=env)
            worker.run(**options)
            sp.kill()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)

        sp = subprocess.Popen(['airflow', 'serve_logs'], env=env)
        worker.run(**options)
        sp.kill()
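One caveat that applies to both worker variants above: sp.kill() only runs if worker.run() returns normally, so the serve_logs side-car can outlive a crashed worker. A sketch of a more defensive shape, assuming the airflow CLI from the snippets is on PATH (run_worker is a stand-in for worker.run(**options)):

import subprocess


def run_worker():
    pass  # stand-in for worker.run(**options) in the snippets above


sp = subprocess.Popen(['airflow', 'serve_logs'], close_fds=True)
try:
    run_worker()
finally:
    sp.kill()  # terminate the side-car even if the worker loop raises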