def worker(config_uri):
    """ Console entry script that starts a worker process
    """
    # TODO: import spacy's model to share it between workers
    pyramid_env = bootstrap(config_uri)
    # Setup logging to allow log output from command methods:
    # from pyramid.paster import setup_logging
    # setup_logging(config_uri)
    # Disabled for now: it conflicts with normal worker output.
    # TODO: solve logging for the console
    try:
        qs = ['default']
        conn = redis_connection()
        with Connection(conn):
            w = Worker(qs)
            w.work()
    finally:
        pyramid_env['closer']()

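A worker like this only consumes jobs; something else has to enqueue them. For context, here is a minimal enqueue-side sketch (the tasks.count_words function is hypothetical; any importable callable works):

# Minimal enqueue-side sketch to pair with the workers in this section.
# tasks.count_words is hypothetical; substitute any importable callable.
from redis import Redis
from rq import Queue

from tasks import count_words  # hypothetical job function

q = Queue('default', connection=Redis())
job = q.enqueue(count_words, 'some text to process')
print(job.id)  # a worker listening on 'default', like the one above, picks this up
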
def worker():
    with Connection(Redis("jobqueue.local")):
        qs = sys.argv[1:] or ['default']
        print("foo")
        w = Worker(qs)
        w.work()

def main():
    """
    Init workers
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--queues', nargs='+', type=str, dest='queues', required=True)
    args = parser.parse_args()
    with Connection(connection=StrictRedis(**settings.REDIS)):
        # --queues is required, so no default-queue fallback is needed
        qs = [Queue(name) for name in args.queues]
        worker = Worker(qs)
        worker.work()

def main():
    with Connection(redis_connection):
        worker = Worker(Queue('default'))
        worker.work()

def main(config):
    global worker_config
    worker_config = config
    listen = config["listen"].values()
    queue_dsn = config["queue"]["dsn"]
    conn = redis.from_url(queue_dsn)
    with Connection(conn):
        worker = Worker(map(Queue, listen))
        worker.work()

def main(config=None):
    listen = config["listen"].values()
    queue_dsn = config["queue"]["dsn"]
    conn = redis.from_url(queue_dsn)
    with Connection(conn):
        worker = Worker(map(Queue, listen))
        worker.work()

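Neither main(config) variant above shows the config mapping it reads. From the lookups (config["listen"].values() and config["queue"]["dsn"]), a plausible shape is sketched below; the keys are inferred from the code and the values are illustrative, not taken from the source:

# Illustrative config shape for the two main(config) variants above.
config = {
    "listen": {"high": "high", "default": "default"},  # queue names to consume
    "queue": {"dsn": "redis://localhost:6379/0"},      # URL for redis.from_url()
}
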
def main():
    with Connection(Redis(settings['redis server'])):
        qs = ['default']
        w = Worker(qs)
        w.work()

def main(cli_config, raw, by_queue, queues, **options):
    """RQ command-line monitor."""
    try:
        with Connection(redis_conn):
            refresh(0.1, show_prism_info, queues, raw, by_queue,
                    cli_config.queue_class, cli_config.worker_class)
    except ConnectionError as e:
        click.echo(e)
        sys.exit(1)
    except KeyboardInterrupt:
        click.echo()
        sys.exit(0)

def runworker(app):
    REDIS_HOST = app.config['REDIS_HOST']
    REDIS_PORT = app.config['REDIS_PORT']
    REDIS_DB = app.config['REDIS_DB']
    QUEUES = app.config['QUEUES']
    redis_conn = Connection(Redis(REDIS_HOST,
                                  REDIS_PORT,
                                  REDIS_DB))
    with redis_conn:
        w = Worker(QUEUES)
        w.work()

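All of these snippets lean on Connection() pushing a connection that Queue and Worker then resolve implicitly; called with no argument, as in a later snippet, it falls back to a default local Redis. Newer RQ releases deprecate this implicit pattern in favor of passing the connection explicitly. A minimal sketch of the explicit equivalent, assuming a local Redis:

# Explicit-connection equivalent of the Connection() pattern used above.
# Assumes a local Redis; adjust the connection parameters as needed.
from redis import Redis
from rq import Queue, Worker

conn = Redis()
queues = [Queue(name, connection=conn) for name in ['default']]
worker = Worker(queues, connection=conn)
worker.work()
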
def __init__(self):
    super(TrainCenter, self).__init__()
    info = config['pubsub']
    self.host = info[0]
    self.port = int(info[1])
    self.raw_conn = redis.StrictRedis(host=self.host, port=self.port, db=0)
    self.conn = Connection(self.raw_conn)
    self.train_sessions = []

def __init__(self):
    self.measure_helper = MeasureHelper()
    self.r = None
    self.p = None
    self.rc = None
    self.group_dict = {}  # group_id : Group object
    info = config['pubsub']
    host = info[0]
    port = int(info[1])
    self.raw_conn = redis.StrictRedis(host=host, port=port, db=0)
    self.conn = Connection(self.raw_conn)

def launch_rq_worker() -> None:
    """
    Blocking function to launch a worker using Python RQ's internal API
    """
    with Connection():
        w = Worker(
            get_available_rq_worker_name()
        )
        w.work()

def run_worker():
    """Initializes a slim rq task queue."""
    listen = ['default']
    conn = Redis(
        host=app.config['RQ_DEFAULT_HOST'],
        port=app.config['RQ_DEFAULT_PORT'],
        db=0,
        password=app.config['RQ_DEFAULT_PASSWORD'])
    with Connection(conn):
        worker = Worker(map(Queue, listen))
        worker.work()

def work():
    print("Hello from the worker side.")
    with Connection(REDIS):
        worker = Worker(map(Queue, QUEUES))
        worker.work()

def start_worker(queue_name):
    print("starting worker '{}'...".format(queue_name))
    with Connection(redis_rq_conn):
        worker = Worker(Queue(queue_name), exc_handler=failed_job_handler)
        worker.work()

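failed_job_handler is defined elsewhere in that project. RQ invokes each exception handler as handler(job, exc_type, exc_value, traceback), and a handler returning False stops the remaining handlers in the chain. A plausible sketch, not the project's actual handler:

# Plausible sketch of an exc_handler such as failed_job_handler.
def failed_job_handler(job, exc_type, exc_value, traceback):
    print("job {} failed: {}".format(job.id, exc_value))
    return True  # let RQ's remaining handlers (e.g. move-to-failed) run
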
def main():
    utils.setup_logging()
    config.log()
    gh_pr.monkeypatch_github()
    if config.FLUSH_REDIS_ON_STARTUP:
        utils.get_redis().flushall()
    with rq.Connection(utils.get_redis()):
        worker = rq.worker.HerokuWorker([rq.Queue('default')],
                                        exception_handlers=[error_handler])
        worker.work()

def create_base_learner(id):
    """This creates a single base learner from a base learner origin and queues it up"""
    path = functions.get_path_from_query_string(request)

    with functions.DBContextManager(path) as session:
        base_learner_origin = session.query(models.BaseLearnerOrigin).filter_by(id=id).first()
        if base_learner_origin is None:
            raise exceptions.UserError('Base learner origin {} not found'.format(id), 404)
        if not base_learner_origin.final:
            raise exceptions.UserError('Base learner origin {} is not final'.format(id))

        req_body = request.get_json()

        # Retrieve full hyperparameters
        est = base_learner_origin.return_estimator()
        hyperparameters = functions.import_object_from_string_code(req_body['source'],
                                                                   'params')
        est.set_params(**hyperparameters)
        hyperparameters = functions.make_serializable(est.get_params())

        base_learners = session.query(models.BaseLearner).\
            filter_by(base_learner_origin_id=id,
                      hyperparameters=hyperparameters).all()
        if base_learners:
            raise exceptions.UserError('Base learner exists with given hyperparameters')

        base_learner = models.BaseLearner(hyperparameters,
                                          'queued',
                                          base_learner_origin)

        if 'single_searches' not in base_learner_origin.description:
            base_learner_origin.description['single_searches'] = []
        base_learner_origin.description['single_searches'] += [req_body['source']]

        session.add(base_learner)
        session.add(base_learner_origin)
        session.commit()

        with Connection(get_redis_connection()):
            rqtasks.generate_meta_features.delay(path, base_learner.id)

        return jsonify(base_learner.serialize)

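The .delay(...) calls here and in the following routes work because the functions in rqtasks are wrapped with RQ's @job decorator, which adds .delay() and resolves its queue connection from the surrounding Connection() context. A minimal sketch of how such a task might be declared; the body is hypothetical:

# Sketch of how a task like rqtasks.generate_meta_features could be declared.
# @job('default') adds the .delay(...) method; the connection comes from the
# Connection() context active at the call site.
from rq.decorators import job

@job('default')
def generate_meta_features(path, base_learner_id):
    ...  # load the queued base learner and compute its meta-features
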
def get_automated_runs():
    """Return all automated runs"""
    path = functions.get_path_from_query_string(request)

    if request.method == 'GET':
        with functions.DBContextManager(path) as session:
            automated_runs = session.query(models.AutomatedRun).all()
            return jsonify(list(map(lambda x: x.serialize, automated_runs)))

    if request.method == 'POST':
        req_body = request.get_json()

        with functions.DBContextManager(path) as session:
            base_learner_origin = None
            if req_body['category'] == 'bayes' or req_body['category'] == 'greedy_ensemble_search':
                base_learner_origin = session.query(models.BaseLearnerOrigin).\
                    filter_by(id=req_body['base_learner_origin_id']).first()
                if base_learner_origin is None:
                    raise exceptions.UserError('Base learner origin {} not found'.format(
                        req_body['base_learner_origin_id']
                    ), 404)
                if not base_learner_origin.final:
                    raise exceptions.UserError('Base learner origin {} is not final'.format(
                        req_body['base_learner_origin_id']
                    ))
            elif req_body['category'] == 'tpot':
                pass
            else:
                raise exceptions.UserError('Automated run category'
                                           ' {} not recognized'.format(req_body['category']))

            # Check for any syntax errors
            module = functions.import_string_code_as_module(req_body['source'])
            del module

            automated_run = models.AutomatedRun(req_body['source'],
                                                'queued',
                                                req_body['category'],
                                                base_learner_origin)

            session.add(automated_run)
            session.commit()

            with Connection(get_redis_connection()):
                rqtasks.start_automated_run.delay(path, automated_run.id)

            return jsonify(automated_run.serialize)

def create_new_stacked_ensemble():
    path = functions.get_path_from_query_string(request)
    req_body = request.get_json()

    with functions.DBContextManager(path) as session:
        if request.method == 'GET':
            return jsonify(
                list(map(lambda x: x.serialize, session.query(models.StackedEnsemble).all()))
            )
        if request.method == 'POST':
            base_learners = session.query(models.BaseLearner).\
                filter(models.BaseLearner.id.in_(req_body['base_learner_ids'])).all()
            if len(base_learners) != len(req_body['base_learner_ids']):
                raise exceptions.UserError('Not all base learners found')
            for learner in base_learners:
                if learner.job_status != 'finished':
                    raise exceptions.UserError('Not all base learners have finished')

            base_learner_origin = session.query(models.BaseLearnerOrigin).\
                filter_by(id=req_body['base_learner_origin_id']).first()
            if base_learner_origin is None:
                raise exceptions.UserError('Base learner origin {} not '
                                           'found'.format(req_body['base_learner_origin_id']), 404)

            # Retrieve full hyperparameters
            est = base_learner_origin.return_estimator()
            params = functions.import_object_from_string_code(
                req_body['secondary_learner_hyperparameters_source'], 'params')
            est.set_params(**params)
            hyperparameters = functions.make_serializable(est.get_params())

            stacked_ensembles = session.query(models.StackedEnsemble).\
                filter_by(base_learner_origin_id=req_body['base_learner_origin_id'],
                          secondary_learner_hyperparameters=hyperparameters,
                          base_learner_ids=sorted([bl.id for bl in base_learners])).all()
            if stacked_ensembles:
                raise exceptions.UserError('Stacked ensemble exists')

            stacked_ensemble = models.StackedEnsemble(
                secondary_learner_hyperparameters=hyperparameters,
                base_learners=base_learners,
                base_learner_origin=base_learner_origin,
                job_status='queued'
            )
            session.add(stacked_ensemble)
            session.commit()

            with Connection(get_redis_connection()):
                rqtasks.evaluate_stacked_ensemble.delay(path, stacked_ensemble.id)

            return jsonify(stacked_ensemble.serialize)