def ready(self):
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    app.config_from_object('django.conf:settings')
    installed_apps = [app_config.name for app_config in apps.get_app_configs()]
    app.autodiscover_tasks(lambda: installed_apps, force=True)
{% if cookiecutter.use_sentry_for_error_reporting == 'y' -%}
    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)
{%- endif %}
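This `ready()` hook is only a fragment: it assumes a module-level Celery instance plus Django's app registry and settings. A minimal sketch of the surrounding module, following the cookiecutter-django layout (names are illustrative):

from celery import Celery
from django.apps import apps, AppConfig
from django.conf import settings

app = Celery('{{cookiecutter.project_slug}}')


class CeleryConfig(AppConfig):
    name = 'taskapp'
    verbose_name = 'Celery Config'

    # the ready() method shown above lives here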
def create_app(conf):
    app = celery.Celery()

    for option, value in conf.rpc.items():
        # celery options are upper-cased, just like in flask
        app.conf[option.upper()] = value

    # Each driver may use its own set of Celery tasks, so in order to run
    # them the Celery worker has to load them into memory. That's why we
    # need to iterate over Kostyor drivers and lazily load them into
    # memory. The important note here is that we can't use stevedore,
    # because it tries to load each found module immediately, which leads
    # to a cyclic import error (drivers import a celery app in order to
    # define their own tasks).
    namespaces = (
        'kostyor.upgrades.drivers',
        'kostyor.discovery_drivers',
    )
    for namespace in namespaces:
        for ep in pkg_resources.iter_entry_points(namespace):
            package, module = ep.module_name.rsplit('.', 1)
            app.autodiscover_tasks([package], module)

    return app
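`create_app` only touches `conf.rpc`, so any object exposing an `rpc` mapping will do. A hedged sketch of calling it (the option names and values are placeholders):

import types

conf = types.SimpleNamespace(rpc={
    'broker_url': 'amqp://guest@localhost//',
    'result_backend': 'rpc://',
})
# Options are upper-cased on the way in: BROKER_URL, RESULT_BACKEND.
app = create_app(conf)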
def make_celery(app):
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['CELERY_BROKER_URL']
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
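This is the standard Flask + Celery factory pattern; a minimal usage sketch (the broker URLs are placeholders):

from flask import Flask

flask_app = Flask(__name__)
flask_app.config.update(
    CELERY_BROKER_URL='redis://localhost:6379/0',
    CELERY_RESULT_BACKEND='redis://localhost:6379/0',
)
celery = make_celery(flask_app)


@celery.task()
def add(a, b):
    # Runs inside flask_app.app_context(), so Flask extensions work here.
    return a + b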
def _setup_logging(loglevel=logging.WARN, **kwargs):
    from loggers import config_logger
    config_logger(level=loglevel)
    # broker_url, result_backend, and timezone are module-level settings.
    log.debug(
        'Configuring Celery on broker = %s,'
        ' result_backend = %s, timezone = %s',
        broker_url, result_backend, timezone)
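The signature (a `loglevel` keyword plus `**kwargs`) matches a receiver for Celery's `setup_logging` signal; wiring it up would look like this, assuming that is the intent:

from celery import signals

# Connecting a receiver to setup_logging also stops Celery from
# installing its own logging configuration.
signals.setup_logging.connect(_setup_logging)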
def setup(self):
    self.celery = Celery()
    dirname = os.path.dirname(__file__)
    self.cfg = YAMLConfigFile(os.path.join(dirname, "../config.default.yml"))
def create_and_configure_celery(app):
    broker_url = "sqla+{}".format(app.config["SQLALCHEMY_DATABASE_URI"])
    result_backend = broker_url.replace("sqla+", "db+")
    celery = Celery("tasks", backend=result_backend, broker=broker_url)
    return celery
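Both transport URLs are derived from the Flask SQLAlchemy URI: the broker gets an sqla+ prefix and the result backend a db+ prefix. A usage sketch (the credentials are placeholders):

from flask import Flask

flask_app = Flask(__name__)
flask_app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:pass@localhost/mydb'

# broker: 'sqla+postgresql://...', result backend: 'db+postgresql://...'
celery = create_and_configure_celery(flask_app)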
import traceback
from functools import wraps

from celery import shared_task
from django.core.mail import mail_admins


def shared_task_email(func):
    """
    Replacement for the @shared_task decorator that emails admins if an
    exception is raised.
    """
    @wraps(func)
    def new_func(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            subject = "Celery task failure"
            message = traceback.format_exc()
            mail_admins(subject, message)
            raise
    return shared_task(new_func)
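Usage mirrors plain @shared_task; a sketch with a hypothetical task body:

@shared_task_email
def send_report(report_id):
    # Any uncaught exception is mailed to ADMINS and then re-raised,
    # so Celery still records the failure.
    generate_and_send(report_id)  # hypothetical helper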
def check_celery_version(*args, **kwargs):
    if LooseVersion(__version__) < LooseVersion('4.0.0'):
        sys.exit("""You are running Celery {}.
girder-worker requires celery>=4.0.0""".format(__version__))
def _configure_sentry(self, raven_config):
    import raven
    from raven.contrib.celery import (register_signal,
                                      register_logger_signal)

    client = raven.Client(**raven_config)
    # register a custom filter to filter out duplicate logs
    register_logger_signal(client)
    # hook into the Celery error handler
    register_signal(client)
def celery_app():
    # re-register all the signals and sentry clients
    talisker.celery.enable_signals()
    celery.signals.before_task_publish.connect(before_task_publish)
    celery.signals.task_prerun.connect(task_prerun)
    yield celery.Celery(broker='memory://localhost/')
    celery.signals.before_task_publish.disconnect(before_task_publish)
    celery.signals.task_prerun.disconnect(task_prerun)
    talisker.celery.disable_signals()
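The `yield` midway through marks this as a generator fixture, presumably for pytest; a sketch of how it could be declared and consumed (the decorator and the test body are assumptions):

import pytest

# The function above would be decorated with @pytest.fixture.

def test_task_runs_locally(celery_app):
    @celery_app.task
    def ping():
        return 'pong'

    # .apply() executes the task eagerly in-process, so no worker is
    # needed against the memory:// broker.
    assert ping.apply().get() == 'pong'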
def make_celery(app):
    kombu.serialization.register(
        'drift_celery_json',
        drift_celery_dumps, drift_celery_loads,
        content_type='application/x-myjson', content_encoding='utf-8'
    )
    celery = Celery(app.import_name)

    # if BROKER_URL is not set, use the redis server
    BROKER_URL = app.config.get("BROKER_URL", None)
    if not BROKER_URL:
        BROKER_URL = "redis://{}:6379/{}".format(app.config.get("redis_server"), CELERY_DB_NUMBER)
        log.info("Using redis for celery broker: %s", BROKER_URL)
    else:
        log.info("celery broker set in config: %s", BROKER_URL)

    celery.conf.update(app.config)
    celery.conf["BROKER_URL"] = BROKER_URL
    celery.conf["CELERY_RESULT_BACKEND"] = BROKER_URL
    celery.conf["CELERY_TASK_SERIALIZER"] = "drift_celery_json"
    celery.conf["CELERY_RESULT_SERIALIZER"] = "drift_celery_json"
    celery.conf["CELERY_ACCEPT_CONTENT"] = ["drift_celery_json"]
    celery.conf["CELERY_ENABLE_UTC"] = True
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
# custom json encoder for datetime object serialization
# from http://stackoverflow.com/questions/21631878/celery-is-there-a-way-to-write-custom-json-encoder-decoder
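The registered drift_celery_dumps/drift_celery_loads pair is not shown here; a minimal sketch of what a datetime-aware JSON serializer in that spirit could look like, following the linked Stack Overflow approach rather than drift's actual code:

import json
from datetime import datetime


class DatetimeJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
            # Tag the value so the decoder can round-trip it.
            return {'__type__': 'datetime', 'value': obj.isoformat()}
        return super().default(obj)


def datetime_decoder(dct):
    if dct.get('__type__') == 'datetime':
        return datetime.fromisoformat(dct['value'])
    return dct


def drift_celery_dumps(obj):
    return json.dumps(obj, cls=DatetimeJSONEncoder)


def drift_celery_loads(s):
    return json.loads(s, object_hook=datetime_decoder)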
def getBroker(module):
    app = Celery(module, broker=config.CELERY_BROKER, backend=config.CELERY_BACKEND)
    app.conf.update(
        task_serializer='json',
        accept_content=['json'],  # Ignore other content
        result_serializer='json',
        timezone='Europe/Berlin',
        enable_utc=True,
    )
    app.conf.task_routes = {
        'scanner.scan_connector.scan_site': {'queue': 'scan-browser'},
        'scanner.db_connector.*': {'queue': 'db-mongo-access'},
        'scanner.externaltests_connector.*': {'queue': 'scan-external'}
    }
    return app
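With the task_routes mapping above, tasks dispatched by name land on their dedicated queues; a sketch of sending one:

app = getBroker('scanner')

# Routed to the 'scan-browser' queue by the task_routes mapping above.
app.send_task('scanner.scan_connector.scan_site', args=['https://example.org'])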
def init(with_result_backend=False):
    """Init Celery and Selinon.

    :param with_result_backend: true if the application should connect to the result backend
    :return: Celery application instance
    """
    conf = {
        'broker_url': os.environ.get('BROKER_URL', 'amqp://broker:5672'),
    }

    if with_result_backend:
        conf['result_backend'] = os.environ.get('RESULT_BACKEND_URL', 'redis://redis:6379/0')

    app = Celery('myapp')
    app.config_from_object(conf)

    flow_definition_files = []
    # Add all config files for flows
    for conf_file in os.listdir(os.path.join(_BASE_NAME, 'flows')):
        if conf_file.endswith('.yaml') and not conf_file.startswith('.'):
            flow_definition_files.append(os.path.join(_BASE_NAME, 'flows', conf_file))

    # Set Selinon configuration
    Config.set_config_yaml(os.path.join(_BASE_NAME, 'nodes.yaml'), flow_definition_files)
    # Prepare Celery
    Config.set_celery_app(app)

    return app
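A sketch of dispatching a flow with the initialized app; run_flow is Selinon's documented entry point, but the flow name and arguments here are placeholders:

from selinon import run_flow

app = init(with_result_backend=True)

# The flow name must match one of the YAML files under flows/.
dispatcher_id = run_flow('example_flow', node_args={'key': 'value'})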
def create_celery(app):
    celery = Celery(app.import_name,
                    backend=app.config['CELERY_RESULT_BACKEND'],
                    broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    return celery
def ready(self):
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    app.config_from_object('django.conf:settings')
    app.autodiscover_tasks(lambda: settings.INSTALLED_APPS, force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)
def make_celery(app):
    """Create the celery process."""
    # Initialize the celery object from the app's configuration.
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['CELERY_BROKER_URL'])
    # Mirror the Flask config into the Celery config.
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            """Executed whenever a ContextTask instance is called."""
            # Wrap task execution in the Flask app context so the
            # consumer (worker) side sees the same app state as the
            # producer (web) side.
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    # Attach the app context to celery.Task so other Flask extensions
    # can be called normally from inside tasks.
    celery.Task = ContextTask
    return celery
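The point of ContextTask is that extension objects bound to the Flask app keep working on the worker side; a short sketch with a hypothetical Flask-SQLAlchemy setup:

celery = make_celery(app)  # app is an existing Flask application

# `db` and `User` are hypothetical Flask-SQLAlchemy objects; any
# extension that needs an app context behaves the same way.
@celery.task()
def count_users():
    # db.session works because __call__ wrapped the call in app.app_context().
    return db.session.query(User).count()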
def make_celery(app):
    celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
def make_celery(app):
    celery = Celery(app.import_name, broker=app.config['BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery