import json
import logging
from requests import get, post

def start(self):
    # Create a new sqlmap API task and remember its id on the scan object.
    self.task_id = json.loads(get('{}/task/new'.format(self.api_url)).text)['taskid']
    self.sqli_obj.task_id = self.task_id
    logging.info(json.dumps(self.scan_options))
    # Push the scan options to the sqlmap API.
    res = json.loads(post('{}/option/{}/set'.format(self.api_url, self.task_id),
                          data=json.dumps(self.scan_options),
                          headers=self.headers).text)
    if res['success']:
        # Options accepted: start the scan, then poll for results after a short delay.
        post('{}/scan/{}/start'.format(self.api_url, self.task_id), data=self.target_url,
             headers=self.headers)
        self.update.apply_async((self,), countdown=10)
    else:
        # Setting the options failed: tear the sqlmap task down again.
        self.delete.delay(self)
def delete(self):
    # Remove the task from the sqlmap API and delete the associated scan record.
    get('{}/task/{}/delete'.format(self.api_url, self.task_id))
    self.sqli_obj.delete()
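# Standalone sketch of the sqlmap REST API calls used by start()/delete() above. The
# endpoints come from the snippet; the API address and the example target are
# assumptions (sqlmapapi.py listens on 127.0.0.1:8775 by default).
import json
from requests import get, post

api_url = 'http://127.0.0.1:8775'
headers = {'Content-Type': 'application/json'}

task_id = json.loads(get('{}/task/new'.format(api_url)).text)['taskid']
post('{}/scan/{}/start'.format(api_url, task_id),
     data=json.dumps({'url': 'http://example.com/page.php?id=1'}),
     headers=headers)
# ... poll '{}/scan/{}/status' until the scan finishes, then clean up:
get('{}/task/{}/delete'.format(api_url, task_id))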
def storeSatelliteData(request, name):
    # Queues the preparation of the full history of the named satellite product.
    # Assumes SatelliteData, storeSatelliteDataWrapper, JsonResponse and djcelery's
    # IntervalSchedule / PeriodicTask are imported in this module.
    satelliteObj = SatelliteData.objects.filter(name=name)
    if not satelliteObj:
        context = {'message': ('error', 'The satellite data "' + name + '" has not been found in the database.')}
    else:
        job = storeSatelliteDataWrapper.delay(name)
        satelliteObj[0].jobId = job.id
        satelliteObj[0].save()
        #=======================================================================
        # storeSatelliteDataWrapper(name)
        # satelliteObj = SatelliteData.objects.filter(name=name)
        # satelliteObj[0].jobId = None
        #=======================================================================
        context = {'jobId': satelliteObj[0].jobId,
                   'message': ('warning', 'Starting data preparation...'),
                   'state': 'PROGRESS'}
        # Add a Celery periodic task that refreshes the data every two hours.
        intervalSchedules = IntervalSchedule.objects.filter(period='hours', every=2)
        if intervalSchedules:
            intervalSchedule = intervalSchedules[0]
        else:
            intervalSchedule = IntervalSchedule(period='hours', every=2)
            intervalSchedule.save()
        periodicTasks = PeriodicTask.objects.filter(name=name + ' Update')
        if not periodicTasks:
            periodicTask = PeriodicTask(name=name + ' Update', task='updateSatelliteData',
                                        interval=intervalSchedule, args='["' + name + '"]')
            periodicTask.save()
    return JsonResponse(context)
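# A minimal sketch (name and body are assumptions) of the 'updateSatelliteData' task
# that the PeriodicTask created above refers to; the real implementation is not part
# of this snippet. It simply re-queues the data preparation for the named product.
from celery import shared_task

@shared_task(name='updateSatelliteData')
def update_satellite_data(name):
    # Re-run the data preparation wrapper used by the view above.
    storeSatelliteDataWrapper.delay(name)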
import subprocess
from os import path

def run_subdomainbrute(target):
    # Run subDomainsBrute against the target and return its raw stdout lines.
    # TASKS_ROOT is assumed to be defined elsewhere in this module.
    subdomainbrute_workspace = path.join(TASKS_ROOT, 'tools', 'subDomainsBrute').replace('\\', '/')
    cmd = 'subDomainsBrute.py %s -f dict/test_subnames.txt' % target
    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                         cwd=subdomainbrute_workspace)
    process_output = p.stdout.readlines()
    return process_output
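# Usage sketch, assuming run_subdomainbrute above is registered as a Celery task
# (e.g. decorated with @app.task); the target domain is only an example.
result = run_subdomainbrute.delay('example.com')
for line in result.get(timeout=3600):
    # readlines() yields bytes under Python 3, hence the decode.
    print(line.decode('utf-8', 'replace').rstrip())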
# On failure, retry after 300 seconds, up to 5 times (see the retry sketch after
# backend_cleanup below):
# @app.task(bind=True, default_retry_delay=300, max_retries=5)
def backend_cleanup():
    '''Delete all finished task results from the database.'''
    from djcelery.models import TaskState
    from celery import states
    # Keep results for tasks that are still pending, received, started or retrying.
    query_set = TaskState.objects.exclude(state__in=states.UNREADY_STATES)
    count = query_set.count()
    query_set.delete()
    return 'Deleted: %s task results from db OK' % count
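# Sketch of the retry pattern referenced by the commented-out decorator above: a bound
# task re-queues itself on failure, waiting default_retry_delay seconds between
# attempts and giving up after max_retries attempts. The task name, the `app` instance
# and the use of requests' get() are assumptions for illustration.
@app.task(bind=True, default_retry_delay=300, max_retries=5)
def flaky_fetch(self, url):
    try:
        return get(url).text
    except Exception as exc:
        # self.retry() raises a Retry exception; the broker redelivers the task later.
        raise self.retry(exc=exc)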
from celery import current_task

def async_task():
    # Trivial task body used to demonstrate asynchronous execution.
    print('async task')

def add(x, y):
    # Log which task instance is executing this call, then return the sum.
    request = current_task.request
    print('Executing task id %r, args: %r kwargs: %r' % (
        request.id, request.args, request.kwargs))
    return x + y
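# Usage sketch (assuming add above is registered with @app.task): queue the call on a
# worker and block until the result arrives.
async_result = add.delay(2, 3)
print(async_result.get(timeout=10))  # -> 5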
def execute_dag(self, dag, workflow_id, data=None):
    """ Celery task that runs a single dag on a worker.

    This celery task starts, manages and monitors the individual tasks of a dag.

    Args:
        self (Task): Reference to itself, the celery task object.
        dag (Dag): Reference to a Dag object that is being used to start, manage and
                   monitor tasks.
        workflow_id (string): The unique ID of the workflow run that started this dag.
        data (MultiTaskData): An optional MultiTaskData object that is being passed to
                              the first tasks in the dag. This allows the transfer of
                              data from dag to dag.
    """
    start_time = datetime.now()
    logger.info('Running DAG <{}>'.format(dag.name))

    # send custom celery event that the dag has been started
    self.send_event(JobEventName.Started,
                    job_type=JobType.Dag,
                    name=dag.name,
                    time=datetime.utcnow(),
                    workflow_id=workflow_id,
                    duration=None)

    # store job specific meta information with the job
    self.update_state(meta={'name': dag.name,
                            'type': JobType.Dag,
                            'workflow_id': workflow_id})

    # run the tasks in the DAG
    signal = DagSignal(Client(SignalConnection(**self.app.user_options['config'].signal,
                                               auto_connect=True),
                              request_key=workflow_id),
                       dag.name)
    dag.run(config=self.app.user_options['config'],
            workflow_id=workflow_id,
            signal=signal,
            data=data)

    # send custom celery event that the dag has succeeded or was aborted
    event_name = JobEventName.Succeeded if not signal.is_stopped else JobEventName.Aborted
    self.send_event(event_name,
                    job_type=JobType.Dag,
                    name=dag.name,
                    time=datetime.utcnow(),
                    workflow_id=workflow_id,
                    duration=(datetime.now() - start_time).total_seconds())

    logger.info('Finished DAG <{}>'.format(dag.name))
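# Sketch of how the custom events emitted by execute_dag via self.send_event() could be
# observed with Celery's event receiver. The catch-all handler and the printed fields
# are assumptions; the concrete JobEventName strings are not shown in this snippet.
def monitor_dag_events(app):
    def on_event(event):
        # Each event carries 'type' plus the custom fields passed to send_event()
        # (job_type, name, workflow_id, duration, ...).
        print(event.get('type'), event.get('name'), event.get('workflow_id'))

    with app.connection() as connection:
        receiver = app.events.Receiver(connection, handlers={'*': on_event})
        receiver.capture(limit=None, timeout=None, wakeup=True)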