def provision_run(self, spark_job, first_run=False):
    """
    Actually run the given Spark job.

    If this is the first run, update the schedule entry's "last_run_at"
    value to the Spark job's start date so the Celery beat scheduler has
    a correct reference point for future runs.
    """
    spark_job.run()
    if first_run:
        def update_last_run_at():
            # Fetch the job's schedule entry, creating one if it doesn't exist yet.
            schedule_entry = spark_job.schedule.get()
            if schedule_entry is None:
                schedule_entry = spark_job.schedule.add()
            schedule_entry.reschedule(last_run_at=spark_job.start_date)
        # Defer the update until the surrounding database transaction commits;
        # `transaction` here is django.db.transaction, imported at module level.
        transaction.on_commit(update_last_run_at)
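
# The helper below is a minimal, self-contained sketch (not part of the original
# module) illustrating why transaction.on_commit is used above: the registered
# callback runs only after the enclosing database transaction commits, so a
# rolled-back run never updates the schedule. The demo_on_commit function and its
# spark_job argument are hypothetical stand-ins for illustration.
from django.db import transaction


def demo_on_commit(spark_job):
    with transaction.atomic():
        spark_job.run()
        # Deferred until the atomic block exits successfully; skipped on rollback.
        transaction.on_commit(lambda: print("run committed; schedule can be updated"))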