def generate_tiles(self):
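    """Submit an AWS Batch tilemaker job to export tiles for this analysis job's results."""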
    environment = self.base_environment()
    environment.update({
        'PFB_JOB_ID': str(self.uuid),
        'AWS_STORAGE_BUCKET_NAME': settings.AWS_STORAGE_BUCKET_NAME,
        'PFB_S3_RESULTS_PATH': self.s3_results_path,
        'PFB_S3_TILES_PATH': self.s3_tiles_path
    })
    # Workaround for not being able to run development jobs on the actual Batch cluster:
    # bail out with a helpful message showing how to run the tilemaker container locally.
    if settings.DJANGO_ENV == 'development':
        logger.warning("Can't actually run development tiling jobs on AWS. Try this:"
                       "\nAWS_STORAGE_BUCKET_NAME='{AWS_STORAGE_BUCKET_NAME}' "
                       "PFB_JOB_ID='{PFB_JOB_ID}' "
                       "PFB_S3_RESULTS_PATH='{PFB_S3_RESULTS_PATH}' "
                       "PFB_S3_TILES_PATH='{PFB_S3_TILES_PATH}' "
                       "docker-compose run tilemaker".format(**environment))
        return
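    # Build the Batch submission parameters; the tile export job depends on the
    # analysis Batch job, so it runs only after the analysis job completes.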
    job_params = {
        'jobName': self.tilemaker_job_name,
        'jobDefinition': self.tilemaker_job_definition,
        'jobQueue': settings.PFB_AWS_BATCH_TILEMAKER_JOB_QUEUE_NAME,
        'dependsOn': [{'jobId': self.batch_job_id}],
        'containerOverrides': {
            'environment': create_environment(**environment),
        }
    }
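    # Submit the tile export job and log the resulting Batch job ID.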
    client = boto3.client('batch')
    try:
        response = client.submit_job(**job_params)
        logger.info('Exporting tiles for AnalysisJob {}, job {}'.format(self.uuid,
                                                                        response['jobId']))
    except Exception:
        logger.exception('Error starting tile export for AnalysisJob {}'.format(self.uuid))
        raise