def run(self):
""" Run the analysis job, configuring ENV appropriately """
if self.status != self.Status.CREATED:
logger.warning('Attempt to re-run job: {}. Skipping.'.format(self.uuid))
return
# Provide the base environment to enable running Django commands in the container
environment = self.base_environment()
# Job-specific settings
environment.update({
'NB_TEMPDIR': os.path.join('/tmp', str(self.uuid)),
'PGDATA': os.path.join('/pgdata', str(self.uuid)),
'PFB_SHPFILE_URL': self.neighborhood.boundary_file.url,
'PFB_STATE': self.neighborhood.state_abbrev,
'PFB_STATE_FIPS': self.neighborhood.state.fips,
'PFB_JOB_ID': str(self.uuid),
'AWS_STORAGE_BUCKET_NAME': settings.AWS_STORAGE_BUCKET_NAME,
'PFB_S3_RESULTS_PATH': self.s3_results_path,
})
if self.osm_extract_url:
environment['PFB_OSM_FILE_URL'] = self.osm_extract_url
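# With no extract URL set, the analysis container is presumably left to
# fetch the OSM data it needs on its own; that logic lives outside this method.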
# Workaround for not being able to run development jobs on the actual batch cluster:
# bail out with a helpful message
if settings.DJANGO_ENV == 'development':
logger.warn("Can't actually run development analysis jobs on AWS. Try this:"
"\nPFB_JOB_ID='{PFB_JOB_ID}' PFB_S3_RESULTS_PATH='{PFB_S3_RESULTS_PATH}' "
"./scripts/run-local-analysis "
"'{PFB_SHPFILE_URL}' {PFB_STATE} {PFB_STATE_FIPS}".format(**environment))
self.generate_tiles()
return
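# boto3 resolves AWS credentials and region via its default provider chain
# (environment variables, shared config files, or an instance/task role).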
client = boto3.client('batch')
container_overrides = {
'environment': create_environment(**environment),
}
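# create_environment is assumed to convert the flat dict into the
# [{'name': ..., 'value': ...}, ...] list that the Batch API expects in
# containerOverrides; see the sketch at the end of this method.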
try:
response = client.submit_job(
jobName=self.analysis_job_name,
jobDefinition=self.analysis_job_definition,
jobQueue=settings.PFB_AWS_BATCH_ANALYSIS_JOB_QUEUE_NAME,
containerOverrides=container_overrides)
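# Record the Batch job id, then advance the job from CREATED to QUEUED.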
self.batch_job_id = response['jobId']
self.save()
self.update_status(self.Status.QUEUED)
except (botocore.exceptions.BotoCoreError, KeyError):
logger.exception('Error starting AnalysisJob {}'.format(self.uuid))
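# KeyError covers a submit_job response missing 'jobId'. The status is left
# at CREATED, so the job can be re-run once the failure is investigated.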
else:
self.generate_tiles()
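# For reference, a minimal sketch of the create_environment helper used
# above, assuming it simply maps a flat dict onto the list of
# {'name': ..., 'value': ...} pairs that AWS Batch expects in
# containerOverrides. This is hypothetical; the real helper is defined
# elsewhere in the project.
def create_environment(**kwargs):
    return [{'name': key, 'value': str(value)} for key, value in kwargs.items()]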