def upload_to_s3(css_file):
    bucket_name = settings.AWS_BUCKET_NAME
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(bucket_name=bucket_name)

    folder = 'webpack_bundles/'
    filename = css_file.split('/')[-1]
    key = folder + filename

    with open(css_file, 'r') as file_obj:
        content = file_obj.read()

    mime = mimetypes.guess_type(filename)[0]
    k = Key(bucket)
    k.key = key  # folder + filename
    k.set_metadata("Content-Type", mime)
    k.set_contents_from_string(content)

    public_read = True
    if public_read:
        k.set_acl("public-read")

def init_es(timeout=TIMEOUT):
    log.info("connecting to %s %s", settings.ELASTICSEARCH_URL, settings.ELASTICSEARCH_PORT)
    auth = AWSRequestsAuth(aws_access_key=settings.AWS_ACCESS_KEY_ID,
                           aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
                           aws_host=settings.ELASTICSEARCH_URL,
                           aws_region='us-west-1',
                           aws_service='es')
    auth.encode = lambda x: bytes(x.encode('utf-8'))
    es = Elasticsearch(host=settings.ELASTICSEARCH_URL,
                       port=settings.ELASTICSEARCH_PORT,
                       connection_class=RequestsHttpConnection,
                       timeout=timeout,
                       max_retries=10, retry_on_timeout=True,
                       http_auth=auth)
    return es

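A hedged usage sketch; the index name and query below are assumptions, while `ping` and `search` are standard elasticsearch-py client methods:

es = init_es(timeout=30)
if es.ping():  # simple connectivity check
    results = es.search(index='documents', body={'query': {'match_all': {}}})  # hypothetical index name
    log.info("found %d hits", len(results['hits']['hits']))
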
def stop(self, aws_session=None, ecs_client=None):
    if ecs_client is None:
        if aws_session is None:
            aws_session = boto3.session.Session(
                region_name=settings.AWS_REGION,
                aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
            )
        ecs_client = aws_session.client('ecs')

    response = ecs_client.stop_task(
        cluster=settings.AWS_ECS_CLUSTER_NAME,
        task=self.arn
    )

    self.status = Task.STATUS_STOPPING
    self.save()

def sending_mail(subject, email_template_name, context, from_email, to_email):
    """
    Renders `email_template_name` and sends it as an HTML email to `to_email` via Amazon SES.
    """
    htmly = loader.get_template(email_template_name)
    html_content = htmly.render(context)

    conn = boto.ses.connect_to_region(
        settings.AWS_HOST_NAME,
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)

    response = conn.send_email(
        from_email, subject, html_content, [to_email], format='html')
    print(response)

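A usage sketch with hypothetical template, context, and addresses (none of these values appear in the source):

sending_mail(
    subject='Welcome!',
    email_template_name='emails/welcome.html',  # hypothetical template
    context={'username': 'alice'},              # hypothetical context
    from_email='noreply@example.com',
    to_email='alice@example.com',
)
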
def delete_file_from_s3(filename):
    conn = S3Connection(
        settings.AWS_ACCESS_KEY_ID,
        settings.AWS_SECRET_ACCESS_KEY,
    )
    b = Bucket(
        conn,
        settings.AWS_STORAGE_BUCKET_NAME,
    )
    k = Key(b)
    k.key = filename
    b.delete_key(k)

def session(self):
    """
    Boto3 authenticated session
    """
    if self._session is None:
        self._session = boto3.Session(
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY
        )
    return self._session

def back_up_bucket(self):
    logger.info('Start backing up the bucket data.')
    boto_connection = boto.connect_s3(
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        host=settings.AWS_S3_HOST,
    )
    source_bucket = boto_connection.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
    destination_bucket = boto_connection.get_bucket(settings.BACKUP_BUCKET_BUCKET_NAME)
    destination_sub_directory = '{location}/{timestamp}'.format(location=settings.BACKUP_BUCKET_LOCATION,
                                                                 timestamp=self.timestamp)

    try:
        key_list = [source_key.key for source_key in source_bucket.list() if source_key.size]
    except ValueError:
        raise ValueError('The backup task was aborted because of some bucket keys with no size. Set '
                         '`DJANGO_GREEN_GROVE_EMPTY_S3_KEYS` in your settings to get a list of the keys.')

    if hasattr(settings, 'DJANGO_GREEN_GROVE_EMPTY_S3_KEYS'):
        error_message = 'Some bucket keys were ignored during the backup task because they have no size'
        try:
            empty_keys = [source_key.key for source_key in source_bucket.list() if not source_key.size]
            error_message += ': %s' % ', '.join(empty_keys)
        except Exception:
            error_message += '.'
        logger.error(error_message)

    for key in key_list:
        new_key_name = '{sub_directory}/{name}'.format(sub_directory=destination_sub_directory, name=key)
        destination_bucket.copy_key(
            new_key_name=new_key_name,
            src_bucket_name=source_bucket.name,
            src_key_name=key
        )
    logger.info('Bucket data successfully copied to the target storage backend.')

def handle(self, *args, **options):
    """
    Run the command
    """
    with open(options['filejson']) as filejson:
        presets = json.load(filejson)
    client = boto3.client('elastictranscoder',
                          region_name=settings.AWS_REGION,
                          aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                          aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
    for preset in presets:
        preset['created'] = client.create_preset(**preset)
    self.stdout.write('ET_PRESET_IDS={}'.format(','.join(
        [preset['created']['Preset']['Id'] for preset in presets])))

def terminate(self, aws_session=None, ec2_client=None):
    if ec2_client is None:
        if aws_session is None:
            aws_session = boto3.session.Session(
                region_name=settings.AWS_REGION,
                aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
            )
        ec2_client = aws_session.client('ec2')

    # Save the new state of the instance.
    self.active = False
    self.save()

    # Actually terminate the instance.
    try:
        ec2_client.terminate_instances(InstanceIds=[self.ec2_id])

        # Record the termination time.
        self.terminated = timezone.now()
        self.save()
    except ClientError as e:
        raise RuntimeError('Problem terminating %s: [%s] %s' % (
            self, e.response['Error']['Code'], e.response['Error']['Message'],
        ))

def save(self, **kwargs):
    if not self.id:
        now = pytz.utc.localize(datetime.utcnow())
        expires = now + timedelta(days=1)
        self.expires = expires
        self.code = str(random.randint(1000, 9999))
        up = UserProfile.objects.get(user=self.user)
        if self.mode == "SMS" and \
                up.mobile_phone_number and \
                settings.SEND_SMS:
            # Send the code by SMS to up.mobile_phone_number.
            sns = boto3.client(
                'sns',
                aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
                region_name='us-east-1')
            number = "+1%s" % (up.mobile_phone_number)
            sns.publish(
                PhoneNumber=number,
                Message="Your code is : %s" % (self.code),
                MessageAttributes={
                    'AWS.SNS.SMS.SenderID': {
                        'DataType': 'String',
                        'StringValue': 'MySenderID'
                    }
                }
            )
        elif self.mode == "SMS" and not up.mobile_phone_number:
            logger.info("Cannot send SMS. No phone number on file.")
        elif self.mode == "EMAIL" and self.user.email:
            # Send the code by email to self.user.email.
            mfa_via_email(self.user, self.code)
        elif self.mode == "EMAIL" and not self.user.email:
            logger.info("Cannot send email. No email on file.")
        else:
            # No MFA code sent.
            pass
    super(MFACode, self).save(**kwargs)

def handle_noargs(self, **options):
    conn = connection.S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.create_bucket(settings.AWS_BUCKET)
    conventions = (
        ('republican', 'gop convention'),
        ('democratic', 'democratic convention'),
    )
    for convention, args in conventions:
        events = Event.objects.filter(status='', tags=args)
        event_count = events.count()
        if event_count > 0:
            randdocnum = random.randint(0, event_count - 1)
            event = events[randdocnum]
            content = render_to_string("publicsite/widgets/abc_convention.html",
                                       {"field": "Tags", "args": args, "doc": event, "convention": convention})
            k = key.Key(bucket)
            k.key = 'abc/%s.html' % convention
            k.set_contents_from_string(content, headers={"Content-Type": "text/html"}, replace=True)
            k.set_acl('public-read')

def get_temporary_url(self, ttl=60):
    if hasattr(settings, 'AWS_STORAGE_BUCKET_NAME'):
        s3 = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, is_secure=True)
        key = "{media}/{filename}".format(media=settings.MEDIAFILES_LOCATION, filename=self.file.name)
        return s3.generate_url(ttl, 'GET', bucket=settings.AWS_STORAGE_BUCKET_NAME, key=key)
    return self.file.url

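A hedged sketch of handing the signed URL to a browser from a Django view; the `Document` model and the view itself are assumptions made for illustration:

from django.shortcuts import get_object_or_404, redirect

def download(request, pk):
    # `Document` is a hypothetical model whose instances expose get_temporary_url().
    document = get_object_or_404(Document, pk=pk)
    return redirect(document.get_temporary_url(ttl=300))  # 5-minute signed URL
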
def get_connection_sns():
    region = sns.connect_to_region(settings.AWS_REGION_NAME,
                                   aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                                   aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY).region
    return sns.SNSConnection(aws_access_key_id=settings.AWS_ACCESS_KEY_ID, region=region,
                             aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)

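A hedged example of publishing through the returned connection; the topic ARN and message are placeholders, and `publish` is the standard boto SNSConnection method:

sns_conn = get_connection_sns()
sns_conn.publish(
    topic='arn:aws:sns:us-east-1:123456789012:my-topic',  # placeholder topic ARN
    message='Deployment finished.',
    subject='Status update',
)
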
def render_paper(source, output_path, webhook_url=None):
    """
    Render a source directory using Engrafo.
    """
    try:
        os.makedirs(output_path)
    except FileExistsError:
        pass

    client = create_client()

    labels = {}
    environment = {}
    volumes = {}
    network = None

    # Production
    if settings.MEDIA_USE_S3:
        source = f"s3://{settings.AWS_STORAGE_BUCKET_NAME}/{source}"
        output_path = f"s3://{settings.AWS_STORAGE_BUCKET_NAME}/{output_path}"
        environment['AWS_ACCESS_KEY_ID'] = settings.AWS_ACCESS_KEY_ID
        environment['AWS_SECRET_ACCESS_KEY'] = settings.AWS_SECRET_ACCESS_KEY
        environment['AWS_S3_REGION_NAME'] = settings.AWS_S3_REGION_NAME
    # Development
    else:
        # HACK(bfirsh): MEDIA_ROOT is an absolute path to something on
        # the host machine. We need to make this relative to a mount inside the
        # Docker container.
        docker_media_root = os.path.join(
            '/mnt',
            os.path.basename(settings.MEDIA_ROOT)
        )
        source = os.path.join(docker_media_root, source)
        output_path = os.path.join(docker_media_root, output_path)
        # HOST_PWD is set in docker-compose.yml
        volumes[os.environ['HOST_PWD']] = {'bind': '/mnt', 'mode': 'rw'}
        network = 'arxivvanity_default'

    if settings.ENGRAFO_USE_HYPER_SH:
        labels['sh_hyper_instancetype'] = settings.HYPER_INSTANCE_TYPE

    container = client.containers.run(
        settings.ENGRAFO_IMAGE,
        'sh -c ' + shlex.quote('; '.join(make_command(source, output_path, webhook_url))),
        volumes=volumes,
        environment=environment,
        labels=labels,
        network=network,
        detach=True,
    )
    return container.id

def transcode_video(video, video_file):
    """
    Start a transcode job for a video

    Args:
        video(ui.models.Video): the video to transcode
        video_file(ui.models.Videofile): the s3 file to use for transcoding
    """
    video_input = {
        'Key': video_file.s3_object_key,
    }

    # Generate an output video file for each encoding (assumed to be HLS)
    outputs = [{
        'Key': video.transcode_key(preset),
        'PresetId': preset,
        'SegmentDuration': '10.0'
    } for preset in settings.ET_PRESET_IDS]

    playlists = [{
        'Format': 'HLSv3',
        'Name': video.transcode_key('_index'),
        'OutputKeys': [output['Key'] for output in outputs]
    }]

    # Generate thumbnails for the 1st encoding (no point in doing so for each).
    outputs[0]['ThumbnailPattern'] = THUMBNAIL_PATTERN.format(video_file.s3_basename)

    transcoder = VideoTranscoder(
        settings.ET_PIPELINE_ID,
        settings.AWS_REGION,
        settings.AWS_ACCESS_KEY_ID,
        settings.AWS_SECRET_ACCESS_KEY
    )

    try:
        transcoder.encode(video_input, outputs, Playlists=playlists)
    except ClientError as exc:
        log.error('Transcode job creation failed for video %s', video.id)
        video.update_status(VideoStatus.TRANSCODE_FAILED_INTERNAL)
        if hasattr(exc, 'response'):
            transcoder.message = exc.response
        raise
    finally:
        transcoder.create_job_for_object(video)
        if video.status not in (VideoStatus.TRANSCODE_FAILED_INTERNAL, VideoStatus.TRANSCODE_FAILED_VIDEO, ):
            video.update_status(VideoStatus.TRANSCODING)

def task_status(request, owner, repo_name, change_pk, build_pk, task_slug):
    try:
        task = Task.objects.get(
            build__change__project__repository__owner__login=owner,
            build__change__project__repository__name=repo_name,
            build__change__pk=change_pk,
            build__pk=build_pk,
            slug=task_slug
        )
    except Task.DoesNotExist:
        raise Http404

    try:
        kwargs = {
            'nextToken': request.GET['nextToken']
        }
    except KeyError:
        kwargs = {}

    aws_session = boto3.session.Session(
        region_name=settings.AWS_REGION,
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
    )
    logs = aws_session.client('logs')

    try:
        log_response = logs.get_log_events(
            logGroupName='beekeeper',
            logStreamName=task.log_stream_name,
            **kwargs
        )
        log_data = '\n'.join(
            event['message']
            for event in log_response['events']
        )
        message = None
        next_token = log_response['nextForwardToken']
        no_more_logs = log_response['nextForwardToken'] == kwargs.get('nextToken', None)
    except Exception:
        if task.has_error:
            log_data = None
            message = 'No logs; task did not start.'
            next_token = ''
            no_more_logs = True
        else:
            log_data = None
            message = 'Waiting for logs to become available...'
            next_token = ''
            no_more_logs = False

    return HttpResponse(json.dumps({
        'started': task.has_started,
        'log': log_data,
        'message': message,
        'status': task.full_status_display(),
        'result': task.result,
        'nextToken': next_token,
        'finished': task.is_finished and no_more_logs,
    }), content_type="application/json")