def test_lambda_function_bad_error_propagates(self, stubbed_session):
stubbed_session.stub('lambda').get_function(FunctionName='myappname')\
.raises_error(error_code='UnexpectedError',
message='Unknown')
stubbed_session.activate_stubs()
awsclient = TypedAWSClient(stubbed_session)
with pytest.raises(botocore.exceptions.ClientError):
awsclient.lambda_function_exists(name='myappname')
stubbed_session.verify_stubs()
def test_unexpected_error_is_propagated(self, stubbed_session):
stubbed_session.stub('iam').get_role(RoleName='Yes').raises_error(
error_code='InternalError',
message='Foo')
stubbed_session.activate_stubs()
awsclient = TypedAWSClient(stubbed_session)
with pytest.raises(botocore.exceptions.ClientError):
awsclient.get_role_arn_for_name(name='Yes')
stubbed_session.verify_stubs()
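# A hedged sketch (assumed, not necessarily the actual TypedAWSClient code) of
# the kind of method these stubs exercise: only the "not found" error is
# swallowed, so any other ClientError propagates, which is what the two tests
# above assert.
def lambda_function_exists(self, name):
    client = self._client('lambda')
    try:
        client.get_function(FunctionName=name)
        return True
    except client.exceptions.ResourceNotFoundException:
        return False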
def copy_on_s3(self, src_file_name, dst_file_name, bucket_name):
"""
Copies src file to destination within a bucket.
"""
try:
self.s3_client.head_bucket(Bucket=bucket_name)
except botocore.exceptions.ClientError as e: # pragma: no cover
# If a client error is thrown, then check that it was a 404 error.
# If it was a 404 error, then the bucket does not exist.
error_code = int(e.response['Error']['Code'])
if error_code == 404:
return False
copy_src = {
"Bucket": bucket_name,
"Key": src_file_name
}
try:
self.s3_client.copy(
CopySource=copy_src,
Bucket=bucket_name,
Key=dst_file_name
)
return True
except botocore.exceptions.ClientError: # pragma: no cover
return False
def remove_from_s3(self, file_name, bucket_name):
    """
    Given a file name and a bucket, remove it from S3.
    There's no reason to keep the file hosted on S3 once it's been made
    into a Lambda function, so we can delete it from S3.
    Returns True on success, False on failure.
    """
try:
self.s3_client.head_bucket(Bucket=bucket_name)
except botocore.exceptions.ClientError as e: # pragma: no cover
# If a client error is thrown, then check that it was a 404 error.
# If it was a 404 error, then the bucket does not exist.
error_code = int(e.response['Error']['Code'])
if error_code == 404:
return False
try:
self.s3_client.delete_object(Bucket=bucket_name, Key=file_name)
return True
except botocore.exceptions.ClientError: # pragma: no cover
return False
##
# Lambda
##
def delete_rule(self, rule_name):
    """
    Delete a CloudWatch Events (CWE) rule.
    The rule is deleted, but it may still show up in the AWS console.
    Annoying.
    """
logger.debug('Deleting existing rule {}'.format(rule_name))
# All targets must be removed before
# we can actually delete the rule.
try:
targets = self.events_client.list_targets_by_rule(Rule=rule_name)
except botocore.exceptions.ClientError as e:
# This avoids misbehavior if low permissions, related: https://github.com/Miserlou/Zappa/issues/286
error_code = e.response['Error']['Code']
if error_code == 'AccessDeniedException':
raise
else:
logger.debug('No target found for this rule: {} {}'.format(rule_name, e.args[0]))
return
if 'Targets' in targets and targets['Targets']:
self.events_client.remove_targets(Rule=rule_name, Ids=[x['Id'] for x in targets['Targets']])
else: # pragma: no cover
logger.debug('No target to delete')
# Delete our rule.
self.events_client.delete_rule(Name=rule_name)
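# Hedged usage sketch: the rules to delete are typically discovered first, for
# example by name prefix. `events_client` is the same client used above; the
# prefix and this wrapper method are assumptions, not from the original project.
def delete_rules_with_prefix(self, prefix):
    rules = self.events_client.list_rules(NamePrefix=prefix).get('Rules', [])
    for rule in rules:
        self.delete_rule(rule['Name'])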
def create_async_dynamodb_table(self, table_name, read_capacity, write_capacity):
"""
Create the DynamoDB table for async task return values
"""
try:
dynamodb_table = self.dynamodb_client.describe_table(TableName=table_name)
return False, dynamodb_table
# catch this exception (triggered if the table doesn't exist)
except botocore.exceptions.ClientError:
dynamodb_table = self.dynamodb_client.create_table(
AttributeDefinitions=[
{
'AttributeName': 'id',
'AttributeType': 'S'
}
],
TableName=table_name,
KeySchema=[
{
'AttributeName': 'id',
'KeyType': 'HASH'
},
],
ProvisionedThroughput = {
'ReadCapacityUnits': read_capacity,
'WriteCapacityUnits': write_capacity
}
)
if dynamodb_table:
try:
self._set_async_dynamodb_table_ttl(table_name)
except botocore.exceptions.ClientError:
# this fails because the operation is async, so retry
time.sleep(10)
self._set_async_dynamodb_table_ttl(table_name)
return True, dynamodb_table
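# `_set_async_dynamodb_table_ttl` is referenced above but not shown; a minimal
# sketch using the DynamoDB UpdateTimeToLive API (the `ttl` attribute name is
# an assumption) could look like this.
def _set_async_dynamodb_table_ttl(self, table_name):
    self.dynamodb_client.update_time_to_live(
        TableName=table_name,
        TimeToLiveSpecification={
            'Enabled': True,
            'AttributeName': 'ttl'
        }
    )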
def c_table(TableName, **kwargs):  # handles the creation of a table with error checking
    # kwargs optionally passes in boto3 sessions for multithreading
    try:
        db_r.create_table(**t_conf(TableName))
        print("INFO :: Waiting for Table [%s] to complete..." % TableName)
        db_r.Table(TableName).wait_until_exists()
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] == "ResourceInUseException":
            db_r.Table(TableName).delete()
            print("INFO :: Learning Online %s Table exists, waiting for delete ..." % TableName)
            db_r.Table(TableName).wait_until_not_exists()
            c_table(TableName, **kwargs)
        else:
            raise
#------------------------------------------------------------------------------
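# `db_r` and `t_conf` are module-level names defined elsewhere in the original
# source; a minimal sketch of what they might look like (the key schema and
# throughput values are assumptions):
import boto3

db_r = boto3.resource('dynamodb')

def t_conf(TableName):
    # kwargs handed straight to create_table above
    return {
        'TableName': TableName,
        'AttributeDefinitions': [{'AttributeName': 'id', 'AttributeType': 'S'}],
        'KeySchema': [{'AttributeName': 'id', 'KeyType': 'HASH'}],
        'ProvisionedThroughput': {'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5},
    }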
def check_for_200_error(response, **kwargs):
# From: http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectCOPY.html
# There are two opportunities for a copy request to return an error. One
# can occur when Amazon S3 receives the copy request and the other can
# occur while Amazon S3 is copying the files. If the error occurs before
# the copy operation starts, you receive a standard Amazon S3 error. If the
# error occurs during the copy operation, the error response is embedded in
# the 200 OK response. This means that a 200 OK response can contain either
# a success or an error. Make sure to design your application to parse the
# contents of the response and handle it appropriately.
#
# So this handler checks for this case. Even though the server sends a
# 200 response, conceptually this should be handled exactly like a
# 500 response (with respect to raising exceptions, retries, etc.)
# We're connected *before* all the other retry logic handlers, so as long
# as we switch the error code to 500, we'll retry the error as expected.
if response is None:
# A None response can happen if an exception is raised while
# trying to retrieve the response. See Endpoint._get_response().
return
http_response, parsed = response
if _looks_like_special_case_error(http_response):
logger.debug("Error found for response with 200 status code, "
"errors: %s, changing status code to "
"500.", parsed)
http_response.status_code = 500
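# In botocore this handler is hooked into the retry machinery through the event
# system; a hedged sketch of registering a handler like this on a plain boto3
# client (the event name mirrors botocore's built-in registration for S3 copy):
import boto3

s3 = boto3.client('s3')
s3.meta.events.register('needs-retry.s3.CopyObject', check_for_200_error)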
def test_credentials(self):
try:
boto3.client('sts',
aws_access_key_id=self.access_key_id,
aws_secret_access_key=self.secret_access_key,
region_name=self.region_name).get_caller_identity()['Arn']
except botocore.exceptions.ClientError as e:
if e.response["Error"]["Code"] == "AuthFailure" or \
e.response["Error"]["Code"] == "InvalidClientTokenId":
raise AWSException("Invalid AWS access key id or aws secret access key")
except Exception as e:
raise AWSException(str(e))
def test_ssh_key(self, key_name, identity_file):
client = None
self.key_name = key_name
self.identity_file = identity_file
try:
client = boto3.client('ec2',
aws_access_key_id=self.access_key_id,
aws_secret_access_key=self.secret_access_key,
region_name=self.region_name)
except botocore.exceptions.ClientError as e:
if e.response["Error"]["Code"] == "AuthFailure":
raise AWSException("Invalid AWS access key id or aws secret access key")
except Exception as e:
raise AWSException("There was an error connecting to EC2: %s" % e)
# Search EC2 for the key-name
try:
client.describe_key_pairs(KeyNames=[self.key_name])
except botocore.exceptions.ClientError as e:
if e.response["Error"]["Code"] == "AuthFailure":
raise AWSException("Invalid AWS access key id or aws secret access key")
elif e.response["Error"]["Code"] == "InvalidKeyPair.NotFound":
raise AWSException("Key %s not found on AWS" % self.key_name)
else:
raise AWSException("There was an error describing the SSH key pairs: %s" %
e.response["Error"]["Message"])
# Verify the identity file exists
if not os.path.isfile(self.identity_file):
raise AWSException("Key identity file %s not found" % self.identity_file)
def create_ssh_key(self, email_address, file_path):
try:
client = boto3.client('ec2',
aws_access_key_id=self.access_key_id,
aws_secret_access_key=self.secret_access_key,
region_name=self.region_name)
except Exception as e:
raise AWSException("There was an error connecting to EC2: %s" % e)
self.key_name = "%s_%s_%s" % (str(email_address.split("@")[0]),
str(socket.gethostname()),
str(int(time.time())))
self.identity_file = file_path + "/" + self.key_name + ".pem"
# Create an EC2 key pair
try:
key = client.create_key_pair(KeyName=self.key_name)
with open(self.identity_file, 'a') as out:
out.write(key['KeyMaterial'] + '\n')
except botocore.exceptions.ClientError as e:
if e.response["Error"]["Code"] == "AuthFailure":
raise AWSException("Invalid AWS access key id or aws secret access key")
else:
raise AWSException("There was an error creating a new SSH key pair: %s" %
e.response["Error"]["Message"])
except Exception as e:
raise AWSException("Unknown Error: %s" % e)
# Verify the key pair was saved locally
if not os.path.isfile(self.identity_file):
raise AWSException("SSH key %s not saved" % self.identity_file)
def get_account_id(self):
try:
client = boto3.client('sts',
aws_access_key_id=self.access_key_id,
aws_secret_access_key=self.secret_access_key,
region_name=self.region_name)
except Exception as e:
raise AWSException("There was an error connecting to EC2: %s" % e)
try:
return client.get_caller_identity()["Account"]
except botocore.exceptions.ClientError as e:
raise AWSException("There was an error getting the Account ID: %s" %
e.response["Error"]["Message"])
def update(args):
"""update or create a stack in AWS."""
stack = args.stack
if stack not in local_stacks():
LOG.error('no such stack: ' + stack)
return
if stack not in remote_stacks().keys() and not args.create_missing:
LOG.warning(
'stack ' + stack + ' does not exist in AWS, add --create_missing to create a new stack')
return
# read template and parameters
tpl_body = load_template(stack, True)
params = load_parameters(stack)
# action
cfn = get_cfn()
last_event = None
try:
if stack in remote_stacks().keys():
LOG.info('updating stack %s', stack)
last_event = fetch_all_stack_events(stack)[-1]['Timestamp']
stack_id = cfn.update_stack(
StackName=stack,
TemplateBody=tpl_body,
Parameters=params,
Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM']
)['StackId']
            LOG.info('updated stack with physical id %s', stack_id)
else:
LOG.info('creating stack %s', stack)
stack_id = cfn.create_stack(
StackName=stack,
TemplateBody=tpl_body,
Parameters=params,
Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM']
)['StackId']
LOG.info('created stack with physical id %s', stack_id)
except botocore.exceptions.ClientError as err:
LOG.warning(str(err))
return
except botocore.exceptions.ParamValidationError as err:
LOG.warning(str(err))
return
# synchronous mode
if args.wait or args.events:
wait(stack, show_events=args.events, last_event=last_event)
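# update() relies on several helpers defined elsewhere in the original module
# (get_cfn, remote_stacks, local_stacks, load_template, load_parameters, wait,
# fetch_all_stack_events). A minimal sketch (assumed, not the project's actual
# code) of two of them:
import boto3

def get_cfn():
    # plain CloudFormation client; the real helper may handle profiles/regions
    return boto3.client('cloudformation')

def remote_stacks():
    # map of stack name -> status for stacks that currently exist in AWS
    cfn = get_cfn()
    stacks = {}
    for page in cfn.get_paginator('describe_stacks').paginate():
        for s in page['Stacks']:
            stacks[s['StackName']] = s['StackStatus']
    return stacks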
def get_auth_instance(self, signing_name, region_name,
signature_version=None, **kwargs):
"""
Get an auth instance which can be used to sign a request
using the given signature version.
:type signing_name: string
:param signing_name: Service signing name. This is usually the
same as the service name, but can differ. E.g.
``emr`` vs. ``elasticmapreduce``.
:type region_name: string
:param region_name: Name of the service region, e.g. ``us-east-1``
:type signature_version: string
:param signature_version: Signature name like ``v4``.
:rtype: :py:class:`~botocore.auth.BaseSigner`
:return: Auth instance to sign a request.
"""
if signature_version is None:
signature_version = self._signature_version
cls = botocore.auth.AUTH_TYPE_MAPS.get(signature_version)
if cls is None:
raise UnknownSignatureVersionError(
signature_version=signature_version)
# If there's no credentials provided (i.e credentials is None),
# then we'll pass a value of "None" over to the auth classes,
# which already handle the cases where no credentials have
# been provided.
frozen_credentials = None
if self._credentials is not None:
frozen_credentials = self._credentials.get_frozen_credentials()
kwargs['credentials'] = frozen_credentials
if cls.REQUIRES_REGION:
if self._region_name is None:
raise botocore.exceptions.NoRegionError()
kwargs['region_name'] = region_name
kwargs['service_name'] = signing_name
auth = cls(**kwargs)
return auth
# Alias get_auth for backwards compatibility.
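# In botocore's signers module the comment above is followed by the alias itself:
get_auth = get_auth_instance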
def upload(self, file_name, remote_path):
    """
    Uploads given file to S3
    :param file_name: Path to the file that will be uploaded
    :param remote_path: S3 key (path within the bucket) where the file will
        be uploaded
    :return: VersionId of the latest upload
    """
if self.prefix and len(self.prefix) > 0:
remote_path = "{0}/{1}".format(self.prefix, remote_path)
# Check if a file with same data exists
if not self.force_upload and self.file_exists(remote_path):
LOG.debug("File with same data is already exists at {0}. "
"Skipping upload".format(remote_path))
return self.make_url(remote_path)
try:
# Default to regular server-side encryption unless customer has
# specified their own KMS keys
additional_args = {
"ServerSideEncryption": "AES256"
}
if self.kms_key_id:
additional_args["ServerSideEncryption"] = "aws:kms"
additional_args["SSEKMSKeyId"] = self.kms_key_id
print_progress_callback = \
ProgressPercentage(file_name, remote_path)
future = self.transfer_manager.upload(file_name,
self.bucket_name,
remote_path,
additional_args,
[print_progress_callback])
future.result()
return self.make_url(remote_path)
except botocore.exceptions.ClientError as ex:
error_code = ex.response["Error"]["Code"]
if error_code == "NoSuchBucket":
raise exceptions.NoSuchBucketError(
bucket_name=self.bucket_name)
raise ex
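# `file_exists` and `make_url` are other methods on the same uploader class and
# are not shown above; a hedged sketch (assuming the class also holds a plain
# client as `self.s3`, and an s3:// URL format) could look like this.
def file_exists(self, remote_path):
    try:
        self.s3.head_object(Bucket=self.bucket_name, Key=remote_path)
        return True
    except botocore.exceptions.ClientError:
        return False

def make_url(self, obj_path):
    return "s3://{0}/{1}".format(self.bucket_name, obj_path)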