def __init__(self, profile, queue, hook, dry_run, bin_directory=None):
    """Configure the worker: locate the AWS CLI, build the base command
    prefix, and open boto EC2/SQS connections.

    If *bin_directory* is given it is prepended to PATH so the executable
    search below finds binaries installed there first.
    """
    logging.basicConfig(level=logging.INFO)
    self.queue = queue
    self.hook = hook
    self.profile = profile
    if bin_directory:
        os.environ["PATH"] = os.pathsep.join([bin_directory, os.environ["PATH"]])
    # Resolve absolute paths for the interpreter and the aws CLI script.
    self.aws_bin = spawn.find_executable('aws')
    self.python_bin = spawn.find_executable('python')
    # Note: trailing space is intentional — subcommands are appended later.
    self.base_cli_command = "{python_bin} {aws_bin} --profile {profile} ".format(
        python_bin=self.python_bin,
        aws_bin=self.aws_bin,
        profile=self.profile,
    )
    self.dry_run = dry_run
    self.ec2_con = boto.connect_ec2()
    self.sqs_con = boto.connect_sqs()
# Python example sources for connect_sqs() usage (collected snippets)
def get_conn(region=None, access_key_id=None, secret_access_key=None):
    """Open an SQS connection with explicit credentials in the given region."""
    return boto.connect_sqs(
        aws_access_key_id=access_key_id,
        aws_secret_access_key=secret_access_key,
        region=_get_region(region),
    )
def get_or_create_queue(queue_name):
    """Return the named SQS queue, creating it if it does not exist.

    Lazily initializes the module-level ``conn`` on first call.
    """
    global conn
    if conn is None:
        conn = boto.connect_sqs()
    existing = conn.get_queue(queue_name)
    # get_queue yields a falsy value when the queue is missing.
    return existing if existing else conn.create_queue(queue_name)
def test_1_basic(self):
    """Round-trip a BigMessage through SQS with its body stored in S3.

    Integration test: requires live AWS credentials; the sleeps allow
    for AWS eventual consistency between steps.
    """
    c = boto.connect_sqs()
    # create a queue so we can test BigMessage
    queue_name = 'test%d' % int(time.time())
    timeout = 60
    queue = c.create_queue(queue_name, timeout)
    # Cleanups run LIFO, so the bucket (registered below) is removed first.
    self.addCleanup(c.delete_queue, queue, True)
    queue.set_message_class(BigMessage)
    # create a bucket with the same name to store the message in
    s3 = boto.connect_s3()
    bucket = s3.create_bucket(queue_name)
    self.addCleanup(s3.delete_bucket, queue_name)
    time.sleep(30)
    # now add a message
    msg_body = 'This is a test of the big message'
    fp = StringIO(msg_body)
    s3_url = 's3://%s' % queue_name
    message = queue.new_message(fp, s3_url=s3_url)
    queue.write(message)
    time.sleep(30)
    # After write, message.s3_url presumably includes a generated object
    # name appended to the bucket URL — TODO confirm against BigMessage.
    s3_object_name = message.s3_url.split('/')[-1]
    # Make sure msg body is in bucket
    self.assertTrue(bucket.lookup(s3_object_name))
    m = queue.read()
    self.assertEqual(m.get_body().decode('utf-8'), msg_body)
    m.delete()
    time.sleep(30)
    # Make sure msg is deleted from bucket
    self.assertIsNone(bucket.lookup(s3_object_name))
def __init__(self, queue_name):
    """Open an SQS connection and look up the named queue."""
    connection = boto.connect_sqs()
    self.sqs = connection
    self.queue = connection.lookup(queue_name)
def __init__(self, queue_name):
    """Bind this object to the SQS queue named *queue_name*."""
    self.sqs = boto.connect_sqs()
    # lookup resolves the queue handle from its name.
    self.queue = self.sqs.lookup(queue_name)
def __init__(self, queue_name):
    """Connect to SQS and resolve the queue identified by *queue_name*."""
    conn = boto.connect_sqs()
    self.sqs, self.queue = conn, conn.lookup(queue_name)
def get_sqs_conn(conf):
    """Build an SQS connection from *conf*, honoring SQS_REGION when set.

    Raises ValueErrorRetry when a regional connection cannot be opened.
    """
    region = conf.get('SQS_REGION')
    if not region:
        # No region configured: fall back to the default endpoint.
        return boto.connect_sqs(**aws_creds(conf))
    conn = boto.sqs.connect_to_region(region, **aws_creds(conf))
    if conn:
        return conn
    raise ValueErrorRetry("Could not establish SQS connection to region %r" % (region,))
def get_sqs_conn(conf):
    """Return an SQS connection built from *conf*.

    Uses a region-specific endpoint when SQS_REGION is configured;
    raises ValueErrorRetry if that regional connection fails.
    """
    credentials = aws_creds(conf)
    region = conf.get('SQS_REGION')
    if region:
        connection = boto.sqs.connect_to_region(region, **credentials)
        if not connection:
            raise ValueErrorRetry("Could not establish SQS connection to region %r" % (region,))
        return connection
    return boto.connect_sqs(**credentials)
def __init__(self, queue_name):
    """Store an SQS connection and the queue found under *queue_name*."""
    sqs_connection = boto.connect_sqs()
    self.sqs = sqs_connection
    self.queue = sqs_connection.lookup(queue_name)
def queue_json_message(doc, doc_key):
    """Store a JSON descriptor for *doc* in S3 and enqueue a pointer to it.

    A sibling object named ``message-<uuid>.json`` is written next to
    *doc_key*, then a message naming that object is put on REQUEST_QUEUE.
    """
    # Build the sibling key name by swapping out the basename.
    basename = os.path.basename(doc_key.name)
    key_name = doc_key.name.replace(basename, "message-%s.json" % str(uuid4()))
    key = doc_key.bucket.new_key(key_name)
    payload = simplejson.dumps({'bucket': doc_key.bucket.name, 'key': doc_key.name, 'uuid': doc.uuid})
    key.set_contents_from_string(payload)
    msg_body = {'bucket': key.bucket.name, 'key': key.name}
    sqs = boto.connect_sqs(settings.PDF_AWS_KEY, settings.PDF_AWS_SECRET)
    queue = sqs.create_queue(REQUEST_QUEUE)
    queue.write(queue.new_message(body=simplejson.dumps(msg_body)))
def _dequeue_json_message(self):
    """Pop one message from RESPONSE_QUEUE and return its decoded payload.

    Returns the payload dict only when it names both a bucket and a key;
    otherwise (empty queue or incomplete payload) returns None. The
    message is deleted from the queue either way once read.
    """
    sqs = boto.connect_sqs(settings.PDF_AWS_KEY, settings.PDF_AWS_SECRET)
    queue = sqs.create_queue(RESPONSE_QUEUE)
    msg = queue.read()
    if msg is None:
        return None
    data = simplejson.loads(msg.get_body())
    bucket = data.get('bucket', None)
    key = data.get('key', None)
    queue.delete_message(msg)
    if bucket is not None and key is not None:
        return data
    return None
def run(self, **kwargs):
    """Scale out EC2 worker instances to match the request-queue backlog.

    Boots at most MAX_INSTANCES new instances from AMI_ID, one per
    pending message beyond the workers already running. Returns the
    number of instances launched (0 if capacity already suffices).
    """
    ec2 = boto.connect_ec2(settings.PDF_AWS_KEY, settings.PDF_AWS_SECRET)
    sqs = boto.connect_sqs(settings.PDF_AWS_KEY, settings.PDF_AWS_SECRET)
    # create_queue returns the existing queue if it already exists.
    queue = sqs.create_queue(REQUEST_QUEUE)
    # NOTE(review): SQS message counts are approximate — confirm this is
    # acceptable for the scaling decision below.
    num = queue.count()
    launched = 0
    icount = 0
    # Count workers already running from our AMI across all reservations.
    reservations = ec2.get_all_instances()
    for reservation in reservations:
        for instance in reservation.instances:
            if instance.state == "running" and instance.image_id == AMI_ID:
                icount += 1
    # One instance per unserved message, capped at MAX_INSTANCES.
    to_boot = min(num - icount, MAX_INSTANCES)
    if to_boot > 0:
        # Inject credentials and queue names into the instance user-data
        # bootstrap script.
        startup = BOOTSTRAP_SCRIPT % {
            'KEY': settings.PDF_AWS_KEY,
            'SECRET': settings.PDF_AWS_SECRET,
            'RESPONSE_QUEUE': RESPONSE_QUEUE,
            'REQUEST_QUEUE': REQUEST_QUEUE}
        r = ec2.run_instances(
            image_id=AMI_ID,
            min_count=to_boot,
            max_count=to_boot,
            key_name=KEYPAIR,
            security_groups=SECURITY_GROUPS,
            user_data=startup)
        launched = len(r.instances)
    return launched
def __init__(self, queue_name):
    """Connect to SQS and attach to the queue named *queue_name*."""
    connection = boto.connect_sqs()
    self.sqs = connection
    self.queue = connection.lookup(queue_name)
def __init__(self, queue_name):
    """Hold an SQS connection plus the queue resolved from *queue_name*."""
    self.sqs = boto.connect_sqs()
    self.queue = self.sqs.lookup(queue_name)  # queue handle by name