def __init__(self):
    """Initialize the copy bot.

    Sets up per-instance file logging, then resolves the source and
    destination S3 buckets from the boto config.  When separate
    credentials are configured for the destination, a second S3
    connection is opened with them; the destination bucket is created
    if it does not already exist.
    """
    super(CopyBot, self).__init__()
    self.wdir = boto.config.get('Pyami', 'working_dir')
    # Log to <working_dir>/<instance_id>.log
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.wdir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    self.src_name = boto.config.get(self.name, 'src_bucket')
    self.dst_name = boto.config.get(self.name, 'dst_bucket')
    self.replace = boto.config.getbool(self.name, 'replace_dst', True)
    s3 = boto.connect_s3()
    self.src = s3.lookup(self.src_name)
    if not self.src:
        # NOTE(review): only logged -- self.src stays None and later copy
        # steps will fail; consider raising here instead.
        boto.log.error('Source bucket does not exist: %s' % self.src_name)
    dest_access_key = boto.config.get(self.name, 'dest_aws_access_key_id', None)
    if dest_access_key:
        dest_secret_key = boto.config.get(self.name, 'dest_aws_secret_access_key', None)
        # Bug fix: boto has no top-level connect(); use connect_s3() to
        # open a connection with the destination-account credentials.
        s3 = boto.connect_s3(dest_access_key, dest_secret_key)
    # Looked up on the destination connection when one was created above.
    self.dst = s3.lookup(self.dst_name)
    if not self.dst:
        self.dst = s3.create_bucket(self.dst_name)
Python usage examples of the set_file_logger() function
def __init__(self, config_file=None):
    """Initialize the transcoding service.

    Configures per-instance file logging, builds the ffmpeg command
    line from the service config, resolves the output mimetype and
    extension, binds the input/output S3 buckets, and seeds the input
    queue with one message per input file when the queue is empty.
    """
    super(SonOfMMM, self).__init__(config_file)
    # Log to <working_dir>/<instance_id>.log
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.working_dir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    # Custom ffmpeg arguments from config, else a plain input->output run.
    self.command = ('/usr/local/bin/ffmpeg ' + self.sd.get('ffmpeg_args')
                    if self.sd.has_option('ffmpeg_args')
                    else '/usr/local/bin/ffmpeg -y -i %s %s')
    self.output_mimetype = self.sd.get('output_mimetype')
    # Explicit extension from config wins; otherwise derive it from the
    # output mimetype.
    self.output_ext = (self.sd.get('output_ext')
                       if self.sd.has_option('output_ext')
                       else mimetypes.guess_extension(self.output_mimetype))
    self.output_bucket = self.sd.get_obj('output_bucket')
    self.input_bucket = self.sd.get_obj('input_bucket')
    # If the input queue holds no messages yet, enqueue one per file
    # found in the input bucket.
    first_message = self.input_queue.read(1)
    if not first_message:
        self.queue_files()
def __init__(self):
    """Set up the copy bot: file logging plus source/destination buckets.

    Bucket names come from this bot's section of the boto config.  A
    second set of credentials may be supplied for the destination
    account; the destination bucket is created when missing.
    """
    super(CopyBot, self).__init__()
    self.wdir = boto.config.get('Pyami', 'working_dir')
    # Per-instance log file under the Pyami working directory.
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.wdir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    self.src_name = boto.config.get(self.name, 'src_bucket')
    self.dst_name = boto.config.get(self.name, 'dst_bucket')
    self.replace = boto.config.getbool(self.name, 'replace_dst', True)
    s3 = boto.connect_s3()
    self.src = s3.lookup(self.src_name)
    if not self.src:
        # NOTE(review): error is logged but not raised; self.src is None
        # and downstream copying will break.
        boto.log.error('Source bucket does not exist: %s' % self.src_name)
    dest_access_key = boto.config.get(self.name, 'dest_aws_access_key_id', None)
    if dest_access_key:
        dest_secret_key = boto.config.get(self.name, 'dest_aws_secret_access_key', None)
        # Bug fix: replaced nonexistent boto.connect() with
        # boto.connect_s3() for the destination credentials.
        s3 = boto.connect_s3(dest_access_key, dest_secret_key)
    self.dst = s3.lookup(self.dst_name)
    if not self.dst:
        self.dst = s3.create_bucket(self.dst_name)
def __init__(self, config_file=None):
    """Initialize the ffmpeg transcoding bot.

    Wires up file logging, the ffmpeg command template, the output
    mimetype/extension, and the input/output buckets; primes the input
    queue from the input bucket when it holds no messages.
    """
    super(SonOfMMM, self).__init__(config_file)
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.working_dir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    # Prefer configured ffmpeg arguments over the default template.
    has_args = self.sd.has_option('ffmpeg_args')
    if has_args:
        self.command = '/usr/local/bin/ffmpeg ' + self.sd.get('ffmpeg_args')
    else:
        self.command = '/usr/local/bin/ffmpeg -y -i %s %s'
    self.output_mimetype = self.sd.get('output_mimetype')
    # Extension: explicit config value, or guessed from the mimetype.
    if not self.sd.has_option('output_ext'):
        self.output_ext = mimetypes.guess_extension(self.output_mimetype)
    else:
        self.output_ext = self.sd.get('output_ext')
    self.output_bucket = self.sd.get_obj('output_bucket')
    self.input_bucket = self.sd.get_obj('input_bucket')
    # Seed the work queue with all input files if it is currently empty.
    pending = self.input_queue.read(1)
    if not pending:
        self.queue_files()
def __init__(self):
    """Initialize the copy bot (ScriptBase variant).

    Configures file logging and resolves the source and destination S3
    buckets, optionally using separate destination-account credentials;
    creates the destination bucket when it does not exist.
    """
    ScriptBase.__init__(self)
    self.wdir = boto.config.get('Pyami', 'working_dir')
    # Log to <working_dir>/<instance_id>.log
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.wdir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    self.src_name = boto.config.get(self.name, 'src_bucket')
    self.dst_name = boto.config.get(self.name, 'dst_bucket')
    self.replace = boto.config.getbool(self.name, 'replace_dst', True)
    s3 = boto.connect_s3()
    self.src = s3.lookup(self.src_name)
    if not self.src:
        # NOTE(review): only logged; self.src remains None afterwards.
        boto.log.error('Source bucket does not exist: %s' % self.src_name)
    dest_access_key = boto.config.get(self.name, 'dest_aws_access_key_id', None)
    if dest_access_key:
        dest_secret_key = boto.config.get(self.name, 'dest_aws_secret_access_key', None)
        # Bug fix: boto.connect() does not exist -- connect_s3() is the
        # correct call for an S3 connection with explicit credentials.
        s3 = boto.connect_s3(dest_access_key, dest_secret_key)
    self.dst = s3.lookup(self.dst_name)
    if not self.dst:
        self.dst = s3.create_bucket(self.dst_name)
def __init__(self, config_file=None):
    """Initialize the transcoding service (Service variant).

    Sets up logging, the ffmpeg command line, output type/extension,
    and the S3 buckets; fills the input queue from the input bucket if
    no messages are waiting.
    """
    Service.__init__(self, config_file)
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.working_dir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    # ffmpeg invocation: configured arguments, or the default template.
    self.command = ('/usr/local/bin/ffmpeg ' + self.sd.get('ffmpeg_args')
                    if self.sd.has_option('ffmpeg_args')
                    else '/usr/local/bin/ffmpeg -y -i %s %s')
    self.output_mimetype = self.sd.get('output_mimetype')
    # Output extension: explicit config entry beats the mimetype guess.
    self.output_ext = (self.sd.get('output_ext')
                       if self.sd.has_option('output_ext')
                       else mimetypes.guess_extension(self.output_mimetype))
    self.output_bucket = self.sd.get_obj('output_bucket')
    self.input_bucket = self.sd.get_obj('input_bucket')
    # Empty queue -> enqueue one message per file in the input bucket.
    head = self.input_queue.read(1)
    if not head:
        self.queue_files()
def __init__(self):
    """Initialize the copy bot (ScriptBase variant).

    Sets up per-instance file logging, then looks up the source bucket
    and looks up or creates the destination bucket, optionally on a
    connection opened with destination-account credentials.
    """
    ScriptBase.__init__(self)
    self.wdir = boto.config.get('Pyami', 'working_dir')
    # Per-instance log file under the Pyami working directory.
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.wdir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    self.src_name = boto.config.get(self.name, 'src_bucket')
    self.dst_name = boto.config.get(self.name, 'dst_bucket')
    self.replace = boto.config.getbool(self.name, 'replace_dst', True)
    s3 = boto.connect_s3()
    self.src = s3.lookup(self.src_name)
    if not self.src:
        # NOTE(review): logged but not raised; self.src stays None.
        boto.log.error('Source bucket does not exist: %s' % self.src_name)
    dest_access_key = boto.config.get(self.name, 'dest_aws_access_key_id', None)
    if dest_access_key:
        dest_secret_key = boto.config.get(self.name, 'dest_aws_secret_access_key', None)
        # Bug fix: use connect_s3() -- there is no boto.connect().
        s3 = boto.connect_s3(dest_access_key, dest_secret_key)
    self.dst = s3.lookup(self.dst_name)
    if not self.dst:
        self.dst = s3.create_bucket(self.dst_name)
def __init__(self, config_file=None):
    """Initialize the ffmpeg transcoding service.

    Establishes file logging, derives the ffmpeg command and output
    extension from the service config, binds the S3 buckets, and seeds
    the input queue when it is empty.
    """
    Service.__init__(self, config_file)
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.working_dir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    # Use configured ffmpeg arguments when present.
    if not self.sd.has_option('ffmpeg_args'):
        self.command = '/usr/local/bin/ffmpeg -y -i %s %s'
    else:
        self.command = '/usr/local/bin/ffmpeg ' + self.sd.get('ffmpeg_args')
    self.output_mimetype = self.sd.get('output_mimetype')
    # Extension from config when given, otherwise guess from mimetype.
    if self.sd.has_option('output_ext'):
        self.output_ext = self.sd.get('output_ext')
    else:
        self.output_ext = mimetypes.guess_extension(self.output_mimetype)
    self.output_bucket = self.sd.get_obj('output_bucket')
    self.input_bucket = self.sd.get_obj('input_bucket')
    # No pending messages -> enqueue every file in the input bucket.
    msg = self.input_queue.read(1)
    if not msg:
        self.queue_files()
def __init__(self):
    """Initialize the copy bot.

    Sets up file logging and resolves the source and destination S3
    buckets from the boto config; supports separate credentials for
    the destination account and creates the destination bucket when it
    does not already exist.
    """
    super(CopyBot, self).__init__()
    self.wdir = boto.config.get('Pyami', 'working_dir')
    # Log to <working_dir>/<instance_id>.log
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.wdir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    self.src_name = boto.config.get(self.name, 'src_bucket')
    self.dst_name = boto.config.get(self.name, 'dst_bucket')
    self.replace = boto.config.getbool(self.name, 'replace_dst', True)
    s3 = boto.connect_s3()
    self.src = s3.lookup(self.src_name)
    if not self.src:
        # NOTE(review): missing source bucket is only logged here.
        boto.log.error('Source bucket does not exist: %s' % self.src_name)
    dest_access_key = boto.config.get(self.name, 'dest_aws_access_key_id', None)
    if dest_access_key:
        dest_secret_key = boto.config.get(self.name, 'dest_aws_secret_access_key', None)
        # Bug fix: boto exposes connect_s3(), not connect().
        s3 = boto.connect_s3(dest_access_key, dest_secret_key)
    self.dst = s3.lookup(self.dst_name)
    if not self.dst:
        self.dst = s3.create_bucket(self.dst_name)
def __init__(self, config_file=None):
    """Initialize the transcoding bot.

    Sets up logging, composes the ffmpeg command line, resolves the
    output mimetype/extension and the S3 buckets, and primes the input
    queue from the input bucket when no messages exist yet.
    """
    super(SonOfMMM, self).__init__(config_file)
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.working_dir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    # Configured ffmpeg arguments take precedence over the default run.
    custom_args = self.sd.has_option('ffmpeg_args')
    self.command = ('/usr/local/bin/ffmpeg ' + self.sd.get('ffmpeg_args')
                    if custom_args
                    else '/usr/local/bin/ffmpeg -y -i %s %s')
    self.output_mimetype = self.sd.get('output_mimetype')
    # Pick the configured extension, falling back to a mimetype guess.
    self.output_ext = (self.sd.get('output_ext')
                       if self.sd.has_option('output_ext')
                       else mimetypes.guess_extension(self.output_mimetype))
    self.output_bucket = self.sd.get_obj('output_bucket')
    self.input_bucket = self.sd.get_obj('input_bucket')
    # Seed the queue only when nothing is waiting to be processed.
    waiting = self.input_queue.read(1)
    if not waiting:
        self.queue_files()
def __init__(self):
    """Initialize the copy bot (ScriptBase variant).

    Configures file logging, then resolves the source bucket and
    resolves or creates the destination bucket, optionally connecting
    with separate destination-account credentials.
    """
    ScriptBase.__init__(self)
    self.wdir = boto.config.get('Pyami', 'working_dir')
    # Per-instance log file in the Pyami working directory.
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.wdir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    self.src_name = boto.config.get(self.name, 'src_bucket')
    self.dst_name = boto.config.get(self.name, 'dst_bucket')
    self.replace = boto.config.getbool(self.name, 'replace_dst', True)
    s3 = boto.connect_s3()
    self.src = s3.lookup(self.src_name)
    if not self.src:
        # NOTE(review): error path continues with self.src = None.
        boto.log.error('Source bucket does not exist: %s' % self.src_name)
    dest_access_key = boto.config.get(self.name, 'dest_aws_access_key_id', None)
    if dest_access_key:
        dest_secret_key = boto.config.get(self.name, 'dest_aws_secret_access_key', None)
        # Bug fix: boto.connect() is not a real function; connect_s3()
        # accepts (access_key, secret_key) positionally.
        s3 = boto.connect_s3(dest_access_key, dest_secret_key)
    self.dst = s3.lookup(self.dst_name)
    if not self.dst:
        self.dst = s3.create_bucket(self.dst_name)
def __init__(self, config_file=None):
    """Initialize the transcoding service (Service variant).

    Establishes per-instance file logging, builds the ffmpeg command,
    determines the output mimetype and extension, binds the S3 buckets,
    and enqueues all input files if the queue starts out empty.
    """
    Service.__init__(self, config_file)
    self.log_file = '%s.log' % self.instance_id
    self.log_path = os.path.join(self.working_dir, self.log_file)
    boto.set_file_logger(self.name, self.log_path)
    # Build the ffmpeg command line; config args override the template.
    if self.sd.has_option('ffmpeg_args'):
        ffmpeg_args = self.sd.get('ffmpeg_args')
        self.command = '/usr/local/bin/ffmpeg ' + ffmpeg_args
    else:
        self.command = '/usr/local/bin/ffmpeg -y -i %s %s'
    self.output_mimetype = self.sd.get('output_mimetype')
    # Resolve the output extension from config or from the mimetype.
    self.output_ext = (self.sd.get('output_ext')
                       if self.sd.has_option('output_ext')
                       else mimetypes.guess_extension(self.output_mimetype))
    self.output_bucket = self.sd.get_obj('output_bucket')
    self.input_bucket = self.sd.get_obj('input_bucket')
    # Check for pending work; seed the queue from the bucket if none.
    probe = self.input_queue.read(1)
    if not probe:
        self.queue_files()