backup_project.py source code

Language: python

Project: django-green-grove · Author: dreipol
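
The back_up_bucket method below copies every non-empty key from the project's S3 storage bucket into a timestamped sub directory of a dedicated backup bucket, using the legacy boto library:
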
# Requires the legacy `boto` library, Django's `settings`, and a module-level `logger`.
def back_up_bucket(self):
        logger.info('Start backing up the bucket data.')

        # Open an S3 connection with the credentials and host from the Django settings.
        boto_connection = boto.connect_s3(
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
            host=settings.AWS_S3_HOST,
        )
        source_bucket = boto_connection.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
        destination_bucket = boto_connection.get_bucket(settings.BACKUP_BUCKET_BUCKET_NAME)
        # Keys are copied into a timestamped sub directory of the backup location.
        destination_sub_directory = '{location}/{timestamp}'.format(location=settings.BACKUP_BUCKET_LOCATION,
                                                                    timestamp=self.timestamp)

        # Build the list of keys to copy, skipping keys that have no size.
        try:
            key_list = [source_key.key for source_key in source_bucket.list() if source_key.size]
        except ValueError:
            raise ValueError('The backup task was aborted because of some bucket keys with no size. Set '
                             '`DJANGO_GREEN_GROVE_EMPTY_S3_KEYS` in your settings to get a list of the keys.')

        # Optionally log which empty keys were skipped during the backup.
        if hasattr(settings, 'DJANGO_GREEN_GROVE_EMPTY_S3_KEYS'):
            error_message = 'Some bucket keys were ignored during the backup task because they have no size'
            try:
                empty_keys = [source_key.key for source_key in source_bucket.list() if not source_key.size]
                error_message += ': %s' % ', '.join(empty_keys)
            except Exception:
                error_message += '.'

            logger.error(error_message)

        # Server-side copy of each key into the timestamped backup prefix.
        for key in key_list:
            new_key_name = '{sub_directory}/{name}'.format(sub_directory=destination_sub_directory, name=key)
            destination_bucket.copy_key(
                new_key_name=new_key_name,
                src_bucket_name=source_bucket.name,
                src_key_name=key
            )

        logger.info('Bucket data successfully copied to the target storage backend.')
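
boto is the legacy AWS SDK, so for reference, here is how the same server-side copy might look with boto3. This is a minimal stand-alone sketch, not part of django-green-grove: the function name, its parameters, and the timestamp format are illustrative, and credentials are assumed to come from the standard boto3 environment or IAM configuration.

import datetime

import boto3


def back_up_bucket(source_name, backup_name, location):
    """Copy every non-empty key into a timestamped prefix of the backup bucket."""
    s3 = boto3.resource('s3')
    # Illustrative timestamp format; the original class builds self.timestamp elsewhere.
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M')
    prefix = '{location}/{timestamp}'.format(location=location, timestamp=timestamp)

    source = s3.Bucket(source_name)
    backup = s3.Bucket(backup_name)
    for obj in source.objects.all():
        if not obj.size:  # skip empty keys, mirroring the boto version above
            continue
        # Bucket.copy performs a server-side copy, like copy_key() in boto 2.
        backup.copy({'Bucket': source_name, 'Key': obj.key},
                    '{prefix}/{key}'.format(prefix=prefix, key=obj.key))

One practical difference: boto3's Bucket.copy goes through the transfer manager, which switches to multipart copies for large objects automatically, so it also handles keys above the 5 GB single-request copy limit.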