import os

# `S3`, `setting`, and `logger` are assumed module-level helpers from the
# surrounding project (an S3 wrapper, a config reader, and a logging.Logger).


def delete_archived_log(target_date):
    # Remove the zipped archive for the given date from the archive prefix.
    s3 = S3(setting.get('elb_log_s3', 'bucket'))
    prefix = setting.get('elb_log_s3', 'archived_log_key_prefix')
    key_name = os.path.join(prefix, '%s.zip' % target_date)
    s3.delete(key_name)
    logger.info('Delete object: %s', key_name)
def upload_to_s3(filename, log_date):
    # Upload the zipped archive for log_date under the archive prefix.
    s3 = S3(setting.get('elb_log_s3', 'bucket'))
    prefix = setting.get('elb_log_s3', 'archived_log_key_prefix')
    key_name = os.path.join(prefix, '%s.zip' % log_date)
    s3.upload(key_name, filename)
    logger.info('Upload %s', key_name)
from concurrent import futures


def delete_logs(key_names):
    # Delete the raw log objects in parallel; each delete is an independent,
    # I/O-bound S3 call. Note that exceptions raised inside submitted tasks
    # are silently dropped unless the returned futures are checked.
    s3 = S3(setting.get('elb_log_s3', 'bucket'))
    with futures.ThreadPoolExecutor(max_workers=10) as executor:
        for key_name in key_names:
            executor.submit(s3.delete, key_name)
    logger.info('Delete archived logs')
def setUp(self):
    # Assumes module-level `import os` and
    # `from unittest.mock import MagicMock` (or the `mock` package).
    self.bucket_name = 'qq'
    self.s3 = S3(self.bucket_name)
    # Replace the real boto client with a mock so tests make no network calls.
    self.s3.client = MagicMock()
    self.filename = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'lena.jpg')
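# A hedged sketch of a test method that could build on this fixture. The name
# `test_upload` and the assumption that S3.upload(key, filename) delegates to
# the boto client's upload_file(filename, bucket, key) are illustrative only.
def test_upload(self):
    self.s3.upload('archived/2016-01-31.zip', self.filename)
    self.s3.client.upload_file.assert_called_once_with(
        self.filename, self.bucket_name, 'archived/2016-01-31.zip')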
def setUp(self, mock_boto_s3_module):
    self.bucket_name = 'qq'
    self.s3 = S3(self.bucket_name)
    self.s3.client = MagicMock()
    # Stub the client's paginator to return a single page of two objects,
    # so listing can be exercised without touching the network.
    self.s3.client.get_paginator.return_value.paginate.return_value = [{
        'Contents': [{'Key': 'key1'}, {'Key': 'key2'}],
    }]
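# A sketch of a test that exercises the stubbed paginator above; the method
# name `test_list` and the assumption that S3.list() yields the object keys
# from each page's 'Contents' are illustrative, not from the original code.
def test_list(self):
    keys = list(self.s3.list('some/prefix'))
    self.assertEqual(keys, ['key1', 'key2'])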
from concurrent import futures
from datetime import datetime


def download_logs_of_a_date(log_date, output_folder):
    # Build the day's key prefix, e.g. '<log_key_prefix>2016/01/31'.
    log_date = datetime.strptime(log_date, '%Y-%m-%d')
    key_prefix = setting.get('elb_log_s3', 'log_key_prefix')
    key_prefix = ''.join([key_prefix, log_date.strftime('%Y/%m/%d')])
    s3 = S3(setting.get('elb_log_s3', 'bucket'))
    key_names = []
    # Download every matching object in parallel (`download_log` is another
    # project helper), collecting the key names so callers can later delete
    # the raw logs once they are archived.
    with futures.ThreadPoolExecutor(max_workers=10) as executor:
        for key_name in s3.list(key_prefix):
            executor.submit(download_log, s3, key_name, output_folder)
            key_names.append(key_name)
    logger.info('Download all logs on %s', log_date.isoformat())
    return key_names
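# How the pieces above might fit together for one day of logs; a sketch under
# assumptions: `shutil.make_archive` (stdlib) zips the folder, and the
# function name `archive_one_day` plus the temp-dir handling are invented here.
import shutil
import tempfile


def archive_one_day(log_date):
    output_folder = tempfile.mkdtemp()
    key_names = download_logs_of_a_date(log_date, output_folder)
    # make_archive(base_name, 'zip', root_dir) writes '<base_name>.zip'
    # and returns the resulting path.
    zip_path = shutil.make_archive(output_folder, 'zip', output_folder)
    upload_to_s3(zip_path, log_date)
    delete_logs(key_names)  # drop the raw per-request logs once archived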
def download_elb_log(self, bucket_name, key_name, output_fn):
    # Fetch a single ELB log object from S3 and save it to output_fn.
    s3 = S3(bucket_name)
    s3.download(key_name, output_fn)
def setUp(self, mock_boto_s3_module):
    self.bucket_name = 'qq'
    self.s3 = S3(self.bucket_name)
    self.s3.client = MagicMock()
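# A minimal sketch of the S3 wrapper these snippets and fixtures appear to
# exercise, assuming a thin layer over boto3. The method names (upload,
# download, delete, list) match the call sites above, but this body is an
# assumption, not the project's actual implementation.
import boto3


class S3(object):

    def __init__(self, bucket_name):
        self.bucket_name = bucket_name
        self.client = boto3.client('s3')

    def upload(self, key_name, filename):
        # boto3's upload_file signature is (Filename, Bucket, Key).
        self.client.upload_file(filename, self.bucket_name, key_name)

    def download(self, key_name, filename):
        self.client.download_file(self.bucket_name, key_name, filename)

    def delete(self, key_name):
        self.client.delete_object(Bucket=self.bucket_name, Key=key_name)

    def list(self, key_prefix):
        # Page through the bucket listing, yielding object keys; this is the
        # paginator the test fixtures above stub out.
        paginator = self.client.get_paginator('list_objects_v2')
        pages = paginator.paginate(Bucket=self.bucket_name, Prefix=key_prefix)
        for page in pages:
            for obj in page.get('Contents', []):
                yield obj['Key']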