def upload(self, key, filename, is_public=False, metadata=None):
    """Upload *filename* to S3 under *key*, retrying up to 5 times.

    :param key: destination key name in ``self.bucket``.
    :param filename: local path of the file to upload; its MIME type is
        guessed from the name and sent as ``Content-Type``.
    :param is_public: when True the object is stored with the
        ``public-read`` ACL, otherwise ``private``.
    :param metadata: optional dict copied into ``x-amz-meta-*`` headers.
    :raises ExceedMaxRetryError: when all 5 upload attempts fail.
    """
    k = Key(self.bucket)
    k.key = key
    # One-year cache lifetime; uploaded objects are treated as immutable.
    headers = {'Cache-Control': 'max-age=31536000'}
    content_type, encoding = mimetypes.guess_type(filename)
    if content_type is not None:
        headers['Content-Type'] = content_type
    if encoding == 'gzip':
        headers['Content-Encoding'] = 'gzip'
    if metadata is not None:
        # BUGFIX: the original loop variable shadowed the `key` parameter;
        # renamed to avoid clobbering it.
        for meta_key in metadata:
            headers['x-amz-meta-' + meta_key] = metadata[meta_key]
    for _ in xrange(5):
        try:
            k.set_contents_from_filename(
                filename,
                headers=headers,
                policy=('public-read' if is_public else 'private')
            )
            logger.info('Upload %s -> %s', filename, k.name)
            break
        except Exception as e:
            # Best-effort retry: log the failure and try again.
            logger.exception(e)
            logger.warn('Try upload again')
    else:
        # for/else: reached only when the loop never hit `break`.
        logger.error('Retry more than 5 times, give it up.')
        raise ExceedMaxRetryError()
def main():
    """Archive one day's logs from S3 into a zip, re-upload it, then
    delete the original log objects.

    The date is taken from the command line (``args.date``). If no logs
    exist for that date the function logs a warning and returns without
    side effects.
    """
    arg_parser = init_arg_parser()
    args = arg_parser.parse_args()
    with TempDir() as tempdir:
        key_names = download_logs_of_a_date(args.date, tempdir)
        # Pythonic truthiness check instead of `len(key_names) == 0`.
        if not key_names:
            logger.warn('Cannot find any log on %s', args.date)
            return
        with closing(NamedTemporaryFile(suffix='.zip')) as zip_file:
            ZipCompressor.compress(tempdir, zip_file.name)
            upload_to_s3(zip_file.name, args.date)
            # Only delete the source logs after the archive upload succeeded.
            delete_logs(key_names)
def main():
    """Bundle the logs of a given date into a zip archive, upload the
    archive to S3, and remove the original log objects.

    Exits early (with a warning) when the requested date has no logs.
    """
    args = init_arg_parser().parse_args()
    with TempDir() as work_dir:
        found_keys = download_logs_of_a_date(args.date, work_dir)
        if not found_keys:
            logger.warn('Cannot find any log on %s', args.date)
            return
        with closing(NamedTemporaryFile(suffix='.zip')) as archive:
            ZipCompressor.compress(work_dir, archive.name)
            upload_to_s3(archive.name, args.date)
            delete_logs(found_keys)