def upload_file(bucket, source, destination, s3_ssenc, bufsize, compress_data):
    """Upload a local file to S3, retrying on failure.

    Files whose size is at or below MULTI_PART_UPLOAD_THRESHOLD MB are sent
    with a single PUT; larger files are streamed through transfer_data() and
    sent as an S3 multipart upload.

    Args:
        bucket: boto S3 Bucket object to upload into.
        source: path of the local file to upload.
        destination: S3 key name (full S3 path) for the uploaded object.
        s3_ssenc: truthy to request S3 server-side encryption.
        bufsize: chunk buffer size forwarded to transfer_data().
        compress_data: whether transfer_data() should compress chunks.

    Raises:
        Exception: re-raises the last upload error once MAX_RETRY_COUNT
            attempts have been exhausted.
    """
    completed = False
    retry_count = 0
    source_size = os.path.getsize(source)

    if source_size <= int(MULTI_PART_UPLOAD_THRESHOLD * MBFACTOR):
        # Small file: single-part upload.
        k = Key(bucket)  # Initialize S3 key object
        while not completed and retry_count < MAX_RETRY_COUNT:
            try:
                k.key = destination  # Prepend S3 path prior to uploading
                bytes_written = k.set_contents_from_filename(
                    source, encrypt_key=s3_ssenc)
                completed = True
                print("[SU] - {0}({1}) upload completed, number of bytes "
                      "written={2}".format(source, source_size, bytes_written))
            except Exception:
                # BUG FIX: the original had two stacked `except Exception:`
                # clauses (the second unreachable) and the first called
                # .format() with a missing argument, raising IndexError
                # inside the handler and defeating the retry loop.
                print("Error uploading file {!s} to {!s}. "
                      "Retry count: {}".format(source, destination,
                                               retry_count))
                retry_count = retry_count + 1
                if retry_count >= MAX_RETRY_COUNT:
                    print("Retried too many times uploading file")
                    raise
    else:
        # Big file: use multipart upload.
        mp = bucket.initiate_multipart_upload(
            destination, encrypt_key=s3_ssenc)
        while not completed and retry_count < MAX_RETRY_COUNT:
            try:
                for i, chunk in enumerate(
                        transfer_data(source, bufsize, compress_data)):
                    # S3 part numbers are 1-based.
                    mp.upload_part_from_file(chunk, i + 1)
                completion = mp.complete_upload()  # Finish the upload
                completed = True
                # BUG FIX: the original printed k.to_xml(), but `k` is never
                # defined in the multipart branch (NameError after a
                # successful upload). Report the completion result instead.
                print("[MU] - {0} upload completed, "
                      "xml={1}".format(source, completion))
            except Exception:
                print("Error uploading file {!s} to {!s}. "
                      "Retry count: {}".format(source, destination,
                                               retry_count))
                cancel_upload(bucket, mp, destination)
                retry_count = retry_count + 1
                if retry_count >= MAX_RETRY_COUNT:
                    print("Retried too many times uploading file")
                    raise
                # BUG FIX: the cancelled multipart handle cannot be reused;
                # start a fresh multipart upload before retrying.
                mp = bucket.initiate_multipart_upload(
                    destination, encrypt_key=s3_ssenc)
        # BUG FIX: removed the stray trailing mp.complete_upload() that ran
        # after the loop — it double-completed successful uploads and raised
        # on the single-part path (where `mp` does not exist).