def copy_part(upload_url: str, source_url: str, dest_platform: str, part: dict):
    """Copy a single part (byte range) of ``source_url`` to ``upload_url``.

    The source range is streamed (never fully buffered) and re-uploaded to
    either an S3 multipart-upload part URL or a GCS resumable-upload session,
    depending on ``dest_platform``.

    :param upload_url: destination URL for this part's PUT request(s)
    :param source_url: URL of the object to read the byte range from
    :param dest_platform: "s3" or "gs"; any other value silently does nothing
        except open/close the source stream (NOTE(review): `res` would then be
        unbound and `return res` would raise NameError — confirm callers only
        ever pass these two values)
    :param part: dict with "start" and "end" byte offsets; both appear to be
        inclusive, since sizes are computed as end - start + 1
    :return: the last HTTP response object (urllib3-style for "s3",
        requests-style for "gs" — note the differing status attribute names)
    """
    gs = Config.get_native_handle(Replica.gcp)
    boto3_session = boto3.session.Session()
    # `closing` ensures the ranged source stream is released even if one of
    # the status assertions below fails mid-copy.
    with closing(range_request(source_url, part["start"], part["end"])) as fh:
        if dest_platform == "s3":
            # Wrap the source stream so each chunk is SigV4-signed on the fly
            # using the ambient boto3 session credentials/region.
            chunker = S3SigningChunker(fh,
                                       part["end"] - part["start"] + 1,
                                       boto3_session.get_credentials(),
                                       "s3",
                                       boto3_session.region_name)
            # retries=False: a retry would re-consume the (non-seekable) stream.
            res = http.request("PUT", upload_url,
                               headers=chunker.get_headers("PUT", upload_url),
                               body=chunker,
                               chunked=True,
                               retries=False)
            logger.info(f"Part upload result: {res.status}")
            assert 200 <= res.status < 300
            logger.info("Part etag: {}".format(res.headers["ETag"]))
        elif dest_platform == "gs":
            logger.info(f"Uploading part {part} to gs")
            # TODO: brianh: is mypy suppression ok?
            # AuthorizedSession refreshes/attaches GCP credentials per request;
            # `_credentials` is private API, hence the mypy suppression.
            gs_transport = google.auth.transport.requests.AuthorizedSession(
                gs._credentials)  # type: ignore
            # Feed the part to the resumable-upload session in fixed-size
            # chunks; `start` is the offset *within this part*.
            for start in range(0, part["end"] - part["start"] + 1, gs_upload_chunk_size):
                chunk = fh.read(gs_upload_chunk_size)
                headers = {
                    # total_bytes=None -> "bytes a-b/*": total size not yet known
                    "content-range": get_content_range(start, start + len(chunk) - 1, total_bytes=None)
                }
                res = gs_transport.request("PUT", upload_url, data=chunk, headers=headers)
                # Intermediate chunks legitimately return 308 (Resume Incomplete),
                # hence the wide < 400 window inside the loop.
                assert 200 <= res.status_code < 400
            # The final chunk must have completed the upload with a plain 200.
            assert res.status_code == 200
    return res
def test_unknown_size(self):
    """A None total size must render as the wildcard form ``bytes a-b/*``."""
    header_value = _upload.get_content_range(1000, 10000, None)
    assert u"bytes 1000-10000/*" == header_value
def test_known_size(self):
    """A concrete total size must render as ``bytes a-b/total``."""
    header_value = _upload.get_content_range(5, 10, 40)
    assert u"bytes 5-10/40" == header_value