def test_etag(self):
    assert data_transfer._calculate_etag(
        DATA_DIR / 'small_file.csv') == '"0bec5bf6f93c547bc9c6774acaf85e1a"'
    assert data_transfer._calculate_etag(
        DATA_DIR / 'buggy_parquet.parquet') == '"dfb5aca048931d396f4534395617363f"'
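
# Context for the expected strings above (an illustrative sketch, not the
# data_transfer._calculate_etag implementation): for an object small enough to
# be stored in a single part, an S3-style ETag is the quoted hex MD5 digest of
# the file contents; multipart uploads use a different scheme (MD5 of the part
# MD5s plus a "-<part count>" suffix). The helper below is hypothetical and
# only covers the single-part case.
import hashlib

def naive_single_part_etag(path):
    # Quoted MD5 of the whole file -- matches S3's ETag only when the object
    # was not uploaded via multipart.
    return '"%s"' % hashlib.md5(path.read_bytes()).hexdigest()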
def test_upload_large_file_etag_match_metadata_match(self):
    path = DATA_DIR / 'large_file.npy'
    etag = data_transfer._calculate_etag(path)

    self.s3_stubber.add_response(
        method='head_object',
        service_response={
            'ContentLength': path.stat().st_size,
            'ETag': etag,
            'VersionId': 'v1',
            'Metadata': {
                'helium': '{"foo": "bar"}'
            }
        },
        expected_params={
            'Bucket': 'example',
            'Key': 'large_file.npy',
        }
    )

    urls = data_transfer.copy_file_list([
        (path.as_uri(), 's3://example/large_file.npy', path.stat().st_size, {'foo': 'bar'}),
    ])

    assert urls[0] == 's3://example/large_file.npy?versionId=v1'
def test_upload_large_file_etag_match(self):
    path = DATA_DIR / 'large_file.npy'

    self.s3_stubber.add_response(
        method='head_object',
        service_response={
            'ContentLength': path.stat().st_size,
            'ETag': data_transfer._calculate_etag(path),
            'VersionId': 'v1',
        },
        expected_params={
            'Bucket': 'example',
            'Key': 'large_file.npy',
        }
    )

    urls = data_transfer.copy_file_list([
        (PhysicalKey.from_path(path),
         PhysicalKey.from_url('s3://example/large_file.npy'),
         path.stat().st_size),
    ])

    assert urls[0] == PhysicalKey.from_url('s3://example/large_file.npy?versionId=v1')
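
# Both upload tests above stub only a successful head_object response, yet
# expect copy_file_list to return the destination's existing version: when the
# destination already reports the same size and ETag as the local file, no
# upload should occur. The sketch below illustrates that pattern under stated
# assumptions -- names such as s3_client, compute_etag and
# upload_unless_identical are hypothetical and are not quilt's API.
import botocore.exceptions

def upload_unless_identical(s3_client, compute_etag, path, bucket, key):
    try:
        head = s3_client.head_object(Bucket=bucket, Key=key)
    except botocore.exceptions.ClientError:
        head = None  # No existing object (or no access); fall through to upload.
    if (head is not None
            and head['ContentLength'] == path.stat().st_size
            and head['ETag'] == compute_etag(path)):
        # Identical bytes are already stored; reuse the existing version.
        return head.get('VersionId')
    s3_client.upload_file(str(path), bucket, key)
    return s3_client.head_object(Bucket=bucket, Key=key).get('VersionId')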