def test_copy_multipart_preserve_etag():
    """A multipart-uploaded object keeps its ETag through S3Remote._copy."""
    src, dst = _get_src_dst()
    client = boto3.client("s3")
    client.create_bucket(Bucket=src.bucket)
    # Multipart uploads get a composite ETag; _copy must preserve it.
    _upload_multipart(client, src.bucket, src.path)
    S3Remote._copy(client, src, dst, {})
def test_copy_singlepart_preserve_etag():
    """A plain (single-part) object keeps its ETag through S3Remote._copy."""
    src, dst = _get_src_dst()
    client = boto3.client("s3")
    client.create_bucket(Bucket=src.bucket)
    client.put_object(Bucket=src.bucket, Key=src.path, Body="data")
    S3Remote._copy(client, src, dst, {})
def test_copy_preserve_etag_across_buckets(remote, dvc):
    """Server-side copy into a different bucket must not change the ETag."""
    client = remote.s3
    client.create_bucket(Bucket="another")

    dest = S3Remote(dvc, {"url": "s3://another", "region": "us-east-1"})
    src_info = remote.path_info / "foo"
    dst_info = dest.path_info / "foo"

    remote.copy(src_info, dst_info)

    src_etag = S3Remote.get_etag(client, src_info.bucket, src_info.path)
    dst_etag = S3Remote.get_etag(client, "another", "foo")
    assert src_etag == dst_etag
def test_link_created_on_non_nested_path(base_info, tmp_dir, dvc, scm):
    """link() works for keys directly under the configured URL prefix."""
    remote = S3Remote(dvc, {"url": str(base_info.parent)})
    remote.s3.create_bucket(Bucket=base_info.bucket)

    src = base_info / "from"
    dst = base_info / "to"
    remote.s3.put_object(Bucket=base_info.bucket, Key=src.path, Body="data")
    remote.link(src, dst)

    # Both the original object and its link must be visible afterwards.
    assert remote.exists(src)
    assert remote.exists(dst)
def test_grants(dvc):
    """Each grant_* config option maps onto its boto3 extra-args key."""
    config = {
        "url": url,
        "grant_read": "id=read-permission-id,id=other-read-permission-id",
        "grant_read_acp": "id=read-acp-permission-id",
        "grant_write_acp": "id=write-acp-permission-id",
        "grant_full_control": "id=full-control-permission-id",
    }
    remote = S3Remote(dvc, config)

    # Table-driven check: boto3 extra-arg name -> expected grantee string.
    expected = {
        "GrantRead": "id=read-permission-id,id=other-read-permission-id",
        "GrantReadACP": "id=read-acp-permission-id",
        "GrantWriteACP": "id=write-acp-permission-id",
        "GrantFullControl": "id=full-control-permission-id",
    }
    for arg_name, grantee in expected.items():
        assert remote.extra_args[arg_name] == grantee
def test_makedirs_doesnot_try_on_top_level_paths(tmp_dir, dvc, scm):
    """makedirs() on a bare bucket URL must complete without raising."""
    bucket_root = S3RemoteTree.PATH_CLS("s3://bucket/")
    remote = S3Remote(dvc, {"url": str(bucket_root)})
    # Must be a no-op (no exception) rather than attempting a key creation.
    remote.tree.makedirs(bucket_root)
def remote(cls, repo):
    """Yield an S3Remote that talks to a moto-mocked S3 endpoint."""
    with mock_s3():
        config = {"url": cls.get_url()}
        yield S3Remote(repo, config)
def test_grants_mutually_exclusive_acl_error(dvc, grants):
    """Combining `acl` with any grant_* option must raise ConfigError."""
    base_config = {"url": url, "acl": "public-read"}
    for option, value in grants.items():
        with pytest.raises(ConfigError):
            S3Remote(dvc, {**base_config, option: value})
def test_init(dvc):
    """A freshly constructed remote exposes the configured URL as path_info."""
    remote = S3Remote(dvc, {"url": url})
    assert remote.path_info == url
def _get_src_dst():
    """Return (source, destination) path infos under a fresh test URL."""
    root = S3Remote.path_cls(S3.get_url())
    return root / "from", root / "to"
# NOTE(review): this line looks like a duplicated, whitespace-mangled paste of
# test_copy_singlepart_preserve_etag and test_link_created_on_non_nested_path
# (both already defined above), here carrying their @mock_s3 /
# @pytest.mark.parametrize decorators, followed by a TRUNCATED header for
# test_makedirs_doesnot_try_on_top_level_paths with no body.
# Duplicate test names shadow each other under pytest — confirm which copy is
# canonical (this one also uses S3Remote.path_cls where the other copy uses
# S3RemoteTree.PATH_CLS) and remove the redundant one. Left byte-identical
# because the final definition is incomplete and cannot be safely rewritten.
@mock_s3 def test_copy_singlepart_preserve_etag(): from_info, to_info = _get_src_dst() s3 = boto3.client("s3") s3.create_bucket(Bucket=from_info.bucket) s3.put_object(Bucket=from_info.bucket, Key=from_info.path, Body="data") S3Remote._copy(s3, from_info, to_info, {}) @mock_s3 @pytest.mark.parametrize( "base_info", [S3Remote.path_cls("s3://bucket/"), S3Remote.path_cls("s3://bucket/ns/")], ) def test_link_created_on_non_nested_path(base_info, tmp_dir, dvc, scm): remote = S3Remote(dvc, {"url": str(base_info.parent)}) remote.s3.create_bucket(Bucket=base_info.bucket) remote.s3.put_object( Bucket=base_info.bucket, Key=(base_info / "from").path, Body="data" ) remote.link(base_info / "from", base_info / "to") assert remote.exists(base_info / "from") assert remote.exists(base_info / "to") @mock_s3 def test_makedirs_doesnot_try_on_top_level_paths(tmp_dir, dvc, scm):
def test_sse_kms_key_id(dvc):
    """The sse_kms_key_id option should populate the SSEKMSKeyId extra arg."""
    config = {"url": url, "sse_kms_key_id": "key"}
    remote = S3Remote(dvc, config)
    assert remote.tree.extra_args["SSEKMSKeyId"] == "key"