def test_s3_unsigned(monkeypatch, without_aws_env):
    """Unsigned access is selectable both via argument and via environment."""
    # Explicit keyword argument switches the signer off.
    assert s3_client(aws_unsigned=True)._request_signer.signature_version == botocore.UNSIGNED

    # Setting AWS_UNSIGNED in the environment has the same effect.
    monkeypatch.setenv("AWS_UNSIGNED", "yes")
    assert s3_client()._request_signer.signature_version == botocore.UNSIGNED
def test_s3_basics(without_aws_env):
    """Exercise URL parsing, byte-range formatting and client construction."""
    from numpy import s_
    from botocore.credentials import ReadOnlyCredentials

    # URL parsing: bucket/key split, trailing slash kept, nested keys intact.
    assert s3_url_parse('s3://bucket/key') == ('bucket', 'key')
    assert s3_url_parse('s3://bucket/key/') == ('bucket', 'key/')
    assert s3_url_parse('s3://bucket/k/k/key') == ('bucket', 'k/k/key')

    # Non-s3 schemes are rejected.
    with pytest.raises(ValueError):
        s3_url_parse("file://some/path")

    # Byte-range formatting (HTTP Range semantics: inclusive end byte).
    assert s3_fmt_range((0, 3)) == "bytes=0-2"
    assert s3_fmt_range(s_[4:10]) == "bytes=4-9"
    assert s3_fmt_range(s_[:10]) == "bytes=0-9"
    assert s3_fmt_range(None) is None

    # Open-ended, negative, or strided ranges are all invalid.
    for bad_range in (s_[10:], s_[-2:3], s_[:-3], (-1, 3), (3, -1), s_[1:100:3]):
        with pytest.raises(ValueError):
            s3_fmt_range(bad_range)

    # Endpoint URL is derived from region name and use_ssl flag.
    assert str(s3_client(region_name='kk')._endpoint) == 's3(https://s3.kk.amazonaws.com)'
    assert str(s3_client(region_name='kk', use_ssl=False)._endpoint) == 's3(http://s3.kk.amazonaws.com)'

    # Client construction with explicit read-only credentials succeeds.
    fake_creds = ReadOnlyCredentials('fake-key', 'fake-secret', None)
    assert s3_client(region_name='us-west-2', creds=fake_creds) is not None
def test_s3_io(monkeypatch, without_aws_env):
    """Round-trip dump/fetch/head-object against a moto-mocked S3 bucket."""
    import moto
    from numpy import s_

    url = "s3://bucket/file.txt"
    bucket, _ = s3_url_parse(url)

    # Fake credentials so botocore's credential resolution succeeds.
    monkeypatch.setenv("AWS_ACCESS_KEY_ID", "fake-key-id")
    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "fake-secret")

    with moto.mock_s3():
        s3 = s3_client(region_name='kk')
        s3.create_bucket(Bucket=bucket)

        # Write then read back a small object.
        assert s3_dump(b"33", url, s3=s3) is True
        assert s3_fetch(url, s3=s3) == b"33"

        # HEAD metadata for an existing object.
        meta = s3_head_object(url, s3=s3)
        assert meta is not None
        for field in ('LastModified', 'ContentLength', 'ETag'):
            assert field in meta
        assert meta['ContentLength'] == 2

        # HEAD on a missing key returns None rather than raising.
        assert s3_head_object(url + '-nosuch', s3=s3) is None

        # Partial reads via slice-style ranges.
        assert s3_dump(b"0123456789ABCDEF", url, s3=s3) is True
        assert s3_fetch(url, range=s_[:4], s3=s3) == b"0123"
        assert s3_fetch(url, range=s_[3:8], s3=s3) == b"34567"

        # Strided slices cannot be expressed as an HTTP byte range.
        with pytest.raises(ValueError):
            s3_fetch(url, range=s_[::2], s3=s3)
def test_save_blob_s3(blob, monkeypatch, dask_client):
    """save_blob_to_s3 delayed tasks work via .compute() and via a dask client."""
    region_name = "us-west-2"
    blob2 = blob + blob
    dask_blob = dask.delayed(blob)
    dask_blob2 = dask.delayed(blob2)

    url = "s3://bucket/file.txt"
    url2 = "s3://bucket/file-2.txt"
    bucket, _ = s3_url_parse(url)

    # Fake credentials so botocore's credential resolution succeeds.
    monkeypatch.setenv("AWS_ACCESS_KEY_ID", "fake-key-id")
    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "fake-secret")

    with moto.mock_s3():
        s3 = s3_client(region_name=region_name)
        s3.create_bucket(Bucket=bucket)

        # Local compute path.
        rr = save_blob_to_s3(dask_blob, url, region_name=region_name)
        assert rr.compute() == (url, True)

        # Distributed-client compute path.
        rr = save_blob_to_s3(dask_blob2, url2, region_name=region_name)
        assert dask_client.compute(rr).result() == (url2, True)

        fetched1 = s3_fetch(url, s3=s3)
        fetched2 = s3_fetch(url2, s3=s3)
        # The blob fixture may be str or bytes; decode for comparison if needed.
        if isinstance(blob, str):
            fetched1 = fetched1.decode("utf8")
            fetched2 = fetched2.decode("utf8")

        assert fetched1 == blob
        assert fetched2 == blob2
def test_save_blob_s3_direct(blob, monkeypatch):
    """_save_blob_to_s3 writes directly and reports failure for a bad bucket."""
    region_name = "us-west-2"
    blob2 = blob + blob

    url = "s3://bucket/file.txt"
    url2 = "s3://bucket/file-2.txt"
    bucket, _ = s3_url_parse(url)

    # Fake credentials so botocore's credential resolution succeeds.
    monkeypatch.setenv("AWS_ACCESS_KEY_ID", "fake-key-id")
    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "fake-secret")

    with moto.mock_s3():
        s3 = s3_client(region_name=region_name)
        s3.create_bucket(Bucket=bucket)

        # Successful writes report (url, True).
        assert _save_blob_to_s3(blob, url, region_name=region_name) == (url, True)
        assert _save_blob_to_s3(blob2, url2, region_name=region_name) == (url2, True)

        fetched1 = s3_fetch(url, s3=s3)
        fetched2 = s3_fetch(url2, s3=s3)
        # The blob fixture may be str or bytes; decode for comparison if needed.
        if isinstance(blob, str):
            fetched1 = fetched1.decode("utf8")
            fetched2 = fetched2.decode("utf8")

        assert fetched1 == blob
        assert fetched2 == blob2

        # A write to a nonexistent bucket reports failure instead of raising.
        assert _save_blob_to_s3("", "s3://not-a-bucket/f.txt") == ("s3://not-a-bucket/f.txt", False)
def test_s3_client_cache(monkeypatch, without_aws_env):
    """Cached-client reuse, purge semantics, and cache-key uniqueness."""
    monkeypatch.setenv("AWS_ACCESS_KEY_ID", "fake-key-id")
    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "fake-secret")

    s3 = s3_client(cache=True)
    # The same cached instance is handed out on repeated calls.
    assert s3 is s3_client(cache=True)
    # 'purge' hands back the cached client one last time, then clears it.
    assert s3 is s3_client(cache='purge')
    assert s3_client(cache='purge') is None
    # After purging, a fresh client is constructed.
    assert s3 is not s3_client(cache=True)

    # Every distinct option combination must map to a distinct cache key.
    opts = (
        dict(),
        dict(region_name="foo"),
        dict(region_name="bar"),
        dict(profile="foo"),
        dict(profile="foo", region_name="xxx"),
        dict(profile="bar"),
        dict(creds=ReadOnlyCredentials('fake1', '...', None)),
        dict(creds=ReadOnlyCredentials('fake1', '...', None), region_name='custom'),
        dict(creds=ReadOnlyCredentials('fake2', '...', None)),
    )
    keys = {_s3_cache_key(**o) for o in opts}
    assert len(keys) == len(opts)
def exists(self, task: Union[Task, str]) -> bool:
    """Return True when the output for *task* (or an explicit URI) exists.

    Accepts either a Task (resolved through ``self.uri``) or a ready-made
    URI string. Supports ``s3://`` (HEAD request) and ``file://`` (local
    filesystem stat) schemes; anything else raises ValueError.
    """
    uri = task if isinstance(task, str) else self.uri(task)
    parsed = urlparse(uri)

    if parsed.scheme == 's3':
        # Reuse the cached client so repeated existence checks are cheap.
        s3 = s3_client(creds=self._get_creds(), cache=True)
        return s3_head_object(uri, s3=s3) is not None

    if parsed.scheme == 'file':
        return Path(parsed.path).exists()

    raise ValueError(f"Can't handle url: {uri}")
# NOTE(review): this is a second definition of test_s3_io — it shadows the
# earlier, fuller version (which also checks s3_head_object). Consider
# removing this copy or renaming it so both tests actually run.
def test_s3_io(monkeypatch, without_aws_env):
    """Dump/fetch round-trip and range reads against a moto-mocked bucket."""
    import moto
    from numpy import s_

    url = "s3://bucket/file.txt"
    bucket, _ = s3_url_parse(url)

    # Fake credentials so botocore's credential resolution succeeds.
    monkeypatch.setenv("AWS_ACCESS_KEY_ID", "fake-key-id")
    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "fake-secret")

    with moto.mock_s3():
        s3 = s3_client(region_name='kk')
        s3.create_bucket(Bucket=bucket)

        # Write then read back a small object.
        assert s3_dump(b"33", url, s3=s3) is True
        assert s3_fetch(url, s3=s3) == b"33"

        # Partial reads via slice-style ranges.
        assert s3_dump(b"0123456789ABCDEF", url, s3=s3) is True
        assert s3_fetch(url, range=s_[:4], s3=s3) == b"0123"
        assert s3_fetch(url, range=s_[3:8], s3=s3) == b"34567"

        # Strided slices cannot be expressed as an HTTP byte range.
        with pytest.raises(ValueError):
            s3_fetch(url, range=s_[::2], s3=s3)