def test_get_s3():
    s3 = DaskS3FileSystem(key='key', secret='secret')
    assert s3.key == 'key'
    assert s3.secret == 'secret'

    # username/password are accepted as aliases for key/secret
    s3 = DaskS3FileSystem(username='key', password='secret')
    assert s3.key == 'key'
    assert s3.secret == 'secret'

    # passing both spellings of the same credential is rejected
    with pytest.raises(KeyError):
        DaskS3FileSystem(key='key', username='key')
    with pytest.raises(KeyError):
        DaskS3FileSystem(secret='key', password='key')
def test_get_s3():
    s3 = DaskS3FileSystem(key="key", secret="secret")
    assert s3.key == "key"
    assert s3.secret == "secret"

    # username/password are accepted as aliases for key/secret
    s3 = DaskS3FileSystem(username="key", password="secret")
    assert s3.key == "key"
    assert s3.secret == "secret"

    # passing both spellings of the same credential is rejected
    with pytest.raises(KeyError):
        DaskS3FileSystem(key="key", username="key")
    with pytest.raises(KeyError):
        DaskS3FileSystem(secret="key", password="key")
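# The two variants of test_get_s3 above assume that DaskS3FileSystem treats
# username/password as aliases for key/secret and raises KeyError when the
# same credential is supplied under both names. A minimal sketch of that
# aliasing logic (hypothetical helper, not the actual dask implementation):
def _normalize_s3_credentials(key=None, secret=None, username=None, password=None):
    # hypothetical: mirrors only the behaviour the assertions above rely on
    if username is not None:
        if key is not None:
            raise KeyError("Supply either 'key' or 'username', not both")
        key = username
    if password is not None:
        if secret is not None:
            raise KeyError("Supply either 'secret' or 'password', not both")
        secret = password
    return key, secret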
def s3_context(bucket, files):
    # start the moto S3 mock and populate the bucket
    m = moto.mock_s3()
    m.start()
    client = boto3.client('s3')
    client.create_bucket(Bucket=bucket, ACL='public-read-write')
    for f, data in files.items():
        client.put_object(Bucket=bucket, Key=f, Body=data)

    yield DaskS3FileSystem(anon=True)

    # best-effort cleanup of the uploaded keys
    for f in files:
        try:
            client.delete_object(Bucket=bucket, Key=f)
        except Exception:
            pass
    m.stop()
def s3_context(bucket, files):
    # start the moto S3 mock and populate the bucket
    m = moto.mock_s3()
    m.start()
    client = boto3.client('s3')
    client.create_bucket(Bucket=bucket, ACL='public-read-write')
    for f, data in files.items():
        client.put_object(Bucket=bucket, Key=f, Body=data)

    try:
        yield DaskS3FileSystem(anon=True)

        # best-effort cleanup of the uploaded keys
        for f in files:
            try:
                client.delete_object(Bucket=bucket, Key=f)
            except Exception:
                pass
    finally:
        m.stop()
        # make sure httpretty (used by moto to intercept requests) is fully
        # disabled and reset once the mock is stopped
        httpretty = pytest.importorskip('httpretty')
        httpretty.HTTPretty.disable()
        httpretty.HTTPretty.reset()
def s3_context(bucket, files):
    with ensure_safe_environment_variables():
        # temporary workaround as moto fails for botocore >= 1.11 otherwise,
        # see https://github.com/spulec/moto/issues/1924 & 1952
        os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key")
        os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret")

        with moto.mock_s3():
            client = boto3.client("s3")
            client.create_bucket(Bucket=bucket, ACL="public-read-write")
            for f, data in files.items():
                client.put_object(Bucket=bucket, Key=f, Body=data)

            try:
                yield DaskS3FileSystem(anon=True)

                # best-effort cleanup of the uploaded keys
                for f in files:
                    try:
                        client.delete_object(Bucket=bucket, Key=f)
                    except Exception:
                        pass
            finally:
                # make sure httpretty (used by moto to intercept requests) is
                # fully disabled and reset after the test
                httpretty = pytest.importorskip("httpretty")
                httpretty.HTTPretty.disable()
                httpretty.HTTPretty.reset()
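# s3_context is written as a generator, so the versions above are presumably
# wrapped with contextlib.contextmanager (the decorator is not shown in these
# snippets). A minimal usage sketch under that assumption, with a made-up
# bucket name and file payload, building the kind of "s3" fixture that
# test_get_pyarrow_fs_s3 below requests; it also assumes the s3fs-style
# .ls() method inherited by DaskS3FileSystem:
from contextlib import contextmanager

s3_context = contextmanager(s3_context)

test_bucket_name = "test-bucket"         # hypothetical
test_files = {"data/a.csv": b"1,2,3\n"}  # hypothetical


@pytest.fixture
def s3():
    with s3_context(test_bucket_name, test_files) as fs:
        yield fs


def test_mocked_bucket_contents(s3):
    # the filesystem yielded by s3_context should see the uploaded keys
    assert s3.ls(test_bucket_name)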
def test_get_pyarrow_fs_s3(s3):
    pa = pytest.importorskip('pyarrow')
    fs = DaskS3FileSystem(anon=True)
    assert isinstance(get_pyarrow_filesystem(fs), pa.filesystem.S3FSWrapper)