def configure_s3_access(profile=None,
                        region_name="auto",
                        aws_unsigned=False,
                        requester_pays=False,
                        cloud_defaults=True,
                        client=None,
                        **gdal_opts):
    """
    Credentialize for S3 bucket access or configure public access.

    This function obtains credentials for S3 access and passes them on to
    processing threads, either locally or on a dask cluster.

    .. note::

       If credentials are STS-based they will eventually expire. Currently
       this case is not handled well: reads will simply start failing at some
       point and will never recover.

    :param profile:        AWS profile name to use
    :param region_name:    Default region_name to use if not configured for the given/default AWS profile
    :param aws_unsigned:   If ``True`` don't bother with credentials when reading from S3
    :param requester_pays: Needed when accessing requester-pays buckets
    :param cloud_defaults: Assume files are in a cloud-friendly format, i.e. no side-car files.
                           Disables looking for side-car files, which makes things faster but
                           won't work for files that do have side-car files with extra metadata.
    :param client:         Dask distributed ``dask.Client`` instance; if supplied, apply settings
                           on the dask cluster rather than locally.
    :param gdal_opts:      Any other options to pass to GDAL environment setup
    :returns: Credentials object, or ``None`` if ``aws_unsigned=True``
    """
    from datacube.utils.aws import get_aws_settings

    aws, creds = get_aws_settings(profile=profile,
                                  region_name=region_name,
                                  aws_unsigned=aws_unsigned,
                                  requester_pays=requester_pays)

    if client is None:
        set_default_rio_config(aws=aws, cloud_defaults=cloud_defaults, **gdal_opts)
    else:
        client.register_worker_callbacks(
            functools.partial(set_default_rio_config,
                              aws=aws,
                              cloud_defaults=cloud_defaults,
                              **gdal_opts))

    return creds
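# Usage sketch (illustrative, not part of the module above): typical ways to
# call configure_s3_access, locally and on a dask cluster. The "prod" profile
# name and the Client() construction are assumptions for the example only.

def _example_configure_s3_access():
    # Public data: skip credentials entirely, just configure the GDAL environment.
    configure_s3_access(aws_unsigned=True)

    # Credentialed access via a named AWS profile; the returned credentials
    # object can be reused elsewhere, e.g. for boto3 calls.
    creds = configure_s3_access(profile="prod", requester_pays=True)

    # Same settings, but registered on every worker of a dask cluster.
    from dask.distributed import Client
    client = Client()
    creds = configure_s3_access(profile="prod", client=client)
    return creds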
def test_get_aws_settings(monkeypatch, without_aws_env):
    pp = write_files({
        "config": """
[default]
region = us-west-2

[profile east]
region = us-east-1
[profile no_region]
""",
        "credentials": """
[default]
aws_access_key_id = AKIAWYXYXYXYXYXYXYXY
aws_secret_access_key = fake-fake-fake

[east]
aws_access_key_id = AKIAEYXYXYXYXYXYXYXY
aws_secret_access_key = fake-fake-fake
"""
    })

    assert (pp / "credentials").exists()
    assert (pp / "config").exists()

    monkeypatch.setenv("AWS_CONFIG_FILE", str(pp / "config"))
    monkeypatch.setenv("AWS_SHARED_CREDENTIALS_FILE", str(pp / "credentials"))

    # Default profile
    aws, creds = get_aws_settings()
    assert aws['region_name'] == 'us-west-2'
    assert aws['aws_access_key_id'] == 'AKIAWYXYXYXYXYXYXYXY'
    assert aws['aws_secret_access_key'] == 'fake-fake-fake'

    sess = mk_boto_session(profile="no_region",
                           creds=creds.get_frozen_credentials(),
                           region_name="mordor")

    assert sess.get_credentials().get_frozen_credentials() == creds.get_frozen_credentials()

    # Named profile
    aws, creds = get_aws_settings(profile='east')
    assert aws['region_name'] == 'us-east-1'
    assert aws['aws_access_key_id'] == 'AKIAEYXYXYXYXYXYXYXY'
    assert aws['aws_secret_access_key'] == 'fake-fake-fake'

    # Unsigned access: no credentials are returned
    aws, creds = get_aws_settings(aws_unsigned=True)
    assert creds is None
    assert aws['region_name'] == 'us-west-2'
    assert aws['aws_unsigned'] is True

    # Explicit region_name overrides a profile without a region
    aws, creds = get_aws_settings(profile="no_region",
                                  region_name="us-west-1",
                                  aws_unsigned=True)
    assert creds is None
    assert aws['region_name'] == 'us-west-1'
    assert aws['aws_unsigned'] is True

    # No region configured anywhere: region is discovered via (mocked) instance metadata
    with mock.patch('datacube.utils.aws._fetch_text',
                    return_value=_json(region="mordor")):
        aws, creds = get_aws_settings(profile="no_region", aws_unsigned=True)
        assert aws['region_name'] == 'mordor'
        assert aws['aws_unsigned'] is True
def test_get_aws_settings_no_credentials(without_aws_env):
    # get_aws_settings should fail when credentials are not available
    with pytest.raises(ValueError, match="Couldn't get credentials"):
        aws, creds = get_aws_settings(region_name="fake")