def test_comparable_version_left_referencing():
    """Left-referencing helpers (`or_higher`, `or_lower`, ...) with "1.2.3" as reference."""
    v = ComparableVersion("1.2.3")
    # (method name, {other version: expected result})
    cases = [
        ("or_higher", {"1.2.2": False, "1.2.3": True, "1.2.4": True}),
        ("or_lower", {"1.2.2": True, "1.2.3": True, "1.2.4": False}),
        ("accept_higher", {"1.2.2": False, "1.2.3": False, "1.2.4": True}),
        ("accept_lower", {"1.2.2": True, "1.2.3": False, "1.2.4": False}),
    ]
    for method, expectations in cases:
        for other, expected in expectations.items():
            assert getattr(v, method)(other) is expected, (method, other)
def test_mask_polygon(s2cube, api_version):
    """Polygon masking: `mask` process pre-1.0.0, dedicated `mask_polygon` process since 1.0.0."""
    polygon = shapely.geometry.Polygon([[0, 0], [1.9, 0], [1.9, 1.9], [0, 1.9]])
    if api_version < ComparableVersion("1.0.0"):
        expected_process_id = "mask"
        im = s2cube.mask(polygon)
    else:
        expected_process_id = "mask_polygon"
        im = s2cube.mask_polygon(mask=polygon)
    graph = _get_leaf_node(im)
    assert graph["process_id"] == expected_process_id
    assert graph["arguments"] == {
        "data": {"from_node": "loadcollection1"},
        "mask": {
            # Shapely closes the ring: first coordinate is repeated at the end.
            "coordinates": (((0.0, 0.0), (1.9, 0.0), (1.9, 1.9), (0.0, 1.9), (0.0, 0.0)),),
            "crs": {"properties": {"name": "EPSG:4326"}, "type": "name"},
            "type": "Polygon",
        },
    }
def test_batch_job_metadata_to_api_dict():
    """Check both basic (full=False) and full (full=True) API dict rendering of BatchJobMetadata."""
    api_version = ComparableVersion("1.0.0")
    job = BatchJobMetadata(
        id="123",
        status="running",
        created=datetime.datetime(2022, 1, 18, 16, 42, 0),
        process={"add": {"process_id": "add", "arguments": {"x": 3, "y": 5}, "result": True}},
        title="Untitled01",
        description="Lorem ipsum.",
        progress=0.3,
        cpu_time=datetime.timedelta(seconds=1000),
        memory_time_megabyte=datetime.timedelta(seconds=2000),
        started=datetime.datetime(2022, 1, 18, 17, 0, 0),
        finished=datetime.datetime(2022, 1, 18, 17, 20, 0),
        epsg=4326,
        links=[{}],
    )
    # Basic mode: only generic listing fields, no process or usage info.
    assert job.to_api_dict(full=False, api_version=api_version) == {
        "id": "123",
        "title": "Untitled01",
        "description": "Lorem ipsum.",
        "status": "running",
        "progress": 0.3,
        "created": "2022-01-18T16:42:00Z",
    }
    # Full mode adds the process description and the usage block
    # (duration = finished - started = 20 minutes = 1200 seconds).
    assert job.to_api_dict(full=True, api_version=api_version) == {
        "id": "123",
        "title": "Untitled01",
        "description": "Lorem ipsum.",
        "process": {"add": {"process_id": "add", "arguments": {"x": 3, "y": 5}, "result": True}},
        "status": "running",
        "progress": 0.3,
        "created": "2022-01-18T16:42:00Z",
        "usage": {
            "cpu": {"value": 1000, "unit": "cpu-seconds"},
            "memory": {"value": 2000, "unit": "mb-seconds"},
            "duration": {"value": 1200, "unit": "seconds"},
        }
    }
def to_api_dict(self, full=True, api_version: ComparableVersion = None) -> dict: """ API-version-aware conversion of batch job metadata to jsonable openEO API compatible dict. see https://openeo.org/documentation/1.0/developers/api/reference.html#operation/describe-job """ # Basic/full fields to export fields = ["id", "title", "description", "status", "progress", "created", "updated", "plan", "costs", "budget"] if full: fields.extend(["process"]) result = {f: getattr(self, f) for f in fields} # Additional cleaning and massaging. result["created"] = rfc3339.datetime(self.created) if self.created else None result["updated"] = rfc3339.datetime(self.updated) if self.updated else None if full: usage = self.usage or {} if self.cpu_time: usage["cpu"] = {"value": int(round(self.cpu_time.total_seconds())), "unit": "cpu-seconds"} if self.duration: usage["duration"] = {"value": int(round(self.duration.total_seconds())), "unit": "seconds"} if self.memory_time_megabyte: usage["memory"] = {"value": int(round(self.memory_time_megabyte.total_seconds())), "unit": "mb-seconds"} if usage: result["usage"] = usage if api_version and api_version.below("1.0.0"): result["process_graph"] = result.pop("process", {}).get("process_graph") result["submitted"] = result.pop("created", None) # TODO wider status checking coverage? if result["status"] == "created": result["status"] = "submitted" return dict_no_none(result)
def test_load_collection_bands_common_name(connection, api_version):
    """Common band names in load_collection: resolved to band ids pre-1.0.0, passed through since."""
    im = connection.load_collection("S2", bands=["nir", "red"])
    expected = load_json_resource('data/{v}/load_collection_bands.json'.format(v=api_version))
    bands = ["B08", "B04"] if api_version < ComparableVersion("1.0.0") else ["nir", "red"]
    expected["loadcollection1"]["arguments"]["bands"] = bands
    assert im.flat_graph() == expected
def test_aggregate_spatial_parameter_polygon(connection, api_version):
    """aggregate_spatial with a Parameter as geometries (API 1.0.0+ only)."""
    if api_version < ComparableVersion("1.0.0"):
        pytest.skip()
    geometries = Parameter("polygon")
    cube = connection.load_collection("S2")
    cube = cube.filter_bbox(3, 6, 52, 50)
    result = cube.aggregate_spatial(geometries=geometries, reducer="mean")
    expected = load_json_resource('data/%s/aggregate_zonal_parameter.json' % api_version)
    assert get_execute_graph(result) == expected
def test_aggregate_spatial(connection, api_version, reducer):
    """aggregate_spatial with a shapely polygon (API 1.0.0+ only)."""
    if api_version < ComparableVersion("1.0.0"):
        pytest.skip()
    polygon = shapely.geometry.shape(load_json_resource("data/polygon.json"))
    cube = connection.load_collection("S2")
    cube = cube.filter_bbox(3, 6, 52, 50)
    result = cube.aggregate_spatial(geometries=polygon, reducer=reducer)
    expected = load_json_resource('data/%s/aggregate_zonal_polygon.json' % api_version)
    assert get_execute_graph(result) == expected
def test_aggregate_spatial_read_vector(connection, api_version, reducer):
    """aggregate_spatial with a file path as geometries (API 1.0.0+ only)."""
    if api_version < ComparableVersion("1.0.0"):
        pytest.skip()
    cube = connection.load_collection("S2")
    cube = cube.filter_bbox(3, 6, 52, 50)
    result = cube.aggregate_spatial(
        geometries="/some/path/to/GeometryCollection.geojson", reducer=reducer
    )
    expected = load_json_resource('data/%s/aggregate_zonal_path.json' % api_version)
    assert get_execute_graph(result) == expected
def test_filter_bands_common_name(s2cube, api_version):
    """Common band names in filter_bands: separate `common_names` argument pre-1.0.0."""
    im = s2cube.filter_bands(["nir", "red"])
    expected = load_json_resource('data/{v}/filter_bands.json'.format(v=api_version))
    arguments = expected["filterbands1"]["arguments"]
    if api_version < ComparableVersion("1.0.0"):
        arguments["bands"] = []
        arguments["common_names"] = ["nir", "red"]
    else:
        arguments["bands"] = ["nir", "red"]
    assert im.flat_graph() == expected
def test_normalize_collection_metadata_no_id(self, caplog):
    """Collection metadata without an "id" field must raise a KeyError and log an error."""
    with pytest.raises(KeyError):
        _normalize_collection_metadata({"foo": "bar"}, api_version=ComparableVersion("1.0.0"))
    error_messages = [
        record.getMessage()
        for record in caplog.records
        if record.levelno == logging.ERROR
    ]
    assert any("should have 'id' field" in message for message in error_messages)
def __init__(self, api_version: str, client: FlaskClient, data_root: Path = None):
    """
    :param api_version: API version string under test (e.g. "1.0.0")
    :param client: Flask test client to issue requests with
    :param data_root: optional root directory for test data files
    """
    self.api_version = api_version
    # Keep a ComparableVersion around for convenient version comparisons in tests.
    self.api_version_compare = ComparableVersion(self.api_version)
    self.client = client
    # NOTE(review): the `data_root` attribute is only set when a truthy path is given.
    if data_root:
        self.data_root = Path(data_root)
def get_process_graph_dict(self, process_graph: dict) -> dict:
    """
    Wrap a process graph in the payload structure expected by the API version under test
    (e.g. to POST, or to expect in metadata).
    """
    pg = {"process_graph": process_graph}
    # Since API 1.0.0 the process graph lives under an additional "process" wrapper.
    if ComparableVersion("1.0.0").or_higher(self.api_version):
        return {"process": pg}
    return pg
def __init__(self, api_version: str, client: FlaskClient, data_root: Path = None,
             url_root: str = "/openeo/{api_version}"):
    """
    :param api_version: API version string under test (e.g. "1.0.0")
    :param client: Flask test client to issue requests with
    :param data_root: optional root directory for test data files
    :param url_root: URL prefix template; "{api_version}" is substituted below
    """
    self.api_version = api_version
    # Keep a ComparableVersion around for convenient version comparisons in tests.
    self.api_version_compare = ComparableVersion(self.api_version)
    self.client = client
    # NOTE(review): the `data_root` attribute is only set when a truthy path is given.
    if data_root:
        self.data_root = Path(data_root)
    # Headers added to every request by default.
    self.default_request_headers = {}
    # Resolve the URL prefix template with the actual API version (e.g. "/openeo/1.0.0").
    self.url_root = url_root.format(api_version=api_version)
def test_comparable_version_operators(b):
    """Rich comparison operators with "1.2.3" on the left and a lower version `b` on the right."""
    a = ComparableVersion("1.2.3")
    # Reflexivity.
    assert (a == a) is True
    assert (a != a) is False
    # a strictly dominates b ...
    assert (a > b) is True
    assert (a >= b) is True
    assert (a < b) is False
    assert (a <= b) is False
    # ... and the mirrored comparisons agree.
    assert (b < a) is True
    assert (b <= a) is True
    assert (b > a) is False
    assert (b >= a) is False
def test_authenticate_basic(requests_mock, api_version):
    """Basic authentication should switch the connection to bearer auth with the returned token."""
    requests_mock.get(API_URL, json={"api_version": api_version})
    conn = Connection(API_URL)

    def text_callback(request, context):
        # Expected Basic auth header: presumably base64 of the real test credentials
        # (the "******" values passed below look redacted) — TODO confirm.
        assert request.headers["Authorization"] == "Basic am9objpqMGhu"
        return '{"access_token":"w3lc0m3"}'

    requests_mock.get(API_URL + 'credentials/basic', text=text_callback)

    # Starts without authentication ...
    assert isinstance(conn.auth, NullAuth)
    conn.authenticate_basic(username="******", password="******")
    # ... and ends up with bearer auth based on the returned access token.
    assert isinstance(conn.auth, BearerAuth)
    if ComparableVersion(api_version).at_least("1.0.0"):
        # API 1.0.0+ uses the "basic//" bearer token prefix.
        assert conn.auth.bearer == "basic//w3lc0m3"
    else:
        assert conn.auth.bearer == "w3lc0m3"
def get_process_graph_dict(self, process_graph: dict, title: str = None, description: str = None) -> dict:
    """
    Build dict containing process graph (e.g. to POST, or to expect in metadata),
    according to API version, with optional title and description.
    """
    # Since API 1.0.0 the process graph lives under an additional "process" wrapper.
    if ComparableVersion("1.0.0").or_higher(self.api_version):
        data = {"process": {"process_graph": process_graph}}
    else:
        data = {"process_graph": process_graph}
    # Only include title/description when actually given.
    for field, value in [("title", title), ("description", description)]:
        if value:
            data[field] = value
    return data
def test_endpoint_registry():
    """EndpointRegistry should collect metadata of decorated view functions only."""
    app = flask.Flask(__name__)
    bp = flask.Blueprint("test", __name__)
    endpoint = EndpointRegistry()

    # Not registered in the EndpointRegistry (no @endpoint decorator).
    @bp.route("/hello")
    def hello():
        return "not an endpoint"

    # Plain usage: decorator without arguments.
    @endpoint
    @bp.route("/foo")
    def foo():
        return "simple endpoint"

    # Hidden endpoint.
    @endpoint(hidden=True)
    @bp.route("/secret")
    def secret():
        return "secret endpoint"

    # Endpoint restricted by a version predicate (here: below 1.0.0).
    @endpoint(version=ComparableVersion("1.0.0").accept_lower)
    @bp.route("/old")
    def old():
        return "old endpoint"

    app.register_blueprint(bp, url_prefix="/bar")

    result = endpoint.get_path_metadata(bp)

    # Check metadata: only the three decorated endpoints are registered.
    assert len(result) == 3
    paths, methods, metadatas = zip(*sorted(result))
    assert paths == ('/foo', '/old', '/secret')
    assert methods == ({"GET"}, ) * 3
    assert metadatas[0].hidden is False
    assert metadatas[0].for_version is None
    assert metadatas[1].for_version("0.4.0") is True
    assert metadatas[1].for_version("1.0.0") is False
    assert metadatas[2].hidden is True

    # Check that view functions still work
    client = app.test_client()
    assert b"not an endpoint" == client.get("/bar/hello").data
    assert b"simple endpoint" == client.get("/bar/foo").data
    assert b"secret endpoint" == client.get("/bar/secret").data
    assert b"old endpoint" == client.get("/bar/old").data
def test_normalize_collection_metadata_minimal_100(self, caplog):
    """Minimal metadata (just an id) is padded with defaults for API 1.0.0, with a warning."""
    assert _normalize_collection_metadata(
        {"id": "foobar"}, api_version=ComparableVersion("1.0.0")) == {
        'id': 'foobar',
        'stac_version': '0.9.0',
        # Description falls back to the collection id.
        'description': 'foobar',
        'extent': {'spatial': [0, 0, 0, 0], 'temporal': [None, None]},
        'license': 'proprietary',
        'links': [],
    }
    # The missing "extent" field should be reported as a warning.
    warnings = set(r.getMessage() for r in caplog.records if r.levelno == logging.WARN)
    assert warnings == {"Collection 'foobar' metadata does not have field 'extent'."}
def test_authenticate_basic_from_config(requests_mock, api_version):
    """Basic auth credentials should be picked up from the auth config when not given explicitly."""
    user, pwd = "john281", "J0hndo3"
    requests_mock.get(API_URL, json={"api_version": api_version})

    def text_callback(request, context):
        # The backend should receive the credentials that were stored in the config.
        assert request.headers["Authorization"] == requests.auth._basic_auth_str(username=user, password=pwd)
        return '{"access_token":"w3lc0m3"}'

    requests_mock.get(API_URL + 'credentials/basic', text=text_callback)
    # Store credentials in the auth config for this backend.
    AuthConfig().set_basic_auth(backend=API_URL, username=user, password=pwd)

    conn = Connection(API_URL)
    assert isinstance(conn.auth, NullAuth)
    # No username/password arguments: fall back on the config.
    conn.authenticate_basic()
    assert isinstance(conn.auth, BearerAuth)
    if ComparableVersion(api_version).at_least("1.0.0"):
        # API 1.0.0+ uses the "basic//" bearer token prefix.
        assert conn.auth.bearer == "basic//w3lc0m3"
    else:
        assert conn.auth.bearer == "w3lc0m3"
def test_normalize_collection_metadata_dimensions_and_bands_100(self, caplog):
    """For API 1.0.0 (full mode), cube:dimensions and eo:bands move out of "properties"."""
    metadata = {
        "id": "foobar",
        "properties": {
            "cube:dimensions": {
                "x": {"type": "spatial"},
                "b": {"type": "bands", "values": ["B02", "B03"]}
            },
            "eo:bands": [{"name": "B02"}, {"name": "B03"}]
        }
    }
    res = _normalize_collection_metadata(metadata, api_version=ComparableVersion("1.0.0"), full=True)
    # cube:dimensions is promoted to a top level field ...
    assert res["cube:dimensions"] == {
        "x": {"type": "spatial"},
        "b": {"type": "bands", "values": ["B02", "B03"]}
    }
    # ... and eo:bands ends up under "summaries".
    assert res["summaries"]["eo:bands"] == [{"name": "B02"}, {"name": "B03"}]
def get_process_registry(api_version: ComparableVersion) -> ProcessRegistry:
    """Select the process registry matching the given API version (1.0-style vs 0.4-style)."""
    return process_registry_100 if api_version.at_least("1.0.0") else process_registry_040
import pytest

from openeo.capabilities import ComparableVersion


# Equality should work transparently between ComparableVersion instances and plain strings.
@pytest.mark.parametrize(["a", "b", "c"], [
    (ComparableVersion("1.2.3"), ComparableVersion("1.2.3"), ComparableVersion("2.3.4")),
    (ComparableVersion("1.2.3"), "1.2.3", "2.3.4"),
    ("1.2.3", ComparableVersion("1.2.3"), ComparableVersion("2.3.4")),
])
def test_comparable_version_equals(a, b, c):
    """`a` equals `b` but not `c`, through operators and (when available) the `equals` method."""
    assert (a == b) is True
    assert (a == c) is False
    assert (a != b) is False
    assert (a != c) is True
    # `equals` only exists on ComparableVersion itself (`a` can also be a plain str here).
    if isinstance(a, ComparableVersion):
        assert a.equals(b) is True
        assert a.equals(c) is False


# Versions that are all strictly lower than "1.2.3".
@pytest.mark.parametrize("b", [
    "0.9",
    "1",
    "1.2.2",
    ComparableVersion("0.9"),
    ComparableVersion("1.1"),
])
def test_comparable_version_operators(b):
    a = ComparableVersion("1.2.3")
    assert (a == a) is True
import pytest

from openeo.capabilities import ComparableVersion


# Versions that are all strictly lower than "1.2.3".
@pytest.mark.parametrize("b", [
    "0.9",
    "1",
    "1.2.2",
    ComparableVersion("0.9"),
    ComparableVersion("1.1"),
])
def test_comparable_version_operators(b):
    """Rich comparison operators with "1.2.3" on the left and a lower version `b` on the right."""
    a = ComparableVersion("1.2.3")
    # Reflexivity.
    assert (a == a) is True
    assert (a != a) is False
    # a strictly dominates b ...
    assert (a > b) is True
    assert (a >= b) is True
    assert (a < b) is False
    assert (a <= b) is False
    # ... and the mirrored comparisons agree.
    assert (b < a) is True
    assert (b <= a) is True
    assert (b > a) is False
    assert (b >= a) is False


def test_comparable_version_right_referencing():
    v = ComparableVersion('1.2.3')
    assert v.above('0')
    assert v.above('0.1')
    assert v.above('0.1.2')
def test_comparable_version_right_referencing():
    """`above`/`at_least`/`below`/`at_most` with the reference version on the right-hand side."""
    v = ComparableVersion('1.2.3')
    # above: strictly greater than the argument.
    assert v.above('0')
    assert v.above('0.1')
    assert v.above('0.1.2')
    assert v.above('1.2')
    assert v.above('1.2.2')
    assert v.above('1.2.2b')
    assert v.above('1.2.3') is False
    assert v.above('1.2.20') is False
    assert v.above('1.2.4') is False
    assert v.above('1.10.4') is False
    # at_least: greater than or equal to the argument.
    assert v.at_least('0')
    assert v.at_least('1')
    assert v.at_least('1.1')
    assert v.at_least('1.10') is False
    assert v.at_least('1.2')
    assert v.at_least('1.02')
    assert v.at_least('1.2.2')
    assert v.at_least('1.2.3')
    assert v.at_least('1.2.3a') is False
    assert v.at_least('1.2.4') is False
    assert v.at_least('1.3') is False
    assert v.at_least('2') is False
    # below: strictly lower than the argument.
    assert v.below('2')
    assert v.below('1.3')
    assert v.below('1.2.4')
    assert v.below('1.2.3b')
    assert v.below('1.2.3') is False
    assert v.below('1.2') is False
    # at_most: lower than or equal to the argument.
    assert v.at_most('2')
    assert v.at_most('1.3')
    assert v.at_most('1.2.3c')
    assert v.at_most('1.2.3')
    assert v.at_most('1.02.03')
    assert v.at_most('1.2.2b') is False
    assert v.at_most('1.2') is False
    assert v.at_most('1.10')
    # The argument can also be a ComparableVersion instance.
    assert v.above(ComparableVersion('1.2'))
    assert v.at_least(ComparableVersion('1.2.3a')) is False
    assert v.at_most(ComparableVersion('1.02.03'))
class Connection(RestApiConnection):
    """
    Connection to an openEO backend.
    """

    _MINIMUM_API_VERSION = ComparableVersion("0.4.0")

    # Temporary workaround flag to enable for backends (e.g. EURAC) that expect id_token to be sent as bearer token
    # TODO DEPRECATED To remove when all backends properly expect access_token
    # see https://github.com/Open-EO/openeo-wcps-driver/issues/45
    oidc_auth_user_id_token_as_bearer = False

    def __init__(
            self, url, auth: AuthBase = None, session: requests.Session = None, default_timeout: int = None,
            auth_config: AuthConfig = None, refresh_token_store: RefreshTokenStore = None
    ):
        """
        Constructor of Connection, authenticates user.

        :param url: String Backend root url
        :param auth: requests auth object to use (default: no authentication)
        :param session: requests session to use (e.g. for connection pooling or mocking)
        :param auth_config: authentication configuration (lazily created when not given)
        :param refresh_token_store: store for OIDC refresh tokens (default store when not given)
        """
        self._orig_url = url
        super().__init__(
            root_url=self.version_discovery(url, session=session),
            auth=auth, session=session, default_timeout=default_timeout
        )
        self._capabilities_cache = {}

        # Initial API version check.
        if self._api_version.below(self._MINIMUM_API_VERSION):
            raise ApiVersionException("OpenEO API version should be at least {m!s}, but got {v!s}".format(
                m=self._MINIMUM_API_VERSION, v=self._api_version)
            )

        self._auth_config = auth_config
        self._refresh_token_store = refresh_token_store or RefreshTokenStore()

    @classmethod
    def version_discovery(cls, url: str, session: requests.Session = None) -> str:
        """
        Do automatic openEO API version discovery from given url, using a "well-known URI" strategy.

        :param url: initial backend url (not including "/.well-known/openeo")
        :return: root url of highest supported backend version
        """
        try:
            well_known_url_response = RestApiConnection(url, session=session).get("/.well-known/openeo")
            assert well_known_url_response.status_code == 200
            versions = well_known_url_response.json()["versions"]
            supported_versions = [v for v in versions if cls._MINIMUM_API_VERSION <= v["api_version"]]
            assert supported_versions
            # Prefer "production" versions, but fall back on the others when there are none.
            production_versions = [v for v in supported_versions if v.get("production", True)]
            highest_version = max(production_versions or supported_versions, key=lambda v: v["api_version"])
            _log.debug("Highest supported version available in backend: %s" % highest_version)
            return highest_version['url']
        except Exception:
            # Be very lenient about failing on the well-known URI strategy.
            return url

    def _get_auth_config(self) -> AuthConfig:
        """Lazily create/load the authentication config."""
        if self._auth_config is None:
            self._auth_config = AuthConfig()
        return self._auth_config

    def authenticate_basic(self, username: str = None, password: str = None) -> 'Connection':
        """
        Authenticate a user to the backend using basic username and password.

        :param username: User name (when not given: looked up in auth config)
        :param password: User passphrase
        """
        if username is None:
            username, password = self._get_auth_config().get_basic_auth(backend=self._orig_url)
            if username is None:
                raise OpenEoClientException("No username/password given or found.")

        resp = self.get(
            '/credentials/basic',
            # /credentials/basic is the only endpoint that expects a Basic HTTP auth
            auth=HTTPBasicAuth(username, password)
        ).json()
        # Switch to bearer based authentication in further requests.
        if self._api_version.at_least("1.0.0"):
            self.auth = BearerAuth(bearer='basic//{t}'.format(t=resp["access_token"]))
        else:
            self.auth = BearerAuth(bearer=resp["access_token"])
        return self

    def authenticate_OIDC(
            self, client_id: str,
            provider_id: str = None,
            webbrowser_open=None,
            timeout=120,
            server_address: Tuple[str, int] = None
    ) -> 'Connection':
        """
        Authenticates a user to the backend using OpenID Connect.

        :param client_id: Client id to use for OpenID Connect authentication
        :param webbrowser_open: optional handler for the initial OAuth authentication request
            (opens a webbrowser by default)
        :param timeout: number of seconds after which to abort the authentication procedure
        :param server_address: optional tuple (hostname, port_number) to serve the OAuth redirect callback on

        TODO: deprecated?
        """
        # TODO: option to increase log level temporarily?
        provider_id, provider = self._get_oidc_provider(provider_id)

        client_info = OidcClientInfo(client_id=client_id, provider=provider)
        authenticator = OidcAuthCodePkceAuthenticator(
            client_info=client_info,
            webbrowser_open=webbrowser_open, timeout=timeout, server_address=server_address,
        )
        return self._authenticate_oidc(authenticator, provider_id=provider_id)

    def _get_oidc_provider(self, provider_id: Union[str, None] = None) -> Tuple[str, OidcProviderInfo]:
        """
        Get OpenID Connect discovery URL for given provider_id

        :param provider_id: id of OIDC provider as specified by backend (/credentials/oidc).
            Can be None if there is just one provider.
        :return: updated provider_id and provider info object
        """
        if self._api_version.at_least("1.0.0"):
            oidc_info = self.get("/credentials/oidc", expected_status=200).json()
            providers = {p["id"]: p for p in oidc_info["providers"]}
            _log.info("Found OIDC providers: {p}".format(p=list(providers.keys())))
            if provider_id:
                if provider_id not in providers:
                    raise OpenEoClientException(
                        "Requested provider {r!r} not available. Should be one of {p}.".format(
                            r=provider_id, p=list(providers.keys())
                        )
                    )
                provider = providers[provider_id]
            elif len(providers) == 1:
                # No provider id given, but there is only one anyway: we can handle that.
                provider_id, provider = providers.popitem()
            else:
                raise OpenEoClientException("No provider_id given. Available: {p!r}.".format(
                    p=list(providers.keys())
                ))
            provider = OidcProviderInfo(issuer=provider["issuer"], scopes=provider.get("scopes"))
        else:
            # Per spec: '/credentials/oidc' will redirect to OpenID Connect discovery document
            provider = OidcProviderInfo(discovery_url=self.build_url('/credentials/oidc'))
        return provider_id, provider

    def _get_oidc_provider_and_client_info(
            self, provider_id: str,
            client_id: Union[str, None], client_secret: Union[str, None]
    ) -> Tuple[str, OidcClientInfo]:
        """
        Resolve provider_id and client info (as given or from config)

        :param provider_id: id of OIDC provider as specified by backend (/credentials/oidc).
            Can be None if there is just one provider.
        :return: (provider_id, client info) tuple
        """
        provider_id, provider = self._get_oidc_provider(provider_id)

        if client_id is None:
            # Fall back on client configuration from the auth config.
            client_id, client_secret = self._get_auth_config().get_oidc_client_configs(
                backend=self._orig_url, provider_id=provider_id
            )
            _log.info("Using client_id {c!r} from config (provider {p!r})".format(c=client_id, p=provider_id))
        if client_id is None:
            raise OpenEoClientException("No client ID found.")

        client_info = OidcClientInfo(client_id=client_id, client_secret=client_secret, provider=provider)

        return provider_id, client_info

    def _authenticate_oidc(
            self,
            authenticator: OidcAuthenticator,
            provider_id: str,
            store_refresh_token: bool = False
    ) -> 'Connection':
        """
        Authenticate through OIDC and set up bearer token (based on OIDC access_token) for further requests.
        """
        tokens = authenticator.get_tokens()
        _log.info("Obtained tokens: {t}".format(t=[k for k, v in tokens._asdict().items() if v]))
        if tokens.refresh_token and store_refresh_token:
            self._refresh_token_store.set_refresh_token(
                issuer=authenticator.provider_info.issuer, client_id=authenticator.client_id,
                refresh_token=tokens.refresh_token
            )
        # See `oidc_auth_user_id_token_as_bearer` workaround flag above.
        token = tokens.access_token if not self.oidc_auth_user_id_token_as_bearer else tokens.id_token
        if self._api_version.at_least("1.0.0"):
            self.auth = BearerAuth(bearer='oidc/{p}/{t}'.format(p=provider_id, t=token))
        else:
            self.auth = BearerAuth(bearer=token)
        return self

    def authenticate_oidc_authorization_code(
            self,
            client_id: str = None,
            client_secret: str = None,
            provider_id: str = None,
            timeout: int = None,
            server_address: Tuple[str, int] = None,
            webbrowser_open: Callable = None,
            store_refresh_token=False,
    ) -> 'Connection':
        """
        OpenID Connect Authorization Code Flow (with PKCE).

        WARNING: this API is in experimental phase
        """
        provider_id, client_info = self._get_oidc_provider_and_client_info(
            provider_id=provider_id, client_id=client_id, client_secret=client_secret
        )
        authenticator = OidcAuthCodePkceAuthenticator(
            client_info=client_info,
            webbrowser_open=webbrowser_open, timeout=timeout, server_address=server_address
        )
        return self._authenticate_oidc(authenticator, provider_id=provider_id, store_refresh_token=store_refresh_token)

    def authenticate_oidc_client_credentials(
            self,
            client_id: str = None,
            client_secret: str = None,
            provider_id: str = None,
            store_refresh_token=False,
    ) -> 'Connection':
        """
        OpenID Connect Client Credentials flow.

        WARNING: this API is in experimental phase
        """
        provider_id, client_info = self._get_oidc_provider_and_client_info(
            provider_id=provider_id, client_id=client_id, client_secret=client_secret
        )
        authenticator = OidcClientCredentialsAuthenticator(client_info=client_info)
        return self._authenticate_oidc(authenticator, provider_id=provider_id, store_refresh_token=store_refresh_token)

    def authenticate_oidc_resource_owner_password_credentials(
            self,
            username: str, password: str,
            client_id: str = None,
            client_secret: str = None,
            provider_id: str = None,
            store_refresh_token=False
    ) -> 'Connection':
        """
        OpenId Connect Resource Owner Password Credentials

        WARNING: this API is in experimental phase
        """
        provider_id, client_info = self._get_oidc_provider_and_client_info(
            provider_id=provider_id, client_id=client_id, client_secret=client_secret
        )
        # TODO: also get username and password from config?
        authenticator = OidcResourceOwnerPasswordAuthenticator(
            client_info=client_info, username=username, password=password
        )
        return self._authenticate_oidc(authenticator, provider_id=provider_id, store_refresh_token=store_refresh_token)

    def authenticate_oidc_refresh_token(
            self, client_id: str = None, refresh_token: str = None, client_secret: str = None,
            provider_id: str = None
    ) -> 'Connection':
        """
        OpenId Connect Refresh Token

        WARNING: this API is in experimental phase
        """
        provider_id, client_info = self._get_oidc_provider_and_client_info(
            provider_id=provider_id, client_id=client_id, client_secret=client_secret
        )

        if refresh_token is None:
            refresh_token = self._refresh_token_store.get_refresh_token(
                issuer=client_info.provider.issuer, client_id=client_info.client_id
            )
            if refresh_token is None:
                raise OpenEoClientException("No refresh token given or found")

        authenticator = OidcRefreshTokenAuthenticator(client_info=client_info, refresh_token=refresh_token)
        return self._authenticate_oidc(authenticator, provider_id=provider_id)

    def authenticate_oidc_device(
            self, client_id: str = None, client_secret: str = None, provider_id: str = None,
            store_refresh_token=False, **kwargs
    ) -> 'Connection':
        """
        Authenticate with OAuth Device Authorization grant/flow

        WARNING: this API is in experimental phase
        """
        provider_id, client_info = self._get_oidc_provider_and_client_info(
            provider_id=provider_id, client_id=client_id, client_secret=client_secret
        )
        authenticator = OidcDeviceAuthenticator(client_info=client_info, **kwargs)
        return self._authenticate_oidc(authenticator, provider_id=provider_id, store_refresh_token=store_refresh_token)

    def describe_account(self) -> str:
        """
        Describes the currently authenticated user account.
        """
        return self.get('/me').json()

    def user_jobs(self) -> dict:
        """
        Loads all jobs of the current user.

        :return: jobs: Dict All jobs of the user
        """
        # TODO duplication with `list_jobs()` method
        return self.get('/jobs').json()["jobs"]

    def list_collections(self) -> List[dict]:
        """
        Loads all available imagecollections types.

        :return: list of collection meta data dictionaries
        """
        return self.get('/collections').json()["collections"]

    def list_collection_ids(self) -> List[str]:
        """
        Get list of all collection ids

        :return: list of collection ids
        """
        return [collection['id'] for collection in self.list_collections() if 'id' in collection]

    def capabilities(self) -> RESTCapabilities:
        """
        Loads all available capabilities.

        :return: data_dict: Dict All available data types
        """
        if "capabilities" not in self._capabilities_cache:
            self._capabilities_cache["capabilities"] = RESTCapabilities(self.get('/').json())
        return self._capabilities_cache["capabilities"]

    def list_output_formats(self) -> dict:
        """List output (file) formats supported by the backend."""
        if self._api_version.at_least("1.0.0"):
            return self.list_file_formats()["output"]
        else:
            return self.get('/output_formats').json()

    list_file_types = legacy_alias(list_output_formats, "list_file_types")

    def list_file_formats(self) -> dict:
        """
        Get available input and output formats
        """
        if "file_formats" not in self._capabilities_cache:
            self._capabilities_cache["file_formats"] = self.get('/file_formats').json()
        return self._capabilities_cache["file_formats"]

    def list_service_types(self) -> dict:
        """
        Loads all available service types.

        :return: data_dict: Dict All available service types
        """
        return self.get('/service_types').json()

    def list_services(self) -> dict:
        """
        Loads all available services of the authenticated user.

        :return: data_dict: Dict All available service types
        """
        # TODO return parsed service objects
        return self.get('/services').json()

    def describe_collection(self, name) -> dict:
        # TODO: Maybe create some kind of Data class.
        """
        Loads detailed information of a specific image collection.

        :param name: String Id of the collection
        :return: data_dict: Dict Detailed information about the collection
        """
        return self.get('/collections/{}'.format(name)).json()

    def collection_metadata(self, name) -> CollectionMetadata:
        """Get the metadata of given collection as a CollectionMetadata object."""
        return CollectionMetadata(metadata=self.describe_collection(name))

    def list_processes(self) -> List[dict]:
        # TODO: Maybe format the result dictionary so that the process_id is the key of the dictionary.
        """
        Loads all available processes of the back end.

        :return: processes_dict: Dict All available processes of the back end.
        """
        return self.get('/processes').json()["processes"]

    def list_jobs(self) -> dict:
        """
        Lists all jobs of the authenticated user.

        :return: job_list: Dict of all jobs of the user.
        """
        # TODO: Maybe format the result so that there get Job classes returned.
        # TODO: duplication with `user_jobs()` method
        return self.get('/jobs').json()["jobs"]

    def save_user_defined_process(
            self, user_defined_process_id: str, process_graph: dict,
            parameters: List[Union[dict, Parameter]] = None, public: bool = False
    ) -> RESTUserDefinedProcess:
        """
        Saves a process graph and its metadata in the backend as a user-defined process for the authenticated user.

        :param user_defined_process_id: unique identifier for the user-defined process
        :param process_graph: a process graph
        :param parameters: a list of parameters
        :param public: visible to other users?
        :return: a RESTUserDefinedProcess instance
        """
        if user_defined_process_id in set(p["id"] for p in self.list_processes()):
            warnings.warn("Defining user-defined process {u!r} with same id as a pre-defined process".format(
                u=user_defined_process_id))
        udp = RESTUserDefinedProcess(user_defined_process_id=user_defined_process_id, connection=self)
        udp.store(process_graph=process_graph, parameters=parameters, public=public)
        return udp

    def list_user_defined_processes(self) -> List[dict]:
        """
        Lists all user-defined processes of the authenticated user.
        """
        return self.get("/process_graphs").json()["processes"]

    def user_defined_process(self, user_defined_process_id: str) -> RESTUserDefinedProcess:
        """
        Get the user-defined process based on its id. The process with the given id should already exist.

        :param user_defined_process_id: the id of the user-defined process
        :return: a RESTUserDefinedProcess instance
        """
        return RESTUserDefinedProcess(user_defined_process_id=user_defined_process_id, connection=self)

    def validate_processgraph(self, process_graph):
        # Endpoint: POST /validate
        raise NotImplementedError()

    @property
    def _api_version(self) -> ComparableVersion:
        # TODO make this a public property (it's also useful outside the Connection class)
        return self.capabilities().api_version_check

    def datacube_from_process(self, process_id: str, **kwargs) -> DataCube:
        """
        Load a raster datacube, from a custom process.

        @param process_id: The process id of the custom process.
        @param kwargs: The arguments of the custom process
        @return: A DataCube, without valid metadata, as the client is not aware of this custom process.
        """
        if self._api_version.at_least("1.0.0"):
            graph = PGNode(process_id, kwargs)
            return DataCube(graph, self)
        else:
            raise OpenEoClientException(
                "This method requires support for at least version 1.0.0 in the openEO backend.")

    def load_collection(self, collection_id: str, **kwargs) -> Union[ImageCollectionClient, DataCube]:
        """
        Load an image collection by collection id

        see :py:meth:`openeo.rest.imagecollectionclient.ImageCollectionClient.load_collection`
        for available arguments.

        :param collection_id: image collection identifier (string)
        :return: ImageCollectionClient
        """
        if self._api_version.at_least("1.0.0"):
            return DataCube.load_collection(collection_id=collection_id, connection=self, **kwargs)
        else:
            return ImageCollectionClient.load_collection(collection_id=collection_id, session=self, **kwargs)

    # Legacy alias.
    imagecollection = load_collection

    def create_service(self, graph: dict, type: str, **kwargs) -> dict:
        # TODO: type hint for graph: is it a nested or a flat one?
        """Create a secondary web service from a process graph; returns its url and id."""
        req = self._build_request_with_process_graph(process_graph=graph, type=type, **kwargs)
        response = self.post(path="/services", json=req, expected_status=201)
        # TODO: "location" is url of the service metadata, not (base) url of service (https://github.com/Open-EO/openeo-api/issues/269)
        # TODO: fetch this metadata and return a full metadata object instead?
        return {
            'url': response.headers.get('Location'),
            'service_id': response.headers.get("OpenEO-Identifier"),
        }

    def remove_service(self, service_id: str):
        """
        Stop and remove a secondary web service.

        :param service_id: service identifier
        :return:
        """
        # Response of the DELETE call is not used (dropped unused local assignment).
        self.delete('/services/' + service_id)

    def job_results(self, job_id):
        """Get result (metadata) of the batch job with given id."""
        return self.get("/jobs/{}/results".format(job_id)).json()

    def job_logs(self, job_id, offset):
        """Get logs of the batch job with given id, starting from given offset."""
        return self.get("/jobs/{}/logs".format(job_id), params={'offset': offset}).json()

    def list_files(self):
        """
        Lists all files that the logged in user uploaded.

        :return: file_list: List of the user uploaded files.
        """
        return self.get('/files').json()['files']

    def create_file(self, path):
        """
        Creates virtual file

        :return: file object.
        """
        # No endpoint just returns a file object.
        raise NotImplementedError()

    def _build_request_with_process_graph(self, process_graph: dict, **kwargs) -> dict:
        """
        Prepare a json payload with a process graph to submit to /result, /services, /jobs, ...

        :param process_graph: flat dict representing a process graph
        """
        result = kwargs
        # API 1.0.0+ wraps the process graph in a "process" object.
        if self._api_version.at_least("1.0.0"):
            result["process"] = {"process_graph": process_graph}
        else:
            result["process_graph"] = process_graph
        return result

    # TODO: Maybe rename to execute and merge with execute().
    def download(self, graph: dict, outputfile: Union[Path, str, None] = None):
        """
        Downloads the result of a process graph synchronously,
        and save the result to the given file or return bytes object if no outputfile is specified.
        This method is useful to export binary content such as images.
        For json content, the execute method is recommended.

        :param graph: (flat) dict representing a process graph
        :param outputfile: output file
        """
        request = self._build_request_with_process_graph(process_graph=graph)
        r = self.post(path="/result", json=request, stream=True, timeout=1000)
        if outputfile is not None:
            with Path(outputfile).open(mode="wb") as f:
                shutil.copyfileobj(r.raw, f)
        else:
            return r.content

    def execute(self, process_graph: dict):
        """
        Execute a process graph synchronously.

        :param process_graph: (flat) dict representing a process graph
        """
        req = self._build_request_with_process_graph(process_graph=process_graph)
        return self.post(path="/result", json=req).json()

    def create_job(
            self, process_graph: dict, title: str = None, description: str = None,
            plan: str = None, budget=None, additional: Dict = None
    ) -> RESTJob:
        """
        Posts a job to the back end.

        :param process_graph: (flat) dict representing process graph
        :param title: String title of the job
        :param description: String description of the job
        :param plan: billing plan
        :param budget: Budget
        :param additional: additional job options to pass to the backend
        :return: job_id: String Job id of the new created job
        """
        # TODO move all this (RESTJob factory) logic to RESTJob?
        req = self._build_request_with_process_graph(
            process_graph=process_graph,
            title=title, description=description, plan=plan, budget=budget
        )
        if additional:
            # TODO: get rid of this non-standard field? https://github.com/Open-EO/openeo-api/issues/276
            req["job_options"] = additional

        response = self.post("/jobs", json=req, expected_status=201)

        if "openeo-identifier" in response.headers:
            job_id = response.headers['openeo-identifier']
        elif "location" in response.headers:
            _log.warning("Backend did not explicitly respond with job id, will guess it from redirect URL.")
            job_id = response.headers['location'].split("/")[-1]
        else:
            raise OpenEoClientException("Failed to extract job id")
        return RESTJob(job_id, self)

    def job(self, job_id: str):
        """
        Get the job based on the id. The job with the given id should already exist.

        Use :py:meth:`openeo.rest.connection.Connection.create_job` to create new jobs

        :param job_id: the job id of an existing job
        :return: A job object.
        """
        return RESTJob(job_id, self)

    def load_disk_collection(self, format: str, glob_pattern: str, options: dict = None) -> ImageCollectionClient:
        """
        Loads image data from disk as an ImageCollection.

        :param format: the file format, e.g. 'GTiff'
        :param glob_pattern: a glob pattern that matches the files to load from disk
        :param options: options specific to the file format
        :return: the data as an ImageCollection
        """
        # Use a None sentinel instead of a mutable `{}` default argument.
        options = options or {}
        if self._api_version.at_least("1.0.0"):
            return DataCube.load_disk_collection(self, format, glob_pattern, **options)
        else:
            return ImageCollectionClient.load_disk_collection(self, format, glob_pattern, **options)
def apply_process(process_id: str, args: Dict, viewingParameters):
    """
    Evaluate a single process graph node: pre-process viewing parameters for
    spatial/temporal filters, resolve child nodes/arguments, then dispatch to
    the implementation matching the process id and its parent process.

    NOTE(review): `viewingParameters` is a function argument but is also written
    to (used as a kind of shared state object), which is confusing when debugging.
    """
    parent_process = viewingParameters.get('parent_process')

    if process_id in ('filter_daterange', 'filter_temporal'):
        # filter_daterange: pre 0.3.x API; filter_temporal: 0.4.x and later.
        viewingParameters = viewingParameters or {}
        if 'extent' in args:
            # API version >= 0.4: temporal extent given as a [start, end] pair.
            extent = args['extent']
            if len(extent) != 2:
                raise AttributeError(
                    "extent property should be an array of length 2, but got: " + str(extent))
            viewingParameters["from"], viewingParameters["to"] = extent
        else:
            viewingParameters["from"] = extract_arg(args, "from")
            viewingParameters["to"] = extract_arg(args, "to")
    elif process_id == 'filter_bbox':
        viewingParameters = viewingParameters or {}
        if "left" in args:
            # API version <= 0.3.x: left/right/top/bottom/srs given directly.
            for field in ("left", "right", "top", "bottom", "srs"):
                viewingParameters[field] = extract_arg(args, field)
        else:
            # API version >= 0.4.x: west/east/north/south, optionally nested under "extent".
            extent = args.get("extent", args)
            viewingParameters["left"] = extract_arg(extent, "west")
            viewingParameters["right"] = extract_arg(extent, "east")
            viewingParameters["top"] = extract_arg(extent, "north")
            viewingParameters["bottom"] = extract_arg(extent, "south")
            viewingParameters["srs"] = extent.get("crs") or "EPSG:4326"
    elif process_id in ('zonal_statistics', 'aggregate_polygon', 'aggregate_spatial'):
        polygons = extract_arg_list(args, ['regions', 'polygons'])
        if viewingParameters.get("left") is None:
            if "type" in polygons:
                # Inline GeoJSON geometry (or FeatureCollection).
                if polygons['type'] == 'FeatureCollection':
                    geometries = _as_geometry_collection(polygons)
                else:
                    geometries = polygons
                bbox = shape(geometries).bounds
            if "from_node" in polygons:
                # Dereferenced from_node that contains a DelayedVector.
                geometries = convert_node(polygons["node"], viewingParameters)
                bbox = geometries.bounds
            # NOTE(review): if `polygons` has neither "type" nor "from_node",
            # `bbox`/`geometries` would be unbound here — confirm upstream guarantees one of them.
            viewingParameters["left"] = bbox[0]
            viewingParameters["right"] = bbox[2]
            viewingParameters["bottom"] = bbox[1]
            viewingParameters["top"] = bbox[3]
            viewingParameters["srs"] = "EPSG:4326"
            # Might as well cache the value instead of re-evaluating it further on.
            args['polygons'] = geometries
    elif process_id == 'filter_bands':
        viewingParameters = viewingParameters or {}
        viewingParameters["bands"] = extract_arg(args, "bands")
    elif parent_process == 'apply':
        if "data" in viewingParameters:
            # The `apply` process passes its `data` parameter as `x` parameter to the subprocess.
            viewingParameters["x"] = viewingParameters["data"]

    # First resolve child nodes and arguments.
    args = {name: convert_node(expr, viewingParameters) for name, expr in args.items()}

    # When all arguments and dependencies are resolved, we can run the process.
    if parent_process == "apply":
        cube = extract_arg_list(args, ['x', 'data', 'imagery'])
        if process_id == "run_udf":
            return cube.apply_tiles(_get_udf(args))
        return cube.apply(process_id, args)
    elif parent_process in ("reduce", "reduce_dimension"):
        cube = extract_arg_list(args, ['data', 'imagery'])
        dimension = extract_arg(viewingParameters, 'dimension')
        binary = viewingParameters.get('binary', False)
        dimension, band_dim, temporal_dim = _check_dimension(cube=cube, dim=dimension, process=parent_process)
        if process_id == 'run_udf' and not binary:
            # EP-2760: a special case of reduce where only a single udf based callback is
            # provided. The more generic case is not yet supported.
            if dimension == temporal_dim:
                return cube.apply_tiles_spatiotemporal(_get_udf(args))
            elif dimension == band_dim:
                return cube.apply_tiles(_get_udf(args))
        return cube.reduce(process_id, dimension)
    elif parent_process == 'apply_dimension':
        cube = extract_arg(args, 'data')
        # By default (no dimension given), applies the process on all pixel values (as apply does).
        dimension = viewingParameters.get('dimension', None)
        dimension, band_dim, temporal_dim = _check_dimension(cube=cube, dim=dimension, process=parent_process)
        if process_id == "run_udf":
            udf = _get_udf(args)
            if dimension == temporal_dim:
                return cube.apply_tiles_spatiotemporal(udf)
            return cube.apply_tiles(udf)
        return cube.apply_dimension(process_id, dimension)
    elif parent_process in ('aggregate_polygon', 'aggregate_spatial'):
        cube = extract_arg_list(args, ['data', 'imagery'])
        binary = viewingParameters.get('binary', False)  # currently unused in this branch
        name = viewingParameters.get('name', 'result')  # currently unused in this branch
        # `polygons` can be either (inline) GeoJSON or something returned by read_vector.
        polygons = extract_arg(viewingParameters, 'polygons')
        if isinstance(polygons, Dict):
            return cube.zonal_statistics(shape(polygons), func=process_id)
        # TODO: rename to aggregate_polygon?
        return cube.zonal_statistics(polygons.path, func=process_id)
    elif parent_process == 'aggregate_temporal':
        cube = extract_arg_list(args, ['data', 'imagery'])
        intervals = extract_arg(viewingParameters, 'intervals')
        labels = extract_arg(viewingParameters, 'labels')
        dimension = viewingParameters.get('dimension', None)
        dimension, _, _ = _check_dimension(cube=cube, dim=dimension, process=parent_process)
        return cube.aggregate_temporal(intervals, labels, process_id, dimension)
    else:
        # No special parent process: look up the plain process implementation by API version.
        if ComparableVersion("1.0.0").or_higher(viewingParameters["version"]):
            process_function = process_registry_100.get_function(process_id)
        else:
            process_function = process_registry_040.get_function(process_id)
        return process_function(args, viewingParameters)
except pkg_resources.DistributionNotFound: version_info[package] = "n/a" return { 'date': date_to_rfc3339(datetime.datetime.utcnow()), 'versions': version_info } @openeo_bp.route('/health') def health(): return jsonify({ "health": backend_implementation.health_check() }) @api_endpoint(version=ComparableVersion("0.3.1").or_lower) @openeo_bp.route('/capabilities') def capabilities(): return jsonify([ "/data", "/execute", "/processes" ]) @api_endpoint(version=ComparableVersion("1.0.0").accept_lower) @openeo_bp.route('/output_formats') def output_formats(): # TODO deprecated endpoint, remove it when v0.4 API support is not necessary anymore return jsonify(backend_implementation.file_formats()["output"])
def result_callback(request, context): post_data = request.json() pg = (post_data["process"] if api_version >= ComparableVersion("1.0.0") else post_data)["process_graph"] assert pg["saveresult1"]["arguments"]["format"] == expected_format return b"data"
def _normalize_collection_metadata(metadata: dict, api_version: ComparableVersion, full=False) -> dict:
    """
    Make sure the given collection metadata roughly complies to desired version of OpenEO spec.

    :param metadata: raw collection metadata dict
    :param api_version: requested API version (drives 0.4 vs 1.0 style conversions)
    :param full: whether to keep full metadata (or reduce to the basic fields)
    :return: normalized metadata dict (the input dict is not mutated)
    """
    # Work on a deep copy and drop all "private" fields (leading underscore).
    metadata = {
        field: value
        for field, value in copy.deepcopy(metadata).items()
        if not field.startswith('_')
    }

    # Metadata should at least contain an id.
    if "id" not in metadata:
        _log.error("Collection metadata should have 'id' field: {m!r}".format(m=metadata))
        raise KeyError("id")
    collection_id = metadata["id"]

    # Version dependent metadata conversions
    cube_dims_100 = deep_get(metadata, "cube:dimensions", default=None)
    cube_dims_040 = deep_get(metadata, "properties", "cube:dimensions", default=None)
    eo_bands_100 = deep_get(metadata, "summaries", "eo:bands", default=None)
    eo_bands_040 = deep_get(metadata, "properties", "eo:bands", default=None)
    if api_version.below("1.0.0"):
        # Down-convert 1.0-style fields into the 0.4 "properties" container when missing.
        if full and not cube_dims_040 and cube_dims_100:
            metadata.setdefault("properties", {})
            metadata["properties"]["cube:dimensions"] = cube_dims_100
        if full and not eo_bands_040 and eo_bands_100:
            metadata.setdefault("properties", {})
            metadata["properties"]["eo:bands"] = eo_bands_100
    else:
        # Up-convert 0.4-style fields into their 1.0 locations when missing.
        if full and not cube_dims_100 and cube_dims_040:
            _log.warning("Collection metadata 'cube:dimensions' in API 0.4 style instead of 1.0 style")
            metadata["cube:dimensions"] = cube_dims_040
        if full and not eo_bands_100 and eo_bands_040:
            _log.warning("Collection metadata 'eo:bands' in API 0.4 style instead of 1.0 style")
            metadata.setdefault("summaries", {})
            metadata["summaries"]["eo:bands"] = eo_bands_040

    # Make sure some required fields are set.
    metadata.setdefault("stac_version", "0.9.0" if api_version.at_least("1.0.0") else "0.6.2")
    metadata.setdefault("links", [])
    metadata.setdefault("description", collection_id)
    metadata.setdefault("license", "proprietary")

    # Warn about missing fields where simple defaults are not feasible.
    fallbacks = {"extent": {"spatial": [0, 0, 0, 0], "temporal": [None, None]}}
    if full:
        if api_version.at_least("1.0.0"):
            fallbacks["cube:dimensions"] = {}
            fallbacks["summaries"] = {}
        else:
            fallbacks["properties"] = {}
            fallbacks["other_properties"] = {}
    for field, fallback in fallbacks.items():
        if field not in metadata:
            _log.warning("Collection {c!r} metadata does not have field {k!r}.".format(c=collection_id, k=field))
            metadata[field] = fallback

    if not full:
        # Keep only the basic (non-full) subset of fields.
        basic_keys = [
            "stac_version", "stac_extensions", "id", "title", "description", "keywords", "version",
            "deprecated", "license", "providers", "extent", "links"
        ]
        metadata = {k: v for k, v in metadata.items() if k in basic_keys}

    return metadata
def requested_api_version() -> ComparableVersion: """Get the currently requested API version as a ComparableVersion object""" return ComparableVersion(g.api_version)