Ejemplo n.º 1
0
async def test_put_component_transformation_without_update_code(
        async_test_client, clean_test_db_engine):
    """PUT with update_component_code=False must keep the stored code as-is."""
    session_override = sessionmaker(clean_test_db_engine)
    with mock.patch(
            "hetdesrun.persistence.dbservice.revision.Session",
            session_override,
    ):
        tr_json = load_json(
            "./tests/data/components/alerts-from-score_100_38f168ef-cb06-d89c-79b3-0cd823f32e9d.json"
        )

        async with async_test_client as client:
            # Explicitly disable code regeneration via the query parameter.
            url = (
                posix_urljoin("/api/transformations/", tr_json["id"])
                + "?update_component_code=False"
            )
            response = await client.put(url, json=tr_json)

        stored_tr = read_single_transformation_revision(tr_json["id"])

        assert response.status_code == 201
        # Neither the response nor the DB entry may contain regenerated code.
        assert "COMPONENT_INFO" not in response.json()["content"]
        assert "COMPONENT_INFO" not in stored_tr.content
        assert "register" in response.json()["content"]
        assert "register" in stored_tr.content
Ejemplo n.º 2
0
async def test_update_transformation_revision_with_invalid_name_workflow(
        async_test_client, clean_test_db_engine):
    """A name failing the backend's name regex must yield a 422 response."""
    with mock.patch(
            "hetdesrun.persistence.dbservice.revision.Session",
            sessionmaker(clean_test_db_engine),
    ):
        store_single_transformation_revision(
            TransformationRevision(**tr_json_workflow_2))

        invalid_update_json = deepcopy(tr_json_workflow_2_update)
        invalid_update_json["name"] = "'"

        async with async_test_client as client:
            response = await client.put(
                posix_urljoin("/api/transformations/",
                              str(get_uuid_from_seed("workflow 2"))),
                json=invalid_update_json,
            )

        print(response.json())
        assert response.status_code == 422
        first_error = response.json()["detail"][0]
        assert "string does not match regex" in first_error["msg"]
        assert "name" in first_error["loc"]
Ejemplo n.º 3
0
def import_transformation(
    tr_json: dict,
    path: str,
    strip_wirings: bool = False,
    directly_into_db: bool = False,
    update_component_code: bool = True,
) -> None:
    """Import a single transformation revision.

    Args:
        tr_json: Serialized transformation revision (as loaded from JSON).
        path: Source path of the JSON file; used only in log messages.
        strip_wirings: If True, drop all input/output wirings before import.
        directly_into_db: If True, write directly into the database instead of
            PUTting against the backend API.
        update_component_code: Forwarded as query parameter, controlling
            whether the backend regenerates component code on import.
    """

    if strip_wirings:
        tr_json["test_wiring"] = {"input_wirings": [], "output_wirings": []}

    if directly_into_db:
        tr = TransformationRevision(**tr_json)
        logger.info(
            ("Update or create database entry"
             " for transformation revision %s of type %s\n"
             "in category %s with name %s"),
            str(tr.id),
            str(tr.type),
            tr.category,
            tr.name,
        )
        update_or_create_single_transformation_revision(tr)

    else:
        headers = get_auth_headers()

        response = requests.put(
            posix_urljoin(get_config().hd_backend_api_url, "transformations",
                          tr_json["id"]),
            params={
                "allow_overwrite_released": True,
                "update_component_code": update_component_code,
            },
            verify=get_config().hd_backend_verify_certs,
            json=tr_json,
            auth=get_backend_basic_auth()  # type: ignore
            if get_config().hd_backend_use_basic_auth else None,
            headers=headers,
        )
        # BUGFIX: the last two arguments were previously passed as
        # (name, category) although the format string expects
        # "...in category %s with name %s", i.e. (category, name).
        logger.info(
            ("PUT transformation status code: %d"
             " for transformation revision %s of type %s\n"
             "in category %s with name %s"),
            response.status_code,
            tr_json["id"],
            tr_json["type"],
            tr_json["category"],
            tr_json["name"],
        )
        if response.status_code != 201:
            # BUGFIX: period moved before the newline and a missing space
            # added between "{response.status_code}" and "with".
            msg = (f"COULD NOT PUT {tr_json['type']} from path {path}.\n"
                   f"Response status code {response.status_code} "
                   f"with response text:\n{response.text}")
            logger.error(msg)
Ejemplo n.º 4
0
async def test_execute_for_full_workflow_dto(async_test_client,
                                             clean_test_db_engine):
    """End-to-end: import all required trafo fixtures, then execute one workflow."""
    patched_session = sessionmaker(clean_test_db_engine)
    # Both dbservice modules must see the patched session factory.
    with mock.patch(
            "hetdesrun.persistence.dbservice.nesting.Session",
            patched_session,
    ):
        with mock.patch(
                "hetdesrun.persistence.dbservice.revision.Session",
                patched_session,
        ):
            async with async_test_client as ac:

                # Fixture files to import: components first, then the example
                # workflows (the last one is executed below).
                json_files = [
                    "./transformations/components/connectors/pass-through-integer_100_57eea09f-d28e-89af-4e81-2027697a3f0f.json",
                    "./transformations/components/connectors/pass-through-series_100_bfa27afc-dea8-b8aa-4b15-94402f0739b6.json",
                    "./transformations/components/connectors/pass-through-string_100_2b1b474f-ddf5-1f4d-fec4-17ef9122112b.json",
                    "./transformations/components/remaining-useful-life/univariate-linear-rul-regression_100_8d61a267-3a71-51cd-2817-48c320469d6b.json",
                    "./transformations/components/visualization/univariate-linear-rul-regression-result-plot_100_9c3f88ce-1311-241e-18b7-acf7d3f5a051.json",
                    "./transformations/components/arithmetic/consecutive-differences_100_ce801dcb-8ce1-14ad-029d-a14796dcac92.json",
                    "./transformations/components/basic/filter_100_18260aab-bdd6-af5c-cac1-7bafde85188f.json",
                    "./transformations/components/basic/greater-or-equal_100_f759e4c0-1468-0f2e-9740-41302b860193.json",
                    "./transformations/components/basic/last-datetime-index_100_c8e3bc64-b214-6486-31db-92a8888d8991.json",
                    "./transformations/components/basic/restrict-to-time-interval_100_bf469c0a-d17c-ca6f-59ac-9838b2ff67ac.json",
                    "./transformations/components/connectors/pass-through-float_100_2f511674-f766-748d-2de3-ad5e62e10a1a.json",
                    "./transformations/components/visualization/single-timeseries-plot_100_8fba9b51-a0f1-6c6c-a6d4-e224103b819c.json",
                    "./transformations/workflows/examples/data-from-last-positive-step_100_2cbb87e7-ea99-4404-abe1-be550f22763f.json",
                    "./transformations/workflows/examples/univariate-linear-rul-regression-example_100_806df1b9-2fc8-4463-943f-3d258c569663.json",
                    "./transformations/workflows/examples/linear-rul-from-last-positive-step_100_3d504361-e351-4d52-8734-391aa47e8f24.json",
                ]

                # Import every fixture via PUT; allow_overwrite_released=True
                # is passed in the query string.
                for file in json_files:
                    tr_json = load_json(file)

                    response = await ac.put(
                        posix_urljoin(
                            get_config().hd_backend_api_url,
                            "transformations",
                            tr_json["id"],
                        ) + "?allow_overwrite_released=True",
                        json=tr_json,
                    )

                # Execute the last imported workflow using its stored test wiring.
                workflow_id = UUID("3d504361-e351-4d52-8734-391aa47e8f24")
                tr_workflow = read_single_transformation_revision(workflow_id)
                wiring_dto = WiringFrontendDto.from_wiring(
                    tr_workflow.test_wiring, workflow_id)

                response = await ac.post(
                    "/api/workflows/" + str(workflow_id) + "/execute",
                    json=json.loads(wiring_dto.json(by_alias=True)),
                )

                assert response.status_code == 200
                assert "output_types_by_output_name" in response.json()
Ejemplo n.º 5
0
async def post_framelike_records(
    list_of_records: List[dict],
    attributes: Optional[Any],
    ref_id: str,
    adapter_key: str,
    endpoint: Literal["timeseries", "dataframe"],
    client: AsyncClient,
) -> None:
    """Post a list of dicts (records) to the appropriate endpoint"""
    headers = get_generic_rest_adapter_auth_headers()
    if attributes is not None and len(attributes) != 0:
        logger.debug("Sending Data-Attributes via POST request header")
        headers["Data-Attributes"] = encode_attributes(attributes)

    url = posix_urljoin(
        await get_generic_rest_adapter_base_url(adapter_key), endpoint
    )

    begin = datetime.datetime.now(datetime.timezone.utc)
    logger.info(
        "Start sending framelike data at %s to %s for id %s",
        begin.isoformat(),
        url,
        ref_id,
    )

    # The dataframe endpoint expects "id"; the timeseries endpoint "timeseriesId".
    id_param = "id" if endpoint == "dataframe" else "timeseriesId"
    try:
        response = await client.post(
            url,
            params=[(id_param, ref_id)],
            json=list_of_records,
            headers=headers,
            timeout=60,
        )
    except httpx.HTTPError as e:
        msg = f"Http error while posting framelike data to {url} for id {ref_id}: {str(e)}"
        logger.info(msg)
        raise AdapterConnectionError(msg) from e

    if response.status_code not in (200, 201):
        msg = (
            f"Failed posting framelike data to {url} for id {ref_id}."
            f" Status code: {str(response.status_code)}. Response text: {response.text}"
        )
        raise AdapterConnectionError(msg)
    logger.info(
        "Successfully finished posting framelike data to %s for id %s at %s",
        url,
        ref_id,
        datetime.datetime.now(datetime.timezone.utc).isoformat(),
    )
Ejemplo n.º 6
0
async def test_get_transformation_revision_by_id_with_inexistent_workflow(
        async_test_client, clean_test_db_engine):
    """Requesting an id that was never stored must return 404."""
    with mock.patch(
            "hetdesrun.persistence.dbservice.revision.Session",
            sessionmaker(clean_test_db_engine),
    ):
        missing_id = str(get_uuid_from_seed("inexistent workflow"))
        async with async_test_client as client:
            response = await client.get(
                posix_urljoin("/api/transformations/", missing_id))
        assert response.status_code == 404
        assert "Found no" in response.json()["detail"]
Ejemplo n.º 7
0
async def send_single_metadatum_to_adapter(
    filtered_sink: FilteredSink,
    metadatum_value: Any,
    adapter_key: str,
    client: httpx.AsyncClient,
) -> None:
    """Post a single metadatum value to a generic REST adapter.

    The target endpoint is derived from the sink's ref_id_type. Transport
    errors and non-200/201 answers raise AdapterConnectionError.
    """
    # Pick the endpoint matching the kind of object the metadatum refers to.
    if filtered_sink.ref_id_type == RefIdType.SOURCE:
        endpoint = "sources"
    elif filtered_sink.ref_id_type == RefIdType.SINK:
        endpoint = "sinks"
    else:
        endpoint = "thingNodes"

    url = posix_urljoin(
        await get_generic_rest_adapter_base_url(adapter_key),
        endpoint,
        urllib.parse.quote(str(filtered_sink.ref_id)),
        "metadata",
        urllib.parse.quote(str(filtered_sink.ref_key)),
    )

    value_datatype = ExternalType(filtered_sink.type).value_datatype
    assert value_datatype is not None  # for mypy

    try:
        resp = await client.post(
            url,
            json={
                "key": filtered_sink.ref_key,
                "value": metadatum_value,
                "dataType": value_datatype.value,
            },
        )
    except httpx.HTTPError as e:
        msg = (f"Posting metadata to generic rest adapter endpoint {url}"
               f" failed with Exception {str(e)}")

        logger.info(msg)
        raise AdapterConnectionError(
            f"Posting metadata from generic rest adapter endpoint {url} failed."
        ) from e

    # Idiomatic membership test instead of a chained != comparison.
    if resp.status_code not in (200, 201):
        msg = (
            f"Posting metadata to generic rest adapter endpoint {url} failed."
            f" Status code: {resp.status_code}. Text: {resp.text}")
        logger.info(msg)
        raise AdapterConnectionError(msg)
    logger.debug("Received Response for metadata posting from url %s:\n%s",
                 url, str(resp.text))
Ejemplo n.º 8
0
async def test_get_transformation_revision_by_id_with_workflow(
        async_test_client, clean_test_db_engine):
    """A stored workflow must be returned unchanged by GET."""
    with mock.patch(
            "hetdesrun.persistence.dbservice.revision.Session",
            sessionmaker(clean_test_db_engine),
    ):
        store_single_transformation_revision(
            TransformationRevision(**tr_json_workflow_1))

        workflow_url = posix_urljoin(
            "/api/transformations/", str(get_uuid_from_seed("workflow 1")))
        async with async_test_client as client:
            response = await client.get(workflow_url)

        assert response.status_code == 200
        assert response.json() == tr_json_workflow_1
Ejemplo n.º 9
0
async def test_update_transformation_revision_with_released_component(
        async_test_client, clean_test_db_engine):
    """Updating a released component without the overwrite flag is forbidden."""
    with mock.patch(
            "hetdesrun.persistence.dbservice.revision.Session",
            sessionmaker(clean_test_db_engine),
    ):
        store_single_transformation_revision(
            TransformationRevision(**tr_json_component_2))

        component_url = posix_urljoin(
            "/api/transformations/", str(get_uuid_from_seed("component 2")))
        async with async_test_client as client:
            response = await client.put(component_url,
                                        json=tr_json_component_2_update)

        assert response.status_code == 403
Ejemplo n.º 10
0
async def load_generic_adapter_base_urls() -> List[BackendRegisteredGenericRestAdapter]:
    """Loads generic REST adapter infos from the corresponding designer backend endpoint"""

    headers = get_generic_rest_adapter_auth_headers()

    url = posix_urljoin(runtime_config.hd_backend_api_url, "adapters/")
    logger.info("Start getting Generic REST Adapter URLS from HD Backend url %s", url)

    async with httpx.AsyncClient(
        verify=runtime_config.hd_backend_verify_certs
    ) as client:
        try:
            response = await client.get(url, headers=headers)
        except httpx.HTTPError as e:
            msg = f"Failure connecting to hd backend adapters endpoint ({url}): {str(e)}"
            logger.info(msg)
            raise AdapterConnectionError(msg) from e

    # Client is closed here; only the already-received response is used below.
    if response.status_code != 200:
        msg = (
            f"HTTP failure trying to receive generic adapter infos from hd backend ({url}):"
            f" Status code {str(response.status_code)}. Response: {response.text}"
        )

        logger.info(msg)
        raise AdapterConnectionError(msg)

    try:
        adapter_list: List[
            BackendRegisteredGenericRestAdapter
        ] = BackendRegisteredGenericRestAdapters.parse_obj(response.json()).__root__
    except ValidationError as e:
        msg = "Failure trying to parse received generic adapter infos: " + str(e)

        logger.info(msg)
        raise AdapterHandlingException(msg) from e

    logger.info(
        "Finished getting Generic REST Adapter URLS from HD Backend url %s", url
    )

    return adapter_list
Ejemplo n.º 11
0
async def test_update_transformation_revision_with_non_existing_workflow(
        async_test_client, clean_test_db_engine):
    """PUT on an unknown id creates the workflow (201) instead of failing."""
    with mock.patch(
            "hetdesrun.persistence.dbservice.revision.Session",
            sessionmaker(clean_test_db_engine),
    ):
        workflow_uuid = get_uuid_from_seed("workflow 2")
        async with async_test_client as client:
            response = await client.put(
                posix_urljoin("/api/transformations/", str(workflow_uuid)),
                json=tr_json_workflow_2_update,
            )

        stored_workflow = read_single_transformation_revision(workflow_uuid)

        assert response.status_code == 201
        assert response.json()["name"] == "new name"
        assert len(stored_workflow.content.links) == 2
Ejemplo n.º 12
0
 def _obtain_public_key_data(self) -> None:
     """Fetch Keycloak's realm certs and cache them on the instance.

     Does nothing if key data was already fetched.

     Raises:
         AuthentificationError: If the certs endpoint cannot be reached.
     """
     if self._keycloak_public_key_data is not None:
         # assume public key is not rotated and therefore valid forever
         return
     url = posix_urljoin(
         self.creds.auth_url,
         "realms",
         self.creds.realm,
         "protocol/openid-connect/certs",
     )
     try:
         resp = get(url)
     except HTTPError as e:
         logger.info("Error trying to get public key from Keycloak: %s",
                     str(e))
         raise AuthentificationError(
             "Error trying to get public key from Keycloak") from e
     # NOTE(review): the cache check above happens outside this lock, so two
     # threads may fetch concurrently; the lock only serializes the write —
     # confirm this is acceptable (both fetches return equivalent data).
     with self._keycloak_public_key_lock:
         self._keycloak_public_key_data = resp.json()
Ejemplo n.º 13
0
async def test_update_transformation_revision_with_released_component_and_allow_overwrite_flag(
        async_test_client, clean_test_db_engine):
    """With allow_overwrite_released=true a released component may be updated."""
    with mock.patch(
            "hetdesrun.persistence.dbservice.revision.Session",
            sessionmaker(clean_test_db_engine),
    ):
        store_single_transformation_revision(
            TransformationRevision(**tr_json_component_2))

        target_url = (
            posix_urljoin("/api/transformations/",
                          str(get_uuid_from_seed("component 2")))
            + "?allow_overwrite_released=true"
        )
        async with async_test_client as client:
            response = await client.put(target_url,
                                        json=tr_json_component_2_update)

        assert response.status_code == 201
        body = response.json()
        assert body["name"] == "new name"
        assert body["category"] == "Test"
Ejemplo n.º 14
0
async def test_deprecate_transformation_revision_with_component(
        async_test_client, clean_test_db_engine):
    """Deprecation must not rename/recategorize and must stamp the code."""
    with mock.patch(
            "hetdesrun.persistence.dbservice.revision.Session",
            sessionmaker(clean_test_db_engine),
    ):
        store_single_transformation_revision(
            TransformationRevision(**tr_json_component_2))

        async with async_test_client as client:
            response = await client.put(
                posix_urljoin("/api/transformations/",
                              str(get_uuid_from_seed("component 2"))),
                json=tr_json_component_2_deprecate,
            )

        assert response.status_code == 201
        body = response.json()
        assert body["name"] != "new name"
        assert body["category"] != "Test"
        assert "disabled_timestamp" in body["content"]
        assert "released_timestamp" in body["content"]
Ejemplo n.º 15
0
def obtain_token_from_keycloak(
    service_user_credentials: ServiceUserCredentials, ) -> TokenResponse:
    """Request an access token from Keycloak using the password grant.

    Args:
        service_user_credentials: Client id, user credentials, realm and
            auth URL of the Keycloak instance.

    Returns:
        TokenResponse parsed from Keycloak's answer, enriched with an
        issue_timestamp taken just before the request was sent.

    Raises:
        AuthentificationError: On connection errors or unparsable answers.
    """

    # NOTE(review): utcnow() yields a *naive* datetime; confirm downstream
    # expiry comparisons expect naive UTC before switching to an aware one.
    now = datetime.datetime.utcnow()
    try:
        resp = post(
            url=posix_urljoin(
                service_user_credentials.auth_url,
                "realms",
                service_user_credentials.realm,
                "protocol/openid-connect/token",
            ),
            data={
                "client_id": service_user_credentials.client_id,
                "username": service_user_credentials.username,
                "password": service_user_credentials.password,
                "grant_type": "password",
            },
        )
    except HTTPError as e:
        logger.info("Error trying to get token from Keycloak")
        raise AuthentificationError(
            "Error trying to get token from Keycloak: " + str(e)) from e

    # Record when the token was issued so its lifetime can be tracked.
    token_dict = resp.json()
    token_dict["issue_timestamp"] = now

    try:
        token_response = TokenResponse.parse_obj(token_dict)
    except ValidationError as e:
        logger.info(
            "Could not understand answer to token request from keycloak")
        raise AuthentificationError(
            "Could not understand answer to token request from keycloak"
        ) from e

    return token_response
Ejemplo n.º 16
0
async def test_put_workflow_transformation(async_test_client,
                                           clean_test_db_engine):
    """PUTting a complete example workflow must succeed with 201."""
    session_factory = sessionmaker(clean_test_db_engine)
    # Patch both dbservice modules with the same session factory.
    with mock.patch(
            "hetdesrun.persistence.dbservice.nesting.Session",
            session_factory,
    ), mock.patch(
            "hetdesrun.persistence.dbservice.revision.Session",
            session_factory,
    ):
        workflow_json = load_json(
            "./transformations/workflows/examples/data-from-last-positive-step_100_2cbb87e7-ea99-4404-abe1-be550f22763f.json"
        )

        async with async_test_client as client:
            response = await client.put(
                posix_urljoin("/api/transformations/",
                              workflow_json["id"]),
                json=workflow_json,
            )

        assert response.status_code == 201
Ejemplo n.º 17
0
async def run_execution_input(
    execution_input: WorkflowExecutionInput, ) -> ExecutionResponseFrontendDto:
    """Runs the provided execution input

    Depending on configuration this either calls a function or queries the
    external runtime service endpoint (if this instance is not considered to
    act as runtime service).

    Raises subtypes of TrafoExecutionError on errors.
    """
    # Map output names to their declared types so the response can report them.
    output_types = {
        output.name: output.type
        for output in execution_input.workflow.outputs
    }

    execution_result: WorkflowExecutionResult

    if get_config().is_runtime_service:
        # This instance acts as the runtime itself: execute in-process.
        execution_result = await runtime_service(execution_input)
    else:
        # Delegate execution to the external runtime service via HTTP.
        headers = get_auth_headers()

        async with httpx.AsyncClient(
                verify=get_config().hd_runtime_verify_certs) as client:
            url = posix_urljoin(get_config().hd_runtime_engine_url, "runtime")
            try:
                response = await client.post(
                    url,
                    headers=headers,
                    json=json.loads(execution_input.json()
                                    ),  # TODO: avoid double serialization.
                    # see https://github.com/samuelcolvin/pydantic/issues/1409 and
                    # https://github.com/samuelcolvin/pydantic/issues/1409#issuecomment-877175194
                    timeout=None,  # no client timeout: executions may run long
                )
            except httpx.HTTPError as e:
                # handles both request errors (connection problems)
                # and 4xx and 5xx errors. See https://www.python-httpx.org/exceptions/
                msg = f"Failure connecting to hd runtime endpoint ({url}):\n{str(e)}"
                logger.info(msg)
                raise TrafoExecutionRuntimeConnectionError(msg) from e
            try:
                json_obj = response.json()
                execution_result = WorkflowExecutionResult(**json_obj)
            except ValidationError as e:
                msg = (
                    f"Could not validate hd runtime result object. Exception:\n{str(e)}"
                    f"\nJson Object is:\n{str(json_obj)}")
                logger.info(msg)
                raise TrafoExecutionResultValidationError(msg) from e

    # Combine the runtime result with the statically known output types.
    execution_response = ExecutionResponseFrontendDto(
        error=execution_result.error,
        output_results_by_output_name=execution_result.
        output_results_by_output_name,
        output_types_by_output_name=output_types,
        result=execution_result.result,
        traceback=execution_result.traceback,
        job_id=execution_input.job_id,
    )

    return execution_response
Ejemplo n.º 18
0
async def load_framelike_data(
        filtered_sources: List[FilteredSource],
        additional_params:
    List[Tuple[
        str,
        str]],  # for timeseries: [("from", from_timestamp), ("to", to_timestamp)]
        adapter_key: str,
        endpoint: Literal["timeseries",
                          "dataframe"],  # "timeseries" or "dataframe"
) -> pd.DataFrame:
    """Load framelike data from REST endpoint

    Fetches all given sources with one GET request against the adapter's
    timeseries or dataframe endpoint and parses the streamed response into a
    pandas DataFrame.

    Raises AdapterHandlingException for inconsistent requests or parsing
    failures and AdapterConnectionError for failed requests.
    """

    url = posix_urljoin(await get_generic_rest_adapter_base_url(adapter_key),
                        endpoint)

    # All grouped sources must share a single external datatype.
    if len({fs.type for fs in filtered_sources}) > 1:
        raise AdapterHandlingException(
            "Got more than one datatype in same grouped data")

    if len(filtered_sources) == 0:
        raise AdapterHandlingException("Requested fetching 0 sources")

    common_data_type = filtered_sources[0].type

    # Only timeseries may be fetched together; dataframes cannot be batched.
    if (common_data_type
            == ExternalType.DATAFRAME) and len(filtered_sources) > 1:
        raise AdapterHandlingException(
            "Cannot request more than one dataframe together")

    logger.info(
        ("Requesting framelike data from generic rest adapter %s from endpoint %s:"
         " ids %s with additional params %s with common datatype %s"),
        adapter_key,
        url,
        str([filtered_source.ref_id for filtered_source in filtered_sources]),
        str(additional_params),
        str(common_data_type),
    )

    headers = get_generic_rest_adapter_auth_headers()

    with requests.Session() as session:
        try:
            start_time = datetime.datetime.now(datetime.timezone.utc)
            logger.info(
                "Start receiving generic rest adapter %s framelike data at %s",
                adapter_key,
                start_time.isoformat(),
            )
            # stream=True so the raw response can be fed line-wise to read_json.
            resp = session.get(
                url,
                params=[("id", quote(str(filtered_source.ref_id)))
                        for filtered_source in filtered_sources] +
                additional_params,
                stream=True,
                headers=headers,
                verify=runtime_config.hd_adapters_verify_certs,
            )
            # A 404 with errorCode RESULT_EMPTY means "no data", not an error.
            if (resp.status_code == 404 and "errorCode" in resp.text
                    and resp.json()["errorCode"] == "RESULT_EMPTY"):
                logger.info(
                    ("Received RESULT_EMPTY error_code from generic rest adapter %s"
                     " framelike endpoint %s, therefore returning empty DataFrame"
                     ),
                    adapter_key,
                    url,
                )
                if endpoint == "timeseries":
                    return create_empty_ts_df(ExternalType(common_data_type))
                # must be "dataframe":
                return df_empty({})

            if resp.status_code != 200:
                msg = (
                    f"Requesting framelike data from generic rest adapter endpoint {url} failed."
                    f" Status code: {resp.status_code}. Text: {resp.text}")
                logger.info(msg)
                raise AdapterConnectionError(msg)
            logger.info("Start reading in and parsing framelike data")

            # Response body is line-delimited JSON records (one per line).
            df = pd.read_json(resp.raw, lines=True)
            end_time = datetime.datetime.now(datetime.timezone.utc)
            logger.info(
                ("Finished receiving generic rest framelike data (including dataframe parsing)"
                 " at %s. DataFrame shape is %s with columns %s"),
                end_time.isoformat(),
                str(df.shape),
                str(df.columns),
            )
            logger.info(
                ("Receiving generic rest adapter framelike data took"
                 " (including dataframe parsing)"
                 " %s"),
                str(end_time - start_time),
            )
            logger.debug(
                "Received dataframe of form %s:\n%s",
                str(df.shape) if len(df) > 0 else "EMPTY RESULT",
                str(df) if len(df) > 0 else "EMPTY RESULT",
            )
        except requests.HTTPError as e:
            msg = (
                f"Requesting framelike data from generic rest adapter endpoint {url}"
                f" failed with Exception {str(e)}")

            logger.info(msg)
            raise AdapterConnectionError(
                f"Requesting framelike from generic rest adapter endpoint {url} failed."
            ) from e
    logger.info("Complete generic rest adapter %s framelike request",
                adapter_key)
    if len(df) == 0:
        if endpoint == "timeseries":
            return create_empty_ts_df(ExternalType(common_data_type))
        # must be dataframe:
        return df_empty({})

    # For dataframes, promote a parseable "timestamp" column to the index.
    if "timestamp" in df.columns and endpoint == "dataframe":
        try:
            parsed_timestamps = pd.to_datetime(df["timestamp"])
        except ValueError as e:
            logger.info(
                "Column 'timestamp' of dataframe from %s could not be parsed and therefore"
                " not be set to index. Proceeding with default index. Error was: %s",
                url,
                str(e),
            )
        else:
            df.index = parsed_timestamps
            df = df.sort_index()

    return df
Ejemplo n.º 19
0
async def load_single_metadatum_from_adapter(filtered_source: FilteredSource,
                                             adapter_key: str,
                                             client: httpx.AsyncClient) -> Any:
    """Load one metadatum from a generic REST adapter and parse its value.

    Args:
        filtered_source: Describes the referenced object (ref_id, ref_key,
            ref_id_type) and the expected external type.
        adapter_key: Key identifying the generic REST adapter.
        client: Shared httpx client used for the GET request.

    Returns:
        The metadatum value parsed into the datatype declared by the source.

    Raises:
        AdapterConnectionError: On transport errors, non-200 answers, or
            answers whose key/dataType do not match the request.
        AdapterHandlingException: If the answer cannot be validated/parsed.
    """
    # BUGFIX (messages only): fixed "metadata data" duplication, the missing
    # space in "adapterurl {url}", and the "metdatum" typo below.

    # Endpoint depends on what kind of object the metadatum is attached to.
    if filtered_source.ref_id_type == RefIdType.SOURCE:
        endpoint = "sources"
    elif filtered_source.ref_id_type == RefIdType.SINK:
        endpoint = "sinks"
    else:
        endpoint = "thingNodes"

    url = posix_urljoin(
        await get_generic_rest_adapter_base_url(adapter_key),
        endpoint,
        urllib.parse.quote(str(filtered_source.ref_id)),
        "metadata",
        urllib.parse.quote(str(filtered_source.ref_key)),
    )
    try:
        resp = await client.get(url)
    except httpx.HTTPError as e:
        msg = (
            f"Requesting metadata from generic rest adapter endpoint {url}"
            f" failed with Exception {str(e)}")

        logger.info(msg)
        raise AdapterConnectionError(
            f"Requesting metadata from generic rest adapter endpoint {url} failed."
        ) from e

    if resp.status_code != 200:
        msg = (
            f"Requesting metadata from generic rest adapter endpoint {url} failed."
            f" Status code: {resp.status_code}. Text: {resp.text}")
        logger.info(msg)
        raise AdapterConnectionError(msg)

    try:
        metadatum = Metadatum.parse_obj(resp.json())
    except ValidationError as e:
        msg = (
            f"Validation failure trying to parse received metadata from adapter"
            f" url {url}: {str(resp.json())}\nError is: " + str(e))

        logger.info(msg)
        raise AdapterHandlingException(msg) from e

    logger.debug("Received metadata json from url %s:\n%s", url,
                 str(resp.json()))

    # The adapter must answer with exactly the requested key.
    if metadatum.key != str(filtered_source.ref_key):
        msg = (f"received metadata has wrong key "
               f"(not the requested one {str(filtered_source.ref_key)})"
               f". Received metadatum is {str(metadatum)}")
        logger.info(msg)
        raise AdapterConnectionError(msg)

    value_datatype = ExternalType(filtered_source.type).value_datatype
    assert value_datatype is not None  # for mypy

    # A dataType in the answer, if present, must match the expected one.
    if metadatum.dataType is not None and metadatum.dataType != value_datatype:
        msg = (f"received metadata has wrong value dataType "
               f"(not the requested one inside {str(filtered_source.type)})"
               f". Received metadatum is {str(metadatum)}")
        logger.info(msg)
        raise AdapterConnectionError(msg)

    try:
        parsed_value = value_datatype.parse_object(metadatum.value)
    except ValidationError as e:
        msg = (
            f"Validation failure trying to parse received metadata from adapter"
            f" url {url}: {str(metadatum)}\nError is: " + str(e))

        logger.info(msg)
        raise AdapterHandlingException(msg) from e
    return parsed_value  # type: ignore
Ejemplo n.º 20
0
def get_transformation_from_java_backend(id: UUID, type: Type) -> Any:
    """Load a transformation revision with its documentation from the old java backend.

    Fetches the component or workflow revision identified by id from the
    endpoint matching its type, fetches its documentation separately, combines
    both into a transformation revision and returns it as a plain JSON dict.

    Args:
        id (UUID): Id of the transformation revision.
        type (Type): Type.COMPONENT or Type.WORKFLOW; selects the endpoint.

    Returns:
        JSON-serializable dict of the resulting transformation revision.

    Raises:
        Exception: If either backend request returns a status code != 200.
    """

    headers = get_auth_headers()

    if type == Type.COMPONENT:
        url = posix_urljoin(get_config().hd_backend_api_url, "components",
                            str(id))
    else:
        url = posix_urljoin(get_config().hd_backend_api_url, "workflows",
                            str(id))

    # Get transformation revision from old backend
    response = requests.get(
        url,
        verify=get_config().hd_backend_verify_certs,
        auth=get_backend_basic_auth()  # type: ignore
        if get_config().hd_backend_use_basic_auth else None,
        headers=headers,
    )
    logger.info(
        "GET %s status code: %i for %s with id %ss",
        type,
        response.status_code,
        type,
        str(id),
    )
    if response.status_code != 200:
        msg = (f"COULD NOT GET {type} with id {id}.\n"
               f"Response status code {response.status_code} "
               f"with response text:\n{response.json()['detail']}")
        logger.error(msg)
        raise Exception(msg)

    revision_json = response.json()

    # Get documentation from old backend
    doc_response = requests.get(
        posix_urljoin(get_config().hd_backend_api_url, "documentations",
                      str(id)),
        verify=get_config().hd_backend_verify_certs,
        auth=get_backend_basic_auth()  # type: ignore
        if get_config().hd_backend_use_basic_auth else None,
        headers=headers,
    )
    # BUG FIX: the log and status check below previously inspected `response`
    # (the revision request) instead of `doc_response`, so failures of the
    # documentation request went undetected and the wrong status was logged.
    logger.info(
        "GET documentation status code: %i for %s with id %s",
        doc_response.status_code,
        type,
        str(id),
    )
    if doc_response.status_code != 200:
        msg = (f"COULD NOT GET documentation with id {id}.\n"
               f"Response status code {doc_response.status_code} "
               f"with response text:\n{doc_response.json()['detail']}")
        logger.error(msg)
        raise Exception(msg)

    doc_text = doc_response.json().get("document", "")

    frontend_dto: Union[ComponentRevisionFrontendDto,
                        WorkflowRevisionFrontendDto]

    # Generate transformation revision via the frontend DTO matching the type
    if type == Type.COMPONENT:
        revision_json["type"] = Type.COMPONENT
        frontend_dto = ComponentRevisionFrontendDto(**revision_json)
    else:
        frontend_dto = WorkflowRevisionFrontendDto(**revision_json)

    transformation_revision = frontend_dto.to_transformation_revision(
        documentation=doc_text)

    # Round-trip through the model's json() to get a plain serializable dict
    tr_json = json.loads(transformation_revision.json())

    return tr_json
Ejemplo n.º 21
0
def export_transformations(
    download_path: str,
    type: Optional[Type] = None,
    ids: Optional[List[UUID]] = None,
    names: Optional[List[str]] = None,
    category: Optional[str] = None,
    include_deprecated: bool = True,
    java_backend: bool = False,
) -> None:
    """
    Exports all transformations, together with their documentation, and saves them as json files
    in subdirectories of the provided path corresponding to the respective category,
    based on the provided criteria. If more than one criterion is provided,
    only transformations which fulfill all criteria will be exported.

    WARNING: Overwrites existing files with the same name!

    Args:
        download_path (str): The directory on the local system, where we save the transformations.
        type (Optional[Type]): One of the two types of the enum Type: WORKFLOW or COMPONENT
        ids (Optional[List[UUID]]): The ids of the transformations.
        names (Optional[List[str]]): The names of the transformations.
        category (Optional[str]): The category of the transformations.
        include_deprecated (bool): If set to True, disabled transformations are exported.
        java_backend (bool): If set to True, transformations are fetched from the old
            java backend ("base-items" endpoint) instead of the "transformations" endpoint.

    Usage examples:
        export_transformations("/mnt/obj_repo/migration_data")
        export_transformations(
            "/mnt/obj_repo/migration_data",
            ids=["d71a0cef-1d56-818f-a1a5-dd6bb6d50399","806df1b9-2fc8-4463-943f-3d258c569663"]
        )
        export_transformations("/mnt/obj_repo/migration_data/components", type="COMPONENT")
        export_transformations(
            "/mnt/obj_repo/migration_data/components",
            type="COMPONENT",
            names=["Divide", "Add"]
        )
        export_transformations(
            "/mnt/obj_repo/migration_data/workflows",
            type="WORKFLOW",
            category="Examples"
        )

    """
    import hetdesrun.models.wiring  # pylint: disable=import-outside-toplevel

    hetdesrun.models.wiring.EXPORT_MODE = True

    import hetdesrun.backend.models.wiring  # pylint: disable=import-outside-toplevel

    hetdesrun.backend.models.wiring.EXPORT_MODE = True

    headers = get_auth_headers()

    endpoint = "transformations" if not java_backend else "base-items"

    url = posix_urljoin(get_config().hd_backend_api_url, endpoint)
    response = requests.get(
        url,
        verify=get_config().hd_backend_verify_certs,
        auth=get_backend_basic_auth()  # type: ignore
        if get_config().hd_backend_use_basic_auth else None,
        headers=headers,
    )

    if response.status_code != 200:
        msg = (f"No transformation revision found at url {url}.\n"
               f" Status code was {str(response.status_code)}.\n"
               f" Response was: {str(response.text)}")
        raise Exception(msg)

    id_list = []
    transformation_dict: Dict[UUID, dict] = {}

    for transformation in response.json():
        transformation_id = transformation["id"].lower()
        transformation_type = transformation["type"]
        transformation_name = transformation["name"]
        transformation_category = transformation["category"]
        logger.info(
            "found transformation %s of type %s\nwith name %s in category %s",
            transformation_id,
            transformation_type,
            transformation_name,
            transformation_category,
        )

        if (criterion_unset_or_matches_value(type, transformation_type)
                and selection_list_empty_or_contains_value(
                    ids, transformation_id)
                and selection_list_empty_or_contains_value(
                    names, transformation_name)
                and criterion_unset_or_matches_value(category,
                                                     transformation_category)):
            if include_deprecated or transformation["state"] != State.DISABLED:
                logger.info("transformation %s will be exported",
                            transformation_id)
                id_list.append(transformation_id)
                # Only transformations that are actually exported need the
                # full revision; previously the java backend was queried for
                # every listed transformation regardless of the filters.
                if java_backend:
                    transformation_dict[
                        transformation_id] = get_transformation_from_java_backend(
                            transformation_id, transformation_type)
                else:
                    transformation_dict[transformation_id] = transformation

    # Export individual transformation
    for transformation_id in id_list:
        save_transformation(transformation_dict[transformation_id],
                            download_path)
Ejemplo n.º 22
0
async def load_generic_adapter_base_urls() -> List[BackendRegisteredGenericRestAdapter]:
    """Loads generic REST adapter infos from the corresponding designer backend endpoint"""

    headers = get_generic_rest_adapter_auth_headers()

    url = posix_urljoin(get_config().hd_backend_api_url, "adapters/")
    logger.info("Start getting Generic REST Adapter URLS from HD Backend url %s", url)

    def _to_registered_adapter(adapter_dto: Any) -> BackendRegisteredGenericRestAdapter:
        # Map an adapter frontend DTO onto the registered-adapter model
        return BackendRegisteredGenericRestAdapter(
            id=adapter_dto.id,
            name=adapter_dto.name,
            url=adapter_dto.url,
            internalUrl=adapter_dto.internal_url,
        )

    if get_config().is_backend_service:
        # Running inside the backend service itself: query adapters directly
        adapter_dtos = await get_all_adapters()

        try:
            registered_adapters: List[BackendRegisteredGenericRestAdapter] = [
                _to_registered_adapter(adapter_dto) for adapter_dto in adapter_dtos
            ]
        except ValidationError as e:
            msg = "Failure trying to parse received generic adapter infos: " + str(e)

            logger.info(msg)
            raise AdapterHandlingException(msg) from e

        return registered_adapters

    # Not the backend service: call the backend "adapters" endpoint over HTTP
    async with httpx.AsyncClient(
        verify=get_config().hd_backend_verify_certs
    ) as client:
        try:
            resp = await client.get(url, headers=headers)
        except httpx.HTTPError as e:
            msg = (
                f"Failure connecting to hd backend adapters endpoint ({url}): "
                + str(e)
            )
            logger.info(msg)
            raise AdapterConnectionError(msg) from e

    if resp.status_code != 200:
        msg = (
            f"HTTP failure trying to receive generic adapter infos from hd backend ({url}):"
            f" Status code {str(resp.status_code)}. Response: {resp.text}"
        )

        logger.info(msg)
        raise AdapterConnectionError(msg)

    try:
        parsed_adapters = AdapterFrontendDtoRegisteredGenericRestAdapters.parse_obj(
            resp.json()
        )
        registered_adapters = [
            _to_registered_adapter(adapter_dto)
            for adapter_dto in parsed_adapters.__root__
        ]
    except ValidationError as e:
        msg = "Failure trying to parse received generic adapter infos: " + str(e)

        logger.info(msg)
        raise AdapterHandlingException(msg) from e

    logger.info(
        "Finished getting Generic REST Adapter URLS from HD Backend url %s", url
    )

    return registered_adapters