Example No. 1
async def post_framelike_records(
    list_of_records: List[dict],
    attributes: Optional[Any],
    ref_id: str,
    adapter_key: str,
    endpoint: Literal["timeseries", "dataframe"],
    client: AsyncClient,
) -> None:
    """Post a list of dicts (records) to the appropriate endpoint"""
    headers = get_generic_rest_adapter_auth_headers()
    if attributes is not None and len(attributes) != 0:
        logger.debug("Sending Data-Attributes via POST request header")
        headers["Data-Attributes"] = encode_attributes(attributes)

    url = posix_urljoin(await get_generic_rest_adapter_base_url(adapter_key),
                        endpoint)

    start_time = datetime.datetime.now(datetime.timezone.utc)
    logger.info(
        "Start sending framelike data at %s to %s for id %s",
        start_time.isoformat(),
        url,
        ref_id,
    )

    try:
        response = await client.post(
            url,
            params=[("id" if endpoint == "dataframe" else "timeseriesId",
                     ref_id)],
            json=list_of_records,
            headers=headers,
            timeout=60,
        )
    except httpx.HTTPError as e:
        msg = f"Http error while posting framelike data to {url} for id {ref_id}: {str(e)}"
        logger.info(msg)
        raise AdapterConnectionError(msg) from e

    if response.status_code not in (200, 201):
        msg = (
            f"Failed posting framelike data to {url} for id {ref_id}."
            f" Status code: {str(response.status_code)}. Response text: {response.text}"
        )
        raise AdapterConnectionError(msg)
    logger.info(
        "Successfully finished posting framelike data to %s for id %s at %s",
        url,
        ref_id,
        datetime.datetime.now(datetime.timezone.utc).isoformat(),
    )
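
A minimal usage sketch, assuming the imports and helpers from the example above; the record layout, the "demo-adapter" key and the reference id are hypothetical and only illustrate how grouped timeseries records might be handed over:

async def send_example_records() -> None:
    # Hypothetical records, one dict per observation, as a timeseries
    # endpoint typically expects them.
    records = [
        {"timestamp": "2023-01-01T00:00:00+00:00", "value": 1.2},
        {"timestamp": "2023-01-01T01:00:00+00:00", "value": 1.5},
    ]
    async with AsyncClient() as client:
        await post_framelike_records(
            records,
            attributes=None,
            ref_id="my-timeseries-id",
            adapter_key="demo-adapter",
            endpoint="timeseries",
            client=client,
        )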
Example No. 2
async def send_single_metadatum_to_adapter(
    filtered_sink: FilteredSink,
    metadatum_value: Any,
    adapter_key: str,
    client: httpx.AsyncClient,
) -> None:
    if filtered_sink.ref_id_type == RefIdType.SOURCE:
        endpoint = "sources"
    elif filtered_sink.ref_id_type == RefIdType.SINK:
        endpoint = "sinks"
    else:
        endpoint = "thingNodes"

    url = posix_urljoin(
        await get_generic_rest_adapter_base_url(adapter_key),
        endpoint,
        urllib.parse.quote(str(filtered_sink.ref_id)),
        "metadata",
        urllib.parse.quote(str(filtered_sink.ref_key)),
    )

    value_datatype = ExternalType(filtered_sink.type).value_datatype
    assert value_datatype is not None  # for mypy

    try:
        resp = await client.post(
            url,
            json={
                "key": filtered_sink.ref_key,
                "value": metadatum_value,
                "dataType": value_datatype.value,
            },
        )
    except httpx.HTTPError as e:
        msg = (f"Posting metadata to generic rest adapter endpoint {url}"
               f" failed with Exception {str(e)}")

        logger.info(msg)
        raise AdapterConnectionError(
            f"Posting metadata to generic rest adapter endpoint {url} failed."
        ) from e

    if resp.status_code not in (200, 201):
        msg = (
            f"Posting metadata to generic rest adapter endpoint {url} failed."
            f" Status code: {resp.status_code}. Text: {resp.text}")
        logger.info(msg)
        raise AdapterConnectionError(msg)
    logger.debug("Received Response for metadata posting from url %s:\n%s",
                 url, str(resp.text))
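
For reference, the body posted here is a plain JSON object whose shape matches what load_single_metadatum_from_adapter (Example No. 5) later parses back as a Metadatum. A sketch with illustrative values; the concrete key, value and dataType string are assumptions:

# Illustrative payload shape as assembled by send_single_metadatum_to_adapter;
# the concrete values are hypothetical.
example_metadatum_payload = {
    "key": "maxTemperature",  # filtered_sink.ref_key
    "value": 85.4,            # metadatum_value
    "dataType": "float",      # value_datatype.value derived from the sink's ExternalType
}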
Example No. 3
async def load_generic_adapter_base_urls() -> List[BackendRegisteredGenericRestAdapter]:
    """Loads generic REST adapter infos from the corresponding designer backend endpoint"""

    headers = get_generic_rest_adapter_auth_headers()

    url = posix_urljoin(runtime_config.hd_backend_api_url, "adapters/")
    logger.info("Start getting Generic REST Adapter URLS from HD Backend url %s", url)

    async with httpx.AsyncClient(
        verify=runtime_config.hd_backend_verify_certs
    ) as client:
        try:
            resp = await client.get(url, headers=headers)
        except httpx.HTTPError as e:
            msg = f"Failure connecting to hd backend adapters endpoint ({url}): " + str(
                e
            )
            logger.info(msg)
            raise AdapterConnectionError(msg) from e

    if resp.status_code != 200:
        msg = (
            f"HTTP failure trying to receive generic adapter infos from hd backend ({url}):"
            f" Status code {str(resp.status_code)}. Response: {resp.text}"
        )

        logger.info(msg)
        raise AdapterConnectionError(msg)

    try:
        loaded_generic_rest_adapters: List[
            BackendRegisteredGenericRestAdapter
        ] = BackendRegisteredGenericRestAdapters.parse_obj(resp.json()).__root__
    except ValidationError as e:
        msg = "Failure trying to parse received generic adapter infos: " + str(e)

        logger.info(msg)
        raise AdapterHandlingException(msg) from e

    logger.info(
        "Finished getting Generic REST Adapter URLS from HD Backend url %s", url
    )

    return loaded_generic_rest_adapters
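
The parse_obj(...).__root__ call above relies on a pydantic v1 custom root type. A minimal sketch of how such a wrapper model is typically declared, with field names inferred from Example No. 6 (the exact field types are assumptions):

from typing import List

from pydantic import BaseModel


class BackendRegisteredGenericRestAdapter(BaseModel):
    # Field names taken from Example No. 6; the types are assumed.
    id: str
    name: str
    url: str
    internalUrl: str


class BackendRegisteredGenericRestAdapters(BaseModel):
    # Custom root type: the /adapters endpoint returns a JSON array, which
    # pydantic v1 exposes via the __root__ attribute after parse_obj().
    __root__: List[BackendRegisteredGenericRestAdapter]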
Example No. 4
async def load_framelike_data(
    filtered_sources: List[FilteredSource],
    # for timeseries: [("from", from_timestamp), ("to", to_timestamp)]
    additional_params: List[Tuple[str, str]],
    adapter_key: str,
    endpoint: Literal["timeseries", "dataframe"],
) -> pd.DataFrame:
    """Load framelike data from REST endpoint"""

    url = posix_urljoin(await get_generic_rest_adapter_base_url(adapter_key),
                        endpoint)

    if len({fs.type for fs in filtered_sources}) > 1:
        raise AdapterHandlingException(
            "Got more than one datatype in same grouped data")

    if len(filtered_sources) == 0:
        raise AdapterHandlingException("Requested fetching 0 sources")

    common_data_type = filtered_sources[0].type

    if (common_data_type
            == ExternalType.DATAFRAME) and len(filtered_sources) > 1:
        raise AdapterHandlingException(
            "Cannot request more than one dataframe together")

    logger.info(
        ("Requesting framelike data from generic rest adapter %s from endpoint %s:"
         " ids %s with additional params %s with common datatype %s"),
        adapter_key,
        url,
        str([filtered_source.ref_id for filtered_source in filtered_sources]),
        str(additional_params),
        str(common_data_type),
    )

    headers = get_generic_rest_adapter_auth_headers()

    with requests.Session() as session:
        try:
            start_time = datetime.datetime.now(datetime.timezone.utc)
            logger.info(
                "Start receiving generic rest adapter %s framelike data at %s",
                adapter_key,
                start_time.isoformat(),
            )
            resp = session.get(
                url,
                params=[("id", quote(str(filtered_source.ref_id)))
                        for filtered_source in filtered_sources] +
                additional_params,
                stream=True,
                headers=headers,
                verify=runtime_config.hd_adapters_verify_certs,
            )
            if (resp.status_code == 404 and "errorCode" in resp.text
                    and resp.json()["errorCode"] == "RESULT_EMPTY"):
                logger.info(
                    ("Received RESULT_EMPTY error_code from generic rest adapter %s"
                     " framelike endpoint %s, therefore returning empty DataFrame"
                     ),
                    adapter_key,
                    url,
                )
                if endpoint == "timeseries":
                    return create_empty_ts_df(ExternalType(common_data_type))
                # must be "dataframe":
                return df_empty({})

            if resp.status_code != 200:
                msg = (
                    f"Requesting framelike data from generic rest adapter endpoint {url} failed."
                    f" Status code: {resp.status_code}. Text: {resp.text}")
                logger.info(msg)
                raise AdapterConnectionError(msg)
            logger.info("Start reading in and parsing framelike data")

            df = pd.read_json(resp.raw, lines=True)
            end_time = datetime.datetime.now(datetime.timezone.utc)
            logger.info(
                ("Finished receiving generic rest framelike data (including dataframe parsing)"
                 " at %s. DataFrame shape is %s with columns %s"),
                end_time.isoformat(),
                str(df.shape),
                str(df.columns),
            )
            logger.info(
                ("Receiving generic rest adapter framelike data took"
                 " (including dataframe parsing)"
                 " %s"),
                str(end_time - start_time),
            )
            logger.debug(
                "Received dataframe of form %s:\n%s",
                str(df.shape) if len(df) > 0 else "EMPTY RESULT",
                str(df) if len(df) > 0 else "EMPTY RESULT",
            )
        except requests.HTTPError as e:
            msg = (
                f"Requesting framelike data from generic rest adapter endpoint {url}"
                f" failed with Exception {str(e)}")

            logger.info(msg)
            raise AdapterConnectionError(
                f"Requesting framelike data from generic rest adapter endpoint {url} failed."
            ) from e
    logger.info("Complete generic rest adapter %s framelike request",
                adapter_key)
    if len(df) == 0:
        if endpoint == "timeseries":
            return create_empty_ts_df(ExternalType(common_data_type))
        # must be dataframe:
        return df_empty({})

    if "timestamp" in df.columns and endpoint == "dataframe":
        try:
            parsed_timestamps = pd.to_datetime(df["timestamp"])
        except ValueError as e:
            logger.info(
                "Column 'timestamp' of dataframe from %s could not be parsed and therefore"
                " not be set to index. Proceeding with default index. Error was: %s",
                url,
                str(e),
            )
        else:
            df.index = parsed_timestamps
            df = df.sort_index()

    return df
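
Because the response is parsed with pd.read_json(resp.raw, lines=True), the adapter endpoint is expected to stream newline-delimited JSON, one record per line. A small, self-contained sketch of that parsing step; the column names are illustrative, not prescribed by the example:

import io

import pandas as pd

# Two hypothetical NDJSON lines, as a timeseries endpoint might stream them.
ndjson_body = (
    '{"timeseriesId": "id1", "timestamp": "2023-01-01T00:00:00Z", "value": 1.2}\n'
    '{"timeseriesId": "id1", "timestamp": "2023-01-01T01:00:00Z", "value": 1.5}\n'
)

# pd.read_json with lines=True turns each JSON object into one DataFrame row.
df = pd.read_json(io.StringIO(ndjson_body), lines=True)
print(df.shape)  # (2, 3)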
Example No. 5
async def load_single_metadatum_from_adapter(filtered_source: FilteredSource,
                                             adapter_key: str,
                                             client: httpx.AsyncClient) -> Any:

    if filtered_source.ref_id_type == RefIdType.SOURCE:
        endpoint = "sources"
    elif filtered_source.ref_id_type == RefIdType.SINK:
        endpoint = "sinks"
    else:
        endpoint = "thingNodes"

    url = posix_urljoin(
        await get_generic_rest_adapter_base_url(adapter_key),
        endpoint,
        urllib.parse.quote(str(filtered_source.ref_id)),
        "metadata",
        urllib.parse.quote(str(filtered_source.ref_key)),
    )
    try:
        resp = await client.get(url)
    except httpx.HTTPError as e:
        msg = (
            f"Requesting metadata from generic rest adapter endpoint {url}"
            f" failed with Exception {str(e)}")

        logger.info(msg)
        raise AdapterConnectionError(
            f"Requesting metadata from generic rest adapter endpoint {url} failed."
        ) from e

    if resp.status_code != 200:
        msg = (
            f"Requesting metadata from generic rest adapter endpoint {url} failed."
            f" Status code: {resp.status_code}. Text: {resp.text}")
        logger.info(msg)
        raise AdapterConnectionError(msg)

    try:
        metadatum = Metadatum.parse_obj(resp.json())
    except ValidationError as e:
        msg = (
            f"Validation failure trying to parse received metadata from adapter"
            f" url {url}: {str(resp.json())}\nError is: " + str(e))

        logger.info(msg)
        raise AdapterHandlingException(msg) from e

    logger.debug("Received metadata json from url %s:\n%s", url,
                 str(resp.json()))

    if metadatum.key != str(filtered_source.ref_key):
        msg = (f"received metadata has wrong key "
               f"(not the requested one {str(filtered_source.ref_key)})"
               f". Received metdatum is {str(metadatum)}")
        logger.info(msg)
        raise AdapterConnectionError(msg)

    value_datatype = ExternalType(filtered_source.type).value_datatype
    assert value_datatype is not None  # for mypy

    if metadatum.dataType is not None and metadatum.dataType != value_datatype:
        msg = (f"received metadata has wrong value dataType "
               f"(not the requested one inside {str(filtered_source.type)})"
               f". Received metdatum is {str(metadatum)}")
        logger.info(msg)
        raise AdapterConnectionError(msg)

    try:
        parsed_value = value_datatype.parse_object(metadatum.value)
    except ValidationError as e:
        msg = (
            f"Validation failure trying to parse received metadata from adapter"
            f" url {url}: {str(metadatum)}\nError is: " + str(e))

        logger.info(msg)
        raise AdapterHandlingException(msg) from e
    return parsed_value  # type: ignore
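
A minimal call sketch, assuming a FilteredSource can be constructed with just the fields the loader reads (ref_id, ref_key, ref_id_type, type); the constructor arguments, the RefIdType member name and the external type string are assumptions:

async def read_example_metadatum() -> Any:
    # Hypothetical source description; only the fields read above are set.
    source = FilteredSource(
        ref_id="plant1.unit1",
        ref_key="maxTemperature",
        ref_id_type=RefIdType.THINGNODE,  # assumed member name for thing nodes
        type="metadata(float)",           # assumed ExternalType string
    )
    async with httpx.AsyncClient() as client:
        return await load_single_metadatum_from_adapter(
            source, adapter_key="demo-adapter", client=client
        )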
Example No. 6
async def load_generic_adapter_base_urls() -> List[BackendRegisteredGenericRestAdapter]:
    """Loads generic REST adapter infos from the corresponding designer backend endpoint"""

    headers = get_generic_rest_adapter_auth_headers()

    url = posix_urljoin(get_config().hd_backend_api_url, "adapters/")
    logger.info("Start getting Generic REST Adapter URLS from HD Backend url %s", url)

    if get_config().is_backend_service:
        # call function directly
        adapter_list = await get_all_adapters()

        try:
            loaded_generic_rest_adapters: List[BackendRegisteredGenericRestAdapter] = [
                BackendRegisteredGenericRestAdapter(
                    id=adapter_dto.id,
                    name=adapter_dto.name,
                    url=adapter_dto.url,
                    internalUrl=adapter_dto.internal_url,
                )
                for adapter_dto in adapter_list
            ]
        except ValidationError as e:
            msg = "Failure trying to parse received generic adapter infos: " + str(e)

            logger.info(msg)
            raise AdapterHandlingException(msg) from e
    else:
        # call backend service "adapters" endpoint
        async with httpx.AsyncClient(
            verify=get_config().hd_backend_verify_certs
        ) as client:
            try:
                resp = await client.get(url, headers=headers)
            except httpx.HTTPError as e:
                msg = (
                    f"Failure connecting to hd backend adapters endpoint ({url}): "
                    + str(e)
                )
                logger.info(msg)
                raise AdapterConnectionError(msg) from e

        if resp.status_code != 200:
            msg = (
                f"HTTP failure trying to receive generic adapter infos from hd backend ({url}):"
                f" Status code {str(resp.status_code)}. Response: {resp.text}"
            )

            logger.info(msg)
            raise AdapterConnectionError(msg)

        try:
            loaded_generic_rest_adapters = [
                BackendRegisteredGenericRestAdapter(
                    id=adapter_dto.id,
                    name=adapter_dto.name,
                    url=adapter_dto.url,
                    internalUrl=adapter_dto.internal_url,
                )
                for adapter_dto in AdapterFrontendDtoRegisteredGenericRestAdapters.parse_obj(
                    resp.json()
                ).__root__
            ]
        except ValidationError as e:
            msg = "Failure trying to parse received generic adapter infos: " + str(e)

            logger.info(msg)
            raise AdapterHandlingException(msg) from e

    logger.info(
        "Finished getting Generic REST Adapter URLS from HD Backend url %s", url
    )

    return loaded_generic_rest_adapters
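
A sketch of how the returned list might be consumed, for example to build an adapter-id → internal URL lookup; that this mirrors what get_generic_rest_adapter_base_url does internally is an assumption:

async def build_adapter_url_map() -> dict:
    adapters = await load_generic_adapter_base_urls()
    # Map each registered adapter's id to the URL the runtime calls internally.
    return {adapter.id: adapter.internalUrl for adapter in adapters}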