Code Example #1
File: main.py  Project: hetida/hetida-designer
def run_migrations(
    alembic_dir: str = "./alembic",
    connection_url=get_config().sqlalchemy_connection_string,
) -> None:
    """Runs alembic migrations from within Python code

    Should only be used for local development server. Not recommended
    for multi-process/thread production servers.

    Note: The docker container runs migrations via prestart.sh script in the
    production setup.
    """

    from hetdesrun import migrations_invoked_from_py

    migrations_invoked_from_py = True

    from pydantic import SecretStr

    import hetdesrun.persistence.dbmodels
    from alembic import command
    from alembic.config import Config
    from hetdesrun.persistence import get_db_engine

    engine = get_db_engine()

    logger.info("Using DB engine driver: %s", str(engine.url.drivername))

    if isinstance(connection_url, SecretStr):
        connection_url_to_use = connection_url.get_secret_value()
    else:
        connection_url_to_use = connection_url

    logger.info("Running DB migrations in %s", alembic_dir)
    alembic_cfg = Config()
    alembic_cfg.set_main_option("script_location", alembic_dir)
    # alembic_cfg.set_main_option("sqlalchemy.url", connection_url_to_use)
    # alembic_cfg.set_section_option("logger_root", "level", "DEBUG")
    # alembic_cfg.set_section_option("logger_alembic", "level", "DEBUG")
    # alembic_cfg.set_section_option("logger_sqlalchemy", "level", "DEBUG")
    command.upgrade(alembic_cfg, "head")
    logger.info("Finished running migrations.")
Code Example #2
def get_auth_headers() -> Dict[str, str]:
    """Forward access token when making requests to runtime or adapters"""
    auth_ctx_dict = get_request_auth_context()
    try:
        token = auth_ctx_dict["token"]
    except KeyError:
        possible_fixed_token = get_config().auth_bearer_token_for_external_requests
        if possible_fixed_token is not None:
            logger.debug(
                "No stored auth token, but explicit token for external requests is present."
                " Using the explicitly configured token for external requests with schema"
                " Bearer."
            )
            return {"Authorization": "Bearer " + possible_fixed_token}
        logger.debug("No stored auth token. Not setting auth header")
        return {}
    logger.debug(
        "Found stored auth token. Setting Authorization header with schema Bearer"
    )
    return {"Authorization": "Bearer " + token}
Code Example #3
def configure_logging(
    the_logger: logging.Logger, log_execution_context: bool = False
) -> None:
    """Configure logging

    Arguments:
        the_logger {Python logger} -- any logger

    Keyword Arguments:
        log_execution_context {bool} -- whether runtime execution context should
            be made available and logged (default: {False})

    If log_execution_context is True a LoggingFilter will be attached to the
    LogHandler. Attaching to the handler (instead of the logger) guarantees that
    the filter will be applied even when descendant loggers are used which don't have
    handlers on their own (confer https://docs.python.org/3/_images/logging_flow.png)

    This filter actually does no filtering but augments the LogRecords with
    execution context information (id of component instance and component uuid).
    A filter is used here for context provision because it can be attached to
    a handler (in contrast to a LoggingAdapter). Attaching the filter to custom
    LoggingHandlers allows to send this information to external services.

    Additionally the formatter is set up to log this context information.
    """
    the_logger.setLevel(get_config().log_level.value)
    logging_handler = logging.StreamHandler()  # use sys.stderr by default
    # sys.stderr will be propagated by mod_wsgi to Apache error log for webservice
    if log_execution_context:
        logging_handler.addFilter(execution_context_filter)
    formatter = logging.Formatter(
        "%(asctime)s %(process)d %(levelname)s: %(message)s "
        "[in %(pathname)s:%(lineno)d"
        + (
            (
                ", component instance: %(currently_executed_instance_id)s"
                ", component id: %(currently_executed_component_id)s"
                ", component node name: %(currently_executed_component_node_name)s"
                ", job id: %(job_id)s"
                "]"
            )
            if log_execution_context
            else "]"
        )
    )
    logging_handler.setFormatter(formatter)
    the_logger.addHandler(logging_handler)
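A short usage sketch (assumption: the "hetdesrun" package root logger is configured once at startup, so descendant loggers inherit the handler, filter, and format):

import logging

# Hypothetical startup code; configure once so child loggers inherit it.
logger = logging.getLogger("hetdesrun")
configure_logging(logger, log_execution_context=True)
logger.info("logging configured")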
Code Example #4
File: execution.py  Project: hetida/hetida-designer
async def run_execution_input(
    execution_input: WorkflowExecutionInput,
) -> ExecutionResponseFrontendDto:
    """Runs the provided execution input

    Depending on configuration this either calls a function or queries the
    external runtime service endpoint (if this instance is not considered to
    act as runtime service).

    Raises subtypes of TrafoExecutionError on errors.
    """
    output_types = {
        output.name: output.type
        for output in execution_input.workflow.outputs
    }

    execution_result: WorkflowExecutionResult

    if get_config().is_runtime_service:
        execution_result = await runtime_service(execution_input)
    else:
        headers = get_auth_headers()

        async with httpx.AsyncClient(
            verify=get_config().hd_runtime_verify_certs
        ) as client:
            url = posix_urljoin(get_config().hd_runtime_engine_url, "runtime")
            try:
                response = await client.post(
                    url,
                    headers=headers,
                    # TODO: avoid double serialization.
                    # see https://github.com/samuelcolvin/pydantic/issues/1409 and
                    # https://github.com/samuelcolvin/pydantic/issues/1409#issuecomment-877175194
                    json=json.loads(execution_input.json()),
                    timeout=None,
                )
            except httpx.HTTPError as e:
                # handles both request errors (connection problems)
                # and 4xx and 5xx errors. See https://www.python-httpx.org/exceptions/
                msg = f"Failure connecting to hd runtime endpoint ({url}):\n{str(e)}"
                logger.info(msg)
                raise TrafoExecutionRuntimeConnectionError(msg) from e
            try:
                json_obj = response.json()
                execution_result = WorkflowExecutionResult(**json_obj)
            except ValidationError as e:
                msg = (
                    f"Could not validate hd runtime result object. Exception:\n{str(e)}"
                    f"\nJson Object is:\n{str(json_obj)}")
                logger.info(msg)
                raise TrafoExecutionResultValidationError(msg) from e

    execution_response = ExecutionResponseFrontendDto(
        error=execution_result.error,
        output_results_by_output_name=execution_result.output_results_by_output_name,
        output_types_by_output_name=output_types,
        result=execution_result.result,
        traceback=execution_result.traceback,
        job_id=execution_input.job_id,
    )

    return execution_response
Code Example #5
def get_auth_deps() -> List[Any]:
    """Return the authentication dependencies based on the application settings."""
    return [Depends(has_access)] if get_config().auth else []
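A sketch of how such a dependency list is typically consumed by a FastAPI router (the prefix is illustrative, not from the project):

from fastapi import APIRouter

# Hypothetical router: every route requires a valid token when auth is
# enabled; the dependency list is simply empty otherwise.
protected_router = APIRouter(
    prefix="/protected",  # illustrative
    dependencies=get_auth_deps(),
)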
Code Example #6
                "No stored auth token, but explicit token for external requests is present."
                " Using the explicitely configured token for external requests with schema"
                " Bearer."))
            return {"Authorization": "Bearer " + possible_fixed_token}
        logger.debug("No stored auth token. Not setting auth header")
        return {}
    logger.debug(
        "Found stored auth token. Setting Authorization header with schema Bearer"
    )
    return {"Authorization": "Bearer " + token}


security = HTTPBearer()

bearer_verifier = BearerVerifier.from_verifier_options(
    auth_url=get_config().auth_public_key_url or "",
)


async def has_access(
    credentials: HTTPBasicCredentials = Depends(security),
) -> None:
    """Validate access"""

    if credentials is None:
        logger.info("Unauthorized: Could not obtain credentials from request")

        raise HTTPException(
            status_code=HTTP_403_FORBIDDEN,
            detail="Could not obtain credentials from request",
        )

    if credentials.scheme != "Bearer":  # type: ignore
Code Example #7
import logging
from typing import List

from fastapi import HTTPException, status

from hetdesrun.backend.models.adapter import AdapterFrontendDto
from hetdesrun.webservice.config import get_config
from hetdesrun.webservice.router import HandleTrailingSlashAPIRouter

logger = logging.getLogger(__name__)

adapters = get_config().hd_adapters


adapter_router = HandleTrailingSlashAPIRouter(
    prefix="/adapters",
    tags=["adapters"],
    responses={  # are these only used for display in the Swagger UI?
        status.HTTP_401_UNAUTHORIZED: {"description": "Unauthorized"},
        status.HTTP_403_FORBIDDEN: {"description": "Forbidden"},
        status.HTTP_404_NOT_FOUND: {"description": "Not Found"},
        status.HTTP_500_INTERNAL_SERVER_ERROR: {"description": "Internal server error"},
    },
)


@adapter_router.get(
    "",
    response_model=List[AdapterFrontendDto],
    summary="Returns all adapters",
    status_code=status.HTTP_200_OK,
Code Example #8
File: export.py  Project: hetida/hetida-designer
def get_transformation_from_java_backend(id: UUID, type: Type) -> Any:
    """
    Loads a single transformation revision together with its documentation based on its id
    """

    headers = get_auth_headers()

    if type == Type.COMPONENT:
        url = posix_urljoin(get_config().hd_backend_api_url, "components", str(id))
    else:
        url = posix_urljoin(get_config().hd_backend_api_url, "workflows", str(id))

    # Get transformation revision from old backend
    response = requests.get(
        url,
        verify=get_config().hd_backend_verify_certs,
        auth=get_backend_basic_auth()  # type: ignore
        if get_config().hd_backend_use_basic_auth else None,
        headers=headers,
    )
    logger.info(
        "GET %s status code: %i for %s with id %ss",
        type,
        response.status_code,
        type,
        str(id),
    )
    if response.status_code != 200:
        msg = (f"COULD NOT GET {type} with id {id}.\n"
               f"Response status code {response.status_code} "
               f"with response text:\n{response.json()['detail']}")
        logger.error(msg)
        raise Exception(msg)

    revision_json = response.json()

    # Get documentation from old backend
    doc_response = requests.get(
        posix_urljoin(get_config().hd_backend_api_url, "documentations",
                      str(id)),
        verify=get_config().hd_backend_verify_certs,
        auth=get_backend_basic_auth()  # type: ignore
        if get_config().hd_backend_use_basic_auth else None,
        headers=headers,
    )
    logger.info(
        "GET documentation status code: %i for %s with id %s",
        doc_response.status_code,
        type,
        str(id),
    )
    if doc_response.status_code != 200:
        msg = (f"COULD NOT GET documentation with id {id}.\n"
               f"Response status code {doc_response.status_code} "
               f"with response text:\n{doc_response.json()['detail']}")
        logger.error(msg)
        raise Exception(msg)

    doc_text = doc_response.json().get("document", "")

    frontend_dto: Union[ComponentRevisionFrontendDto, WorkflowRevisionFrontendDto]

    # Generate transformation revision
    if type == Type.COMPONENT:
        revision_json["type"] = Type.COMPONENT
        frontend_dto = ComponentRevisionFrontendDto(**revision_json)
    else:
        frontend_dto = WorkflowRevisionFrontendDto(**revision_json)

    transformation_revision = frontend_dto.to_transformation_revision(
        documentation=doc_text)

    tr_json = json.loads(transformation_revision.json())

    return tr_json
Code Example #9
File: export.py  Project: hetida/hetida-designer
def export_transformations(
    download_path: str,
    type: Optional[Type] = None,
    ids: Optional[List[UUID]] = None,
    names: Optional[List[str]] = None,
    category: Optional[str] = None,
    include_deprecated: bool = True,
    java_backend: bool = False,
) -> None:
    """
    Exports all transformations, together with their documentation, and saves them as json files
    in subdirectories of the provided path corresponding to the respective category,
    based on the provided criteria. If more than one criterion is provided,
    only transformations which fulfill all criteria will be exported.

    WARNING: Overwrites existing files with the same name!

    Args:
        download_path (str): The directory on the local system, where we save the transformations.
        type (Type): One of the two values of the enum Type: WORKFLOW or COMPONENT.
        ids (List[UUID]): The ids of the transformations.
        names (List[str]): The names of the transformations.
        category (str): The category of the transformations.
        include_deprecated (bool): If set to True, deprecated transformations are exported as well.

    Usage examples:
        export_transformations("/mnt/obj_repo/migration_data")
        export_transformations(
            "/mnt/obj_repo/migration_data",
            ids=["d71a0cef-1d56-818f-a1a5-dd6bb6d50399","806df1b9-2fc8-4463-943f-3d258c569663"]
        )
        export_transformations("/mnt/obj_repo/migration_data/components", type="COMPONENT")
        export_transformations(
            "/mnt/obj_repo/migration_data/components",
            type="COMPONENT",
            names=["Divide", "Add"]
        )
        export_transformations(
            "/mnt/obj_repo/migration_data/workflows",
            type="WORKFLOW",
            category="Examples"
        )

    """
    import hetdesrun.models.wiring  # pylint: disable=import-outside-toplevel

    hetdesrun.models.wiring.EXPORT_MODE = True

    import hetdesrun.backend.models.wiring  # pylint: disable=import-outside-toplevel

    hetdesrun.backend.models.wiring.EXPORT_MODE = True

    headers = get_auth_headers()

    endpoint = "transformations" if not java_backend else "base-items"

    url = posix_urljoin(get_config().hd_backend_api_url, endpoint)
    response = requests.get(
        url,
        verify=get_config().hd_backend_verify_certs,
        auth=get_backend_basic_auth()  # type: ignore
        if get_config().hd_backend_use_basic_auth else None,
        headers=headers,
    )

    if response.status_code != 200:
        msg = (f"No transformation revision found at url {url}.\n"
               f" Status code was {str(response.status_code)}.\n"
               f" Response was: {str(response.text)}")
        raise Exception(msg)

    id_list = []
    type_dict: Dict[UUID, Type] = {}
    transformation_dict: Dict[UUID, dict] = {}

    for transformation in response.json():
        transformation_id = transformation["id"].lower()
        transformation_type = transformation["type"]
        transformation_name = transformation["name"]
        transformation_category = transformation["category"]
        logger.info(
            "found transformation %s of type %s\nwith name %s in category %s",
            transformation_id,
            transformation_type,
            transformation_name,
            transformation_category,
        )

        if java_backend:
            transformation_dict[transformation_id] = get_transformation_from_java_backend(
                transformation_id, transformation_type
            )
        else:
            transformation_dict[transformation_id] = transformation

        if (
            criterion_unset_or_matches_value(type, transformation_type)
            and selection_list_empty_or_contains_value(ids, transformation_id)
            and selection_list_empty_or_contains_value(names, transformation_name)
            and criterion_unset_or_matches_value(category, transformation_category)
        ):
            if include_deprecated or transformation["state"] != State.DISABLED:
                logger.info("transformation %s will be exported",
                            transformation_id)
                id_list.append(transformation_id)
                type_dict[transformation_id] = transformation_type

    # Export individual transformations
    for transformation_id in id_list:
        save_transformation(transformation_dict[transformation_id], download_path)
Code Example #10
async def test_execute_for_nested_workflow(async_test_client,
                                           clean_test_db_engine):
    patched_session = sessionmaker(clean_test_db_engine)
    with mock.patch(
            "hetdesrun.persistence.dbservice.nesting.Session",
            patched_session,
    ):
        with mock.patch(
                "hetdesrun.persistence.dbservice.revision.Session",
                patched_session,
        ):
            async with async_test_client as ac:

                json_files = [
                    "./transformations/components/connectors/pass-through-integer_100_57eea09f-d28e-89af-4e81-2027697a3f0f.json",
                    "./transformations/components/connectors/pass-through-series_100_bfa27afc-dea8-b8aa-4b15-94402f0739b6.json",
                    "./transformations/components/connectors/pass-through-string_100_2b1b474f-ddf5-1f4d-fec4-17ef9122112b.json",
                    "./transformations/components/remaining-useful-life/univariate-linear-rul-regression_100_8d61a267-3a71-51cd-2817-48c320469d6b.json",
                    "./transformations/components/visualization/univariate-linear-rul-regression-result-plot_100_9c3f88ce-1311-241e-18b7-acf7d3f5a051.json",
                    "./transformations/components/arithmetic/consecutive-differences_100_ce801dcb-8ce1-14ad-029d-a14796dcac92.json",
                    "./transformations/components/basic/filter_100_18260aab-bdd6-af5c-cac1-7bafde85188f.json",
                    "./transformations/components/basic/greater-or-equal_100_f759e4c0-1468-0f2e-9740-41302b860193.json",
                    "./transformations/components/basic/last-datetime-index_100_c8e3bc64-b214-6486-31db-92a8888d8991.json",
                    "./transformations/components/basic/restrict-to-time-interval_100_bf469c0a-d17c-ca6f-59ac-9838b2ff67ac.json",
                    "./transformations/components/connectors/pass-through-float_100_2f511674-f766-748d-2de3-ad5e62e10a1a.json",
                    "./transformations/components/visualization/single-timeseries-plot_100_8fba9b51-a0f1-6c6c-a6d4-e224103b819c.json",
                    "./transformations/workflows/examples/data-from-last-positive-step_100_2cbb87e7-ea99-4404-abe1-be550f22763f.json",
                    "./transformations/workflows/examples/univariate-linear-rul-regression-example_100_806df1b9-2fc8-4463-943f-3d258c569663.json",
                    "./transformations/workflows/examples/linear-rul-from-last-positive-step_100_3d504361-e351-4d52-8734-391aa47e8f24.json",
                ]

                for file in json_files:
                    tr_json = load_json(file)

                    response = await ac.put(
                        posix_urljoin(
                            get_config().hd_backend_api_url,
                            "transformations",
                            tr_json["id"],
                        ) + "?allow_overwrite_released=True",
                        json=tr_json,
                    )

                component_id = UUID("57eea09f-d28e-89af-4e81-2027697a3f0f")
                updated_component = read_single_transformation_revision(
                    component_id)
                updated_component.deprecate()

                response = await ac.put(
                    "/api/transformations/" + str(component_id),
                    json=json.loads(updated_component.json(by_alias=True)),
                )

                # linear rul from last positive step
                workflow_id = UUID("3d504361-e351-4d52-8734-391aa47e8f24")
                tr_workflow = read_single_transformation_revision(workflow_id)

                exec_by_id_input = ExecByIdInput(
                    id=workflow_id, wiring=tr_workflow.test_wiring)

                response = await ac.post(
                    "/api/transformations/execute",
                    json=json.loads(exec_by_id_input.json()),
                )

                assert response.status_code == 200
                assert "output_types_by_output_name" in response.json()
                assert response.json()["result"] == "ok"
                assert (abs(response.json()["output_results_by_output_name"]
                            ["intercept"] - 2.88) < 0.01)
                assert (response.json()["output_results_by_output_name"]
                        ["before_step_detect"] == {})
                assert (response.json()["output_results_by_output_name"]
                        ["rul_regression_result_plot"] == {})
Code Example #11
def get_object_path(name: str, tag: str) -> str:
    return os.path.join(get_config().model_repo_path, name + "_" + tag)
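For illustration only (the configured model_repo_path value below is made up):

# Hypothetical: with model_repo_path == "/mnt/models" this prints
# "/mnt/models/anomaly-model_v2".
print(get_object_path("anomaly-model", "v2"))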
Code Example #12
File: main.py  Project: hetida/hetida-designer
    # alembic_cfg.set_section_option("logger_alembic", "level", "DEBUG")
    # alembic_cfg.set_section_option("logger_sqlalchemy", "level", "DEBUG")
    command.upgrade(alembic_cfg, "head")
    logger.info("Finished running migrations.")


def run_trafo_rev_deployment():
    from hetdesrun.exportimport.importing import import_transformations

    import_transformations("./transformations",
                           update_component_code=False,
                           directly_into_db=True)


in_memory_db = detect_in_memory_db()
is_backend = get_config().is_backend_service

if in_memory_db:
    logger.info(
        "Detected in-memory db usage: Running migrations during importing of main.py."
    )
    run_migrations()

    if is_backend:
        logger.info("Detected in-memory db usage: "
                    "Running base component and example workflow deployment "
                    "during importing of main.py.")
        run_trafo_rev_deployment()

if __name__ == "__main__":
Code Example #13
File: baseurl.py  Project: hetida/hetida-designer
async def load_generic_adapter_base_urls() -> List[BackendRegisteredGenericRestAdapter]:
    """Loads generic REST adapter infos from the corresponding designer backend endpoint"""

    headers = get_generic_rest_adapter_auth_headers()

    url = posix_urljoin(get_config().hd_backend_api_url, "adapters/")
    logger.info("Start getting Generic REST Adapter URLS from HD Backend url %s", url)

    if get_config().is_backend_service:
        # call function directly
        adapter_list = await get_all_adapters()

        try:
            loaded_generic_rest_adapters: List[BackendRegisteredGenericRestAdapter] = [
                BackendRegisteredGenericRestAdapter(
                    id=adapter_dto.id,
                    name=adapter_dto.name,
                    url=adapter_dto.url,
                    internalUrl=adapter_dto.internal_url,
                )
                for adapter_dto in adapter_list
            ]
        except ValidationError as e:
            msg = "Failure trying to parse received generic adapter infos: " + str(e)

            logger.info(msg)
            raise AdapterHandlingException(msg) from e
    else:
        # call backend service "adapters" endpoint
        async with httpx.AsyncClient(
            verify=get_config().hd_backend_verify_certs
        ) as client:
            try:
                resp = await client.get(url, headers=headers)
            except httpx.HTTPError as e:
                msg = (
                    f"Failure connecting to hd backend adapters endpoint ({url}): "
                    + str(e)
                )
                logger.info(msg)
                raise AdapterConnectionError(msg) from e

        if resp.status_code != 200:
            msg = (
                f"HTTP failure trying to receive generic adapter infos from hd backend ({url}):"
                f" Status code {str(resp.status_code)}. Response: {resp.text}"
            )

            logger.info(msg)
            raise AdapterConnectionError(msg)

        try:
            loaded_generic_rest_adapters = [
                BackendRegisteredGenericRestAdapter(
                    id=adapter_dto.id,
                    name=adapter_dto.name,
                    url=adapter_dto.url,
                    internalUrl=adapter_dto.internal_url,
                )
                for adapter_dto in AdapterFrontendDtoRegisteredGenericRestAdapters.parse_obj(
                    resp.json()
                ).__root__
            ]
        except ValidationError as e:
            msg = "Failure trying to parse received generic adapter infos: " + str(e)

            logger.info(msg)
            raise AdapterHandlingException(msg) from e

        logger.info(
            "Finished getting Generic REST Adapter URLS from HD Backend url %s", url
        )

    return loaded_generic_rest_adapters
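A hedged sketch of a one-off invocation outside a running event loop (field access follows the DTO construction shown above):

import asyncio

# Hypothetical script usage; inside a running service this would be awaited.
adapters = asyncio.run(load_generic_adapter_base_urls())
for adapter in adapters:
    print(adapter.id, adapter.url)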
Code Example #14
async def load_framelike_data(
    filtered_sources: List[FilteredSource],
    # for timeseries: [("from", from_timestamp), ("to", to_timestamp)]
    additional_params: List[Tuple[str, str]],
    adapter_key: str,
    endpoint: Literal["timeseries", "dataframe"],
) -> pd.DataFrame:
    """Load framelike data from REST endpoint"""

    url = posix_urljoin(
        await get_generic_rest_adapter_base_url(adapter_key), endpoint
    )

    valid, msg = are_valid_sources(filtered_sources)
    if not valid:
        logger.error(msg)
        raise AdapterHandlingException(msg)

    common_data_type = filtered_sources[0].type

    logger.info(
        ("Requesting framelike data from generic rest adapter %s from endpoint %s:"
         " ids %s with additional params %s with common datatype %s"),
        adapter_key,
        url,
        str([filtered_source.ref_id for filtered_source in filtered_sources]),
        str(additional_params),
        str(common_data_type),
    )

    headers = get_generic_rest_adapter_auth_headers()

    with requests.Session() as session:
        try:
            start_time = datetime.datetime.now(datetime.timezone.utc)
            logger.info(
                "Start receiving generic rest adapter %s framelike data at %s",
                adapter_key,
                start_time.isoformat(),
            )
            resp = session.get(
                url,
                params=[
                    ("id", str(filtered_source.ref_id))
                    for filtered_source in filtered_sources
                ]
                + additional_params,
                stream=True,
                headers=headers,
                verify=get_config().hd_adapters_verify_certs,
            )
            if (
                resp.status_code == 404
                and "errorCode" in resp.text
                and resp.json()["errorCode"] == "RESULT_EMPTY"
            ):
                logger.info(
                    ("Received RESULT_EMPTY error_code from generic rest adapter %s"
                     " framelike endpoint %s, therefore returning empty DataFrame"
                     ),
                    adapter_key,
                    url,
                )
                if endpoint == "timeseries":
                    return create_empty_ts_df(ExternalType(common_data_type))
                # must be "dataframe":
                return df_empty({})

            if resp.status_code != 200:
                msg = (
                    f"Requesting framelike data from generic rest adapter endpoint {url} failed."
                    f" Status code: {resp.status_code}. Text: {resp.text}")
                logger.info(msg)
                raise AdapterConnectionError(msg)
            logger.info("Start reading in and parsing framelike data")

            df = pd.read_json(resp.raw, lines=True)
            end_time = datetime.datetime.now(datetime.timezone.utc)
            logger.info(
                ("Finished receiving generic rest framelike data (including dataframe parsing)"
                 " at %s. DataFrame shape is %s with columns %s"),
                end_time.isoformat(),
                str(df.shape),
                str(df.columns),
            )
            logger.info(
                ("Receiving generic rest adapter framelike data took"
                 " (including dataframe parsing)"
                 " %s"),
                str(end_time - start_time),
            )

            if "Data-Attributes" in resp.headers:
                logger.debug("Got Data-Attributes via GET response header")
                data_attributes = resp.headers["Data-Attributes"]
                df.attrs = decode_attributes(data_attributes)

            logger.debug(
                "Received dataframe of form %s:\n%s",
                str(df.shape) if len(df) > 0 else "EMPTY RESULT",
                str(df) if len(df) > 0 else "EMPTY RESULT",
            )
        except requests.HTTPError as e:
            msg = (
                f"Requesting framelike data from generic rest adapter endpoint {url}"
                f" failed with Exception {str(e)}")

            logger.info(msg)
            raise AdapterConnectionError(
                f"Requesting framelike data from generic rest adapter endpoint {url} failed."
            ) from e
    logger.info("Complete generic rest adapter %s framelike request",
                adapter_key)
    if len(df) == 0:
        if endpoint == "timeseries":
            return create_empty_ts_df(ExternalType(common_data_type), attrs=df.attrs)
        # must be dataframe:
        return df_empty({}, attrs=df.attrs)

    if "timestamp" in df.columns and endpoint == "dataframe":
        try:
            parsed_timestamps = pd.to_datetime(df["timestamp"])
        except ValueError as e:
            logger.info(
                "Column 'timestamp' of dataframe from %s could not be parsed and therefore"
                " not be set to index. Proceeding with default index. Error was: %s",
                url,
                str(e),
            )
        else:
            df.index = parsed_timestamps
            df = df.sort_index()

    return df
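A hedged sketch of a timeseries request through this helper. The adapter key, ref_id, and timestamps are placeholders, and FilteredSource is assumed to accept the ref_id and type fields referenced above (any further required fields are not shown in this excerpt):

import asyncio

# Hypothetical call: fetch one timeseries channel for a fixed interval.
df = asyncio.run(
    load_framelike_data(
        [FilteredSource(ref_id="sensor-1", type="timeseries(float)")],
        [("from", "2023-01-01T00:00:00+00:00"), ("to", "2023-01-02T00:00:00+00:00")],
        adapter_key="demo-adapter",
        endpoint="timeseries",
    )
)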
Code Example #15
File: utils.py  Project: hetida/hetida-designer
def get_backend_basic_auth() -> Tuple[Optional[str], Optional[str]]:
    return (
        get_config().hd_backend_basic_auth_user,
        get_config().hd_backend_basic_auth_password,
    )