Example #1
def _init_endpoint_record(graph_server, model: V2ModelServer):
    logger.info("Initializing endpoint records")
    try:
        project, uri, tag, hash_key = parse_versioned_object_uri(
            graph_server.function_uri)

        if model.version:
            versioned_model_name = f"{model.name}:{model.version}"
        else:
            versioned_model_name = f"{model.name}:latest"

        model_endpoint = ModelEndpoint(
            metadata=ModelEndpointMetadata(project=project,
                                           labels=model.labels),
            spec=ModelEndpointSpec(
                function_uri=graph_server.function_uri,
                model=versioned_model_name,
                model_class=model.__class__.__name__,
                model_uri=model.model_path,
                stream_path=config.model_endpoint_monitoring.store_prefixes.default.format(
                    project=project, kind="stream"
                ),
                active=True,
            ),
            status=ModelEndpointStatus(),
        )

        db = mlrun.get_run_db()

        db.create_or_patch_model_endpoint(
            project=project,
            endpoint_id=model_endpoint.metadata.uid,
            model_endpoint=model_endpoint,
        )
    except Exception as e:
        logger.error("Failed to create endpoint record", exc=e)
Example #2
def _mock_random_endpoint(state: Optional[str] = None) -> ModelEndpoint:
    def random_labels():
        return {f"{choice(string.ascii_letters)}": randint(0, 100) for _ in range(1, 5)}

    return ModelEndpoint(
        metadata=ModelEndpointMetadata(project=TEST_PROJECT, labels=random_labels()),
        spec=ModelEndpointSpec(
            function_uri=f"test/function_{randint(0, 100)}:v{randint(0, 100)}",
            model=f"model_{randint(0, 100)}:v{randint(0, 100)}",
            model_class="classifier",
        ),
        status=ModelEndpointStatus(state=state),
    )
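
A possible way to use `_mock_random_endpoint` in a test, registering a batch of mock endpoints through the run DB API shown in Example #1 (the helper name, count, and `state` value are arbitrary choices for illustration):

def _mock_endpoint_store(n: int = 5):
    # Register n random endpoints under the test project (illustrative only;
    # assumes the endpoint uid is generated when the metadata is created).
    db = mlrun.get_run_db()
    endpoints = [_mock_random_endpoint(state="active") for _ in range(n)]
    for endpoint in endpoints:
        db.create_or_patch_model_endpoint(
            project=TEST_PROJECT,
            endpoint_id=endpoint.metadata.uid,
            model_endpoint=endpoint,
        )
    return endpoints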
Example #3
def _init_endpoint_record(context, model_logger: Optional[_ModelLogPusher]):
    if model_logger is None or isinstance(model_logger.output_stream, _DummyStream):
        return

    try:
        project, uri, tag, hash_key = parse_versioned_object_uri(
            model_logger.function_uri)

        if model_logger.model.version:
            model = f"{model_logger.model.name}:{model_logger.model.version}"
        else:
            model = model_logger.model.name

        model_endpoint = ModelEndpoint(
            metadata=ModelEndpointMetadata(project=project,
                                           labels=model_logger.model.labels),
            spec=ModelEndpointSpec(
                function_uri=model_logger.function_uri,
                model=model,
                model_class=model_logger.model.__class__.__name__,
                model_uri=model_logger.model.model_path,
                stream_path=model_logger.stream_path,
                active=True,
            ),
            status=ModelEndpointStatus(),
        )

        db = mlrun.get_run_db()

        db.create_or_patch(
            project=project,
            endpoint_id=model_endpoint.metadata.uid,
            model_endpoint=model_endpoint,
        )
    except Exception as e:
        logger.error("Failed to create endpoint record", exc=e)
Example #4
    def get_endpoint(
        access_key: str,
        project: str,
        endpoint_id: str,
        metrics: Optional[List[str]] = None,
        start: str = "now-1h",
        end: str = "now",
        feature_analysis: bool = False,
    ) -> ModelEndpoint:
        """
        Returns a ModelEndpoint object with additional metrics and feature related data.

        :param access_key: V3IO access key for managing user permissions
        :param project: The name of the project
        :param endpoint_id: The id of the model endpoint
        :param metrics: A list of metrics to return for each endpoint, read more in 'TimeMetric'
        :param start: The start time of the metrics
        :param end: The end time of the metrics
        :param feature_analysis: When True, the base feature statistics and current feature
            statistics will be added to the output of the resulting object
        """

        logger.info(
            "Getting model endpoint record from kv",
            endpoint_id=endpoint_id,
        )

        client = get_v3io_client(endpoint=config.v3io_api)

        path = config.model_endpoint_monitoring.store_prefixes.default.format(
            project=project, kind=ENDPOINTS)
        _, container, path = parse_model_endpoint_store_prefix(path)

        endpoint = client.kv.get(
            container=container,
            table_path=path,
            key=endpoint_id,
            access_key=access_key,
            raise_for_status=RaiseForStatus.never,
        )
        endpoint = endpoint.output.item

        if not endpoint:
            raise MLRunNotFoundError(f"Endpoint {endpoint_id} not found")

        labels = endpoint.get("labels")

        feature_names = endpoint.get("feature_names")
        feature_names = _json_loads_if_not_none(feature_names)

        label_names = endpoint.get("label_names")
        label_names = _json_loads_if_not_none(label_names)

        feature_stats = endpoint.get("feature_stats")
        feature_stats = _json_loads_if_not_none(feature_stats)

        current_stats = endpoint.get("current_stats")
        current_stats = _json_loads_if_not_none(current_stats)

        drift_measures = endpoint.get("drift_measures")
        drift_measures = _json_loads_if_not_none(drift_measures)

        monitor_configuration = endpoint.get("monitor_configuration")
        monitor_configuration = _json_loads_if_not_none(monitor_configuration)

        endpoint = ModelEndpoint(
            metadata=ModelEndpointMetadata(
                project=endpoint.get("project"),
                labels=_json_loads_if_not_none(labels),
                uid=endpoint_id,
            ),
            spec=ModelEndpointSpec(
                function_uri=endpoint.get("function_uri"),
                model=endpoint.get("model"),
                model_class=endpoint.get("model_class") or None,
                model_uri=endpoint.get("model_uri") or None,
                feature_names=feature_names or None,
                label_names=label_names or None,
                stream_path=endpoint.get("stream_path") or None,
                algorithm=endpoint.get("algorithm") or None,
                monitor_configuration=monitor_configuration or None,
                active=endpoint.get("active") or None,
            ),
            status=ModelEndpointStatus(
                state=endpoint.get("state") or None,
                feature_stats=feature_stats or None,
                current_stats=current_stats or None,
                first_request=endpoint.get("first_request") or None,
                last_request=endpoint.get("last_request") or None,
                accuracy=endpoint.get("accuracy") or None,
                error_count=endpoint.get("error_count") or None,
                drift_status=endpoint.get("drift_status") or None,
            ),
        )

        if feature_analysis and feature_names:
            endpoint_features = get_endpoint_features(
                feature_names=feature_names,
                feature_stats=feature_stats,
                current_stats=current_stats,
            )
            if endpoint_features:
                endpoint.status.features = endpoint_features
                endpoint.status.drift_measures = drift_measures

        if metrics:
            endpoint_metrics = get_endpoint_metrics(
                access_key=access_key,
                project=project,
                endpoint_id=endpoint_id,
                start=start,
                end=end,
                metrics=metrics,
            )
            if endpoint_metrics:
                endpoint.status.metrics = endpoint_metrics

        return endpoint
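
A possible call site for `get_endpoint` (the environment variable, project name, endpoint id, and metric name below are placeholders for illustration; in the source this is a method on a class, called here unqualified for brevity):

import os

endpoint = get_endpoint(
    access_key=os.environ["V3IO_ACCESS_KEY"],  # assumed to hold a valid V3IO access key
    project="my-project",
    endpoint_id="1234",
    metrics=["predictions_per_second"],        # assumed metric name
    start="now-1d",
    end="now",
    feature_analysis=True,
)
print(endpoint.spec.model, endpoint.status.drift_status)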