def app_builder(self, conn: Cluster,
                metadata: Dict[str, str]) -> ApplicationBuilder:
    """Assemble an :class:`ApplicationBuilder` from this application spec.

    Resolves every referenced ``"name:version"`` model on the cluster and
    attaches either a single-stage (``singular``) or a multi-stage
    (``pipeline``) execution graph to the builder.

    :param conn: cluster used to resolve model versions
    :param metadata: base metadata; entries from ``self.metadata`` take
        precedence (the caller's dict is left unmodified)
    :raises ValueError: if neither ``singular`` nor ``pipeline`` is set
    :return: the configured builder
    """
    builder = ApplicationBuilder(self.name)
    if self.metadata:
        # Merge into a fresh dict: the previous implementation mutated the
        # caller's `metadata` argument in place.
        metadata = {**metadata, **self.metadata}
    builder.with_metadatas(metadata)
    if self.singular:
        name, version = self.singular.model.split(":")
        mv = ModelVersion.find(conn, name, version)
        variant = ModelVariant(
            modelVersionId=mv.id,
            weight=100,  # single variant receives all traffic
            deploymentConfigurationName=self.singular.deployment_config)
        builder.with_stage(
            ExecutionStage(signature=None, model_variants=[variant]))
    elif self.pipeline:
        for stage in self.pipeline:
            variants: List[ModelVariant] = []
            for model in stage:
                name, version = model.model.split(":")
                mv = ModelVersion.find(conn, name, version)
                variants.append(
                    ModelVariant(
                        modelVersionId=mv.id,
                        weight=model.weight,
                        deploymentConfigurationName=model.deployment_config))
            builder.with_stage(
                ExecutionStage(signature=None, model_variants=variants))
    else:
        raise ValueError(
            "Invalid application: no 'singular' or 'pipeline' fields")
    return builder
# Example #2
def push(obj, model_version, filename, s3path):
    """Upload training data for a model version from a local file or S3.

    Exactly one of ``filename`` / ``s3path`` must be given.  Arguments are
    validated before the cluster round-trip, and the success/error handling
    for both upload paths is shared instead of duplicated.

    :param obj: cluster connection (click context object)
    :param model_version: model reference in ``"name:version"`` form
    :param filename: local file to upload, mutually exclusive with s3path
    :param s3path: S3 location to register, mutually exclusive with filename
    :raises click.ClickException: on invalid arguments or a failed upload
    """
    if filename and s3path:
        raise click.ClickException(
            "Both --filename and --s3path were provided. Need only one of them."
        )
    if not filename and not s3path:
        raise click.ClickException("Neither S3 nor file was defined.")

    model, version = model_version.split(":")
    mv = ModelVersion.find(obj, model, int(version))

    if filename:
        click.echo("Uploading local file")
        res = _upload_local_file(obj, mv.id, filename)
    else:
        click.echo("Uploading S3 path")
        res = _upload_s3_file(obj, mv.id, s3path)

    if res.ok:
        click.echo("Data uploaded")
    else:
        raise click.ClickException(str(res))

    click.echo(
        f"Data profile for {model_version} will be available: {obj.http_address}/models/{mv.name}/{mv.version}"
    )
# Example #3
def update_application(model_name: str,
                       model_version: int,
                       hydrosphere_uri: str,
                       application_name: str = "kf_lr_app"):
    """Recreate `application_name` on the cluster serving the given model version.

    Any existing application with the same name is deleted first (best
    effort), then a fresh single-stage application is built and locked
    until it starts.
    """
    import time
    from hydrosdk import Cluster
    from hydrosdk.modelversion import ModelVersion
    from hydrosdk.application import Application, ApplicationBuilder, ExecutionStageBuilder

    cluster = Cluster(hydrosphere_uri)
    version = ModelVersion.find(cluster, model_name, model_version)

    # Best effort: remove a stale application with the same name, if any.
    try:
        Application.delete(cluster, application_name)
        time.sleep(5)
    except Exception as e:
        print(
            f"Got error while trying to delete an application {application_name}"
        )
        print(e)

    print("Creating an Application object")
    stage = ExecutionStageBuilder().with_model_variant(version, 100).build()
    builder = ApplicationBuilder(cluster, application_name).with_stage(stage)
    print(f"Uploading model to the cluster {hydrosphere_uri}")
    application = builder.build()
    application = application.lock_while_starting()
    print(application)
    return application
# Example #4
def logs(obj, model_name):
    """Echo the build logs of a model version given as "name:version".

    :param obj: cluster connection (click context object)
    :param model_name: model reference in ``"name:version"`` form
    """
    name, version = model_name.split(':')
    # Pass the version as int, consistent with the `push` command above.
    mv = ModelVersion.find(obj, name, int(version))
    for entry in mv.build_logs():
        click.echo(entry.data)
    click.echo("End of logs")
# Example #5
def test_pipeline_apply(mock_app_build, mock_app_delete, mock_app_find,
                        mock_mv_find, singular_yaml_path):
    """Applying a parsed application file should run through the mocked find/build calls."""
    conn = Cluster("http://")
    mock_mv_find.return_value = ModelVersion(
        cluster=conn,
        id=1,
        model_id=1,
        name="claims-model",
        version=1,
        signature=ModelSignature(),
        status="Released",
        image=DockerImage(name="aaa", tag="aaa"),
        runtime=DockerImage(name="aaa", tag="aaa"),
        is_external=False)

    def make_stub_app():
        # Both the find and build mocks return the same kind of stub app.
        return HS_APP(cluster=conn,
                      id=1,
                      name="app",
                      execution_graph=ExecutionGraph([]),
                      status="Ready",
                      signature=ModelSignature(),
                      kafka_streaming=[])

    mock_app_find.return_value = make_stub_app()
    mock_app_build.return_value = make_stub_app()

    app: Application = Application.parse_file(singular_yaml_path)
    app.apply(conn, os.path.dirname(singular_yaml_path))
# Example #6
def test_model_analyze_without_grpc():
    """analyze() must raise when the cluster has no gRPC endpoint configured."""
    cluster = Cluster("asdasd:9091", check_connection=False)
    external_model = ModelVersion(cluster=cluster,
                                  id=1,
                                  model_id=2,
                                  name="test",
                                  version=3,
                                  signature=ModelSignature(),
                                  status=None,
                                  image="",
                                  runtime="",
                                  is_external=True)
    with pytest.raises(HydrosphereException):
        external_model.analyze(request_id="asdsad",
                               request=PredictRequest(),
                               error="error")
    def apply(self, conn: Cluster, cwd):
        """Build and upload this model definition to the cluster.

        Assembles a ModelVersionBuilder from the declared fields, builds the
        model version on `conn`, streams the Docker build logs, then creates
        any declared monitoring metric specs and finally uploads training
        data if the built version has any attached.

        :param conn: cluster connection the model is uploaded to
        :param cwd: directory used as the builder's payload path and scanned
            for collected metadata
        :return: the built model version
        """
        mv_builder = ModelVersionBuilder(name = self.name,path = cwd) \
            .with_runtime(DockerImage.from_string(self.runtime)) \
            .with_payload(self.payload) \
            .with_signature(self.contract.to_proto())

        # Optional build-time settings declared in the manifest.
        if self.install_command:
            mv_builder.with_install_command(self.install_command)

        if self.training_data:
            mv_builder.with_training_data(self.training_data)

        # Metadata auto-collected from `cwd`; explicit self.metadata entries
        # override collected ones via dict.update.
        collected_meta = CollectedMetadata.collect(cwd).to_metadata()
        if self.metadata:
            collected_meta.update(self.metadata)
        mv_builder.with_metadata(collected_meta)

        if self.monitoring_configuration:
            mc = SDK_MC(self.monitoring_configuration.batch_size)
            mv_builder.with_monitoring_configuration(mc)

        logging.debug(f"Model version builder:\n{mv_builder}")

        mv = mv_builder.build(conn)
        # Stream the build output so the user can follow the Docker build.
        build_log_handler = DockerLogHandler()
        logging.info("Build logs:")
        for ev in mv.build_logs():
            build_log_handler.show(ev.data)

        if self.monitoring:
            logging.info(
                f"Uploading monitoring configuration for the model {mv.name}:{mv.version}"
            )
            # Each monitoring entry references another model ("name:version")
            # that is registered as a metric for the freshly built version.
            for mon in self.monitoring:
                name, version = mon.config.monitoring_model.split(":")
                mon_mv = SDK_MV.find(conn, name, int(version))
                sdk_conf = MetricSpecConfig(modelversion_id=mon_mv.id,
                                            threshold=mon.config.threshold,
                                            threshold_op=mon.config.operator)
                sdk_ms = MetricSpec.create(cluster=conn,
                                           name=mon.name,
                                           modelversion_id=mv.id,
                                           config=sdk_conf)
                logging.debug(
                    f"Created metric spec: {sdk_ms.name} with id {sdk_ms.id}")

        if mv.training_data:
            logging.info("Uploading training data")
            resp = mv.upload_training_data()
            logging.info(f"Training data profile is available at {resp.url}")

        return mv
def test_create_high_level(cluster: Cluster,
                           model_version_builder: ModelVersionBuilder,
                           monitoring_mv: ModelVersion):
    """Assigning a metric attaches it to the freshly released model version."""
    released: ModelVersion = model_version_builder.build(cluster)
    released.lock_till_released(timeout=config.lock_timeout)

    released.assign_metrics(
        [monitoring_mv.as_metric(10, ThresholdCmpOp.NOT_EQ)])
    try:
        spec_names = [
            spec.name
            for spec in MetricSpec.find_by_modelversion(cluster, released.id)
        ]
        assert monitoring_mv.name in spec_names
    finally:
        # Clean up every metric spec created for this model version.
        for spec in MetricSpec.find_by_modelversion(cluster, released.id):
            MetricSpec.delete(cluster, spec.id)
# Example #9
def list(obj):
    """Print a table of all model versions on the cluster.

    Rows are sorted by model name, then version number, and rendered as a
    GitHub-flavored Markdown table via ``tabulate``.

    :param obj: cluster connection (click context object)
    """
    # Build the view with a comprehension instead of a manual append loop.
    versions_view = [{
        'id': m.id,
        'name': m.name,
        '#': m.version,
        'status': m.status,
        'runtime': str(m.runtime),
        'apps': m.applications
    } for m in ModelVersion.list(obj)]
    click.echo(
        tabulate(sorted(versions_view, key=lambda x: (x['name'], x['#'])),
                 headers="keys",
                 tablefmt="github"))
# Example #10
def test_external_model_parse():
    """ModelVersion._from_json must parse an external-model JSON payload."""
    import json
    json_str = """
    {
    "id": 138,
    "created": "2021-09-24T11:29:09.624Z",
    "finished": "2021-09-24T11:29:09.624Z",
    "modelVersion": 1,
    "modelSignature": {
        "signatureName": "predict",
        "inputs": [{
            "name": "in",
            "dtype": "DT_DOUBLE",
            "shape": {
                "dims": []
            },
            "profile": "NONE"
        }],
        "outputs": [{
            "name": "out",
            "dtype": "DT_DOUBLE",
            "shape": {
                "dims": []
            },
            "profile": "NONE"
        }]
    },
    "model": {
        "id": 18,
        "name": "external-model"
    },
    "status": "Released",
    "metadata": {},
    "applications": [],
    "image": null,
    "runtime": null,
    "monitoringConfiguration": {
        "batchSize": 100
    },
    "isExternal": true
    }"""
    json_dict = json.loads(json_str)
    cl = Cluster("asdasd:9091", "asdasd:9090", check_connection=False)

    mv = ModelVersion._from_json(cl, json_dict)
    # Assert the parsed fields instead of the previous `print(mv)` /
    # `assert False` debug stub, which made this test fail unconditionally.
    assert mv.id == 138
    assert mv.version == 1
    assert mv.is_external is True
# Example #11
 def __init__(self,
              cluster: Cluster,
              model_version_id: int,
              servable_name: str,
              status: ServableStatus,
              status_message: Optional[str],
              deployment_configuration_name: Optional[str],
              metadata: Optional[dict] = None) -> None:
     # NOTE: per PEP 484, __init__ is annotated -> None (it was annotated
     # -> 'Servable', which is incorrect for a constructor).
     """Initialize a servable description.

     :param cluster: cluster connection used to resolve the model version
     :param model_version_id: id of the model version backing this servable
     :param servable_name: unique servable name on the cluster
     :param status: current servable status
     :param status_message: optional human-readable status detail
     :param deployment_configuration_name: optional deployment config name
     :param metadata: optional metadata dict; defaults to an empty dict
     """
     self.model_version_id = model_version_id
     self.name = servable_name
     self.meta = metadata or {}
     self.cluster = cluster
     self.status = status
     self.status_message = status_message
     self.deployment_configuration_name = deployment_configuration_name
     # Resolves the backing model version from the cluster (network call).
     self.model_version = ModelVersion.find_by_id(cluster,
                                                  self.model_version_id)
# Example #12
def test_list_models_by_model_name(cluster: Cluster, runtime: DockerImage,
                                   payload: list, signature: ModelSignature):
    """find_by_model_name returns only that model's versions, in build order."""
    def create_model_version_builder(name: str):
        # Every builder points at the same identity-model payload on disk.
        current_dir = os.path.dirname(os.path.abspath(__file__))
        model_path = os.path.join(current_dir, 'resources/identity_model/')
        return ModelVersionBuilder(name, model_path) \
            .with_runtime(runtime) \
            .with_payload(payload) \
            .with_signature(signature)

    # random.randint requires int bounds: 1e5 is a float, which has raised
    # TypeError since Python 3.12 (deprecated since 3.10).
    name1 = f"{config.default_model_name}-one-{random.randint(0, 100_000)}"
    name2 = f"{config.default_model_name}-two-{random.randint(0, 100_000)}"
    mv1: ModelVersion = create_model_version_builder(name1).build(cluster)
    mv2: ModelVersion = create_model_version_builder(name1).build(cluster)
    _ = create_model_version_builder(name2).build(cluster)

    mvs = ModelVersion.find_by_model_name(cluster, name1)
    assert len(mvs) == 2
    # test sorting
    assert mvs[0].id == mv1.id
    assert mvs[1].id == mv2.id
# Example #13
def test_model_analyze_error():
    """_analyze must copy request metadata and the error into the response."""
    cluster = Cluster("asdasd:9091", "asdasd:9090", check_connection=False)
    mv = ModelVersion(cluster=cluster,
                      id=1,
                      model_id=2,
                      name="test",
                      version=3,
                      signature=ModelSignature(),
                      status=None,
                      image="",
                      runtime="",
                      is_external=True)
    request = PredictRequest()
    result = _analyze(mv, Mock(), "asdsad", request, error="error")

    meta = result.metadata
    assert meta.request_id == "asdsad"
    assert meta.model_version_id == 1
    assert meta.model_name == "test"
    assert meta.model_version == 3
    assert result.error == "error"
    assert result.response == PredictResponse()
# Example #14
def test_model_json_parser_for_external_models(
        cluster: Cluster, external_modelversion_json: dict):
    """The parsed ModelVersion must carry the id from the JSON payload."""
    expected_id = 420
    external_modelversion_json["id"] = expected_id
    parsed: ModelVersion = ModelVersion._from_json(
        cluster, external_modelversion_json)
    assert parsed.id == expected_id
# Example #15
def test_modelversion_list(cluster: Cluster,
                           model_version_builder: ModelVersionBuilder):
    """A freshly built model version shows up in ModelVersion.list."""
    built: ModelVersion = model_version_builder.build(cluster)
    listed_ids = [mv.id for mv in ModelVersion.list(cluster)]
    assert built.id in listed_ids
# Example #16
def test_modelversion_find(cluster: Cluster,
                           model_version_builder: ModelVersionBuilder):
    """find() by name and version returns the same model version that was built."""
    built: ModelVersion = model_version_builder.build(cluster)
    looked_up = ModelVersion.find(cluster, built.name, built.version)
    assert looked_up.id == built.id