Example #1
def test_realtime_api(
    client: cx.Client, api: str, timeout: int = None, api_config_name: str = "cortex.yaml"
):
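    """Deploy each API spec in `api_config_name`, wait for the first one to become
    ready, submit sample.json for prediction, check the HTTP response against any
    expectations.yaml response assertions, and delete the API afterwards."""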
    api_dir = TEST_APIS_DIR / api
    with open(str(api_dir / api_config_name)) as f:
        api_specs = yaml.safe_load(f)

    expectations = None
    expectations_file = api_dir / "expectations.yaml"
    if expectations_file.exists():
        expectations = parse_expectations(str(expectations_file))

    api_name = api_specs[0]["name"]
    for api_spec in api_specs:
        client.create_api(api_spec=api_spec, project_dir=api_dir)

    try:
        assert apis_ready(
            client=client, api_names=[api_name], timeout=timeout
        ), f"apis {api_name} not ready"

        with open(str(api_dir / "sample.json")) as f:
            payload = json.load(f)

        response = request_prediction(client, api_name, payload)

        assert (
            response.status_code == HTTPStatus.OK
        ), f"status code: got {response.status_code}, expected {HTTPStatus.OK}"

        if expectations and "response" in expectations:
            assert_response_expectations(response, expectations["response"])
    finally:
        delete_apis(client, [api_name])
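
A minimal invocation sketch for the helper above, assuming it is imported into a pytest module where client is a fixture yielding a connected cx.Client; the API directory name and timeout value are hypothetical, not taken from the source:

import pytest

# Hypothetical wiring: "iris-classifier" stands in for a directory under
# TEST_APIS_DIR that contains cortex.yaml and sample.json.
@pytest.mark.parametrize("api", ["iris-classifier"])
def test_realtime(client, api):
    test_realtime_api(client=client, api=api, timeout=300)
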
Example #2
def test_realtime_api(
    printer: Callable,
    client: cx.Client,
    api: str,
    timeout: int = None,
    api_config_name: str = "cortex_cpu.yaml",
    node_groups: List[str] = [],
    extra_path: str = "",
    method: str = "POST",
):
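    """Deploy the single API spec in `api_config_name` (optionally pinned to
    `node_groups`), wait for it to become ready, send sample.json via POST or GET
    (optionally under `extra_path`), check the response against any
    expectations.yaml assertions, dump API info and logs on failure, and delete
    the API afterwards."""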
    api_dir = TEST_APIS_DIR / api
    with open(str(api_dir / api_config_name)) as f:
        api_specs = yaml.safe_load(f)
    assert len(api_specs) == 1

    if len(node_groups) > 0:
        api_specs[0]["node_groups"] = node_groups

    expectations = None
    expectations_file = api_dir / "expectations.yaml"
    if expectations_file.exists():
        expectations = parse_expectations(str(expectations_file))

    api_name = api_specs[0]["name"]
    for api_spec in api_specs:
        client.deploy(api_spec=api_spec)

    try:
        assert apis_ready(client=client, api_names=[api_name],
                          timeout=timeout), f"apis {api_name} not ready"

        with open(str(api_dir / "sample.json")) as f:
            payload = json.load(f)
        if method == "POST":
            response = post_request(client, api_name, payload, extra_path)
        else:
            response = get_request(client, api_name, payload, extra_path)

        assert (
            response.status_code == HTTPStatus.OK
        ), f"status code: got {response.status_code}, expected {HTTPStatus.OK}"

        if expectations and "response" in expectations:
            assert_response_expectations(response, expectations["response"])
    except:
        # best effort
        try:
            api_info = client.get_api(api_name)
            printer(json.dumps(api_info, indent=2))
            td.Thread(target=lambda: stream_api_logs(client, api_name),
                      daemon=True).start()
            time.sleep(5)
        finally:
            raise
    finally:
        delete_apis(client, [api_name])
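
A sketch of how this variant's extra knobs might be exercised; the API name, node group, and sub-path are illustrative assumptions, and extra_path is presumably appended to the endpoint by post_request/get_request:

# Hypothetical call: hit a GET route under a sub-path and pin the API to a
# specific node group.
test_realtime_api(
    printer=print,
    client=client,                  # assumed: an existing cx.Client
    api="text-generator",           # assumed: a directory under TEST_APIS_DIR
    timeout=300,
    api_config_name="cortex_cpu.yaml",
    node_groups=["cpu-spot"],       # assumed node group name
    extra_path="v1/generate",       # assumed sub-path
    method="GET",
)
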
Example #3
def test_realtime_api(
    client: cx.Client, api: str, timeout: int = None, api_config_name: str = "cortex.yaml"
):
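    """Deploy each API spec in `api_config_name` and wait for the first one to
    become ready; exercise either the HTTP endpoint with sample.json or, when
    expectations.yaml has a grpc section, the gRPC endpoint (unary or streaming),
    then delete the API."""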
    api_dir = TEST_APIS_DIR / api
    with open(str(api_dir / api_config_name)) as f:
        api_specs = yaml.safe_load(f)

    expectations = None
    expectations_file = api_dir / "expectations.yaml"
    if expectations_file.exists():
        expectations = parse_expectations(str(expectations_file))

    api_name = api_specs[0]["name"]
    for api_spec in api_specs:
        client.create_api(api_spec=api_spec, project_dir=api_dir)

    try:
        assert apis_ready(
            client=client, api_names=[api_name], timeout=timeout
        ), f"apis {api_name} not ready"

        if not expectations or "grpc" not in expectations:
            with open(str(api_dir / "sample.json")) as f:
                payload = json.load(f)
            response = request_prediction(client, api_name, payload)

            assert (
                response.status_code == HTTPStatus.OK
            ), f"status code: got {response.status_code}, expected {HTTPStatus.OK}"

            if expectations and "response" in expectations:
                assert_response_expectations(response, expectations["response"])

        if expectations and "grpc" in expectations:
            stub, input_sample, output_values, output_type, is_output_stream = generate_grpc(
                client, api_name, api_dir, expectations["grpc"]
            )
            if is_output_stream:
                for response, output_val in zip(stub.Predict(input_sample), output_values):
                    assert (
                        type(response) == output_type
                    ), f"didn't receive response of type {str(output_type)}, but received {str(type(response))}"
                    assert response == output_val, f"received {response} instead of {output_val}"
            else:
                response = stub.Predict(input_sample)
                assert (
                    type(response) == output_type
                ), f"didn't receive response of type {str(output_type)}, but received {str(type(response))}"
                assert (
                    response == output_values[0]
                ), f"received {response} instead of {output_values[0]}"
    finally:
        delete_apis(client, [api_name])
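
The unary/streaming branch above could be factored into a small helper; this is only a sketch of that refactor, built from the same tuple that generate_grpc returns, not code from the source:

def assert_grpc_expectations(stub, input_sample, output_values, output_type, is_output_stream):
    # Streaming endpoints yield one message per expected value; unary endpoints
    # return a single message that must match the first expected value.
    if is_output_stream:
        for response, output_val in zip(stub.Predict(input_sample), output_values):
            assert type(response) == output_type, (
                f"didn't receive response of type {output_type}, but received {type(response)}"
            )
            assert response == output_val, f"received {response} instead of {output_val}"
    else:
        response = stub.Predict(input_sample)
        assert type(response) == output_type, (
            f"didn't receive response of type {output_type}, but received {type(response)}"
        )
        assert response == output_values[0], f"received {response} instead of {output_values[0]}"
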
Example #4
def test_async_api(
    client: cx.Client,
    api: str,
    deploy_timeout: int = None,
    poll_retries: int = 5,
    poll_sleep_seconds: int = 1,
    api_config_name: str = "cortex.yaml",
):
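    """Deploy the single async API spec in `api_config_name`, wait for it to
    become ready, submit sample.json, poll for the result up to `poll_retries`
    times, and validate the result payload (id, status, result, timestamp)
    against any expectations before deleting the API."""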
    api_dir = TEST_APIS_DIR / api
    with open(str(api_dir / api_config_name)) as f:
        api_specs = yaml.safe_load(f)

    expectations = None
    expectations_file = api_dir / "expectations.yaml"
    if expectations_file.exists():
        expectations = parse_expectations(str(expectations_file))

    assert len(api_specs) == 1

    api_name = api_specs[0]["name"]
    client.create_api(api_spec=api_specs[0], project_dir=api_dir)

    try:
        assert apis_ready(
            client=client, api_names=[api_name], timeout=deploy_timeout
        ), f"apis {api_name} not ready"

        with open(str(api_dir / "sample.json")) as f:
            payload = json.load(f)

        response = request_prediction(client, api_name, payload)

        assert (
            response.status_code == HTTPStatus.OK
        ), f"workload submission status code: got {response.status_code}, expected {HTTPStatus.OK}"

        response_json = response.json()
        assert "id" in response_json

        request_id = response_json["id"]

        result_response = None
        for i in range(poll_retries + 1):
            result_response = retrieve_async_result(
                client=client, api_name=api_name, request_id=request_id
            )

            if result_response.status_code == HTTPStatus.OK:
                break

            time.sleep(poll_sleep_seconds)

        assert (
            result_response.status_code == HTTPStatus.OK
        ), f"result retrieval status code: got {result_response.status_code}, expected {HTTPStatus.OK}"

        result_response_json = result_response.json()

        # validate keys are in the result json response
        assert (
            "id" in result_response_json
        ), f"id key was not present in result response (response: {result_response_json})"
        assert (
            "status" in result_response_json
        ), f"status key was not present in result response (response: {result_response_json})"
        assert (
            "result" in result_response_json
        ), f"result key was not present in result response (response: {result_response_json})"
        assert (
            "timestamp" in result_response_json
        ), f"timestamp key was not present in result response (response: {result_response_json})"

        # validate result json response has valid values
        assert (
            result_response_json["id"] == request_id
        ), f"result 'id' and request 'id' mismatch ({result_response_json['id']} != {request_id})"
        assert (
            result_response_json["status"] == "completed"
        ), f"async workload did not complete (response: {result_response_json})"
        assert result_response_json["timestamp"] != "", "result 'timestamp' value was empty"
        assert result_response_json["result"] != "", "result 'result' value was empty"

        # assert result expectations
        if expectations:
            assert_json_expectations(result_response_json["result"], expectations["response"])

    finally:
        delete_apis(client, [api_name])
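
The retry loop in the middle of this test is the piece most worth reusing; a sketch of it as a standalone helper, assuming the same imports (time, HTTPStatus) and the retrieve_async_result signature used above:

def poll_async_result(client, api_name, request_id, retries=5, sleep_seconds=1):
    # Poll the async result endpoint until it returns 200 OK or the retry
    # budget is exhausted; the last response is returned either way so the
    # caller can assert on it.
    response = None
    for _ in range(retries + 1):
        response = retrieve_async_result(client=client, api_name=api_name, request_id=request_id)
        if response.status_code == HTTPStatus.OK:
            break
        time.sleep(sleep_seconds)
    return response
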
Example #5
def test_long_running_realtime(
    printer: Callable,
    client: cx.Client,
    api: str,
    long_running_config: Dict[str, Union[int, float]],
    deploy_timeout: int = None,
    api_config_name: str = "cortex.yaml",
):
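    """Deploy the API spec(s) in `api_config_name`, then send predictions
    continuously for `time_to_run` seconds, asserting every response is 200 OK;
    afterwards verify that the cluster-reported number of 2xx responses matches
    the number of requests sent, dumping API info and logs on failure."""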
    api_dir = TEST_APIS_DIR / api
    with open(str(api_dir / api_config_name)) as f:
        api_specs = yaml.safe_load(f)

    time_to_run = long_running_config["time_to_run"]
    status_code_timeout = long_running_config["status_code_timeout"]

    expectations = None
    expectations_file = api_dir / "expectations.yaml"
    if expectations_file.exists():
        expectations = parse_expectations(str(expectations_file))

    api_name = api_specs[0]["name"]
    for api_spec in api_specs:
        client.create_api(api_spec=api_spec, project_dir=api_dir)

    try:
        assert apis_ready(client=client,
                          api_names=[api_name],
                          timeout=deploy_timeout), f"apis {api_name} not ready"

        with open(str(api_dir / "sample.json")) as f:
            payload = json.load(f)

        counter = 0
        start_time = time.time()
        while time.time() - start_time <= time_to_run:
            response = request_prediction(client, api_name, payload)

            assert (
                response.status_code == HTTPStatus.OK
            ), f"status code: got {response.status_code}, expected {HTTPStatus.OK}"

            if expectations and "response" in expectations:
                assert_response_expectations(response,
                                             expectations["response"])

            counter += 1

        printer("verifying number of processed requests using the client")
        assert api_requests(
            client, api_name, counter, timeout=status_code_timeout
        ), f"the number of 2xx response codes for api {api_name} doesn't match the expected number {counter}"

    except:
        # best effort
        try:
            api_info = client.get_api(api_name)
            printer(json.dumps(api_info, indent=2))
            td.Thread(target=lambda: client.stream_api_logs(api_name),
                      daemon=True).start()
            time.sleep(5)
        except:
            pass
        raise
    finally:
        delete_apis(client, [api_name])
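
A sketch of how the long-running test might be driven; the duration, timeouts, and API directory name are illustrative assumptions:

# Hypothetical configuration: send traffic for two minutes, then allow up to
# five minutes for the reported 2xx count to converge.
long_running_config = {"time_to_run": 120, "status_code_timeout": 300}

test_long_running_realtime(
    printer=print,
    client=client,          # assumed: an existing cx.Client
    api="sleep",            # assumed: a directory under TEST_APIS_DIR
    long_running_config=long_running_config,
    deploy_timeout=600,
)
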
Example #6
def test_long_running_realtime(
    printer: Callable,
    client: cx.Client,
    api: str,
    long_running_config: Dict[str, Union[int, float]],
    deploy_timeout: int = None,
    api_config_name: str = "cortex_cpu.yaml",
    node_groups: List[str] = [],
):
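    """Deploy the single API spec in `api_config_name` (optionally pinned to
    `node_groups`), then POST sample.json continuously for `time_to_run` seconds,
    asserting every response is 200 OK; dump API info and logs on failure and
    delete the API afterwards."""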
    api_dir = TEST_APIS_DIR / api
    with open(str(api_dir / api_config_name)) as f:
        api_specs = yaml.safe_load(f)
    assert len(api_specs) == 1

    time_to_run = long_running_config["time_to_run"]

    if len(node_groups) > 0:
        api_specs[0]["node_groups"] = node_groups

    expectations = None
    expectations_file = api_dir / "expectations.yaml"
    if expectations_file.exists():
        expectations = parse_expectations(str(expectations_file))

    api_name = api_specs[0]["name"]
    for api_spec in api_specs:
        client.deploy(api_spec=api_spec)

    try:
        assert apis_ready(client=client,
                          api_names=[api_name],
                          timeout=deploy_timeout), f"apis {api_name} not ready"

        with open(str(api_dir / "sample.json")) as f:
            payload = json.load(f)

        counter = 0
        start_time = time.time()
        while time.time() - start_time <= time_to_run:
            response = post_request(client, api_name, payload)

            assert (
                response.status_code == HTTPStatus.OK
            ), f"status code: got {response.status_code}, expected {HTTPStatus.OK}"

            if expectations and "response" in expectations:
                assert_response_expectations(response,
                                             expectations["response"])

            counter += 1

    except:
        # best effort
        try:
            api_info = client.get_api(api_name)
            printer(json.dumps(api_info, indent=2))
            td.Thread(target=lambda: stream_api_logs(client, api_name),
                      daemon=True).start()
            time.sleep(5)
        finally:
            raise
    finally:
        delete_apis(client, [api_name])
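
The except block here (and in the other variants) repeats the same best-effort diagnostics; a sketch of that pattern as a helper, assuming td is the threading module imported under that alias and that stream_api_logs keeps the signature used above:

import threading as td  # matching the alias used in the tests above


def dump_debug_info(printer, client, api_name, log_seconds=5):
    # Best effort: print the API's info and tail its logs in a daemon thread
    # for a few seconds, swallowing any errors so the original test failure is
    # still what gets reported.
    try:
        printer(json.dumps(client.get_api(api_name), indent=2))
        td.Thread(target=lambda: stream_api_logs(client, api_name), daemon=True).start()
        time.sleep(log_seconds)
    except Exception:
        pass
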
Example #7
def test_async_api(
    printer: Callable,
    client: cx.Client,
    api: str,
    deploy_timeout: int = None,
    poll_retries: int = 5,
    poll_sleep_seconds: int = 1,
    api_config_name: str = "cortex_cpu.yaml",
    node_groups: List[str] = [],
):
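    """Deploy the single async API spec in `api_config_name` (optionally pinned
    to `node_groups`), submit sample.json, poll until the workload reports
    "completed" (or retries run out), validate the result payload, and dump API
    info and logs on failure before deleting the API."""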
    api_dir = TEST_APIS_DIR / api
    with open(str(api_dir / api_config_name)) as f:
        api_specs = yaml.safe_load(f)
    assert len(api_specs) == 1

    if len(node_groups) > 0:
        api_specs[0]["node_groups"] = node_groups

    expectations = None
    expectations_file = api_dir / "expectations.yaml"
    if expectations_file.exists():
        expectations = parse_expectations(str(expectations_file))

    api_name = api_specs[0]["name"]
    client.deploy(api_spec=api_specs[0])

    try:
        assert apis_ready(client=client,
                          api_names=[api_name],
                          timeout=deploy_timeout), f"apis {api_name} not ready"

        with open(str(api_dir / "sample.json")) as f:
            payload = json.load(f)

        response = post_request(client, api_name, payload)

        assert (
            response.status_code == HTTPStatus.OK
        ), f"workload submission status code: got {response.status_code}, expected {HTTPStatus.OK}"

        response_json = response.json()
        assert "id" in response_json

        request_id = response_json["id"]

        result_response = None
        for _ in range(poll_retries + 1):
            result_response = retrieve_async_result(client=client,
                                                    api_name=api_name,
                                                    request_id=request_id)

            if result_response.status_code != HTTPStatus.OK:
                time.sleep(poll_sleep_seconds)
                continue

            result_response_json = result_response.json()
            assert (
                "id" in result_response_json
            ), f"id key was not present in result response (response: {result_response_json})"
            assert (
                "status" in result_response_json
            ), f"status key was not present in result response (response: {result_response_json})"

            if result_response_json["status"] != "completed":
                time.sleep(poll_sleep_seconds)
                continue
            break

        assert (
            result_response.status_code == HTTPStatus.OK
        ), f"result retrieval status code: got {result_response.status_code}, expected {HTTPStatus.OK}"

        # validate keys are in the result json response
        assert (
            "result" in result_response_json
        ), f"result key was not present in result response (response: {result_response_json})"

        assert (
            "timestamp" in result_response_json
        ), f"timestamp key was not present in result response (response: {result_response_json})"

        # validate result json response has valid values
        assert (
            result_response_json["id"] == request_id
        ), f"result 'id' and request 'id' mismatch ({result_response_json['id']} != {request_id})"
        assert (
            result_response_json["status"] == "completed"
        ), f"async workload did not complete (response: {result_response_json})"
        assert result_response_json[
            "timestamp"] != "", "result 'timestamp' value was empty"
        assert result_response_json[
            "result"] != "", "result 'result' value was empty"

        # assert result expectations
        if expectations:
            assert_json_expectations(result_response_json["result"],
                                     expectations["response"])

    except:
        # best effort
        try:
            api_info = client.get_api(api_name)
            printer(json.dumps(api_info, indent=2))
            printer(json.dumps(result_response_json, indent=2))
            td.Thread(
                target=lambda: stream_api_logs(client, api_name),
                daemon=True,
            ).start()
            time.sleep(5)
        except:
            pass
        raise

    finally:
        delete_apis(client, [api_name])
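
Unlike the earlier async test, this variant keeps polling while the result endpoint already returns 200 but the workload has not yet reached "completed"; a sketch of that loop as a standalone helper, under the same assumptions about retrieve_async_result, time, and HTTPStatus as above:

def poll_async_result_until_completed(client, api_name, request_id, retries=5, sleep_seconds=1):
    # Keep polling until the result endpoint returns 200 OK and the workload
    # reports status "completed", or until the retry budget is exhausted; the
    # last response is returned either way so the caller can assert on it.
    response = None
    for _ in range(retries + 1):
        response = retrieve_async_result(client=client, api_name=api_name, request_id=request_id)
        if response.status_code == HTTPStatus.OK and response.json().get("status") == "completed":
            break
        time.sleep(sleep_seconds)
    return response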