def test_v2_canary_failing_lowerlimit_criteria():
    """Both versions fail when the sole objective carries only a lowerLimit of 500."""
    resource_dict = copy.deepcopy(er_example_step1)
    # Turn the step-1 fixture's upper-limit objective into a lower-limit one.
    objective = resource_dict['spec']['criteria']['objectives'][0]
    objective.pop('upperLimit')
    objective['lowerLimit'] = 500
    resource = ExperimentResource(**resource_dict)
    result = get_version_assessments(resource.convert_to_float())
    assert result.data == {'default': [False], 'canary': [False]}
def test_v2_va_without_mean_latency_metric():
    """A warning is surfaced when the mean-latency aggregated metric is absent."""
    resource_dict = copy.deepcopy(er_example_step1)
    aggregated_data = resource_dict['status']['analysis']['aggregatedMetrics']["data"]
    aggregated_data.pop('mean-latency', None)
    resource = ExperimentResource(**resource_dict)
    result = get_version_assessments(resource.convert_to_float())
    expected_message = (
        "Error: ; Warning: Aggregated metric object for mean-latency metric is unavailable.; Info: "
    )
    assert result.message == expected_message
# Example #3
def provide_version_assessments(experiment_resource: ExperimentResource = Body(
    ..., example=er_example_step1)):
    """
    POST iter8 2.0 experiment resource, whose status includes aggregated metrics,
    and obtain version assessments.
    \f
    :body er: ExperimentResource
    """
    # Normalize quantity-typed fields to floats before assessing versions.
    float_resource = experiment_resource.convert_to_float()
    return get_version_assessments(float_resource)
def test_v2_va_with_no_metric_value():
    """Both versions fail assessment when Prometheus returns no metric value.

    Mocks the Prometheus urlTemplate endpoint with an empty-response fixture,
    aggregates metrics from it, then feeds the aggregate into version
    assessment.
    """
    with requests_mock.mock(real_http=True) as mock:
        file_path = os.path.join(os.path.dirname(__file__),
                                 'data/prom_responses',
                                 'prometheus_sample_no_response.json')
        # Fix: load the fixture inside a context manager instead of
        # json.load(open(...)), which leaked the file handle.
        with open(file_path) as prom_file:
            prom_response = json.load(prom_file)
        mock.get(er_example["status"]["metrics"][0]["metricObj"]["spec"]
                 ["urlTemplate"],
                 json=prom_response)

        expr = ExperimentResource(**er_example)
        resp = get_aggregated_metrics(
            expr.convert_to_float()).convert_to_quantity()

        # Inject the (value-less) aggregated metrics into the step-1 fixture.
        example = copy.deepcopy(er_example_step1)
        example['status']['analysis']["aggregatedMetrics"] = resp
        expr = ExperimentResource(**example)
        resp2 = get_version_assessments(expr.convert_to_float())

        assert resp2.data == {'default': [False], 'canary': [False]}
def test_v2_abn_va_without_am():
    """Assessing an A/B/n resource lacking aggregated metrics may raise AttributeError.

    NOTE(review): the test also passes when no exception is raised at all —
    only a non-AttributeError exception fails it.
    """
    resource = ExperimentResource(**abn_er_example)
    try:
        get_version_assessments(resource.convert_to_float())
    except AttributeError:
        # Tolerated: aggregated metrics are absent from the fixture's status.
        pass
def test_v2_version_assessment_endpoint():
    """Smoke test: version assessment of the step-1 fixture completes without raising."""
    resource = ExperimentResource(**er_example_step1)
    get_version_assessments(resource.convert_to_float())
def test_v2_canary_failing_upperlimit_criteria():
    """In the unmodified step-1 fixture, default passes and canary fails the objective."""
    resource = ExperimentResource(**er_example_step1)
    result = get_version_assessments(resource.convert_to_float())
    assert result.data == {'default': [True], 'canary': [False]}