Example 1
def test_v2_abn_am_without_candidates():
    with requests_mock.mock(real_http=True) as mock:
        file_path = os.path.join(os.path.dirname(__file__),
                                 'data/prom_responses',
                                 'prometheus_sample_response.json')
        with open(file_path) as response_file:
            response_json = json.load(response_file)
        mock.get(abn_er_example["status"]["metrics"][0]["metricObj"]["spec"]
                 ["urlTemplate"],
                 json=response_json)
        example = copy.deepcopy(abn_er_example)
        del example['spec']['versionInfo']['candidates']
        expr = ExperimentResource(**example)
        # no assertion: aggregation should simply not raise without candidates
        get_aggregated_metrics(expr.convert_to_float()).convert_to_quantity()
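Every test in this listing uses the same requests_mock pattern: register a canned JSON payload on the metric's urlTemplate, then run the aggregation against the mock. Below is a minimal, self-contained sketch of that pattern, with a placeholder URL and payload standing in for the iter8 fixtures:

import requests
import requests_mock

# Placeholder endpoint; the tests above use the urlTemplate taken from the
# metric resource instead.
PROM_URL = "http://prometheus.example/api/v1/query"

with requests_mock.mock(real_http=True) as mock:
    mock.get(PROM_URL, json={"status": "success", "data": {"result": []}})
    resp = requests.get(PROM_URL)
    assert resp.json()["status"] == "success"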
Example 2
def provide_aggregated_metrics(ere: ExperimentResource = Body(
    ..., example=er_example)):
    """
    POST an iter8 2.0 experiment resource (with metric resources embedded in its status) and obtain aggregated metrics.
    \f
    :body ere: ExperimentResource
    """
    return get_aggregated_metrics(ere.convert_to_float()).convert_to_quantity()
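Example 2 defines a FastAPI endpoint, so it can be exercised with FastAPI's TestClient. The sketch below is illustrative only: the application module path and the route path are assumptions, not taken from the source.

from fastapi.testclient import TestClient

from iter8_analytics import fastapi_app  # hypothetical module path

client = TestClient(fastapi_app.app)  # assumes the ASGI app is fastapi_app.app
# The route path is also an assumption; use whatever path the service mounts
# for provide_aggregated_metrics. er_example is the fixture from the examples.
response = client.post("/aggregated_metrics", json=er_example)
assert response.status_code == 200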
Example 3
def provide_aggregated_metrics(
        ermr: ExperimentResourceAndMetricResources = Body(
            ..., example=ermr_example)):
    """
    POST an iter8 2.0 experiment resource together with metric resources and obtain aggregated metrics.
    \f
    :body ermr: ExperimentResourceAndMetricResources
    """
    aggregated_metrics = get_aggregated_metrics(ermr)
    return aggregated_metrics
Example 4
def test_mock_metrics():
    ercopy = copy.deepcopy(er_example)
    ercopy["status"]["metrics"] = mocked_mr_example
    expr = ExperimentResource(**ercopy)

    agm = get_aggregated_metrics(expr.convert_to_float())
    logger.info(agm)
    assert agm.data['request-count'].data['default'].value > 100.0
    assert agm.data['request-count'].data['canary'].value > 100.0
    assert agm.data['mean-latency'].data['default'].value > 15.0
    assert agm.data['mean-latency'].data['canary'].value > 9.0
Example 5
def test_v2_aggregated_metrics_endpoint():
    with requests_mock.mock(real_http=True) as mock:
        file_path = os.path.join(os.path.dirname(__file__),
                                 'data/prom_responses',
                                 'prometheus_sample_response.json')
        with open(file_path) as response_file:
            response_json = json.load(response_file)
        mock.get(er_example["status"]["metrics"][0]["metricObj"]["spec"]
                 ["urlTemplate"],
                 json=response_json)

        expr = ExperimentResource(**er_example)
        agm = get_aggregated_metrics(
            expr.convert_to_float()).convert_to_quantity()
        assert agm.data['request-count'].data['default'].value == \
            response_json['data']['result'][0]['value'][1]

        # aggregation should still succeed when the status lists no metrics
        ercopy = copy.deepcopy(er_example)
        del ercopy["status"]["metrics"]
        expr = ExperimentResource(**ercopy)
        agm = get_aggregated_metrics(
            expr.convert_to_float()).convert_to_quantity()
Example 6
def test_mock_metrics_with_negative_elapsed():
    ercopy = copy.deepcopy(er_example)
    ercopy["status"]["metrics"] = mocked_mr_example
    expr = ExperimentResource(**ercopy)
    # a start time 10 hours in the future forces a negative elapsed time
    expr.status.startTime = datetime.now(timezone.utc) + timedelta(hours=10)

    agm = get_aggregated_metrics(expr.convert_to_float())
    logger.info(agm)
    assert agm.data['request-count'].data['default'].value > 0
    assert agm.data['request-count'].data['canary'].value > 0
    assert agm.data['mean-latency'].data['default'].value > 0
    assert agm.data['mean-latency'].data['canary'].value > 0
Example 7
def get_analytics_results(exp_res: ExperimentResource):
    """
    Get analysis results using experiment resource and metric resources.
    """
    exp_res.status.analysis = Analysis()
    exp_res.status.analysis.aggregated_metrics = get_aggregated_metrics(
        exp_res)
    exp_res.status.analysis.version_assessments = get_version_assessments(
        exp_res)
    exp_res.status.analysis.winner_assessment = get_winner_assessment(exp_res)
    exp_res.status.analysis.weights = get_weights(exp_res)
    return exp_res.status.analysis
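get_analytics_results chains the four analysis stages in order, and each stage may read what the previous stages wrote onto exp_res.status.analysis. Below is an illustrative call reusing the names from the examples above; whether the resource must first pass through convert_to_float, as the aggregated-metrics tests do, is an assumption here:

expr = ExperimentResource(**er_example)
analysis = get_analytics_results(expr.convert_to_float())  # conversion assumed

# The returned Analysis object carries one field per stage.
print(analysis.aggregated_metrics)
print(analysis.version_assessments)
print(analysis.winner_assessment)
print(analysis.weights)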
Example 8
def test_v2_no_prometheus_response():
    with requests_mock.mock(real_http=True) as mock:
        file_path = os.path.join(os.path.dirname(__file__),
                                 'data/prom_responses',
                                 'prometheus_sample_no_response.json')
        with open(file_path) as response_file:
            no_response_json = json.load(response_file)
        mock.get(er_example["status"]["metrics"][0]["metricObj"]["spec"]
                 ["urlTemplate"],
                 json=no_response_json)

        expr = ExperimentResource(**er_example)
        resp = get_aggregated_metrics(
            expr.convert_to_float()).convert_to_quantity()
        expected_response = {
            "request-count": {
                "max": None,
                "min": None,
                "data": {
                    "default": {
                        "max": None,
                        "min": None,
                        "sample_size": None,
                        "value": None
                    },
                    "canary": {
                        "max": None,
                        "min": None,
                        "sample_size": None,
                        "value": None
                    }
                }
            },
            "mean-latency": {
                "max": None,
                "min": None,
                "data": {
                    "default": {
                        "max": None,
                        "min": None,
                        "sample_size": None,
                        "value": None
                    },
                    "canary": {
                        "max": None,
                        "min": None,
                        "sample_size": None,
                        "value": None
                    }
                }
            }
        }
        assert resp.data == expected_response
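With an empty Prometheus response, every leaf value comes back as None, as expected_response shows. The helper below is not part of the source; it is a small illustrative sketch that flattens such an object using the same .data[...].data[...].value access path as the other tests:

def flatten_metrics(agm):
    """Yield (metric, version, value) triples; value may be None."""
    for metric_name, metric_info in agm.data.items():
        for version_name, version_info in metric_info.data.items():
            yield metric_name, version_name, version_info.value

# Usage with resp from the test above: keep versions that reported a value.
observed = [row for row in flatten_metrics(resp) if row[2] is not None]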
Example 9
def get_analytics_results(expr: ExperimentResource):
    """
    Get analysis results using experiment resource and metric resources.
    """
    # if the experiment already contains aggregated builtin metric histograms, retain them
    ana = Analysis()
    if expr.status.analysis is not None:
        ana.aggregated_builtin_hists = expr.status.analysis.aggregated_builtin_hists
    expr.status.analysis = ana
    expr.status.analysis.aggregated_metrics = get_aggregated_metrics(expr)
    expr.status.analysis.version_assessments = get_version_assessments(expr)
    expr.status.analysis.winner_assessment = get_winner_assessment(expr)
    expr.status.analysis.weights = get_weights(expr)
    return expr.status.analysis
Example 10
def test_v2_abn_using_previous_metric_status_none():
    with requests_mock.mock(real_http=True) as mock:
        file_path = os.path.join(os.path.dirname(__file__),
                                 'data/prom_responses',
                                 'prometheus_sample_no_response.json')
        with open(file_path) as response_file:
            no_response_json = json.load(response_file)
        mock.get(abn_er_example["status"]["metrics"][0]["metricObj"]["spec"]
                 ["urlTemplate"],
                 json=no_response_json)

        example = copy.deepcopy(abn_er_example)

        # keep only the first two metric resources from the fixture
        example['status']['metrics'] = abn_mr_example[:2]
        expr = ExperimentResource(**example)
        resp = get_aggregated_metrics(
            expr.convert_to_float()).convert_to_quantity()
        assert resp.data['mean-latency'].data['default'].value is None
Example 11
def test_v2_va_with_no_metric_value():
    with requests_mock.mock(real_http=True) as mock:
        file_path = os.path.join(os.path.dirname(__file__),
                                 'data/prom_responses',
                                 'prometheus_sample_no_response.json')
        with open(file_path) as response_file:
            no_response_json = json.load(response_file)
        mock.get(er_example["status"]["metrics"][0]["metricObj"]["spec"]
                 ["urlTemplate"],
                 json=no_response_json)

        expr = ExperimentResource(**er_example)
        resp = get_aggregated_metrics(
            expr.convert_to_float()).convert_to_quantity()

        example = copy.deepcopy(er_example_step1)
        example['status']['analysis']["aggregatedMetrics"] = resp
        expr = ExperimentResource(**example)
        resp2 = get_version_assessments(expr.convert_to_float())

        assert resp2.data == {'default': [False], 'canary': [False]}
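Note that the mocked Prometheus response carries no data, so neither version has a metric value; each criterion is therefore assessed False for both versions, which is exactly the {'default': [False], 'canary': [False]} the test expects.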