Example 1

from collections import ChainMap
from datetime import datetime

# send_metrics is provided elsewhere in the enclosing library.


def aggregate_and_send_metrics(url: str, app_name: str, instance_id: str,
                               custom_headers: dict, features: dict,
                               last_sent: datetime) -> datetime:
    feature_stats_list = []

    for feature in features.values():
        feature_stats = {
            feature.name: {
                "yes": feature.yes_count,
                "no": feature.no_count
            }
        }

        feature.reset_stats()
        feature_stats_list.append(feature_stats)

    metrics_request = {
        "appName": app_name,
        "instanceId": instance_id,
        "bucket": {
            "start": last_sent.isoformat(),
            "stop": datetime.now().isoformat(),
            "toggles": dict(ChainMap(*feature_stats_list))
        }
    }

    send_metrics(url, metrics_request, custom_headers)

    # Rebinding the `last_sent` parameter would only update the local name,
    # so return the new timestamp and let the caller persist it.
    return datetime.now()
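Since the function returns the fresh timestamp, a caller has to capture it to keep the bucket start times accurate. A minimal driver sketch, where the URL, app name, and `features` registry are hypothetical placeholders and the 60-second interval is an arbitrary illustrative choice:

import time
from datetime import datetime

# Hypothetical wiring: `features` and the URL are placeholders.
last_sent = datetime.now()
while True:
    time.sleep(60)  # illustrative interval between submissions
    last_sent = aggregate_and_send_metrics(
        "https://unleash.example.com/api", "my-app", "instance-1",
        {}, features, last_sent)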
Example 2

from collections import ChainMap
from datetime import datetime, timezone

# send_metrics and the METRIC_LAST_SENT_TIME cache key are defined
# elsewhere in the enclosing library.


def aggregate_and_send_metrics(url, app_name, instance_id, custom_headers,
                               features, ondisk_cache):
    feature_stats_list = []

    for feature in features.values():
        feature_stats = {
            feature.name: {
                "yes": feature.yes_count,
                "no": feature.no_count
            }
        }

        feature.reset_stats()
        feature_stats_list.append(feature_stats)

    metrics_request = {
        "appName": app_name,
        "instanceId": instance_id,
        "bucket": {
            "start": ondisk_cache[METRIC_LAST_SENT_TIME].isoformat(),
            "stop": datetime.now(timezone.utc).isoformat(),
            "toggles": dict(ChainMap(*feature_stats_list))
        }
    }

    send_metrics(url, metrics_request, custom_headers)
    ondisk_cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
    ondisk_cache.sync()
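`ondisk_cache` here only needs mapping-style access plus a `sync()` method to flush to disk. A minimal sketch using the standard library's `shelve` module as that cache; the key constant, file name, and `features` registry are illustrative assumptions:

import shelve
from datetime import datetime, timezone

METRIC_LAST_SENT_TIME = "metric_last_sent_time"  # assumed cache key

cache = shelve.open("unleash_cache")  # mapping-like and supports sync()
if METRIC_LAST_SENT_TIME not in cache:
    # Seed the marker so the first bucket has a valid start time.
    cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)

aggregate_and_send_metrics("https://unleash.example.com/api", "my-app",
                           "instance-1", {}, features, cache)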
Example 3
import responses

# FULL_METRICS_URL, URL, MOCK_METRICS_REQUEST, and CUSTOM_HEADERS are
# fixtures defined in the surrounding test module.


@responses.activate
def test_send_metrics_failure():
    responses.add(responses.POST, FULL_METRICS_URL, json={}, status=500)

    result = send_metrics(URL, MOCK_METRICS_REQUEST, CUSTOM_HEADERS)

    assert len(responses.calls) == 1
    assert not result
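This test treats an HTTP 500 as a falsy result from `send_metrics`, and the exception test further below expects the same for connection errors. A minimal sketch of a `send_metrics` consistent with that contract, not the library's actual implementation; the `/client/metrics` suffix is an assumption about how FULL_METRICS_URL relates to URL:

import requests


def send_metrics(url: str, request_body: dict, custom_headers: dict) -> bool:
    """POST a metrics bucket and report success as a bool instead of raising."""
    try:
        resp = requests.post(url + "/client/metrics",
                             json=request_body, headers=custom_headers)
        return resp.ok
    except requests.RequestException:
        return False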
Example 4
# `payload`, `status`, and `expected` are supplied by a pytest
# parametrization (see the sketch after this test).
@responses.activate
def test_send_metrics(payload, status, expected):
    responses.add(responses.POST, FULL_METRICS_URL, **payload, status=status)

    result = send_metrics(URL, MOCK_METRICS_REQUEST, CUSTOM_HEADERS, CUSTOM_OPTIONS)

    assert len(responses.calls) == 1
    assert expected(result)
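The three arguments imply a `pytest.mark.parametrize` decorator that this excerpt does not show. A hypothetical parametrization with the same shape; the concrete cases are assumptions, not the original data:

import pytest
import responses


# Hypothetical cases with the same shape as the arguments above; the
# original parametrization was not captured in this excerpt.
@pytest.mark.parametrize("payload,status,expected", [
    ({"json": {}}, 202, lambda result: result),      # accepted -> truthy
    ({"json": {}}, 500, lambda result: not result),  # error -> falsy
])
@responses.activate
def test_send_metrics(payload, status, expected):
    ...  # body as in the test above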
Example 5
from requests.exceptions import ConnectionError


@responses.activate
def test_send_metrics_exception():
    responses.add(responses.POST,
                  FULL_METRICS_URL,
                  body=ConnectionError("Test connection error."),
                  status=200)

    result = send_metrics(URL, MOCK_METRICS_REQUEST, CUSTOM_HEADERS)

    assert len(responses.calls) == 1
    assert not result
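Passing an exception instance as `body` tells `responses` to raise it when the mocked POST fires, so no HTTP response is ever produced and the `status=200` argument is effectively unused; the assertion then verifies that `send_metrics` catches the connection error and reports failure as a falsy result rather than letting it propagate.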
Example 6
import pickle
from collections import ChainMap
from datetime import datetime, timezone

import redis

# send_metrics and the METRIC_LAST_SENT_TIME cache key are defined
# elsewhere in the enclosing library.


def aggregate_and_send_metrics(url: str,
                               app_name: str,
                               instance_id: str,
                               custom_headers: dict,
                               custom_options: dict,
                               features: dict,
                               ondisk_cache: redis.Redis
                               ) -> None:
    feature_stats_list = []

    for feature in features.values():
        feature_stats = {
            feature.name: {
                "yes": feature.yes_count,
                "no": feature.no_count
            }
        }

        feature.reset_stats()
        feature_stats_list.append(feature_stats)

    # `get` returns None for a missing key and pickle.loads(None) raises,
    # so the marker must be seeded before the first run (see the sketch
    # after this example).
    metric_last_sent_time = pickle.loads(
        ondisk_cache.get(METRIC_LAST_SENT_TIME)
    )

    metrics_request = {
        "appName": app_name,
        "instanceId": instance_id,
        "bucket": {
            "start": metric_last_seen_time.isoformat(),
            "stop": datetime.now(timezone.utc).isoformat(),
            "toggles": dict(ChainMap(*feature_stats_list))
        }
    }

    send_metrics(url, metrics_request, custom_headers, custom_options)
    ondisk_cache.set(
        METRIC_LAST_SENT_TIME,
        pickle.dumps(datetime.now(timezone.utc))
    )
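Because `redis.Redis.get` returns None for a missing key, the pickled marker must exist before the first aggregation runs. A minimal seeding sketch, assuming a local Redis instance and an illustrative key constant:

import pickle
from datetime import datetime, timezone

import redis

METRIC_LAST_SENT_TIME = "metric_last_sent_time"  # assumed cache key

client = redis.Redis(host="localhost", port=6379)
if client.get(METRIC_LAST_SENT_TIME) is None:
    # Seed the marker so pickle.loads() has bytes to work with.
    client.set(METRIC_LAST_SENT_TIME,
               pickle.dumps(datetime.now(timezone.utc)))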
Example 7

import logging
from collections import ChainMap
from datetime import datetime, timezone

# send_metrics, BaseCache, and the METRIC_LAST_SENT_TIME cache key are
# defined elsewhere in the enclosing library.
LOGGER = logging.getLogger(__name__)


def aggregate_and_send_metrics(url: str, app_name: str, instance_id: str,
                               custom_headers: dict, custom_options: dict,
                               features: dict, cache: BaseCache) -> None:
    feature_stats_list = []

    for feature in features.values():
        # Skip flags that have not been evaluated since the last send.
        if not (feature.yes_count or feature.no_count):
            continue

        feature_stats = {
            feature.name: {
                "yes": feature.yes_count,
                "no": feature.no_count
            }
        }

        feature.reset_stats()
        feature_stats_list.append(feature_stats)

    if feature_stats_list:
        metrics_request = {
            "appName": app_name,
            "instanceId": instance_id,
            "bucket": {
                "start": cache.get(METRIC_LAST_SENT_TIME).isoformat(),
                "stop": datetime.now(timezone.utc).isoformat(),
                "toggles": dict(ChainMap(*feature_stats_list))
            }
        }

        send_metrics(url, metrics_request, custom_headers, custom_options)
        cache.set(METRIC_LAST_SENT_TIME, datetime.now(timezone.utc))
    else:
        LOGGER.debug(
            "No feature flags with metrics, skipping metrics submission.")