Example #1
    def serialize(context: MetricsContext) -> List[str]:
        config = get_config()

        dimension_keys = []
        dimensions_properties: Dict[str, str] = {}

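        # Capture up to MAX_DIMENSIONS key names per dimension set and merge all key/value pairs into the body properties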
        for dimension_set in context.get_dimensions():
            keys = list(dimension_set.keys())
            dimension_keys.append(keys[0:MAX_DIMENSIONS])
            dimensions_properties = {**dimensions_properties, **dimension_set}

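        # Build a fresh event body; the "_aws" EMF metadata block is only attached when metric extraction is enabled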
        def create_body() -> Dict[str, Any]:
            body: Dict[str, Any] = {
                **dimensions_properties,
                **context.properties,
            }
            if not config.disable_metric_extraction:
                body["_aws"] = {
                    **context.meta,
                    "CloudWatchMetrics": [
                        {
                            "Dimensions": dimension_keys,
                            "Metrics": [],
                            "Namespace": context.namespace,
                        },
                    ],
                }
            return body

        current_body: Dict[str, Any] = create_body()
        event_batches: List[str] = []
        num_metrics_in_current_body = 0

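        # Add metric values and declarations, flushing a batch once MAX_METRICS_PER_EVENT metrics have been added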
        for metric_name, metric in context.metrics.items():

            if len(metric.values) == 1:
                current_body[metric_name] = metric.values[0]
            else:
                current_body[metric_name] = metric.values

            if not config.disable_metric_extraction:
                current_body["_aws"]["CloudWatchMetrics"][0]["Metrics"].append(
                    {
                        "Name": metric_name,
                        "Unit": metric.unit
                    })

            num_metrics_in_current_body += 1

            should_serialize: bool = num_metrics_in_current_body == MAX_METRICS_PER_EVENT
            if should_serialize:
                event_batches.append(json.dumps(current_body))
                current_body = create_body()
                num_metrics_in_current_body = 0

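        # Serialize any partially filled body; emit at least one event even when no metrics were added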
        if not event_batches or num_metrics_in_current_body > 0:
            event_batches.append(json.dumps(current_body))

        return event_batches
Example #2
    def serialize(context: MetricsContext) -> List[str]:
        dimension_keys = []
        dimensions_properties: Dict[str, str] = {}

        for dimension_set in context.get_dimensions():
            keys = list(dimension_set.keys())
            dimension_keys.append(keys[0:MAX_DIMENSIONS])
            dimensions_properties = {**dimensions_properties, **dimension_set}

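        # Build a fresh event body with the "_aws" EMF metadata envelope always included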
        def create_body() -> Dict[str, Any]:
            return {
                **dimensions_properties,
                **context.properties,
                "_aws": {
                    **context.meta,
                    "CloudWatchMetrics": [
                        {
                            "Dimensions": dimension_keys,
                            "Metrics": [],
                            "Namespace": context.namespace,
                        },
                    ],
                },
            }

        current_body: Dict[str, Any] = create_body()
        event_batches: List[str] = []

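        # Flush a batch whenever the embedded Metrics array reaches MAX_METRICS_PER_EVENT entries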
        for metric_name, metric in context.metrics.items():

            if len(metric.values) == 1:
                current_body[metric_name] = metric.values[0]
            else:
                current_body[metric_name] = metric.values

            current_body["_aws"]["CloudWatchMetrics"][0]["Metrics"].append({
                "Name":
                metric_name,
                "Unit":
                metric.unit
            })

            should_serialize: bool = (
                len(current_body["_aws"]["CloudWatchMetrics"][0]["Metrics"])
                == MAX_METRICS_PER_EVENT
            )
            if should_serialize:
                event_batches.append(json.dumps(current_body))
                current_body = create_body()

        if not event_batches or current_body["_aws"]["CloudWatchMetrics"][0]["Metrics"]:
            event_batches.append(json.dumps(current_body))

        return event_batches
Example #3
def test_get_dimensions_returns_default_dimensions_if_custom_dimensions_not_set():
    # arrange
    context = MetricsContext()
    dimension_key = fake.word()
    dimension_value = fake.word()
    expected_dimensions = {dimension_key: dimension_value}
    context.set_default_dimensions(expected_dimensions)

    # act
    actual_dimensions = context.get_dimensions()

    # assert
    assert [expected_dimensions] == actual_dimensions
Example #4
    def serialize(context: MetricsContext) -> List[str]:
        dimension_keys = []
        dimensions_properties: Dict[str, str] = {}

        for dimension_set in context.get_dimensions():
            keys = list(dimension_set.keys())
            dimension_keys.append(keys[0:MAX_DIMENSIONS])
            dimensions_properties = {**dimensions_properties, **dimension_set}

        metric_pointers: List[Dict[str, str]] = []

        metric_definitions = {
            "Dimensions": dimension_keys,
            "Metrics": metric_pointers,
            "Namespace": context.namespace,
        }
        cloud_watch_metrics = [metric_definitions]

        event_batches: List[str] = []

        body: Dict[str, Any] = {
            **dimensions_properties,
            **context.properties,
            "_aws": {
                **context.meta, "CloudWatchMetrics": cloud_watch_metrics
            },
        }

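        # metric_pointers is the same list referenced from body["_aws"], so each flush creates a new list and re-links it into the body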
        for metric_name, metric in context.metrics.items():

            if len(metric.values) == 1:
                body[metric_name] = metric.values[0]
            else:
                body[metric_name] = metric.values

            metric_pointers.append({"Name": metric_name, "Unit": metric.unit})

            should_serialize: bool = len(metric_pointers) == MAX_METRICS_PER_EVENT
            if should_serialize:
                event_batches.append(json.dumps(body))
                metric_pointers = []
                body["_aws"]["CloudWatchMetrics"][0][
                    "Metrics"] = metric_pointers

        if not event_batches or metric_pointers:
            event_batches.append(json.dumps(body))

        return event_batches
Example #5
def test_get_dimensions_returns_merged_custom_and_default_dimensions():
    # arrange
    context = MetricsContext()
    custom_dimension_key = fake.word()
    custom_dimension_value = fake.word()

    default_dimension_key = fake.word()
    default_dimension_value = fake.word()

    expected_dimensions = {
        default_dimension_key: default_dimension_value,
        custom_dimension_key: custom_dimension_value,
    }

    context.set_default_dimensions(
        {default_dimension_key: default_dimension_value})
    context.put_dimensions({custom_dimension_key: custom_dimension_value})

    # act
    actual_dimensions = context.get_dimensions()

    # assert
    assert [expected_dimensions] == actual_dimensions
Example #6
    def serialize(context: MetricsContext) -> str:
        dimension_keys = []
        dimensions_properties: Dict[str, str] = {}

        for dimension_set in context.get_dimensions():
            keys = list(dimension_set.keys())
            dimension_keys.append(keys[0:MAX_DIMENSIONS])
            dimensions_properties = {**dimensions_properties, **dimension_set}

        metric_pointers: List[Dict[str, str]] = []

        metric_definitions = {
            "Dimensions": dimension_keys,
            "Metrics": metric_pointers,
            "Namespace": context.namespace,
        }
        cloud_watch_metrics = [metric_definitions]

        body: Dict[str, Any] = {
            **dimensions_properties,
            **context.properties,
            "_aws": {
                **context.meta, "CloudWatchMetrics": cloud_watch_metrics
            },
        }

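        # Single-event variant: every metric is written into one body and returned as a single JSON string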
        for metric_name, metric in context.metrics.items():

            if len(metric.values) == 1:
                body[metric_name] = metric.values[0]
            else:
                body[metric_name] = metric.values

            metric_pointers.append({"Name": metric_name, "Unit": metric.unit})

        return json.dumps(body)
Example #7
    def serialize(context: MetricsContext) -> List[str]:
        config = get_config()

        dimension_keys = []
        dimensions_properties: Dict[str, str] = {}

        for dimension_set in context.get_dimensions():
            keys = list(dimension_set.keys())
            dimension_keys.append(keys[0:MAX_DIMENSIONS])
            dimensions_properties = {**dimensions_properties, **dimension_set}

        def create_body() -> Dict[str, Any]:
            body: Dict[str, Any] = {
                **dimensions_properties,
                **context.properties,
            }
            if not config.disable_metric_extraction:
                body["_aws"] = {
                    **context.meta,
                    "CloudWatchMetrics": [
                        {
                            "Dimensions": dimension_keys,
                            "Metrics": [],
                            "Namespace": context.namespace,
                        },
                    ],
                }
            return body

        current_body: Dict[str, Any] = {}
        event_batches: List[str] = []
        num_metrics_in_current_body = 0

        # Track if any given metric has data remaining to be serialized
        remaining_data = True

        # Track batch number to know where to slice metric data
        i = 0

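        # Each pass serializes at most MAX_DATAPOINTS_PER_METRIC datapoints per metric; loop until all values are consumed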
        while remaining_data:
            remaining_data = False
            current_body = create_body()

            for metric_name, metric in context.metrics.items():

                if len(metric.values) == 1:
                    current_body[metric_name] = metric.values[0]
                else:
                    # Slice metric data as each batch cannot contain more than
                    # MAX_DATAPOINTS_PER_METRIC entries for a given metric
                    start_index = i * MAX_DATAPOINTS_PER_METRIC
                    end_index = (i + 1) * MAX_DATAPOINTS_PER_METRIC
                    current_body[metric_name] = metric.values[start_index:end_index]

                    # Make sure to consume remaining values if we sliced before the end
                    # of the metric value list
                    if len(metric.values) > end_index:
                        remaining_data = True

                if not config.disable_metric_extraction:
                    current_body["_aws"]["CloudWatchMetrics"][0][
                        "Metrics"].append({
                            "Name": metric_name,
                            "Unit": metric.unit
                        })
                num_metrics_in_current_body += 1

                if num_metrics_in_current_body == MAX_METRICS_PER_EVENT:
                    event_batches.append(json.dumps(current_body))
                    current_body = create_body()
                    num_metrics_in_current_body = 0

            # Advance to the next slice of datapoints for the following pass
            i += 1
            if not event_batches or num_metrics_in_current_body > 0:
                event_batches.append(json.dumps(current_body))

        return event_batches