Example 1
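    # Excerpted test methods: `mock`, `utils`, `client`, `APIClient`,
    # `Datapoint`, and `TimeseriesWithDatapoints` are assumed to be
    # imported at module level in the original test module.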
    def test_with_timeserieswithdatapoints(self):
        timeseries_with_100_datapoints = TimeseriesWithDatapoints(
            name="test", datapoints=[Datapoint(x, x) for x in range(100)])
        timeseries_with_200_datapoints = TimeseriesWithDatapoints(
            name="test", datapoints=[Datapoint(x, x) for x in range(200)])
        timeseries_with_300_datapoints = TimeseriesWithDatapoints(
            name="test", datapoints=[Datapoint(x, x) for x in range(300)])

        all_timeseries = [
            timeseries_with_100_datapoints,
            timeseries_with_200_datapoints,
            timeseries_with_300_datapoints,
        ]

        result = utils.first_fit(list_items=all_timeseries,
                                 max_size=300,
                                 get_count=lambda x: len(x.datapoints))

        assert len(result) == 2

    def test_post_multitag_datapoints(self):
        timeseries_with_too_many_datapoints = TimeseriesWithDatapoints(
            name="test", datapoints=[Datapoint(x, x) for x in range(100001)]
        )
        timeseries_with_99999_datapoints = TimeseriesWithDatapoints(
            name="test", datapoints=[Datapoint(x, x) for x in range(99999)]
        )

        with mock.patch.object(APIClient, "_post") as post_request_mock:
            client.datapoints.post_multi_time_series_datapoints([timeseries_with_too_many_datapoints])
            assert post_request_mock.call_count == 2

        with mock.patch.object(APIClient, "_post") as post_request_mock:
            client.datapoints.post_multi_time_series_datapoints(
                [timeseries_with_99999_datapoints, timeseries_with_too_many_datapoints]
            )
            assert post_request_mock.call_count == 2

    def test_split_TimeseriesWithDatapoints_if_over_limit(self):
        timeseries_with_datapoints_over_limit = TimeseriesWithDatapoints(
            name="test", datapoints=[Datapoint(x, x) for x in range(1000)])

        result = client.datapoints._split_TimeseriesWithDatapoints_if_over_limit(
            timeseries_with_datapoints_over_limit, 100)

        assert isinstance(result[0], TimeseriesWithDatapoints)
        assert len(result) == 10

        result = client.datapoints._split_TimeseriesWithDatapoints_if_over_limit(
            timeseries_with_datapoints_over_limit, 1000)

        assert isinstance(result[0], TimeseriesWithDatapoints)
        assert len(result) == 1
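
Neither `utils.first_fit` nor `_split_TimeseriesWithDatapoints_if_over_limit` appears in the excerpt, but the assertions pin down their behavior. A minimal sketch of `first_fit`, assuming it does first-fit bin packing over datapoint counts and returns a list of bins:

    def first_fit(list_items, max_size, get_count):
        # Put each item into the first bin that still has room;
        # open a new bin when none does.
        bins, counts = [], []
        for item in list_items:
            size = get_count(item)
            for i, used in enumerate(counts):
                if used + size <= max_size:
                    bins[i].append(item)
                    counts[i] += size
                    break
            else:
                bins.append([item])
                counts.append(size)
        return bins

With items of 100, 200, and 300 datapoints and max_size=300, the first two share a bin and the third opens a second, matching `len(result) == 2`. Splitting one oversized series is then plain chunking; the sketch below is a stand-in, not the SDK's actual implementation:

    def split_if_over_limit(timeseries, limit):
        # Slice the datapoints into chunks of at most `limit` each,
        # keeping the series name on every chunk.
        return [
            TimeseriesWithDatapoints(name=timeseries.name,
                                     datapoints=timeseries.datapoints[i:i + limit])
            for i in range(0, len(timeseries.datapoints), limit)
        ]

A 1,000-point series splits into 10 chunks at limit=100 and stays whole at limit=1000, as the third test asserts. The second test is consistent with a 100,000-datapoints-per-request ceiling: 100,001 points need two posts, and 99,999 + 100,001 points pack into two posts as well.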
Example 4
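    # Excerpt of a nested coroutine: `pandas`, `logger`, `client`,
    # `parse_csv`, `convert_float`, `post_datapoints`, `BATCH_MAX`,
    # `current_time_series`, and `existing_timeseries` come from the
    # enclosing scope or module (see the sketch after the code).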
    async def process_data(path):
        nonlocal current_time_series
        df = parse_csv(path)
        if df is not None:
            # Convert epoch seconds in the CSV index to millisecond timestamps.
            timestamps = [int(o) * 1000 for o in df.index.tolist()]
            count_of_data_points = 0

            for col in df:
                # Flush the accumulated batch before it grows past the limit.
                if len(current_time_series) >= BATCH_MAX:
                    post_datapoints()

                # Column headers are assumed to look like "<external_id>:<name>".
                name = str(col.rpartition(":")[2].strip())
                external_id = str(col.rpartition(":")[0].strip())

                if external_id in existing_timeseries:
                    data_points = []

                    for i, value in enumerate(df[col].tolist()):
                        if pandas.notnull(value):
                            value = convert_float(value)
                            if value is not None:
                                data_points.append(
                                    Datapoint(timestamp=timestamps[i],
                                              value=value))

                    if data_points:
                        current_time_series.append(
                            TimeseriesWithDatapoints(
                                name=existing_timeseries[external_id],
                                datapoints=data_points))
                        count_of_data_points += len(data_points)
                else:
                    logger.warning("%s does not exist", name)
                    # TODO: create the missing time series

            if current_time_series:
                post_datapoints()

            logger.info("Processed %d datapoints from %s",
                        count_of_data_points, path)
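
The scaffolding around `process_data` is outside the excerpt. A minimal sketch of the two assumed helpers, with `post_datapoints` flushing the shared batch and `convert_float` coercing CSV cells, under the assumption that the batch list lives in the enclosing scope:

    def post_datapoints():
        # Post the accumulated batch, then empty the list in place so the
        # object bound by `nonlocal current_time_series` stays the same.
        client.datapoints.post_multi_time_series_datapoints(current_time_series)
        current_time_series.clear()

    def convert_float(value):
        # Coerce a cell to float; return None for unparseable values so
        # the caller can skip them.
        try:
            return float(value)
        except (TypeError, ValueError):
            return None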