    def _get_dps_results(self) -> List[DatapointsList]:
        def custom_sort_key(x):
            # Order datapoint chunks by their first timestamp; empty chunks sort first.
            if x.timestamp:
                return x.timestamp[0]
            return 0

        # One placeholder per query id; a comprehension ensures each entry is a distinct object.
        dps_lists = [DatapointsList([], cognite_client=self.client._cognite_client) for _ in self.query_ids]
        for q_id, dps_objects in self.query_id_to_datapoints_objects.items():
            # Group the fetched datapoint chunks by time series id.
            ts_id_to_dps_objects = defaultdict(list)
            for dps_object in dps_objects:
                ts_id_to_dps_objects[dps_object.id].append(dps_object)

            dps_list = DatapointsList([], cognite_client=self.client._cognite_client)
            for ts_id, ts_dps_objects in ts_id_to_dps_objects.items():
                dps = Datapoints()
                for dps_object in sorted(ts_dps_objects, key=custom_sort_key):
                    dps._extend(dps_object)
                if self.query_id_to_include_outside_points[q_id]:
                    # Adjacent request windows can both return the boundary points when
                    # outside points are included, so duplicates are dropped here.
                    dps = self._remove_duplicates(dps)
                query_limit = self.query_id_to_limit[q_id]
                if query_limit and len(dps) > query_limit:
                    # Paging may fetch slightly more than requested; trim to the limit.
                    dps = dps[:query_limit]
                dps_list.append(dps)
            dps_list = self._sort_dps_list_by_task_order(dps_list, self.query_id_to_tasks[q_id])
            dps_lists[self.query_ids.index(q_id)] = dps_list
        return dps_lists
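
    # Sketch of the paging strategy implemented below: each request asks for at most one
    # full page (_DPS_LIMIT_AGG datapoints for aggregates, _DPS_LIMIT for raw data), the
    # window start is then moved just past the last received timestamp, and the loop stops
    # on a short or empty page, an exhausted time window, or a reached limit. A page may
    # still overshoot the limit; _get_dps_results above trims the result to the query limit.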
    def _get_datapoints_with_paging(
        self,
        start: int,
        end: int,
        ts_item: Dict[str, Any],
        aggregates: List[str],
        granularity: str,
        include_outside_points: bool,
        limit: int,
    ) -> Datapoints:
        is_aggregated = aggregates or "aggregates" in ts_item
        per_request_limit = self.client._DPS_LIMIT_AGG if is_aggregated else self.client._DPS_LIMIT
        limit_next_request = per_request_limit
        next_start = start
        datapoints = Datapoints()
        all_datapoints = Datapoints()
        # Keep paging while the previous page was full (or nothing has been fetched yet),
        # the time window is not exhausted, and the user limit has not been reached.
        while (
            (len(all_datapoints) == 0 or len(datapoints) == per_request_limit)
            and end > next_start
            and len(all_datapoints) < (limit or float("inf"))
        ):
            datapoints = self._get_datapoints(
                next_start, end, ts_item, aggregates, granularity, include_outside_points, limit_next_request
            )
            if len(datapoints) == 0:
                break

            if limit:
                # Request only what remains of the user-provided limit once fewer than a
                # full page is left (all_datapoints does not yet include this page).
                remaining_datapoints = limit - len(all_datapoints) - len(datapoints)
                if remaining_datapoints < per_request_limit:
                    limit_next_request = remaining_datapoints
            # Advance just past the last received timestamp: one granularity step for
            # aggregates, one millisecond for raw datapoints.
            latest_timestamp = int(datapoints.timestamp[-1])
            next_start = latest_timestamp + (
                cognite.client.utils._time.granularity_to_ms(granularity) if granularity else 1
            )
            all_datapoints._extend(datapoints)
        return all_datapoints
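
    # The synthetic query endpoint used below is paged as well: each POST to
    # _SYNTHETIC_RESOURCE_PATH + "/query" returns at most _DPS_LIMIT datapoints, the next
    # request starts one millisecond past the last returned timestamp, and the loop stops
    # once a short page comes back or the requested limit is exhausted.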
    def retrieve(
        self, expression: str, start: Union[int, str, datetime], end: Union[int, str, datetime], limit: int = None
    ) -> Datapoints:
        """Calculate the result of a function on time series.

        Args:
            expression (str): Function to be calculated.
            start (Union[int, str, datetime]): Inclusive start.
            end (Union[int, str, datetime]): Exclusive end.
            limit (int): Maximum number of datapoints to return. Defaults to no limit.

        Returns:
            Datapoints: A Datapoints object containing the calculated data.

        Examples:

                >>> from cognite.client.experimental import CogniteClient
                >>> c = CogniteClient()
                >>> dps = c.datapoints.synthetic.retrieve(expression="TS{id:123} + TS{externalId:'abc'}", start="2w-ago", end="now")
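
            Cap the number of returned datapoints with `limit`:

                >>> dps = c.datapoints.synthetic.retrieve(expression="TS{id:123} + TS{externalId:'abc'}", start="2w-ago", end="now", limit=100)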
            """
        if limit is None or limit == -1:
            limit = float("inf")
        query = {
            "expression": expression,
            "start": cognite.client.utils._time.timestamp_to_ms(start),
            "end": cognite.client.utils._time.timestamp_to_ms(end),
        }
        datapoints = Datapoints()
        while True:
            query["limit"] = min(limit, self._DPS_LIMIT)
            resp = self._post(url_path=self._SYNTHETIC_RESOURCE_PATH + "/query", json={"items": [query]})
            data = resp.json()["items"][0]
            datapoints._extend(Datapoints._load(data, expected_fields=["value"]))
            limit -= len(data["datapoints"])
            if len(data["datapoints"]) < self._DPS_LIMIT or limit <= 0:
                break
            query["start"] = data["datapoints"][-1]["timestamp"] + 1
        return datapoints
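
    # The variant below extends the plain retrieve above: string or sympy expressions are
    # first run through _build_expression (variable substitution plus an optional
    # aggregate/granularity for the substituted series), and the shortened expression is
    # stored as the external_id of the returned Datapoints for dataframe readability.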
    def retrieve(
        self,
        expression: Union[str, "sympy.Expr"],
        start: Union[int, str, datetime],
        end: Union[int, str, datetime],
        limit: int = None,
        variables: Dict[str, Union[str, TimeSeries]] = None,
        aggregate: str = None,
        granularity: str = None,
    ) -> Datapoints:
        """Calculate the result of a function on time series.

        Args:
            expression (Union[str, sympy.Expr]): Function to be calculated. Supports both strings and sympy expressions. Strings can have either the API `ts{}` syntax, or contain variable names to be replaced using the `variables` parameter.
            start (Union[int, str, datetime]): Inclusive start.
            end (Union[int, str, datetime]): Exclusive end.
            limit (int): Maximum number of datapoints to retrieve. Defaults to no limit.
            variables (Dict[str, Union[str, TimeSeries]]): An optional map of symbol replacements.
            aggregate (str): Use this aggregate when replacing entries from `variables`; does not affect time series given in the `ts{}` syntax.
            granularity (str): Use this granularity with the aggregate.

        Returns:
            Datapoints: A Datapoints object containing the calculated data.

        Examples:

            Request a synthetic time series query with direct syntax

                >>> from cognite.client.experimental import CogniteClient
                >>> c = CogniteClient()
                >>> dps = c.datapoints.synthetic.retrieve(expression="TS{id:123} + TS{externalId:'abc'}", start="2w-ago", end="now")

            Use variables to re-use an expression:

                >>> from cognite.client.experimental import CogniteClient
                >>> c = CogniteClient()
                >>> vars = {"A": "my_ts_external_id", "B": c.time_series.retrieve(id=1)}
                >>> dps = c.datapoints.synthetic.retrieve(expression="A+B", start="2w-ago", end="now", variables=vars)

            Use sympy to build complex expressions:

                >>> from cognite.client.experimental import CogniteClient
                >>> c = CogniteClient()
                >>> from sympy import symbols, cos, pi
                >>> a = symbols('a')
                >>> dps = c.datapoints.synthetic.retrieve(pi * cos(a), start="2w-ago", end="now", variables={"a": "my_ts_external_id"}, aggregate='interpolation', granularity='1m')
            """
        if limit is None or limit == -1:
            limit = float("inf")
        expression, short_expression = SyntheticDatapointsAPI._build_expression(
            expression, variables, aggregate, granularity
        )
        query = {
            "expression": expression,
            "start": cognite.client.utils._time.timestamp_to_ms(start),
            "end": cognite.client.utils._time.timestamp_to_ms(end),
        }
        datapoints = Datapoints(value=[], error=[])
        datapoints.external_id = short_expression  # for dataframe readability
        while True:
            query["limit"] = min(limit, self._DPS_LIMIT)
            resp = self._post(url_path=self._SYNTHETIC_RESOURCE_PATH + "/query", json={"items": [query]})
            data = resp.json()["items"][0]
            datapoints._extend(Datapoints._load(data, expected_fields=["value", "error"]))
            limit -= len(data["datapoints"])
            if len(data["datapoints"]) < self._DPS_LIMIT or limit <= 0:
                break
            query["start"] = data["datapoints"][-1]["timestamp"] + 1
        return datapoints
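
    # Rough usage sketch for the extended retrieve above (assumes Datapoints.to_pandas()
    # is available, as the "for dataframe readability" note suggests):
    #
    #   >>> vars = {"A": "my_ts_external_id", "B": c.time_series.retrieve(id=1)}
    #   >>> dps = c.datapoints.synthetic.retrieve("A + B", start="2w-ago", end="now", variables=vars)
    #   >>> df = dps.to_pandas()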