def _get_datapoints(
    self,
    start: int,
    end: int,
    ts_item: Dict[str, Any],
    aggregates: List[str],
    granularity: str,
    include_outside_points: bool,
    limit: int,
) -> Datapoints:
    """Fetch one page of datapoints for a single time series item.

    Args:
        start (int): Inclusive start timestamp (ms since epoch).
        end (int): Exclusive end timestamp (ms since epoch).
        ts_item (Dict[str, Any]): Identifier dict for the time series; may carry its own
            "aggregates" entry, which overrides the query-level `aggregates`.
        aggregates (List[str]): Aggregate functions to retrieve, or None for raw values.
        granularity (str): Aggregate granularity, e.g. "1h".
        include_outside_points (bool): Include the first datapoint outside each end of the range.
        limit (int): Max number of datapoints; when falsy, falls back to the API page limit
            (aggregate limit when any aggregates are requested).

    Returns:
        Datapoints: The datapoints returned for this query.
    """
    is_aggregated = aggregates or "aggregates" in ts_item
    payload = {
        "items": [ts_item],
        "start": start,
        "end": end,
        "aggregates": aggregates,
        "granularity": granularity,
        "includeOutsidePoints": include_outside_points,
        "limit": limit or (self.client._DPS_LIMIT_AGG if is_aggregated else self.client._DPS_LIMIT),
    }
    res = self.client._post(self.client._RESOURCE_PATH + "/list", json=payload).json()["items"][0]
    # A per-item "aggregates" entry takes precedence over the query-level aggregates.
    aggs = ts_item.get("aggregates", aggregates)
    expected_fields = list(aggs) if aggs is not None else ["value"]
    return Datapoints._load(res, expected_fields, cognite_client=self.client._cognite_client)
def retrieve_latest(
    self,
    id: Union[int, List[int]] = None,
    external_id: Union[str, List[str]] = None,
    before: Union[int, str, datetime] = None,
) -> Union[Datapoints, DatapointsList]:
    """`Get the latest datapoint for one or more time series <https://docs.cognite.com/api/v1/#operation/getLatest>`_

    Args:
        id (Union[int, List[int]]): Id or list of ids.
        external_id (Union[str, List[str]]): External id or list of external ids.
        before (Union[int, str, datetime]): Get latest datapoint before this time.

    Returns:
        Union[Datapoints, DatapointsList]: A Datapoints object containing the requested data, or a list of such objects.

    Examples:

        Getting the latest datapoint in a time series. This method returns a Datapoints object, so the datapoint
        will be the first element::

            >>> from cognite.client import CogniteClient
            >>> c = CogniteClient()
            >>> res = c.datapoints.retrieve_latest(id=1)[0]

        You can also get the first datapoint before a specific time::

            >>> from cognite.client import CogniteClient
            >>> c = CogniteClient()
            >>> res = c.datapoints.retrieve_latest(id=1, before="2d-ago")[0]

        If you need the latest datapoint for multiple time series simply give a list of ids. Note that we are
        using external ids here, but either will work::

            >>> from cognite.client import CogniteClient
            >>> c = CogniteClient()
            >>> res = c.datapoints.retrieve_latest(external_id=["abc", "def"])
            >>> latest_abc = res[0][0]
            >>> latest_def = res[1][0]
    """
    before = cognite.client.utils._time.timestamp_to_ms(before) if before else None
    all_ids = self._process_ids(id, external_id, wrap_ids=True)
    is_single_id = self._is_single_identifier(id, external_id)
    if before:
        # Attach the cutoff to every identifier item. Use a distinct loop name so the
        # `id` parameter (and builtin) is not rebound.
        for id_obj in all_ids:
            id_obj.update({"before": before})
    # Split into API-sized chunks and fetch them concurrently.
    tasks = [
        {"url_path": self._RESOURCE_PATH + "/latest", "json": {"items": chunk}}
        for chunk in utils._auxiliary.split_into_chunks(all_ids, self._RETRIEVE_LATEST_LIMIT)
    ]
    tasks_summary = utils._concurrency.execute_tasks_concurrently(
        self._post, tasks, max_workers=self._config.max_workers
    )
    if tasks_summary.exceptions:
        raise tasks_summary.exceptions[0]
    res = tasks_summary.joined_results(lambda res: res.json()["items"])
    if is_single_id:
        return Datapoints._load(res[0], cognite_client=self._cognite_client)
    return DatapointsList._load(res, cognite_client=self._cognite_client)
def retrieve_latest(
    self,
    id: Union[int, List[int]] = None,
    external_id: Union[str, List[str]] = None,
    before: Union[int, str, datetime] = None,
) -> Union[Datapoints, DatapointsList]:
    """Get the latest datapoint for one or more time series

    Args:
        id (Union[int, List[int]]): Id or list of ids.
        external_id (Union[str, List[str]]): External id or list of external ids.
        before (Union[int, str, datetime]): Get latest datapoint before this time.

    Returns:
        Union[Datapoints, DatapointsList]: A Datapoints object containing the requested data, or a list of such objects.

    Examples:

        Getting the latest datapoint in a time series. This method returns a Datapoints object, so the datapoint
        will be the first element::

            >>> from cognite.client import CogniteClient
            >>> c = CogniteClient()
            >>> res = c.datapoints.retrieve_latest(id=1)[0]

        You can also get the first datapoint before a specific time::

            >>> from cognite.client import CogniteClient
            >>> c = CogniteClient()
            >>> res = c.datapoints.retrieve_latest(id=1, before="2d-ago")[0]

        If you need the latest datapoint for multiple time series simply give a list of ids. Note that we are
        using external ids here, but either will work::

            >>> from cognite.client import CogniteClient
            >>> c = CogniteClient()
            >>> res = c.datapoints.retrieve_latest(external_id=["abc", "def"])
            >>> latest_abc = res[0][0]
            >>> latest_def = res[1][0]
    """
    before = cognite.client.utils._time.timestamp_to_ms(before) if before else None
    all_ids = self._process_ids(id, external_id, wrap_ids=True)
    is_single_id = self._is_single_identifier(id, external_id)
    if before:
        # Attach the cutoff to every identifier item. Use a distinct loop name so the
        # `id` parameter (and builtin) is not rebound.
        for id_obj in all_ids:
            id_obj.update({"before": before})
    res = self._post(url_path=self._RESOURCE_PATH + "/latest", json={"items": all_ids}).json()["items"]
    if is_single_id:
        return Datapoints._load(res[0], cognite_client=self._cognite_client)
    return DatapointsList._load(res, cognite_client=self._cognite_client)
def store_partial_result(self, raw_data, start, end):
    """Store one page of raw datapoint results, splitting off any outside points.

    Mutates ``raw_data["datapoints"]`` in place: the leading point (if before `start`)
    and trailing point (if at/after `end`) are moved into ``self.point_before`` /
    ``self.point_after``; the remainder is appended to ``self.results``.

    Args:
        raw_data: Raw response item dict containing a "datapoints" list — presumably
            sorted ascending by timestamp (TODO confirm against caller).
        start: Inclusive start of the requested window (ms since epoch).
        end: Exclusive end of the requested window (ms since epoch).

    Returns:
        Tuple of (number of datapoints kept from this page, timestamp of the last kept
        datapoint — falsy when no datapoints remain).
    """
    expected_fields = self.aggregates or ["value"]
    if self.include_outside_points and raw_data["datapoints"]:
        # assumes first query has full start/end range
        copy_data = copy.copy(raw_data)  # shallow copy
        if raw_data["datapoints"][0]["timestamp"] < start:
            # First point precedes the window: capture it once as the "point before",
            # then drop it from the page.
            if not self.point_before:
                copy_data["datapoints"] = raw_data["datapoints"][:1]
                self.point_before = Datapoints._load(
                    copy_data, expected_fields, cognite_client=self.client._cognite_client
                )
            raw_data["datapoints"] = raw_data["datapoints"][1:]
        if raw_data["datapoints"] and raw_data["datapoints"][-1]["timestamp"] >= end:
            # Last point is at/after the window end: capture it once as the "point after",
            # then drop it from the page.
            if not self.point_after:
                copy_data["datapoints"] = raw_data["datapoints"][-1:]
                self.point_after = Datapoints._load(
                    copy_data, expected_fields, cognite_client=self.client._cognite_client
                )
            raw_data["datapoints"] = raw_data["datapoints"][:-1]
    self.results.append(Datapoints._load(raw_data, expected_fields, cognite_client=self.client._cognite_client))
    # `and` short-circuits: an empty datapoints list yields a falsy last_timestamp.
    last_timestamp = raw_data["datapoints"] and raw_data["datapoints"][-1]["timestamp"]
    return len(raw_data["datapoints"]), last_timestamp
def retrieve(
    self, expression: str, start: Union[int, str, datetime], end: Union[int, str, datetime], limit: int = None
) -> Datapoints:
    """Calculate the result of a function on time series.

    Args:
        expression (str): Function to be calculated.
        start (Union[int, str, datetime]): Inclusive start.
        end (Union[int, str, datetime]): Exclusive end.
        limit (int): Number of datapoints to retrieve. Pass None or -1 for no limit.

    Returns:
        Datapoints: A Datapoints object containing the calculated data.

    Examples:

        >>> from cognite.client.experimental import CogniteClient
        >>> c = CogniteClient()
        >>> dps = c.datapoints.synthetic.retrieve(expression="TS{id:123} + TS{externalId:'abc'}", start="2w-ago", end="now")
    """
    if limit is None or limit == -1:
        limit = float("inf")  # sentinel: keep paging until the API runs out of data
    query = {
        "expression": expression,
        "start": cognite.client.utils._time.timestamp_to_ms(start),
        "end": cognite.client.utils._time.timestamp_to_ms(end),
    }
    datapoints = Datapoints()
    while True:
        query["limit"] = min(limit, self._DPS_LIMIT)
        resp = self._post(url_path=self._SYNTHETIC_RESOURCE_PATH + "/query", json={"items": [query]})
        data = resp.json()["items"][0]
        datapoints._extend(Datapoints._load(data, expected_fields=["value"]))
        limit -= len(data["datapoints"])
        # A short page means the server is exhausted; a non-positive limit means we have enough.
        if len(data["datapoints"]) < self._DPS_LIMIT or limit <= 0:
            break
        # Resume just past the last timestamp we received.
        query["start"] = data["datapoints"][-1]["timestamp"] + 1
    return datapoints
def retrieve(
    self,
    expression: Union[str, "sympy.Expr"],
    start: Union[int, str, datetime],
    end: Union[int, str, datetime],
    limit: int = None,
    variables: Dict[str, Union[str, TimeSeries]] = None,
    aggregate: str = None,
    granularity: str = None,
) -> Datapoints:
    """Calculate the result of a function on time series.

    Args:
        expression (Union[str, sympy.Expr]): Function to be calculated. Supports both strings and sympy
            expressions. Strings can have either the API `ts{}` syntax, or contain variable names to be
            replaced using the `variables` parameter.
        start (Union[int, str, datetime]): Inclusive start.
        end (Union[int, str, datetime]): Exclusive end.
        limit (int): Number of datapoints to retrieve.
        variables (Dict[str, Union[str, TimeSeries]]): An optional map of symbol replacements.
        aggregate (str): Use this aggregate when replacing entries from `variables`, does not affect time
            series given in the `ts{}` syntax.
        granularity (str): Use this granularity with the aggregate.

    Returns:
        Datapoints: A Datapoints object containing the calculated data.

    Examples:

        Request a synthetic time series query with direct syntax:

            >>> from cognite.client.experimental import CogniteClient
            >>> c = CogniteClient()
            >>> dps = c.datapoints.synthetic.retrieve(expression="TS{id:123} + TS{externalId:'abc'}", start="2w-ago", end="now")

        Use variables to re-use an expression:

            >>> from cognite.client.experimental import CogniteClient
            >>> c = CogniteClient()
            >>> vars = {"A": "my_ts_external_id", "B": c.time_series.retrieve(id=1)}
            >>> dps = c.datapoints.synthetic.retrieve(expression="A+B", start="2w-ago", end="now", variables=vars)

        Use sympy to build complex expressions:

            >>> from cognite.client.experimental import CogniteClient
            >>> c = CogniteClient()
            >>> from sympy import symbols, cos, pi
            >>> a = symbols('a')
            >>> dps = c.datapoints.synthetic.retrieve(pi * cos(a), start="2w-ago", end="now", variables={"a": "my_ts_external_id"}, aggregate='interpolation', granularity='1m')
    """
    if limit is None or limit == -1:
        limit = float("inf")  # sentinel: keep paging until the API runs out of data
    expression, short_expression = SyntheticDatapointsAPI._build_expression(
        expression, variables, aggregate, granularity
    )
    query = {
        "expression": expression,
        "start": cognite.client.utils._time.timestamp_to_ms(start),
        "end": cognite.client.utils._time.timestamp_to_ms(end),
    }
    datapoints = Datapoints(value=[], error=[])
    datapoints.external_id = short_expression  # for dataframe readability
    while True:
        query["limit"] = min(limit, self._DPS_LIMIT)
        resp = self._post(url_path=self._SYNTHETIC_RESOURCE_PATH + "/query", json={"items": [query]})
        data = resp.json()["items"][0]
        datapoints._extend(Datapoints._load(data, expected_fields=["value", "error"]))
        limit -= len(data["datapoints"])
        # A short page means the server is exhausted; a non-positive limit means we have enough.
        if len(data["datapoints"]) < self._DPS_LIMIT or limit <= 0:
            break
        # Resume just past the last timestamp we received.
        query["start"] = data["datapoints"][-1]["timestamp"] + 1
    return datapoints