def reset_depth_series(name, **kwargs):
    """Delete all datapoints for a depthseries.

    Args:
        name (str): Name of depthseries to delete.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))

    def _build_url(series_name):
        # Delete every datapoint from timestamp 0 up to the largest
        # representable timestamp.
        # BUG FIX: the second query parameter was joined with '?' instead of
        # '&', which made 'timestampInclusiveEnd' part of the value of the
        # first parameter rather than a parameter of its own.
        return config.get_base_url(
            api_version=0.5
        ) + "/projects/{}/timeseries/{}?timestampInclusiveBegin=0&timestampInclusiveEnd={}".format(
            project, quote_plus(series_name), sys.maxsize
        )

    headers = {"api-key": api_key, "accept": "application/json"}
    res = _utils.delete_request(_build_url(name), headers=headers)
    # NOTE(review): comparing the response object itself to {} looks
    # suspicious (a response object is never equal to a dict) — confirm
    # whether res.json() == {} was intended.
    if res == {}:
        # Also clear the companion depth-index series.
        res = _utils.delete_request(_build_url(_generateIndexName(name)), headers=headers)
    return res.json()
def delete_assets(asset_ids: List[int], **kwargs):
    """Delete a list of assets.

    Args:
        asset_ids (list[int]): List of IDs of assets to delete.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/assets/delete".format(config.get_base_url(api_version=0.5), project)
    payload = {"items": asset_ids}
    req_headers = {"api-key": api_key, "content-type": "application/json", "accept": "application/json"}
    response = utils.post_request(url, body=payload, headers=req_headers, cookies=config.get_cookies())
    return response.json()
def update_time_series(time_series: List[TimeSeries], **kwargs):
    """Update an existing time series.

    For each field that can be updated, a null value indicates that nothing
    should be done.

    Args:
        time_series (list[v05.dto.TimeSeries]): List of time series data transfer objects to update.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/timeseries".format(config.get_base_url(api_version=0.5), project)
    payload = {"items": [dto.__dict__ for dto in time_series]}
    req_headers = {"api-key": api_key, "content-type": "application/json", "accept": "application/json"}
    return _utils.put_request(url, body=payload, headers=req_headers).json()
def download_file(id, get_contents=False, **kwargs):
    """Download a file, either as a download link or as the raw contents.

    The original docstring was a copy-paste error: it described a file
    listing/upload operation and documented ``id`` as an upload path.

    Args:
        id (int): ID of the file to download.
        get_contents (bool, optional): Whether to return the file contents.
            Default is False, in which case a download url is returned.

    Keyword Args:
        api_key (str, optional): Your api-key.
        project (str, optional): Project name.

    Returns:
        str: Download link if get_contents is False else file contents.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = config.get_base_url(api_version=0.4) + "/projects/{}/storage/{}".format(project, id)
    headers = {"api-key": api_key, "accept": "application/json"}
    res = _utils.get_request(url=url, headers=headers, cookies=config.get_cookies())
    if get_contents:
        # The API returns a download link; fetch it to obtain the raw bytes.
        dl_link = res.json()["data"]
        res = requests.get(dl_link)
        return res.content
    return res.json()["data"]
def delete_files(file_ids, **kwargs):
    """Delete a list of files.

    Args:
        file_ids (list[int]): List of IDs of files to delete.

    Keyword Args:
        api_key (str): Your api key.
        project (str): Your project.

    Returns:
        list: List of files deleted and files that failed to delete.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/storage/delete".format(config.get_base_url(api_version=0.4), project)
    req_headers = {"api-key": api_key, "content-type": "application/json", "accept": "application/json"}
    response = _utils.post_request(url, body={"items": file_ids}, headers=req_headers)
    return response.json()["data"]
def get_row(database_name: str = None, table_name: str = None, row_key: str = None, api_key=None, project=None):
    """Returns a RawResponse Object containing a list of rows.

    Args:
        database_name (str): The database name to retrieve rows from.
        table_name (str): The table name to retrieve rows from.
        row_key (str): The key of the row to fetch.
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        v04.dto.RawResponse: A data object containing the requested data with several getter methods with different
        output formats.
    """
    api_key, project = config.get_config_variables(api_key, project)
    url = "{}/projects/{}/raw/{}/{}/{}".format(
        config.get_base_url(api_version=0.4), project, database_name, table_name, row_key
    )
    req_headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"}
    response = _utils.get_request(url=url, params={}, headers=req_headers, cookies=config.get_cookies())
    return RawResponse(response.json())
def tag_matching(tag_ids, fuzzy_threshold=0, platform=None, **kwargs):
    """Returns a TagMatchingObject containing a list of matched tags for the given query.

    This method takes an arbitrary string as argument and performs fuzzy matching with a user defined threshold
    toward tag ids in the system.

    Args:
        tag_ids (list): The tag_ids to retrieve matches for.
        fuzzy_threshold (int): The threshold to use when searching for matches. A fuzzy threshold of 0 means you only
            want to accept perfect matches. Must be >= 0.
        platform (str): The platform to search on.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        v04.dto.TagMatchingResponse: A data object containing the requested data with several getter methods with
        different output formats.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/tagmatching".format(config.get_base_url(api_version=0.4), project)
    match_options = {"fuzzyThreshold": fuzzy_threshold, "platform": platform}
    payload = {"tagIds": tag_ids, "metadata": match_options}
    req_headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"}
    response = _utils.post_request(url=url, body=payload, headers=req_headers, cookies=config.get_cookies())
    return TagMatchingResponse(response.json())
def post_assets(assets: List[Asset], **kwargs):
    """Insert a list of assets.

    Args:
        assets (list[v05.dto.Asset]): List of asset data transfer objects.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        v05.dto.AssetListResponse: A data object containing the posted assets with several getter methods with
        different output formats.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/assets".format(config.get_base_url(api_version=0.5), project)
    payload = {"items": [dto.__dict__ for dto in assets]}
    req_headers = {"api-key": api_key, "content-type": "application/json", "accept": "application/json"}
    response = utils.post_request(url, body=payload, headers=req_headers, cookies=config.get_cookies())
    return AssetListResponse(response.json())
def get_latest(name, **kwargs):
    """Returns a LatestDatapointObject containing the latest datapoint for the given timeseries.

    Args:
        name (str): The name of the timeseries to retrieve data for.

    Keyword Arguments:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        v05.dto.LatestDatapointsResponse: A data object containing the requested data with several getter methods
        with different output formats.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    encoded_name = quote(name, safe="")
    url = "{}/projects/{}/timeseries/latest/{}".format(config.get_base_url(api_version=0.5), project, encoded_name)
    req_headers = {"api-key": api_key, "accept": "application/json"}
    response = _utils.get_request(url, headers=req_headers, cookies=config.get_cookies())
    return LatestDatapointResponse(response.json())
def get_tables(database_name: str = None, limit: int = None, cursor: str = None, api_key=None, project=None):
    """Returns a RawObject containing a list of tables in a raw database.

    Args:
        database_name (str): The database name to retrieve tables from.
        limit (int): A limit on the amount of results to return.
        cursor (str): A cursor can be provided to navigate through pages of results.
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        v04.dto.RawResponse: A data object containing the requested data with several getter methods with different
        output formats.
    """
    api_key, project = config.get_config_variables(api_key, project)
    url = config.get_base_url(api_version=0.4) + "/projects/{}/raw/{}".format(project, database_name)
    params = dict()
    # BUG FIX: the original conditions were inverted ('if not limit'), so the
    # query parameters were only ever sent when their values were None/empty.
    if limit:
        params["limit"] = limit
    if cursor:
        params["cursor"] = cursor
    headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"}
    res = _utils.get_request(url=url, params=params, headers=headers, cookies=config.get_cookies())
    return RawResponse(res.json())
def get_asset_subtree(asset_id="", depth=None, **kwargs):
    """Returns assets with provided assetId.

    Args:
        asset_id (str): The asset id of the top asset to get.
        depth (int): Get subassets this many levels below the top asset.

    Keyword Arguments:
        limit (int): The maximum number of assets to be returned.
        cursor (str): Cursor to use for paging through results.
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        v04.dto.AssetResponse: A data object containing the requested assets with several getter methods with
        different output formats.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/assets/{}".format(config.get_base_url(api_version=0.4), project, asset_id)
    query = {
        "depth": depth,
        "limit": kwargs.get("limit", constants.LIMIT),
        "cursor": kwargs.get("cursor"),
    }
    req_headers = {"api-key": api_key, "accept": "application/json"}
    response = utils.get_request(url, params=query, headers=req_headers, cookies=config.get_cookies())
    return AssetResponse(response.json())
def create_databases(database_names: list, api_key=None, project=None):
    """Creates databases in the Raw API and returns the created databases.

    Args:
        database_names (list): A list of databases to create.
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        v05.dto.RawResponse: A data object containing the requested data with several getter methods with different
        output formats.
    """
    api_key, project = config.get_config_variables(api_key, project)
    url = "{}/projects/{}/raw/create".format(config.get_base_url(api_version=0.5), project)
    payload = {"items": [{"dbName": "{}".format(db_name)} for db_name in database_names]}
    req_headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"}
    response = _utils.post_request(url=url, body=payload, headers=req_headers, cookies=config.get_cookies())
    return RawResponse(response.json())
def post_time_series(time_series: List[TimeSeries], **kwargs):
    """Create a new time series.

    Args:
        time_series (list[v05.dto.TimeSeries]): List of time series data transfer objects to create.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/timeseries".format(config.get_base_url(api_version=0.5), project)
    payload = {"items": [dto.__dict__ for dto in time_series]}
    req_headers = {"api-key": api_key, "content-type": "application/json", "accept": "application/json"}
    return _utils.post_request(url, body=payload, headers=req_headers).json()
def delete_rows(
    database_name: str = None, table_name: str = None, rows: List[RawRow] = None, api_key=None, project=None
):
    """Deletes rows in the Raw API.

    Args:
        database_name (str): The database to create tables in.
        table_name (str): The table name where the rows are at.
        rows (list): The rows to delete.
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(api_key, project)
    url = "{}/projects/{}/raw/{}/{}/delete".format(
        config.get_base_url(api_version=0.4), project, database_name, table_name
    )
    items = [{"key": "{}".format(row.key), "columns": row.columns} for row in rows]
    req_headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"}
    response = _utils.post_request(url=url, body={"items": items}, headers=req_headers, cookies=config.get_cookies())
    return response.json()
def post_multi_tag_datapoints(
        timeseries_with_datapoints: List[TimeseriesWithDatapoints], **kwargs):
    """Insert data into multiple timeseries.

    Args:
        timeseries_with_datapoints (List[v05.dto.TimeseriesWithDatapoints]): The timeseries with data to insert.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.
        use_gzip (bool): Whether or not to gzip the request

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = config.get_base_url(
        api_version=0.4) + "/projects/{}/timeseries/data".format(project)
    use_gzip = kwargs.get("use_gzip", False)
    headers = {
        "api-key": api_key,
        "content-type": "application/json",
        "accept": "application/json"
    }

    ul_dps_limit = 100000  # max datapoints per request

    # Make sure we only work with TimeseriesWithDatapoints objects that have
    # at most ul_dps_limit datapoints each.
    timeseries_with_datapoints_limited = []
    for entry in timeseries_with_datapoints:
        timeseries_with_datapoints_limited.extend(
            _split_TimeseriesWithDatapoints_if_over_limit(entry, ul_dps_limit))

    # Group these TimeseriesWithDatapoints if possible so that we upload as
    # much as possible in each call to the API.
    timeseries_to_upload_binned = _utils.first_fit(
        list_items=timeseries_with_datapoints_limited,
        max_size=ul_dps_limit,
        get_count=lambda x: len(x.datapoints))

    # BUG FIX: with an empty input the original dereferenced an unbound 'res'
    # (UnboundLocalError). Also renamed the loop variable from 'bin', which
    # shadowed the builtin.
    res = None
    for group in timeseries_to_upload_binned:
        body = {
            "items": [{
                "tagId": ts_with_data.name,
                "datapoints": [dp.__dict__ for dp in ts_with_data.datapoints]
            } for ts_with_data in group]
        }
        res = _utils.post_request(url, body=body, headers=headers, use_gzip=use_gzip)

    return res.json() if res is not None else {}
def post_datapoints(name, depthdatapoints: List[DatapointDepth], **kwargs):
    """Insert a list of depth datapoints into a depthseries.

    The docstring previously documented a nonexistent ``datapoints`` argument;
    the actual parameter is ``depthdatapoints``.

    Args:
        name (str): Name of the depthseries to insert to.
        depthdatapoints (list[v05.data_objects.DatapointDepth]): List of datapoint data transfer objects to insert.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))

    # A depthseries is stored as two regular timeseries (the values and the
    # depth index), both keyed on a synthetic, monotonically increasing
    # timestamp starting at 0 and stepping by MS_INCREMENT.
    offset = 0

    url = config.get_base_url(
        api_version=0.5) + "/projects/{}/timeseries/data".format(project)
    headers = {
        "api-key": api_key,
        "content-type": "application/json",
        "accept": "application/json"
    }
    datapoints = []
    depthpoints = []
    for datapoint in depthdatapoints:
        datapoints.append(Datapoint(offset, datapoint.value))
        depthpoints.append(Datapoint(offset, datapoint.depth))
        offset += MS_INCREMENT

    ul_dps_limit = 100000  # max datapoints per request
    i = 0
    while i < len(datapoints):
        body = {
            "items": [
                {
                    "name": name,
                    "datapoints": [dp.__dict__ for dp in datapoints[i:i + ul_dps_limit]]
                },
                {
                    "name": _generateIndexName(name),
                    "datapoints": [dp.__dict__ for dp in depthpoints[i:i + ul_dps_limit]],
                },
            ]
        }
        _utils.post_request(url, body=body, headers=headers)
        i += ul_dps_limit

    return {}
def get_assets(name=None, path=None, description=None, metadata=None, depth=None, fuzziness=None, **kwargs):
    """Returns assets matching provided description.

    Args:
        name (str): The name of the asset(s) to get.
        path (str): The path of the subtree to search in.
        description (str): Search query.
        metadata (dict): The metadata values used to filter the results.
        depth (int): Get sub assets up to this many levels below the specified path.
        fuzziness (int): The degree of fuzziness in the name matching.

    Keyword Arguments:
        limit (int): The maximum number of assets to be returned.
        cursor (str): Cursor to use for paging through results.
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        v05.dto.AssetListResponse: A data object containing the requested assets with several getter methods with
        different output formats.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/assets".format(config.get_base_url(api_version=0.5), project)
    query = {
        "name": name,
        "description": description,
        "path": path,
        "metadata": str(metadata) if metadata else None,
        "depth": depth,
        "fuzziness": fuzziness,
        "cursor": kwargs.get("cursor"),
        "limit": kwargs.get("limit", constants.LIMIT),
    }
    req_headers = {"api-key": api_key, "accept": "application/json"}
    response = utils.get_request(url, params=query, headers=req_headers, cookies=config.get_cookies())
    return AssetListResponse(response.json())
def create_rows(
    database_name: str = None,
    table_name: str = None,
    rows: List[RawRow] = None,
    api_key=None,
    project=None,
    ensure_parent=False,
    use_gzip=False,
):
    """Creates tables in the given Raw API database.

    Args:
        database_name (str): The database to create rows in.
        table_name (str): The table names to create rows in.
        rows (list[v04.dto.RawRow]): The rows to create.
        api_key (str): Your api-key.
        project (str): Project name.
        ensure_parent (bool): Create database/table if it doesn't exist already
        use_gzip (bool): Compress content using gzip

    Returns:
        An empty response
    """
    api_key, project = config.get_config_variables(api_key, project)
    url = config.get_base_url(api_version=0.4) + "/projects/{}/raw/{}/{}/create".format(
        project, database_name, table_name
    )
    headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"}

    if ensure_parent:
        params = {"ensureParent": "true"}
    else:
        params = {}

    # BUG FIX: with rows=None or an empty list the original crashed —
    # TypeError on len(None), or UnboundLocalError on 'res' after a loop
    # that never ran. Posting nothing now returns an empty dict.
    if not rows:
        return {}

    ul_row_limit = 1000  # max rows per request
    i = 0
    while i < len(rows):
        body = {"items": [{"key": "{}".format(row.key), "columns": row.columns} for row in rows[i : i + ul_row_limit]]}
        res = _utils.post_request(
            url=url, body=body, headers=headers, params=params, cookies=config.get_cookies(), use_gzip=use_gzip
        )
        i += ul_row_limit
    return res.json()
def get_event(event_id, **kwargs):
    """Returns a EventResponse containing an event matching the id.

    Args:
        event_id (int): The event id.

    Keyword Arguments:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        v05.dto.EventResponse: A data object containing the requested event.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/events/{}".format(config.get_base_url(api_version=0.5), project, event_id)
    req_headers = {"api-key": api_key, "content-type": "application/json", "accept": "application/json"}
    response = _utils.get_request(url, headers=req_headers, cookies=config.get_cookies())
    return EventResponse(response.json())
def update_depth_series(depth_series: List[TimeSeries], **kwargs):
    """Update an existing depth series.

    For each field that can be updated, a null value indicates that nothing
    should be done.

    NOTE(review): this function mutates the caller's DTOs in place (see
    below) — callers should not reuse the objects after this call.

    Args:
        depth_series (list[v05.dto.TimeSeries]): List of time series data transfer objects to update.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = config.get_base_url(
        api_version=0.5) + "/projects/{}/timeseries".format(project)
    body = {"items": [ts.__dict__ for ts in depth_series]}
    headers = {
        "api-key": api_key,
        "content-type": "application/json",
        "accept": "application/json"
    }
    res = _utils.put_request(url, body=body, headers=headers)
    # An empty response body indicates the value-series update succeeded;
    # then push the corresponding update to the companion depth-index series.
    if res.json() == {}:
        # In-place mutation: each DTO is rewritten to target its index series.
        # isString/unit are cleared so they are not updated on the index.
        for dsdto in depth_series:
            dsdto.name = _generateIndexName(dsdto.name)
            dsdto.isString = None
            dsdto.unit = None
        # Only send DTOs that still carry changes relevant to the index series.
        items = [
            ts.__dict__ for ts in depth_series
            if _has_depth_index_changes(ts)
        ]
        body = {"items": items}
        if len(items) > 0:
            res = _utils.put_request(url, body=body, headers=headers)
    return res.json()
def delete_databases(database_names: list, recursive: bool = False, api_key=None, project=None):
    """Deletes databases in the Raw API.

    Args:
        database_names (list): A list of databases to delete.
        recursive (bool): Whether to delete the databases' tables as well.
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(api_key, project)
    url = "{}/projects/{}/raw/delete".format(config.get_base_url(api_version=0.4), project)
    payload = {"items": [{"dbName": "{}".format(db_name)} for db_name in database_names]}
    query = {"recursive": recursive}
    req_headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"}
    response = _utils.post_request(url=url, body=payload, params=query, headers=req_headers,
                                   cookies=config.get_cookies())
    return response.json()
def get_file_info(id, **kwargs):
    """Returns information about a file.

    Args:
        id (int): Id of the file.

    Keyword Args:
        api_key (str, optional): Your api-key.
        project (str, optional): Project name.

    Returns:
        v04.dto.FileInfoResponse: A data object containing the requested file information.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/storage/{}/info".format(config.get_base_url(api_version=0.4), project, id)
    req_headers = {"api-key": api_key, "accept": "application/json"}
    response = _utils.get_request(url, headers=req_headers)
    return FileInfoResponse(response.json())
def post_events(events, **kwargs):
    """Adds a list of events and returns an EventListResponse object containing created events.

    Args:
        events (List[v05.dto.Event]): List of events to create.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        v05.dto.EventListResponse
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/events".format(config.get_base_url(api_version=0.5), project)
    req_headers = {"api-key": api_key, "content-type": "application/json", "accept": "application/json"}
    payload = {"items": [event.__dict__ for event in events]}
    response = _utils.post_request(url, body=payload, headers=req_headers)
    return EventListResponse(response.json())
def get_asset(asset_id, **kwargs):
    """Returns the asset with the provided assetId.

    Args:
        asset_id (int): The asset id of the top asset to get.

    Keyword Arguments:
        api_key (str): Your api-key.

        project (str): Project name.

    Returns:
        v05.dto.AssetResponse: A data object containing the requested assets with several getter methods with
        different output formats.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    # NOTE(review): despite the function name and docstring suggesting a
    # single asset, this hits the '/subtree' endpoint — confirm whether the
    # response including descendants is intended, or whether the URL should
    # be '/projects/{}/assets/{}'.
    url = config.get_base_url(
        api_version=0.5) + "/projects/{}/assets/{}/subtree".format(
            project, asset_id)
    headers = {"api-key": api_key, "accept": "application/json"}
    res = utils.get_request(url, headers=headers, cookies=config.get_cookies())
    return AssetResponse(res.json())
def delete_tables(database_name: str = None, table_names: list = None, api_key=None, project=None):
    """Deletes tables in a database in the Raw API.

    The original docstring wrongly said "Deletes databases" and described the
    table names as names "to create".

    Args:
        database_name (str): The database to delete tables from.
        table_names (list): The table names to delete.
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(api_key, project)
    url = config.get_base_url(
        api_version=0.5) + "/projects/{}/raw/{}/delete".format(
            project, database_name)
    body = {
        "items": [{
            "tableName": "{}".format(table_name)
        } for table_name in table_names]
    }
    headers = {
        "api-key": api_key,
        "content-type": "*/*",
        "accept": "application/json"
    }
    res = _utils.post_request(url=url, body=body, headers=headers, cookies=config.get_cookies())
    return res.json()
def post_datapoints(tag_id, datapoints: List[Datapoint], **kwargs):
    """Insert a list of datapoints.

    Args:
        tag_id (str): ID of timeseries to insert to.
        datapoints (list[v04.dto.Datapoint]): List of datapoint data transfer objects to insert.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = config.get_base_url(
        api_version=0.4) + "/projects/{}/timeseries/data/{}".format(
            project, quote(tag_id, safe=""))
    headers = {
        "api-key": api_key,
        "content-type": "application/json",
        "accept": "application/json"
    }

    ul_dps_limit = 100000  # max datapoints per request

    # BUG FIX: with an empty datapoints list the original dereferenced an
    # unbound 'res' (UnboundLocalError); now returns an empty dict instead.
    res = None
    i = 0
    while i < len(datapoints):
        body = {
            "items": [dp.__dict__ for dp in datapoints[i:i + ul_dps_limit]]
        }
        res = _utils.post_request(url, body=body, headers=headers)
        i += ul_dps_limit

    return res.json() if res is not None else {}
def get_databases(limit: int = None, cursor: str = None, api_key=None, project=None):
    """Returns a RawObject containing a list of raw databases.

    Args:
        limit (int): A limit on the amount of results to return.
        cursor (str): A cursor can be provided to navigate through pages of results.
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        v05.dto.RawResponse: A data object containing the requested data with several getter methods with different
        output formats.
    """
    api_key, project = config.get_config_variables(api_key, project)
    url = "{}/projects/{}/raw".format(config.get_base_url(api_version=0.5), project)
    query = {"limit": limit, "cursor": cursor}
    req_headers = {"api-key": api_key, "content-type": "*/*", "accept": "application/json"}
    response = _utils.get_request(url=url, params=query, headers=req_headers, cookies=config.get_cookies())
    return RawResponse(response.json())
def delete_time_series(name, **kwargs):
    """Delete a timeseries.

    Args:
        name (str): Name of timeseries to delete.

    Keyword Args:
        api_key (str): Your api-key.
        project (str): Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    encoded_name = quote(name, safe="")
    url = "{}/projects/{}/timeseries/{}".format(config.get_base_url(api_version=0.5), project, encoded_name)
    req_headers = {"api-key": api_key, "accept": "application/json"}
    return _utils.delete_request(url, headers=req_headers).json()
def get_timeseries(prefix=None, description=None, include_metadata=False, asset_id=None, path=None, **kwargs):
    """Returns a TimeseriesObject containing the requested timeseries.

    Args:
        prefix (str): List timeseries with this prefix in the name.
        description (str): Filter timeseries that contains this string in its description.
        include_metadata (bool): Decide if the metadata field should be returned or not. Defaults to False.
        asset_id (int): Get timeseries related to this asset.
        path (str): Get timeseries under this asset path branch.

    Keyword Arguments:
        limit (int): Number of results to return.
        api_key (str): Your api-key.
        project (str): Project name.
        autopaging (bool): Whether or not to automatically page through results. If set to true, limit will be
            disregarded. Defaults to False.

    Returns:
        v05.dto.TimeSeriesResponse: A data object containing the requested timeseries with several getter methods
        with different output formats.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = "{}/projects/{}/timeseries".format(config.get_base_url(api_version=0.5), project)
    req_headers = {"api-key": api_key, "accept": "application/json"}
    autopaging = kwargs.get("autopaging")
    params = {
        "q": prefix,
        "description": description,
        "includeMetadata": include_metadata,
        "assetId": asset_id,
        "path": path,
        # When autopaging, always use the maximum page size.
        "limit": 10000 if autopaging else kwargs.get("limit", 10000),
    }

    time_series = []
    while True:
        res = _utils.get_request(url=url, headers=req_headers, params=params, cookies=config.get_cookies())
        page = res.json()["data"]
        time_series.extend(page["items"])
        next_cursor = page.get("nextCursor")
        if not (next_cursor and autopaging):
            break
        params["cursor"] = next_cursor

    return TimeSeriesResponse({
        "data": {
            "nextCursor": next_cursor,
            "previousCursor": res.json()["data"].get("previousCursor"),
            "items": time_series,
        }
    })
def _get_datapoints_frame_user_defined_limit(time_series, aggregates, granularity, start, end, limit, **kwargs):
    """Returns a pandas DataFrame with the requested data. No paging or parallelizing is done.

    Args:
        time_series (str): The list of timeseries names to retrieve data for. Each timeseries can be either a string
            containing the ts name or a dictionary containing the ts name and a list of specific aggregate functions.
        aggregates (list): The list of aggregate functions you wish to apply to the data. Valid aggregate functions
            are: 'average/avg, max, min, count, sum, interpolation/int, stepinterpolation/step'.
        granularity (str): The granularity of the aggregate values. Valid entries are : 'day/d, hour/h, minute/m,
            second/s', or a multiple of these indicated by a number as a prefix e.g. '12hour'.
        start (Union[str, int, datetime]): Get datapoints after this time. Format is N[timeunit]-ago where timeunit
            is w,d,h,m,s. E.g. '2d-ago' will get everything that is up to 2 days old. Can also send time in ms since
            epoch or a datetime object which will be converted to ms since epoch UTC.
        end (Union[str, int, datetime]): Get datapoints up to this time. Same format as for start.
        limit (int): Max number of rows to retrieve. Max is 100,000.

    Keyword Arguments:
        api_key (str): Your api-key. Obligatory in this helper method.
        project (str): Project name. Obligatory in this helper method.

    Returns:
        v05.dto.DatapointsResponse: A data object containing the requested data with several getter methods with
        different output formats.
    """
    # Helper method: credentials are taken straight from kwargs, not resolved
    # through config.get_config_variables.
    api_key, project = kwargs.get("api_key"), kwargs.get("project")
    cookies = kwargs.get("cookies")
    url = "{}/projects/{}/timeseries/dataframe".format(config.get_base_url(api_version=0.5), project)

    def _to_item(ts):
        # A plain string is just a name; a dict may carry per-series aggregates.
        if isinstance(ts, str):
            return {"name": "{}".format(ts)}
        return {"name": "{}".format(ts["name"]), "aggregates": ts.get("aggregates", [])}

    payload = {
        "items": [_to_item(ts) for ts in time_series],
        "aggregates": aggregates,
        "granularity": granularity,
        "start": start,
        "end": end,
        "limit": limit,
    }
    req_headers = {"api-key": api_key, "content-type": "application/json", "accept": "text/csv"}
    res = _utils.post_request(url=url, body=payload, headers=req_headers, cookies=cookies)
    encoding = res.encoding if res.encoding else res.apparent_encoding
    return pd.read_csv(io.StringIO(res.content.decode(encoding)))