def get_asset_subtree(asset_id="", depth=None, **kwargs):
    """Returns assets with provided assetId.

    Args:
        asset_id (str):     The asset id of the top asset to get.

        depth (int):        Get subassets this many levels below the top asset.

    Keyword Arguments:
        limit (int):        The maximum number of assets to be returned.

        cursor (str):       Cursor to use for paging through results.

        api_key (str):      Your api-key.

        project (str):      Project name.

    Returns:
        v04.dto.AssetResponse: A data object containing the requested assets with several getter methods with
        different output formats.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    # v0.4 endpoint — note this function uses `utils`/`constants`, unlike the v0.5
    # functions in this module which use the underscore-prefixed variants.
    url = config.get_base_url(api_version=0.4) + "/projects/{}/assets/{}".format(project, asset_id)
    params = {
        "depth": depth,
        "limit": kwargs.get("limit", constants.LIMIT),
        "cursor": kwargs.get("cursor"),
    }
    headers = {"api-key": api_key, "accept": "application/json"}
    res = utils.get_request(url, params=params, headers=headers, cookies=config.get_cookies())
    return AssetResponse(res.json())
def create_databases(database_names: list, api_key=None, project=None):
    """Creates databases in the Raw API and returns the created databases.

    Args:
        database_names (list):  A list of databases to create.

        api_key (str):          Your api-key.

        project (str):          Project name.

    Returns:
        v05.dto.RawResponse: A data object containing the requested data with several getter methods with different
        output formats.
    """
    api_key, project = config.get_config_variables(api_key, project)
    url = config.get_base_url(api_version=0.5) + "/projects/{}/raw/create".format(project)
    # The Raw API expects one {"dbName": ...} item per database to create.
    db_items = []
    for name in database_names:
        db_items.append({"dbName": "{}".format(name)})
    request_headers = {
        "api-key": api_key,
        "content-type": "*/*",
        "accept": "application/json",
    }
    res = _utils.post_request(
        url=url, body={"items": db_items}, headers=request_headers, cookies=config.get_cookies()
    )
    return RawResponse(res.json())
def get_event(event_id, **kwargs):
    """Returns an EventResponse containing an event matching the id.

    Args:
        event_id (int):     The event id.

    Keyword Arguments:
        api_key (str):      Your api-key.

        project (str):      Project name.

    Returns:
        v05.dto.EventResponse: A data object containing the requested event.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    request_headers = {
        "api-key": api_key,
        "content-type": "application/json",
        "accept": "application/json",
    }
    endpoint = config.get_base_url(api_version=0.5) + "/projects/{}/events/{}".format(project, event_id)
    res = _utils.get_request(endpoint, headers=request_headers, cookies=config.get_cookies())
    return EventResponse(res.json())
def delete_tables(database_name: str = None, table_names: list = None, api_key=None, project=None):
    """Deletes tables from a database in the Raw API.

    Args:
        database_name (str):    The database to delete tables from.

        table_names (list):     The names of the tables to delete.

        api_key (str):          Your api-key.

        project (str):          Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(api_key, project)
    url = config.get_base_url(api_version=0.5) + "/projects/{}/raw/{}/delete".format(project, database_name)
    # One {"tableName": ...} item per table to delete.
    body = {"items": [{"tableName": "{}".format(table_name)} for table_name in table_names]}
    headers = {
        "api-key": api_key,
        "content-type": "*/*",
        "accept": "application/json",
    }
    res = _utils.post_request(url=url, body=body, headers=headers, cookies=config.get_cookies())
    return res.json()
def delete_databases(database_names: list, recursive: bool = False, api_key=None, project=None):
    """Deletes databases in the Raw API.

    Args:
        database_names (list):  A list of databases to delete.

        recursive (bool):       Sent as the `recursive` query parameter; presumably deletes the databases'
                                tables as well. Defaults to False.

        api_key (str):          Your api-key.

        project (str):          Project name.

    Returns:
        An empty response.
    """
    api_key, project = config.get_config_variables(api_key, project)
    url = config.get_base_url(api_version=0.5) + "/projects/{}/raw/delete".format(project)
    body = {"items": [{"dbName": "{}".format(database_name)} for database_name in database_names]}
    params = {"recursive": recursive}
    headers = {
        "api-key": api_key,
        "content-type": "*/*",
        "accept": "application/json",
    }
    res = _utils.post_request(url=url, body=body, params=params, headers=headers, cookies=config.get_cookies())
    return res.json()
def get_databases(limit: int = None, cursor: str = None, api_key=None, project=None):
    """Returns a RawObject containing a list of raw databases.

    Args:
        limit (int):        A limit on the amount of results to return.

        cursor (str):       A cursor can be provided to navigate through pages of results.

        api_key (str):      Your api-key.

        project (str):      Project name.

    Returns:
        v05.dto.RawResponse: A data object containing the requested data with several getter methods with different
        output formats.
    """
    api_key, project = config.get_config_variables(api_key, project)
    endpoint = config.get_base_url(api_version=0.5) + "/projects/{}/raw".format(project)
    request_headers = {
        "api-key": api_key,
        "content-type": "*/*",
        "accept": "application/json",
    }
    query = {"limit": limit, "cursor": cursor}
    res = _utils.get_request(url=endpoint, params=query, headers=request_headers, cookies=config.get_cookies())
    return RawResponse(res.json())
def get_events(type=None, sub_type=None, asset_id=None, **kwargs):
    """Returns an EventListReponse object containing events matching the query.

    Args:
        type (str):             Type (class) of event, e.g. 'failure'.

        sub_type (str):         Sub-type of event, e.g. 'electrical'.

        asset_id (str):         Return events associated with this assetId. NOTE: when asset_id is given, the
                                type/sub_type, has_description, and min/max_start_time filters are NOT sent to
                                the API (this mirrors the original behavior of this function).

    Keyword Arguments:
        sort (str):             Sort descending or ascending. Default 'ASC'.

        cursor (str):           Cursor to use for paging through results.

        limit (int):            Return up to this many results. Maximum is 10000. Default is 25.

        has_description (bool): Return only events that have a textual description. Default null. False gives only
                                those without description.

        min_start_time (string): Only return events from after this time.

        max_start_time (string): Only return events form before this time.

        api_key (str):          Your api-key.

        project (str):          Project name.

        autopaging (bool):      Whether or not to automatically page through results. If set to true, limit will be
                                disregarded. Defaults to False.

    Returns:
        v05.dto.EventListResponse: A data object containing the requested event.
    """
    api_key, project = config.get_config_variables(kwargs.get("api_key"), kwargs.get("project"))
    url = config.get_base_url(api_version=0.5) + "/projects/{}/events".format(project)
    headers = {"api-key": api_key, "content-type": "application/json", "accept": "application/json"}
    autopaging = kwargs.get("autopaging")
    params = {
        "assetId": asset_id,
        "sort": kwargs.get("sort"),
        "cursor": kwargs.get("cursor"),
        # When autopaging, ignore the user limit and request the aggregation page size.
        "limit": kwargs.get("limit", 25) if not autopaging else _constants.LIMIT_AGG,
    }
    if not asset_id:
        # These filters are only applied when no asset_id is given (original behavior).
        params.update(
            {
                "type": type,
                "subtype": sub_type,
                "hasDescription": kwargs.get("has_description"),
                "minStartTime": kwargs.get("min_start_time"),
                "maxStartTime": kwargs.get("max_start_time"),
            }
        )
    res = _utils.get_request(url, headers=headers, params=params, cookies=config.get_cookies())
    # Parse the response body once per request instead of re-parsing it for every access.
    data = res.json()["data"]
    events = list(data["items"])
    next_cursor = data.get("nextCursor")
    while next_cursor and autopaging:
        params["cursor"] = next_cursor
        res = _utils.get_request(url=url, headers=headers, params=params, cookies=config.get_cookies())
        data = res.json()["data"]
        events.extend(data["items"])
        next_cursor = data.get("nextCursor")
    return EventListResponse(
        {
            "data": {
                "nextCursor": next_cursor,
                "previousCursor": data.get("previousCursor"),
                "items": events,
            }
        }
    )
def _get_datapoints_frame_helper(tag_ids, aggregates, granularity, start=None, end=None, **kwargs):
    """Returns a pandas dataframe of datapoints for the given tag_ids all on the same timestamps.

    This method will automate paging for the user and return all data for the given time period.

    Args:
        tag_ids (list):     The list of tag_ids to retrieve data for. Each tag_id can be either a string containing the
                            tag_id or a dictionary containing the tag_id and a list of specific aggregate functions.

        aggregates (list):  The list of aggregate functions you wish to apply to the data for which you have not
                            specified an aggregate function. Valid aggregate functions are: 'average/avg, max, min,
                            count, sum, interpolation/int, stepinterpolation/step'.

        granularity (str):  The granularity of the aggregate values. Valid entries are : 'day/d, hour/h, minute/m,
                            second/s', or a multiple of these indicated by a number as a prefix e.g. '12hour'.

        start (Union[str, int, datetime]):  Get datapoints after this time. Format is N[timeunit]-ago where timeunit is
                            w,d,h,m,s. E.g. '2d-ago' will get everything that is up to 2 days old. Can also send time in
                            ms since epoch or a datetime object which will be converted to ms since epoch UTC.

        end (Union[str, int, datetime]):    Get datapoints up to this time. Same format as for start.

    Keyword Arguments:
        api_key (str):      Your api-key.

        project (str):      Project name.

    Returns:
        pandas.DataFrame: A pandas dataframe containing the datapoints for the given tag_ids. The datapoints for all
        the tag_ids will all be on the same timestamps.

    Note:
        The ``tag_ids`` parameter can take a list of strings and/or dicts on the following formats::

            Using strings:
                ['<tag_id1>', '<tag_id2>']
            Using dicts:
                [{'tagId': '<tag_id1>', 'aggregates': ['<aggfunc1>', '<aggfunc2>']},
                 {'tagId': '<tag_id2>', 'aggregates': []}]
            Using both:
                ['<tagid1>',
                 {'tagId': '<tag_id2>', 'aggregates': ['<aggfunc1>', '<aggfunc2>']}]

        NOTE(review): despite the docstring above, the paging loop compares ``end > start`` and does integer
        arithmetic on ``start`` — this helper appears to assume the caller has already converted start/end to
        ms-since-epoch integers. TODO confirm against the public wrapper that calls it.
    """
    # NOTE(review): unlike the public functions in this module, this helper does NOT fall back to
    # config.get_config_variables — it assumes the caller resolved api_key/project into kwargs.
    api_key, project = kwargs.get("api_key"), kwargs.get("project")
    url = config.get_base_url(
        api_version=0.4) + "/projects/{}/timeseries/dataframe".format(project)
    # Count the total number of (tag, aggregate) columns requested: tags given as plain strings
    # (or dicts without an 'aggregates' key) use the default `aggregates` list; dicts with an
    # explicit 'aggregates' list contribute that many columns.
    num_aggregates = 0
    for tag in tag_ids:
        if isinstance(tag, str) or tag.get("aggregates") is None:
            num_aggregates += len(aggregates)
        else:
            num_aggregates += len(tag["aggregates"])
    # Split the global row limit evenly across the requested aggregate columns so one page of
    # the response stays within the API limit. NOTE(review): raises ZeroDivisionError if
    # num_aggregates is 0 (e.g. empty tag_ids) — presumably the caller guarantees non-empty input.
    per_tag_limit = int(_constants.LIMIT / num_aggregates)
    body = {
        "items": [{
            "tagId": "{}".format(tag_id)
        } if isinstance(tag_id, str) else {
            "tagId": "{}".format(tag_id["tagId"]),
            "aggregates": tag_id.get("aggregates", [])
        } for tag_id in tag_ids],
        "aggregates": aggregates,
        "granularity": granularity,
        "start": start,
        "end": end,
        "limit": per_tag_limit,
    }
    # The API returns CSV for this endpoint (accept: text/csv), parsed below with pandas.
    headers = {
        "api-key": api_key,
        "content-type": "application/json",
        "accept": "text/csv"
    }
    dataframes = []
    # Page forward: keep requesting while the previous page was full (exactly per_tag_limit rows,
    # meaning more data may remain) and the advancing start has not passed end.
    while (not dataframes or dataframes[-1].shape[0] == per_tag_limit) and body["end"] > body["start"]:
        res = _utils.post_request(url=url, body=body, headers=headers, cookies=config.get_cookies())
        # Decode the CSV payload using the response's declared encoding, falling back to the
        # encoding requests detects from the content.
        dataframes.append(
            pd.read_csv(
                io.StringIO(
                    res.content.decode(res.encoding if res.encoding else res.
                                       apparent_encoding))))
        if dataframes[-1].empty:
            # Empty page: warn and stop paging rather than looping forever on a gap.
            warning = "An interval with no data has been requested ({}, {}).".format(
                body["start"], body["end"])
            warnings.warn(warning)
            break
        # Advance the window to one granularity step past the last returned timestamp
        # (first CSV column is assumed to be the timestamp in ms — TODO confirm).
        latest_timestamp = int(dataframes[-1].iloc[-1, 0])
        body["start"] = latest_timestamp + _utils.granularity_to_ms(
            granularity)
    # Concatenate all pages into one frame with a fresh index.
    return pd.concat(dataframes).reset_index(drop=True)