async def influx_fetch_limit_1(influx_clt: InfluxDBClient, measurement, is_near_now: bool, database=None):
    """Fetch the single newest (or oldest) point of *measurement*.

    Note: ``InfluxDBClient.query`` is a blocking call, so despite being a
    coroutine this function never actually yields to the event loop.

    Args:
        influx_clt: connected InfluxDB client.
        measurement: measurement name; interpolated directly into the
            query string, so it must come from trusted input.
        is_near_now: True -> newest point (ORDER BY time DESC),
            False -> oldest point (ORDER BY time ASC).
        database: optional database name. It was previously accepted but
            silently ignored; it is now forwarded to the client so the
            query runs against the caller-selected database.

    Returns:
        The point as a dict, or None when the measurement is empty.
    """
    order = "DESC" if is_near_now else "ASC"
    sql = f"SELECT * FROM {measurement} ORDER BY time {order} LIMIT 1"
    # Fix: forward `database` (it was an unused parameter before).
    rs: ResultSet = influx_clt.query(sql, database=database)
    points = list(rs.get_points(measurement))
    if len(points) == 1:
        return points[0]
    # No matching point: be explicit rather than falling off the end.
    return None
def getDataFromDb(slurm_start, slurm_end, user, database):
    """Query InfluxDB for the (value, jobid) samples of *user* in a window.

    Args:
        slurm_start: window start, in whatever form ``encodeTime`` accepts.
        slurm_end: window end, same form as ``slurm_start``.
        user: user name to filter on (bound as a query parameter).
        database: measurement to read from. InfluxQL cannot bind
            identifiers, so this is interpolated into the query string and
            must come from trusted configuration, not user input.

    Returns:
        The ResultSet returned by ``InfluxDBClient.query``.
    """
    start_date = encodeTime(slurm_start)
    end_date = encodeTime(slurm_end)
    client = InfluxDBClient(config.db_host, config.db_port,
                            config.db_user, config.db_pass)
    # Fix: bind caller-supplied values instead of concatenating them into
    # the query string (avoids InfluxQL injection via `user` or the
    # encoded timestamps).
    qry_str = (f'SELECT "value", "jobid" FROM {database} '
               'WHERE time > $start AND time < $end AND "user" = $user')
    try:
        return client.query(qry_str, bind_params={
            "start": start_date,
            "end": end_date,
            "user": user,
        })
    finally:
        # Fix: the client was previously leaked; always release it.
        client.close()
def update_data(client: InfluxDBClient, measurement: str, tag_info: dict, time: str, fields: dict, keep=True):
    """Read-modify-write a single InfluxDB point.

    Fetches the point in *measurement* identified by the exact *time* and
    tag set *tag_info*, overlays *fields* on its existing field values
    (existing keys are overwritten, new keys are added), and writes the
    merged point back with the same time and tags.

    Point sample returned by the lookup query::

        {
            'time': '2021-01-23T15:58:30.750000Z',
            'acceleration': 0.005427043900637734,
            'gyroscope': 0.009184479105195894,
            'latitude': 31.143920631980222,
            'longitude': 121.36646195555223,
            'velocity': 0.0
        }

    Args:
        client: connected InfluxDB client.
        measurement: measurement name; interpolated into the query, so it
            must be trusted input.
        tag_info: tag key/value pairs identifying the point; also written
            back as the updated point's tags.
        time: exact timestamp of the point to update.
        fields: field values to set or overwrite.
        keep: must remain True; removing the old point (keep=False) is
            not implemented.

    Raises:
        NotImplementedError: if keep is False.
        ValueError: if no point matches the query (a subclass of
            Exception, so existing ``except Exception`` callers still
            catch it — previously a bare ``Exception`` was raised with
            the reason only printed).
    """
    if not keep:
        raise NotImplementedError

    # Tags are AND-ed so only the one point with this exact time + tag
    # combination matches.
    sql_query = f"SELECT *::field FROM {measurement} WHERE time='{time}'"
    for key, value in tag_info.items():
        sql_query += f" and {key}='{value}'"

    results = client.query(sql_query)
    points = list(results.get_points())
    if not points:
        raise ValueError("sql query returns empty result set")
    # Should be unique for a given time + tags; if several match we keep
    # the original behavior of updating the first one.
    point = points[0]

    # Merge: start from the old field values (dropping the 'time' column
    # the query returns), then overlay the requested updates. Keys in
    # `fields` that the old point lacked are simply added.
    updated_fields = {k: v for k, v in point.items() if k != 'time'}
    updated_fields.update(fields)

    body = [{
        "measurement": measurement,
        "time": time,
        "tags": tag_info,
        "fields": updated_fields,
    }]
    client.write_points(body)
database=influxdb) groupings = {"other.other": []} for iserie in influxclient.get_list_series(influxdb): for ilist in iserie["tags"]: if (not ilist["key"].startswith("cpu.trace")): tags = split(split(ilist["key"], sep=",")[0], sep=".") if (len(tags) >= 2): if (tags[0] + "." + tags[1]) in groupings: groupings[tags[0] + "." + tags[1]].append(ilist["key"]) else: groupings[tags[0] + "." + tags[1]] = [ilist["key"]] else: groupings["other.other"].append(ilist["key"]) for igroupname in groupings: figs = [] for iserie in groupings[igroupname]: currname = split(iserie, sep=',')[0] currpid = split(split(iserie, sep="pid=")[1], sep=",")[0] icurve = SingleFigure(currname + "." + currpid) iset = influxclient.query("SELECT * FROM \"" + currname + "\" ORDER BY time") for i in iset.get_points(): if (i["pid"] == currpid): icurve.add( float( long( timeparser.parse(i["time"]).strftime("%s%f")[:-3]) - hbshift) / 1000., i["value"]) figs.append(icurve) plotGraph(igroupname, hbdata, figs)