def grab_data(file_directory, toner_file_list):
    """Collect the most recent reading from each matching RRD file.

    Walks `file_directory`, keeps only entries whose name appears in
    `toner_file_list`, reads the last update of each matching RRD file,
    and hands the flat (name, values, name, values, ...) list off to
    `separate_data` for post-processing.
    """
    # Full paths of only the files we actually care about.
    wanted_paths = [
        os.path.join(file_directory, entry)
        for entry in os.listdir(file_directory)
        if entry in toner_file_list
    ]

    collected = []
    for path in wanted_paths:
        # The LAST update in an RRD file is the current reading.
        reading = rrdtool.lastupdate(path)
        # Strip directory and extension, keeping only the file stem.
        stem = os.path.basename(path).split(".")[0]
        collected.extend([stem, reading.values()])
    return separate_data(collected)
def lastupdate(self) -> RrdLastUpdate:
    """Fetch the most recent update stored in this RRD file.

    Routes the query through the rrdcached daemon when one is
    configured, and wraps the raw rrdtool result in an RrdLastUpdate
    record whose row is built from the data-source values.
    """
    if self.daemon:
        raw = rrdtool.lastupdate(self.rrd_file, "--daemon", self.daemon)
    else:
        raw = rrdtool.lastupdate(self.rrd_file)
    row = self.row._make(raw["ds"].values())
    return RrdLastUpdate(date=raw["date"], row=row)
def get_temp(self):
    """Return the most recent temperature sample from values.rrd."""
    # maybe should use .fetch() to get a 5-min avg?
    latest = rrdtool.lastupdate("values.rrd")
    reading = latest['ds']['temp']
    logging.debug("Controller.get_temp(): %s %s", latest['date'], reading)
    return reading
def GET(self):
    """Serve the latest temperature/humidity reading as a JSON string."""
    latest = rrdtool.lastupdate(rrdPath)
    sensors = latest["ds"]
    payload = {
        # NOTE(review): the naive timestamp is suffixed with "Z", i.e.
        # presumably stored as UTC — confirm against the updater.
        "date": datetime.isoformat(latest["date"]) + "Z",
        "temp": round(sensors["temp"], 1),
        "humidity": round(sensors["humidity"], 0),
    }
    return json.dumps(payload)
def get_lastupdate_rrd(stname):
    """Return last-update info for a station's RRD file.

    Args:
        stname: station name; the file read is <rrdpath>/<stname>.rrd.

    Returns:
        [formatted timestamp string, age as timedelta, alert bool]
        where alert is True when the data is more than one hour old.
    """
    lastupdate = rrdtool.lastupdate(config['rrdpath'] + "/" + stname + ".rrd")
    lastupdate_time = lastupdate["date"]
    # NOTE(review): assumes the RRD timestamp is naive UTC, matching
    # utcnow() — confirm how the RRD is updated.
    lastupdate_dtime = datetime.utcnow() - lastupdate_time
    # Drop sub-second noise from the displayed age.
    lastupdate_dtime -= timedelta(microseconds=lastupdate_dtime.microseconds)
    lastupdate_time_str = lastupdate_time.strftime("%Y-%m-%d %H:%M:%S UTC")
    # BUG FIX: timedelta.seconds ignores whole days, so data several
    # days old could report a small hour count and suppress the alert;
    # total_seconds() gives the true age in hours.
    hours = int(lastupdate_dtime.total_seconds() // (60 * 60))
    alert = hours > 1
    return [lastupdate_time_str, lastupdate_dtime, alert]
def get_last_updated(sensor_name):
    """Read the last update of the sensor's RRD file.

    The rrdtool result is round-tripped through its repr so that the
    embedded ``datetime.datetime(...)`` constructor call becomes a plain
    tuple literal that ast.literal_eval can parse.

    Returns the parsed list on success, or '' when the file could not
    be read.
    """
    ret = ''
    dbname = str(sensor_name).lower().replace(' ', '')
    db_file = '%s/%s.rrd' % (root_folder, dbname)
    try:
        raw = str(rrdtool.lastupdate(db_file))
    except Exception:
        # FIX: logger.warn() is a deprecated alias; use warning().
        app.logger.warning('Could not read last update: %s', sys.exc_info()[1])
    else:
        # Strip the constructor name so the repr evaluates as a literal.
        stripped = raw.replace('datetime.datetime', '')
        ret = ast.literal_eval('[' + stripped + ']')
    return ret
async def pingport(loop: asyncio.AbstractEventLoop, host: str, interval: int, argv):
    """Attempt one TCP connect to host:argv.port and record it in RRD.

    Writes a "flappy"/"down" diagnosis to stderr based on the recorded
    history, updates the host's RRD file with this attempt, then sleeps
    for `interval` seconds.
    """
    started = loop.time()
    elapsed = connected = 0
    # Show the resolved IP alongside the name when they differ.
    if host != IP_CACHE[host]:
        hostname = f"{host}:{argv.port} ({IP_CACHE[host]})"
    else:
        hostname = f"{host}:{argv.port}"
    # Outer timeout is a safety net one second beyond the connect timeout.
    with async_timeout.timeout(argv.timeout + 1):
        try:
            conn = asyncio.open_connection(IP_CACHE[host], argv.port, loop=loop)
            reader, writer = await asyncio.wait_for(conn, timeout=argv.timeout)
        except KeyboardInterrupt:
            print("Ok, boss, lets call it a day.")
            sys.exit(0)
        except Exception as e:
            if argv.verbose:
                print(
                    f"Ping {hostname} failed... ({e.__class__.__name__}: {str(e)!r})"
                )
        else:
            if argv.verbose:
                print(f"Ping {hostname} OK...")
            # BUG FIX: the original called conn.close(), but `conn` is the
            # (already-awaited) coroutine object, so the TCP socket was
            # never closed and leaked. Close the stream writer instead.
            writer.close()
            connected = argv.timeout
            elapsed = loop.time() - started
    db = host2filename(argv, host)
    # Check previous pings
    if not connected:
        last_update = rrdtool.lastupdate(db)
        if 'ds' in last_update and last_update['ds']['connect']:
            # Connected last sample, failed now -> flapping.
            sys.stderr.write(f"{hostname} is flappy\n")
        else:
            # Inspect the last few samples to decide whether it is down.
            last = rrdtool.fetch(db, "MIN", "--start", str(-argv.interval * 6))
            last_connections = [int(c) for c, t in last[2] if c is not None][-5:]
            if len(last_connections) == 5 and not any(last_connections):
                sys.stderr.write(f"{hostname} is down\n")
    # Record this attempt
    rrdtool.update(
        db,
        f"N:{int(connected)}:{elapsed}",
    )
    return await asyncio.sleep(interval)
def __init__(self, name_host, description, path_to_database, file_name,
             start_point, end_point, type_command, height, width):
    """Describe one RRD-backed graph for a host.

    Caches the database's first/last timestamps and its most recent
    update at construction time.
    """
    # Identification and presentation settings.
    self.name_host = name_host
    self.description = description
    self.type_command = type_command
    self.height = height
    self.width = width
    # Where the RRD database lives on disk.
    self.path_to_database = path_to_database
    self.file_name = file_name
    self.file = self.path_to_database + "/" + self.file_name
    # Requested graphing window.
    self.start_point = start_point
    self.end_point = end_point
    # Snapshot of the database's time bounds and latest sample.
    self.first = rrdtool.first(self.file)
    self.last = rrdtool.last(self.file)
    self.lastupdate = rrdtool.lastupdate(self.file)
    # NOTE(review): parse_ds is referenced, not called — presumably a
    # property; confirm against the class definition.
    self.list_ds = self.parse_ds
    self.list_menu = []
def get_last_update_rrd(rrdfile_name):
    """Get the last real value stored in an RRD database.

    Args:
        rrdfile_name (string): path of the RRD file to read.

    Returns:
        dict with keys:
            'last_value': rrdtool's lastupdate result, or None on failure
            'status': human-readable success/error message
    """
    # BUG FIX: db_last_value was only bound inside the try body, so a
    # failed lastupdate() raised NameError when building the result dict.
    db_last_value = None
    try:
        db_last_value = rrdtool.lastupdate(rrdfile_name)
        get_last_update_status_msg = f"success: last value of {rrdfile_name} was found"
    except Exception as e:
        get_last_update_status_msg = (
            f"error: get_last_value_rrd({rrdfile_name}) was not possible: "
            f"{sys.exc_info()[1]} \n{e}"
        )
    get_last_value_msg = dict()
    get_last_value_msg['last_value'] = db_last_value
    get_last_value_msg['status'] = get_last_update_status_msg
    return get_last_value_msg
default=1, type=int, help="Timezone for which the data series has been collected") parser.add_argument("-o", "--output", help="JSON file to be written") parser.add_argument("rrdfile", nargs='*', default=RRDFILE, help="RRD file holding all time series") args = parser.parse_args() # if not set, use default output file if not args.output: args.output = DATAPATH + "/RTdata_lastupdate.json" result = rrdtool.lastupdate(args.rrdfile) # Result comes as dictionary with timestamp and the data as separate # dictionary. For simplicity reasons, we merge it into one single dictionary data = result['ds'] last = result['date'] data['timestamp'] = int(time.mktime(last.timetuple()) * 1000) output = open(args.output, 'w') output.write( json.dumps(data, indent=2, separators=(',', ':'), sort_keys=True, ignore_nan=True))
def GetLastAnalogDataFromDatabase():
    """Return the latest analog readings as a (1, n_channels) float32 array."""
    latest = rrdtool.lastupdate(config.database_path_analog)
    ds = latest['ds']
    # Order follows the configured channel-name list.
    row = []
    for channel in config.channel_names_analog:
        row.append(ds[channel])
    return np.array([row], dtype=np.float32)
def rrd2svg(fname_list, title, start_time=None, end_time=None): all_colors = palettable.tableau.Tableau_10.hex_colors # if len(fname_list) > len(color_list): # logger.warning(f"Skipping {title}, too many files ({fname_list})") # return "" # assert len(fname_list) <= len(color_list), fname_list with tempfile.NamedTemporaryFile() as fd: # pre-assemble names to adjust their lengths name_dict = generate_legend_names(fname_list) # assemble color list color_dict = {} for key, list_ in name_dict.items(): if len(all_colors[: len(list_)]) != len(list_): logger.error(f"Exhausted color list with {title}") color_dict[key] = all_colors[: len(list_)] del all_colors[: len(list_)] # parse input files def_list = itertools.chain.from_iterable( [ assemble_definitions(fname, color_dict[fname], name_dict[fname]) for fname in fname_list ] ) # determine timeframe time_spec = [] if start_time is not None: time_spec.extend(["--start", str(int(start_time.timestamp()))]) if end_time is not None: time_spec.extend(["--end", str(int(end_time.timestamp()))]) # get timestamp of latest data point timestamp_list = [ rrdtool.lastupdate(str(fname))["date"] for fname in fname_list ] if len(set(timestamp_list)) > 1: logger.warning( f"Single plot has multiple last timepoints: {timestamp_list}" ) last_update_ts = max(timestamp_list).strftime(r"%Y-%m-%d %H\:%M\:%S") # generate graph width, height, _ = rrdtool.graph( fd.name, "--imgformat", "PNG", # "SVG", "--title", title, "--width", "400", "--height", "100", "--watermark", f"monipy {__version__}", "--alt-autoscale", "--slope-mode", *time_spec, *def_list, "TEXTALIGN:right", rf"COMMENT:Last update\: {last_update_ts}", ) return b64encode(fd.read()).decode()
def handle_message(message):
    """Reply to the sender with the latest weather reading from the RRD."""
    raw = rrdtool.lastupdate(rrd_path)
    when, temp, hum, pressure = buttonsrrd.parse_rrd_record(raw)
    text = tgdraws.print_day_weather(when, temp, hum, pressure)
    bot.send_message(message.from_user.id, text)
if tday != 0: t += "%dD" % tday if thur != 0: t += "%dH" & thur if tmin != 0: t += "%dD" % tmin if tsec != 0: t += "%dH" & tsec if t == "": t = "0" return t price = {'market.ethusdt.detail': -1, 'market.zecusdt.detail': -1} ws = create_connection("wss://api.huobipro.com/ws") lastupdate = rrdtool.lastupdate("../miner_state.rrd") ds = lastupdate['ds'] miners = int(ds['miners']) query_num = 0 for i in range(miners): trade_key = get_trade_key(ds['m_type' + str(i)]) if trade_key in price.keys() and price[trade_key] == -1: price[trade_key] = 0 query_str = '{"req": "' + trade_key + '", "id": "id12"}' ws.send(query_str) query_num += 1 gpus = int(ds['gpus']) lu_date = lastupdate['date'] now_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') rrdtool.graph('hash.png', '--lazy', '--title', 'The Hash Power', '--width', '500', '--height', '240',
def _last_update(self):
    """Return rrdtool's last-update record for this instance's file."""
    record = rrdtool.lastupdate(self.file)
    return record
def __init__(self, db):
    """Remember the RRD path, snapshot its latest update, and start an
    empty image cache."""
    self._db = db
    self._images = {}
    self._data = rrdtool.lastupdate(db)
def retrieve_lastupdate(self):
    """Fetch the most recent values stored in this object's RRD file."""
    return rrdtool.lastupdate(self.file_path)