def takesnap(path='/var/www/webcam/images/', filename='current.jpg', quality=75, width=None, timeout=2000):
    """
    Capture a still image with raspistill and write a timestamp sidecar file.

    Args:
        path: directory the image is written into (must end with a separator).
        filename: image file name within path.
        quality: JPEG quality (0-100), passed to raspistill -q.
        width: optional pixel width; height is derived at a 4:3 aspect ratio.
            When omitted, the sensor's full 2592x1944 resolution is reported.
        timeout: raspistill warm-up time in milliseconds (-t).

    Returns:
        dict with elapsedtime (seconds), imagepath, timestamp, timestamppath,
        imageheight, imagewidth, and imagesize (bytes; 0 if the file is missing).
    """
    import subprocess
    import os
    from iiutilities.datalib import timestringtoseconds
    from iiutilities.datalib import gettimestring

    imagepath = path + filename
    timestamp = gettimestring()
    timestamppath = imagepath + '.timestamp'

    time1 = gettimestring()
    if width:
        # Force a 4:3 aspect ratio to match the camera sensor.
        height = int(float(width) / 1.33333)
        subprocess.call(['raspistill', '-q', str(quality), '--width', str(width),
                         '--height', str(height), '-t', str(timeout), '-o', imagepath])
    else:
        # Full-resolution capture; report the sensor's native dimensions.
        width = 2592
        height = 1944
        subprocess.call(['raspistill', '-q', str(quality), '-t', str(timeout), '-o', imagepath])

    # Sidecar file records when the capture was initiated.
    # BUGFIX: removed redundant f.close() inside the with-block.
    with open(timestamppath, 'w') as f:
        f.write(timestamp)

    time2 = gettimestring()
    elapsedtime = timestringtoseconds(time2) - timestringtoseconds(time1)

    try:
        imagesize = os.path.getsize(imagepath)
    except OSError:
        # BUGFIX: narrowed from bare except. raspistill failed or the file was
        # never written; report zero size rather than crashing.
        imagesize = 0

    return {'elapsedtime': elapsedtime, 'imagepath': imagepath, 'timestamp': timestamp,
            'timestamppath': timestamppath, 'imageheight': height, 'imagewidth': width,
            'imagesize': imagesize}
def copy_log_to_archive(log_name, **kwargs):
    """
    Copy a log table out of the logs database into its own archive database file.

    kwargs:
        archive_name: target file name; defaults to log_name + timestamp + extension.
        force_extension: when True, ensure archive_name ends with `extension`.
        extension: archive file extension (default '.db').
        directory: directory the archive file is written into.
    """
    from iiutilities.datalib import gettimestring

    settings = {
        'archive_name': None,
        'force_extension': True,
        'extension': '.db',
        'directory': dirs.archive
    }
    settings.update(kwargs)

    if not settings['archive_name']:
        settings['archive_name'] = log_name + gettimestring() + settings['extension']

    # BUGFIX: previously referenced nonexistent 'force_suffix'/'suffix' keys,
    # which raised KeyError whenever an archive_name was supplied. Now honors
    # the declared force_extension/extension settings.
    if settings['force_extension'] and not settings['archive_name'].endswith(settings['extension']):
        settings['archive_name'] += settings['extension']

    archive_db = dblib.sqliteDatabase(settings['directory'] + settings['archive_name'])
    logs_db = dblib.sqliteDatabase(dirs.dbs.log)

    existing_table = logs_db.read_table(log_name)
    existing_schema = logs_db.get_schema(log_name)

    # Queue all operations and execute atomically at the end.
    archive_db.create_table('data', existing_schema, queue=True)
    archive_db.insert('data', existing_table, queue=True)
    archive_db.create_table('info', schema.data_items, queue=True)
    archive_db.insert('info', {'valuename': 'created', 'value': gettimestring()}, queue=True)
    archive_db.insert('info', {'valuename': 'name', 'value': log_name}, queue=True)
    archive_db.execute_queue()
def takesnap(path='/var/www/webcam/images/', filename='current.jpg', quality=75, width=None, timeout=2000):
    """
    Capture a still image with raspistill and write a timestamp sidecar file.

    Args:
        path: directory the image is written into (must end with a separator).
        filename: image file name within path.
        quality: JPEG quality (0-100), passed to raspistill -q.
        width: optional pixel width; height is derived at a 4:3 aspect ratio.
            When omitted, the sensor's full 2592x1944 resolution is reported.
        timeout: raspistill warm-up time in milliseconds (-t).

    Returns:
        dict with elapsedtime (seconds), imagepath, timestamp, timestamppath,
        imageheight, imagewidth, and imagesize (bytes; 0 if the file is missing).
    """
    import subprocess
    import os
    from iiutilities.datalib import timestringtoseconds
    from iiutilities.datalib import gettimestring

    imagepath = path + filename
    timestamp = gettimestring()
    timestamppath = imagepath + '.timestamp'

    time1 = gettimestring()
    if width:
        # Force a 4:3 aspect ratio to match the camera sensor.
        height = int(float(width) / 1.33333)
        subprocess.call([
            'raspistill', '-q', str(quality), '--width', str(width),
            '--height', str(height), '-t', str(timeout), '-o', imagepath
        ])
    else:
        # Full-resolution capture; report the sensor's native dimensions.
        width = 2592
        height = 1944
        subprocess.call([
            'raspistill', '-q', str(quality), '-t', str(timeout), '-o', imagepath
        ])

    # Sidecar file records when the capture was initiated.
    # BUGFIX: removed redundant f.close() inside the with-block.
    with open(timestamppath, 'w') as f:
        f.write(timestamp)

    time2 = gettimestring()
    elapsedtime = timestringtoseconds(time2) - timestringtoseconds(time1)

    try:
        imagesize = os.path.getsize(imagepath)
    except OSError:
        # BUGFIX: narrowed from bare except. raspistill failed or the file was
        # never written; report zero size rather than crashing.
        imagesize = 0

    return {
        'elapsedtime': elapsedtime,
        'imagepath': imagepath,
        'timestamp': timestamp,
        'timestamppath': timestamppath,
        'imageheight': height,
        'imagewidth': width,
        'imagesize': imagesize
    }
def readU6Counter(counternumber=0):
    """
    We need to see what state the counter IO is in. If it's already a counter,
    we just read it. If it is not yet a counter, we instantiate it and return
    count of 0.

    Also, this is not safe when other timer and counter inputs are used. For
    now, this is just one singular counter.

    Returns a dict with 'readtime' and, on success, 'value'.
    """
    import u6
    from iiutilities.datalib import gettimestring

    device = u6.U6()
    result = {'readtime': gettimestring()}

    # When passed no argument, just reads.
    currentconfig = device.configIO()
    try:
        device.getCalibrationData()
        if currentconfig['Counter0Enabled'] or currentconfig['NumberTimersEnabled'] != 1:
            # Not yet configured as a counter: set it up now.
            device.configIO(EnableCounter0=True, NumberTimersEnabled=1)
            device.getFeedback(u6.Timer0Config(TimerMode=6, Value=1))
        result['value'] = device.getFeedback(u6.Timer0())[0]
    except Exception:
        # BUGFIX: narrowed from a bare except so KeyboardInterrupt/SystemExit
        # are not swallowed. Hardware failures still yield a value-less result.
        pass

    device.close()
    return result
def readU6Counter(counternumber=0):
    """
    We need to see what state the counter IO is in. If it's already a counter,
    we just read it. If it is not yet a counter, we instantiate it and return
    count of 0.

    Also, this is not safe when other timer and counter inputs are used. For
    now, this is just one singular counter.

    Returns a dict with 'readtime' and, on success, 'value'.
    """
    import u6
    from iiutilities.datalib import gettimestring

    device = u6.U6()
    result = {'readtime': gettimestring()}

    # When passed no argument, just reads.
    currentconfig = device.configIO()
    try:
        device.getCalibrationData()
        if currentconfig['Counter0Enabled'] or currentconfig['NumberTimersEnabled'] != 1:
            # Not yet configured as a counter: set it up now.
            device.configIO(EnableCounter0=True, NumberTimersEnabled=1)
            device.getFeedback(u6.Timer0Config(TimerMode=6, Value=1))
        result['value'] = device.getFeedback(u6.Timer0())[0]
    except Exception:
        # BUGFIX: narrowed from a bare except so KeyboardInterrupt/SystemExit
        # are not swallowed. Hardware failures still yield a value-less result.
        pass

    device.close()
    return result
def rebuildstockdb(tablelist=None):
    """
    Drop and recreate stock database tables, optionally seeding demo entries.

    Args:
        tablelist: list of table names to rebuild; defaults to ['stock'].
    """
    from iiutilities.dblib import sqlitemultquery
    from inventory.inventorylib import sysvars
    from iiutilities import datalib

    dbpath = sysvars.dirs.dbs.stock
    thetime = datalib.gettimestring()

    if not tablelist:
        tablelist = ['stock']

    # Create databases entries or leave them empty?
    addentries = True

    querylist = []
    runquery = False

    ### Stock
    table = 'stock'
    if table in tablelist:
        runquery = True
        querylist.append('drop table if exists ' + table)
        # BUGFIX: the `type` column default was previously the unquoted token
        # `parts`, which is not a valid SQLite DEFAULT literal; it is now quoted.
        # NOTE(review): `inventory integer text default 'std'` has a double type
        # name — SQLite accepts it (type names are freeform) so it is preserved.
        querylist.append(
            "create table " + table + " (partid text primary key, status text default 'active',"
            "description text default '', qtyunit text default 'each', qtystock real default 0, qtyonorder real default 0,"
            "qtyreserved real default 0, qtyavailable real default 0, qtystatus text default '', cost real default 0,"
            "stockcost real default 0, onordercost real default 0, totalcost real default 0,"
            "stockprice real default 0, onorderprice real default 0, totalprice real default 0,"
            "costqty real default 1, costqtyunit text default 'each', supplier text default '', "
            "supplierpart text default '', manufacturer text default '', manufacturerpart text default '', "
            "notes text default '', partdata text default '', datasheet text default '', inuse integer default 1, datecreated text default '', "
            "createdby text default '', inventory integer text default 'std', minqty text default 0, type text default 'parts',"
            "marginmethod text default 'type', margin real default 0, price real default 0 )")
        if addentries:
            querylist.append("insert into " + table + " values ('A001', 'active', 'WIEGMANN 12x12x6', 'each', 1, 1, 1, "
                             "0, '', 149.97, 0,0,0,0,0,0,1, 'each', "
                             "'Cascade Controls', 'N412121206C', 'Wiegmann', 'N412121206C', '', '', '', 1, '" + thetime + "', 'CCR', "
                             "'std', 0, 'parts', 'type', 0, 0)")
            querylist.append("insert into " + table + " values ('A002', 'active', 'WIEGMANN 16x16x6', 'each', 1, 1, 2, "
                             "0, '', 172, 0,0,0,0,0,0,1, 'each', "
                             "'Cascade Controls', 'N412121206C', 'Wiegmann', 'N412121206C', '', '', '', 1, '" + thetime + "', 'CCR', "
                             "'std', 0, 'parts', 'type', 0, 0)")
            querylist.append("insert into " + table + " values ('A003', 'active', 'WIEGMANN 20x20x6', 'each', 1, 1, 2, "
                             "0, '', 204, 0,0,0,0,0,0,1, 'each', "
                             "'Cascade Controls', 'N412121206C', 'Wiegmann', 'N412121206C', '', '', '', 1, '" + thetime + "', 'CCR', "
                             "'std', 0, 'parts', 'type', 0, 0)")
            querylist.append("insert into " + table + " values ('A004', 'active', 'WIEGMANN 20x24x6', 'each', 1, 1, 2, "
                             "0, '', 233, 0,0,0,0,0,0,1, 'each', "
                             "'Cascade Controls', 'N412121206C', 'Wiegmann', 'N412121206C', '', '', '', 1, '" + thetime + "', 'CCR', "
                             "'std', 0, 'parts', 'type', 0, 0)")
            querylist.append("insert into " + table + " values ('L001', 'active', 'Shop Labor', 'hour', 1, 1, 2, "
                             "0, '', 42, 0,0,0,0,0,0,1, 'hour', "
                             "'', '', '', '', '', '', '', 1, '" + thetime + "', 'CCR', "
                             "'std', 0, 'labor', 'type', 0, 0)")

    print(querylist)
    print(dbpath)
    sqlitemultquery(dbpath, querylist)
def copy_log_to_archive(log_name, **kwargs):
    """
    Copy a log table out of the logs database into its own archive database file.

    kwargs:
        archive_name: target file name; defaults to log_name + timestamp + extension.
        force_extension: when True, ensure archive_name ends with `extension`.
        extension: archive file extension (default '.db').
        directory: directory the archive file is written into.
    """
    from iiutilities.datalib import gettimestring

    settings = {
        'archive_name': None,
        'force_extension': True,
        'extension': '.db',
        'directory': dirs.archive
    }
    settings.update(kwargs)

    if not settings['archive_name']:
        settings['archive_name'] = log_name + gettimestring() + settings['extension']

    # BUGFIX: previously referenced nonexistent 'force_suffix'/'suffix' keys,
    # which raised KeyError whenever an archive_name was supplied. Now honors
    # the declared force_extension/extension settings.
    if settings['force_extension'] and not settings['archive_name'].endswith(settings['extension']):
        settings['archive_name'] += settings['extension']

    archive_db = dblib.sqliteDatabase(settings['directory'] + settings['archive_name'])
    logs_db = dblib.sqliteDatabase(dirs.dbs.log)

    existing_table = logs_db.read_table(log_name)
    existing_schema = logs_db.get_schema(log_name)

    # Queue all operations and execute atomically at the end.
    archive_db.create_table('data', existing_schema, queue=True)
    archive_db.insert('data', existing_table, queue=True)
    archive_db.create_table('info', schema.data_items, queue=True)
    archive_db.insert('info', {'valuename': 'created', 'value': gettimestring()}, queue=True)
    archive_db.insert('info', {'valuename': 'name', 'value': log_name}, queue=True)
    archive_db.execute_queue()
def addversionentry(database_path, tablename, entrydict):
    """
    Record a repo version entry, creating the versions table on first use.

    entrydict must provide 'repo', 'headcommithexsha' and 'headcommitdate'.
    """
    import iiutilities.dblib as dblib
    from iiutilities.datalib import gettimestring

    version_db = dblib.sqliteDatabase(database_path)

    # Lazily create the table the first time a version is recorded.
    if tablename not in version_db.get_table_names():
        table_schema = dblib.sqliteTableSchema([
            {'name': 'item', 'primary': True},
            {'name': 'version'},
            {'name': 'versiontime'},
            {'name': 'updatetime'},
        ])
        version_db.create_table(tablename, table_schema)

    version_db.insert(tablename, {
        'item': entrydict['repo'],
        'version': entrydict['headcommithexsha'],
        'versiontime': gettimestring(entrydict['headcommitdate']),
        'updatetime': gettimestring(),
    })
def runpicontrol(runonce=False):
    """
    Main pi control loop: while picontrolenabled is set, process all channels,
    then sleep for picontrolfreq seconds.

    Args:
        runonce: when True, perform exactly one pass and return without sleeping.
    """
    from time import sleep
    from iiutilities import utility
    from cupid import pilib
    from iiutilities import datalib

    control_db = pilib.cupidDatabase(pilib.dirs.dbs.control)
    log_db = pilib.cupidDatabase(pilib.dirs.dbs.log)
    system_db = pilib.cupidDatabase(pilib.dirs.dbs.system)

    systemstatus = system_db.read_table_row('systemstatus')[0]

    while systemstatus['picontrolenabled']:
        utility.log(pilib.dirs.logs.system, 'Running picontrol', 3, pilib.loglevels.system)
        utility.log(pilib.dirs.logs.control, 'Running picontrol', 3, pilib.loglevels.control)

        # Set poll date. While intuitively we might want to set this after the
        # poll is complete, if we error below, we will know from this stamp when
        # it barfed. This is arguably more valuable than 'last time we didn't barf'.
        system_db.set_single_value('systemstatus', 'lastpicontrolpoll', datalib.gettimestring())

        channels = control_db.read_table('channels')

        # Cycle through channels and set action based on setpoint and algorithm
        # if set to auto mode.
        log_tablenames = log_db.get_table_names()
        for channel in channels:
            process_channel(channel=channel)

        # We do this system status again to refresh settings
        systemstatus = system_db.read_table_row('systemstatus')[0]

        # Note that these are also processed in cupiddaemon to catch things like
        # whether this script is running.

        if runonce:
            break

        utility.log(pilib.dirs.logs.system, 'Picontrol Sleeping for ' + str(systemstatus['picontrolfreq']), 2,
                    pilib.loglevels.system)
        # BUGFIX: the control-log entry previously used loglevels.system;
        # it now correctly gates on the control log level.
        utility.log(pilib.dirs.logs.control, 'Picontrol Sleeping for ' + str(systemstatus['picontrolfreq']), 2,
                    pilib.loglevels.control)
        print('sleeping')
        sleep(systemstatus['picontrolfreq'])
        print('done sleeping')

    utility.log(pilib.dirs.logs.system, 'picontrol not enabled. exiting.', 1, pilib.loglevels.system)
def handle_unit_tests(**kwargs):
    """
    Run the cupid unit test suite and queue a rate-limited email notification
    when errors or failures occur.

    kwargs:
        notifications: list of notification config rows; the row with
            item == 'unittests' (and enabled truthy) controls email delivery.
    """
    settings = {'notifications': []}
    settings.update(kwargs)

    from iiutilities import utility
    import cupidunittests
    import pilib
    from iiutilities import datalib

    system_database = pilib.dbs.system
    notifications_database = pilib.dbs.notifications

    unittestresults = cupidunittests.runalltests()

    if unittestresults['totalerrorcount'] > 0 or unittestresults['totalfailurecount'] > 0:
        unitnotify = next((item for item in settings['notifications']
                           if item['item'] == 'unittests' and int(item['enabled'])), None)
        if unitnotify:
            options = datalib.parseoptions(unitnotify['options'])
            if 'type' in options:
                if options['type'] == 'email' and 'email' in options:
                    currenttime = datalib.gettimestring()
                    lastnotificationtime = unitnotify['lastnotification']

                    # Default rate limit between notifications, in seconds.
                    frequency = 600
                    if 'frequency' in options:
                        try:
                            frequency = float(options['frequency'])
                        except (TypeError, ValueError):
                            pass

                    elapsedtime = datalib.timestringtoseconds(currenttime) - datalib.timestringtoseconds(
                        lastnotificationtime)
                    if elapsedtime > frequency:
                        # Queue a message indicating we had to restart the systemstatus daemon
                        message = 'CuPID has failed unittests. Details follow:\r\n\r\n'
                        message += unittestresults['stringresult'].replace('\'', '"')
                        # NOTE(review): hostname is assumed to be a module-level
                        # global — confirm it is defined in this module.
                        subject = 'CuPID : ' + hostname + ' : unittests'
                        # BUGFIX: previously referenced undefined name
                        # 'notification_database' (NameError at runtime).
                        notifications_database.insert('queuednotifications',
                                                      {'type': 'email', 'message': message,
                                                       'options': 'email:' + options['email'] + ',subject:' + subject,
                                                       'queuedtime': currenttime})
                        system_database.set_single_value('notifications', 'lastnotification', currenttime,
                                                         condition="item='unittests'")
def test_post_client_data():
    """Exercise post_client_data with a single timestamped Decimal test value."""
    from iiutilities.datalib import gettimestring
    from decimal import Decimal

    payload = {
        'test_value': {
            'time': gettimestring(),
            'value': Decimal(99),
        }
    }
    return post_client_data(post_data=payload)
def log(logfile, message, reqloglevel=1, currloglevel=1):
    """
    Append a timestamped message to logfile when the current log level meets
    the required level; echo to stdout at level >= 9.

    Args:
        logfile: path to the log file, or None/'' to skip file logging.
        message: text to log (a timestamp prefix and newline are added).
        reqloglevel: minimum level required for this message to be written.
        currloglevel: the currently configured log level.
    """
    # Allow passing None for logfile
    if logfile:
        from iiutilities.datalib import gettimestring
        if currloglevel >= reqloglevel:
            # BUGFIX: context manager guarantees the handle is closed even if
            # the write raises (previously manual open/close).
            with open(logfile, 'a') as log_file:
                log_file.writelines([gettimestring() + ' : ' + message + '\n'])
        if currloglevel >= 9:
            # Very verbose levels also echo to the console.
            print(message)
    else:
        print('NO LOG FILE')
def log(logfile, message, reqloglevel=1, currloglevel=1):
    """
    Append a timestamped message to logfile when the current log level meets
    the required level; echo to stdout at level >= 9.

    Args:
        logfile: path to the log file, or None/'' to skip file logging.
        message: text to log (a timestamp prefix and newline are added).
        reqloglevel: minimum level required for this message to be written.
        currloglevel: the currently configured log level.
    """
    # Allow passing None for logfile
    if logfile:
        from iiutilities.datalib import gettimestring
        if currloglevel >= reqloglevel:
            # BUGFIX: context manager guarantees the handle is closed even if
            # the write raises (previously manual open/close).
            with open(logfile, 'a') as log_file:
                log_file.writelines([gettimestring() + ' : ' + message + '\n'])
        if currloglevel >= 9:
            # Very verbose levels also echo to the console.
            print(message)
    else:
        print('NO LOG FILE')
def lograwmessages(message):
    """
    Strip NUL bytes from a raw mote message and append it, timestamped, to the
    motes 'read' table, trimming the table to 1000 rows. BEGIN/END framing
    markers are not logged.
    """
    from cupid.pilib import dirs
    from iiutilities.datalib import gettimestring
    from iiutilities.dblib import sqliteinsertsingle, size_sqlite_table

    cleaned = str(message).replace('\x00', '').strip()
    if cleaned in ['END RECEIVED', 'BEGIN RECEIVED']:
        # Framing markers carry no payload; skip them.
        return

    sqliteinsertsingle(dirs.dbs.motes, 'read', [gettimestring(), cleaned])
    size_sqlite_table(dirs.dbs.motes, 'read', size=1000)
def updateiwstatus():
    """Fetch current wireless (iw) status and persist it, timestamped, to the system db."""
    from iiutilities.netfun import getiwstatus
    from iiutilities.datalib import gettimestring
    from cupid.pilib import dirs
    from iiutilities.dblib import insertstringdicttablelist

    status = getiwstatus()
    status['updatetime'] = gettimestring()

    # Replace the single-row iwstatus table wholesale.
    insertstringdicttablelist(dirs.dbs.system, 'iwstatus', [status], droptable=True)
def updatehardwareinfo(databasename='systemdb'):
    """
    Parse /proc/cpuinfo, store the key/value pairs as JSON in the versions
    table, and derive the Pi model name/memory from the Revision field when
    present.

    Args:
        databasename: logical database name resolved via pilib.dbnametopath.

    Returns:
        JSON string of the parsed cpuinfo dict (returned even if no database
        path could be resolved).
    """
    from subprocess import check_output
    from cupid import pilib
    from iiutilities import datalib
    from iiutilities import dblib
    import json

    data = check_output(['cat', '/proc/cpuinfo']).decode('utf-8')
    items = data.split('\n')

    hw_dict = {}
    for item in items:
        try:
            hw_dict[item.split(':')[0].strip()] = item.split(':')[1].strip()
        except IndexError:
            # BUGFIX: narrowed from bare except. Blank/malformed lines have no
            # ':' separator and are simply skipped.
            pass

    dictstring = json.dumps(hw_dict)

    dbpath = None
    try:
        dbpath = pilib.dbnametopath(databasename)
    except Exception:
        # BUGFIX: narrowed from bare except. Unresolvable database names just
        # skip the writes below.
        pass

    if dbpath:
        time = datalib.gettimestring()
        dblib.sqliteinsertsingle(
            dbpath, 'versions', ['cpuinfo', dictstring, time, ''],
            ['item', 'version', 'updatetime', 'versiontime'],
        )

    if 'Revision' in hw_dict and dbpath:
        versiondetail = piversiontoversionname(hw_dict['Revision'])
        dblib.sqliteinsertsingle(
            dbpath, 'versions', ['versionname', versiondetail['versionname'], time, ''],
            ['item', 'version', 'updatetime', 'versiontime'],
        )
        dblib.sqliteinsertsingle(
            dbpath, 'versions', ['memory', versiondetail['memory'], time, ''],
            ['item', 'version', 'updatetime', 'versiontime'],
        )

    return dictstring
def updateiwstatus():
    """Fetch current wireless (iw) status and persist it, timestamped, to the system db."""
    from iiutilities.netfun import getiwstatus
    from iiutilities.datalib import gettimestring
    from cupid.pilib import dirs
    from iiutilities.dblib import insertstringdicttablelist

    iw_status = getiwstatus()
    iw_status['updatetime'] = gettimestring()

    # droptable=True: the iwstatus table always holds exactly the latest reading.
    insertstringdicttablelist(dirs.dbs.system, 'iwstatus', [iw_status], droptable=True)
def addversionentry(database_path, tablename, entrydict):
    """
    Record a repo version entry, creating the versions table on first use.

    entrydict must provide 'repo', 'headcommithexsha' and 'headcommitdate'.
    """
    import iiutilities.dblib as dblib
    from iiutilities.datalib import gettimestring

    db = dblib.sqliteDatabase(database_path)

    # Create the table on first use.
    if tablename not in db.get_table_names():
        db.create_table(tablename, dblib.sqliteTableSchema([
            {'name': 'item', 'primary': True},
            {'name': 'version'},
            {'name': 'versiontime'},
            {'name': 'updatetime'},
        ]))

    entry = {
        'item': entrydict['repo'],
        'version': entrydict['headcommithexsha'],
        'versiontime': gettimestring(entrydict['headcommitdate']),
        'updatetime': gettimestring(),
    }
    db.insert(tablename, entry)
def updatehamachistatus():
    """
    Poll hamachi for network status and write the timestamped results to the
    system database. A hamachi failure is best-effort: nothing is written.
    """
    from pilib import dirs
    from iiutilities.dblib import insertstringdicttablelist
    from iiutilities import netfun
    from iiutilities.datalib import gettimestring

    try:
        hamdicts = netfun.gethamachidata()
    except Exception:
        # BUGFIX: narrowed from a bare except; hamachi being unavailable still
        # skips the update silently, but interrupts are no longer swallowed.
        pass
    else:
        # Stamp every network entry with the poll time.
        # (Loop variable renamed: it previously shadowed the dict builtin.)
        for index, entry in enumerate(hamdicts):
            hamdicts[index]['updatetime'] = gettimestring()
        # put into database
        insertstringdicttablelist(dirs.dbs.system, 'hamachistatus', hamdicts, droptable=True)
def runsendhandler(ser):
    """
    Pop the oldest queued mote message, write it to the serial port, and on
    success move it from the 'queued' table to the 'sent' table (trimmed to
    1000 rows). Failures are logged and leave the queue untouched.
    """
    from iiutilities import dblib, datalib
    from iiutilities import utility

    motes_db = pilib.dbs.motes

    # Fetch the oldest queued message; bail out if the queue is empty/unreadable.
    try:
        pending = motes_db.get_first_time_row('queued', 'queuedtime')
    except:
        import traceback
        utility.log(pilib.dirs.logs.serial,
                    'Error getting queued message : {}'.format(traceback.format_exc()),
                    1, pilib.loglevels.serial)
        return

    # Attempt delivery over the serial port.
    try:
        utility.log(pilib.dirs.logs.serial,
                    'Sending message : {}'.format(pending['message']),
                    3, pilib.loglevels.serial)
        ser.write(pending['message'].encode())
    except:
        utility.log(pilib.dirs.logs.serial, 'Error sending message', 1, 1)
        return

    utility.log(pilib.dirs.logs.serial, 'Success sending message', 1, 1)

    # Delivered: remove from the queue and record in the sent table.
    delete_query = dblib.makedeletesinglevaluequery(
        'queued',
        {'conditionnames': ['queuedtime', 'message'],
         'conditionvalues': [pending['queuedtime'], pending['message']]})
    dblib.sqlitequery(pilib.dirs.dbs.motes, delete_query)
    dblib.sqliteinsertsingle(pilib.dirs.dbs.motes, 'sent',
                             [pending['queuedtime'], datalib.gettimestring(), pending['message']])
    dblib.size_sqlite_table(pilib.dirs.dbs.motes, 'sent', 1000)
    return
def processnotification(notification):
    """
    Attempt to deliver a queued notification (currently email only).

    Returns a dict with 'status' and 'senttime'.
    NOTE(review): 'status' is set to 0 only when options contain no email
    address — behavior preserved as-is; confirm the intended semantics.
    """
    from iiutilities import datalib
    from iiutilities import utility
    from iiutilities.netfun import pingstatus

    senttime = datalib.gettimestring()
    result = {'status': 1, 'senttime': senttime}

    if notification['type'] != 'email':
        return result

    # Check to make sure we're online (pingstatus status of 0 is treated as ok).
    pingresult = pingstatus()
    if pingresult['status']:
        utility.log(dirs.logs.notifications,
                    'WAN access does not appear to be ok. Status is: ' + str(pingresult['status']))
        return result

    utility.log(dirs.logs.notifications, 'WAN access is ok, so processing notification')

    options = datalib.parseoptions(notification['options'])
    subject = options['subject'] if 'subject' in options else 'CuPID Notification Email'

    # Append queue/send timestamps to the outgoing body.
    body = notification['message']
    body += '\r\n\r\n'
    body += 'Message queued:\t ' + notification['queuedtime'] + '\r\n'
    body += 'Message sent:\t ' + senttime + '\r\n'

    if 'email' in options:
        try:
            actionmail = utility.gmail(message=body, subject=subject, recipient=options['email'])
            actionmail.send()
        except:
            pass
    else:
        result['status'] = 0

    return result
def updatehamachistatus():
    """
    Poll hamachi for network status and write the timestamped results to the
    system database. A hamachi failure is best-effort: nothing is written.
    """
    from pilib import dirs
    from iiutilities.dblib import insertstringdicttablelist
    from iiutilities import netfun
    from iiutilities.datalib import gettimestring

    try:
        hamdicts = netfun.gethamachidata()
    except Exception:
        # BUGFIX: narrowed from a bare except; hamachi being unavailable still
        # skips the update silently, but interrupts are no longer swallowed.
        pass
    else:
        # Stamp every network entry with the poll time.
        # (Loop variable renamed: it previously shadowed the dict builtin.)
        for index, entry in enumerate(hamdicts):
            hamdicts[index]['updatetime'] = gettimestring()
        # put into database
        insertstringdicttablelist(dirs.dbs.system, 'hamachistatus', hamdicts, droptable=True)
def readU6Analog(positiveChannel, resolutionIndex=0, gainIndex=0, settlingFactor=0, differential=False):
    """
    Name: U6.getAIN(positiveChannel, resolutionIndex = 0, gainIndex = 0,
                    settlingFactor = 0, differential = False)
    Args: positiveChannel, the positive channel to read from
          resolutionIndex, the resolution index.  0 = default, 1-8 = high-speed
                           ADC, 9-12 = high-res ADC (U6-Pro only).
          gainIndex, the gain index.  0=x1, 1=x10, 2=x100, 3=x1000,
                     15=autorange.
          settlingFactor, the settling factor.  0=Auto, 1=20us, 2=50us, 3=100us,
                          4=200us, 5=500us, 6=1ms, 7=2ms, 8=5ms, 9=10ms.
          differential, set to True for differential reading.  Negative channel
                        is positiveChannel+1.
    Desc: Reads an AIN and applies the calibration constants to it.

    >>> myU6.getAIN(14)
    299.87723471224308

    strRanges = ["+/- 10", "+/- 1", "+/- 0.1", "+/- 0.01"]

    Returns a dict with 'readtime' and, on success, 'value'.
    """
    from iiutilities.datalib import gettimestring
    import u6

    result = {}
    device = u6.U6()
    result['readtime'] = gettimestring()
    try:
        device.getCalibrationData()
        result['value'] = device.getAIN(positiveChannel, resolutionIndex, gainIndex,
                                        settlingFactor, differential)
    except Exception:
        # BUGFIX: narrowed from a bare except so interrupts propagate; hardware
        # failures still yield a value-less result dict.
        pass

    device.close()
    return result
def get_and_log_netstats(path=netstats_dbpath, **kwargs):
    """
    Run a speed test and append the results to the 'wired' table, creating the
    table from netspeed_schema when it does not yet exist.

    Download/upload are recorded in Mbps, ping in ms, all rounded to 2 places.
    """
    import speedtest
    from iiutilities import dblib
    from iiutilities.datalib import gettimestring

    results = speedtest.call_tester(**kwargs)
    stats_db = dblib.sqliteDatabase(path)

    # Convert raw bits/second readings to Mbps.
    entry = {
        'time': gettimestring(),
        'download': round(results.download / 1000000, 2),
        'upload': round(results.upload / 1000000, 2),
        'ping': round(results.ping, 2),
    }

    if 'wired' not in stats_db.get_table_names():
        stats_db.create_table('wired', netspeed_schema)
    stats_db.insert('wired', entry)
def processnotification(notification):
    """
    Deliver a queued notification if the WAN is reachable. Only the 'email'
    type is implemented.

    Returns a dict with 'status' and 'senttime'.
    NOTE(review): 'status' becomes 0 only when no email address is present in
    the parsed options — behavior preserved as-is; confirm intended semantics.
    """
    from iiutilities import datalib
    from iiutilities import utility
    from iiutilities.netfun import pingstatus

    senttime = datalib.gettimestring()
    result = {'status': 1, 'senttime': senttime}

    if notification['type'] == 'email':
        # Check to make sure we're online (a status of 0 means ping succeeded).
        pingresult = pingstatus()
        if not pingresult['status']:
            utility.log(dirs.logs.notifications, 'WAN access is ok, so processing notification')

            options = datalib.parseoptions(notification['options'])
            subject = options.get('subject', 'CuPID Notification Email')

            # Append queue/send timestamps to the outgoing body.
            message = notification['message']
            message += '\r\n\r\n'
            message += 'Message queued:\t ' + notification['queuedtime'] + '\r\n'
            message += 'Message sent:\t ' + senttime + '\r\n'

            if 'email' in options:
                try:
                    actionmail = utility.gmail(message=message, subject=subject,
                                               recipient=options['email'])
                    actionmail.send()
                except:
                    pass
            else:
                result['status'] = 0
        else:
            utility.log(dirs.logs.notifications,
                        'WAN access does not appear to be ok. Status is: ' + str(pingresult['status']))

    return result
def readU6Analog(positiveChannel, resolutionIndex=0, gainIndex=0, settlingFactor=0, differential=False):
    """
    Name: U6.getAIN(positiveChannel, resolutionIndex = 0, gainIndex = 0,
                    settlingFactor = 0, differential = False)
    Args: positiveChannel, the positive channel to read from
          resolutionIndex, the resolution index.  0 = default, 1-8 = high-speed
                           ADC, 9-12 = high-res ADC (U6-Pro only).
          gainIndex, the gain index.  0=x1, 1=x10, 2=x100, 3=x1000,
                     15=autorange.
          settlingFactor, the settling factor.  0=Auto, 1=20us, 2=50us, 3=100us,
                          4=200us, 5=500us, 6=1ms, 7=2ms, 8=5ms, 9=10ms.
          differential, set to True for differential reading.  Negative channel
                        is positiveChannel+1.
    Desc: Reads an AIN and applies the calibration constants to it.

    >>> myU6.getAIN(14)
    299.87723471224308

    strRanges = ["+/- 10", "+/- 1", "+/- 0.1", "+/- 0.01"]

    Returns a dict with 'readtime' and, on success, 'value'.
    """
    from iiutilities.datalib import gettimestring
    import u6

    result = {}
    device = u6.U6()
    result['readtime'] = gettimestring()
    try:
        device.getCalibrationData()
        result['value'] = device.getAIN(positiveChannel, resolutionIndex, gainIndex,
                                        settlingFactor, differential)
    except Exception:
        # BUGFIX: narrowed from a bare except so interrupts propagate; hardware
        # failures still yield a value-less result dict.
        pass

    device.close()
    return result
def updatehardwareinfo(databasename='systemdb'):
    """
    Parse /proc/cpuinfo, store the key/value pairs as JSON in the versions
    table, and derive the Pi model name/memory from the Revision field when
    present.

    Args:
        databasename: logical database name resolved via pilib.dbnametopath.

    Returns:
        JSON string of the parsed cpuinfo dict (returned even if no database
        path could be resolved).
    """
    from subprocess import check_output
    from cupid import pilib
    from iiutilities import datalib
    from iiutilities import dblib
    import json

    data = check_output(['cat', '/proc/cpuinfo']).decode('utf-8')
    items = data.split('\n')

    hw_dict = {}
    for item in items:
        try:
            hw_dict[item.split(':')[0].strip()] = item.split(':')[1].strip()
        except IndexError:
            # BUGFIX: narrowed from bare except. Blank/malformed lines have no
            # ':' separator and are simply skipped.
            pass

    dictstring = json.dumps(hw_dict)

    dbpath = None
    try:
        dbpath = pilib.dbnametopath(databasename)
    except Exception:
        # BUGFIX: narrowed from bare except. Unresolvable database names just
        # skip the writes below.
        pass

    if dbpath:
        time = datalib.gettimestring()
        dblib.sqliteinsertsingle(dbpath, 'versions', ['cpuinfo', dictstring, time, ''],
                                 ['item', 'version', 'updatetime', 'versiontime'],)

    if 'Revision' in hw_dict and dbpath:
        versiondetail = piversiontoversionname(hw_dict['Revision'])
        dblib.sqliteinsertsingle(dbpath, 'versions',
                                 ['versionname', versiondetail['versionname'], time, ''],
                                 ['item', 'version', 'updatetime', 'versiontime'],)
        dblib.sqliteinsertsingle(dbpath, 'versions',
                                 ['memory', versiondetail['memory'], time, ''],
                                 ['item', 'version', 'updatetime', 'versiontime'],)

    return dictstring
def readU6(registers):
    """
    Read a sequence of registers from a LabJack U6.

    Returns a list of dicts, one per register, each holding the register id,
    the value ('' on failure), a status flag (0 ok / 1 error) and a read
    timestamp. Results are also printed.
    """
    import u6
    from iiutilities.datalib import gettimestring

    device = u6.U6()

    readings = []
    for register in registers:
        try:
            value = device.readRegister(register)
            status = 0
        except:
            # Failed reads are recorded with an empty value and error status.
            value = ''
            status = 1
        readings.append({
            'register': register,
            'value': value,
            'status': status,
            'time': gettimestring()
        })

    print(readings)
    return readings
def recordspidata(database, valuedict, execute=False):
    """
    Build (and optionally execute) insert queries recording SPI TC values into
    the inputs and ioinfo tables.

    This is incomplete and hardcoded partially.

    Returns the list of generated queries.
    """
    from iiutilities import dblib, datalib

    queries = []
    for name, value in valuedict.items():
        queries.append(dblib.makesqliteinsert(
            'inputs',
            valuelist=[name, 'SPI1', 'TC', '1', 'SPITC1', value, 'F',
                       datalib.gettimestring(), 1, '', '']))
        queries.append(dblib.makesqliteinsert('ioinfo', valuelist=[name, name, '']))

    if execute:
        dblib.sqlitemultquery(database, queries)

    return queries
def watchdognetstatus(allnetstatus={}):
    """
    Compare per-interface status against configuration and reconfigure
    interfaces that need it; optionally trigger a WAN-wide reconfig or a reboot
    when requireWANaccess is set and we have been offline too long.

    We are getting updated status information for each interface. We have
    configuration info for interfaces. We compare the two based on mode and
    decide if we need to run a netconfig on each interface. We ignore
    interfaces we don't have a config for (hamachi interfaces, loopback, GSM,
    etc.).

    NOTE: the mutable default argument is kept for interface compatibility;
    the dict is rebound (not mutated) when incomplete.
    """
    from iiutilities import utility
    from cupid import pilib
    from iiutilities import datalib
    from cupid import netconfig
    from iiutilities import dblib

    if 'ifaces_config' not in allnetstatus or 'ifaces_status' not in allnetstatus:
        allnetstatus = update_net_status()

    netconfig_data = allnetstatus['netconfig_data']
    netstatus = allnetstatus['netstatusdict']
    ifaces_config = allnetstatus['ifaces_config']
    ifaces_status = allnetstatus['ifaces_status']

    statusmsg = ''
    currenttime = datalib.gettimestring()

    reconfig_interfaces = []
    for iface_name, iface_status in ifaces_status.items():
        # BUGFIX: the log level arguments were previously misplaced inside
        # str.format(), so these entries were logged with default levels.
        utility.log(pilib.dirs.logs.network,
                    'Checking status of interface {}. '.format(iface_name),
                    3, pilib.loglevels.network)
        if iface_status['status'] == 'fail':
            reconfig_interfaces.append(iface_name)
            utility.log(pilib.dirs.logs.network,
                        'Interface has fail status. Setting reconfig for {}. '.format(iface_name),
                        1, pilib.loglevels.network)

    # Now do some sleuthing if we are being stringent about WAN access. Have to
    # be careful about this if we are on a private network.
    run_WAN_reconfig = False
    if netconfig_data['requireWANaccess']:
        utility.log(pilib.dirs.logs.network, 'Requiring WAN access. Checking status and times. ',
                    3, pilib.loglevels.network)
        if not netstatus['WANaccess']:
            utility.log(pilib.dirs.logs.network, 'No WANaccess. Checking offline time. ',
                        2, pilib.loglevels.network)
            try:
                offlinetime = netstatus['offlinetime']
            except KeyError:
                utility.log(pilib.dirs.logs.network, 'Error getting offlinetime. ',
                            2, pilib.loglevels.network)
                # BUGFIX: offlinetime was previously left undefined here,
                # raising NameError below. Fall back to "just went offline".
                offlinetime = currenttime

            offlineperiod = datalib.timestringtoseconds(datalib.gettimestring()) - \
                datalib.timestringtoseconds(offlinetime)
            utility.log(pilib.dirs.logs.network, 'We have been offline for ' + str(offlineperiod))

            # When did we last restart the network config? Is it time to again?
            timesincelastnetrestart = datalib.timestringtoseconds(
                datalib.gettimestring()) - datalib.timestringtoseconds(netstatus['lastnetreconfig'])
            utility.log(pilib.dirs.logs.network, 'It has been ' + str(timesincelastnetrestart) +
                        ' seconds since we last restarted the network configuration. ')

            # Require that offline time is greater than WANretrytime
            if timesincelastnetrestart > int(netconfig_data['WANretrytime']) and \
                    offlineperiod > int(netconfig_data['WANretrytime']):
                utility.log(pilib.dirs.logs.network,
                            'We are not online, and it has been long enough, exceeding retry time of ' +
                            str(int(netconfig_data['WANretrytime'])))
                dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'lastnetreconfig',
                                     datalib.gettimestring())

                # We do reset the WAN offline time in the reboot sequence, however.
                restarts = int(dblib.getsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'WANaccessrestarts'))
                restarts += 1
                dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'WANaccessrestarts', restarts)

                utility.log(pilib.dirs.logs.network, 'Going to run netconfig to correct WAN access.')
                run_WAN_reconfig = True
            else:
                utility.log(pilib.dirs.logs.network,
                            'Not yet time to run netconfig to correct WAN access. Retry time set at ' +
                            str(netconfig_data['WANretrytime']))
        else:
            utility.log(pilib.dirs.logs.network, 'WANAccess is fine. ')

    if run_WAN_reconfig:
        # Set bad status in netstatus
        dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'netstate', 0)

        # Set ok time to '' to trigger rewrite next time status is ok
        lastoktime = dblib.getsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'netstateoktime')
        if not lastoktime:
            dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'netstateoktime',
                                 datalib.gettimestring())
        else:
            if netconfig_data['rebootonfail']:
                offlinetime = datalib.timestringtoseconds(datalib.gettimestring()) - \
                    datalib.timestringtoseconds(lastoktime)
                if offlinetime > int(netconfig_data['rebootonfailperiod']):
                    # Set to '' so we get another full fail period before rebooting again
                    dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'netstateoktime', '')
                    # Same thing for WAN offline time
                    dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'offlinetime', '')
                    bootcounts = int(dblib.getsinglevalue(pilib.dirs.dbs.system, 'netstatus',
                                                          'netrebootcounter'))
                    bootcounts += 1
                    dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'netrebootcounter',
                                         str(bootcounts))

                    # Set system flag to reboot
                    utility.log(pilib.dirs.logs.system, 'REBOOTING to try to fix network', 0,
                                pilib.loglevels.system)
                    dblib.setsinglevalue(pilib.dirs.dbs.system, 'systemflags', 'reboot', 1)

        # Figure out which interfaces to restart to fix WAN issues
        for interface_name, interface in ifaces_config.items():
            utility.log(pilib.dirs.logs.network,
                        'Adding interface {} to reconfig list'.format(interface_name),
                        1, pilib.loglevels.network)
            if interface['mode'] in ['status', 'station', 'dhcp']:
                reconfig_interfaces.append(interface_name)
    else:
        # Clear bad status in netstatus and set netoktime
        dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'statusmsg', 'Mode appears to be set.')
        dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'netstate', 1)
        dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'netstateoktime',
                             datalib.gettimestring())

    # NOTE(review): statusmsg is still '' at this point; the original wrote it
    # out here regardless — preserved as-is. Confirm intent.
    dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'statusmsg', statusmsg)

    if reconfig_interfaces:
        utility.log(pilib.dirs.logs.network,
                    'Running netreconfig on list: {}'.format(reconfig_interfaces),
                    1, pilib.loglevels.network)
        netconfig.runconfig(ifaces_to_configure=reconfig_interfaces, config=ifaces_config,
                            config_all=False)
def application(environ, start_response):
    """WSGI endpoint that records session 'access' events in the auth log.

    Expects a JSON request body containing 'sessionid', 'event', 'realIP'
    and 'apparentIP' (plus 'username' for access events).  Inserts a row
    into the sessionlog table of the auth database and returns a
    plain-text confirmation.
    """
    import json
    import os, sys, inspect

    # Set top folder to allow import of modules
    top_folder = os.path.split(
        os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0, top_folder)

    from iiutilities.datalib import gettimestring
    from iiutilities.dblib import sqlitequery

    # Read and parse the JSON request body; a missing or malformed
    # CONTENT_LENGTH is treated as an empty body.
    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    request_body = environ['wsgi.input'].read(request_body_size)
    post = json.loads(request_body.decode('utf-8'))

    status = '200 OK'
    if all(key in post for key in ('sessionid', 'event', 'realIP', 'apparentIP')):
        # BUGFIX: 'post' is a plain dict from json.loads(), not a
        # cgi.FieldStorage, so values must be read by subscription;
        # the previous post.getvalue(...) raised AttributeError.
        sessionid = post['sessionid']
        if post['event'] == 'access':
            accesstime = gettimestring()
            username = post.get('username', '')
            apparentIP = post['apparentIP']
            realIP = post['realIP']
            # TODO(security): values are interpolated directly into SQL.
            # Switch to a parameterized query when the helper supports it.
            sqlitequery('/var/www/data/authlog.db',
                        "insert into sessionlog values ( '" + username + "','" + sessionid + "','" +
                        accesstime + "'," + "'access' ,'" + apparentIP + "','" + realIP + "' )")
            output = "Output processed for " + realIP + " & " + apparentIP
        else:
            # BUGFIX: previously an unhandled event type left 'output' as a
            # dict, which is not a valid WSGI body.
            output = 'error: unhandled event type'
    else:
        output = 'error: no session field sent'

    # PEP 3333 requires a bytes body under Python 3; Content-Length must
    # reflect the encoded length.
    body = output.encode('utf-8')
    response_headers = [('Content-type', 'text/plain'), ('Content-Length', str(len(body)))]
    start_response(status, response_headers)
    return [body]
def application(environ, start_response):
    """Main CuPID JSON API endpoint.

    Parses a JSON request body, verifies the session credentials against
    the users database (salted sha1/md5 scheme), authorizes the requested
    'action', dispatches it through a long elif chain, and returns the
    accumulated 'output' dict as JSON.
    """
    import cgi
    import json
    import os, sys, inspect

    # Set top folder to allow import of modules
    top_folder = os.path.split(
        os.path.realpath(
            os.path.abspath(
                os.path.split(inspect.getfile(inspect.currentframe()))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0, top_folder)

    from cupid import pilib, controllib
    from iiutilities import dblib, utility, datalib

    # post_env = environ.copy()
    # post_env['QUERY_STRING'] = ''
    # post = cgi.FieldStorage(
    #     fp=environ['wsgi.input'],
    #     environ=post_env,
    #     keep_blank_values=True
    # )
    #
    # formname=post.getvalue('name')
    # output = {}
    # output['message'] = 'Output Message: '
    # for k in post.keys():
    #     d[k] = post.getvalue(k)

    # Read the JSON request body; a missing/invalid CONTENT_LENGTH is
    # treated as an empty body.
    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    request_body = environ['wsgi.input'].read(request_body_size)
    post = json.loads(request_body.decode('utf-8'))

    output = {}
    output['message'] = ''

    status = '200 OK'
    wsgiauth = True
    authverified = False

    if wsgiauth:
        # Verfiy that session login information is legit: hashed password, with salt and username, match
        # hash stored in database.
        import hashlib

        safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.users)
        if 'username' in post and post['username']:
            output['message'] += 'Session user is ' + post['username'] + '. '
        else:
            output['message'] += 'No session user found. '
            post['username'] = ''

        if post['username']:
            try:
                # NOTE(review): condition is built by string concatenation
                # from client input — SQL injection risk; confirm whether
                # read_table_row can take parameters instead.
                condition = "name='" + post['username'] + "'"
                user_data = safe_database.read_table_row('users', condition=condition)[0]
            except:
                output['message'] += 'Error in user sqlite query for session user "' + post['username'] + '". '
                output['message'] += 'Condition: ' + condition + '. Path: ' + pilib.dirs.dbs.safe
                # Fall back to demo credentials when the lookup fails.
                user_data = {'accesskeywords': 'demo', 'admin': False}
            else:
                # Get session hpass to verify credentials
                # Scheme: md5(sha1(username) + salt + hpass) compared against
                # the stored password hash.
                hashedpassword = post['hpass']
                hname = hashlib.new('sha1')
                hname.update(post['username'])
                hashedname = hname.hexdigest()
                hentry = hashlib.new('md5')
                hentry.update(hashedname + pilib.salt + hashedpassword)
                hashedentry = hentry.hexdigest()
                if hashedentry == user_data['password']:
                    # successful auth
                    output['message'] += 'Password verified. '
                    authverified = True
                    # TODO: implement usermeta
        else:
            # Demo status: no session user means anonymous demo access at
            # authlevel 0.
            authverified = True
            user_data = {'authlevel': 0}
    else:
        output['message'] += 'WSGI authorization not enabled. '

    if authverified or not wsgiauth:
        output['authorized'] = True
        try:
            action = post['action']
        except KeyError:
            output['message'] = 'no action in request'
            action = ''
        else:
            output['message'] += '{} action keyword found'.format(action)

    # NOTE(review): if auth fails, output['authorized'] and 'action' are
    # never assigned, so the next line would raise — confirm intended.
    if output['authorized'] and action:
        output['action_allowed'] = pilib.check_action_auths(action, user_data['authlevel'])
    else:
        output['action_allowed'] = False

    if output['authorized'] and output['action_allowed']:
        output['message'] += 'Found action. '

        # ---- action dispatch ----
        if action == 'testdbvn':
            from iiutilities.dblib import dbvntovalue
            try:
                output['data'] = dbvntovalue(post['dbvn'])
            except:
                output['message'] += 'Error in dbvn evaluation. '
                output['data'] = 'error'
            else:
                output['message'] += 'Seems to have worked out. '

        elif action == 'testlogical':
            from iiutilities.datalib import evaldbvnformula
            try:
                output['data'] = evaldbvnformula(post['logical'])
            except:
                output['message'] += 'Error in logical evaluation. '
                output['data'] = 'error'
            else:
                output['message'] += 'Seems to have worked out. '

        elif action == 'testmodule':
            output['message'] += 'Testing module: '
            if 'modulename' in post:
                import cupid.cupidunittests
                output['message'] += post['modulename']
                output['data'] = cupid.cupidunittests.testmodule(post['modulename'])
            else:
                output['message'] += 'Modulename not found. '

        elif action == 'testfunction':
            output['message'] += 'Testing function: '
            if 'testname' in post:
                import cupid.cupidunittests
                output['message'] += post['testname']
                # output['data'] = cupid.tests.testfunction(d['testname'])
                output['data'] = cupid.cupidunittests.testfunction(post['testname'])
                # output['data'] = str(cupid.tests.testfunction('systemstatus'))
            else:
                output['message'] += 'Testname not found. '

        elif action == 'modifychannelalarm':
            controllib.handle_modify_channel_alarm(post, output)
            from cupid.actions import processactions
            # process only this action.
            processactions(name=post['actionname'])

        elif action == 'modifychannel':
            controllib.handle_modify_channel(post, output)

        elif action == 'getalarmscount':
            # Tally total/active alarms, split out channel-type alarms.
            control_db = dblib.sqliteDatabase(pilib.dirs.dbs.control)
            actions = control_db.read_table('actions')
            output['data'] = {'totalalarms': len(actions), 'channelalarms': 0, 'activealarms': 0,
                              'activechannelalarms': 0}
            # NOTE(review): this loop rebinds 'action' (the dispatch
            # keyword) to each row dict — harmless here because dispatch
            # has already matched, but fragile.
            for action in actions:
                if action['conditiontype'] == 'channel':
                    output['data']['channelalarms'] += 1
                    if action['active']:
                        output['data']['activechannelalarms'] += 1
                if action['active']:
                    output['data']['activealarms'] += 1

        elif action == 'copy_log_to_archive':
            pilib.app_copy_log_to_archive(post, output)

        elif action == 'getlogscount':
            logtablenames = dblib.sqliteDatabase(pilib.dirs.dbs.log).get_table_names()
            output['data'] = {'logscount': len(logtablenames)}

        elif action == 'test_action':
            output['message'] += 'Testing action. '
            controldb = dblib.sqliteDatabase(pilib.dirs.dbs.control)
            actiondict = controldb.read_table('actions', condition='"name"=\'' + post['actionname'] + "'")[0]
            # 'action' here imports the action class, shadowing the
            # dispatch keyword string.
            from cupid.actions import action
            test_action = action(actiondict)
            test_action.test()

        elif action == 'update_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            safe_database.set_single_value('wireless', 'password', post['password'],
                                           "SSID='" + post['ssid'] + "'")

        elif action == 'add_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            insert = {'SSID': post['ssid'], 'auto': 1, 'priority': 1}
            if 'password' in post:
                insert['password'] = post['password']
            safe_database.insert('wireless', insert)

        elif action == 'delete_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            safe_database.delete('wireless', "SSID='" + post['ssid'] + "'")

        # elif action == 'dump':
        #     # this has to go.
        #     if 'database' in d:
        #         dbpath = pilib.dbnametopath(d['database'])
        #         if dbpath:
        #             if 'tablelist' in d and 'outputfile' in d:
        #                 dbpath = pilib.dbnametopath(d['database'])
        #                 dblib.sqlitedatadump(dbpath, d['tablelist'], d['outputfile'])
        #                 output['message'] = 'data dumped'
        #             elif 'tablename' in d and 'outputfile' in d:
        #                 dblib.sqlitedatadump(dbpath, [d['tablename']], d['outputfile'])
        #                 output['message'] = 'data dumped. '
        #             else:
        #                 output['message'] += 'keys not present for dump. '
        #         else:
        #             output['message'] += 'keys not present for dump. '
        #     else:
        #         output['message'] += 'keys not present for dump. '

        elif action in ['userdelete', 'useradd', 'usermodify']:
            """
            This needs to be consolidate with the other useradd, modify algorithm written already.
            Probably do this when we update the user permissions interface.
            """
            # Ensure that we are authorized for this action
            if action == 'userdelete':
                try:
                    dblib.sqlitequery(pilib.dirs.dbs.users,
                                      "delete from users where name='" + post['usertodelete'] + "'")
                except:
                    output['message'] += 'Error in delete query. '
                else:
                    output['message'] += 'Successful delete query. '
            elif action == 'usermodify':
                if 'usertomodify' in post:
                    querylist = []
                    if 'newpass' in post:
                        from pilib import salt
                        # Get session hpass to verify credentials
                        # Same md5(sha1(name) + salt + pass) scheme as login.
                        hashedpassword = post['newpass']
                        hname = hashlib.new('sha1')
                        hname.update(post['usertomodify'])
                        hashedname = hname.hexdigest()
                        hentry = hashlib.new('md5')
                        hentry.update(hashedname + salt + hashedpassword)
                        hashedentry = hentry.hexdigest()
                        # NOTE(review): literal below appears redacted
                        # ('******') in the source as received — restore the
                        # hashedentry interpolation before shipping.
                        querylist.append('update users set password='******'" + post['usertomodify'] + "'")
                    if 'newemail' in post:
                        querylist.append("update users set email='" + post['newemail'] +
                                         "' where name='" + post['usertomodify'] + "'")
                    if 'newauthlevel' in post:
                        querylist.append("update users set authlevel='" + post['newauthlevel'] +
                                         "' where name='" + post['usertomodify'] + "'")
                    try:
                        dblib.sqlitemultquery(pilib.dirs.dbs.users, querylist)
                    except:
                        output['message'] += 'Error in modify/add query: ' + ",".join(querylist)
                    else:
                        output['message'] += 'Successful modify/add query. ' + ",".join(querylist)
                else:
                    output['message'] += 'Need usertomodify in query. '
            elif action == 'useradd':
                # Missing fields fall back to placeholder defaults.
                try:
                    username = post['newusername']
                except:
                    username = '******'
                try:
                    newemail = post['newemail']
                except:
                    newemail = '*****@*****.**'
                try:
                    newauthlevel = post['newauthlevel']
                except:
                    newauthlevel = 0
                query = "insert into users values(NULL,'" + username + "','','" + newemail + "',''," + str(
                    newauthlevel) + ")"
                try:
                    dblib.sqlitequery(pilib.dirs.dbs.users, query)
                except:
                    output['message'] += "Error in useradd sqlite query: " + query + ' . '
                else:
                    output['message'] += "Successful query: " + query + ' . '

        elif action == 'getfiletext':
            # Return up to 'numlines' lines of a file, from the end by
            # default or from the start when startposition != 'end'.
            try:
                filepath = post['filepath']
                if 'numlines' in post:
                    numlines = int(post['numlines'])
                else:
                    numlines = 9999
                output['message'] += 'Using numlines: ' + str(numlines) + ' for read action. '
                if 'startposition' in post:
                    startposition = post['startposition']
                else:
                    startposition = 'end'
                output['message'] += 'Reading from position ' + startposition + '. '
            except KeyError:
                output['message'] += 'Sufficient keys for action getfile text do not exist. '
            except:
                output['message'] += 'Uncaught error in getfiletext. '
            else:
                try:
                    file = open(filepath)
                    lines = file.readlines()
                except:
                    output['message'] += 'Error reading file in getfiletext action. '
                else:
                    output['data'] = []
                    if startposition == 'end':
                        try:
                            output['data'] = datalib.tail(file, numlines)[0]
                        except:
                            output['message'] += 'Error in tail read. '
                    else:
                        linecount = 0
                        for line in lines:
                            linecount += 1
                            if linecount > numlines:
                                break
                            else:
                                output['data'].append(line)

        elif action == 'getmbtcpdata':
            # Read Modbus/TCP coded addresses from a remote client.
            try:
                clientIP = post['clientIP']
                register = post['register']
                length = post['length']
            except KeyError:
                output['message'] += 'Sufficient keys do not exist for the command. Requires clientIP, register, and length. '
            else:
                from iiutilities.netfun import readMBcodedaddresses
                # try:
                output['response'] = readMBcodedaddresses(clientIP, int(register), int(length))

        elif action == 'queuemessage':
            output['message'] += 'Queue message. '
            if 'message' in post:
                try:
                    dblib.sqliteinsertsingle(pilib.dirs.dbs.motes, 'queuedmessages',
                                             [datalib.gettimestring(), post['message']])
                except:
                    import traceback
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    output['message'] += 'Error in queue insert query: {}. '.format(traceback.format_exc())
                else:
                    output['message'] += 'Message insert successful'
            else:
                output['message'] += 'No message present. '

        elif action == 'setsystemflag' and 'systemflag' in post:
            database = pilib.dirs.dbs.system
            dblib.setsinglevalue(database, 'systemflags', 'value', 1, "name=\'" + post['systemflag'] + "'")

        elif action == 'rundaemon':
            from cupiddaemon import rundaemon
            rundaemon()

        # TODO: Eliminate this scary thing.
        elif action == 'setvalue':
            utility.log(pilib.dirs.logs.control, "Setting value in wsgi", 1, 1)

            # we use the auxiliary 'setsinglecontrolvalue' to add additional actions to update
            if all(k in post for k in ('database', 'table', 'valuename', 'value')):
                dbpath = pilib.dbnametopath(post['database'])
                if dbpath:
                    output['message'] += 'Carrying out setvalue for value ' + post['valuename'] + ' on ' + post[
                        'table'] + ' in ' + dbpath
                    if 'condition' in post:
                        pilib.setsinglecontrolvalue(dbpath, post['table'], post['valuename'], post['value'],
                                                    post['condition'])
                    elif 'index' in post:
                        condition = 'rowid= ' + post['index']
                        pilib.setsinglecontrolvalue(dbpath, post['table'], post['valuename'], post['value'],
                                                    condition)
                    else:
                        pilib.setsinglecontrolvalue(dbpath, post['table'], post['valuename'], post['value'])
                else:
                    output['message'] += 'Problem translating dbpath from friendly name: ' + post['database']
            else:
                output['message'] += 'Insufficient data for setvalue '

        elif action == 'updateioinfo':
            if all(k in post for k in ['database', 'ioid', 'value']):
                query = dblib.makesqliteinsert('ioinfo', [post['ioid'], post['value']], ['id', 'name'])
                try:
                    dblib.sqlitequery(pilib.dirs.dbs.control, query)
                except:
                    output['message'] += 'Error in updateioinfo query execution: ' + query + '. into database: ' + pilib.dirs.dbs.control
                    output['message'] += 'ioid: ' + post['ioid'] + ' . '
                else:
                    output['message'] += 'Executed updateioinfo query. '
            else:
                output['message'] += 'Insufficient data for updateioinfo query ! '

        # TODO: properly incorporate and test channel class functions here, and then sub it.
        elif action == 'modify_channel':
            controllib.app_modify_channel(post, output)

        elif action == 'deletechannelbyname' and 'database' in post and 'channelname' in post:
            dbpath = pilib.dbnametopath(post['database'])
            dblib.sqlitequery(dbpath,
                              'delete channelname from channels where name=\"' + post['channelname'] + '\"')

        elif action == 'updatecameraimage':
            output['message'] += 'Take camera image keyword. '
            import cupid.camera
            if 'width' in post:
                width = post['width']
            else:
                width = 800
            try:
                values = cupid.camera.takesnap(width=width)
            except:
                output['message'] += 'Error taking image. '
            else:
                output['message'] += 'Appears successful. Path : ' + values['imagepath'] + '. Timestamp : ' + values[
                    'timestamp'] + '. '
                output['data'] = values

        elif action == 'getcurrentcamtimestamp':
            output['message'] += 'getcurrentcamtimestamp keyword found. '
            try:
                with open('/var/www/webcam/images/current.jpg.timestamp') as f:
                    data = f.read()
            except:
                output['message'] += 'Error reading file as requested. '
            else:
                output['data'] = data

        else:
            output['message'] += 'Action keyword present(' + action + '), but not handled. '
    else:
        output['message'] += 'Authentication unsuccessful or action not authorized.'
        status = '401 Not Authorized'

    # Serialize the accumulated output dict and complete the WSGI response.
    foutput = json.dumps(output, indent=1)

    response_headers = [('Content-type', 'application/json')]
    start_response(status, response_headers)
    return [foutput]
def offact(self):
    """Run this action's 'off' (alarm-cleared) handler.

    Dispatches on self.actiontype:
      - 'email':     send a notification email reporting the off time
      - 'indicator': clear the indicator's status bit in the control db
      - 'output':    set the mapped output's value to 0
      - 'setvalue':  generic handler; set the dbvn-addressed value to '0'
    Appends progress/error text to self.statusmsg as it goes.
    """
    from iiutilities import dblib, datalib, utility
    from cupid import pilib

    if self.actiontype == 'email':
        # process email action
        # TODO: This really needs to queue the email action in case we are not online.
        self.statusmsg += 'Processing email alert.'
        email = self.actiondetail
        # message = 'Alert has gone inactive for ' + self.name + '. Criterion ' + self.variablename + ' in ' + self.tablename + ' has value ' + str(self.variablevalue) + ' with a criterion of ' + str(self.criterion) + ' with an operator of ' + self.operator + '. This alarm status has been of since ' + self.offtime + '.'
        message = 'Alert for alarm ' + self.name + ' . Off time of ' + self.offtime + '. Current time of ' \
                  + datalib.gettimestring()
        if self.conditiontype == 'value':
            # Include the value/operator/criterion that the alarm watches.
            message += ' Value: ' + str(self.value) + self.actiondatadict['operator'] + str(
                self.actiondatadict['criterion'])
        import socket
        hostname = socket.gethostname()
        subject = 'CuPID ' + hostname + ' Alert : Alarm Off - ' + self.name
        try:
            actionmail = utility.gmail(message=message, subject=subject, recipient=email)
            actionmail.send()
        except:
            self.statusmsg += 'Error sending email. '
        else:
            self.statusmsg += 'Mail sent. '

    elif self.actiontype == 'indicator':
        # process indicator action
        self.statusmsg += 'Processing indicator off action.'
        indicatorname = self.actiondetail
        dblib.setsinglevalue(pilib.dirs.dbs.control, 'indicators', 'status', 0,
                             'name=\'' + indicatorname + '\'')
        print('INDICATORNAME = "' + indicatorname + '"')
        # dblib.sqlitequery(pilib.dirs.dbs.control, 'update indicators set status=0 where name = ' + indicatorname)

    elif self.actiontype == 'output':
        # actiondetail holds the output's id; assumes ids never contain
        # quotes — TODO confirm.
        self.statusmsg += 'Processing output off action. '
        dblib.setsinglevalue(pilib.dirs.dbs.control, 'outputs', 'value', '0',
                             condition='"id"=\'' + self.actiondetail + "'")

    # This should be the generic handler that we migrate to
    elif self.actiontype == 'setvalue':
        # to set a value, we need at minimum:
        # dbname, tablename, valuename, setmethod and either:
        #   setmethod = increment, incrementvalue=1
        #   setmethod = value
        dbvndict = datalib.parsedbvn(self.actiondetail)
        dbpath = pilib.dbnametopath(dbvndict['dbname'])
        # Special set formula?
        if 'setvalueformula' in self.actiondatadict:
            # Stuff that we don't know yet.
            dblib.setsinglevalue(dbpath, dbvndict['tablename'], dbvndict['valuename'], 'formulastuff here',
                                 dbvndict['condition'])
        else:
            """
            TODO: Fix this hack. We cannot currently single quote in the database entry because it breaks
            the reinsert. So for now, we have to add quotes on either side of the string literal before
            executing the sqlite query.
            """
            if dbvndict['condition']:
                # Re-wrap the value side of the stored condition in single
                # quotes (see TODO above); assumes exactly one '=' in the
                # condition — TODO confirm.
                querycondition = dbvndict['condition'].split('=')[0] + "='" + \
                                 dbvndict['condition'].split('=')[1] + "'"
                # print('FIXED CONDITION')
                # print(querycondition)
            else:
                querycondition = None
            dblib.setsinglevalue(dbpath, dbvndict['tablename'], dbvndict['valuename'], '0', querycondition)
def runboot():
    """Boot-time initialization for the CuPID controller.

    Clears status flags in the system database, queues a boot notification
    email, (re)starts system daemons — pigpiod, the configured webserver,
    the owfs 1-wire stack — and optionally runs network configuration.
    All steps are best-effort: failures are logged and boot continues.
    """
    import subprocess
    from time import sleep
    import pilib
    import spilights
    from iiutilities import utility, dblib, datalib

    # WAL journaling is disabled for boot-time database access (best-effort).
    try:
        pilib.set_all_wal(False)
    except:
        print('error setting wal mode')

    interfaces = pilib.dbs.control.read_table('interfaces')

    # Clear out status bits, if for no other reason to see the LEDs come on
    for statusvalue in ['systemstatusstatus', 'hamachistatus', 'picontrolstatus', 'updateiostatus',
                        'serialhandlerstatus']:
        dblib.setsinglevalue(pilib.dirs.dbs.system, 'systemstatus', statusvalue, 0)

    systemstatus = dblib.readonedbrow(pilib.dirs.dbs.system, 'systemstatus')[0]

    # Queue a message indicating we are rebooting
    # TODO: Make this an actions option, or put it somewhere.
    import socket
    hostname = socket.gethostname()
    message = 'CuPID is booting:\r\n\r\n'
    notifications_email = '*****@*****.**'
    subject = 'CuPID : ' + hostname + ' : booting'

    notification_database = pilib.cupidDatabase(pilib.dirs.dbs.notifications)
    system_database = pilib.cupidDatabase(pilib.dirs.dbs.system)

    currenttime = datalib.gettimestring()
    notification_database.insert('queued',
                                 {'type': 'email', 'message': message,
                                  'options': 'email:' + notifications_email + ',subject:' + subject,
                                  'queuedtime': currenttime})
    system_database.set_single_value('notifications', 'lastnotification', currenttime,
                                     condition="item='boot'")

    # Start pigpiod
    subprocess.call(['killall', 'pigpiod'])
    sleep(1)
    utility.log(pilib.dirs.logs.system, 'boot: starting pigpio daemon', 3, pilib.loglevels.system)
    subprocess.call(['/usr/local/bin/pigpiod'])

    # Start webserver: kill all known servers, then start the configured one.
    subprocess.call(['killall', 'nginx'])
    subprocess.call(['killall', 'uwsgi'])
    subprocess.call(['killall', 'apache2'])
    if systemstatus['webserver'] == 'apache':
        utility.log(pilib.dirs.logs.system, 'boot: starting apache', 3, pilib.loglevels.system)
        subprocess.call(['service', 'apache2', 'start'])
    elif systemstatus['webserver'] == 'nginx':
        utility.log(pilib.dirs.logs.system, 'boot: starting nginx', 3, pilib.loglevels.system)
        subprocess.call(['service', 'nginx', 'start'])

    # Run uwsgi daemon if nginx is running
    try:
        result = subprocess.check_output(['service', 'nginx', 'status']).decode('utf-8')
    except subprocess.CalledProcessError:
        # Non-zero exit means nginx is not running; treat as "no output".
        result = ''

    if result:
        utility.log(pilib.dirs.logs.system, 'boot: starting uwsgi based on nginx call', 0)
        subprocess.call(['uwsgi', '--emperor', '/usr/lib/iicontrollibs/wsgi/', '--daemonize',
                         '/var/log/cupid/uwsgi.log'])

    # Mount 1wire master: stop any existing owfs processes first.
    subprocess.call(['killall', 'owfs'])
    subprocess.call(['killall', 'owserver'])
    subprocess.call(['killall', 'owhttpd'])

    runi2cowfs = True
    runusbowfs = False

    temp_unit = 'C'
    for interface in interfaces:
        if interface['enabled']:
            from iiutilities.datalib import parseoptions
            options_dict = parseoptions(interface['options'])
            if 'tempunit' in options_dict:
                if options_dict['tempunit'] in ['F', 'f', 'Fahrenheit', 'fahrenheit']:
                    temp_unit = 'F'

            if interface['interface'] == 'I2C' and interface['type'] == 'DS2483':
                runi2cowfs = True
            if interface['interface'] == 'USB' and interface['type'] == 'DS9490':
                runusbowfs = True

            # BUGFIX: these two checks previously compared the *builtin*
            # `type` to 'CuPIDlights' (always False), so the SPI light
            # updates never ran; compare the interface row's type instead,
            # consistent with the DS2483/DS9490 checks above.
            if interface['interface'] == 'SPI1' and interface['type'] == 'CuPIDlights':
                spilights.updatelightsfromdb(pilib.dirs.dbs.control, 'indicators', 1)
            if interface['interface'] == 'SPI0' and interface['type'] == 'CuPIDlights':
                spilights.updatelightsfromdb(pilib.dirs.dbs.control, 'indicators', 0)

    if runi2cowfs or runusbowfs:
        # owfs '-F' flag reports temperatures in Fahrenheit; default is Celsius.
        if runi2cowfs:
            utility.log(pilib.dirs.logs.system, 'boot: Running i2c owserver', 3, pilib.loglevels.system)
            try:
                if temp_unit == 'F':
                    subprocess.call(['/opt/owfs/bin/owserver', '-F', '--i2c=/dev/i2c-1:ALL', '-p', '4304'])
                else:
                    subprocess.call(['/opt/owfs/bin/owserver', '--i2c=/dev/i2c-1:ALL', '-p', '4304'])
            except:
                utility.log(pilib.dirs.logs.system, 'boot: error running i2c owserver', 1,
                            pilib.loglevels.system)
        if runusbowfs:
            utility.log(pilib.dirs.logs.system, 'boot: Running usb owserver', 3, pilib.loglevels.system)
            try:
                if temp_unit == 'F':
                    subprocess.call(['/opt/owfs/bin/owserver', '-F', '-u', '-p', '4304'])
                else:
                    subprocess.call(['/opt/owfs/bin/owserver', '-u', '-p', '4304'])
            except:
                utility.log(pilib.dirs.logs.system, 'error running usb owserver', 1, pilib.loglevels.system)

        utility.log(pilib.dirs.logs.system, 'boot: Running owfs/owserver mount', 3, pilib.loglevels.system)
        try:
            if temp_unit == 'F':
                subprocess.call(['/opt/owfs/bin/owfs', '-F', '-s', '4304', '/var/1wire/'])
            else:
                subprocess.call(['/opt/owfs/bin/owfs', '-s', '4304', '/var/1wire/'])
        except:
            utility.log(pilib.dirs.logs.system, 'boot: error running owfs', 1, pilib.loglevels.system)

        utility.log(pilib.dirs.logs.system, 'boot: Running owhttpd/owserver mount', 3,
                    pilib.loglevels.system)
        try:
            if temp_unit == 'F':
                subprocess.call(['/opt/owfs/bin/owhttpd', '-F', '-s', '4304', '-p', '4305'])
            else:
                subprocess.call(['/opt/owfs/bin/owhttpd', '-s', '4304', '-p', '4305'])
        except:
            utility.log(pilib.dirs.logs.system, 'boot: error running owhttpd', 1, pilib.loglevels.system)
    else:
        utility.log(pilib.dirs.logs.system, 'boot: not running owfs', 3, pilib.loglevels.system)

    # Run netstart script if enabled
    if systemstatus['netconfigenabled']:
        from netconfig import runconfig
        utility.log(pilib.dirs.logs.system, 'boot: running boot netconfig', 2, pilib.loglevels.system)
        runconfig(onboot=True)
def runboot():
    """Boot-time initialization for the CuPID controller (duplicate copy).

    Clears status flags in the system database, queues a boot notification
    email, (re)starts system daemons — pigpiod, the configured webserver,
    the owfs 1-wire stack — and optionally runs network configuration.
    All steps are best-effort: failures are logged and boot continues.
    """
    import subprocess
    from time import sleep
    import pilib
    import spilights
    from iiutilities import utility, dblib, datalib

    # WAL journaling is disabled for boot-time database access (best-effort).
    try:
        pilib.set_all_wal(False)
    except:
        print('error setting wal mode')

    interfaces = pilib.dbs.control.read_table('interfaces')

    # Clear out status bits, if for no other reason to see the LEDs come on
    for statusvalue in [
            'systemstatusstatus', 'hamachistatus', 'picontrolstatus', 'updateiostatus',
            'serialhandlerstatus'
    ]:
        dblib.setsinglevalue(pilib.dirs.dbs.system, 'systemstatus', statusvalue, 0)

    systemstatus = dblib.readonedbrow(pilib.dirs.dbs.system, 'systemstatus')[0]

    # Queue a message indicating we are rebooting
    # TODO: Make this an actions option, or put it somewhere.
    import socket
    hostname = socket.gethostname()
    message = 'CuPID is booting:\r\n\r\n'
    notifications_email = '*****@*****.**'
    subject = 'CuPID : ' + hostname + ' : booting'

    notification_database = pilib.cupidDatabase(pilib.dirs.dbs.notifications)
    system_database = pilib.cupidDatabase(pilib.dirs.dbs.system)

    currenttime = datalib.gettimestring()
    notification_database.insert(
        'queued', {
            'type': 'email',
            'message': message,
            'options': 'email:' + notifications_email + ',subject:' + subject,
            'queuedtime': currenttime
        })
    system_database.set_single_value('notifications', 'lastnotification', currenttime,
                                     condition="item='boot'")

    # Start pigpiod
    subprocess.call(['killall', 'pigpiod'])
    sleep(1)
    utility.log(pilib.dirs.logs.system, 'boot: starting pigpio daemon', 3, pilib.loglevels.system)
    subprocess.call(['/usr/local/bin/pigpiod'])

    # Start webserver: kill all known servers, then start the configured one.
    subprocess.call(['killall', 'nginx'])
    subprocess.call(['killall', 'uwsgi'])
    subprocess.call(['killall', 'apache2'])
    if systemstatus['webserver'] == 'apache':
        utility.log(pilib.dirs.logs.system, 'boot: starting apache', 3, pilib.loglevels.system)
        subprocess.call(['service', 'apache2', 'start'])
    elif systemstatus['webserver'] == 'nginx':
        utility.log(pilib.dirs.logs.system, 'boot: starting nginx', 3, pilib.loglevels.system)
        subprocess.call(['service', 'nginx', 'start'])

    # Run uwsgi daemon if nginx is running
    try:
        result = subprocess.check_output(['service', 'nginx', 'status']).decode('utf-8')
    except subprocess.CalledProcessError:
        # Non-zero exit means nginx is not running; treat as "no output".
        result = ''

    if result:
        utility.log(pilib.dirs.logs.system, 'boot: starting uwsgi based on nginx call', 0)
        subprocess.call([
            'uwsgi', '--emperor', '/usr/lib/iicontrollibs/wsgi/', '--daemonize',
            '/var/log/cupid/uwsgi.log'
        ])

    # Mount 1wire master: stop any existing owfs processes first.
    subprocess.call(['killall', 'owfs'])
    subprocess.call(['killall', 'owserver'])
    subprocess.call(['killall', 'owhttpd'])

    runi2cowfs = True
    runusbowfs = False

    temp_unit = 'C'
    for interface in interfaces:
        if interface['enabled']:
            from iiutilities.datalib import parseoptions
            options_dict = parseoptions(interface['options'])
            if 'tempunit' in options_dict:
                if options_dict['tempunit'] in ['F', 'f', 'Fahrenheit', 'fahrenheit']:
                    temp_unit = 'F'

            if interface['interface'] == 'I2C' and interface['type'] == 'DS2483':
                runi2cowfs = True
            if interface['interface'] == 'USB' and interface['type'] == 'DS9490':
                runusbowfs = True

            # BUGFIX: these two checks previously compared the *builtin*
            # `type` to 'CuPIDlights' (always False), so the SPI light
            # updates never ran; compare the interface row's type instead,
            # consistent with the DS2483/DS9490 checks above.
            if interface['interface'] == 'SPI1' and interface['type'] == 'CuPIDlights':
                spilights.updatelightsfromdb(pilib.dirs.dbs.control, 'indicators', 1)
            if interface['interface'] == 'SPI0' and interface['type'] == 'CuPIDlights':
                spilights.updatelightsfromdb(pilib.dirs.dbs.control, 'indicators', 0)

    if runi2cowfs or runusbowfs:
        # owfs '-F' flag reports temperatures in Fahrenheit; default is Celsius.
        if runi2cowfs:
            utility.log(pilib.dirs.logs.system, 'boot: Running i2c owserver', 3, pilib.loglevels.system)
            try:
                if temp_unit == 'F':
                    subprocess.call([
                        '/opt/owfs/bin/owserver', '-F', '--i2c=/dev/i2c-1:ALL', '-p', '4304'
                    ])
                else:
                    subprocess.call([
                        '/opt/owfs/bin/owserver', '--i2c=/dev/i2c-1:ALL', '-p', '4304'
                    ])
            except:
                utility.log(pilib.dirs.logs.system, 'boot: error running i2c owserver', 1,
                            pilib.loglevels.system)
        if runusbowfs:
            utility.log(pilib.dirs.logs.system, 'boot: Running usb owserver', 3, pilib.loglevels.system)
            try:
                if temp_unit == 'F':
                    subprocess.call(['/opt/owfs/bin/owserver', '-F', '-u', '-p', '4304'])
                else:
                    subprocess.call(['/opt/owfs/bin/owserver', '-u', '-p', '4304'])
            except:
                utility.log(pilib.dirs.logs.system, 'error running usb owserver', 1,
                            pilib.loglevels.system)

        utility.log(pilib.dirs.logs.system, 'boot: Running owfs/owserver mount', 3,
                    pilib.loglevels.system)
        try:
            if temp_unit == 'F':
                subprocess.call(['/opt/owfs/bin/owfs', '-F', '-s', '4304', '/var/1wire/'])
            else:
                subprocess.call(['/opt/owfs/bin/owfs', '-s', '4304', '/var/1wire/'])
        except:
            utility.log(pilib.dirs.logs.system, 'boot: error running owfs', 1, pilib.loglevels.system)

        utility.log(pilib.dirs.logs.system, 'boot: Running owhttpd/owserver mount', 3,
                    pilib.loglevels.system)
        try:
            if temp_unit == 'F':
                subprocess.call([
                    '/opt/owfs/bin/owhttpd', '-F', '-s', '4304', '-p', '4305'
                ])
            else:
                subprocess.call(['/opt/owfs/bin/owhttpd', '-s', '4304', '-p', '4305'])
        except:
            utility.log(pilib.dirs.logs.system, 'boot: error running owhttpd', 1,
                        pilib.loglevels.system)
    else:
        utility.log(pilib.dirs.logs.system, 'boot: not running owfs', 3, pilib.loglevels.system)

    # Run netstart script if enabled
    if systemstatus['netconfigenabled']:
        from netconfig import runconfig
        utility.log(pilib.dirs.logs.system, 'boot: running boot netconfig', 2, pilib.loglevels.system)
        runconfig(onboot=True)
def monitor(**kwargs):
    """
    Poll the system serial port and process incoming mote messages.

    Bytes are read one at a time; an empty read or a carriage return ('\\x0D')
    terminates a message, which is parsed via processserialdata() and then
    published/logged. When no data is pending, queued outbound commands are
    sent via queue_commands()/runsendhandler(). If 'checkstatus' is set, the
    'serialhandlerenabled' flag is re-read from the control database every
    checkfrequency seconds and the loop exits when it is cleared.

    kwargs (all optional):
        port          serial device path; defaults to getsystemserialport()
        baudrate      default 115200
        timeout       serial read timeout (seconds), default 1
        checkstatus   poll systemstatus table to decide whether to keep running
        printmessages echo parsed messages to stdout
        debug         enable pilib debug mode

    Fixes vs. original: settings key typo 'printmessage' -> 'printmessages'
    (raised KeyError when a message arrived without a datadict); the
    'processing datadict' log now reports the assembled message length instead
    of the just-cleared buffer (which was always 0); removed the unused,
    unbounded 'stringmessage' accumulator.
    """
    settings = {
        'port': None,
        'baudrate': 115200,
        'timeout': 1,
        'checkstatus': True,
        'printmessages': False,
        'debug': True
    }
    settings.update(kwargs)

    if not settings['port']:
        settings['port'] = getsystemserialport()

    import serial
    from iiutilities import datalib, dblib
    from time import mktime, localtime
    from time import sleep

    motes_db = pilib.dbs.motes
    system_db = pilib.dbs.system

    if settings['debug']:
        pilib.set_debug()
    if settings['printmessages']:
        print('Message printing is enabled.')

    # Buffer of received characters for the message currently being assembled.
    data = []

    seriallog = True
    if seriallog:
        print('serial logging is enabled.')
        # Line-buffered append so log entries land on disk promptly.
        logfile = open(pilib.dirs.logs.serial, 'a', 1)
        logfile.write('\n' + datalib.gettimestring() + ": Initializing serial log\n")

    if settings['checkstatus']:
        systemstatus = system_db.read_table_row('systemstatus')[0]
        runhandler = systemstatus['serialhandlerenabled']
        checktime = mktime(localtime())
        checkfrequency = 15  # seconds between database status re-checks
        if runhandler:
            utility.log(pilib.dirs.logs.io,
                        "Starting monitoring of serial port based on check status", 1,
                        pilib.loglevels.io)
        else:
            utility.log(pilib.dirs.logs.io,
                        "Not starting monitoring of serial port. How did I get here?", 1,
                        pilib.loglevels.serial)
    else:
        runhandler = True

    if runhandler:
        ser = serial.Serial(port=settings['port'], baudrate=settings['baudrate'],
                            timeout=settings['timeout'])
        utility.log(pilib.dirs.logs.io,
                    "Monitoring serial port {}, settings {}/{}".format(
                        ser.name, settings['baudrate'], settings['timeout']), 1,
                    pilib.loglevels.serial)
    else:
        utility.log(pilib.dirs.logs.io, 'not monitoring serial port ', 1,
                    pilib.loglevels.serial)

    while runhandler:
        # This reading has to happen faster than the messages come, or they
        # will all be stuck together.
        try:
            ch = ser.read(1).decode('utf-8')
            if len(ch) == 0 or ch == '\x0D':
                # Empty read (timeout) or carriage return: message boundary.
                utility.log(pilib.dirs.logs.io, 'Time to process message ', 5,
                            pilib.loglevels.serial)
                if len(data) > 1:
                    # More than a stray endline character: assemble and parse.
                    s = ''
                    for x in data:
                        s += '%s' % x
                    data = []
                    try:
                        utility.log(pilib.dirs.logs.serial,
                                    'processing datadict from serial message of length {}'.format(len(s)),
                                    3, pilib.loglevels.serial)
                        datadicts, messages = processserialdata(s)
                    except:
                        import traceback
                        message = "An exception of occurred (line 99): {}".format(
                            traceback.format_exc())
                        utility.log(pilib.dirs.logs.serial, message, 1,
                                    pilib.loglevels.serial)
                    else:
                        for datadict, message in zip(datadicts, messages):
                            if datadict:
                                if (settings['printmessages']):
                                    print("datadict: ")
                                    print(datadict)
                                # Publishing is currently unconditional; the
                                # commented-out key filtering was removed.
                                publish = True
                                if publish:
                                    if (settings['printmessages']):
                                        print('publishing message: ')
                                        print(message)
                                    lograwmessages(message)
                                    # Cap the raw-read table so it can't grow unbounded.
                                    motes_db.size_table('read', **{'size': 1000})
                                    try:
                                        processremotedata(datadict, message)
                                    except:
                                        import traceback
                                        message = "An exception of occurred (line 184): {}".format(
                                            traceback.format_exc())
                                        utility.log(pilib.dirs.logs.serial, message, 1,
                                                    pilib.loglevels.serial)
                            else:
                                # No datadict parsed; optionally echo the raw message.
                                # Fix: key was misspelled 'printmessage' (KeyError).
                                if message and settings['printmessages']:
                                    print('message: \n{}'.format(message))
                                    print(message)
                            # Log message
                            if seriallog:
                                try:
                                    logfile.write(datalib.gettimestring() + ' : ' + message + '\n')
                                except:
                                    import traceback
                                    message = "An exception of occurred (line 198): {}".format(
                                        traceback.format_exc())
                                    utility.log(pilib.dirs.logs.serial, message, 1,
                                                pilib.loglevels.serial)
                else:
                    # No (meaningful) data pending: see if we should send.
                    utility.log(pilib.dirs.logs.serial, 'No data, try sending', 1,
                                pilib.loglevels.serial)
                    data = []
                    try:
                        queue_commands()
                    except:
                        import traceback
                        print('ERROR IN QUEUE COMMANDS \n {}'.format(
                            traceback.format_exc()))
                    try:
                        runsendhandler(ser)
                    except:
                        import traceback
                        template = "An exception of in runsendhandler (line 142): {} .".format(
                            traceback.format_exc())
                        utility.log(pilib.dirs.logs.serial,
                                    "Error in send routine: {}".format(template), 1, 1)
            else:
                # Mid-message character: accumulate.
                data.append(ch)

            if settings['checkstatus']:
                print('checking status')
                thetime = mktime(localtime())
                if thetime - checktime > checkfrequency:
                    print('checking control status')
                    systemstatus = dblib.readonedbrow(pilib.dirs.dbs.control,
                                                      'systemstatus')[0]
                    runserialhandler = systemstatus['serialhandlerenabled']
                    if runserialhandler:
                        checktime = thetime
                        utility.log(pilib.dirs.logs.io,
                                    'Continuing serialhandler based on status check', 3,
                                    pilib.loglevels.io)
                    else:
                        runhandler = False
                        utility.log(pilib.dirs.logs.io,
                                    'Aborting serialhandler based on status check', 3,
                                    pilib.loglevels.io)
        except KeyboardInterrupt:
            print('\n Exiting on keyboard interrupt\n')
            logfile.close()
            return
        except:
            # Transient serial error (e.g. nothing available yet): back off briefly.
            sleep(0.5)

    logfile.close()
    ser.close()
    return
def run_data_agent(**kwargs):
    """
    Collect enabled data-agent items that are due for transmission and post
    them to the remote endpoint as a single batch.

    For each enabled row in the agent database 'send_items' table, the
    matching input row is packaged into a send entry. An entry is sent when it
    has never been transmitted, when send_all is set, or when its
    transmit_period has elapsed. Entries that would come due within
    bunch_period are held in maybe_xmit and piggybacked onto a post that is
    happening anyway, so we don't pay for a second set of headers.

    kwargs: debug, agent_db_path, inputs_db_path, inputs_table, send_all.

    Returns the post_client_data() response on success, None when nothing was
    due, or {'status': 1, 'message': traceback} on a post failure.

    Posted payload shape:
        {'post_time': <timestring>,
         'data': [{'id': ..., 'name': ... (optional), 'data': [entry, ...]}, ...]}

    Fixes vs. original: bunched entries were appended to the boolean
    maybe_send instead of the maybe_xmit list (AttributeError); the shared
    da_vars.default_agent_item_options dict was mutated in place, leaking one
    entry's options into the next; disabled entries fell through to a stale or
    unbound 'send' flag; 'response' was unbound (NameError) when nothing was
    due; settings['inputs_table'] was declared but ignored.
    """
    from iiutilities.datalib import gettimestring, timestringtoseconds

    settings = {
        'debug': False,
        'agent_db_path': '/var/www/data/dataagent.db',
        'inputs_db_path': '/var/www/data/control.db',
        'inputs_table': 'inputs',
        'send_all': False
    }
    settings.update(kwargs)

    data_agent_db = dblib.sqliteDatabase(settings['agent_db_path'])
    inputs_db = dblib.sqliteDatabase(settings['inputs_db_path'])

    data_agent_entries = data_agent_db.read_table('send_items')
    inputs = inputs_db.read_table(settings['inputs_table'])
    inputs_dict = {input_row['id']: input_row for input_row in inputs}

    current_time = gettimestring()

    post_data = {
        'post_time': current_time,
        'data': []
    }
    # Entries not yet due, but due within bunch_period: sent only if a post
    # happens anyway.
    maybe_xmit = []

    for agent_entry in data_agent_entries:
        if not agent_entry['enabled']:
            if settings['debug']:
                print('{} Disabled '.format(agent_entry['id']))
            continue

        if settings['debug']:
            print('{} Enabled '.format(agent_entry['id']))

        # Merge per-entry options over a COPY of the defaults; the original
        # updated the shared default dict in place.
        options = dict(da_vars.default_agent_item_options)
        options.update(json.loads(agent_entry['options']))

        # TODO: Build in other modes besides single.
        # Build in modularity for other ordinates.

        if agent_entry['id'] not in inputs_dict:
            if settings['debug']:
                print('input id {} not found '.format(agent_entry['id']))
            continue

        inputs_entry = inputs_dict[agent_entry['id']]
        send_entry = {'id': agent_entry['id']}
        if 'name' in inputs_entry:
            send_entry['name'] = inputs_entry['name']
        if options['full_entry']:
            send_entry['data'] = [inputs_entry]
        else:
            send_entry['data'] = [{'id': agent_entry['id'],
                                   'polltime': inputs_entry['polltime'],
                                   'value': inputs_entry['value']}]

        send = False
        maybe_send = False
        if not agent_entry['last_transmit'] or settings['send_all']:
            send = True
        else:
            elapsed_since_xmit = (timestringtoseconds(current_time)
                                  - timestringtoseconds(agent_entry['last_transmit']))
            if elapsed_since_xmit > options['transmit_period']:
                send = True
            elif (elapsed_since_xmit + options['bunch_period']) > options['transmit_period']:
                maybe_send = True

        if send:
            if settings['debug']:
                print('Sending "{}"'.format(agent_entry['id']))
            post_data['data'].append(send_entry)
        elif maybe_send:
            if settings['debug']:
                print('Maybe sending "{}"'.format(agent_entry['id']))
            # Fix: original appended to the boolean maybe_send.
            maybe_xmit.append(send_entry)
        else:
            if settings['debug']:
                print('Not sending {}'.format(agent_entry['id']))

    response = None
    if post_data['data']:
        # We are transmitting anyway; attach the almost-due entries.
        post_data['data'].extend(maybe_xmit)
        if settings['debug']:
            print('TIME TO SEND THIS STUFF')
            print(post_data)
        try:
            response = post_client_data(**{'post_data': post_data})
        except:
            import traceback
            trace_message = traceback.format_exc()
            if settings['debug']:
                print('Error, traceback: \n{}'.format(trace_message))
            return {'status': 1, 'message': trace_message}
        else:
            if settings['debug']:
                print('SUCCESS')

        # Mark everything we sent so its transmit_period restarts from now.
        for entry in post_data['data']:
            data_agent_db.set_single_value('send_items', 'last_transmit', current_time,
                                           condition="id='{}'".format(entry['id']),
                                           queue=True)
        data_agent_db.execute_queue()

    return response
def runalgorithm(controldbpath, recipedbpath, channelname):
    """
    Execute the control algorithm for a channel and return [action, message].

    If the channel is running a recipe, advance the recipe stage when the
    current stage's length has elapsed (updating setpoint, stage start time,
    and control algorithm from the next stage), or take the channel off the
    recipe when there is no next stage. Then evaluate the channel's control
    algorithm against setpoint and control values.

    action: 100 (drive up), -100 (drive down), or 0 (within deadband or
    unrecognized algorithm type). message: currently always ''.

    Fixes vs. original: 'action' was unbound (NameError at return) for any
    algorithm type other than 'on/off with deadband'; 'currentstage' was
    unbound when the recorded stage number was not found in the recipe;
    deadband values are now cast to float before arithmetic (sqlite may hand
    back strings).

    NOTE(review): queries are built by string concatenation; channelname and
    recipe names are assumed to be trusted, database-sourced values — confirm
    before exposing to user input.
    """
    from iiutilities.datalib import timestringtoseconds, gettimestring
    from iiutilities.dblib import sqlitequery, datarowtodict
    import time

    message = ''
    action = 0  # default when the algorithm type is unrecognized

    # Get the details of our channel.
    channeldata = sqlitequery(
        controldbpath,
        'select * from channels where name=' + "'" + channelname + "'")[0]
    channeldict = datarowtodict(controldbpath, 'channels', channeldata)

    # Check to see if we are running a recipe.
    controlrecipename = channeldict['controlrecipe']
    if controlrecipename and controlrecipename != 'none':
        # Pull the whole recipe as a dict array. If recipes get too big we
        # would only fetch a couple of stages; fine for now.
        recipedata = sqlitequery(recipedbpath,
                                 'select * from \'' + controlrecipename + '\'')
        recipedictarray = [datarowtodict(recipedbpath, controlrecipename, stage)
                           for stage in recipedata]

        currentstagenumber = int(channeldict['recipestage'])

        # Find the data for the current stage.
        currentstage = None
        for stage in recipedictarray:
            if int(stage['stagenumber']) == currentstagenumber:
                currentstage = stage
                break
        if currentstage is None:
            print('error. stage not found.')

        # Check to see if we need to move to the next stage. Stage 0 means we
        # are just starting the recipe.
        currenttime = time.time()
        stage_expired = (
            currentstage is not None
            and currenttime - timestringtoseconds(channeldict['recipestagestarttime'])
            > int(currentstage['stagelength']))
        if currentstagenumber == 0 or stage_expired:
            print('stage time expired for stage ' + str(currentstagenumber) +
                  '. Checking on stage advance. ')
            # Advance stage if there is another stage. Otherwise take the
            # channel off the recipe. Stages are assumed to be sequential
            # integers.
            nextstagenumber = currentstagenumber + 1
            nextstage = None
            for stage in recipedictarray:
                if int(stage['stagenumber']) == nextstagenumber:
                    nextstage = stage
                    break

            if nextstage is not None:
                print(' Next stage was found. Setting next stage. ')
                if currentstagenumber == 0:
                    print("Stagenumber is 0. Setting recipe start time. ")
                    sqlitequery(controldbpath,
                                'update channels set recipestarttime=\'' +
                                gettimestring(currenttime) +
                                '\' where name=\'' + channelname + '\'')
                # Set stage to new stage number.
                sqlitequery(controldbpath,
                            'update channels set recipestage=\'' +
                            str(nextstagenumber) + '\' where name=\'' + channelname + '\'')
                # Set setpointvalue.
                sqlitequery(controldbpath,
                            'update channels set setpointvalue=\'' +
                            str(nextstage['setpointvalue']) +
                            '\' where name=\'' + channelname + '\'')
                # Set stage start time to now.
                sqlitequery(controldbpath,
                            'update channels set recipestagestarttime=\'' +
                            gettimestring(currenttime) +
                            '\' where name=\'' + channelname + '\'')
                # Set new controlalgorithm.
                sqlitequery(controldbpath,
                            'update channels set controlalgorithm=\'' +
                            nextstage['controlalgorithm'] +
                            '\' where name=\'' + channelname + '\'')
            else:
                # No next stage: take the channel off the recipe.
                sqlitequery(controldbpath,
                            'update channels set controlrecipe=\'none\' where name=\'' +
                            channelname + '\'')
                sqlitequery(controldbpath,
                            'update channels set recipestate=\'0\' where name=\'' +
                            channelname + '\'')
                sqlitequery(controldbpath,
                            'update channels set recipestage=\'0\' where name=\'' +
                            channelname + '\'')
    else:
        # Make sure we're not on a recipe and are on stage 0.
        sqlitequery(controldbpath,
                    'update channels set controlrecipe=\'none\' where name=\'' +
                    channelname + '\'')
        sqlitequery(controldbpath,
                    'update channels set recipestate=\'0\' where name=\'' +
                    channelname + '\'')
        sqlitequery(controldbpath,
                    'update channels set recipestage=\'0\' where name=\'' +
                    channelname + '\'')

    algorithmname = channeldict['controlalgorithm']
    setpointvalue = float(channeldict['setpointvalue'])
    controlvalue = float(channeldict['controlvalue'])

    algorithmrows = sqlitequery(
        controldbpath,
        'select * from controlalgorithms where name=' + "'" + algorithmname + "'")
    algorithm = datarowtodict(controldbpath, 'controlalgorithms', algorithmrows[0])

    if algorithm['type'] == 'on/off with deadband':
        deadbandhigh = float(algorithm['deadbandhigh'])
        deadbandlow = float(algorithm['deadbandlow'])
        if setpointvalue > (controlvalue + deadbandhigh):
            action = 100
        elif setpointvalue < (controlvalue - deadbandlow):
            action = -100
        else:
            action = 0

    return [action, message]
def application(environ, start_response):
    """
    WSGI entry point for the CuPID web API.

    Reads a JSON request body, verifies the session credentials against the
    users database (salted sha1/md5 hash scheme), then dispatches on the
    'action' keyword in the post. Responds with a JSON object containing at
    least 'message', plus 'data'/'response' for actions that return data.

    NOTE(review): several latent issues are flagged inline rather than fixed,
    since the surrounding behavior is load-bearing: hashlib.update() requires
    bytes on Python 3; output['authorized'] is only set on success but read
    unconditionally below; SQL is assembled by string concatenation from post
    values; the return value is a str, not bytes, which strict WSGI servers
    reject.
    """
    import cgi
    import json
    import os, sys, inspect

    # Set top folder to allow import of modules
    top_folder = os.path.split(os.path.realpath(os.path.abspath(os.path.split(inspect.getfile( inspect.currentframe() ))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0,top_folder)

    from cupid import pilib, controllib
    from iiutilities import dblib, utility, datalib

    # Legacy form-style parsing, kept for reference; the body is JSON now.
    # post_env = environ.copy()
    # post_env['QUERY_STRING'] = ''
    # post = cgi.FieldStorage(
    #     fp=environ['wsgi.input'],
    #     environ=post_env,
    #     keep_blank_values=True
    # )
    #
    # formname=post.getvalue('name')
    # output = {}
    # output['message'] = 'Output Message: '
    # for k in post.keys():
    #     d[k] = post.getvalue(k)

    # Read the entire JSON request body; CONTENT_LENGTH may be absent/garbled.
    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    request_body = environ['wsgi.input'].read(request_body_size)
    post = json.loads(request_body.decode('utf-8'))

    output = {}
    output['message'] = ''

    status = '200 OK'
    wsgiauth = True
    authverified = False

    if wsgiauth:
        # Verfiy that session login information is legit: hashed password, with salt and username, match
        # hash stored in database.
        import hashlib

        safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.users)
        if 'username' in post and post['username']:
            output['message'] += 'Session user is ' + post['username'] + '. '
        else:
            output['message'] += 'No session user found. '
            post['username'] = ''

        if post['username']:
            try:
                condition = "name='" + post['username'] + "'"
                user_data = safe_database.read_table_row('users', condition=condition)[0]
            except:
                # Fall back to demo-level access on any lookup failure.
                output['message'] += 'Error in user sqlite query for session user "' + post['username'] + '". '
                output['message'] += 'Condition: ' + condition + '. Path: ' + pilib.dirs.dbs.safe
                user_data = {'accesskeywords': 'demo', 'admin': False}
            else:
                # Get session hpass to verify credentials
                # NOTE(review): on Python 3, hashlib update() requires bytes;
                # these str arguments would raise TypeError — confirm runtime.
                hashedpassword = post['hpass']
                hname = hashlib.new('sha1')
                hname.update(post['username'])
                hashedname = hname.hexdigest()
                hentry = hashlib.new('md5')
                hentry.update(hashedname + pilib.salt + hashedpassword)
                hashedentry = hentry.hexdigest()
                if hashedentry == user_data['password']:
                    # successful auth
                    output['message'] += 'Password verified. '
                    authverified = True
                    # TODO: implement usermeta
        else:
            # Demo status: no username supplied grants minimal auth level.
            authverified = True
            user_data = {'authlevel':0}
    else:
        output['message'] += 'WSGI authorization not enabled. '

    # NOTE(review): 'authorized' is only set on success; the reads below would
    # raise KeyError on auth failure — confirm intended.
    if authverified or not wsgiauth:
        output['authorized'] = True

    try:
        action = post['action']
    except KeyError:
        output['message'] = 'no action in request'
        action = ''
    else:
        output['message'] += '{} action keyword found'.format(action)

    if output['authorized'] and action:
        output['action_allowed'] = pilib.check_action_auths(action, user_data['authlevel'])
    else:
        output['action_allowed'] = False

    if output['authorized'] and output['action_allowed']:
        output['message'] += 'Found action. '

        # --- action dispatch ---
        if action == 'testdbvn':
            from iiutilities.dblib import dbvntovalue
            try:
                output['data'] = dbvntovalue(post['dbvn'])
            except:
                output['message'] += 'Error in dbvn evaluation. '
                output['data'] = 'error'
            else:
                output['message'] += 'Seems to have worked out. '

        elif action == 'testlogical':
            from iiutilities.datalib import evaldbvnformula
            try:
                output['data'] = evaldbvnformula(post['logical'])
            except:
                output['message'] += 'Error in logical evaluation. '
                output['data'] = 'error'
            else:
                output['message'] += 'Seems to have worked out. '

        elif action == 'testmodule':
            output['message'] += 'Testing module: '
            if 'modulename' in post:
                import cupid.cupidunittests
                output['message'] += post['modulename']
                output['data'] = cupid.cupidunittests.testmodule(post['modulename'])
            else:
                output['message'] += 'Modulename not found. '

        elif action == 'testfunction':
            output['message'] += 'Testing function: '
            if 'testname' in post:
                import cupid.cupidunittests
                output['message'] += post['testname']
                # output['data'] = cupid.tests.testfunction(d['testname'])
                output['data'] = cupid.cupidunittests.testfunction(post['testname'])
                # output['data'] = str(cupid.tests.testfunction('systemstatus'))
            else:
                output['message'] += 'Testname not found. '

        elif action == 'modifychannelalarm':
            controllib.handle_modify_channel_alarm(post, output)
            from cupid.actions import processactions
            # process only this action.
            processactions(name=post['actionname'])

        elif action == 'modifychannel':
            controllib.handle_modify_channel(post, output)

        elif action == 'getalarmscount':
            # Tally total/channel/active alarm counts from the actions table.
            # NOTE(review): the loop variable shadows the 'action' dispatch string.
            control_db = dblib.sqliteDatabase(pilib.dirs.dbs.control)
            actions = control_db.read_table('actions')
            output['data'] = {'totalalarms':len(actions),'channelalarms':0, 'activealarms':0, 'activechannelalarms':0}
            for action in actions:
                if action['conditiontype'] == 'channel':
                    output['data']['channelalarms'] += 1
                    if action['active']:
                        output['data']['activechannelalarms'] += 1
                if action['active']:
                    output['data']['activealarms'] += 1

        elif action == 'copy_log_to_archive':
            pilib.app_copy_log_to_archive(post, output)

        elif action == 'getlogscount':
            logtablenames = dblib.sqliteDatabase(pilib.dirs.dbs.log).get_table_names()
            output['data'] = {'logscount':len(logtablenames)}

        elif action == 'test_action':
            output['message'] += 'Testing action. '
            controldb = dblib.sqliteDatabase(pilib.dirs.dbs.control)
            actiondict = controldb.read_table('actions',condition='"name"=\'' + post['actionname'] + "'")[0]
            from cupid.actions import action
            test_action = action(actiondict)
            test_action.test()

        elif action == 'update_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            safe_database.set_single_value('wireless', 'password', post['password'], "SSID='" + post['ssid'] + "'")

        elif action == 'add_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            insert = {'SSID':post['ssid'], 'auto':1, 'priority':1}
            if 'password' in post:
                insert['password'] = post['password']
            safe_database.insert('wireless',insert)

        elif action == 'delete_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            safe_database.delete('wireless', "SSID='" + post['ssid'] + "'")

        # elif action == 'dump':
        #     # this has to go.
        #     if 'database' in d:
        #         dbpath = pilib.dbnametopath(d['database'])
        #         if dbpath:
        #             if 'tablelist' in d and 'outputfile' in d:
        #                 dbpath = pilib.dbnametopath(d['database'])
        #                 dblib.sqlitedatadump(dbpath, d['tablelist'], d['outputfile'])
        #                 output['message'] = 'data dumped'
        #             elif 'tablename' in d and 'outputfile' in d:
        #                 dblib.sqlitedatadump(dbpath, [d['tablename']], d['outputfile'])
        #                 output['message'] = 'data dumped. '
        #             else:
        #                 output['message'] += 'keys not present for dump. '
        #         else:
        #             output['message'] += 'keys not present for dump. '
        #     else:
        #         output['message'] += 'keys not present for dump. '

        elif action in ['userdelete', 'useradd', 'usermodify']:
            """
            This needs to be consolidate with the other useradd, modify algorithm written already.
            Probably do this when we update the user permissions interface.
            """
            # Ensure that we are authorized for this action
            if action == 'userdelete':
                try:
                    dblib.sqlitequery(pilib.dirs.dbs.users, "delete from users where name='" + post['usertodelete'] + "'")
                except:
                    output['message'] += 'Error in delete query. '
                else:
                    output['message'] += 'Successful delete query. '
            elif action == 'usermodify':
                if 'usertomodify' in post:
                    querylist=[]
                    if 'newpass' in post:
                        from pilib import salt
                        # Get session hpass to verify credentials
                        hashedpassword = post['newpass']
                        hname = hashlib.new('sha1')
                        hname.update(post['usertomodify'])
                        hashedname = hname.hexdigest()
                        hentry = hashlib.new('md5')
                        hentry.update(hashedname + salt + hashedpassword)
                        hashedentry = hentry.hexdigest()
                        # NOTE(review): this line appears redacted/garbled in
                        # the source ('******'); restore from VCS before use.
                        querylist.append('update users set password='******'" + post['usertomodify'] + "'")
                    if 'newemail' in post:
                        querylist.append("update users set email='" + post['newemail'] + "' where name='" + post['usertomodify'] + "'")
                    if 'newauthlevel' in post:
                        querylist.append("update users set authlevel='" + post['newauthlevel'] + "' where name='" + post['usertomodify'] + "'")
                    try:
                        dblib.sqlitemultquery(pilib.dirs.dbs.users, querylist)
                    except:
                        output['message'] += 'Error in modify/add query: ' + ",".join(querylist)
                    else:
                        output['message'] += 'Successful modify/add query. ' + ",".join(querylist)
                else:
                    output['message'] += 'Need usertomodify in query. '
            elif action == 'useradd':
                # NOTE(review): placeholder values below look redacted ('******').
                try:
                    username = post['newusername']
                except:
                    username = '******'
                try:
                    newemail = post['newemail']
                except:
                    newemail = '*****@*****.**'
                try:
                    newauthlevel = post['newauthlevel']
                except:
                    newauthlevel = 0
                query = "insert into users values(NULL,'" + username + "','','" + newemail + "',''," + str(newauthlevel) + ")"
                try:
                    dblib.sqlitequery(pilib.dirs.dbs.users, query)
                except:
                    output['message'] += "Error in useradd sqlite query: " + query + ' . '
                else:
                    output['message'] += "Successful query: " + query + ' . '

        elif action == 'getfiletext':
            # Return up to numlines of a file, from the end (tail) or the start.
            try:
                filepath = post['filepath']
                if 'numlines' in post:
                    numlines = int(post['numlines'])
                else:
                    numlines = 9999
                output['message'] += 'Using numlines: ' + str(numlines) + ' for read action. '
                if 'startposition' in post:
                    startposition = post['startposition']
                else:
                    startposition = 'end'
                output['message'] += 'Reading from position ' + startposition + '. '
            except KeyError:
                output['message'] += 'Sufficient keys for action getfile text do not exist. '
            except:
                output['message'] += 'Uncaught error in getfiletext. '
            else:
                try:
                    file = open(filepath)
                    lines = file.readlines()
                except:
                    output['message'] += 'Error reading file in getfiletext action. '
                else:
                    output['data'] = []
                    if startposition == 'end':
                        try:
                            output['data'] = datalib.tail(file, numlines)[0]
                        except:
                            output['message'] += 'Error in tail read. '
                    else:
                        linecount = 0
                        for line in lines:
                            linecount += 1
                            if linecount > numlines:
                                break
                            else:
                                output['data'].append(line)

        elif action == 'getmbtcpdata':
            # Read Modbus TCP coded addresses from a remote client.
            try:
                clientIP = post['clientIP']
                register = post['register']
                length = post['length']
            except KeyError:
                output['message'] += 'Sufficient keys do not exist for the command. Requires clientIP, register, and length. '
            else:
                from iiutilities.netfun import readMBcodedaddresses
                # try:
                output['response'] = readMBcodedaddresses(clientIP, int(register), int(length))

        elif action == 'queuemessage':
            output['message'] += 'Queue message. '
            if 'message' in post:
                try:
                    dblib.sqliteinsertsingle(pilib.dirs.dbs.motes, 'queuedmessages', [datalib.gettimestring(), post['message']])
                except:
                    import traceback
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    output['message'] += 'Error in queue insert query: {}. '.format(traceback.format_exc())
                else:
                    output['message'] += 'Message insert successful'
            else:
                output['message'] += 'No message present. '

        elif action == 'setsystemflag' and 'systemflag' in post:
            database = pilib.dirs.dbs.system
            dblib.setsinglevalue(database, 'systemflags', 'value', 1, "name=\'" + post['systemflag'] + "'")

        elif action == 'rundaemon':
            from cupiddaemon import rundaemon
            rundaemon()

        # TODO: Eliminate this scary thing.
        elif action == 'setvalue':
            utility.log(pilib.dirs.logs.control, "Setting value in wsgi", 1, 1)
            # we use the auxiliary 'setsinglecontrolvalue' to add additional actions to update
            if all(k in post for k in ('database', 'table', 'valuename', 'value')):
                dbpath = pilib.dbnametopath(post['database'])
                if dbpath:
                    output['message'] += 'Carrying out setvalue for value ' + post['valuename'] + ' on ' + post['table'] + ' in ' + dbpath
                    if 'condition' in post:
                        pilib.setsinglecontrolvalue(dbpath, post['table'], post['valuename'], post['value'], post['condition'])
                    elif 'index' in post:
                        condition = 'rowid= ' + post['index']
                        pilib.setsinglecontrolvalue(dbpath, post['table'], post['valuename'], post['value'], condition)
                    else:
                        pilib.setsinglecontrolvalue(dbpath, post['table'], post['valuename'], post['value'])
                else:
                    output['message'] += 'Problem translating dbpath from friendly name: ' + post['database']
            else:
                output['message'] += 'Insufficient data for setvalue '

        elif action == 'updateioinfo':
            if all(k in post for k in ['database', 'ioid', 'value']):
                query = dblib.makesqliteinsert('ioinfo', [post['ioid'], post['value']], ['id', 'name'])
                try:
                    dblib.sqlitequery(pilib.dirs.dbs.control, query)
                except:
                    output['message'] += 'Error in updateioinfo query execution: ' + query +'. into database: ' + pilib.dirs.dbs.control
                    output['message'] += 'ioid: ' + post['ioid'] + ' . '
                else:
                    output['message'] += 'Executed updateioinfo query. '
            else:
                output['message'] += 'Insufficient data for updateioinfo query ! '

        # TODO: properly incorporate and test channel class functions here, and then sub it.
        elif action == 'modify_channel':
            controllib.app_modify_channel(post, output)

        elif action == 'deletechannelbyname' and 'database' in post and 'channelname' in post:
            dbpath = pilib.dbnametopath(post['database'])
            dblib.sqlitequery(dbpath, 'delete channelname from channels where name=\"' + post['channelname'] + '\"')

        elif action == 'updatecameraimage':
            output['message'] += 'Take camera image keyword. '
            import cupid.camera
            if 'width' in post:
                width = post['width']
            else:
                width = 800
            try:
                values = cupid.camera.takesnap(width=width)
            except:
                output['message'] += 'Error taking image. '
            else:
                output['message'] += 'Appears successful. Path : ' + values['imagepath'] + '. Timestamp : ' + values['timestamp'] + '. '
                output['data'] = values

        elif action == 'getcurrentcamtimestamp':
            output['message'] += 'getcurrentcamtimestamp keyword found. '
            try:
                with open('/var/www/webcam/images/current.jpg.timestamp') as f:
                    data = f.read()
            except:
                output['message'] += 'Error reading file as requested. '
            else:
                output['data'] = data

        else:
            output['message'] += 'Action keyword present(' + action + '), but not handled. '
    else:
        output['message'] += 'Authentication unsuccessful or action not authorized.'
        status = '401 Not Authorized'

    # Serialize the response; every code path returns JSON.
    foutput = json.dumps(output, indent=1)

    response_headers = [('Content-type', 'application/json')]
    start_response(status, response_headers)

    return [foutput]
def generatehamachipage(hamachidata=None, path=None):
    """
    Render a jQuery-Mobile status page listing hamachi networks and clients.

    hamachidata : list of network dicts (as returned by gethamachidata());
                  fetched via gethamachidata() when not supplied.
    path        : output html file path. When None, data is fetched but
                  nothing is written (original no-op behavior preserved).

    Each client row links to https://<hamachiip>/ and shows an Online/Offline
    badge; clients whose options contain monitor=1 also get a Daemon badge.

    Fixes vs. original: the output file is now opened with a context manager
    (the original leaked the handle on exception and shadowed the 'file'
    builtin), and the scattered 'if path:' guards are collapsed into a single
    early return.
    """
    from iiutilities.netfun import gethamachidata
    from iiutilities.datalib import parseoptions, gettimestring

    if not hamachidata:
        hamachidata = gethamachidata()

    # No destination: keep the original behavior of doing nothing further.
    if not path:
        return

    with open(path, 'w') as outfile:
        # Page header, stylesheet/script includes, and the updated-at banner.
        htmlstring = (
            '<!DOCTYPE html>' +
            '<html>' +
            '<head>' +
            '<title>Hamachi Remotes Home</title>' +
            '<meta name="viewport" content="width=device-width, initial-scale=1">' +
            '<link rel="stylesheet" href="jqm/themes/base.css" />' +
            '<link rel="stylesheet" href="jqm/themes/jquery.mobile.icons.min.css" />' +
            '<link rel="stylesheet" href="jqm/jquery.mobile.custom.structure.min.css" />' +
            '<!--jQuery Mobile is 1.4.5-->' +
            '<script src="js/jquery-1.11.1.js"></script>' +
            '<script src="jqm/jquery.mobile.custom.js"></script>' +
            '<style>' +
            '.online {' +
            'background-color:#bbffbb' +
            '}' +
            '.offline {' +
            'background-color:#ffbbbb' +
            '}' +
            '</style>' +
            '</head>' +
            '<body>' +
            '<div data-role="page" id="demo-page" class="my-page" data-theme="d">' +
            '<div role="main" class="ui-content">')
        htmlstring += '<ul data-role="listview" data-inset=true><li data-role="list-divider">'
        htmlstring += 'Updated : ' + gettimestring() + '</li></ul>'
        outfile.write(htmlstring)

        for network in hamachidata:
            # One inset listview per network, headed by name : id.
            htmlstring = ('<ul data-role="listview" data-inset="true">' +
                          '<li data-role="list-divider">' + network['name'] +
                          ' : ' + network['id'] + '</li>')
            for client in network['clientlist']:
                htmlstring += '<li>'
                htmlstring += '<fieldset class="ui-grid-a"><div class="ui-block-a" style="width:50%">'
                htmlstring += '<a href="https://' + client['hamachiip'] +'/">' + client['name'] + '</a> : ' + client['hamachiip']
                options = parseoptions(client['options'])
                htmlstring += '</div>'
                if client['onlinestatus']:
                    htmlstring += '<div class="online" style="width:60px; float:right; text-shadow:none; text-align:center; border-radius:0.3em; border-width:1.2px; border-style:solid; border-color:#333333">Online</div>'
                else:
                    htmlstring += '<div class="offline" style="width:60px; float:right; text-shadow:none; text-align:center; border-radius:0.3em; border-width:1.2px; border-style:solid; border-color:#333333">Offline</div>'
                if 'monitor' in options:
                    if options['monitor'] == '1':
                        htmlstring += '<div class="online" style="width:70px; float:right; text-align:center; border-radius:0.4em; border-width:1px; border-style:solid; border-color:#333333; margin-right:10px">Daemon</div>'
                htmlstring += '</fieldset></li>\n'
            htmlstring += '</ul>'
            outfile.write(htmlstring)

        outfile.write('</div></div>\n')
def run_system_status(**kwargs):
    """
    Main system-status daemon loop for the CuPID controller.

    Repeatedly (while systemstatus['systemstatusenabled'] or settings['force'])
    runs notifications, the data agent, network status/config watchdogs, the
    Hamachi watchdog, and system-flag processing, then sleeps for
    systemstatus['systemstatusfreq'] seconds.

    Keyword arguments (merged over defaults):
        debug (bool, default False): prints DEBUG MODE, forces quiet off and
            calls pilib.set_debug().
        quiet (bool, default False): passed through to update_net_status().
        force (bool, default True): run the loop even if
            systemstatus['systemstatusenabled'] is falsy.
        runonce: when truthy, break out of the loop after one pass.

    Side effects: reads/writes the system database (systemstatus, netstatus
    tables), writes log files, and calls module-level helpers
    (updatehardwareinfo, update_net_status, watchdognetstatus,
    watchdoghamachi, processsystemflags) defined elsewhere in this file.
    """
    import pilib
    import time
    from iiutilities import utility
    from iiutilities import dblib
    from iiutilities import datalib
    from iiutilities import gitupdatelib
    from iiutilities import data_agent

    settings = {
        'debug': False,
        'quiet': False,
        'force': True
    }
    settings.update(kwargs)

    if settings['debug']:
        print('DEBUG MODE')
        settings['quiet'] = False
        pilib.set_debug()

    # This doesn't update git libraries. It checks current versions and updates the database
    try:
        utility.log(pilib.dirs.logs.system, 'Checking git versions', 3, pilib.loglevels.system)
        gitupdatelib.updaterepoversion(pilib.dirs.baselib, pilib.dirs.dbs.system, versiontablename='versions')
        gitupdatelib.updaterepoversion(pilib.dirs.web, pilib.dirs.dbs.system, versiontablename='versions')
    except:
        # NOTE(review): bare except hides the actual git/db error; only the log line records it.
        utility.log(pilib.dirs.logs.system, 'Error in git version check', 0, pilib.loglevels.system)
    else:
        utility.log(pilib.dirs.logs.system, 'Git version check complete', 3, pilib.loglevels.system)

    # Single-row systemstatus table drives the loop's enable flags.
    systemstatus = pilib.dbs.system.read_table_row('systemstatus')[0]

    # Get hardware info
    updatehardwareinfo()

    ## Read wireless config via iwconfig
    # this is breaking systemstatus for some reason
    # updateiwstatus()

    ## Read current netstatus
    lastnetstatus = {}
    try:
        lastnetstatus = dblib.readonedbrow(pilib.dirs.dbs.system, 'netstatus')[0]
    except:
        utility.log(pilib.dirs.logs.network, 'Error reading network status. ', 1, pilib.loglevels.network)
    else:
        utility.log(pilib.dirs.logs.system, 'Completed network status. ', 3, pilib.loglevels.network)

    # # Poll netstatus and return data
    # allnetstatus = updatenetstatus(lastnetstatus, quiet=settings['quiet'])

    # Keep reading system status?
    while systemstatus['systemstatusenabled'] or settings['force']:
        # Run notifications
        pilib.process_notifications_queue()

        try:
            data_agent.run_data_agent()
        except:
            utility.log(pilib.dirs.logs.system, 'Error running data agent. ', 1, pilib.loglevels.network)
        else:
            utility.log(pilib.dirs.logs.system, 'Data agent run successfully. ', 3, pilib.loglevels.network)

        currenttime = datalib.gettimestring()
        # Record poll heartbeat so other processes can see the daemon is alive.
        dblib.setsinglevalue(pilib.dirs.dbs.system, 'systemstatus', 'lastsystemstatuspoll', datalib.gettimestring())

        starttime = time.time()
        utility.log(pilib.dirs.logs.system, 'System status routine is starting. ', 3, pilib.loglevels.system)

        """
        Check all network statuses. The goal here is to totally decouple status read and reconfigure
        When we need to check all status data, we'll have it either in a dict or dict array, or in a database table
        This sub will read config and status and give both overall and granular interface statuses.
        Then, if status is not 'ok', we will reconfigure interface.
        """

        if systemstatus['netstatusenabled']:
            utility.log(pilib.dirs.logs.system, 'Beginning network routines. ', 3, pilib.loglevels.system)

            # Update network interfaces statuses for all interfaces, in database tables as well
            # Check on wpa supplicant status as well. Function returns wpastatusdict
            try:
                utility.log(pilib.dirs.logs.system, 'Running updateifacestatus. ', 4, pilib.loglevels.system)
                utility.log(pilib.dirs.logs.network, 'Running updateifacestatus', 4, pilib.loglevels.network)
                allnetstatus = update_net_status(lastnetstatus, quiet=settings['quiet'])
            except:
                utility.log(pilib.dirs.logs.network, 'Exception in updateifacestatus. ')
            else:
                utility.log(pilib.dirs.logs.network, 'Updateifacestatus completed. ')

            utility.log(pilib.dirs.logs.system, 'Completed net status update. ', 4, pilib.loglevels.system)
            """ End network configuration status """

            """
            Do we want to autoconfig the network?
            If so, we analyze our netstatus data against what should be going on,
            and translate this into a network status. We have a list of ifaceconfigs
            and a list if ifacestatus
            """
            if systemstatus['netconfigenabled'] and systemstatus['netstatusenabled']:
                # No need to get this fresh. We have it stored.
                netconfig_data = allnetstatus['netconfig_data']

                # We are going to hack in a jumper that sets AP configuration. This isn't the worst thing ever.
                # if netconfig_data['apoverride']:
                #     result = processapoverride(21)

                ''' Now we check network status depending on the configuration we have selected '''
                utility.log(pilib.dirs.logs.system, 'Running interface configuration watchdog. ', 4, pilib.loglevels.system)
                utility.log(pilib.dirs.logs.network, 'Running interface configuration. Mode: {}'.format(netconfig_data['mode']), 4, pilib.loglevels.network)
                # print('ALL IFACE STATUS')
                # print(allnetstatus['ifaces_status'])
                result = watchdognetstatus(allnetstatus=allnetstatus)
            else:
                utility.log(pilib.dirs.logs.system, 'Netconfig or netstatus disabled. ', 1, pilib.loglevels.system)
                dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'mode', 'manual')
                dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'statusmsg', 'netconfig is disabled')

            """ Check Hamachi """
            if systemstatus['checkhamachistatus']:
                utility.log(pilib.dirs.logs.system, 'Hamachi watchdog is enabled', 3, pilib.loglevels.system)
                utility.log(pilib.dirs.logs.network, 'Hamachi watchdog is enabled. ', 3, pilib.loglevels.network)

                # Only watchdog haamchi if we are connected to the network.
                netstatus = dblib.readonedbrow(pilib.dirs.dbs.system, 'netstatus')[0]
                if netstatus['WANaccess']:
                    utility.log(pilib.dirs.logs.system, 'We appear to be online. Checking Hamachi Status. ', 3, pilib.loglevels.system)
                    utility.log(pilib.dirs.logs.network, 'We appear to be online. Checking Hamachi Status. ', 3, pilib.loglevels.network)
                    # pingip is a known host on the Hamachi VPN used as a reachability probe.
                    watchdoghamachi(pingip='25.11.87.7')
                    utility.log(pilib.dirs.logs.system, 'Completed checking Hamachi Status. ', 3, pilib.loglevels.network)
                    utility.log(pilib.dirs.logs.system, 'Completed checking Hamachi Status. ', 3, pilib.loglevels.network)
                else:
                    utility.log(pilib.dirs.logs.system, 'We appear to be offline. Not checking Hamachi Status, but setting to 0. ', 3, pilib.loglevels.system)
                    utility.log(pilib.dirs.logs.network, 'We appear to be offline. Not checking Hamachi Status, but setting to 0. ', 3, pilib.loglevels.network)
                    dblib.setsinglevalue(pilib.dirs.dbs.system, 'systemstatus', 'hamachistatus', 0)
            else:
                utility.log(pilib.dirs.logs.system, 'Hamachi watchdog is disabled', 3, pilib.loglevels.system)

            utility.log(pilib.dirs.logs.system, 'Finished interface configuration. ', 4, pilib.loglevels.system)
            # pilib.writedatedlogmsg(pilib.dirs.logs.network, statusmsg)

            # Refresh interface status once more after any reconfiguration above.
            utility.log(pilib.dirs.logs.system, 'Running updateifacestatus. ', 4, pilib.loglevels.system)
            update_net_status()
            utility.log(pilib.dirs.logs.system, 'Completed updateifacestatus. ', 4, pilib.loglevels.system)

            utility.log(pilib.dirs.logs.system, 'Network routines complete. ', 3, pilib.loglevels.system)

        utility.log(pilib.dirs.logs.system, 'Checking system flags. ', 3, pilib.loglevels.system)
        processsystemflags()
        utility.log(pilib.dirs.logs.system, 'System flags complete. ', 3, pilib.loglevels.system)

        # Get system status again
        systemstatus = pilib.dbs.system.read_table('systemstatus')[0]

        elapsedtime = int(time.time() - starttime)
        utility.log(pilib.dirs.logs.system, 'Status routines complete. Elapsed time: {}'.format(str(elapsedtime)), 3, pilib.loglevels.system)

        utility.log(pilib.dirs.logs.system, 'System status is sleeping for {} .'.format(systemstatus['systemstatusfreq']), 3, pilib.loglevels.system)
        # print('enabled: ' , systemstatus['systemstatusenabled'])

        if 'runonce' in kwargs and kwargs['runonce']:
            break

        time.sleep(systemstatus['systemstatusfreq'])

    else:
        # while/else: reached only when the loop condition goes false (not via break).
        utility.log(pilib.dirs.logs.system, 'System status is disabled. Exiting. ', 0, pilib.loglevels.system)
def split_and_trim_db_by_date(logpath, **kwargs): from iiutilities import dblib from iiutilities.datalib import gettimestring import time settings = { 'division': 'day', 'timekey': 'time', 'remove': 'true' } settings.update(kwargs) data_by_date = split_time_db(logpath, **settings) dates = [date for date in data_by_date] dates.sort(reverse=True) # print('Most recent date', dates[0]) if dates: current_date = dates[0] else: # print('NO time yet.') current_date = time.gmtime() # print(current_date) dates.reverse() log_db = dblib.sqliteDatabase(logpath) modified_dbs = [] for date in data_by_date: # Prune off time. timestring = gettimestring(time.mktime(date)).split(' ')[0] # print(timestring, 'tables: ' +str(len([tablename for tablename in data_by_date[date]]))) # for table in data_by_date[date]: # print(table) new_db_path = logpath.split('.')[0] + '_' + timestring + '.' + logpath.split('.')[1] modified_dbs.append(new_db_path) new_db = dblib.sqliteDatabase(new_db_path) # if table doesn't exist, we create it new_db.tablenames = new_db.get_table_names() # print('existing tablenames: ') # print(new_db.tablenames) for tablename in data_by_date[date]: if tablename not in new_db.tablenames: # print('creating table ' + tablename) new_db.create_table(tablename, data_by_date[date][tablename]['schema'], queue=True) # print(data_by_date[date][tablename]['data'][0]) # print(data_by_date[date][tablename]['schema'].items) new_db.insert(tablename, data_by_date[date][tablename]['data'], queue=True) # print(new_db.queued_queries) new_db.execute_queue() # Now we need to remove the old entries if date != current_date: for tablename in data_by_date[date]: for datum in data_by_date[date][tablename]['data']: log_db.delete(tablename, '"' + settings['timekey'] + '"=' + "'" + datum[settings['timekey']] + "'", queue=True) # print(log_db.queued_queries) # print('Deletes',len(log_db.queued_queries)) log_db.execute_queue() return {'modified_dbs':modified_dbs}
def application(environ, start_response):
    """
    WSGI entry point for the inventory API.

    Reads a JSON POST body, authenticates the session user against the safe
    database (md5(sha1(username) + salt + hpass) scheme), dispatches on
    post['action'] to the inventorylib/panelbuilder functions, and returns a
    JSON response with an Etag for client-side caching.

    Fixes in this revision:
    - usermeta lookup condition restored (the previous text was corrupted by a
      credential scrubber) to match the users-table query pattern.
    - 'lengths'/'starts' are wrapped in list(): in Python 3, map() returns an
      iterator, and lengths[index] raised TypeError (never the caught
      IndexError).
    - reloaditemdatafromstock: key typo 'assemblyame' corrected to
      'assemblyname' (the branch body reads post['assemblyname']).
    """
    import json
    import hashlib

    # Set top folder to allow import of modules
    import os, sys, inspect
    top_folder = \
        os.path.split(os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0, top_folder)

    import inventorylib
    from iiutilities import dblib, datalib
    from time import time

    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    request_body = environ['wsgi.input'].read(request_body_size)
    try:
        post = json.loads(request_body.decode('utf-8'))
    except:
        print('Error decoding: ')
        print(request_body.decode('utf-8'))
        post = {}

    output = {'message': ''}
    status = '200 OK'

    # Record caller IP; prefer the forwarded-for header when behind a proxy.
    try:
        try:
            output['remote_ip'] = environ['HTTP_X_FORWARDED_FOR'].split(',')[-1].strip()
        except KeyError:
            output['remote_ip'] = environ['REMOTE_ADDR']
    except:
        output['remote_ip'] = 'Error getting IP address'

    """
    Here we verify credentials of session data against those in the database.
    While we authenticate in the browser, this does not stop POST queries to the
    API without the page provided. So we take the hpass stored in the dictionary
    and verify.

    * Which databases are available are stored in users table, column accesskeywords
    * Which one is currently in use is stored in table usermeta, data where
      user=username. data is json-encoded metadata pathalias field
    * What path extension this corresponds to is stored in pathaliases
    """

    wsgiauth = True
    authverified = False

    if wsgiauth:
        # Verify that session login information is legit: hashed password, with
        # salt and username, match hash stored in database.
        import hashlib

        safe_database = dblib.sqliteDatabase(inventorylib.sysvars.dirs.dbs.safe)
        if 'username' in post and post['username']:
            output['message'] += 'Session user is ' + post['username'] + '. '
        else:
            output['message'] += 'No session user found. '
            post['username'] = ''

        if post['username']:
            try:
                # NOTE(review): condition built by string concatenation from
                # client input -- SQL-injection prone; should be parameterized.
                condition = "name='" + post['username'] + "'"
                user_data = safe_database.read_table_row('users', condition=condition)[0]
            except:
                output['message'] += 'error in user sqlite query for session user "' + post['username'] + '". '
                # Fall back to demo-level access on lookup failure.
                user_data = {'accesskeywords': 'demo', 'admin': False}
            else:
                # Get session hpass to verify credentials
                hashedpassword = post['hpass']
                hname = hashlib.new('sha1')
                hname.update(post['username'].encode('utf-8'))
                hashedname = hname.hexdigest()
                hentry = hashlib.new('md5')
                hentry.update((hashedname + inventorylib.sysvars.salt + hashedpassword).encode('utf-8'))
                hashedentry = hentry.hexdigest()
                if hashedentry == user_data['password']:
                    # successful auth
                    output['message'] += 'Password verified. '
                    authverified = True
                    # output['message'] += 'accesskeywords : ' + str(userdata)
                    output['accesskeywords'] = user_data['accesskeywords']
                    if output['accesskeywords'].find(',') >= 0:
                        accesskeywords = output['accesskeywords'].split(',')
                        accesskeywords = [accesskeyword.strip() for accesskeyword in accesskeywords]
                    else:
                        accesskeywords = output['accesskeywords'].strip()

                    path_aliases = safe_database.read_table('pathaliases')

                    # Find usermeta entry and grab which database is selected.
                    # If one is not selected, update selection to first that
                    # user is allowed to access.
                    try:
                        user_meta_row = safe_database.read_table_row('usermeta', condition="user='" + post['username'] + "'")[0]
                    except:
                        print('error getting usermeta for username ' + post['username'])
                        output['message'] += 'error getting usermeta for username ' + post['username']
                        user_meta_row = []
                        # NOTE(review): bare return here yields None from a WSGI
                        # app (start_response never called) -- server will 500.
                        return

                    path_alias = ''
                    if not user_meta_row:
                        output['message'] += 'User meta entry not found. Attempting to create. '

                        # assign default database
                        default_database = accesskeywords[0]
                        output['message'] += 'Choosing pathalias from first in keywords: ' + default_database + '. '
                        if any(default_database == path_alias['alias'] for path_alias in path_aliases):
                            output['message'] += 'Verified that default alias exists in pathaliases database. '
                        else:
                            output['message'] += 'ERROR: first entry in keywords (' + default_database + ') not found in aliases. '

                        # Insert usermeta entry. This should never happen.
                        safe_database.insert('usermeta', {'user': post['username'], 'data': 'pathalias:' + default_database})
                        path_alias = default_database
                    else:
                        output['message'] += 'User meta entry found with text ' + str(user_meta_row) + '. '

                        # Parse the string into json and ensure that the pathalias is in there
                        user_meta_dict = datalib.parseoptions(user_meta_row['data'])
                        if 'pathalias' in user_meta_dict:
                            path_alias = user_meta_dict['pathalias']
                            output['message'] += 'pathalias found: ' + user_meta_dict['pathalias'] + '. '
                            if any(path_alias == stored_path_alias['alias'] for stored_path_alias in path_aliases):
                                output['message'] += 'Verified that default alias exists in pathaliases database. '

                    if path_alias:
                        # reload datapaths with path alias
                        reload_message = inventorylib.reloaddatapaths(pathalias=path_alias)
                        # DEFINITELY COMMENT THIS OUT FOR SECURITY SAKE (absolute paths are secret!!)
                        output['message'] += reload_message
                else:
                    # failed password check
                    output['message'] += 'Failed password check. '
        else:
            # Demo status
            authverified = True
            user_data = {'authlevel': 0}
    else:
        output['message'] += 'WSGI authorization not enabled. '

    if authverified or not wsgiauth:
        output['authorized'] = True
    else:
        output['authorized'] = False

    try:
        action = post['action']
    except KeyError:
        output['message'] = 'no action in request'
        action = ''

    if output['authorized'] and action:
        output['action_allowed'] = inventorylib.check_action_auths(action, user_data['authlevel'])
    else:
        output['action_allowed'] = False

    if output['authorized'] and output['action_allowed']:

        # Stock functions
        if action == 'addeditpart':
            output['message'] += 'addpart keyword found. '
            inventorylib.addeditstockpart(post, output)
            inventorylib.calcstockfromall()
        elif action == 'copypart':
            output['message'] += 'copypart keyword found. '
            inventorylib.copystockpart(post, output)
            inventorylib.calcstockfromall()
        elif action == 'deleteparts':
            output['message'] += 'deleteparts keyword found. '
            inventorylib.deletestockparts(post, output)
            inventorylib.calcstockfromall()
        elif action == 'gettrackedpartdata':
            output['message'] += 'gettrackedpartdata keyword found. '
            output['data'] = inventorylib.calcstockfromall(**post)['trackedpart']
        elif action == 'generateorders':
            output['message'] += 'generate orders keyword found. '
            inventorylib.generateandaddorders()

        # Inventory functions
        # Edit and add are separated, as names are autogenerated
        elif action == 'editinventory':
            output['message'] += 'editinventory keyword found. '
            inventorylib.editinventory(post, output)
            inventorylib.calcstockfromall()
        elif action == 'addinventory':
            output['message'] += 'addinventory keyword found. '
            inventorylib.createnewinventory(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteinventories':
            output['message'] += 'deleteinventories keyword found. '
            inventorylib.deleteinventories(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditinventorypart':
            output['message'] += 'addeditinventorypart keyword found. '
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deletepartsfrominventory':
            output['message'] += 'deletepartsfrominventory keyword found. '
            inventorylib.deletepartsfrominventory(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()

        # Order functions
        elif action == 'editorder':
            output['message'] += 'editorder keyword found. '
            inventorylib.editorder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addorder':
            output['message'] += 'addorder keyword found. '
            inventorylib.createneworder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteorders':
            output['message'] += 'deleteorders keyword found. '
            inventorylib.deleteorders(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditorderpart':
            output['message'] += 'addeditorderpart keyword found. '
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditorderparts':
            output['message'] += 'addeditorderparts keyword found. '
            if 'partsdata' in post:
                post['partsdata'] = json.loads(post['partsdata'])
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'deletepartsfromorder':
            output['message'] += 'deletepartsfromorder keyword found. '
            inventorylib.deletepartsfromorder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()

        # BOM functions
        elif action == 'copybom':
            output['message'] += 'copybom keyword found. '
            inventorylib.copybom(post, output)
            inventorylib.makebommetadata()
        elif action == 'addeditbom':
            output['message'] += 'addeditbom keyword found. '
            inventorylib.addeditbom(post, output)
            inventorylib.makebommetadata()
        elif action == 'addeditbomparts':
            output['message'] += 'addeditbomparts keyword found. '
            # Operate on partsdata
            post['partsdata'] = json.loads(post['partsdata'])
            inventorylib.addeditpartlist(post, output)
            inventorylib.makebommetadata()
        elif action == 'getbomcalcs':
            output['message'] += 'getbomcalcs keyword found. '
            inventorylib.calcbomprice(post, output)
        elif action == 'getquotecalcs':
            output['message'] += 'getquotecalcs keyword found. '
            output['message'] += 'function not written yet. '
            # inventorylib.calcbomprice(post, output)
        elif action == 'deletepartsfrombom':
            output['message'] += 'deletepartsfrombom keyword found. '
            inventorylib.deletepartsfrombom(post, output)
            inventorylib.makebommetadata()
        elif action == 'deleteboms':
            output['message'] += 'deleteboms keyword found. '
            inventorylib.deleteboms(post, output)
            inventorylib.makebommetadata()

        # Assembly functions
        elif action == 'copyassembly':
            output['message'] += 'copyassembly keyword found. '
            inventorylib.copyassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'copybomintoassembly':
            output['message'] += 'copybomintoassembly keyword found. '
            inventorylib.copybomintoassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditassembly':
            output['message'] += 'addeditassembly keyword found. '
            inventorylib.addeditassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditassemblyparts':
            output['message'] += 'addeditassemblypart keyword found. '
            post['partsdata'] = json.loads(post['partsdata'])
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'getassemblycalcs':
            output['message'] += 'getassemblycalcs keyword found. '
            inventorylib.calcassemblyprice(post, output)
        elif action == 'deletepartsfromassembly':
            output['message'] += 'deletepartsfromassembly keyword found. '
            inventorylib.deletepartsfromassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteassemblys':
            output['message'] += 'deleteassemblys keyword found. '
            inventorylib.deleteassemblies(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()

        # Quotes
        elif action == 'deletequotes':
            output['message'] += 'deletequotes keyword found. '
            inventorylib.deletequotes(post, output)
            inventorylib.makebommetadata(database=inventorylib.sysvars.dbs.quotes)
        elif action == 'copyquotetoboms':
            output['message'] += 'copyquotetoboms keyword found. '
            inventorylib.copyquotetoboms(post, output)
            inventorylib.makebommetadata()

        # Export functions
        elif action == 'exportbomtopdf':
            output['message'] += 'exportbomtopdf keyword found. '
            inventorylib.writepanelbomtopdf(post, output)

            thetime = datalib.gettimestring()
            cleantime = thetime.replace(' ', '_').replace(':', '_')

            # Get bom from boms database
            bom = inventorylib.sysvars.dbs.boms.read_table(post['name'])

            cleanbomname = post['name'].replace(' ', '_').replace(':', '_')
            filename = cleanbomname + '_' + cleantime
            outputroot = '/var/www/html/panelbuilder/data/downloads/'
            weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + filename
            inventorylib.writepanelbomtopdf(**{'bomdata': bom,
                                               'title': 'Bom generated from ' + post['name'] + ' ' + cleantime,
                                               'outputfile': outputroot + filename})
            output['data']['weblink'] = weblink
        elif action == 'exportassemblytopdf':
            output['message'] += 'exportassemblytopdf keyword found. '

            thetime = datalib.gettimestring()
            cleantime = thetime.replace(' ', '_').replace(':', '_')

            # Get bom from boms database
            assemblydata = inventorylib.sysvars.dbs.assemblies.read_table(post['name'])

            cleanname = post['name'].replace(' ', '_').replace(':', '_')
            filename = cleanname + '_' + cleantime + '.pdf'
            outputroot = '/var/www/html/panelbuilder/data/downloads/'
            weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + filename
            inventorylib.writepanelbomtopdf(**{'bomdata': assemblydata,
                                               'title': 'Bom generated from ' + post['name'] + ' ' + thetime,
                                               'format': 'picklist',
                                               'outputfile': outputroot + filename})
            output['data'] = {'assemblydata': assemblydata}
            output['weblink'] = weblink

        # Panel builder
        elif action in ['panelcalcs', 'panelcalcsgenquote']:
            output['message'] += 'panelcalc keyword found. '
            import panelbuilder
            for key, value in post.items():
                # print(key, value)
                pass
            if 'paneldesc' in post:
                import json
                post['paneldesc'] = json.loads(post['paneldesc'])
            bomresults = panelbuilder.paneltobom(**post)
            output['data'] = {}
            # d needs to have a 'paneldesc' key with the panel spec data in it.
            output['data']['bomdescription'] = bomresults['bomdescription']
            output['data']['options'] = bomresults['options']
            output['data']['bomcalcs'] = inventorylib.calcbomprice({'bomdictarray': bomresults['bom']})['data']
            output['message'] += bomresults['message']

            # We don't actually want to return the full boms by default. We
            # don't want this in the client, and it's a lot of data anyway
            if 'returnfullboms' not in post:
                for option, value in output['data']['options'].items():
                    if 'bom' in value:
                        print('Deleting bom from option ' + str(option))
                        del output['data']['options'][option]['bom']
                    if 'flatbom' in value:
                        print('Deleting flatbom from option ' + str(option))
                        del output['data']['options'][option]['flatbom']

            if action == 'panelcalcsgenquote':
                thetime = datalib.gettimestring()
                cleantime = thetime.replace(' ', '_').replace(':', '_')
                outputroot = '/var/www/html/panelbuilder/data/downloads/'

                if 'paneltype' in post['paneldesc'] and post['paneldesc']['paneltype'] == 'brewpanel':
                    datedquotefilename = 'panelbuilder_brew_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_brew_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_brew_quote.pdf'
                    genericbomfilename = 'panelbuilder_brew_bom.pdf'
                elif 'paneltype' in post['paneldesc'] and post['paneldesc']['paneltype'] == 'temppanel':
                    datedquotefilename = 'panelbuilder_temp_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_temp_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_temp_quote.pdf'
                    genericbomfilename = 'panelbuilder_temp_bom.pdf'
                else:
                    datedquotefilename = 'panelbuilder_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_quote.pdf'
                    genericbomfilename = 'panelbuilder_bom.pdf'

                weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + datedquotefilename

                # until we can get this to format properly in the pdf, we are going to leave it generic
                # description = output['data']['bomdescription']
                description = 'Control panel quote generated by panelbuilder.'
                datedquotes = True

                # Create quote pdf from BOM
                if datedquotes:
                    inventorylib.writepanelquotetopdf(**{'bomdata': bomresults['bom'],
                                                         'options': bomresults['options'],
                                                         'title': 'Quote auto-generated by panelbuilder \t\t' + datalib.gettimestring(),
                                                         'price': str(output['data']['bomcalcs']['totalprice']),
                                                         'outputfile': outputroot + datedquotefilename,
                                                         'description': description})

                inventorylib.writepanelquotetopdf(**{'bomdata': bomresults['bom'],
                                                     'options': bomresults['options'],
                                                     'title': 'Quote auto-generated by panelbuilder ' + thetime,
                                                     'price': output['data']['bomcalcs']['totalprice'],
                                                     'outputfile': outputroot + genericquotefilename})

                # Create database entry BOM
                # Create table
                # print('** DATABASE')
                # print(panelbuilder.sysvars.dirs.dbs.quotes)
                bomname = 'quote_' + cleantime
                inventorylib.addeditbom({'bomdata': {'name': bomname}, 'database': panelbuilder.sysvars.dirs.dbs.quotes}, output)
                # print('** BOM **')
                # print(bomresults['bom'])
                inserts = []
                for part in bomresults['bom']:
                    inserts.append(dblib.makesqliteinsert(bomname, [part['partid'], part['qty']], ['partid', 'qty']))
                dblib.sqlitemultquery(inventorylib.sysvars.dirs.dbs.quotes, inserts)
                inventorylib.makebommetadata(database=inventorylib.sysvars.dbs.quotes)
                # inventorylib.addeditpartlist(post, output)

                # Create pdfs
                if datedquotes:
                    inventorylib.writepanelbomtopdf(**{'bomdata': bomresults['bom'],
                                                       'options': bomresults['options'],
                                                       'title': 'Quote auto-generated by panelbuilder ' + thetime,
                                                       'outputfile': outputroot + datedbomfilename})

                inventorylib.writepanelbomtopdf(**{'bomdata': bomresults['bom'],
                                                   'title': 'panelbuilder BOM generated ' + thetime,
                                                   'outputfile': outputroot + genericbomfilename,
                                                   'totalprice': output['data']['bomcalcs']['totalprice']})

                output['data']['quotelink'] = weblink

                from iiutilities.utility import gmail
                mymail = gmail(subject="Quote generated")
                mymail.message = 'Quote generated at ' + cleantime + '\r\n'
                if 'remote_ip' in output:
                    # NOTE(review): this overwrites (=) rather than appends (+=)
                    # the previous message line; preserved as-is to keep behavior.
                    mymail.message = 'IP address ' + output['remote_ip'] + '\r\n'
                mymail.message += bomresults['bomdescription']
                mymail.recipient = '*****@*****.**'
                mymail.sender = 'II Panelbuilder'
                mymail.send()

        # Multi-use
        elif action == 'reloaditemdatafromstock':
            output['message'] += 'reloaditemdatafromstock keyword found. '
            inventorylib.refreshpartsfromstock(post, output)
            if 'bomname' in post:
                inventorylib.recalcpartdata(bomname=post['bomname'])
                inventorylib.makebommetadata()
            elif 'assemblyname' in post:
                inventorylib.recalcpartdata(assemblyname=post['assemblyname'])
                inventorylib.makeassemblymetadata()

        # Generic functions
        elif action == 'gettablenames':
            dbpath = inventorylib.dbnametopath(post['database'])
            try:
                output['data'] = dblib.gettablenames(dbpath)
            except:
                output['message'] += 'Error getting table names'
        elif action == 'switchtablerows':
            dbpath = inventorylib.dbnametopath(post['database'])
            dblib.switchtablerows(dbpath, post['tablename'], post['row1'], post['row2'], post['uniqueindex'])
        elif action == 'modwsgistatus':
            output['processgroup'] = repr(environ['mod_wsgi.process_group'])
            output['multithread'] = repr(environ['wsgi.multithread'])
        elif action == 'gettabledata':
            output['message'] += 'Gettabledata. '
            if 'database' in post:
                dbpath = inventorylib.dbnametopath(post['database'])
                if dbpath:
                    output['message'] += 'Friendly name ' + post['database'] + ' translated to path ' + dbpath + ' successfully. '

                    if 'tablenames' in post:
                        # Get multiple tables
                        output['message'] += 'Multiple tables. '
                        data = []
                        if 'start' in post:
                            fixedstart = int(post['start'])
                        else:
                            fixedstart = 0
                        if 'length' in post:
                            fixedlength = int(post['length'])
                        else:
                            fixedlength = 1
                        # list() so per-table indexing below can raise IndexError
                        # (a bare map object is not subscriptable in Python 3).
                        if 'lengths' in post:
                            lengths = list(map(int, post['lengths[]']))
                        else:
                            lengths = []
                        if 'starts' in post:
                            starts = list(map(int, post['starts']))
                        else:
                            starts = []

                        for index, table in enumerate(post['tablenames[]']):
                            try:
                                length = lengths[index]
                            except IndexError:
                                length = fixedlength
                            try:
                                start = starts[index]
                            except IndexError:
                                start = fixedstart

                            data.append(dblib.dynamicsqliteread(dbpath, table, start, length))
                        output['data'] = data
                    elif 'length' in post:
                        # Handle table row subset
                        output['message'] += 'Length keyword. '
                        if not 'start' in post:
                            post['start'] = 0
                        thetime = time()
                        output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'], post['start'], post['length'])
                        output['querytime'] = time() - thetime
                    elif 'row' in post:
                        # Handle table row
                        output['message'] += 'Row keyword. ' + str(post['row'])
                        thetime = time()
                        output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'], post['row'])
                        output['querytime'] = time() - thetime
                    elif 'tablename' in post:
                        # Handle entire table
                        output['message'] += 'Tablename keyword: ' + post['tablename'] + '. '
                        thetime = time()
                        if 'condition' in post:
                            if not post['condition'] == '':
                                output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'], condition=post['condition'])
                            else:
                                output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'])
                        else:
                            try:
                                output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'])
                            except:
                                output['message'] += 'Error retrieving data. '
                            else:
                                output['message'] += 'Data query appears successful. '
                        output['querytime'] = time() - thetime
                else:
                    output['message'] += 'Friendly name ' + post['database'] + ' unsuccessfully translated. '
            else:
                output['message'] += 'No database present in action request'
        else:
            output['message'] = 'no command matched for action "' + action + '"'
    else:
        # status = '403 Forbidden'
        output['message'] += 'Not authorized for this action (or perhaps at all?) '

    # Etag handling: hash the payload so an unchanged response can 304.
    if 'data' in output:
        if output['data']:
            newetag = hashlib.md5(str(output['data']).encode('utf-8')).hexdigest()
            if 'etag' in post:
                if newetag == post['etag']:
                    status = '304 Not Modified'
                    output['data'] = ''
        else:
            newetag = ''
    else:
        newetag = ''

    if 'datasize' in post:
        output['datasize'] = sys.getsizeof(output['data'])

    output['etag'] = newetag

    # try:
    foutput = json.dumps(output, indent=1)
    # except:
    #     import csv
    #     w = csv.writer(open("/usr/lib/iicontrollibs/inventory/dumperr.log", "w"))
    #     for key, val in output.items():
    #         w.writerow([key, val])

    response_headers = [('Content-type', 'application/json')]
    response_headers.append(('Etag', newetag))
    start_response(status, response_headers)

    return foutput.encode('utf-8')
def onact(self):
    """
    Execute this alert/alarm's configured action.

    Dispatches on self.actiontype:
      - 'email'        : compose and send a gmail alert describing the alarm state
      - 'indicator'    : set an indicator's status to 1 in the control database
      - 'output'       : set the named output's value to '1'
      - 'mote_command' : queue a command for a remote mote, optionally skipping duplicates
      - 'setvalue'     : generic single-value set described by a db/table/value spec

    Appends human-readable progress/status text to self.statusmsg as it goes.
    """
    import json
    import socket

    from iiutilities import dblib, datalib, utility
    from cupid import pilib

    if self.actiontype == 'email':
        # process email action
        self.statusmsg += 'Processing email alert. '
        email = self.actiondetail

        # Special messages
        if self.conditiontype == 'channel':
            message = 'Channel alarm for ' + self.name + ' is active with value of ' + str(self.value) + '. '
            # BUGFIX: str() must wrap only the threshold value. The original wrote
            # str(self.actiondatadict['PV_low'] + '. '), which raises TypeError
            # whenever the threshold is numeric.
            if 'PV_low' in self.actiondatadict:
                message += 'Low alarm: ' + str(self.actiondatadict['PV_low']) + '. '
            if 'PV_high' in self.actiondatadict:
                message += 'High alarm: ' + str(self.actiondatadict['PV_high']) + '. '
        elif self.conditiontype == 'value':
            message = 'Alert for alarm ' + self.name + ' . On time of ' + self.ontime + '. Current time of ' \
                      + datalib.gettimestring()
            message += ' Value: ' + str(self.value) + self.actiondatadict['operator'] + str(self.actiondatadict['criterion'])
        else:
            message = 'Alert for alarm ' + self.name + ' . On time of ' + self.ontime + '. Current time of ' \
                      + datalib.gettimestring()

        hostname = socket.gethostname()
        subject = 'CuPID ' + hostname + ' Alert : Alarm On - ' + self.name
        try:
            actionmail = utility.gmail(message=message, subject=subject, recipient=email)
            actionmail.send()
        except:
            # Best-effort: mail failure is recorded in the status message, not raised.
            self.statusmsg += 'Error sending email. '
        else:
            self.statusmsg += 'Mail sent. '

    elif self.actiontype == 'indicator':
        # process indicator action
        self.statusmsg += 'Processing indicator on action. '
        indicatorname = self.actiondetail
        dblib.sqlitequery(pilib.dirs.dbs.control,
                          'update indicators set status=1 where name = \'' + indicatorname + '\'')

    elif self.actiontype == 'output':
        self.statusmsg += 'Processing output on action. '
        dblib.setsinglevalue(pilib.dirs.dbs.control, 'outputs', 'value', '1',
                             condition='"id"=\'' + self.actiondetail + "'")

    elif self.actiontype == 'mote_command':
        settings = {
            'no_duplicates': True,
            'retries': 5
        }
        settings.update(self.actiondatadict)

        self.statusmsg += 'Processing command on action. '
        destination = self.actiondatadict['destination']
        message = self.actiondatadict['message']

        # commandid uniquely identifies destination+message so duplicates can be detected.
        command_id = '{}_{}'.format(destination, message)
        command = {
            'queuedtime': datalib.gettimestring(),
            'destination': destination,
            'status': 'new',
            'message': message,
            'commandid': command_id,
        }
        command['options'] = json.dumps({'retries': settings['retries']})

        insert = True
        if settings['no_duplicates']:
            # Check to see if commands exist with our command id.
            condition = "commandid='{}'".format(command_id)
            matching_commands = pilib.dbs.motes.read_table('commands', condition=condition)
            if matching_commands:
                self.statusmsg += '{} matching commands are already queued. Not inserting. '.format(
                    len(matching_commands))
                insert = False

        if insert:
            self.statusmsg += 'Inserting command. '
            pilib.dbs.motes.settings['quiet'] = False
            pilib.dbs.motes.insert('commands', command)

    # This should be the generic handler that we migrate to
    elif self.actiontype == 'setvalue':
        # to set a value, we need at minimum:
        # dbname, tablename, valuename, setmethod and either:
        #   setmethod = increment, incrementvalue=1
        #   setmethod = value
        dbvndict = datalib.parsedbvn(self.actiondetail)
        dbpath = pilib.dbnametopath(dbvndict['dbname'])

        # Special set formula?
        if 'setvalueformula' in self.actiondatadict:
            # Stuff that we don't know yet.
            dblib.setsinglevalue(dbpath, dbvndict['tablename'], dbvndict['valuename'], 'formulastuff here',
                                 dbvndict['condition'])
        else:
            """
            TODO: Fix this hack. We cannot currently single quote in the database entry because it breaks the
            reinsert. So for now, we have to add quotes on either side of the string literal before executing the
            sqlite query.
            """
            if dbvndict['condition']:
                querycondition = dbvndict['condition'].split('=')[0] + "='" + \
                                 dbvndict['condition'].split('=')[1] + "'"
                # print('FIXED CONDITION')
                # print(querycondition)
            else:
                querycondition = None

            dblib.setsinglevalue(dbpath, dbvndict['tablename'], dbvndict['valuename'], '1', querycondition)
def processremotedata(datadict, stringmessage):
    """
    Route one parsed message from a remote node into the control/motes databases.

    :param datadict: dict of key/value fields parsed out of the radio message.
        Dispatch is by keyword presence ('nodeid' required; then one of
        'cmd', 'ioval', 'owdev', 'chan', 'scalevalue', plus status keys).
    :param stringmessage: the raw message text (stored, with NULs stripped,
        in the 'data' column of the remotes table).
    :return: None. All effects are database writes (queued and flushed here).
    """
    import cupid.pilib as pilib
    from iiutilities import dblib, datalib, utility

    control_db = pilib.dbs.control
    motes_db = pilib.dbs.motes
    log_db = pilib.dbs.log

    print('PROCESSING REMOTE DATA')
    print(datadict)
    if 'nodeid' in datadict:
        """
        We are going to search for keywords. Message type will not be explicitly declared so
        as not to waste precious message space in transmission. Or we could tack these
        on in the gateway, but we won't yet.
        """
        """
        Then we have to construct a query where we will replace a unique item
        This will take the form :
          update or replace in remotes where nodeid=3 and msgtype='iovalue' and iopin=3
          update or repalce in remotes where nodeid=2 and msgtype='owdev' and owrom='28XXXXXXXXXXXXXX'
            (and later which IO on this device)
          update or replace in remotes where nodeid=2 and msgtype='chanstat' channum=1
        """
        """
        (need to see if all channel variables can be fit into one message:
        channum, sv,pv,mode,state
        """
        runquery = False
        nodeid = datadict['nodeid']

        # We are going to use this to filter datadict entries into remote channels. More later.
        allowedfieldnames = ['nodeid', 'sv', 'pv', 'htcool', 'run', 'treg', 'prop', 'p', 'i', 'd']

        control_db = dblib.sqliteDatabase(pilib.dirs.dbs.control)

        # Command responses, including value requests

        # Node status values
        value_types = ['vbat', 'vout', 'autoboot', 'output', 'batterylow', 'sigbootok', 'sigshutoff']

        # sprintf(buff, "nodeid:1,vbat:%01d.%02d,vout:%01d.%02d,autoboot:%01d,output:%01d", wholevoltage, fractvoltage,
        #        wholevoltage2, fractvoltage2, autobootenabled, outputstate);
        # Serial.println(buff);
        # sprintf(buff, "batterylow:%01d,sigbootok:%01d,sigshutoff:%01d", batteryLow, bootok, sigshutoff);

        # Delete-then-insert per status key keeps at most one 'nodestatus' row
        # per (nodeid, keyvaluename) in the remotes table.
        for value_type in value_types:
            if value_type in datadict:
                insert = {
                    'nodeid': nodeid,
                    'msgtype': 'nodestatus',
                    'keyvaluename': value_type,
                    'keyvalue': datadict[value_type],
                    'data': stringmessage.replace('\x00', ''),
                    'time': datalib.gettimestring()
                }
                control_db.query(dblib.makedeletesinglevaluequery(
                    'remotes', {
                        'conditionnames': ['nodeid', 'keyvaluename'],
                        'conditionvalues': [nodeid, insert['keyvaluename']]
                    }), queue=True)
                control_db.insert('remotes', insert, queue=True)

        # Node system events
        if 'event' in datadict:
            insert = {
                'nodeid': nodeid,
                'msgtype': 'event',
                'keyvaluename': datadict['event'],
                'keyvalue': datalib.gettimestring(),
                'data': stringmessage.replace('\x00', ''),
                'time': datalib.gettimestring()
            }
            control_db.query(dblib.makedeletesinglevaluequery(
                'remotes', {
                    'conditionnames': ['nodeid', 'keyvaluename'],
                    'conditionvalues': [nodeid, insert['keyvaluename']]
                }), queue=True)
            control_db.insert('remotes', insert, queue=True)

            # Also queue an email message to cupid_status
            import socket
            hostname = socket.gethostname()
            message = 'CuPID system event : {} \r\n\r\n'.format(insert['keyvaluename'])
            notifications_email = '*****@*****.**'
            subject = 'CuPID : {} : {} '.format(hostname, insert['keyvaluename'])

            notification_database = pilib.cupidDatabase(pilib.dirs.dbs.notifications)
            system_database = pilib.cupidDatabase(pilib.dirs.dbs.system)
            currenttime = datalib.gettimestring()
            notification_database.insert(
                'queued', {
                    'type': 'email',
                    'message': message,
                    'options': 'email:' + notifications_email + ',subject:' + subject,
                    'queuedtime': currenttime
                })
            system_database.set_single_value('notifications', 'lastnotification', currenttime,
                                             condition="item='boot'")

        if 'cmd' in datadict:
            if datadict['cmd'] == 'lp':
                # Remove command key and process remaining data
                del datadict['cmd']
                motetablename = 'node_' + nodeid + '_status'
                # Create table if it doesn't exist
                motes_db.create_table(motetablename, pilib.schema.mote, queue=True)
                for key in datadict:
                    thetime = datalib.gettimestring()
                    if key in ['iov', 'iov2', 'iov3', 'pv', 'pv2', 'sv', 'sv2', 'iomd', 'ioen', 'iordf',
                               'iorpf', 'chen', 'chmd', 'chnf', 'chpf', 'chdb', 'chsv', 'chsv2', 'chpv',
                               'chpv2']:
                        # We need to process these specially, going back to the original message
                        # Pipe-delimited value lists; the '2'/'3' suffixed keys carry the
                        # continuation of the same series, so they start at a higher index.
                        values = datadict[key]
                        valuelist = values.split('|')
                        print(valuelist)
                        index = 0
                        if key in ['iov', 'iov2', 'iov3']:
                            base = 'iov_'
                            if key == 'iov2':
                                index = 5
                            elif key == 'iov3':
                                index = 9
                        elif key in ['pv', 'pv2']:
                            base = 'pv_'
                            if key == 'pv2':
                                index = 5
                        elif key in ['sv', 'sv2']:
                            base = 'sv_'
                            if key == 'sv2':
                                index = 5
                        else:
                            base = key + '_'

                        querylist = []
                        for value in valuelist:
                            query = dblib.makesqliteinsert(
                                motetablename, [thetime, base + str(index), value])
                            motes_db.query(query, queue=True)
                            # querylist.append(dblib.makesqliteinsert(motetablename, [thetime, base + str(index), value]))
                            index += 1

                    # Update table entry. Each entry has a unique key
                    # updatetime, keyname, data
                    else:
                        motes_db.insert(motetablename, {
                            'time': thetime,
                            'message': key,
                            'value': datadict[key]
                        }, queue=True)
                        # print('inserted ' + thetime + ' ' + key + ' ' + datadict[key])

                if motes_db.queued_queries:
                    motes_db.execute_queue()

        # This is for values that are reported by the node
        elif 'ioval' in datadict:
            # check to see if entry exists with node and ionum. Need to generalize these.
            # Might make sense to put then into an ID to compare. Other database, compatible?

            # iovalue type message
            try:
                msgtype = 'iovalue'
                keyvalue = datadict['iopin']
                keyvaluename = 'iopin'
            except:
                print('oops')
            else:
                # NOTE(review): insert() is called with no arguments here — this branch
                # looks unfinished (msgtype/keyvalue are built but never written). Confirm intent.
                control_db.insert()

        elif 'owdev' in datadict:
            # One-wire device report: validate the 16-hex-char ROM (after the 2-char family prefix).
            try:
                msgtype = 'owdev'
                keyvalue = datadict['owrom'][2:]
                keyvaluename = 'owrom'
                if len(keyvalue) != 16:
                    raise NameError('invalid ROM length')
                else:
                    for romcbar in keyvalue:
                        hexchars = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
                                    'A', 'B', 'C', 'D', 'E', 'F', 'a', 'b', 'c', 'd', 'e', 'f']
                        if romcbar not in hexchars:
                            raise NameError('Invalid ROM hex character')
            except:
                print("oops")
            else:
                # NOTE(review): runquery is set but nothing below acts on it in this function.
                runquery = True

        elif 'chan' in datadict:
            # insert or update remotes database value
            # first need to get existing entry if one exists
            msgtype = 'channel'
            keyvalue = str(int(datadict['chan']))  # Zeroes bad
            keyvaluename = str(int(datadict['chan']))

            # conditions = '"nodeid"=2 and "msgtype"=\'channel\' and "keyvalue"=\'' + keyvalue + '\'"'
            # Should be able to offer all conditions, but it is not working for some reason, so we will
            # iterate over list to find correct enty

            # Here, get all remote entries for the specific node id
            conditions = '"nodeid"=\'' + datadict['nodeid'] + '\' and "msgtype"=\'channel\''
            chanentries = control_db.read_table('remotes', conditions)

            # parse through to get data from newdata
            newdata = {}
            import string
            printable = set(string.printable)
            for key, value in datadict.items():
                if key not in ['chan', 'nodeid']:
                    if key in allowedfieldnames:
                        # NOTE(review): on Python 3, filter() returns a lazy iterator, not a
                        # string — this stores a filter object in newdata. On Python 2 it
                        # returned a filtered string. Verify which interpreter runs this.
                        filteredvalue = filter(lambda x: x in printable, value)
                        newdata[key] = filteredvalue

            updateddata = newdata.copy()

            # This does not take time into account. This should not be an issue, as there should only be one entry
            # Now match entry from node. Here, for example, keyvaluename could be channel, and keyvalue representing the
            # channel or controller on the node.
            for chanentry in chanentries:
                if (str(int(chanentry['keyvalue']))) == keyvalue:
                    # print('I FOUND')
                    # newdata = {'fakedatatype':'fakedata', 'anotherfakedatatype':'morefakedata'}
                    # Merge new fields over the stored options blob for this channel.
                    olddata = datalib.parseoptions(chanentry['data'])
                    olddata.update(updateddata)
                    updateddata = olddata.copy()
                    newqueries = []
                    conditions += ' and "keyvalue"=\'' + keyvalue + "\'"

            # Ok, so here we are. We have either added new data to old data, or we have the new data alone.
            # We take our dictionary and convert it back to json and put it in the text entry
            updatedjsonentry = datalib.dicttojson(updateddata)

            # NOTE(review): if a matching entry was found above, the keyvalue clause is
            # appended twice (once in the loop, once here) — confirm the resulting SQL.
            conditions += 'and "keyvalue"=\'' + keyvalue + '\''
            deletequery = dblib.makedeletesinglevaluequery('remotes', conditions)
            # hardcode this for now, should supply valuename list.
            addquery = dblib.makesqliteinsert('remotes', [
                datadict['nodeid'], 'channel', keyvalue, 'channel',
                updatedjsonentry, datalib.gettimestring()
            ])
            print(deletequery)
            print(addquery)
            control_db.queries([deletequery, addquery])

        elif 'scalevalue' in datadict:
            # TODO : What is this?
            # querylist.append('create table if not exists scalevalues (value float, time string)')
            # querylist.append(dblib.makesqliteinsert('scalevalues', [datadict['scalevalue'], datalib.gettimestring()], ['value', 'time']))
            # log_db.queries(querylist)
            pass

        # Flush everything queued above in one transaction.
        if control_db.queued_queries:
            control_db.execute_queue()
        return
    else:
        # print('not running query')
        pass
    return
def runconfig(**kwargs):
    """
    Reconfigure the system's network interfaces from stored (or passed) configuration.

    Keyword settings (all optional, merged over defaults below):
      debug, onboot, config_all, ifaces_to_configure, config, use_default.

    Side effects: rewrites the ifconfig file, updates netstatus rows, may rewrite
    wpa_supplicant, resets/restarts interfaces and AP services, and sets iptables
    bridging rules. Returns a status dict only on early failure paths.
    """
    from iiutilities import utility
    from cupid import pilib
    from iiutilities.datalib import gettimestring
    from iiutilities import dblib

    """
    Interfaces and modes
    Interfaces:
              eth0 | wlan0 | wlan1
    Modes:
      dhcp  |  ok  |  --   |  --
      static|  ok  |  --   |  --
      station| --  |  ok   |  ok
      ap    |  --  |  ok   |  ok
    """

    settings = {
        'debug': False,
        'onboot': False,
        'config_all': False,
        'ifaces_to_configure': ['wlan0'],
        'config': {
            'eth0': {
                'enabled': True,
                'mode': 'dhcp'
            },
            'wlan0': {
                'enabled': True,
                'mode': 'station',
                'config': {
                    'network_select': ['name', 'strongest'],
                    'network': 'leHouse'
                }
            }
        },
        'use_default': False
    }
    settings.update(kwargs)

    if settings['debug']:
        pilib.set_debug()

    # Get from database
    # TODO : Used passed configuration data.
    if not settings['use_default']:
        import json
        netiface_config = pilib.dbs.system.read_table('netifaceconfig')
        if not netiface_config:
            message = 'netifaceconfig table empty or not found ! '
            utility.log(pilib.dirs.logs.network, message, 0, pilib.loglevels.network)
            return {'status': 1, 'status_message': message}

        # Re-key config rows by interface name.
        settings['config'] = {}
        for iface_config in netiface_config:
            settings['config'][iface_config['name']] = iface_config
            # unpack json dump of config details
            try:
                settings['config'][iface_config['name']]['config'] = json.loads(
                    iface_config['config'])
            except:
                message = 'Config entry for interface {} is empty or cannot be unpacked as json: {}. '.format(
                    iface_config['name'], iface_config['config'])
                # print(settings['config'][iface_config['name']])
                utility.log(pilib.dirs.logs.network, message, 3, pilib.loglevels.network)

    utility.log(pilib.dirs.logs.network, 'Updating ifconfig file. ', 0, pilib.loglevels.network)

    print('MAKING CONFIG FILE WITH CONFIG')
    print(settings['config'])
    make_ifconfig_file(config=settings['config'])

    # For now, we are going to assume that we are only using one wireless interface at most as a network station
    station_interface = None
    for interface_name, interface in settings['config'].items():
        if interface['mode'] == 'station':
            station_interface = interface['name']

    utility.log(pilib.dirs.logs.network, 'Running network reconfig (setting lastnetreconfig). ',
                0, pilib.loglevels.network)
    dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'lastnetreconfig', gettimestring())

    try:
        netconfigdata = dblib.readonedbrow(pilib.dirs.dbs.system, 'netconfig')[0]
        if settings['debug']:
            print("NETCONFIG:\n{}".format(netconfigdata))
    except:
        utility.log(pilib.dirs.logs.network, 'Error reading netconfig data. ', 0,
                    pilib.loglevels.network)
        return {
            'status': 1,
            'status_message': 'Error reading netconfig data. '
        }
    else:
        utility.log(pilib.dirs.logs.network, 'Successfully read netconfig data', 3,
                    pilib.loglevels.network)

    dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'mode', netconfigdata['mode'])
    utility.log(pilib.dirs.logs.network, 'Netconfig is enabled', 3, pilib.loglevels.network)

    # This will grab the specified SSID and the credentials and update
    # the wpa_supplicant file. At the moment, it also looks to see if the network is available.
    if station_interface:
        utility.log(pilib.dirs.logs.network, 'Updating wpa_supplicant', 3, pilib.loglevels.network)
        updatewpasupplicant(station_interface=station_interface)

    if settings['config_all']:
        utility.log(pilib.dirs.logs.network, 'Configuring all interfaces. ', 3,
                    pilib.loglevels.network)
        settings['ifaces_to_configure'] = [
            interface_name for interface_name in settings['config']
        ]

    for interface_name in settings['ifaces_to_configure']:
        utility.log(pilib.dirs.logs.network, 'Configuring interface: {}'.format(interface_name),
                    3, pilib.loglevels.network)
        if interface_name not in settings['config']:
            message = 'Configuration not present for interface {}. '.format(interface_name)
            utility.log(pilib.dirs.logs.network, message, 1, pilib.loglevels.network)
            continue

        this_config = settings['config'][interface_name]
        if settings['debug']:
            print('CONFIG: \n{}'.format(this_config))
        if this_config['mode'] == 'ap':
            # AP mode: stop services first, reset the iface, then restart AP services on it.
            killapservices()
            reset_net_iface(interface=interface_name)
            startapservices(interface_name)
        else:
            reset_net_iface(interface=interface_name)

    # Bridges require ipv4 being enabled in /etc/sysctl.conf
    # Here we are going to auto-bridge, but I think we should probably manually specify that the bridge should exist
    mode = None
    if all(interface in settings['config'] for interface in ['eth0', 'wlan0']):
        if settings['config']['wlan0']['mode'] == 'ap':
            mode = 'eth0wlan0bridge'
    if all(interface in settings['config'] for interface in ['wlan0', 'wlan1']):
        if settings['config']['wlan0']['mode'] == 'dhcp' and settings[
                'config']['wlan1']['mode'] == 'ap':
            mode = 'wlan0wlan1bridge'
    if all(interface in settings['config'] for interface in ['wlan0', 'wlan1']):
        if settings['config']['wlan1']['mode'] == 'dhcp' and settings[
                'config']['wlan0']['mode'] == 'ap':
            mode = 'wlan1wlan0bridge'

    if mode:
        utility.log(pilib.dirs.logs.network, 'Setting bridge for mode {}'.format(mode), 1,
                    pilib.loglevels.network)
        runIPTables(mode)
def updateowfsdevices(busdevices, myProxy=None, debug=False):
    """
    Refresh 1-wire (owfs) bus device objects against the control database.

    For each device: restore prior poll settings/values from the 'inputs' table
    (or fall back to defaults), ensure a unique human-readable name exists in
    'ioinfo' for temperature sensors, and read the temperature if the poll
    interval has elapsed.

    :param busdevices: list of owfs device objects (mutated in place and returned).
    :param myProxy: optional owserver proxy passed through to device.readprop().
    :param debug: unused here; kept for interface compatibility.
    :return: the updated busdevices list.
    """
    from cupid import pilib
    from iiutilities import dblib
    from iiutilities import datalib
    from iiutilities import utility

    # get defaults
    defaults = pilib.dirs.dbs.control.read_table('defaults')
    default_dict = {}
    for default_item in defaults:
        default_dict[default_item['valuename']] = default_item['value']

    # get current entries
    previnputs = pilib.dirs.dbs.control.read_table('inputs')

    # Make list of IDs for easy indexing
    previnputids = [previnput['id'] for previnput in previnputs]

    # Iterate over devices. Determine if values exist for polltime, frequency.
    # If so, update the device. If not, use defaults.
    # Then determine whether we should update value or not (Read temperature)
    for index, device in enumerate(busdevices):
        # print(device.__dict__)
        if device.sensorid in previnputids:
            # Hoisted: the original repeated previnputs[previnputids.index(...)]
            # for every field read below.
            prev_input = previnputs[previnputids.index(device.sensorid)]
            try:
                newpollfreq = float(prev_input['pollfreq'])
            except ValueError:
                device.pollfreq = float(default_dict['inputpollfreq'])
            else:
                if newpollfreq >= 0:
                    device.pollfreq = newpollfreq
                else:
                    device.pollfreq = float(default_dict['inputpollfreq'])

            device.ontime = prev_input['ontime']
            device.offtime = prev_input['offtime']
            device.polltime = prev_input['polltime']
            device.value = prev_input['value']
            device.log_options = prev_input['log_options']
        else:
            device.pollfreq = float(default_dict['inputpollfreq'])
            device.ontime = ''
            device.offtime = ''
            device.polltime = ''
            device.value = ''

        # We're going to set a name because calling things by their ids is getting a bit ridiculous,
        # but we can't have empty name fields if we rely on them being there. They need to be unique,
        # so we'll name them by type and increment them.
        # Not really sure why this is conditional?
        if device.type in ['DS18B20', 'DS1825']:
            # Get name if one exists
            name = dblib.sqlitedatumquery(
                pilib.dirs.dbs.control,
                'select name from ioinfo where id=\'' + device.sensorid + '\'')

            # If doesn't exist, propose candidate names until an unused one is found,
            # then register it in ioinfo.
            if name == '':
                for rangeindex in range(100):
                    # BUGFIX: the candidate now varies with rangeindex. The original
                    # rebuilt the identical name (index + 1) on every pass, so a name
                    # collision could never be resolved and nothing was inserted.
                    name = device.type + '-' + str(index + 1 + rangeindex)
                    # print(name)
                    foundid = dblib.sqlitedatumquery(
                        pilib.dirs.dbs.control,
                        'select id from ioinfo where name=\'' + name + '\'')
                    # print('foundid' + foundid)
                    if foundid:
                        # Name taken; try the next suffix.
                        pass
                    else:
                        dblib.sqlitequery(
                            pilib.dirs.dbs.control,
                            dblib.makesqliteinsert('ioinfo',
                                                   valuelist=[device.sensorid, name],
                                                   valuenames=['id', 'name']))
                        break
            device.name = name

        device.time_since_last = datalib.timestringtoseconds(
            datalib.gettimestring()) - datalib.timestringtoseconds(device.polltime,
                                                                   defaulttozero=True)

        # Is it time to read temperature?
        if device.time_since_last > device.pollfreq:
            utility.log(pilib.dirs.logs.io,
                        'reading temperature [' + device.name + '][' + device.id + ']',
                        9, pilib.loglevels.io)
            device.readprop('temperature', myProxy)
            device.polltime = datalib.gettimestring()
            # readprop returns bytes from owserver; store as text.
            device.value = device.temperature.decode('utf-8')
        else:
            utility.log(pilib.dirs.logs.io, 'not time to poll', 9, pilib.loglevels.io, )
            # print('not time to poll')

        device.unit = 'F'

        # We update the device and send them back for other purposes.
        busdevices[index] = device

    return busdevices
def generatehamachipage(hamachidata=None, path=None):
    """
    Render a jQuery-Mobile status page for hamachi networks and their clients.

    :param hamachidata: list of network dicts (each with 'name', 'id', 'clientlist');
        fetched via gethamachidata() if not supplied.
    :param path: file path to write the html to. If falsy, nothing is written.
        (Previously a missing path raised NameError on an unguarded file handle.)

    The document is accumulated in memory and written once inside a context
    manager, fixing the original's never-closed, builtin-shadowing `file` handle.
    """
    from iiutilities.netfun import gethamachidata
    from iiutilities.datalib import parseoptions, gettimestring

    if not hamachidata:
        hamachidata = gethamachidata()

    # Static page head + stylesheet for online/offline badges.
    parts = [
        '<!DOCTYPE html>' +
        '<html>' +
        '<head>' +
        '<title>Hamachi Remotes Home</title>' +
        '<meta name="viewport" content="width=device-width, initial-scale=1">' +
        '<link rel="stylesheet" href="jqm/themes/base.css" />' +
        '<link rel="stylesheet" href="jqm/themes/jquery.mobile.icons.min.css" />' +
        '<link rel="stylesheet" href="jqm/jquery.mobile.custom.structure.min.css" />' +
        '<!--jQuery Mobile is 1.4.5-->' +
        '<script src="js/jquery-1.11.1.js"></script>' +
        '<script src="jqm/jquery.mobile.custom.js"></script>' +
        '<style>' +
        '.online {' +
        'background-color:#bbffbb' +
        '}' +
        '.offline {' +
        'background-color:#ffbbbb' +
        '}' +
        '</style>' +
        '</head>' +
        '<body>' +
        '<div data-role="page" id="demo-page" class="my-page" data-theme="d">' +
        '<div role="main" class="ui-content">'
    ]
    parts.append('<ul data-role="listview" data-inset=true><li data-role="list-divider">')
    parts.append('Updated : ' + gettimestring() + '</li></ul>')

    for network in hamachidata:
        # One listview per network, divider shows name and id.
        parts.append('<ul data-role="listview" data-inset="true">' +
                     '<li data-role="list-divider">' + network['name'] + ' : ' +
                     network['id'] + '</li>')
        for client in network['clientlist']:
            # print(client['name'] + ' : ' + client['hamachiip'])
            parts.append('<li>')
            parts.append('<fieldset class="ui-grid-a"><div class="ui-block-a" style="width:50%">')
            # htmlstring += client['name'] + ' : ' + client['hamachiip']
            parts.append('<a href="https://' + client['hamachiip'] + '/">' + client['name'] +
                         '</a> : ' + client['hamachiip'])
            options = parseoptions(client['options'])
            parts.append('</div>')
            if client['onlinestatus']:
                parts.append('<div class="online" style="width:60px; float:right; text-shadow:none; '
                             'text-align:center; border-radius:0.3em; border-width:1.2px; '
                             'border-style:solid; border-color:#333333">Online</div>')
            else:
                parts.append('<div class="offline" style="width:60px; float:right; text-shadow:none; '
                             'text-align:center; border-radius:0.3em; border-width:1.2px; '
                             'border-style:solid; border-color:#333333">Offline</div>')
            # A 'monitor' option of '1' means a daemon watches this client.
            if 'monitor' in options:
                if options['monitor'] == '1':
                    parts.append('<div class="online" style="width:70px; float:right; '
                                 'text-align:center; border-radius:0.4em; border-width:1px; '
                                 'border-style:solid; border-color:#333333; '
                                 'margin-right:10px">Daemon</div>')
            parts.append('</fieldset></li>\n')
        parts.append('</ul>')

    parts.append('</div></div>\n')

    if path:
        # Single write under a context manager: handle is always closed.
        with open(path, 'w') as output_file:
            output_file.write(''.join(parts))
def analyze_and_histo_access_db(dbpath=access_dbpath):
    """
    Analyze the web-server access log database and build per-domain hit metadata.

    :param dbpath: path to the sqlite access-log database (defaults to module-level
        access_dbpath).
    :return: dict with keys:
        'total_hits'  : {domain: {'times': [...], 'histo_data': {timestring: count}}}
        'remote_hits' : same shape, non-local requests only
        'hourly_hits' : reserved (left empty here)
        'not_found'   : list of {'url', 'time'} for 404 responses
        'dbpath', 'tablename' : provenance of the analysis
    """
    from iiutilities import dblib

    tablename = 'access_log'
    access_db = dblib.sqliteDatabase(dbpath)
    access_db_tablenames = access_db.get_table_names()
    access_records = access_db.read_table(tablename)

    access_meta = {
        'total_hits': {},
        'remote_hits': {},
        'hourly_hits': {},
        'not_found': [],
        'dbpath': dbpath,
        'tablename': tablename
    }
    for record in access_records:
        # Parses/augments the raw record in place (domain, local flag, etc.).
        analyze_access_entry(record)

        if not record['domain']:
            pass
            # print('no domain for entry')
            # print(record)

        # setdefault-style accumulation of hit times per domain.
        if record['domain'] in access_meta['total_hits']:
            access_meta['total_hits'][record['domain']]['times'].append(record['time'])
        else:
            access_meta['total_hits'][record['domain']] = {'times': [record['time']]}

        if not record['local']:
            if record['domain'] in access_meta['remote_hits']:
                access_meta['remote_hits'][record['domain']]['times'].append(record['time'])
            else:
                access_meta['remote_hits'][record['domain']] = {'times': [record['time']]}

        if record['status'] == '404':
            access_meta['not_found'].append({
                'url': record['full_request'],
                'time': record['time']
            })

    # Now process time-resolved data into hourly histograms.
    # The original duplicated this block verbatim for total and remote hits;
    # it is factored into one helper (also fixing a Python 3 range() bug — see helper).
    for domain_data in access_meta['total_hits'].values():
        _attach_hourly_histo(domain_data)
    for domain_data in access_meta['remote_hits'].values():
        _attach_hourly_histo(domain_data)

    if access_db.queued_queries:
        access_db.execute_queue()

    return access_meta


def _attach_hourly_histo(domain_data):
    """
    Attach domain_data['histo_data']: hourly hit counts keyed by the timestring
    of the middle of each hour. Expects domain_data['times'] to be non-empty.
    """
    from iiutilities import datalib

    domain_data['times'].sort()

    # Find first time, then go back to the last incremental hour boundary.
    first_time = datalib.timestringtoseconds(domain_data['times'][0])
    first_hour_time_seconds = first_time - first_time % 3600

    # Find last hour (all hits fall within the hour following this boundary).
    last_time = datalib.timestringtoseconds(domain_data['times'][-1])
    last_hour_time_seconds = last_time - last_time % 3600

    # BUGFIX: floor division. The original used true division ('/'), which yields
    # a float on Python 3 and makes range(num_bins) raise TypeError.
    num_bins = int(last_hour_time_seconds - first_hour_time_seconds) // 3600 + 1
    bin_times = [first_hour_time_seconds + i * 3600 for i in range(num_bins)]
    bin_values = [0] * num_bins

    # Count each hit into its hour bin (last bin catches the tail).
    for time in domain_data['times']:
        time_seconds = datalib.timestringtoseconds(time)
        for index, bin_time in enumerate(bin_times):
            if index == num_bins - 1 or time_seconds < bin_times[index + 1]:
                bin_values[index] += 1
                break

    domain_data['histo_data'] = {}
    for bin_time, bin_value in zip(bin_times, bin_values):
        # Put time in middle of hour
        domain_data['histo_data'][datalib.gettimestring(bin_time + 1800)] = bin_value
def update_net_status(lastnetstatus=None, quiet=True, ifaces_config=None, netconfig_data=None):
    """
    This function does two main things:

    1. Updates netstatus table. This table contains overall status information, i.e. whether we are
       attached to WAN, hamachi, etc.

    2. Updates netiface_status table (previously netifaces table). This does some contextual stuff
       like getting wpastate info if it makes sense based on mode. Trying to get wpastate data on
       interfaces that don't matter is not a big deal, except that it takes a little time. No need
       wasting time if the interface is not configured to use wpa.

    3. For interfaces that have a configuration, sets status in netifaces_status based on mode

    :param lastnetstatus: previous netstatus row dict; read from the db if not given.
    :param quiet: passed through to the ping helper.
    :param ifaces_config: {iface_name: config dict}; read from netifaceconfig if not given.
    :param netconfig_data: netconfig row dict; read from the db if not given.
    :return: dict of netstatusdict / ifaces_status / ifaces_config / netconfig_data.
    """

    import time
    from iiutilities import netfun
    from iiutilities import dblib
    from cupid import pilib
    from iiutilities import utility
    from iiutilities import datalib
    from iiutilities.netfun import getifconfigstatus, getwpaclientstatus

    if not netconfig_data:
        netconfig_data = dblib.readonedbrow(pilib.dirs.dbs.system, 'netconfig')[0]
    if not ifaces_config:
        # Get config data
        ifaces_config = pilib.dbs.system.read_table('netifaceconfig', keyed_dict=True)
        # Unpack config
        # NOTE(review): json is not imported in this function — assumes a module-level
        # `import json`; confirm it exists at file top.
        for interface_name, element in ifaces_config.items():
            element['config'] = json.loads(element['config'])

    """
    We get last netstatus so that we can save last online times, previous online status, etc.
    """
    if not lastnetstatus:
        try:
            lastnetstatus = dblib.readonedbrow(pilib.dirs.dbs.system, 'netstatus')[0]
        except:
            utility.log(pilib.dirs.logs.system,
                        'Error reading netstatus. Attempting to recreate netstatus table with default values. ',
                        1, pilib.loglevels.network)
            try:
                dblib.emptyandsetdefaults(pilib.dirs.dbs.system, 'netstatus')
                lastnetstatus = dblib.readonedbrow(pilib.dirs.dbs.system, 'netstatus')[0]
            except:
                # NOTE(review): if this also fails, lastnetstatus stays None and the
                # subscript accesses below will raise. Confirm intended failure mode.
                utility.log(pilib.dirs.logs.system, 'Error recreating netstatus. ', 1,
                            pilib.loglevels.network)

    utility.log(pilib.dirs.logs.network, 'Reading ifaces with ifconfig status. ', 4,
                pilib.loglevels.network)

    # Returns a dictionary, config is unpacked
    ifaces_status = getifconfigstatus()

    """
    We supplement with wpa status on the wlan interfaces if station mode should be set
    Here, we need to decide which interfaces should have a proper wpa status
    """
    for interface_name, this_interface_config in ifaces_config.items():
        # NOTE(review): assumes every configured interface is present in the
        # getifconfigstatus() result; a down/missing iface would KeyError here.
        this_interface_status = ifaces_status[interface_name]

        # Insert mode into status
        this_interface_status['mode'] = this_interface_config['mode']
        # this_interface_status = json.loads(this_interface_status['config'])
        if this_interface_config['mode'] == 'station':
            this_interface_status['config']['wpastate'] = getwpaclientstatus(interface_name)
        else:
            this_interface_status['config']['wpastate'] = ''

        this_interface_status_result = check_interface_status(this_interface_config,
                                                              this_interface_status)
        this_interface_status['status'] = this_interface_status_result['status']
        this_interface_status['status_message'] = this_interface_status_result['status_message']

    """
    Then write it to the table
    TODO : Double-check no problems here with not recreating status from scratch (stale data, et.)
    """
    utility.log(pilib.dirs.logs.network, 'Sending ifaces query \n {}. '.format(ifaces_status), 5,
                pilib.loglevels.network)
    # print(ifacesdictarray)

    this_schema = dblib.sqliteTableSchema([
        {'name': 'name', 'primary': True},
        {'name': 'config'},
        {'name': 'status'},
        {'name': 'status_message'},
        {'name': 'mode'}
    ])
    pilib.dbs.system.create_table('netifacestatus', schema=this_schema, queue=True)

    from copy import deepcopy
    # print('IFACES')
    # print(ifaces_status)
    for interface_name, interface in ifaces_status.items():
        insert = deepcopy(interface)
        # Pack up the interface configuration data
        try:
            insert['config'] = json.dumps(interface['config'])
        except:
            print('error with interface {}'.format(interface_name))
            print(interface)

        pilib.dbs.system.insert('netifacestatus', insert, queue=True)

    """
    Now we check to see if we can connect to WAN
    """
    utility.log(pilib.dirs.logs.network, 'Checking pingtimes. ', 4, pilib.loglevels.network)
    okping = float(netconfig_data['pingthreshold'])
    pingresults = netfun.runping('8.8.8.8', quiet=quiet)
    # pingresults = [20, 20, 20]

    # Average ping; 0 means no replies at all.
    pingresult = sum(pingresults) / float(len(pingresults))
    if pingresult == 0:
        wanaccess = 0
        latency = 0
    else:
        latency = pingresult
        if pingresult < okping:
            wanaccess = 1
            pilib.dbs.system.set_single_value('netstatus', 'WANaccess', 1, queue=True)
            # Record the moment we (re)gained WAN access.
            if lastnetstatus['WANaccess'] == 0 or not lastnetstatus['onlinetime']:
                lastnetstatus['onlinetime'] = datalib.gettimestring()
        else:
            wanaccess = 0

    if not wanaccess:
        dblib.setsinglevalue(pilib.dirs.dbs.system, 'netstatus', 'WANaccess', 0)
        # Record the moment we lost WAN access.
        if lastnetstatus['WANaccess'] == 1 or not lastnetstatus['offlinetime']:
            lastnetstatus['offlinetime'] = datalib.gettimestring()

    # we set all the values here, so when we retreive it we get changed and also whatever else
    # happens to be there.
    lastnetstatus['latency'] = latency
    lastnetstatus['updatetime'] = datalib.gettimestring()
    lastnetstatus['WANaccess'] = wanaccess

    # pilib.dbs.system.insert('netstatus', lastnetstatus, queue=True)

    utility.log(pilib.dirs.logs.network, 'Done checking pings. ', 4, pilib.loglevels.network)

    if netconfig_data['netstatslogenabled']:
        # print('going to log stuff')
        dblib.logtimevaluedata(pilib.dirs.dbs.log, 'system_WANping', time.time(), pingresult, 1000,
                               netconfig_data['netstatslogfreq'])

    # This is kinda ugly. Should be fixed.
    # netstatusdict = {'WANaccess':wanaccess, 'latency': latency, 'updatetime': updatetime}

    pilib.dbs.system.execute_queue(debug=True)

    return {'netstatusdict': lastnetstatus, 'ifaces_status': ifaces_status,
            'ifaces_config': ifaces_config, 'netconfig_data': netconfig_data}
def process_channel(**kwargs):
    """
    Run one control pass for a single channel and log the result.

    Accepts either:
        channel       -- a channel row (dict) already read from the 'channels' table, or
        channel_name  -- a name used to look the row up in control_db.

    For 'local' channels: validates setpoint/process values, runs the control
    algorithm (auto mode) or reads the manual action, and sets/resets the
    channel's positive/negative outputs subject to system/channel output
    enables and the algorithm's min on/off times.
    For 'remote' channels: forwards any pending 'setpoint_value' / 'enabled'
    writes to the underlying MBTCP or MOTE input device.

    Returns the accumulated status message string (or None on a bad
    channel_name lookup). Side effects: writes to control_db and log_db.

    NOTE(review): relies on module-level names system_db, control_db, log_db,
    outputs, default_control_algorithm, pilib, datalib, dblib, controllib --
    none are defined in this function; confirm they exist at module scope.
    """
    systemstatus = system_db.read_table_row('systemstatus')[0]

    if 'channel' in kwargs:
        channel = kwargs['channel']
    elif 'channel_name' in kwargs:
        channels = control_db.read_table('channels', '"name"=\'' + kwargs['channel_name'] + "'")
        if len(channels) == 1:
            channel = channels[0]
        else:
            # Ambiguous or missing channel name: bail out (returns None).
            print('wrong number of channels returned. aborting')
            return

    # channelindex = str(int(channel['channelindex']))
    # Per-channel log table, e.g. channel_mychannel_log.
    logtablename = 'channel' + '_' + channel['name'] + '_log'
    # NOTE(review): 'time' shadows any module-level time import; it is a
    # timestamp string here, reassigned again before output writes below.
    time = datalib.gettimestring()
    # Assume outputs should be disabled unless a branch below takes ownership.
    disableoutputs = True

    status_msg = channel['name'] + ': '

    log_tablenames = log_db.get_table_names()

    # Channel enabled means different things for different types of channels
    channel_condition = '"name"=\'{}\''.format(channel['name'])

    # Create log if it doesn't exist
    if logtablename not in log_tablenames:
        log_db.create_table(logtablename, pilib.schema.channel_datalog)

    if channel['type'] == 'local':
        if channel['enabled']:
            status_msg = ''
            # Validate setpoint; disable the channel (in memory and in the DB)
            # if it is not a number.
            try:
                setpoint_value = float(channel['setpoint_value'])
            except:
                channel['enabled'] = 0
                status_msg += 'Error with setpoint. Disabling'
                control_db.set_single_value('channels', 'enabled', 0, channel_condition)

            # Need to test for age of data. If stale or disconnected, invalidate
            try:
                process_value = float(channel['process_value'])
            except:
                status_msg += 'Invalid control value. Disabling channel. '
                channel['enabled'] = 0
                control_db.set_single_value('channels', 'enabled', 0, channel_condition)

        # Move forward if still enabled after error-checking
        if channel['enabled']:
            status_msg += 'Channel Enabled. '

            # TODO : look at channel auto mode.
            if channel['mode'] == 'auto':
                status_msg += 'Mode:Auto. '
                # print('running auto sequence')

                # run algorithm on channel; returns (action, message).
                response = controllib.runalgorithm(pilib.dirs.dbs.control, pilib.dirs.dbs.session, channel['name'])
                action = response[0]
                message = response[1]

                status_msg += ' ' + response[1] + ' '
                status_msg += 'Action: ' + str(action) + '. '

                # Set action in channel
                controllib.setaction(pilib.dirs.dbs.control, channel['name'], action)
            elif channel['mode'] == 'manual':
                # print('manual mode')
                status_msg += 'Mode:Manual. '
                action = controllib.getaction(pilib.dirs.dbs.control, channel['name'])
            else:
                # print('error, mode= ' + mode)
                # NOTE(review): 'action' is left unset on this path; the
                # comparisons below would raise. Presumably mode is always
                # auto/manual -- confirm.
                status_msg += 'Mode:Error. '

            if systemstatus['enableoutputs']:
                status_msg += 'System outputs enabled. '
                if channel['outputs_enabled']:
                    status_msg += 'Channel outputs enabled. '
                    disableoutputs = False

                    # find out whether action is positive or negative or
                    # not at all. and act. for now, this is binary, but in the future
                    # this will be a duty cycle daemon
                    outputsetnames = []
                    outputresetnames = []
                    if action > 0:
                        print("set positive output on")
                        outputsetnames.append(channel['positive_output'])
                        outputresetnames.append(channel['negative_output'])
                    elif action < 0:
                        print("set negative output on")
                        outputsetnames.append(channel['negative_output'])
                        outputresetnames.append(channel['positive_output'])
                    elif action == 0:
                        status_msg += 'No action. '
                        outputresetnames.append(channel['positive_output'])
                        outputresetnames.append(channel['negative_output'])
                    else:
                        status_msg += 'Algorithm error. Doing nothing.'

                    # Check to see if outputs are ready to enable/disable
                    # If not, pull them from list of set/reset
                    control_algorithm = control_db.read_table('controlalgorithms', condition='"name"=\'' + channel['controlalgorithm'] + "'")
                    if len(control_algorithm) == 1:
                        algorithm = control_algorithm[0]
                    else:
                        status_msg += 'Algorithm Error: Not found (or multiple?). Using default. '
                        algorithm = default_control_algorithm

                    # Enforce minimum off-time before an output may be turned on.
                    outputstoset = []
                    for outputname in outputsetnames:
                        offtime = control_db.get_single_value('outputs', 'offtime', condition='"name"=\'' + outputname + "'")
                        if datalib.timestringtoseconds(
                                datalib.gettimestring()) - datalib.timestringtoseconds(offtime) > algorithm[
                                'minofftime']:
                            outputstoset.append(outputname)
                        else:
                            status_msg += 'Output ' + outputname + ' not ready to enable. '

                    # Enforce minimum on-time before an output may be turned off.
                    outputstoreset = []
                    for outputname in outputresetnames:
                        ontime = control_db.get_single_value('outputs', 'ontime', condition='"name"=\'' + outputname + "'")
                        if datalib.timestringtoseconds(
                                datalib.gettimestring()) - datalib.timestringtoseconds(ontime) > algorithm[
                                'minontime']:
                            outputstoreset.append(outputname)
                        else:
                            status_msg += 'Output ' + outputname + ' not ready to disable. '

                    """ TODO: Change reference to controlinputs to name rather than id. Need to double-check enforcement of no duplicates."""

                    # Find output in list of outputs if we have one to set
                    time = datalib.gettimestring()
                    if len(outputstoset) > 0 or len(outputstoreset) > 0:
                        # NOTE(review): 'outputs' is not defined in this
                        # function -- presumably a module-level list of output
                        # rows. Confirm it is current when this runs.
                        for output in outputs:
                            id_condition = '"id"=\'' + output['id'] + "'"
                            if output['name'] in outputstoset:
                                # check current status
                                currvalue = output['value']
                                if not currvalue:  # No need to set if otherwise. Will be different for analog out
                                    # set ontime
                                    control_db.set_single_value('outputs', 'ontime', time, id_condition, queue=True)
                                    # set value
                                    control_db.set_single_value('outputs', 'value', 1, id_condition, queue=True)
                                    status_msg += 'Output ' + output['name'] + ' enabled. '
                                else:
                                    status_msg += 'Output ' + output['name'] + ' already enabled. '

                            if output['name'] in outputstoreset:
                                # check current status
                                currvalue = output['value']
                                if currvalue:  # No need to set if otherwise. Will be different for analog out
                                    # set offtime
                                    control_db.set_single_value('outputs', 'offtime', time, id_condition, queue=True)
                                    # set value
                                    control_db.set_single_value('outputs', 'value', 0, id_condition, queue=True)
                                    status_msg += 'Output ' + output['name'] + ' disabled. '
                                else:
                                    status_msg += 'Output ' + output['name'] + ' already disabled. '
                else:
                    status_msg += 'Channel outputs disabled. '
                    action = 0
            else:
                status_msg += 'System outputs disabled. '
                action = 0

            # Insert entry into control log
            # NOTE(review): this inserts into control_db, but logtablename was
            # created on log_db above (and the remote branch logs to log_db).
            # Looks like it should be log_db.insert -- confirm before changing.
            insert = {'time': time, 'process_value': channel['process_value'],
                      'setpoint_value': channel['setpoint_value'], 'action': channel['action'],
                      'algorithm': channel['algorithm_name'], 'enabled': channel['enabled'],
                      'status_msg': status_msg}
            control_db.insert(logtablename, insert, queue=True)
            log_options = datalib.parseoptions(channel['log_options'])
            log_db.size_table(logtablename, **log_options)
        else:
            # Chanel is disabled. Need to do active disable here.
            pass

    elif channel['type'] == 'remote':
        status_msg += 'Remote channel. '
        if channel['pending']:
            from iiutilities.datalib import parseoptions, dicttojson
            status_msg += 'Processing pending action. '
            # 'pending' is an options-string of values waiting to be pushed
            # to the remote device; entries are popped once delivered.
            pending = parseoptions(channel['pending'])
            if 'setpoint_value' in pending:
                status_msg += 'processing setpoint_value. '
                # Get control output and have a look at it.
                input_name = channel['sv_input']
                # try:
                inputs = control_db.read_table('inputs', '"name"=\'' + input_name + "'")
                # except:
                #     status_msg += 'Inputs query error. '
                #     return status_msg
                if len(inputs) == 1:
                    input = inputs[0]
                else:
                    status_msg += 'wrong number of query items returned, length: ' + str(len(inputs)) + ' for query on input name: ' + input_name
                    print('ERROR: ' + status_msg)
                    return status_msg

                # write_to_input(input, value)
                if input['type'] == 'MBTCP':
                    input_id = input['id']

                    # Now, using this id, we can determine uniquely which MBTCP entry it came from
                    # id format assumed: interfaceid_register_length -- TODO confirm.
                    splits = input_id.split('_')
                    interfaceid = splits[0]
                    register = splits[1]
                    length = splits[2]

                    string_condition = dblib.string_condition_from_lists(['interfaceid', 'register', 'length'], [interfaceid, register, length])
                    input_mb_entry = control_db.read_table('modbustcp', string_condition)[0]

                    # Get IP address
                    address = control_db.get_single_value('interfaces', 'address', '"id"=\'' + input_mb_entry['interfaceid'] + "'")

                    from iiutilities import netfun
                    if input_mb_entry['options']:
                        input_options = parseoptions(input_mb_entry['options'])
                        if 'scale' in input_options:
                            # Reverse the read-side scaling before writing back.
                            pending['setpoint_value'] = float(pending['setpoint_value'])/float(input_options['scale'])
                    try:
                        result = netfun.writeMBcodedaddresses(address, register, [float(pending['setpoint_value'])], convert=input_mb_entry['format'])
                    except:
                        status_msg += 'Error in modbus'
                    else:
                        if result['statuscode'] == 0:
                            # Clear pending setpoint_value
                            pending.pop('setpoint_value', None)
                            pending_string = dicttojson(pending)
                            print('setting pending in setpointvaleu mbtcp')
                            control_db.set_single_value('channels','pending',pending_string, channel_condition)
                        else:
                            status_msg += 'modbus write operation returned a non-zero status of ' + str(result['status'])
                elif input['type'] == 'MOTE':
                    # address format assumed: node:address -- TODO confirm.
                    mote_node = input['address'].split(':')[0]
                    mote_address = input['address'].split(':')[1]
                    # Node 1 is the locally attached gateway; other nodes are
                    # reached by wrapping the command in a sendmsg.
                    if mote_node == '1':
                        message = '~setsv;' + mote_address + ';' + str(pending['setpoint_value'])
                    else:
                        message = '~sendmsg;' + str(mote_node) + ';;~setsv;' + mote_address + ';' + str(pending['setpoint_value'])

                    motes_db = pilib.cupidDatabase(pilib.dirs.dbs.motes)
                    from time import sleep
                    # Queue the message twice, one second apart (queuedtime is
                    # likely the unique key -- TODO confirm).
                    for i in range(2):
                        time = datalib.gettimestring(datalib.timestringtoseconds(datalib.gettimestring()) + i)
                        motes_db.insert('queued', {'queuedtime':time, 'message':message})

                    # Clear pending setpoint_value
                    pending.pop('setpoint_value', None)
                    pending_string = dicttojson(pending)
                    print('setting pending in setpoint_value mote')
                    control_db.set_single_value('channels', 'pending', pending_string, channel_condition)

            if 'enabled' in pending:
                status_msg += 'processing enabledvalue. '
                # Get control output and have a look at it.
                input_name = channel['enabled_input']
                try:
                    inputs = control_db.read_table('inputs', '"name"=\'' + input_name + "'")
                except:
                    status_msg += 'Inputs query error. '
                    return status_msg
                if len(inputs) == 1:
                    input = inputs[0]
                else:
                    status_msg += 'wrong number of query items returned, length: ' + str(len(inputs)) + '. '
                    return status_msg

                # write_to_input(input, value)
                if input['type'] == 'MBTCP':
                    input_id = input['id']

                    # Now, using this id, we can determine uniquely which MBTCP entry it came from
                    splits = input_id.split('_')
                    interfaceid = splits[0]
                    register = splits[1]
                    length = splits[2]

                    string_condition = dblib.string_condition_from_lists(
                        ['interfaceid', 'register', 'length'], [interfaceid, register, length])
                    input_mb_entry = control_db.read_table('modbustcp', string_condition)[0]

                    # Get IP address
                    address = control_db.get_single_value('interfaces', 'address', '"id"=\'' + input_mb_entry['interfaceid'] + "'")

                    from iiutilities import netfun
                    # print(address, register,input_mb_entry['format'], int(pending['enabled']))
                    if input_mb_entry['options']:
                        # NOTE(review): options are parsed but, unlike the
                        # setpoint branch, no scale is applied -- confirm intended.
                        input_options = parseoptions(input_mb_entry['options'])
                    try:
                        result = netfun.writeMBcodedaddresses(address, register, [int(pending['enabled'])], convert=input_mb_entry['format'])
                    except:
                        status_msg += 'Error in modbus'
                    else:
                        if result['statuscode'] == 0:
                            status_msg += 'That seems to have worked ok?'

                            # Clear pending enabled value
                            pending.pop('enabled', None)
                            pending_string = dicttojson(pending)
                            print('setting pending in enabled mbtcp')
                            control_db.set_single_value('channels', 'pending', pending_string, channel_condition)
                        else:
                            status_msg += 'modbus write operation returned a non-zero status of ' + str(
                                result['status'])
                elif input['type'] == 'MOTE':
                    mote_node = input['address'].split(':')[0]
                    mote_address = input['address'].split(':')[1]
                    if mote_node == '1':
                        message = '~setrun;' + mote_address + ';' + str(pending['enabled'])
                    else:
                        message = '~sendmsg;' + str(mote_node) + ';;~setrun;' + mote_address + ';' + str(
                            pending['enabled'])

                    motes_db = pilib.cupidDatabase(pilib.dirs.dbs.motes)
                    from time import sleep
                    for i in range(2):
                        # NOTE(review): here the '+ i' is INSIDE
                        # timestringtoseconds(gettimestring() + i), unlike the
                        # setpoint branch above where the offset is added to
                        # the seconds value. Adding an int to a string would
                        # raise TypeError -- this looks like a paren bug; confirm.
                        time = datalib.gettimestring(datalib.timestringtoseconds(datalib.gettimestring() + i))
                        motes_db.insert('queued', {'queuedtime': time, 'message': message})

                    # Clear pending enabled value
                    pending.pop('enabled', None)
                    pending_string = dicttojson(pending)
                    control_db.set_single_value('channels', 'pending', pending_string, channel_condition)

        # Insert entry into control log
        # NOTE(review): the 'algorithm' key reads channel['control_algorithm']
        # here but channel['algorithm_name'] in the local branch (and
        # channel['controlalgorithm'] is read earlier) -- confirm which field
        # actually exists in the channels schema.
        insert = {'time': time, 'process_value': channel['process_value'],
                  'setpoint_value': channel['setpoint_value'], 'action': channel['action'],
                  'algorithm': channel['control_algorithm'], 'enabled': channel['enabled'],
                  'status_msg': status_msg}
        # print(insert)
        log_db.insert(logtablename, insert)
        # Size log
        log_options = datalib.parseoptions(channel['log_options'])
        log_db.size_table(logtablename, **log_options)

    # If active reset and we didn't set channel modes, disable outputs
    # Active reset is not yet explicitly declared, but implied
    if disableoutputs and channel['type'] not in ['remote']:
        status_msg += 'Disabling Outputs. '
        for id in [channel['positive_output'], channel['negative_output']]:
            control_db.set_single_value('outputs','value',0,'"id"=\'' + id + "'", queue=True)
            status_msg += 'Outputs disabled for id=' + id + '. '

    # Set status message for channel
    control_db.set_single_value('channels', 'status_message', status_msg, channel_condition, queue=True)

    # Set update time for channel
    control_db.set_single_value('channels', 'control_updatetime', time, channel_condition, queue=True)

    # Execute query
    control_db.execute_queue()

    return status_msg
def readU6(registers):
    """
    Read a list of Modbus registers from a locally attached LabJack U6.

    Args:
        registers: iterable of integer register addresses to read.

    Returns:
        list of dicts, one per register, each with keys:
            'register' -- the address that was read
            'value'    -- the value returned, or '' on a read failure
            'status'   -- 0 on success, 1 on failure
            'time'     -- timestamp string from gettimestring()

    Raises:
        Whatever u6.U6() raises if no device is attached.
    """
    import u6
    from iiutilities.datalib import gettimestring

    device = u6.U6()
    resultslist = []
    try:
        for register in registers:
            # Per-register failures are recorded, not raised, so one bad
            # register does not abort the whole scan. Catch Exception rather
            # than a bare except so KeyboardInterrupt/SystemExit propagate.
            try:
                value = device.readRegister(register)
                status = 0
            except Exception:
                value = ''
                status = 1
            resultslist.append({'register': register, 'value': value,
                                'status': status, 'time': gettimestring()})
    finally:
        # Release the USB handle even if iteration fails; otherwise the
        # device can be left claimed until process exit.
        device.close()

    print(resultslist)
    return resultslist
def split_and_trim_db_by_date(logpath, **kwargs):
    """
    Split a log database into one database file per time division and prune
    the old entries from the original.

    Args:
        logpath: path to the sqlite log database to split.
        **kwargs: merged over defaults
            division: 'day' -- division passed through to split_time_db.
            timekey:  'time' -- column holding the row timestamp string.
            remove:   'true' -- NOTE(review): accepted but never read here;
                      presumably consumed by split_time_db -- confirm.

    Returns:
        dict with key 'modified_dbs': list of paths of the per-date databases
        that were created/updated.
    """
    from iiutilities import dblib
    from iiutilities.datalib import gettimestring
    import time

    settings = {'division': 'day', 'timekey': 'time', 'remove': 'true'}
    settings.update(kwargs)

    # split_time_db is assumed to return {date_key: {tablename: {'schema':...,
    # 'data': [rows]}}}; time.mktime(date) below implies the date keys are
    # struct_time values -- TODO confirm against split_time_db.
    data_by_date = split_time_db(logpath, **settings)
    dates = [date for date in data_by_date]
    dates.sort(reverse=True)
    # print('Most recent date', dates[0])
    if dates:
        # Most recent division: its rows are kept in the original db below.
        current_date = dates[0]
    else:
        # print('NO time yet.')
        # NOTE(review): with no dates the loop below never runs, so this
        # struct_time fallback is effectively unused -- confirm.
        current_date = time.gmtime()
    # print(current_date)
    # NOTE(review): 'dates' is reversed but not used after this point --
    # appears vestigial.
    dates.reverse()

    log_db = dblib.sqliteDatabase(logpath)

    modified_dbs = []
    for date in data_by_date:
        # Prune off time: keep only the date portion for the filename suffix.
        timestring = gettimestring(time.mktime(date)).split(' ')[0]
        # print(timestring, 'tables: ' +str(len([tablename for tablename in data_by_date[date]])))
        # for table in data_by_date[date]:
        #     print(table)

        # NOTE(review): splitting on the first '.' breaks if a directory in
        # logpath contains a dot -- assumes a simple 'name.db' style path.
        new_db_path = logpath.split('.')[0] + '_' + timestring + '.' + logpath.split('.')[1]
        modified_dbs.append(new_db_path)
        new_db = dblib.sqliteDatabase(new_db_path)

        # if table doesn't exist, we create it
        new_db.tablenames = new_db.get_table_names()
        # print('existing tablenames: ')
        # print(new_db.tablenames)
        for tablename in data_by_date[date]:
            if tablename not in new_db.tablenames:
                # print('creating table ' + tablename)
                new_db.create_table(tablename, data_by_date[date][tablename]['schema'], queue=True)
            # print(data_by_date[date][tablename]['data'][0])
            # print(data_by_date[date][tablename]['schema'].items)
            new_db.insert(tablename, data_by_date[date][tablename]['data'], queue=True)

        # print(new_db.queued_queries)
        new_db.execute_queue()

        # Now we need to remove the old entries from the original database,
        # except for the current division which stays live.
        if date != current_date:
            for tablename in data_by_date[date]:
                for datum in data_by_date[date][tablename]['data']:
                    log_db.delete(tablename, '"' + settings['timekey'] + '"=' + "'" + datum[settings['timekey']] + "'", queue=True)

            # print(log_db.queued_queries)
            # print('Deletes',len(log_db.queued_queries))
            log_db.execute_queue()

    return {'modified_dbs': modified_dbs}
def process(self):
    """
    Run one evaluation pass of this action/alarm.

    Expects instance attributes: enabled, status, ontime, offtime, active,
    ondelay, offdelay, activereset, actionfrequency, lastactiontime,
    statusmsg, and methods determine_status(), onact(), offact().

    Flow when enabled: refresh status via determine_status(), stamp
    ontime/offtime on status transitions, set/clear 'active' once the
    on/off delay has elapsed, and call onact()/offact() when the action
    frequency allows. When disabled: append to statusmsg and force status 0.
    """
    from iiutilities import datalib

    # TODO: Always determine status. This loads the value into the indicators, etc.
    if self.enabled:
        act = False
        self.statusmsg = datalib.gettimestring() + ' : Enabled and processing. '
        last_status = bool(self.status)

        # Update status.
        self.determine_status()

        # retrofit. i lazy.
        currstatus = bool(self.status)

        # NOTE(review): both branches of this if/else are identical -- the
        # split appears vestigial.
        if last_status:
            self.statusmsg += 'Last status is ' + str(last_status) + '. Currstatus is ' + str(currstatus) + '. '
        else:
            self.statusmsg += 'Last status is ' + str(last_status) + '. Currstatus is ' + str(currstatus) + '. '

        currenttime = datalib.gettimestring()

        # if status is true and current status is false, set ontime (or if on/off time field is empty)
        if currstatus and (not last_status or not self.ontime):
            # print(str(curstatus) + ' ' + str(self.status))
            self.statusmsg += 'Setting status ontime. '
            self.ontime = datalib.gettimestring()
        elif not currstatus and (last_status or not self.offtime):
            self.statusmsg += 'Setting status offtime. '
            self.offtime = datalib.gettimestring()

        # print('CURR STATUS',currstatus)
        # print('SELF.ACTIVE',self.active)

        # if status is true and alarm isn't yet active, see if ondelay exceeded
        if currstatus and not self.active:
            # print(pilib.timestringtoseconds(currenttime))
            statusontime = datalib.timestringtoseconds(currenttime) - datalib.timestringtoseconds(self.ontime, defaulttozero=True)
            # print(statusontime)
            if statusontime >= float(self.ondelay):
                self.statusmsg += 'Setting action active. '
                self.active = 1
            else:
                self.statusmsg += 'On delay not reached. '
                # print('on',self.ontime)
                # print('now',currenttime)

        # if status is not true and alarm is active, see if offdelay exceeded
        if not currstatus and self.active:
            statusofftime = datalib.timestringtoseconds(currenttime) - datalib.timestringtoseconds(self.offtime, defaulttozero=True)
            if statusofftime >= float(self.offdelay):
                self.statusmsg += 'Setting action inactive. '
                self.active = 0

                # act on inactive transition
                # Send an alert / reset indicator if activereset is on
                if self.activereset:
                    time_since_last_action = datalib.timestringtoseconds(currenttime) - datalib.timestringtoseconds(
                        self.lastactiontime, defaulttozero=True)
                    if time_since_last_action >= float(self.actionfrequency):
                        act = True
                        self.statusmsg += "Time to act on activereset. " + str(
                            time_since_last_action) + ' since last action, with action frequency of ' + str(
                            self.actionfrequency) + '. '
                    else:
                        act = False
                        self.statusmsg += "Not yet time to act."
            else:
                self.statusmsg += 'Off delay not reached. '

        # test to see if it is time to alert, based on delay ond alert time
        # print(self.statusmsg)
        if self.active:
            # check to see if it is time to alert
            # For things like outputs, actionfrequency should be zero to always enforce that action is on.
            # print(pilib.timestringtoseconds(currenttime))
            # print(pilib.timestringtoseconds(self.lastactiontime))
            # print(float(self.actionfrequency))
            # print(pilib.timestringtoseconds(currenttime)-pilib.timestringtoseconds(self.lastactiontime))
            time_since_last_action = datalib.timestringtoseconds(currenttime) - datalib.timestringtoseconds(self.lastactiontime, defaulttozero=True)
            if time_since_last_action >= float(self.actionfrequency):
                act = True
                self.statusmsg += "Time to act. " + str(time_since_last_action) + ' since last action, with action frequency of ' + str(self.actionfrequency) + '. '
            else:
                act = False
                self.statusmsg += "Not yet time to act."
        else:
            # Active reset only happens on the transition.
            pass

        if act:
            # We're ready to alert or alert again.
            self.lastactiontime = currenttime
            if currstatus:
                self.onact()
            else:
                self.offact()
    else:
        # NOTE(review): appends to statusmsg without resetting it first
        # (the enabled branch assigns a fresh message) -- the disabled
        # message accumulates across calls; confirm intended.
        self.statusmsg += 'Action disabled.'
        self.status = 0