def singleRun(args):
    """
    Takes in a string of arguments that are required to have a ".phot" and ".fake"
    file followed by optional MATCH flags or a ".param" file.  This will return,
    in a list, a string MATCH command to be sent off.
    """
    args = " ".join(args)
    if ".fake" not in args and ".phot" not in args:
        print("Missing \".phot\" and/or \".fake\" file(s)")
        sys.exit(1)

    fakeFile = None
    photFile = None
    paramFile = None
    fitName = None
    workingD = os.getcwd() + "/"  # gets the directory the executable has been invoked in

    # parse arguments to extract them
    args = args.split()
    print("Arguements:", args)
    idx = []  # indices to delete after extracting the file names needed to run MATCH
    for i, arg in enumerate(args):
        if ".fake" in arg:
            print("Found fake file:", arg)
            fakeFile = arg
            idx.append(i)
        if ".phot" in arg:
            print("Found photometry file:", arg)
            photFile = arg
            idx.append(i)
        if ".param" in arg:
            print("Found parameter file:", arg)
            paramFile = arg
            idx.append(i)
        if "fit" in arg:
            print("Found fit name:", arg)
            fitName = arg
            idx.append(i)

    # delete extracted file names in args
    # (hoisted: the set was previously rebuilt on every comprehension iteration)
    extracted = set(idx)
    args = [a for i, a in enumerate(args) if i not in extracted]
    print("Remaining arguements:", args)

    # process any other arguements
    # BUG FIX: this used to test `len(arg)` -- the *last loop variable* -- instead
    # of `len(args)`, and left flags = None when there were no extra arguments,
    # which made every later `in` / `append` / `join` on flags raise TypeError.
    flags = parse(args) if len(args) > 0 else []
    print("Retrieved flags:", flags)

    # if there is no passed in ".param" file then generate one based off the
    # default one in the executable directory
    param = None
    if paramFile is None:
        # generate ".param" file and save it in working directory.
        # sys.argv[0] gives the location of the executable
        param = MatchParam(toExecutable + "/default.param", workingD + photFile, workingD + fakeFile)
        if "-ssp" in flags:
            param.ssp = True
        background = param.get("background")
        if background is not None and "default" in background:
            back = raw_input("Specify Background: ")
            param.change("background", back)
        if param.get("scale") == "scale":
            scale = raw_input("Specify scale: ")
            try:
                scale = float(scale)
            except ValueError:
                print("Not a float try again...")
                scale = raw_input("Specify scale: ")
            # BUG FIX: the key here was "scale=" while the twin branch below used
            # "scale"; use the same key consistently.
            param.change("scale", float(scale))
        param.save()
        paramFile = param.name
        # make symbolic link here
        if not os.path.isfile(workingD + "parameters.param"):
            subprocess.call(["ln", "-s", param.savedTo, workingD + "parameters.param"])
    else:
        # passed in parameter file no need to call a save on a MatchParam object
        # (mostly used to scan for zinc)
        if os.path.isfile(workingD + paramFile):
            param = MatchParam(workingD + paramFile, workingD + photFile, workingD + fakeFile)
            if "-ssp" in flags:
                param.ssp = True
            if param.zinc and param.ssp:
                # can't have zinc and ssp both true need to make a new file otherwise
                print(paramFile.split("."))
                newFileName = paramFile.split(".")
                newFileName[0] += "_ssp"
                newFileName = ".".join(newFileName)
                answer = raw_input("Found zinc and ssp flags does the user want to create a new parameter file with name %s? (y/n) " % newFileName)
                if answer in ['Y', 'y']:
                    # change the logZmin
                    param.change("logZmin", -1.5)
                    param.save(name=newFileName)
                else:
                    print("Zinc and ssp flags both specified...exiting")
                    sys.exit(1)
            if param._calculateMaxOrMin:
                # Had to calculate filter mins or maxes and so we save over the file
                param.save(workingD, paramFile)
        else:
            answer = raw_input("User specified parameter file but we did not find it, make one with this name %s? (y/n) " % paramFile)
            print("answer:", answer)
            if answer in ['Y', 'y']:
                param = MatchParam(toExecutable + "/default.param", workingD + photFile, workingD + fakeFile)
                if "-ssp" in flags:
                    param.ssp = True
                background = param.get("background")
                print("BACKGROUND:", background)
                if background is not None and "default" in background:
                    back = raw_input("Specify Background: ")
                    param.change("background", back)
                if param.get("scale") == "scale":
                    scale = raw_input("Specify scale: ")
                    try:
                        scale = float(scale)
                    except ValueError:
                        print("Not a float try again...")
                        scale = raw_input("Specify scale: ")
                    param.change("scale", float(scale))
                param.save(name=paramFile)
            else:
                print("Specified parameter name that does not exit in current directory...")
                sys.exit(1)

    if param.zinc and not param.ssp and "-zinc" not in flags:
        flags.append("-zinc")

    command = ""  # MATCH command that will be sent to server
    # build command (explicitely shown)
    command += "calcsfh "
    command += workingD + paramFile + " "
    command += workingD + photFile + " "
    command += workingD + fakeFile + " "
    # get next fit name
    if fitName is None:
        fitName = getFitName()
    command += workingD + fitName
    # append flags to command
    for flag in flags:
        command += " " + flag
    # add forwarding to file in command
    command += " &> " + workingD + fitName + ".co"

    # write in logging
    # NOTE(review): unlike the "/default.param" joins above there is no "/" before
    # "logs/" here -- confirm toExecutable ends with a slash or this path is wrong.
    log = MyLogger.myLogger("generate commands", toExecutable + "logs/generated_commands")
    # create stripped down command (ie no working directory included)
    stripCommand = "calcsfh " + paramFile + " " + photFile + " " + fakeFile + " " + fitName + " " + " ".join(flags) \
                   + " &> " + fitName + ".co"
    # create empty file so getFitName can iterate to another fit number
    f = open(workingD + fitName, "w")
    f.close()
    log.info("Generated command (%s): %s" % (os.getcwd(), stripCommand))
    return [command]
def Add(conf):
    """
    Read one temperature/humidity sample from an SHT31 sensor (Adafruit driver
    in conf['fd']) and return it as a record dict:
    {'time': epoch, conf['fields'][0]: temp oC, conf['fields'][1]: RH %}.
    On read/configuration errors the record is returned with None values.
    Drives the chip heater via conf['alarm'] to avoid condense in the chip.
    Raises IOError after repeated NaN reads or on lost connection.
    """
    rec = {'time': int(time()), conf['fields'][0]: None, conf['fields'][1]: None}
    temp = None
    humidity = None
    # avoid condense in the chip
    if not 'alarm' in conf.keys():
        conf['alarm'] = True
        conf['errors'] = 0
    try:
        if (conf['fd'] != None) and (conf['Ada_import'] != None):
            try:
                try:
                    # heat the chip first when a condense alarm was flagged
                    if conf['alarm'] and (not conf['fd'].is_heater_active()):
                        conf['fd'].set_heater(True)
                        sleep(5)
                except:
                    pass
                temp = conf['fd'].read_temperature()
                humidity = conf['fd'].read_humidity()
                try:
                    if conf['fd'].is_heater_active():
                        conf['fd'].set_heater(False)
                    conf['alarm'] = False
                except:
                    pass
            except ValueError:
                MyLogger.log(modulename, 'ATTENT', "Read error")
                return rec
        else:
            MyLogger.log(modulename, 'ERROR', "Configuration error.")
            return rec
    except:
        MyLogger.log(modulename, 'ERROR', "i2c access error. /dev/gpiomem permissions correct?")
        return rec
    # BUG FIX: `x == float("nan")` is ALWAYS False (NaN != NaN), so NaN reads were
    # never detected here.  `x != x` is True exactly for NaN floats (False for None).
    if (temp != temp) or (humidity != humidity):
        # BUG FIX: log level was the typo 'Arning'
        MyLogger.log(modulename, 'WARNING', "SHT31 read error, heating chip")
        conf['fd'].reset()
        conf['fd'].clear_status()
        conf['fd'].set_heater(True)
        conf['errors'] += 1
        if conf['errors'] > 25:
            MyLogger.log(modulename, 'ERROR', "Too many SHT31 %s read errors" %
                ('temp' if temp != temp else 'hum'))
            # BUG FIX: the '%' operator was missing below, which raised
            # "TypeError: 'str' object is not callable" instead of this IOError.
            raise IOError("SHT31 %s mailfunctioning" % ('temp' if temp != temp else 'hum'))
        return rec
    if (temp == None) or (humidity == None):
        MyLogger.log(modulename, 'ERROR', "Access error. Connection problem?")
        raise IOError("SHT31 lost connection.")
    conf['errors'] = 0
    if type(temp) is float:
        if conf['debug']:
            MyLogger.log(modulename, 'DEBUG', "Temperature : %5.2f oC not calibrated" % temp)
        # out-of-range reading: flag the heater for the next round
        if (temp < -20) or (temp > 40.0):
            conf['alarm'] = True
    else:
        MyLogger.log(modulename, 'DEBUG', "Temperature : None")
    if type(humidity) is float:
        if conf['debug']:
            MyLogger.log(modulename, 'DEBUG', "Rel.Humidity: %5.2f %% not calibrated" % humidity)
        if humidity > 95.0:
            conf['alarm'] = True
    else:
        MyLogger.log(modulename, 'DEBUG', "Rel.Humidity: None")
    if conf['fd'].is_command_error() or conf['fd'].is_data_crc_error():
        conf['fd'].reset()
        conf['fd'].clear_status()
        return rec
    # BUG FIX: was `Conf['raw']` (capital C) -- the wrong (module) global, not
    # the conf dict passed in.
    if ('raw' in conf.keys()) and (type(conf['raw']) is module):
        conf['raw'].publish(tag='%s' % conf['type'].lower(),
                            data='temp=%.1f,rh=%.1f' % (temp * 1.0, humidity * 1.0))
    temp = calibrate(0, conf, temp)
    humidity = calibrate(1, conf, humidity)
    rec = {'time': int(time()), conf['fields'][0]: round(temp, 2),
           conf['fields'][1]: int(humidity)}
    return rec
__author__ = "Tristan J. Hillis" """ Comment on documentation: When reading the doc strings if "Pre:" is present then this stands for "precondition", or the conditions in order to invoke something. Oppositely, "Post:" stands for "postcondition" and states what is returned by the method. File Description: This file contains a set of functions that the GUI or server code calls throughout. """ ## Global Variables # Deprecated import MyLogger logger = MyLogger.myLogger("AddLinearSpacer.py", "client") # Get gregorian date, local #d = date.today() #logFile = open("/home/mro/ScienceCamera/gui/logs/log_gui_" + d.strftime("%Y%m%d") + ".log", "a") def AddLinearSpacer(boxsizer, pixelSpacing): """ A one-dimensional spacer along only the major axis for any BoxSizer Found this on a wxPython tutorial and it has proved to be very handy. It takes in a box sizer as boxsizer and the spacing as pixelSpacing and expands the passed in sizer along its major axis. This is used over boxsizer.AddSpacer(...) as this expands along both axises causing untold issues. It also is more intuitive than boxsizer.AddSpacer((..,..)) where you pass a 0 to the width or height to obtain the same
help='parameter group name to use') grp = parser.add_argument_group('AgriMET related options') grp.add_argument('--force', action='store_true', help='run once fetching information') grp.add_argument('--forceStats', action='store_true', help='run stats generation once ') grp.add_argument('--earliestDate', type=str, help='Earliest date to fetch') grp.add_argument('--input', type=str, help='Read from this file instead of fetching URL') grp.add_argument('--output', type=str, help='Write page fetched from URL to this file') MyLogger.addArgs(parser) # Add log related options args = parser.parse_args() logger = MyLogger.mkLogger(args, __name__) logger.info('Args=%s', args) try: params = Params.load(args.db, args.group, logger) logger.info('Params=%s', params) qExcept = queue.Queue() # Thread exceptions are sent here thrFetch = Fetcher(args, params, logger, qExcept) # Fetch Agrimet information thrStats = Stats(args, params, logger, qExcept) # Build summary information
workQueue = Queue() activeThreads = { } # this should only every be one more larger than the number of CPUs on the system. # main thread handles incoming data and one thread waits on events and the other threads # execute the bash commands. dAvRangeThreads = {} dAvRangeGroup = {} # dictionary that holds a dictionary of commands doneThreads = Queue() # Events for watchdogs watcherEvent = threading.Event( ) # a single thread waits for set to join a certain thread condorEvent = threading.Event() # activates the condor thread log = MyLogger.myLogger("MatchServer", "server") def getThreadNumber(): print(activeThreads) num = None if len(activeThreads) == 0: # no threads yet num = '1' else: # get the thread number that is missing in the range of 1 to 8 keys = activeThreads.keys() print(keys) keys = map(int, keys) keys = sorted(keys, key=int) print(keys) count = 1 for name in keys:
def on_subscribe(client, obj, MiD, granted_qos):
    """Paho-MQTT SUBACK callback: remember the message id and log the granted QoS."""
    global waiting, mid
    mid = MiD
    MyLogger.log(modulename, 'DEBUG', "mid: %s,qos:%s" % (mid, granted_qos))
def Add(conf):
    """
    Read all configured AlphaSense/AD gas channels (conf['fd'] is a list of ADC
    handles) and return a record dict {'time': epoch, <field>: value, ...}.
    Raw ADC counts are converted to mA / PPM / ug/m3 per conf['units'] and
    passed through calibrate().  Raises IOError when the AD converter is lost.
    """
    def PPB2ugm3(gas, ppb, temp):
        # convert ppb of a known gas to ug/m3 at ambient temperature
        mol = {
            'so2': 64.0,   # RIVM 2.71, ?
            'no2': 46.0,   # RIVM 1.95, ?
            'no': 30.01,   # RIVM 1.27, ?
            'o3': 48.0,    # RIVM 2.03, ?
            'co': 28.01,   # RIVM 1.18, ?
            'co2': 44.01,  # RIVM 1.85, ?
            'nh3': 17.031,
        }
        if not gas in mol.keys():
            # FIX: `raise ValueError, msg` is Python-2-only syntax; the call form
            # is valid on both 2 and 3.
            raise ValueError("%s unknown gas" % gas)
        if ppb < 0:
            return 0
        return round(ppb * 12.187 * mol[gas] / (273.15 + temp), 2)

    # BUG FIX: rec was returned below without ever being initialised in this
    # function (NameError); start with the timestamp, one entry per gas is
    # added in the loop.
    rec = {'time': int(time())}
    try:
        temp = conf['meteo']['temp']
    except:
        temp = 25.0  # default temp
    if (conf['fd'] == None) or (not len(conf['fd'])):
        return rec
    rawData = []
    for gas in range(0, len(conf['fd'])):
        value = None
        if conf['fd'][gas] != None:
            try:
                (value, reference) = conf['fd'][gas].adc_read()
                rawData.append("%s=%d/%d" % (conf['fields'][gas], value, reference))
                mAval = value * conf['sensitivity'][gas][1] / reference - conf['sensitivity'][gas][0]
                if mAval < 0:
                    mAval = 0
                # V=I2R -> I = sqrt(V/R) ???
                # R = 400/reference ???
                ppmval = (mAval / conf['sensitivity'][gas][1]) * conf['sensitivity'][gas][2]
                if conf['debug']:
                    print("NH3: %d/%d (%.3f mV)" % (value, reference, 1000.0 * float(value) / reference))
                    print("NH3 converted: %.1f mA, %.1f PPM" % (mAval, ppmval))
                if conf['units'][gas].lower() == 'ppm':
                    value = calibrate(gas, conf, ppmval)
                elif conf['units'][gas].lower() == 'ug/m3':
                    value = calibrate(gas, conf, ppmval)
                    # BUG FIX: the unit string ('ug/m3') was passed as the gas
                    # name, so PPB2ugm3 always raised "ug/m3 unknown gas".
                    # Pass the gas field name instead (e.g. 'nh3').
                    # TODO(review): confirm conf['fields'] entries match the mol keys.
                    value = PPB2ugm3(conf['fields'][gas].lower(), value, temp)
                elif conf['units'][gas].lower() == 'ma':
                    value = mAval
                elif type(value) is int:
                    value = float(value)
            except ValueError:
                MyLogger.log(modulename, 'ERROR', "Read or config error")
                continue
        else:
            MyLogger.log(modulename, 'ERROR', "Configuration error.")
            continue
        if value == None:
            MyLogger.log(modulename, 'ERROR', "Access error. Connection problem?")
            raise IOError("AlphaSense lost AD connection.")
        if type(value) is float:
            if conf['debug']:
                MyLogger.log(modulename, 'DEBUG', "%s : %5.2f %s not calibrated" %
                    (conf['fields'][gas], value, conf['units'][gas]))
        else:
            MyLogger.log(modulename, 'DEBUG', "%s : None" % conf['fields'][gas])
        rec[conf['fields'][gas]] = round(value, 2)
    if ('raw' in conf.keys()) and (type(conf['raw']) is module):
        conf['raw'].publish(tag='%s' % conf['type'].lower(), data=','.join(rawData))
    return rec
def setWebValue(nid, item, value, adebug=False):
    """
    Push one meta-data item of a measurement kit to the Drupal web database.
    nid: Drupal node id; item: logical field name (key of fld2drupal);
    value: new value; adebug: print the query instead of executing the update.
    Returns True on success, False when the item is unknown or the update failed.
    NOTE(review): values are spliced into SQL by string interpolation -- fine for
    trusted admin data, but parameterize if values can ever come from users.
    """
    global prepend, fld2drupal, modulename
    if not item in fld2drupal.keys() or not fld2drupal[item][1]:
        return False
    # normalise value to an SQL literal
    if type(value) is bool:
        value = "1" if value else "0"
    elif item in ['first']:
        value = "'%s'" % datetime.datetime.fromtimestamp(value).strftime("%Y-%m-%d")
    else:
        value = "'%s'" % str(value)
    update = ''  # compile update Web DB Drupal query
    try:
        if item == 'coordinates':  ### kaart_meetkit type
            # BUG FIX: the split produced *strings*; max()/min() then compared
            # lexicographically and the %3.7f formats below raised TypeError.
            # Convert to float first (value is quoted at this point, hence [1:-1]).
            ords = [float(x) for x in value[1:-1].split(',')]
            # in Nld lat is > lon, watchout for script and admin errors
            # entity_type = map point
            lat = max(ords[0], ords[1])
            lon = min(ords[0], ords[1])
            flds = [
                fld2drupal[item][0] + '_lat = %3.7f' % lat,
                fld2drupal[item][0] + '_top = %3.7f' % lat,
                fld2drupal[item][0] + '_bottum = %3.7f' % lat,
                fld2drupal[item][0] + '_lon = %3.7f' % lon,
                fld2drupal[item][0] + '_left = %3.7f' % lon,
                fld2drupal[item][0] + '_right = %3.7f' % lon,
                fld2drupal[item][0] + "_geohash = '%s'" % str(pygeohash(lat, lon, precision=12)),
            ]
            update = "UPDATE %s SET %s WHERE entity_id = %d" % (prepend + fld2drupal[item][0], ','.join(flds), nid)
        elif fld2drupal[item][1] == 'value':  ### value type
            update = "UPDATE %s SET %s = %s WHERE entity_id = %d" % (prepend + fld2drupal[item][0], 'field_' + fld2drupal[item][0] + '_value', value, nid)
        elif fld2drupal[item][1] == 'tid':  ### tid type
            qry = WebDB.db_query("SELECT tid FROM taxonomy_term_data WHERE name = %s" % value, True)
            if not len(qry) or not len(qry[0]):  # unknown term, insert new one
                MyLogger.log(modulename, 'WARNING', 'Add to Web Drupal DB taxonomy term %s' % value)
                # BUG FIX: the INSERT named 5 columns for 4 values AND its format
                # string had two %s placeholders for a single argument (TypeError).
                # Insert into the taxonomy table queried above: 4 columns / 4 values.
                update = "INSERT INTO taxonomy_term_data (vid,name,description,weight) VALUES (8,%s,'project ID',1)" % value
                if adebug:
                    print("WebDB change to tid 1001:\n %s" % update)
                    qry = [(1001,)]
                else:
                    WebDB.db_query(update, False)
                    qry = WebDB.db_query("SELECT tid FROM taxonomy_term_data WHERE name = %s" % value, True)
                    if not len(qry) or not len(qry[0]):
                        MyLogger.log(modulename, 'ERROR', 'Failed to insert new taxonomy term %s' % value)
                        return False
            update = "UPDATE %s SET %s = %d WHERE entity_id = %d" % (prepend + fld2drupal[item][0], 'field_' + fld2drupal[item][0] + '_tid', qry[0][0], nid)
        elif fld2drupal[item][0] == 'fysiek_adres':  ### adres type
            if fld2drupal[item][1] in ['thoroughfare', 'locality']:
                # keep the node title ("street: village") in sync with the address
                qry = WebDB.db_query("SELECT field_fysiek_adres_%s FROM %s WHERE entity_id = %d" % (('locality' if fld2drupal[item][1] == 'thoroughfare' else 'thoroughfare'), prepend + fld2drupal[item][0], nid), True)
                if len(qry) and len(qry[0]):
                    if fld2drupal[item][1] == 'thoroughfare':
                        try:  # strip the house number from the street when possible
                            update = "%s: %s" % (str(qry[0][0]), str(value[1:value.rindex(' ')]))
                        except:
                            update = "%s: %s" % (str(qry[0][0]), str(value[1:-1]))
                    else:
                        update = "%s: %s" % (str(value[1:-1]), str(qry[0][0]))
                    update = "UPDATE node SET title = '%s' WHERE nid = %d" % (update, nid)
                    if not adebug:
                        WebDB.db_query(update, False)
            update = "UPDATE %s SET %s = %s WHERE entity_id = %d" % (prepend + fld2drupal[item][0], 'field_fysiek_adres_' + fld2drupal[item][1], value, nid)
        if adebug:
            print("WebDB change with:\n %s" % update)
        elif update:
            WebDB.db_query(update, False)
    except:
        MyLogger.log(modulename, 'DEBUG', 'Failed to update Drupal web DB with %s: query %s' % (item, update))
        MyLogger.log(modulename, 'ERROR', 'Failed to update Drupal web DB with %s: value %s' % (item, value))
        return False
    return True
def getFromWeb(nid, info, items=info_fields):
    """
    Collect meta info for Drupal node *nid* from the Drupal web database and
    merge it into the dict *info*.

    *items* is a list (or comma separated string) of logical field names,
    mapped to Drupal tables/columns via the global fld2drupal table.
    Returns True on success, False when a field or sub-lookup failed.
    """
    global Conf, prepend, fld2drupal
    selects = []; froms = []; wheres = []; address = []; fields = []; tids = []
    if not type(items) is list: items = items.split(',')
    # Split the requested items into address fields, taxonomy (tid) fields and
    # plain value/ordinate fields; only the latter are fetched by SQL here,
    # the first two are delegated to getWebAddress()/getTIDvalues() below.
    for item in items:
        if not item in fld2drupal.keys() or not fld2drupal[item][0]: continue
        if fld2drupal[item][0] == 'fysiek_adres':
            address.append(item); continue
        if fld2drupal[item][1] == 'tid':
            tids.append(item); continue
        # handle field_*_value type of info
        fields.append(item)
        if fld2drupal[item][1] == 'value':
            selects.append(prepend+fld2drupal[item][0]+'.field_'+fld2drupal[item][0]+'_'+fld2drupal[item][1])
            froms.append(prepend+fld2drupal[item][0])
            wheres.append(prepend+fld2drupal[item][0]+'.'+'entity_id = %s' % nid)
        elif fld2drupal[item][1] == 'ordinates':
            # coordinates are stored as separate lat/lon columns; fetch them as
            # one "lat,lon,0" string
            selects.append('CONCAT('+prepend+fld2drupal[item][0]+'.field_'+fld2drupal[item][0]+'_lat,",",'+prepend+fld2drupal[item][0]+'.field_'+fld2drupal[item][0]+'_lon,",0")')
            froms.append(prepend+fld2drupal[item][0])
            wheres.append(prepend+fld2drupal[item][0]+'.'+'entity_id = %s' % nid)
    # try to get them in one show if not one by one
    qry = 'SELECT '+','.join(selects)+' FROM '+','.join(list(set(froms)))+' WHERE '+' AND '.join(list(set(wheres)))
    qry = WebDB.db_query(qry, True)
    if not len(qry):  # try one by one
        MyLogger.log(modulename,'DEBUG','Failed to get info %s from Web database' % str(items))
        # build a single fake result row, filled field by field where possible
        qry = [[]]
        for indx in range(len(fields)):
            qry[0].append(None)
            nqry = WebDB.db_query('SELECT %s FROM %s WHERE %s' % (selects[indx],froms[indx],wheres[indx]),True)
            if len(nqry) and len(nqry[0]):
                qry[0][indx] = nqry[0][0]
    # copy the query results into info, translating indexed values and booleans
    for indx in range(len(fields)):
        try:
            if len(fld2drupal[fields[indx]]) > 3: # indexed value
                # a 4th fld2drupal element maps the stored int to a symbolic value
                try:
                    info[fields[indx]] = fld2drupal[fields[indx]][3][int(qry[0][indx])]
                except:
                    MyLogger.log(modulename,'ERROR','Unexpected index %s for field %s.'
                        % (qry[0][indx],fields[indx]))
                    # fall back to the raw stored value
                    info[fields[indx]] = qry[0][indx]
            elif type(qry[0][indx]) is int:
                # ints 0/1 are treated as booleans, larger ints pass through
                if qry[0][indx] <= 1:
                    info[fields[indx]] = (True if qry[0][indx] else False)
                else:
                    info[fields[indx]] = qry[0][indx]
            else:
                info[fields[indx]] = qry[0][indx]
        except:
            MyLogger.log(modulename,'ERROR','Failed to import info %s from Web database' % fields[indx])
            return False
    if 'GPS' in info.keys():
        # keep the raw "lat,lon,alt" string as 'coordinates' and expand GPS
        # into a dict; LAT/LON are presumably module-level index constants --
        # TODO(review): confirm their values match the CONCAT order above.
        if not 'coordinates' in info.keys(): info['coordinates'] = info['GPS']
        # NOTE(review): 'ord' shadows the builtin of the same name
        ord = [float(x) for x in info['GPS'].split(',')]
        info['GPS'] = { 'longitude': ord[LON], 'latitude': ord[LAT], 'altitude': None }
    for item in ['first','datum']:
        # date strings from the DB are converted to timestamps
        if item in info.keys() and (not type(info[item]) is int):
            info[item] = DateTranslate(info[item])
    if not 'description' in info.keys() or not info['description']:
        # synthesize a description from the attached hardware module names
        hw = []
        for item in ['meteo','dust','gps']:
            if item in info.keys() and info[item]: hw.append(info[item].upper())
        hw.sort()
        if len(hw): info['description'] = ";hw: %s,TIME" % ','.join(hw)
    # delegated lookups for taxonomy terms and physical address fields
    if len(tids) and not getTIDvalues(nid,tids,info): return False
    if len(address) and not getWebAddress(nid,address,info): return False
    # normalise SQL NULL-ish strings to Python None
    for key,value in info.items():
        if value == 'None' or value == 'NULL': info[key] = None
    return True
def getUserIdList(active):
    """
    Return the USER_IDs of all USERLIST rows whose IS_ACTIVE flag equals
    *active* (as returned by executeSelect).
    """
    MyLogger.logMsg("get user id")
    # NOTE(review): the SQL is built by string interpolation -- acceptable for
    # an internal flag value, but parameterize if *active* can come from users.
    select_stmt = "SELECT USER_ID FROM USERLIST WHERE IS_ACTIVE = '%s'" % (active)
    # renamed from 'list' to avoid shadowing the builtin
    rows = executeSelect(select_stmt)
    MyLogger.logMsg(rows)
    return rows
if sys.argv[i] in ['--debug', '-d']: # do not change DB values debug = True; continue if sys.argv[i] in ['--time', '-t']: # minimal modification diff in time if sys.argv[i].find('-t') == 0 and len(sys.argv[i]) > 2: changeTime = int(sys.argv[i][2:]) else: changeTime = int(sys.argv[i+1]); i += 1 continue if sys.argv[i] in ['--interact', '-i']: # interactive synchronization mode interact = True; continue if sys.argv[i] in ['--all', '-a']: # if in interactive mode also interact on not different values interactAll = True; continue if sys.argv[i] and sys.argv[i][-5:].lower() == '.json': # import json node file into Air Quality DB nodes = True try: nodesImport(sys.argv[i]) except: print("Failed to import json nodes file: " % sys.argv[i]) else: KitSelections.append(sys.argv[i]) if not nodes: if not len(KitSelections): KitSelections = [ '.*' ] else: MyLogger.log(modulename,'ATTENT',"Limit kit synchronisation to kits matching: %s" % ('('+'|'.join(KitSelections)+')')) for i in range(len(KitSelections)): # select which kits to synchronize KitSelections[i] = re.compile('^'+KitSelections[i]+'$', re.I) try: syncWebDB() # synchronize Drupal Web DB kit info with Air Qual except: print("FAIL to synchronize Drupal web kit info with Air Qual DB") EXIT(1) EXIT(0)
import MyLogger import scriptingClasses as sc """ # Comment on documentation: # When reading the doc strings if "Pre:" is present then this stands for "precondition", or the conditions in order to invoke something. # Oppositely, "Post:" stands for "postcondition" and states what is returned by the method. """ __author__ = "Tristan J. Hillis" ## Global Variables app = None # reference to Evora app port_dict = {} # dictionary storing different connections that may be open. logger = MyLogger.myLogger("photoAcquisitionGUI.py", "client") ## Getting to parents (i.e. different classes) # Three parents will get to the Evora class and out of the notebook # Frame class. class Evora(wx.Frame): def __init__(self): wx.Frame.__init__(self, None, -1, "Evora Acquisition GUI", size=(600, 450)) self.protocol = None # client protocol self.connection = None self.connected = False # keeps track of whether the gui is connect to camera self.active_threads = {} # dict of the active threads
def PubOrSub(topic,option):
    """
    Subscribe to an MQTT *topic* on the broker configured in the global Conf
    and wait (up to Conf['timeout'] seconds) for a telegram to arrive.

    Returns the oldest queued telegram dict ({'topic':..., 'payload':...}) or
    None; raises IOError on connect/subscribe failures or timeout.
    The paho-mqtt client handle is cached in Conf['fd'] between calls.
    """
    global Conf, waiting, mid, telegrams, PingTimeout, ErrorCnt
    waiting = False
    mid = None
    telegram = None
    def on_connect(client, obj, rc):
        # CONNACK callback: rc != 0 means the broker refused the connection
        global waiting
        if rc != 0:
            MyLogger.log(modulename,'ERROR',"Connection error nr: %s" % str(rc))
            Conf['input'] = False
            waiting = False
            if 'fd' in Conf.keys():
                Conf['fd'] = None
            raise IOError("MQTTsub connect failure.")
        else:
            MyLogger.log(modulename,'DEBUG',"Connected.")
            pass
    def on_message(client, obj, msg):
        # queue incoming telegrams; drop them when the buffer is full
        global waiting, telegrams
        waiting = False
        #MyLogger.log(modulename,'DEBUG','MQTTsub msg: topic: ' + msg.topic + ", qos: " + str(msg.qos) + ", msg: " + str(msg.payload))
        try:
            if len(telegrams) > 100:    # 100 * 250 bytes
                MyLogger.log(modulename,'ERROR',"Input buffer is full.")
                return
            telegrams.append( {
                'topic': msg.topic,
                'payload': str(msg.payload),
            })
        except:
            MyLogger.log(modulename,'DEBUG','In message.')
    def on_subscribe(client, obj, MiD, granted_qos):
        # SUBACK callback: remember the message id for error reporting
        global waiting, mid
        mid = MiD
        MyLogger.log(modulename,'DEBUG',"mid: " + str(mid) + ",qos:" + str(granted_qos))
    def on_log(client, obj, level, string):
        # paho log hook: used as a ping/pong watchdog for dead connections
        global PingTimeout, Conf, ErrorCnt
        # MyLogger.log(modulename,'DEBUG',"log: %s" % string)
        if string.find('PINGREQ') >= 0:
            if not PingTimeout:
                PingTimeout = int(time())
                #MyLogger.log(modulename,'DEBUG',"log: ping")
            elif int(time())-PingTimeout > 10*60: # should receive pong in 10 minutes
                MyLogger.log(modulename,'ATTENT',"Ping/pong timeout exceeded.")
                # NOTE(review): nesting of this recovery block is a best guess
                # from mangled whitespace -- confirm against the original file.
                if ('fd' in Conf.keys()) and (Conf['fd'] != None):
                    Conf['fd'].disconnect()
                    waiting = False
                    Conf['registrated'] = False
                    del Conf['fd']
                    ErrorCnt += 1
                    PingTimeout = 0
        elif string.find('PINGRESP') >= 0:
            if int(time())-PingTimeout != 0:
                MyLogger.log(modulename,'DEBUG',"Log: ping/pong time: %d secs" % (int(time())-PingTimeout))
            PingTimeout = 0
        else:
            MyLogger.log(modulename,'DEBUG',"Log: %s..." % string[:17])
    def on_disconnect(client, obj, MiD):
        # drop the cached client handle so the next call reconnects
        global waiting, mid, Conf
        waiting = False
        if 'fd' in Conf.keys():
            Conf['fd'] = None
        mid = MiD
        MyLogger.log(modulename,'DEBUG',"Disconnect mid: " + str(mid) )
        raise IOError("MQTTsub: disconnected")
    try:
        if (not 'fd' in Conf.keys()) or (Conf['fd'] == None):
            # first use (or after a disconnect): build and start the client
            Conf['fd'] = mqtt.Client(Conf['prefix']+Conf['apikey'])
            Conf['fd'].on_connect = on_connect
            Conf['fd'].on_disconnect = on_disconnect
            try:
                # only install the verbose log hook at NOTSET/DEBUG log level
                ['NOTSET','DEBUG'].index(MyLogger.Conf['level'])
                Conf['fd'].on_log = on_log
            except:
                pass
            if ('user' in Conf.keys()) and Conf['user'] and ('password' in Conf.keys()) and Conf['password']:
                Conf['fd'].username_pw_set(username=Conf['user'],password=Conf['password'])
            #Conf['fd'].connect(Conf['hostname'], port=Conf['port'], keepalive=60)
            Conf['fd'].connect(Conf['hostname'], Conf['port'])
            Conf['fd'].on_subscribe = on_subscribe
            Conf['fd'].on_message = on_message
            Conf['fd'].loop_start()
            Conf['fd'].subscribe(topic, qos=Conf['qos'])
        # poll until a telegram arrived or the timeout expired
        timeout = time() + Conf['timeout']
        waiting = True
        while waiting:
            if time() > timeout:
                break
            if len(telegrams):
                waiting = False
                break
            sleep(1)  # maybe it should depend on timeout
        # Conf['fd'].disconnect()
        #Conf['fd'].loop_stop()
    except:
        MyLogger.log(modulename,'ERROR',"Failure type: %s; value: %s. MQTT broker aborted." % (sys.exc_info()[0],sys.exc_info()[1]) )
        Conf['output'] = False
        del Conf['fd']
        raise IOError("%s" % str(mid))
        return telegram  # NOTE(review): unreachable after the raise above
    if waiting:
        # loop ended by timeout, not by an arriving telegram
        MyLogger.log(modulename,'ATTENT',"Sending telegram to broker")
        raise IOError("%s" % str(mid))
        return telegram  # NOTE(review): unreachable after the raise above
    MyLogger.log(modulename,'DEBUG',"Received telegram from broker, waiting = %s, message id: %s" % (str(waiting),str(mid)) )
    if len(telegrams):
        return telegrams.pop(0)
    return telegram
def getdata():
    """
    Fetch the next sensor telegram from the MQTT broker and return it as
    {'register': <metadata dict>, 'data': <payload data>}; returns {} when
    input is disabled or a (re)try failed.

    Maintains the global ErrorCnt back-off and (re)registers the broker
    session in Conf on first use or after repeated errors.  Malformed
    telegrams are skipped by recursing into getdata() again.
    """
    global Conf, ErrorCnt
    if (not Conf['input']):
        sleep(10)
        return {}
    if ErrorCnt:
        # back-off: linear up to 20 errors, then disconnect and wait minutes
        if ErrorCnt > 20:
            Conf['registrated'] = None
            if ('fd' in Conf.keys()) and (Conf['fd'] != None):
                try:
                    Conf['fd'].disconnect()
                    Conf['fd'] = None
                except:
                    pass
            sleep((ErrorCnt-20)*60)
        else:
            sleep(ErrorCnt)
    if (not 'registrated' in Conf.keys()) or (Conf['registrated'] == None):
        # (re)initialise the broker session configuration
        if 'registrated' in Conf.keys():
            MyLogger.log(modulename,'ATTENT',"Try to reconnect to broker.")
        if (not 'projects' in Conf.keys()) or (not len(Conf['projects'])):
            Conf['projects'] = ['ALL']
        if (not 'topic' in Conf.keys()) or (Conf['topic'] == None):
            Conf['topic'] = 'IoS'
        for key in ['user','password','hostname']:
            if (not key in Conf.keys()) or (Conf[key] == None):
                Conf['input'] = False
                MyLogger.log(modulename,'FATAL',"Missing login %s credentials." % key)
        try:
            # NOTE(review): the ['ALL'] default above is a *list*; re.compile on
            # a list raises and lands in the FATAL branch -- confirm whether
            # Conf['projects'] is expected to be a pattern string here.
            Conf['projects'] = re.compile(Conf['projects'], re.I)
            Conf['serials'] = re.compile(Conf['serials'], re.I)
        except:
            MyLogger.log(modulename,'FATAL','Regular expression for project or serial.')
        Conf['registrated'] = True
    try:
        msg = PubOrSub(Conf['topic']+"/#", None)
        if msg == None:
            ErrorCnt += 1
            return {}
        ErrorCnt = 0
        msg['topic'] = msg['topic'].split('/')
        msg['payload'] = json.loads(msg['payload'])
    except IOError as e:
        if ErrorCnt > 40:
            MyLogger.log(modulename,'FATAL',"Subscription failed Mid: %s. Aborted." % e)
        ErrorCnt += 1
        MyLogger.log(modulename,'WARNING',"Subscription is failing Mid: %s. Slowing down."
            % e)
        # NOTE(review): if MyLogger's FATAL does not exit, execution falls
        # through with 'msg' unbound (NameError below) -- confirm intent.
    # topic must be <Conf['topic']>/<project>/<serial> with a metadata payload
    if (len(msg['topic']) < 3) or (msg['topic'][0] != Conf['topic']) or (not type(msg['payload']) is dict) or (not 'metadata' in msg['payload'].keys()):
        sleep(0.1)
        return getdata()
    msg['project'] = msg['topic'][1]
    msg['serial'] = msg['topic'][2]
    # check the pay load
    if not type(msg['payload']) is dict:
        sleep(0.1)
        return getdata()
    if not 'metadata' in msg['payload'].keys():
        msg['metadata'] = { 'project': msg['project'], 'serial': msg['serial']}
    else:
        msg['metadata'] = msg['payload']['metadata']
    if not 'data' in msg['payload'].keys():
        msg['payload']['data'] = None
    # validate identification
    # TO DO: check serial to api key (mqtt broker checks user with project/serial)
    for key in ['project','serial']:
        if (not Conf[key+'s'].match(msg[key])) or (not key in msg['payload']['metadata'].keys()) or (msg[key] != msg['payload']['metadata'][key]):
            MyLogger.log(modulename,'WARNING',"Not a proper telegram. Skipped.")
            sleep(0.1)
            return getdata()
    return { 'register': msg['metadata'], 'data': msg['payload']['data'] }
test_py_content = test_py.read(os.path.getsize(file_path)) try: if re.search(r'# <test_cases_block>', test_py_content): old_test_cases = re.search(r'(?s)( def test.*)\r?\n# <test_cases_block>', test_py_content).group(1).replace('\\', '\\\\') else: old_test_cases = re.search(r'(?s)( def test.*)if __name__ == "__main__":', test_py_content).group(1).replace('\\', '\\\\') old_test_cases = old_test_cases.rstrip('\r\n ') # Updating QUERY names according to the test.py_template file old_test_cases = re.sub(r'self\.[^_]+_QUERY', 'self.SEARCH_QUERY', old_test_cases) if old_test_cases: file_altering(log, pattern_dir_name, r'# <test_cases_block>', old_test_cases + '\r\n# <test_cases_block>') except AttributeError as err: log.error('No match found for test case functions ending in \'test.py\' file: ' + str(err)) except IOError as err: log.error('No \'test.py\' was found to obtain existent test case functions from it: ' + str(err)) except EnvironmentError as err: log.error(str(err)) return None if __name__ == '__main__': log = MyLogger.create_logger('WebsphereAppServer', 3) # f = open('/usr/tideway/testdir/WebSphereAppServer/tests/searchstring', 'r') # add_test_case(log, 'WebSphereAppServer', ['172.22.90.121', '172.22.90.140'], 'Windows_additional_tc', # ['search SoftwareInstance where type has substring "IBM WebSphere Application Server" order by name show name,type, count, version, product_version, edition, service_pack, install_root, server_name, cell_name, node_name, jmx_enabled, jmx_port, profile_dir']) pre_format_template(log, 'WebsphereAppServer') add_old_test_cases(log, 'WebsphereAppServer')
def syncWebDB():
    """Synchronise measurement-kit meta data between the Drupal web DB and the air-quality (AQ) data DB.

    For every kit node on the web side (getMeetKits) the Sensors table is
    consulted for the kit's latest 'datum' timestamp; kits unknown to the data
    DB are inserted via insertNewKit.  When the web 'changed' time and the DB
    'datum' differ by at least changeTime seconds, both sides' info records are
    fetched, diffed with diffInfos (direction chosen by which side is newer)
    and the stale side is updated.  Returns True; a failed DB connect
    terminates the process via EXIT(1).
    """
    global Conf, KitSelections, modulename
    global DustTypes, MeteoTypes, GpsTypes
    #if Conf['log']:
    #    WebDB.Conf['log'] = MyDB.Conf['log'] = Conf['log']
    #    WebDB.Conf['print'] = MyDB.Conf['print'] = Conf['print']
    #    #ThreadStops.append(Conf['log'].stop)
    if not WebDB.db_connect():  # credentials from WEB ENV
        sys.stderr.write("Cannot connect to %s database" % Conf['WEBdatabase'])
        EXIT(1)
    if not MyDB.db_connect():  # credentials from DB ENV
        sys.stderr.write("Cannot connect to %s database" % Conf['database'])
        EXIT(1)
    WebMeetkits = getMeetKits()
    for indx in range(len(WebMeetkits)):
        # newest registration timestamp for this kit in the data DB (active rows first)
        qry = MyDB.db_query("SELECT UNIX_TIMESTAMP(datum) FROM Sensors WHERE project = '%s' AND serial = '%s' ORDER BY active DESC, datum DESC LIMIT 1"% (WebMeetkits[indx]['project'],WebMeetkits[indx]['serial']), True)
        if not len(qry):
            MyLogger.log(modulename,'ATTENT','Sensor kit from Drupal node %d with project %s,serial %s is not defined in Sensors/TTNtable table(s).' % (WebMeetkits[indx]['nid'],WebMeetkits[indx]['project'],WebMeetkits[indx]['serial']))
            WebMeetkits[indx]['datum'] = insertNewKit(WebMeetkits[indx])
        else:
            WebMeetkits[indx]['datum'] = qry[0][0]
        # WebMeetkits list of dict nid, changed, project, serial, Sensors datum keys
        MyLogger.log(modulename,'DEBUG','Kit Drupal node nid %d dated %s AND data DB kit %s_%s dated %s.'
            % (WebMeetkits[indx]['nid'],datetime.datetime.fromtimestamp(WebMeetkits[indx]['changed']).strftime("%Y-%m-%d %H:%M"),WebMeetkits[indx]['project'],WebMeetkits[indx]['serial'],datetime.datetime.fromtimestamp(WebMeetkits[indx]['datum']).strftime("%Y-%m-%d %H:%M")))
        if WebMeetkits[indx]['datum'] and (abs(WebMeetkits[indx]['datum']-WebMeetkits[indx]['changed']) >= changeTime):
            try:  # modification times bigger as 5 minutes
                WebInfo = {}
                # info fields:
                # "TTN_id", "project", "GPS", "coordinates", "label", "serial",
                # "street","pcode", "municipality", "village", "province",
                # "first", "comment",
                # "AppEui", "DevEui", "AppSKEY",
                # "DevAdd", "NwkSKEY",
                # "meteo", "dust", "gps", "description",
                # "notice",
                # "active", "luftdaten.info", "luftdaten", "luftdatenID",
                try:
                    if not getFromWeb( WebMeetkits[indx]['nid'],WebInfo):
                        # NOTE(review): 'nid' is undefined here (NameError), so this
                        # warning never logs -- the error is swallowed by the bare
                        # except below; presumably WebMeetkits[indx]['nid'] was
                        # intended.  TODO confirm and fix.
                        MyLogger.log(modulename,'WARNING',"Unable to get Drupal meta data info for nid %d." % nid)
                except: pass
                AQinfo = {}
                try:
                    if not getFromAQ(WebMeetkits[indx]['project'],WebMeetkits[indx]['serial'],AQinfo):
                        MyLogger.log(modulename,'WARNING',"Unable to get AQ meta data info for kit projec %s serial %s."
                            % (WebMeetkits[indx]['project'], WebMeetkits[indx]['serial']))
                except: pass
                if 'description' in AQinfo.keys():
                    # map hardware items from the free-text description onto the
                    # dust/meteo/gps fields; matching is on the first 3 chars, upper cased
                    for item in AQinfo['description'].replace(';hw:','').strip().split(','):
                        if item.strip()[:3].upper() in DustTypes:
                            AQinfo['dust'] = item.strip().upper()
                        elif item.strip()[:3].upper() in MeteoTypes:
                            AQinfo['meteo'] = item.strip().upper()
                        elif item.strip()[:3].upper() in GpsTypes:
                            # only one product
                            AQinfo['gps'] = 'NEO-6'
                if WebMeetkits[indx]['changed'] > WebMeetkits[indx]['datum']:
                    # web side is newer: propagate Drupal info towards the AQ data DB
                    (WebInfo,AQinfo) = diffInfos(WebInfo,AQinfo,True,'\033[32mweb Drupal DB\033[0m info (nid %d) >> \033[31mAQ data DB\033[0m info (%s,%s)' % (WebMeetkits[indx]['nid'],WebMeetkits[indx]['project'],WebMeetkits[indx]['serial']),"%34.34s >> %s" % (datetime.datetime.fromtimestamp(WebMeetkits[indx]['changed']).strftime("%Y-%m-%d %H:%M"),datetime.datetime.fromtimestamp(WebMeetkits[indx]['datum']).strftime("%Y-%m-%d %H:%M")))
                else:
                    # data DB side is newer: propagate AQ info towards the web DB
                    (WebInfo,AQinfo) = diffInfos(WebInfo,AQinfo,False,'\033[31mweb Drupal DB\033[0m info (nid %d) << \033[32mAQ data DB\033[0m info (%s,%s)' % (WebMeetkits[indx]['nid'],WebMeetkits[indx]['project'],WebMeetkits[indx]['serial']),"%34.34s << %s" % (datetime.datetime.fromtimestamp(WebMeetkits[indx]['changed']).strftime("%Y-%m-%d %H:%M"),datetime.datetime.fromtimestamp(WebMeetkits[indx]['datum']).strftime("%Y-%m-%d %H:%M")))
                # may need to return also a dict with fields/values which are not change but present
                if len(WebInfo):
                    for (item,value) in WebInfo.items():
                        MyLogger.log(modulename,'INFO',"Synchronize web Drupal DB node %d: %s: '%s'."
                            % (WebMeetkits[indx]['nid'],item,str(value)))
                        setWebValue(WebMeetkits[indx]['nid'],item,value,adebug=debug)
                    # bump the Drupal node's changed timestamp after the updates
                    if not debug:
                        WebDB.db_query("UPDATE node SET changed = UNIX_TIMESTAMP(now()) WHERE nid = %d" % WebMeetkits[indx]['nid'], False)
                if len(AQinfo):
                    # make sure the kit identification is present before the DB update
                    if not 'project' in AQinfo.keys(): AQinfo['project'] = WebMeetkits[indx]['project']
                    if not 'serial' in AQinfo.keys(): AQinfo['serial'] = WebMeetkits[indx]['serial']
                    MyLogger.log(modulename,'INFO',"Synchronize web Air Quality DB meta info: %s" % str(AQinfo))
                    MyDB.putNodeInfo(AQinfo,adebug=debug)
            except Exception as e:
                sys.stderr.write("%s: While handling %s/%s, exception error: %s\n" % (modulename,WebMeetkits[indx]['project'],WebMeetkits[indx]['serial'],str(e)))
    return True
subprocess.call('/usr/tideway/bin/tw_scan_control -p System2$ --add ' + ' '.join(ip_addresses) + ' 1>/dev/null', shell=True) # Need to wait until the scan is completed before staring other activities while True: time.sleep(10) if subprocess.check_output('/usr/tideway/bin/tw_scan_control -p System2$ --list', shell=True) == 'No scan ranges\n': break # Adding the scan results into 'test.py' GenerateTestPy.add_test_case(log, pattern_dir_name, ip_addresses, test_case_name, queries_list) # Creating dml file of the record data SetEnvironment.generate_dml(log, pattern_dir_name, test_case_name) else: log.warn('No IP addresses were extracted for \'' + test_case_name + '\'. It will be skipped.') # Deleting the TC folder if should_remove_tc_folder: shutil.rmtree(test_case_dir) except (IOError, NameError, OSError) as err: log.error('Got an exception after unpacking the TC zip folder: ' + str(err)) return None if __name__ == '__main__': log = MyLogger.create_logger('HelpSystemsStandGuardAnti-Virus', 3) run_test(log, 'Unix_active_versioning', 'HelpSystemsStandGuardAnti-Virus', ['SEARCH SoftwareInstance WHERE type = "HelpSystems StandGuard Anti-Virus" ORDER BY key SHOW name, type, version, product_version', 'SEARCH SoftwareInstance WHERE type = "HelpSystems StandGuard Anti-Virus" TRAVERSE InferredElement:Inference:Contributor:DiscoveredCommandResult ORDER BY cmd SHOW cmd', 'SEARCH SoftwareInstance WHERE type = "HelpSystems StandGuard Anti-Virus" TRAVERSE InferredElement:Inference:Contributor:DiscoveredFile ORDER BY path SHOW path', 'SEARCH SoftwareInstance WHERE type = "HelpSystems StandGuard Anti-Virus" TRAVERSE ElementWithDetail:Detail:Detail:Detail WHERE type = "Licensing Detail" ORDER BY name SHOW name'])
if __name__ == "__main__": # Read in a csv file and call analyze import argparse import MyYAML import MyLogger parser = argparse.ArgumentParser() parser.add_argument("yaml", metavar="fn.yml", help="YAML file to load") parser.add_argument("csv", metavar="fn.csv", help="CSV file to load") parser.add_argument("--seed", type=int, help="Random number generator seed, 32 bit int") parser.add_argument("--save", action="store_true", help="Should CSV output files be generated?") parser.add_argument("--plot", action="store_true", help="Plot z versus t") MyLogger.addArgs(parser) args = parser.parse_args() logger = MyLogger.mkLogger(args, __name__, "%(asctime)s: %(levelname)s - %(message)s") data = MyYAML.load(args.yaml, logger) pos = pd.read_csv(args.csv) (output, summary) = analyze(args.csv, data, pos, data["depth"], logger) if args.save: saveCSV(args.csv, output, "analysis") saveCSV(args.csv, summary, "summary") if args.plot:
# 'rssi': None, # if wifi provide signal strength # 'last': None, # last time checked to throddle connection info 'fields': ['rssi'], # strength or signal level 'units': ['dBm'], # per field the measurement unit 'raw': False, # no raw measurements displayed by dflt } try: import MyLogger import re # import subprocess, threading import subprocess from threading import Timer from time import time except ImportError as e: MyLogger(modulename, 'FATAL', "Unable to load module %s" % e) class Command(object): def __init__(self, cmd): self.cmd = cmd self.process = None def run(self, timeout): stdout = '' def target(): global stdout print 'Thread started' self.process = subprocess.Popen(self.cmd, shell=True,
def publish(**args):
    """Send a measurement record to the display service; register the sensor on first use.

    Expects keyword arguments 'data' (dict of sensor item -> value) and
    'ident' (dict with parallel 'fields'/'units'/'types' lists).  Builds four
    aligned display lines (sensor type, DB name, unit, value), prepends a
    timestamp header line and ships them via displayMsg().  A 'text' key in
    data overloads the call to show plain text lines instead.
    Returns True on success, False on registration/display failure, None when
    output is disabled.
    """
    global Conf, ErrorCnt
    if (not 'output' in Conf.keys()) or (not Conf['output']):
        return
    for key in ['data', 'ident']:
        if not key in args.keys():
            # NOTE(review): assumes MyLogger FATAL aborts the process -- confirm
            MyLogger.log(modulename, 'FATAL', "Publish call missing argument %s." % key)

    # TO DO: get the translation table from the MySense.conf file
    def trans(name):
        """Translate a DB/unit name via the optional Conf['match'] tuple list."""
        global Conf
        if (not 'match' in Conf.keys()) or (not type(Conf['match']) is list):
            return name
        for item in Conf['match']:
            if not type(item) is tuple:
                continue
            if name.find(item[0]) < 0:
                continue
            name = name.replace(item[0], item[1])
        return name

    def findInfo(ident, field):
        """Return (unit, sensor type) for a data field; empty strings when unknown."""
        UT = ['', '']  # (unit, sensor type)
        try:
            indx = ident['fields'].index(field)
            UT[0] = ident['units'][indx]
            UT[1] = ident['types'][indx].upper()
        except:
            pass
        finally:
            return (UT[0], UT[1])

    if Conf['fd'] is None:
        Conf['registrated'] = None
        if not db_registrate(args['ident']):
            MyLogger.log(modulename, 'WARNING', "Unable to registrate the sensor.")
            return False
    if Conf['fd'] is None:
        return False

    if 'text' in args['data'].keys():  # special case, overloading this method
        # show a text line as eg starting up sensor X
        if type(args['data']['text']) is str:
            # BUG FIX: was args['text'], which raises KeyError -- the text
            # lives under args['data']['text']
            displayMsg(['<clear>' + args['data']['text']])
        elif type(args['data']['text']) is list:
            lines = []
            for string in args['data']['text']:
                if type(string) is str:
                    lines.append(string)
            if len(lines):
                try:
                    displayMsg(lines)
                except:
                    MyLogger.log(modulename, 'ERROR', 'Unable to send text to display service.')
                    return False
        return True

    lines = ['', '', '', '']  # sensor type, DB name, unit, value
    for item in args['data'].keys():
        if item in Conf['omit']:
            continue
        if type(args['data'][item]) is list:
            MyLogger.log(modulename, 'WARNING', "Found list for sensor %s." % item)
            continue
        if args['data'][item] is None:
            continue
        Unit, Type = findInfo(args['ident'], item)
        if Type[0:3] == 'gps':
            continue  # do not display geo location
        bar = ''
        if len(lines[0]):
            bar = '|'  # column separator once the first column is in place
        thisValue = args['data'][item]
        # BUG FIX: was "thisValue is float" (identity check against the float
        # class itself, always False); floats are now really limited to one decimal
        if isinstance(thisValue, float):
            thisValue = '%.1f' % thisValue
        string = format(thisValue).replace('.0', '')
        if string[:1] == '<':  # [:1] also guards an empty value string (was string[0])
            string = 'NaN'  # not a number or string
        lines[0] += bar + '%6.6s' % Type
        lines[1] += bar + ' %5.5s' % trans(item)
        lines[2] += bar + ' %5.5s' % trans(Unit)
        lines[3] += bar + '%6.6s' % string
    # header line: current wall clock, overruled below by the sample's own timestamp
    lines.insert(0, '<clear>' + datetime.datetime.fromtimestamp(time()).strftime('%d %b %Hh%M:%S'))
    if 'time' in args['data'].keys():
        try:
            lines[0] = '<clear>' + datetime.datetime.fromtimestamp(args['data']['time']).strftime('%d %b %H:%M:%S')
        except:
            pass  # best effort: keep the wall-clock header on a bad timestamp
    try:
        displayMsg(lines)
    except:
        MyLogger.log(modulename, 'ERROR', 'Unable to send text to display service.')
        return False
    return True
def registrate():
    """Configure the I2C ADC input(s) for the Alpha Sense gas sensors and start the input thread(s).

    Parses the configured 'i2c' (address list) and 'sensitivity' (array of
    value arrays) strings, opens an ADC per configured I2C address and, on
    first use, starts one MyThreading input thread per opened ADC.
    Returns True when at least one sensor thread is running, False otherwise.
    """
    global Conf, MyThread
    if (not Conf['input']):
        return False
    if ('fd' in Conf.keys()) and (Conf['fd'] != None):
        return True  # already registrated
    for key in ['i2c', 'sensitivity']:  # handle configured arrays of values
        # e.g. "[1,2],[3,4]" -> [[1,2],[3,4]] and "0x48,0x49" -> ['0x48','0x49']
        if (key in Conf.keys()) and (type(Conf[key]) is str):
            Conf[key] = Conf[key].replace(' ', '')
            Conf[key] = Conf[key].replace('],[', '#')
            Conf[key] = Conf[key].replace('[', '')
            Conf[key] = Conf[key].replace(']', '')
            if key == 'i2c':
                Conf[key] = Conf[key].split(',')
            else:
                Conf[key] = Conf[key].split('#')
                for i in range(0, len(Conf[key])):
                    Conf[key][i] = [int(a) for a in Conf[key][i].split(',')]
    Conf['input'] = False
    Conf['fd'] = []
    for gas in range(0, len(Conf['i2c'])):
        Conf['fd'].append(None)
        if not int(Conf['i2c'][gas], 0) in [0x48]:  # address pin 0-2 to null, read
            MyLogger.log(
                modulename, 'ERROR',
                "I2C address %s not correct. Disabled." % Conf['i2c'][gas])
            return False
        try:
            Conf['fd'][gas] = ADC(address=int(Conf['i2c'][gas], 0))
        except IOError:
            MyLogger.log(modulename, 'WARNING', 'Try another I2C address.')
            continue
        Conf['input'] = True
    cnt = 0
    if not len(MyThread):  # only the first time
        for thread in range(0, len(Conf['fd'])):
            if Conf['fd'][thread] is None:
                continue
            # BUG FIX: the original indexed Conf['fields'] and MyThread with the
            # stale loop variable 'gas' left over from the ADC loop above, which
            # names every thread after the last sensor and can raise IndexError
            # when earlier fds were skipped; use the current index and the
            # just-created thread object instead.
            thr = MyThreading.MyThreading(
                bufsize=int(Conf['bufsize']),
                interval=int(Conf['interval']),
                name='Alpha Sense %s sensor' % Conf['fields'][thread].upper(),
                callback=Add,
                conf=Conf,
                sync=Conf['sync'],
                DEBUG=Conf['debug'])
            MyThread.append(thr)
            # first call is interval secs delayed by definition
            try:
                if thr.start_thread():
                    cnt += 1
                    continue
            except:
                MyThread[-1] = None  # mark this thread slot as dead
    if not cnt:
        MyLogger.log(
            modulename, 'ERROR',
            "Unable to registrate/start AlphaSense sensors thread(s).")
        Conf['input'] = False
        return False
    return True
continue if self.__patternEnabled: try: info = self.__loadDB(dbName) (goto, maxDist) = self.__mkGoto(info) for thr in threads: thr.put(args.glider, goto, maxDist) except: logger.exception("Exception while updating") q.task_done() if __name__ == "__main__": import MyLogger parser = argparse.ArgumentParser(description="Glider Updater") parser.add_argument("--glider", type=str, required=True, help="Name of glider") Update.addArgs(parser) MyLogger.addArgs(parser) args = parser.parse_args() logger = MyLogger.mkLogger(args) logger.info("args=%s", args) update = Update(args, logger) update.start() # Start the update thread try: update.waitToFinish() except: logger.exception("Unexpected Exception")
subprocess.call( '/usr/tideway/bin/tw_pattern_management -p System2$ --install ' + f + ' 1>/dev/null', shell=True) except IOError as err: log.error('TPLPreprocessor error: ' + str(err)) # Activating uploaded patterns log.debug('Activating patterns') subprocess.call( '/usr/tideway/bin/tw_pattern_management -p System2$ --activate-all', shell=True) # Removig temporary patterns folder shutil.rmtree(tmp_patterns_dir) else: # If the new patterns upload was suppressed then parsed_includes contain *.tplpre names instead of *.tpl. # Normalizing... parsed_includes = map(lambda x: x.replace('tplpre', 'tpl'), parsed_includes) return includes, parsed_includes if __name__ == '__main__': log = MyLogger.create_logger('WebsphereAppServer', 3) includes, parsed_includes = action( log, '/usr/PerforceCheckout/tkn_main/tku_patterns/CORE', '/usr/PerforceCheckout/tkn_main', 'WebsphereAppServer', True, 'tpl110') print includes print parsed_includes
def get_device():
    """Locate and open the serial USB device of the PMS7003 dust sensor.

    Searches /dev/serial/by-id for a device matching Conf['usbid'], normalises
    the operational Conf entries ('interval', 'sample', 'debug', 'rawCnt') to
    ints, and opens the port at 9600 8N1 into Conf['fd'].
    Returns True on success, False otherwise.
    """
    global Conf
    if Conf['fd'] != None:
        return True  # already open
    if Conf['usbid']:
        serial_dev = None
        if Conf['usbid'] != None:  # try serial with product ID
            byId = "/dev/serial/by-id/"
            if not os.path.exists(byId):
                MyLogger.log(modulename, 'FATAL', "There is no USBserial connected. Abort.")
            device_re = re.compile(
                ".*%s\d.*(?P<device>ttyUSB\d+)$" % Conf['usbid'], re.I)
            devices = []
            try:
                # list the by-id symlinks and pick the first matching ttyUSB device
                df = subprocess.check_output(["/bin/ls", "-l", byId])
                for i in df.split('\n'):
                    if i:
                        info = device_re.match(i)
                        if info:
                            dinfo = info.groupdict()
                            serial_dev = '/dev/%s' % dinfo.pop('device')
                            break
            except CalledProcessError:
                MyLogger.log(modulename, 'ERROR', "No serial USB connected.")
            except (Exception) as error:
                MyLogger.log(
                    modulename, 'ERROR',
                    "Serial USB %s not found, error:%s" % (Conf['usbid'], error))
                Conf['usbid'] = None
        if serial_dev == None:
            MyLogger.log(modulename, 'WARNING', "Please provide serial USB producer info.")
            MyLogger.log(modulename, 'FATAL', "No input stream defined.")
            return False
        # check operational arguments
        for item in ['interval', 'sample', 'debug', 'rawCnt']:
            if type(Conf[item]) is str:
                if not Conf[item].isdigit():
                    MyLogger.log(modulename, 'FATAL', '%s should be nr of seconds' % item)
                Conf[item] = int(Conf[item])
            if type(Conf[item]) is bool:
                Conf[item] = 1 if Conf[item] else 0
        MyLogger.log(
            modulename, 'INFO',
            "Sample interval cycle is set to %d seconds." % Conf['interval'])
        MyLogger.log(
            modulename, 'INFO', "(%s) values are in (%s)"
            % (','.join(Conf['fields']), ','.join(Conf['units'])))
        try:
            Conf['fd'] = serial.Serial(
                serial_dev,
                baudrate=9600,
                stopbits=serial.STOPBITS_ONE,
                parity=serial.PARITY_NONE,
                bytesize=serial.EIGHTBITS,
                timeout=2)  # read timeout 2 seconds
            if not Conf['fd'].isOpen():
                raise IOError("Unable to open USB to PMS7003")
            MyLogger.log(modulename, 'INFO', "Serial used for USB: %s" % serial_dev)
            Conf['firmware'] = ''  # completed by first reading
        except IOError as error:
            MyLogger.log(modulename, "WARNING", "Connectivity: %s" % error)
            # BUG FIX: original did Conf['fd'].device.close(), which raises
            # AttributeError (pyserial Serial has no .device) or fails on a
            # still-None fd; close safely and reset the handler instead.
            if Conf['fd'] != None:
                try:
                    Conf['fd'].close()
                except Exception:
                    pass
                Conf['fd'] = None
            return False
        except (Exception) as error:
            MyLogger.log(modulename, 'FATAL', "%s" % error)
            return False
    else:
        # BUG FIX: original called the undefined name 'Logger' here (NameError);
        # use the module logger like everywhere else in this file.
        MyLogger.log(modulename, 'ERROR', "Failed access PMS7003 module")
        return False
    return True
# $Id: MySHT31.py,v 1.2 2018/07/09 08:01:19 teus Exp teus $ """ Get measurements from SHT31 Sensirion chip via the I2C-bus. Measurements have a calibration factor (calibrated to Oregon weather station) Relies on Conf setting by main program """ modulename = '$RCSfile: MySHT31.py,v $'[10:-4] __version__ = "0." + "$Revision: 1.2 $"[11:-2] __license__ = 'GPLV4' try: from time import time, sleep from types import ModuleType as module import MyThreading # needed for multi threaded input import MyLogger except ImportError: MyLogger.log(modulename, 'FATAL', "Missing modules") # configurable options __options__ = [ 'input', 'i2c', 'type', 'calibrations', 'fields', 'units', 'raw', 'interval', 'bufsize', 'sync' ] # multithead buffer size and search for input secs Conf = { 'input': False, # no temp/humidity sensors installed 'type': 'SHT31', # type of the chip eg BME280 Bosch 'fields': ['temp', 'rh'], # temp, humidity 'units': ['C', '%'], # C Celcius, K Kelvin, F Fahrenheit, % rh 'calibrations': [[0, 1], [0, 1]], # calibration factors, here order 1 'i2c': '0x44', # I2C-bus address 'interval': 30, # read dht interval in secs (dflt)
def PMSread(conf):
    ''' Read data telegrams from the serial interface (32 bytes).

        Before the actual read, flush all pending data first.
        During the sample period: active mode (200-800 ms per telegram),
        passive mode one read cmd per second; values are averaged over the
        sample seconds.  If in passive mode with the fan off: switch the fan
        ON and wait 30 secs (handled by PassiveRead/GoActive).
        Returns a dict of averaged PM fields keyed by PM_fields names.
        Raises IOError/ValueError after 10 consecutive read/checksum/module errors.
    '''
    global ACTIVE, PASSIVE
    ErrorCnt = 0      # consecutive error counter; >= 10 aborts with an exception
    StrtTime = 0      # wall clock of the first accepted telegram
    cnt = 0           # number of accepted telegrams in this sample period
    PM_sample = {}    # accumulators per PM field
    if not 'wait' in conf.keys(): conf['wait'] = 0
    if conf['wait'] > 0: sleep(conf['wait'])  # honour the inter-sample idle time
    for fld in PM_fields:
        PM_sample[fld[0]] = 0.0
    # clear the input buffer first so we get latest reading
    conf['fd'].flushInput()
    StrtTime = 0
    LastTime = 0
    while True:
        if (conf['mode'] != ACTIVE):
            # in PASSIVE mode we wait one second per read
            if cnt:
                wait = time() - LastTime
                if (wait < 1) and (wait > 0):
                    sleep(wait)
            PassiveRead(conf)  # passive?: if fan off switch it on, initiate read
        while True:  # search header (0x42 0x4D) of data telegram
            try:
                c = conf['fd'].read(1)  # 1st byte header
                if not len(c):  # time out on read, try wake it up
                    ErrorCnt += 1
                    if ErrorCnt >= 10:
                        raise IOError("Sensor PMS connected?")
                    MyLogger.log(modulename, 'WARNING', 'Try to wakeup sensor')
                    # NOTE(review): nesting reconstructed from mangled source --
                    # on timeout in ACTIVE mode fall back to PASSIVE and try to
                    # reactivate; in PASSIVE mode re-issue the read command.
                    if conf['mode'] == ACTIVE:
                        conf['mode'] = PASSIVE
                        if GoActive(conf):
                            continue
                    else:
                        PassiveRead(conf)
                    continue
                elif ord(c[0]) == 0x42:
                    c = conf['fd'].read(1)  # 2nd byte header
                    if len(c) >= 1:
                        if ord(c[0]) == 0x4d:
                            break  # header found, telegram payload follows
            except:
                ErrorCnt += 1
                if ErrorCnt >= 10:
                    raise IOError("Sensor PMS read error.")
                continue  # try next data telegram
        if not cnt:
            StrtTime = time()
        LastTime = time()
        # packet remaining: fixed length packet structure
        buff = conf['fd'].read(30)
        if len(buff) < 30:
            MyLogger.log(modulename, "WARNING", "Read telegram timeout")
            ErrorCnt += 1
            if ErrorCnt >= 10:
                raise IOError("Sensor PMS connected?")
            continue
        # one measurement 200-800ms or every second in configured sample time
        if cnt and (StrtTime + cnt < time()):
            continue  # skip measurement if time < 1 sec
        check = 0x42 + 0x4d  # sum check every byte from HEADER to ERROR byte
        for c in buff[0:28]:
            check += ord(c)
        # 13 big-endian uint16 PM values, 2 bytes version/error, uint16 checksum
        data = struct.unpack('!HHHHHHHHHHHHHBBH', buff)
        if not sum(data[PMS_PCNT_0P3:PMS_VER]):
            # first reads show 0 particul counts, skip telegram
            # if conf['debug']: print("skip data telegram: particle counts of ZERO")
            continue
        # compare check code
        if check != data[PMS_SUMCHECK]:
            MyLogger.log(
                modulename, "ERROR",
                "Incorrect check code: received : 0x%04X, calculated : 0x%04X"
                % (data[PMS_SUMCHECK], check))
            ErrorCnt += 1
            if ErrorCnt > 10:
                raise IOError("Too many incorrect dataframes")
            continue
        if data[PMS_ERROR]:
            MyLogger.log(modulename, "WARNING",
                         "Module returned error: %s" % str(data[PMS_ERROR]))
            ErrorCnt += 1
            if ErrorCnt > 10:
                raise ValueError("Module errors %s" % str(data[PMS_ERROR]))
            continue
        if not conf['firmware']:
            # first valid telegram: remember the firmware version once
            conf['firmware'] = str(data[PMS_VER])
            MyLogger.log(
                modulename, 'INFO',
                'Device %s, firmware %s' % (conf['type'], conf['firmware']))
            # if conf['debug']:
            #     print 'Frame len [byte] :', str(data[PMS_FRAME_LENGTH])
            #     print 'Version :', str(data[PMS_VER])
            #     print 'Error code :', str(data[PMS_ERROR])
            #     print 'Check code : 0x%04X' % (data[PMS_SUMCHECK])
        sample = {}
        for fld in PM_fields:
            # concentrations in unit ug/m3
            # concentration (generic atmosphere conditions) in ug/m3
            # number of particles with diameter N in 0.1 liter air pcs/0.1dm3
            sample[fld[0]] = float(data[fld[2]])  # make it float
        if conf['debug']:
            # print a header (field names and units) once, then one value row
            # per accepted telegram
            if not cnt:
                for fld in PM_fields:
                    sys.stderr.write("%8.8s " % fld[0])
                sys.stderr.write("\n")
                for fld in PM_fields:
                    sys.stderr.write(
                        "%8.8s " % ('ug/m3' if fld[0][-4:] != '_cnt' else
                                    'pcs/0.1dm3'))
                sys.stderr.write("\n")
            for fld in PM_fields:
                sys.stderr.write("%8.8s " % str(sample[fld[0]]))
            sys.stderr.write("\n")
            #print("%s [%s]\t: " % (fld[0],'ug/m3' if fld[0][-4:] != '_cnt' else 'pcs/0.1dm3'), str(sample[fld[0]]))
        cnt += 1
        for fld in PM_fields:
            if fld[1] == 'par':  # parameter: keep latest value, do not average
                PM_sample[fld[0]] = sample[fld[0]]
            else:
                PM_sample[fld[0]] += sample[fld[0]]
        # average read time is 0.85 secs. Plantower specifies 200-800 ms
        # Plantower: in active smooth mode actual data update is 2 secs.
        if time() > StrtTime + conf['sample'] - 0.5:
            break
    SampleTime = time() - StrtTime
    if SampleTime < 0:
        SampleTime = 0
    if cnt:  # average count during the sample time
        for fld in PM_fields:
            if fld[1] == 'par':
                continue
            PM_sample[fld[0]] /= cnt
        # if conf['debug']:
        #     print("Average read time: %.2f secs, # reads %d,sample time %.1f seconds" % (SampleTime/cnt,cnt,SampleTime))
    # remaining idle time before the next sample period
    conf['wait'] = conf['interval'] - SampleTime
    if conf['wait'] < 0:
        conf['wait'] = 0
    if conf['wait'] >= 60:
        if conf['mode'] != ACTIVE:
            Standby(conf)  # switch fan OFF
    return PM_sample
sys.exit(1) # Checking that RunTH.py is running under tideway if 'tideway' not in os.popen('whoami').read(): print('ERROR \'RunTH.py\' should run under \'tideway\' user') sys.exit(1) # Setting up the environment variables environment = SetEnvironment.set_environment_variables(arguments.v) # Updating the test scripts, copying the tested pattern into isolated environment SetEnvironment.update_test_directory(arguments.v, environment['tkn_core'], environment['tkn_main'], pattern_dir_name) # Creating logger log = MyLogger.create_logger(pattern_dir_name, arguments.v) #################################### # CHECKING PATTERN FOLDER STRUCTURE: #################################### # *.tplpre file and 'tests' folder should exist. # 'TEST' file and: # either ../tests/dml folder and ../tests/test.py file for verifications tests # or ../tests/data folder for new tests exist. # Also setting the th_run_type to either 'verify_tests' or 'new_tests' th_run_type = SetEnvironment.check_pattern_folder( log, '/usr/tideway/testdir/' + pattern_dir_name) # Running the tests os.chdir('/usr/tideway/testdir/' + pattern_dir_name + '/tests') #########################
def __init__(self, pin, debug=False):
    """Set up the IR receiver on the given GPIO pin.

    :param pin: GPIO pin number the IR receiver is wired to.
    :param debug: when True, enable debug-level logging.
    """
    self.debug = debug
    log = MyLogger.get_logger(__class__.__name__, self.debug)
    self.logger = log
    log.debug('pin=%d', pin)
    self.r = IrRecv(pin, verbose=True, debug=self.debug)
# $Id: MyCONSOLE.py,v 2.11 2018/10/02 13:47:58 teus Exp teus $ # TO DO: write to file or cache """ Publish measurements to console STDOUT Relies on Conf setting biy main program """ modulename = '$RCSfile: MyCONSOLE.py,v $'[10:-4] __version__ = "0." + "$Revision: 2.11 $"[11:-2] try: import MyLogger import sys from time import time import datetime except ImportError as e: MyLogger.log(modulename, "FATAL", "One of the import modules not found: %s" % e) # configurable options __options__ = ['output', 'file'] Conf = { 'output': False, # console output dflt enabled if no output channels defined # 'fd': None, # input handler 'file': '/dev/stdout', # Debugging: write to file 'match': [ # translation table for db abd unit names ('C', 'oC'), ('F', 'oF'), ('pcs/qf', 'pcs/0.01qf'), ],
}
# from MySense import log
try:
    try:
        import os
        from time import time
        from time import sleep
        from types import ModuleType as module
        import MyLogger
        import serial
    except:
        # fall back to an alternative serial module name
        try:
            import Serial as serial
        except ImportError as e:
            MyLogger.log(modulename,'FATAL',"Missing module %s" % e)
    import re
    import subprocess # needed to find the USB serial
    import MyThreading # needed for multi threaded input
except ImportError as e:
    MyLogger.log(modulename,'FATAL',"Missing module %s" % e)

# convert pcs/qf (counter) to ug/m3 (weight)
# ref: https://github.com/andy-pi/weather-monitor/blob/master/air_quality.py
def convertPM(nr,conf,value):
    """Convert a particle count (pcs/qf) to a weight concentration (ug/m3).

    nr is the field index (0 -> PM2.5, non-zero -> PM10); conf['units'][nr]
    decides whether conversion applies: a 'pcs' unit is returned unchanged.
    The weight is count * K * particle mass (density * sphere volume).
    """
    if conf['units'][nr].find('pcs') >= 0: return value
    r = 0.44 # diameter of PM2.5
    # NOTE(review): r is used as the sphere radius in the volume term below,
    # although the comments call it a diameter -- TODO confirm against the
    # referenced air_quality.py source
    if nr: r = 2.60 # diameter of PM10
    # concentration * K * mass (mass=:density (=:1.65*10**12) * vol (vol=:4/3 * pi * r**3))
    return value * 3531.5 * ((1.65 * (10**12)) * ((4/3.0) * 3.14159 * (r * (10**-6))**3))
'ttl': '%Y-%b', # time dependent name of sheet (strftime) 'sheet': 'IoS_', # sheet name prepend: <name><ttl> } # spreadsheets dispatcher CSV = {} # keys are serials with dict to GSPREAD CSV file handling try: import MyLogger import gspread # install via eg github https://github.com/burnash/gspread from oauth2client.service_account import ServiceAccountCredentials import datetime import os from datetime import date from time import time except ImportError: MyLogger.log(modulename, 'FATAL', "Module missing for spreadsheet output.") Conf['output'] = False # ======================================================== # write data or sensor values to share spreadsheet file at Google # ======================================================== # create spreadsheet file and push the header # create a new sheet if the ttl (dflt one day) is changed # extend the file name with a month string # an easy hack to get credentials #class Credentials (object): # global Conf # def __init__ (self, access_token=None): # self.access_token = Conf['apikey'] #
def open_serial():
    """Open the Dylos input stream: a serial USB device (by usbid or port number) or a plain file.

    On success Conf['fd'] holds the open stream and True is returned; on any
    failure an ERROR/FATAL is logged and False is returned.
    """
    global Conf

    # scan for available ports. return a list of tuples (num, name)
    def scan_serial():
        available = []
        for i in range(256):
            try:
                s = serial.Serial(i)
                available.append( (i, "/dev/ttyUSB"+str(i) ) )
                s.close() # explicit close 'cause of delayed GC in java
            except serial.SerialException:
                pass
        return available

    if Conf['fd'] != None:
        return True  # already open
    if (not Conf['file']) and (Conf['port'] or Conf['usbid']):
        serial_dev = None
        # if port number == 0 we read from stdin
        # if port number == None we try serial USB product:vender ID
        if (Conf['port'] == None) and (Conf['usbid'] != None):
            # try serial with product ID
            byId = "/dev/serial/by-id/"
            if not os.path.exists(byId):
                MyLogger.log(modulename,'FATAL',"There is no USBserial connected. Abort.")
            device_re = re.compile(".*-%s.*_USB-Serial.*(?P<device>ttyUSB\d+)$" % Conf['usbid'], re.I)
            devices = []
            try:
                # list the by-id symlinks and pick the first matching ttyUSB device
                df = subprocess.check_output(["/bin/ls","-l",byId])
                for i in df.split('\n'):
                    if i:
                        info = device_re.match(i)
                        if info:
                            dinfo = info.groupdict()
                            serial_dev = '/dev/%s' % dinfo.pop('device')
                            break
            except CalledProcessError:
                MyLogger.log(modulename,'ERROR',"No serial USB connected.")
            except (Exception) as error:
                MyLogger.log(modulename,'ERROR',"Serial USB %s not found, error:%s"%(Conf['usbid'], error))
                Conf['usbid'] = None
        if (Conf['port'] == None) and (serial_dev == None):
            MyLogger.log(modulename,'WARNING',"Please provide serial USB producer info.")
            # help the operator: list the serial ports we can see
            for n,s in scan_serial():
                port=n+1
                MyLogger.log(modulename,'WARNING',"%d --> %s" % (port,s) )
            MyLogger.log(modulename,'FATAL',"No input stream defined.")
            return False
        #Initialize Serial Port for Dylos
        if serial_dev == None:
            serial_dev = "/dev/ttyUSB"+str(Conf['port']-1)
        try:
            Conf['fd'] = serial.Serial(  # for here tuned for Dylos
                serial_dev,
                baudrate=9600,
                parity=serial.PARITY_NONE,
                stopbits=serial.STOPBITS_ONE,
                bytesize=serial.EIGHTBITS,
                writeTimeout = 1,  # only needed if there was a firm upgrade on dylos
                timeout=65*60)     # allow also monitor mode
            MyLogger.log(modulename,'INFO',"COM used for serial USB: %s" % serial_dev)
        except (Exception) as error:
            MyLogger.log(modulename,'FATAL',"%s" % error)
            return False
    else:  # input is read from a file
        if not Conf['file']:
            Conf['file'] = "Dylos-test.input"
        Conf['sync'] = False  # no multi threading
        try:
            Conf['fd'] = open(Conf['file'])
        except:
            # BUG FIX: the original message referenced the undefined name
            # 'sensor' (NameError inside the handler) and read "Failed top
            # open"; report the file that failed to open instead.
            MyLogger.log(modulename,'FATAL', "Failed to open input file %s" % Conf['file'])
            return False
    return True
import MyLogger from twisted.protocols import basic from twisted.internet import protocol, reactor, threads # For filter controls #from FilterMotor import filtermotor # port for evora is 5502 # Global Variables acquired = None t = None isAborted = None # tracks globally when the abort has been called. Every call to the parser # is an new instance logger = MyLogger.myLogger("evora_server.py", "server") # Get gregorian date, local #d = date.today() #logFile = open("/home/mro/ScienceCamera/gui/logs/log_server_" + d.strftime("%Y%m%d") + ".log", "a") class EvoraServer(basic.LineReceiver): """ This is the Evora camera server code using Twisted's convienience object of basic.LineReceiver. When a line is recieved from the client it is sent to the parser to execute the camera commands and the resulting data is sent back to the client. This a threaded server so that long running functions in the parser don't hang the whole server. """ def connectionMade(self): """
import MyLogger as log
# module-level logger writing to 'all.log' at debug level
message = log.Logger('all.log', level='debug')

# First, build a small example transaction data set (Apriori demo input).
def loadDataSet():
    """Return a hard-coded list of example transactions (lists of item ids)."""
    return [[1, 3, 4], [2, 3, 5], [1, 2, 3, 5], [2, 5]]

# With the test data set in place:
# step 1: from dataset build the candidate set C1, containing every
# distinct single item of dataset as a 1-item candidate itemset.
def createC1(dataSet):
    """Return the sorted list of 1-item candidate itemsets (frozensets) found in dataSet."""
    C1 = []
    for transaction in dataSet:
        for item in transaction:
            if not [item] in C1:
                C1.append([item]) # store all the item unrepeatly
    C1.sort()
    # Convert each candidate to a frozenset so itemsets are hashable/immutable.
    # return map(frozenset, C1)#frozen set, user can't change it.
    return list(map(frozenset, C1))

"""
第二步,计算C1<key>每个元素key的支持度。
支持度= count(key) / sizeof(C1)
先把dataset转成元素为集合的类型。
subprocess.call(['python', os.path.abspath(__file__), '-h']) sys.exit(1) # Checking that RunTH.py is running under tideway if 'tideway' not in os.popen('whoami').read(): print('ERROR \'RunTH.py\' should run under \'tideway\' user') sys.exit(1) # Setting up the environment variables environment = SetEnvironment.set_environment_variables(arguments.v) # Updating the test scripts, copying the tested pattern into isolated environment SetEnvironment.update_test_directory(arguments.v, environment['tkn_core'], environment['tkn_main'], pattern_dir_name) # Creating logger log = MyLogger.create_logger(pattern_dir_name, arguments.v) #################################### # CHECKING PATTERN FOLDER STRUCTURE: #################################### # *.tplpre file and 'tests' folder should exist. # 'TEST' file and: # either ../tests/dml folder and ../tests/test.py file for verifications tests # or ../tests/data folder for new tests exist. # Also setting the th_run_type to either 'verify_tests' or 'new_tests' th_run_type = SetEnvironment.check_pattern_folder(log, '/usr/tideway/testdir/' + pattern_dir_name) # Running the tests os.chdir('/usr/tideway/testdir/' + pattern_dir_name + '/tests') ######################### # CREATING NEW TEST DATA:
def on_publish(client, obj, MiD):
    """MQTT on_publish callback: record the acknowledged message id and release the wait flag."""
    global waiting, mid
    waiting = False
    mid = MiD
    msg = "Publish mid: " + str(mid)
    MyLogger.log(modulename, 'DEBUG', msg)