def parser(qconff, mconff, passes=None):
    """Read the query and module config files and pair them up.

    Parameters
    ----------
    qconff : str
        Path to the database-query configuration file.
    mconff : str
        Path to the module configuration file.
    passes : str, optional
        Path to the password file, handed straight to parseConfig.

    Returns
    -------
    modules : list
        moduleConfig objects whose queries matched entries in qconff.
        Modules referencing only unknown queries are dropped entirely.
    queries : set
        The unique, active database query sections used by those modules.
    """
    # Every query in the big list is assumed live, so the enabled/disabled
    # check is skipped here (enableCheck=False).
    querySections, commonBlock = parseConfig(qconff, classes.databaseQuery,
                                             passfile=passes,
                                             searchCommon=True,
                                             enableCheck=False)

    # Attach each query to its proper database connection class, pulled
    # from the common block under the 'database' key.
    querySections = assignComm(querySections, commonBlock, commkey='database')

    # The module config has no common block to search for, but individual
    # sections CAN be disabled, so enableCheck=True this time.
    moduleSections, _ = parseConfig(mconff, confClasses.moduleConfig,
                                    passfile=passes,
                                    searchCommon=False,
                                    enableCheck=True)

    # Combine modules and queries into something iterable; the actual
    # processing loop is someone else's job.
    modules, queries = groupConfFiles(querySections, moduleSections)
    return modules, queries
def main():
    """Grab images from every enabled webcam, forever.

    Sets up file-based logging, parses the webcam configuration, ensures
    each enabled camera's output directory exists, then loops endlessly
    grabbing a full image set roughly once a minute.
    """
    # File-based logging, since 'docker logs -f' was mysteriously failing
    logname = './outputs/logs/camLooper.log'
    conffile = './config/webcams.conf'
    passfile = './config/passwords.conf'
    camclass = classes.Webcam

    # getList=False: just create/verify the directory, don't hand back
    # a listing of whatever files it already contains.
    files.checkOutDir('./outputs/logs/', getList=False)

    # If the directory check aborted, this'll probably blow up...?
    logs.setup_logging(logName=logname, nLogs=5)

    # enableCheck=True means only the *enabled* webcam sections come back
    # (as an OrderedDict); the common-block return is unused here.
    oncams, _ = confparsers.parseConfig(conffile, camclass,
                                        passfile=passfile,
                                        searchCommon=False,
                                        enableCheck=True)

    # Make sure every enabled camera's image output directory exists.
    # (Could check ALL cameras if desired; only enabled ones for now.)
    for camsettings in oncams.values():
        files.checkOutDir(camsettings.odir, getList=False)

    # Just run it for ever and ever and ever and ever and ever and ever
    while True:
        # failimg=None falls back to the default resources/images/percy_txt.jpg;
        # archive locations are created automatically inside grabSet.
        cams.grabSet(oncams, failimg=None, archive=True, makeMini=True)
        print("Done Grabbing Images!")
        print("Sleeping for 60 seconds...")
        time.sleep(60.)
print(xmld.unparse(dPacket, pretty=True)) return xPacket if __name__ == "__main__": # Define the default config files we'll use/look for. These are passed to # the worker constructor (toServeMan). conf = './queue.conf' passes = './passwords.conf' # This defines the subclass that is filled by the confparser functions # IT IS SITUATIONAL DEPENDENT! conftype = classes.brokerCommandingTarget # Actually parse the files and set stuff up config, comm = confparsers.parseConfig(conf, conftype, passfile=passes, searchCommon=True, enableCheck=True) amqlistener = amq.silentSubscriber() amqtopics = amq.getAllTopics(config, comm) amqs = connSetup.connAMQ(comm, amqtopics, amqlistener=amqlistener) # Just hardcode this for now. It's a prototype! conn = amqs['broker-dct'][0] # Now that we have all of this, the final product will have command line # switches that link commands to the actions; since this is just a test # we need to do a little dance to set things up better for testing # and actually just loop over the specified actor actors = {"MrFreezeControl": ["LMISunpowerSetpoint", "DeVenySunpowerSetpoint", "DeVenySunpowerPowerMin",
def main():
    """Run Alfred, the instrument monitor, until told to halt.

    Parses the worker configuration, sets up logging and database
    connections, then loops: poll every instrument, fire off any extra
    pings, and nap for ``bigsleep`` seconds in small abort-aware chunks.
    Exits cleanly when ``runner.halt`` is set (e.g. via SIGTERM).
    """
    # Default files we'll use/look for; passed to the worker
    # constructor (toServeMan).
    conf = './config/alfred.conf'
    passes = './config/passwords.conf'
    logfile = '/tmp/alfred.log'
    desc = 'Alfred: The Instrument Monitor'
    eargs = alfred.parseargs.extraArguments
    conftype = classes.hostTarget

    # Note: We need to prepend the PATH setting here because some hosts
    #   (all recent OSes, really) have a more stringent SSHd config
    #   that disallows the setting of random environment variables
    #   at login, and the pty shell settings for Ubuntu (Vishnu) and
    #   OS X (xcam) are uncooperative.
    #
    #   Also need to make sure to use the relative path (~/) since OS X
    #   puts stuff in /Users/<username> rather than /home/<username>.
    #   Messy but necessary due to how the SSH is being done.
    baseYcmd = 'export PATH="~/miniconda3/bin:$PATH";'
    baseYcmd += 'python ~/LIG/DataServants/Yvette.py'
    baseYcmd += ' '

    # Interval between successive runs of the instrument polling (seconds)
    bigsleep = 60

    # Total time allowed for the entire set of actions per instrument (seconds)
    alarmtime = 300

    # config: dictionary of parsed config file
    # comm: common block from config file
    # args: parsed command-line options
    # runner: class that contains logic to quit nicely (sets .halt)
    config, comm, args, runner = workerSetup.toServeMan(conf, passes, logfile,
                                                        desc=desc,
                                                        extraargs=eargs,
                                                        conftype=conftype,
                                                        logfile=True)

    # Parse the extra-pings config file; sloppily fills out the class with
    # whatever else is found in the file.  REMEMBER there are two returns!
    # The second contains any common items and is None when
    # searchCommon=False ... but it's always returned.
    epings, _ = confparsers.parseConfig(args.extraPings, conftype,
                                        passfile=None,
                                        searchCommon=False,
                                        enableCheck=True,
                                        debug=args.debug)

    # Actually define the function calls/references to functions
    actions = defineActions()

    # Get this PID for diagnostics; helpful to find starts/restarts
    # when scanning thru logs.
    pid = os.getpid()
    common.printPreamble(pid, config)

    # Check to see if there are any connections/objects to establish
    idbs = connSetup.connIDB(comm)

    # Semi-infinite loop
    while runner.halt is False:
        # Common core function handling the actions and looping over each
        # instrument.  The main while loop stays out here so we can do
        # stuff with the combined results if needed.
        _ = common.instLooper(config, runner, args, actions,
                              updateArguments, baseYcmd,
                              db=idbs, alarmtime=alarmtime)

        # Do the extra pings as a side job/quickie; no need for a big to-do
        if epings is not None:
            for sect in epings:
                pobj = epings[sect]
                dbtag = pobj.database
                db = idbs[dbtag]
                res = alfred.tasks.actionPing(pobj, db=db, debug=args.debug)
                print(res)

        # After all the instruments are done, take a big nap
        if runner.halt is False:
            print("Starting a big sleep")
            # BUG FIX: the original looped range(bigsleep) times at 0.1 s
            # per iteration, sleeping only bigsleep/10 (6) seconds instead
            # of the intended bigsleep (60).  Multiply by 10 so the nap
            # really lasts bigsleep seconds while still checking the abort
            # flag every 0.1 s.
            for _ in range(bigsleep * 10):
                time.sleep(0.1)
                if runner.halt is True:
                    print("halt requested while sleeping!")
                    print("issuing 'break'")
                    break
        else:
            print("runner.halt has been triggered!")

    # The above loop is exited when someone sends SIGTERM
    print("PID %d is now out of here!" % (pid))

    # Return STDOUT and STDERR to their system defaults
    sys.stdout = sys.__stdout__
    sys.stderr = sys.__stderr__
    print("STDOUT and STDERR reset.")
# Module-level setup for the 'abu' prototype: parse its config file and
# establish the ActiveMQ broker + InfluxDB connections.
# NOTE(review): 'database' is imported but not used in this visible chunk;
# presumably referenced further down the file -- TODO confirm before removing.
from ligmos.utils import amq, classes, database, confparsers
from ligmos.workers import connSetup

# Alternate (alfred) configuration set, kept commented for quick switching:
# cfile = './alfred.conf'
# confclass = classes.hostTarget
# pfile = './passwords.conf'

# Active configuration files and the parser target class.
# confclass is situation-dependent: it's the object type the config parser
# fills out for each section of cfile.
cfile = './abu.conf'
confclass = classes.sneakyTarget
pfile = './passwords.conf'

# Parse the things!
# config: per-section parsed objects (enabled sections only, enableCheck=True)
# comm: the common block (searchCommon=True) holding shared broker/db info
config, comm = confparsers.parseConfig(cfile, confclass,
                                       passfile=pfile,
                                       searchCommon=True,
                                       enableCheck=True,
                                       debug=True)

# Check to see if there are any connections/objects to establish.
# silentSubscriber is the listener handed to the AMQ connections below.
amqlistener = amq.silentSubscriber()

# NOTE: The idea is that basically ALL of what follows will become completely
#   generic boilerplate, to be shuffled away behind the scenes somewhere
#   for much easier access. The only thing that might poke out is the stuff
#   above, but even that can be hidden away if we really need/want to do that.
amqtopics = amq.getAllTopics(config, comm)
amqbrokers, influxdatabases = connSetup.connAMQ_IDB(comm, amqtopics,
                                                    amqlistener=amqlistener)