Example 1
        def __init__(self, config, main_logger, logger):
            # Dedicated log client for the GPS subsystem
            self.GPS_logger = DAQLogClient(main_logger.log_queue, "GPS")

            # Add flags
            self.GPS_flags = EngineFlags()

            self.logger = logger

            self.gps = MotorolaClock(config, self)
Example 2
class TaskManager:
    """
    Interface to the separate TaskManager process
    """

    def __init__(self, settings, log_queue):
        self.Queue = Queue(20)  #process Queue  # 10 -> 20 5/18/2020 APS
        self.MessageQueue = Queue(2)
        self.logger = DAQLogClient(log_queue, "TASKMAN")
        
        tm_process = Process(target=TM_Process, args=(settings,self.Queue,self.MessageQueue, log_queue))
        tm_process.daemon=True

        # if os.name=='nt':
        #     self.logger.info("setting below-normal priority to TaskManager")
        #     psutil.Process(os.getpid()).nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
        tm_process.start()
        self.logger.info('Starting TaskManager on pid %d' % tm_process.pid)

    def Start(self):
        self.Queue.put('start')
        self.MessageQueue.get() #block until done

    def Stop(self):
        self.Queue.put('stop')
        self.MessageQueue.get() #block until done

    def Quit(self):
        self.Queue.put('quit')
        
        self.logger.info("Quitting Task Manager")
        
        self.MessageQueue.get() #block until done

        self.logger.info("Finished quitting Task Manager")
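A minimal usage sketch for the class above (not from the source): assuming `options.settings` is an already-parsed settings tree and a DAQLogger is running, each command call blocks on MessageQueue until TM_Process acknowledges it.

from time import sleep

# Hypothetical driver; 'options' comes from the usual command-line parser.
main_logger = DAQLogger(options)
main_logger.start()

tm = TaskManager(options.settings, main_logger.log_queue)
tm.Start()    # puts 'start' on the process Queue, blocks for the ack
sleep(60)     # let the background tasks run for a while
tm.Stop()     # puts 'stop', blocks for the ack
tm.Quit()     # puts 'quit' and shuts the TaskManager process down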
Example 3
    def __init__(self, main_logger, logger):

        # Dedicated log client for the DAQ subsystem
        self.DAQ_logger = DAQLogClient(main_logger.log_queue, "DAQ")
        
        # Add flags
        self.DAQ_flags = EngineFlags()

        self.logger = logger
        
        # NOTE: 'config' is assumed to be supplied by the enclosing test script;
        # it is not a parameter of this constructor.
        self.daq = NIDAQmx(config, self, have_gps = False)
        self.DAQ_flags.init.wait()
        self.logger.info("DAQ initialized at %s" % ctime())
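EngineFlags itself is not shown in any of these snippets. A plausible minimal sketch, assuming it is just a bundle of threading.Event objects: the driver (here NIDAQmx) would call flags.init.set() once the hardware is configured, which releases the init.wait() above.

from threading import Event

class EngineFlags:
    # Sketch only: lifecycle events shared between the engine and a driver.
    def __init__(self):
        self.init = Event()   # set by the driver when initialization completes
        self.stop = Event()   # set by the engine to request a shutdown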
Example 4
    def ConstructSequence(self, sequenceSettings):
        sequence = []
        for settings in sequenceSettings.childNodes:
            if str(settings.nodeName) == "PostProcessor":

                # Get module name
                module = str(settings.attributes["module"].value)

                # get channel number
                channel = str(
                    settings.getElementsByTagName("adc_channel_number")
                    [0].firstChild.data)

                # import python module
                processors = __import__("PostProcessors", globals(), locals(),
                                        [module], -1)

                # setup the logger with logger name
                logger = DAQLogClient(self.log_queue,
                                      "ENG.PPT.%s%s" % (module, channel))

                #make station settings available to each post processor as a separate tree:
                settings.appendChild(self.stationSettings)

                #constructor = "processors." + module + "." + module + "(settings,logger)"
                constructor = "processors.%s.%s(settings,logger)" % (module,
                                                                     module)
                self.logger.debug("Constructing: %s" % constructor)

                try:
                    sequence.append(eval(constructor))
                except Exception:
                    self.logger.exception(
                        "Exception while instantiating processor module: %s" %
                        module)

            elif str(settings.nodeName) == "PostProcessorSequence":
                sequence.append(self.ConstructSequence(settings))

        return sequence
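ConstructSequence recurses on nested <PostProcessorSequence> nodes, so a configuration fragment shaped like the one below (module names and channel numbers are hypothetical) yields a list containing one processor followed by a nested sub-list:

<PostProcessorSequence>
    <PostProcessor module="FilterBank">
        <adc_channel_number>0</adc_channel_number>
    </PostProcessor>
    <PostProcessorSequence>
        <PostProcessor module="MatFileWriter">
            <adc_channel_number>0</adc_channel_number>
        </PostProcessor>
    </PostProcessorSequence>
</PostProcessorSequence>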
Example 5
        <ErrorEmail>0</ErrorEmail>
        <ErrorPost>0</ErrorPost>
        <LogPostUrl>/field_sites_logs/logging.php</LogPostUrl>
        <LogPostServer>vlf-engineering.stanford.edu:80</LogPostServer>
        <LogLevel>DEBUG</LogLevel>
        <ConsoleLevel>DEBUG</ConsoleLevel>
        <LogFileLevel>WARNING</LogFileLevel>
        <PostLevel>WARNING</PostLevel>
    </Logger>
    </DaqConfiguration>
    """

    config = parseString(settings)
    main_logger = DAQLogger(config)
    main_logger.start()
    logger = DAQLogClient(main_logger.log_queue, "MAIN")
    
    logger.debug("Test debug statements")
    
    test = test_engine(main_logger, logger)
    
    test.StartTest()
    
    for i in range(10):
        data = test.GetData()
        logger.info("Received data at %s." % ctime())

    logger.info('************************* Full Restart Test*************************')
    test.Restart()

    for i in range(10):
Example 6
        <ErrorEmail>0</ErrorEmail>
        <ErrorPost>0</ErrorPost>
        <LogPostUrl>/field_sites_logs/logging.php</LogPostUrl>
        <LogPostServer>vlf-engineering.stanford.edu:80</LogPostServer>
        <LogLevel>DEBUG</LogLevel>
        <ConsoleLevel>DEBUG</ConsoleLevel>
        <LogFileLevel>WARNING</LogFileLevel>
        <PostLevel>WARNING</PostLevel>
    </Logger>
    </Configuration>
    """

    config = parseString(settings)
    main_logger = DAQLogger(config)
    main_logger.start()
    logger = DAQLogClient(main_logger.log_queue, "MAIN")

    logger.debug("Test debug statements")

    test = tester(config, main_logger, logger)

    # Start the GPS clock
    logger.info("Starting GPS clock at %s." % ctime())
    test.gps.Start()
    logger.info("Started GPS clock at %s." % ctime())

    sleep(20)

    # Restart the GPS clock
    logger.info("Restarting GPS clock at %s." % ctime())
    #test.gps.Stop()
Example 7
class _TaskManager:
    """
    This class is responsible for starting and controlling background processes
    such as automatic SFTP file transfers and data folder cleanup.
    """

    # ========================
    # Constructors/Destructors
    # ========================
    def __init__(self, settings, log_queue):
        # Initialize data members
        self.running = False
        self.logger = None
        self.log_queue = log_queue
        self.thread = None

        # Flags used by SignalRestart()/Restart() further down
        self.restarting = False
        self.restarts = 0

        self.tasks = []

##        self.logger = logging.getLogger('TaskManager')    #for thread
        #if separate process:
        #self.logger = get_logger()
        self.logger = DAQLogClient(self.log_queue, "TASKS")
        #formatter = logging.Formatter('TM: (%(levelname)s) %(message)s')
        #handler = logging.StreamHandler()
        #handler.setFormatter(formatter)
        #self.logger.addHandler(handler)
        #self.logger.setLevel(logging.INFO)

        # Parse the settings XML file
        #settings = parse(settings) #Settings already parsed
        self.settings = settings
        self.GenerateModules(settings)

    def GenerateModules(self,settings):
        #get a list of all Task entries
        task_settings_list = settings.GetSubTree("Task")

        tasknum = 0
        
        #iterate over the tasks and instantiate the modules
        for task_settings in task_settings_list:
        
            settings = DAQConfig(task_settings)
            
            module_name = str(task_settings.attributes['module'].value)
            
            self.logger.info("Instantiating module: %s" % module_name)
            
            #setup individual logger
            task_logger = DAQLogClient(self.log_queue,'T%s.%s' % (tasknum, module_name))
            
            try:
                task_module = __import__('Tasks',globals(),locals(),[module_name],-1)
                
                constructor = 'task_module.' + module_name + '.' + module_name + '(settings, task_logger, tasknum)'
                
                #construct the module and add it to the list of tasks
                self.tasks.append(eval(constructor))
                
            except Exception as e:
                self.logger.exception("Error while instantiating module \'%s\'" % module_name)
                
            tasknum += 1
            
        self.logger.info("Finished instantiating modules.")
        
    # =======
    # Methods
    # =======
    def Start(self):
        """
        Begins execution of the data acquisition in a thread.
        """
        #self.logger.info("Starting the VLF DAQ Engine.")
        # Launch the thread
        self.thread = Thread(target=self.MainLoop)
        self.thread.start()

    def Stop(self):
        """
        Terminates execution of the data acquisition.
        """
        
        if self.running:
            self.logger.debug("Stopping tasks.")
            
            for task in self.tasks:
                task.Stop()

            self.logger.debug("Stopping the Taskmanager thread.")
            
            # Shutdown the Engine properly
            if self.thread is not None:
                self.running = False            # Kill the thread
                self.thread.join()
                self.thread = None

##        exit()

    def Quit(self):
        self.Stop()

    #You have to create a new thread for the restart because you can't
    #.join() a currently running thread (deadlock)
    def SignalRestart(self, restart_args):
    
        self.logger.critical('Restart signal received.')
        
        self.restarting = True
        self.running = False
        RestartThread = Thread(target=self.Restart, args = restart_args)
        RestartThread.daemon=True
        RestartThread.start()


    def Restart(self, hardRestart = False, systemRestart = False, restartGPS=True):
        """
        Stops the data acquisition and re-creates/restarts the modules in case of
        an error. Restarting several times will probably leak memory, so a hard
        restart (stopping the program and relaunching via "python main.py --args")
        is probably a good thing to do after N soft restarts.
        """
        self.restarts += 1
        self.logger.warning("Restart Thread Activated")

        if systemRestart:
            #in case of serious errors, execute a shell script to restart the entire system
            #This has to be platform-specific, for windows, linux, mac, etc
            pass

        # Stop the thread. ('isReady' below is an Event the full source uses to
        # unblock MainLoop; its definition is not shown in this snippet.)
        if self.thread is not None:
            self.isReady.set()
            self.thread.join()
            self.thread = None


        # Clear old modules
        for task in self.tasks:
            task.Stop()

        self.tasks=[]

        #Important Note: This doesn't actually work right now, it just stops everything in engine.py
        #I couldn't find a way to stop the main (CLI) thread from this one, so this is still WIP
        if hardRestart:
            #in case of certain classes of errors, quit the current python instance and launch another
            #from the shell
            #self.logger.critical("HARD RESTART")
            #subprocess.Popen("python main.py")
            if self.parent:
                self.parent.onecmd("quit")
                return

        settings = self.settings
        self.GenerateModules(settings)

        # Launch the thread
        self.thread = Thread(target=self.MainLoop)
        self.thread.start()


    # ==============
    # Helper Methods
    # ==============
    def MainLoop(self):
        self.running = True

        try:

            #start the tasks
            for task in self.tasks:
                task.Start()

            #wait until Stop() has been called
            while self.running:
                sleep(0.5)

            #stop the tasks
            for task in self.tasks:
                task.Stop()

        except Exception as inst:
            #(excType, excValue, excTb) = exc_info()
            #tb = traceback.extract_tb(excTb)[-1]
            #self.logger.error('EXCEPTION: %s'%str(excValue))
            #self.logger.error('\tFile: %s'% tb[0])
            #self.logger.error('\tLine %s in function %s: %s'% (tb[1], tb[2], tb[3]))
            self.logger.exception("Exception encountered while running task.")
            
            #self.logger.critical('Restarting')
            self.SignalRestart(())
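The comment above SignalRestart is the key design point: Restart() has to join() the MainLoop thread, and a thread cannot join() itself without deadlocking, so the restart always runs on a fresh daemon thread. A stripped-down, self-contained illustration of the same pattern (not from the source):

import threading, time

class Worker:
    def __init__(self):
        self.running = True
        self.thread = threading.Thread(target=self.main_loop)
        self.thread.start()

    def main_loop(self):
        while self.running:
            time.sleep(0.1)

    def signal_restart(self):
        # May be called from main_loop's own thread, which cannot join()
        # itself -- so hand the actual restart to a separate daemon thread.
        self.running = False
        t = threading.Thread(target=self.restart)
        t.daemon = True
        t.start()

    def restart(self):
        self.thread.join()      # safe here: we are on the helper thread
        self.running = True
        self.thread = threading.Thread(target=self.main_loop)
        self.thread.start()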
Example 8

# =========
# Unit Test
# =========
if __name__ == "__main__":

    # 'options' is assumed to come from the command-line parser used elsewhere;
    # it is not defined in this snippet.
    main_logger = DAQLogger(options)
    main_logger.start()
    
    logger = DAQLogClient(main_logger.log_queue, "MAIN")
    logger.debug("Main logger ready.")
    

    # Create the TaskManager object (constructor takes parsed settings + a log queue)
    tm = _TaskManager(options.settings, main_logger.log_queue)
    try:
        tm.MainLoop()
    except KeyboardInterrupt:
        tm.Stop()
Example 9
class Engine:

    def __init__(self, options, args, log_queue):

        self.stationSettings = options.settings.GetFirstSubTree("StationSettings")
        self.pptSettings = options.settings.GetFirstSubTree("PostProcessorTree")
       
        self.logger = DAQLogClient(log_queue, "ENG")
        self.PPT_logger= DAQLogClient(log_queue, "ENG.PPT")
        self.log_queue = log_queue
        self.logger.status('********** Initializing Engine **********')

        self.start()
        self.logger.status("Constructed PostProcessorTree.")


    def start(self):
        ''' Start the PostProcessorTree '''
        self.ppt = PostProcessorTree(self.pptSettings, self.stationSettings, self)
        # self.nbproc = Narrowband(options, self.PPT_logger)

    def stop(self):
        ''' Stop the PostProcessorTree '''
        self.logger.info("Shutting down PostProcessor")
        self.ppt.Stop()
        self.ppt = None
        self.logger.info('PPT process stopped')

    def get_queue_status(self):
        ''' Get the current length of the process queues '''

        qstats = []
        for sequence in self.ppt.ppt:
            for p in sequence:
                # try:
                if (len(p) > 0) and hasattr(p[0],'queue'):
                    qstats.append(p[0].queue.qsize())
                else:
                    self.logger.debug("Process has no queue to check")
                # except:
                #     logger.warning("issues with get_queue_status")
        return qstats


    def process_files(self, file_prefixes):
        '''
            Run prerecorded continuous files through the PostProcessorTree.
            Input: [file_prefixes]: A list of file prefixes to load
                   (i.e., with the "_000.mat" / "_001.mat" suffixes stripped off).
                   We recreate the filenames in here based on the number of
                   channels in the PostProcessorTree.
        '''

        num_channels = self.ppt.GetNumSequences()

        for f_prefix in file_prefixes:

            # Start execution timer
            tstart = time.time()

            s_list = []
            for ind in xrange(num_channels):

                fname = f_prefix + "_%03d.mat"%ind
                self.logger.status("loading file %s"%fname)
                lm = loadMATdata(fname)
                s = lm.loadAllData()
                s_list.append(s)
        
                print('Data has dimensions', np.shape(s['data']))
                print('sample rate is ',s['Fs'])


            L = 1   #[seconds]  -- The code will probably handle longer strides,
                    #              but MatFileWriter.py will start new files at each break over 1 sec.               
            fs = int(s['Fs'])
            self.logger.info("file length is %d seconds"%(len(s['data'])/s['Fs']))

            sec_in_file = len(s['data'])/s['Fs']
            n_strides = int(sec_in_file/L)

            stride_length = int(s['Fs']*L)

            self.logger.debug("n_strides: %d"%n_strides)
            self.logger.debug("stride_length: %d"%stride_length)

            lat = s['latitude']
            lon = s['longitude']
            alt = s['altitude']
            Q   = int(s['gps_quality'])
            timestamp = datetime(year=int(s['start_year']), month=int(s['start_month']), day=int(s['start_day']),
                hour=int(s['start_hour']), minute=int(s['start_minute']), second=int(s['start_second']))

            self.logger.debug("time = %s"%timestamp)
            self.logger.debug("data has shape %s"%np.shape(s['data']))

            for i in xrange(n_strides):

                # logger.info("stride %d"%i)
                left = i*stride_length
                right= (i+1)*stride_length
                # rawBuffer = s['data'][left:right].transpose().astype('float32')
                rawBuffer = [x['data'][left:right].transpose().astype('float32') for x in s_list]

                # Data message is in the following format:
                # [[raw data], [gps timestamp, [lat, lon, alt], [gps quality]], sample rate]
                data = [rawBuffer, [timestamp, [lat, lon, alt], [Q]], fs]

                # Queue it up!
                self.ppt.Process(data)
                timestamp += timedelta(seconds=L)

            # Wait until the processing queues are empty before loading another file in
            qstat = self.get_queue_status()
            self.logger.info("Queue status: %s"%(qstat))

            while any(np.array(qstat) > 0):
                qstat = self.get_queue_status()
                self.logger.timestamping("Waiting for PostProcessor Queues: depth =  %s          "%(qstat))
                time.sleep(1)

            # Lap
            tstop = time.time()
            run_ratio = sec_in_file/(tstop - tstart)
            self.logger.status('Finished file prefix %s (%0.1fx realtime)'%(f_prefix, run_ratio))
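A sketch of driving the offline path above (not from the source): assuming `options` carries the parsed settings tree and each prefix names continuous recordings split per channel as prefix_000.mat, prefix_001.mat, and so on.

# Hypothetical offline-processing driver for the Engine above.
main_logger = DAQLogger(options)
main_logger.start()

engine = Engine(options, [], main_logger.log_queue)
engine.process_files(['/data/continuous/BB_example'])   # made-up prefix
engine.stop()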
Example 10
    for el in f.getElementsByTagName("PostProcessor"):
        if el.getAttribute("module") == "MatFileWriter":

            # Remove the entry if it is continuous (IsSynoptic == 0)
            if el.getElementsByTagName('IsSynoptic')[0].childNodes[0].data.strip() == "0":
                print('Removing Continuous MatFileWriter')
                parent = el.parentNode
                parent.removeChild(el)

    # Instantiate the configuration module
    options.settings = DAQConfig(f)

    # Start the loggers
    main_logger = DAQLogger(options)
    main_logger.start()    
    logger = DAQLogClient(main_logger.log_queue, "MAIN")
    logger.debug("Main logger ready.")


    # Figure out our input directory. If None, ask for one with a dialog
    inp_dir = options.inp_dir

    if not inp_dir:
        root = Tkinter.Tk()
        root.withdraw()
        logger.info('Please select an input directory')
        inp_dir = tkFileDialog.askdirectory(title="Please select an input directory")

    if not os.path.isdir(inp_dir):
        sys.exit("Invalid input directory: %s"%inp_dir)
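For reference, the filter at the top of this example expects MatFileWriter entries shaped roughly like this (values hypothetical); only continuous ones, with IsSynoptic set to 0, are pruned:

<PostProcessor module="MatFileWriter">
    <adc_channel_number>0</adc_channel_number>
    <IsSynoptic>0</IsSynoptic>
</PostProcessor>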