Example
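The GPIOProcess class below references module-level imports, constants, and helper classes (Relay, SensorDevice, JobProcessor) that are not part of this excerpt. The following is a minimal sketch of what it assumes; the names are taken from the code, but the values and import paths are placeholders, not the original definitions.

import multiprocessing
import os
import errno
import json
import time
import copy
from collections import deque

# Helper classes used below; their real import locations are not shown in
# the example, so these lines are placeholders only:
# from relay import Relay                  # relay board wrapper (assumed)
# from sensors import SensorDevice         # temperature sensor wrapper (assumed)
# from job_processor import JobProcessor   # per-job control logic (assumed)

# Module-level settings and paths referenced by the class; placeholder values.
_TESTING_ = False
_home = os.path.expanduser('~')
BREWABLE_BASE_DIR = os.path.join(_home, '.brewable')
USER_CONFIG_DIR = os.path.join(BREWABLE_BASE_DIR, 'config')
USER_CONFIG_FILE = os.path.join(USER_CONFIG_DIR, 'config.json')
JOB_RUN_DIR = 'job_run'
JOB_HISTORY_DIR = 'job_history'
JOB_ARCHIVE_DIR = 'job_archive'
PROFILE_DATA_FILE = 'profiles.json'
JOB_TEMPLATES_FILE = 'job_templates.json'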
class GPIOProcess(multiprocessing.Process):

    def __init__(self, input_queue, output_queue):
        multiprocessing.Process.__init__(self)
        self.input_queue = input_queue
        self.output_queue = output_queue
        self.configuration = {}
        self.sensorDevices = []
        self.relay = Relay()

        # List of "raw" jobData, as constructed by client "Jobs" page
        self.jobs = []
        # List of JobProcessor instances
        self.runningJobs = []
        self.stoppedJobs = []

    def run(self):

        if _TESTING_:
            print "_TESTING_ is True"

        # Setup
        # First create user directory for brewable data
        # then move data from old location into it
        try:
            os.makedirs(BREWABLE_BASE_DIR)
        except OSError as exc:
            if exc.errno == errno.EEXIST and os.path.isdir(BREWABLE_BASE_DIR):
                pass
            else:
                raise

        oldbase = os.path.join(_home, 'src', 'brewable')
        try:
            os.rename(os.path.join(oldbase, JOB_HISTORY_DIR), os.path.join(BREWABLE_BASE_DIR, JOB_HISTORY_DIR))
            os.rename(os.path.join(oldbase, JOB_ARCHIVE_DIR), os.path.join(BREWABLE_BASE_DIR, JOB_ARCHIVE_DIR))
        except Exception as e:
            print "Moving stuff 1: ", e
        try:
            os.rename(os.path.join(oldbase, PROFILE_DATA_FILE), os.path.join(BREWABLE_BASE_DIR, PROFILE_DATA_FILE))
            os.rename(os.path.join(oldbase, JOB_TEMPLATES_FILE), os.path.join(BREWABLE_BASE_DIR, JOB_TEMPLATES_FILE))
        except Exception as e:
            print "Moving stuff 2: ", e

        # Next, read user config or generate default configuration
        try:
            os.makedirs(USER_CONFIG_DIR)
        except OSError as exc:
            if exc.errno == errno.EEXIST and os.path.isdir(USER_CONFIG_DIR):
                pass
            else:
                raise

        # Load saved configuration
        try:
            with open(USER_CONFIG_FILE) as json_file:
                json_data = json.load(json_file)
                print "CONFIGURATION DATA: ", json_data
                self.configuration.update(json_data)
        except Exception as e:
            print e
            print "Generating new configuration from defaults"
            # Can't open user config file - either corrupted or doesn't exist,
            # so generate a default config and save that.
            self.configuration['sensorFudgeFactor'] = 0.0
            self.configuration['multiSensorMeanWeight'] = 50
            self.configuration['relayDelayPostON'] = 180
            self.configuration['relayDelayPostOFF'] = 480
            #print "CONFIGURATION: ", self.configuration
            with open(USER_CONFIG_FILE, 'w') as json_file:
                json.dump(self.configuration, json_file)

        for k in self.configuration.keys():
            print "config item: ", k, self.configuration[k]



        # Look for sensor devices
        try:
            sensor_file = open(SensorDevice.deviceDirectory())
            sensors = sensor_file.read()
            sensor_file.close()
            for sensorId in sensors.split():
                self.sensorDevices.append(SensorDevice(sensorId, self.configuration['sensorFudgeFactor']))
            #for sensor in self.sensorDevices:
            #    print "SENSOR", sensor.getId()
        except:
            print "No sensors connected?"

        # Load running jobs
        # Look through all files in the history directory.
        # If any is "current" (still running),
        # then add it to self.runningJobs.
        # First ensure the directory exists
        try:
            os.makedirs(os.path.join(BREWABLE_BASE_DIR, JOB_RUN_DIR))
        except OSError as exc:
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.join(BREWABLE_BASE_DIR, JOB_RUN_DIR)):
                pass
            else:
                raise

        # JOB_HISTORY_DIR is where saved job history files are kept
        try:
            os.makedirs(os.path.join(BREWABLE_BASE_DIR, JOB_HISTORY_DIR))
        except OSError as exc:
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.join(BREWABLE_BASE_DIR, JOB_HISTORY_DIR)):
                pass
            else:
                raise
        # Load saved job templates
        try:
            with open(os.path.join(BREWABLE_BASE_DIR, JOB_TEMPLATES_FILE)) as json_file:
                json_data = json.load(json_file)
                #print "Job data: ", json_data['job_data']
                for job in json_data['job_data']:
                    self.jobs.append(job)
                #print "Job data: ", self.jobs
        except Exception as e:
            # Can't open job file - either corrupted or doesn't exist
            print e

        # JOB_ARCHIVE_DIR holds saved job history files that should not be
        # displayed in the Job History section of the browser
        try:
            os.makedirs(os.path.join(BREWABLE_BASE_DIR, JOB_ARCHIVE_DIR))
        except OSError as exc:
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.join(BREWABLE_BASE_DIR, JOB_ARCHIVE_DIR)):
                pass
            else:
                raise


        # Now check the relays
        #self.relay_test()

        # Start with all relays off
        self.relay.ALLOFF()

        # Relay DelaySets look like:
        # {'on_time':180, 'off_time':480, 'isset':False}
        for id in range(self.relay.device_count()):
            self.relay.setDelaySetValue(id+1, 'on_time', self.configuration['relayDelayPostON'])
            self.relay.setDelaySetValue(id+1, 'off_time', self.configuration['relayDelayPostOFF'])

        # Loop
        count = 0
        while True:
            # Incoming request from app
            if not self.input_queue.empty():
                data = self.input_queue.get()
                # Do something with it
                #print "data 0: ", data
                #print "data 0 length: ", len(data)
                try:
                    jmsg = json.loads(data.strip())
                    if jmsg['type'] == 'save_job':
                        # Update local version, then save to file
                        #print("gpio found save_job msg")
                        self.jobs.append(jmsg['data'])
                        #print("self.jobs: ", self.jobs)
                        with open(os.path.join(BREWABLE_BASE_DIR, JOB_TEMPLATES_FILE), 'w') as json_file:
                            json.dump({'job_data':self.jobs}, json_file)
                        # Return updated jobs list to client
                        jdata = json.dumps({'type':'loaded_jobs',
                                            'data':self.jobs})
                        self.output_queue.put(jdata)
                    elif jmsg['type'] == 'load_jobs':
                        #print("gpio found load_jobs msg")
                        jdata = json.dumps({'type':'loaded_jobs',
                                            'data':self.jobs})
                        self.output_queue.put(jdata)
                        print("self.jobs: ", self.jobs)
                    elif jmsg['type'] == 'delete_job':
                        # First check if index in range?
                        del self.jobs[jmsg['data']['index']]

                        # Save result
                        with open(os.path.join(BREWABLE_BASE_DIR, JOB_TEMPLATES_FILE), 'w') as json_file:
                            json.dump({'job_data':self.jobs}, json_file)
                        # Return updated jobs list to client
                        jdata = json.dumps({'type':'loaded_jobs',
                                            'data':self.jobs})
                        self.output_queue.put(jdata)
                    elif jmsg['type'] == 'run_job':
                        #print("gpio received run_job msg");
                        # First check that this job isn't already running
                        isRunning = False
                        for job in self.runningJobs:
                            if job.name() == self.jobs[jmsg['data']['index']]['name']:
                                print "Job %s already running" % job.name()
                                isRunning = True
                        if not isRunning:
                            if not self.setupJobRun(jmsg['data']['index']):
                                # Need to send msg back to client here!
                                print "Couldn't start job"
                            else:
                                print "Started job ", jmsg['data']['index']
                        # Do an initial processing of the new job
                        for job in self.runningJobs:
                            if job.name() == self.jobs[jmsg['data']['index']]['name']:
                                job.process()
                        # Send updated list of running jobs
                        self.loadRunningJobs(jmsg)
                    elif jmsg['type'] == 'load_startup_data':
                        self.loadStartupData(jmsg)
                    elif jmsg['type'] == 'load_running_jobs':
                        self.loadRunningJobs(jmsg)
                    elif jmsg['type'] == 'stop_running_job':
                        self.stopRunningJob(jmsg)
                    elif jmsg['type'] == 'remove_running_job':
                        self.removeRunningJob(jmsg)
                    elif jmsg['type'] == 'save_running_job':
                        self.saveRunningJob(jmsg)
                    elif jmsg['type'] == 'load_saved_jobs':
                        self.loadSavedJobs(jmsg)
                    elif jmsg['type'] == 'load_saved_job_data':
                        self.loadSavedJobData(jmsg)
                    elif jmsg['type'] == 'archive_saved_job':
                        self.archiveSavedJob(jmsg)
                    elif jmsg['type'] == 'delete_saved_job':
                        self.deleteSavedJob(jmsg)
                    elif jmsg['type'] == 'save_profiles':
                        with open(os.path.join(BREWABLE_BASE_DIR, PROFILE_DATA_FILE), 'w') as json_file:
                            json.dump({'profiles_data':jmsg['data']}, json_file)
                    elif jmsg['type'] == 'load_profiles':
                        self.loadProfiles(jmsg)

                    elif jmsg['type'] == 'list_sensors':
                        sensor_ids = []
                        for sensor in self.sensorDevices:
                            sensor_ids.append(sensor.getId())
                        jdata = json.dumps({'type':'sensor_list',
                                            'data':sensor_ids})
                        self.output_queue.put(jdata)
                    elif jmsg['type'] == 'list_relays':
                        relay_ids = []
                        for i in range(self.relay.device_count()):
                            relay_ids.append('Relay {:02}'.format(i+1))
                        jdata = json.dumps({'type':'relay_list',
                                            'data':relay_ids})
                        self.output_queue.put(jdata)
                    elif jmsg['type'] == 'config_change':
                        self.configChange(jmsg)
                    elif jmsg['type'] == 'CMD':
                        # With a CMD type, the data field is an array whose
                        # first element is the command,
                        # the remaining elements are the command's args
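                        # e.g. {'type': 'CMD', 'data': ['toggle_relay', 1]}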
                        command = jmsg['data']
                        print "running command: ", command
                        self.run_command(command)
                except Exception as e:
                    print "Non json msg: ", data, e


            # Data/info from sensor device
            data = "generic %d" % count

            # Send update of sensors & relay state
            self.liveUpdate()

            # Send a heartbeat (in absence of any sensors)
            if len(self.sensorDevices) == 0:
                jdata = json.dumps({'type':'heartbeat','data':count})
                self.output_queue.put(jdata)

            # Check/process any running jobs
            if _TESTING_:
                for job in self.runningJobs:
                    job.process()
            else:
                if count % 20  == 0:
                    for job in self.runningJobs:
                        job.process()

            count += 1

            #time.sleep(1)


    def deleteSavedJob(self, jmsg):
        print "deleteSavedJob() ",  jmsg['data']['jobName'], jmsg['data']['instance']
        historyFileName = jmsg['data']['jobName'] + '-' + jmsg['data']['instance'] + '.txt'
        historyFilePath = os.path.join(BREWABLE_BASE_DIR, JOB_HISTORY_DIR, historyFileName)
        #print "Trying to remove ", historyFilePath
        try:
            os.remove(historyFilePath)
            jdata = json.dumps({'type':'removed_saved_job',
                                'data':{'jobName':jmsg['data']['jobName'],'instance':jmsg['data']['instance']}})
            self.output_queue.put(jdata)
        except Exception as e:
            print "archiveSavedJob() ERROR: ", e

    def archiveSavedJob(self, jmsg):
        print "archiveSavedJob() ",  jmsg['data']['jobName'], jmsg['data']['instance']
        historyFileName = jmsg['data']['jobName'] + '-' + jmsg['data']['instance'] + '.txt'
        from_path = os.path.join(BREWABLE_BASE_DIR, JOB_HISTORY_DIR, historyFileName)
        to_path = os.path.join(BREWABLE_BASE_DIR, JOB_ARCHIVE_DIR, historyFileName)
        #print "Trying to move ", from_path, to_path
        try:
            os.rename(from_path, to_path)
            jdata = json.dumps({'type':'archived_job',
                                'data':{'jobName':jmsg['data']['jobName'],'instance':jmsg['data']['instance']}})
            self.output_queue.put(jdata)
        except Exception as e:
            print "archiveSavedJob() ERROR: ", e


    def configChange(self, jmsg):
        print "configChange() ", jmsg['data'], jmsg['data'].keys()
        for k in jmsg['data'].keys():
            try:
                print "Processing config change for: ", k,jmsg['data'][k]
                if k == 'multiSensorMeanWeight':
                    print "Changing config item %s to %d" % (k,int(jmsg['data'][k]))
                    self.configuration['multiSensorMeanWeight'] = int(jmsg['data'][k])
                    print "Changed multiSensorMeanWeight configuration to: ", self.configuration['multiSensorMeanWeight']
                elif k == 'sensorFudgeFactor':
                    print "Changing config item %s to %f" % (k,float(jmsg['data'][k]))
                    for sensor in self.sensorDevices:
                        sensor.set_fudge(float(jmsg['data'][k]))
                    self.configuration['sensorFudgeFactor'] = self.sensorDevices[0].get_fudge()
                    print "Changed fudge configuration to: ", self.configuration['sensorFudgeFactor']
                elif k == 'relayDelayPostON':
                    # {'on_time':180, 'off_time':480, 'isset':False}
                    print "Changing config item %s to %d" % (k,int(jmsg['data'][k]))
                    for id in range(self.relay.device_count()):
                        self.relay.setDelaySetValue(id+1, 'on_time', int(jmsg['data'][k]))
                    self.configuration['relayDelayPostON'] = self.relay.getDelaySetValue(1, 'on_time')
                    print "Changed relay on_time configuration to: ", self.configuration['relayDelayPostON']
                elif k == 'relayDelayPostOFF':
                    print "Changing config item %s to %d" % (k,int(jmsg['data'][k]))
                    for id in range(self.relay.device_count()):
                        self.relay.setDelaySetValue(id+1, 'off_time', int(jmsg['data'][k]))
                    self.configuration['relayDelayPostOFF'] = self.relay.getDelaySetValue(1, 'off_time')
                    print "Changed relay off_time configuration to: ", self.configuration['relayDelayPostOFF']
                else:
                    print "Unknown configuration item: ", k
                with open(USER_CONFIG_FILE, 'w') as json_file:
                    json.dump(self.configuration, json_file)
            except Exception as e:
                print "Unable to process configChange for item:", k
                print e

    def loadStartupData(self, jmsg):
        jdata = json.dumps({'type':'startup_data',
                            'data':{'testing':_TESTING_,
                                    'config':self.configuration,
                                    'the_end':'orange' }})
        self.output_queue.put(jdata)

        self.loadRunningJobs(jmsg)
        self.loadSavedJobs(jmsg)
        self.loadProfiles(jmsg)

    def loadProfiles(self, jmsg):
        try:
            with open(os.path.join(BREWABLE_BASE_DIR, PROFILE_DATA_FILE)) as json_file:
                json_data = json.load(json_file)
                #print(json_data['profiles_data'])
                jdata = json.dumps({'type':'loaded_profiles',
                                    'data':json_data['profiles_data']})
        except:
            print "Couldn't load profile data file"
            jdata = json.dumps({'type':'loaded_profiles', 'data':[]})
        finally:
            self.output_queue.put(jdata)

    def loadRunningJobs(self, jmsg):
        if jmsg['type'] == 'load_running_jobs':
            print "Send running_jobs list after request to LOAD_RUNNING_JOBS"
        elif jmsg['type'] == 'run_job':
            print "Send running_jobs list after request to RUN_JOBS"
        elif jmsg['type'] == 'load_startup_data':
            print "Send running_jobs list after request to LOAD_STARTUP_DATA"
        else:
            print "Send running_jobs list after UNKNOWN request"
        # We send "public" job info (since client doesn't
        # need stuff like local file name etc.
        # Also send collected status reports
        # (history without "private" header)
        if len(self.runningJobs) > 0:
            running_jobs = []
            for j in self.runningJobs:
                if jmsg['type'] == 'run_job':
                    j.process()
                #job_info = j.jobInfo()
                #job_info['history'] = j.history[1:]
                #print "list running job: ", job_info
                job_info = {}
                job_info['header'] = j.jobInfo()
                job_info['updates'] = j.history[1:]
                running_jobs.append(job_info)
            print "running_jobs list: ", running_jobs
            jdata = json.dumps({'type':'running_jobs',
                                'data':running_jobs})
            self.output_queue.put(jdata)
        else:
            print "No jobs running"
            jdata = json.dumps({'type':'running_jobs',
                                'data':[]})
            self.output_queue.put(jdata)

    def loadSavedJobData(self, jmsg):
        #print "Rcvd request to LOAD SAVED JOB DATA ", jmsg['data']['fileName'] + '.txt'

        fileName = jmsg['data']['fileName'] + '.txt'
        #print "fileName: ", fileName
        filepath = os.path.join(BREWABLE_BASE_DIR, JOB_HISTORY_DIR, fileName)
        #print "loading data from file: ", filepath
        try:
            with open(filepath) as f:
                lines = [json.loads(line) for line in f]
            print "lines: ", lines
            jdata = json.dumps({'type':'saved_job_data',
                                'data':{'header':lines[0:1],'updates':lines[1:]}})
        except:
            print "Couldn't load saved job data"
            jdata = json.dumps({'type':'saved_job_data',
                                'data':[]})
        finally:
            self.output_queue.put(jdata)


    def loadSavedJobs(self, jmsg):
        #print "Rcvd request to LOAD SAVED JOBS"
        goodhistoryfiles = []
        historyfiles = []
        try:
            historyfiles = [f for f in os.listdir(os.path.join(BREWABLE_BASE_DIR, JOB_HISTORY_DIR))
                            if os.path.isfile(os.path.join(BREWABLE_BASE_DIR, JOB_HISTORY_DIR, f))]
        except Exception as e:
            print "error loadSavedJobs(); ", e
        for file in historyfiles:
            try:
                lastline = json.loads(deque(open(os.path.join(BREWABLE_BASE_DIR, JOB_HISTORY_DIR, file)), 1).pop())
                if lastline['running'] == 'saved':
                    goodhistoryfiles.append(file)
            except Exception as e:
                print "wrong file format in loadSavedJobs(); ", file, e
        print "good history files: ", goodhistoryfiles
        jdata = json.dumps({'type':'saved_jobs_list',
                            'data':{'historyfiles':goodhistoryfiles}})
        self.output_queue.put(jdata)
        #print "file list sent: ", jdata

    def removeRunningJob(self, jmsg):
        #print "Rcvd request to REMOVE JOB"
        jobName = jmsg['data']['jobName']
        longName = jmsg['data']['longName']
        self.stopRunningJob(jmsg)

        # Whether previously running or not, it should now be in stoppedJobs
        job_found = False
        for i in range(len(self.stoppedJobs)):
            if self.stoppedJobs[i].name() == jobName:
                job_found = True
                del self.stoppedJobs[i]
                #print "Job %s removed from stoppedJobs" % jobName
                jdata = json.dumps({'type':'removed_job',
                                    'data':{'jobName':jobName}})
                self.output_queue.put(jdata)
                break
        if not job_found:
            # This shouldn't be possible
            print "Job to remove NOT FOUND! ", jobName

        filename = longName + '.txt'
        filepath = os.path.join(BREWABLE_BASE_DIR, JOB_RUN_DIR, filename)
        #print "Deleting filepath: ", filepath
        if os.path.exists(filepath):
            try:
                os.remove(filepath)
            except OSError as e:
                print "OSError", e
        else:
            print "Can't find file to delete: ", filepath
        # LATER - could clear out other files?
        # - need to check if still running, stopped etc.

    def saveRunningJob(self, jmsg):
        print "Rcvd request to SAVE RUNNING JOB", jmsg['data']['jobName']
        jobName = jmsg['data']['jobName']
        self.stopRunningJob(jmsg, 'save')

        # Whether previously running or not, it should now be in stoppedJobs
        job_found = False
        for i in range(len(self.stoppedJobs)):
            print "checking ", self.stoppedJobs[i].name()
            if self.stoppedJobs[i].name() == jobName:
                job_found = True
                self.stoppedJobs[i].stop('saved')
                historyFileName = self.stoppedJobs[i].historyFileName
                from_path = os.path.join(BREWABLE_BASE_DIR, JOB_RUN_DIR, historyFileName)
                to_path = os.path.join(BREWABLE_BASE_DIR, JOB_HISTORY_DIR, historyFileName)
                try:
                    os.rename(from_path, to_path)
                    jdata = json.dumps({'type':'saved_job',
                                        'data':{'jobName':jobName}})
                    self.output_queue.put(jdata)
                except Exception as e:
                    print "saveRunningJob() ERROR: ", e
                #print "Moved ", from_path, to_path

        if not job_found:
            jdata = json.dumps({'type':'error_save_running_job',
                                'data':{'jobName':jobName}})
            self.output_queue.put(jdata)

        self.loadSavedJobs({'type':'load_saved_jobs','data':[]})

    def stopRunningJob(self, jmsg, stopStatus='stop'):
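        # stopStatus is forwarded to JobProcessor.stop(): 'stop' by default,
        # 'save' when called from saveRunningJob()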
        print "Rcvd request to STOP RUNNING JOB", jmsg['data']['jobName']
        job_found = False
        for job in self.runningJobs:
            print "Trying: ", job.name()
            if job.name() == jmsg['data']['jobName']:
                job_found = True
                print "Job %s running - ready to stop" % job.name()
                while job.processing:
                    # Wait for any current processing to complete
                    print "spinning ..."
                    time.sleep(0.05)
                job.stop(stopStatus)
                break
        if not job_found:
            # Perhaps the job was already stopped?
            for job in self.stoppedJobs:
                if job.name() == jmsg['data']['jobName']:
                    #print "Job %s already stopped" % job.name()
                    jdata = json.dumps({'type':'stopped_job',
                                        'data':{'jobName':job.name()}})
                    self.output_queue.put(jdata)
                    break

    def liveUpdate(self):
        # Data/info from sensor devices
        try:
            #sensor_state = list({'sensorId':sensor.getId(),'temperature':st.get_temp(sensor.getId())} for sensor in self.sensorDevices)
            sensor_state = list({'sensorId':sensor.getId(),'temperature':sensor.get_temp()} for sensor in self.sensorDevices)
        except:
            sensor_state = []
        #print "sensor_state: ", sensor_state

        # Data/info from relay device
        try:
            relay_state = list((self.relay.isOn(i+1),self.relay.isDelayed(i+1)) for i in range(self.relay.device_count()))
        except:
            relay_state = []
        #print "relay_state: ", relay_state

        # Send live_update (= sensor_state + relay_state)
        jdata = json.dumps({'type':'live_update',
                            'sensor_state':sensor_state,
                            'relay_state':relay_state})
        self.output_queue.put(jdata)
        #print "live_update jdata: ", jdata

    def setupJobRun(self, jobIndex):
        try:
            self.runningJobs.append(JobProcessor(copy.deepcopy(self.jobs[jobIndex]),
                                                 self.output_queue,
                                                 self.runningJobs, self.stoppedJobs,
                                                 self.relay, self.sensorDevices,
                                                 self.configuration))
            return True
        except Exception as e:
            print "JOB CREATE FAIL!", e
            return False


    def relay_test(self):
        #print "Relay count = ", self.relay.device_count()
        for i in range(self.relay.device_count()):
            self.relay.ON(i+1)
            time.sleep(1)
        for i in range(self.relay.device_count()):
            self.relay.OFF(i+1)
            time.sleep(1)
        self.relay.ALLON()
        time.sleep(1)
        self.relay.ALLOFF()

    def toggle_relay_command(self, channel):
        if self.relay.isOn(channel):
            #print "relay %d is already on; switching off" % channel
            self.relay.OFF(channel)
        else:
            #print "relay %d is off; switching on" % channel
            self.relay.ON(channel)
        self.liveUpdate()
        #print "STATE: ", self.relay.state()
        #if self.relay.isOn(channel):
        #    data = 'relay ' + str(channel) + ' now ON'
        #else:
        #    data = 'relay ' + str(channel) + ' now OFF'
        #jdata = json.dumps({'type':'info',
        #                    'data':data})
        #self.output_queue.put(jdata)
        #print "STATE: ", jdata

    def run_command(self, command):
        if command[0] == 'toggle_relay':
            self.toggle_relay_command(command[1])
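GPIOProcess talks to the client side only through its two queues: JSON requests go in on input_queue and JSON replies and live updates come back on output_queue. A minimal, hypothetical launcher (assumed usage, not part of the original example) might look like this:

# Minimal, hypothetical launcher for the class above (assumed usage; not part
# of the original example): it wires up the two queues, starts the process,
# sends one JSON request, then prints whatever comes back for a few seconds.
if __name__ == '__main__':
    input_queue = multiprocessing.Queue()
    output_queue = multiprocessing.Queue()

    gpio = GPIOProcess(input_queue, output_queue)
    gpio.daemon = True
    gpio.start()

    # Ask for the startup snapshot (config, running jobs, saved jobs, profiles)
    input_queue.put(json.dumps({'type': 'load_startup_data', 'data': []}))

    deadline = time.time() + 5
    while time.time() < deadline:
        if not output_queue.empty():
            print output_queue.get()
        else:
            time.sleep(0.1)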