Example #1
0
def processserialdata(data):
    from iiutilities.datalib import parseoptions
    datadicts = []
    messages = []
    # try:
    # Break into chunks

    print('processing data: ')
    print(data)
    print('end data')

    # RF Message (deprecated, all are of serial form below)
    if data.strip().find('BEGIN RECEIVED') >= 0:
        split1 = data.strip().split('BEGIN RECEIVED')
        for split in split1:
            if split.find('END RECEIVED') >= 0:
                message = split.split('END RECEIVED')[0].replace('\x00', '')
                # print(message)
                messages.append(message.strip())
                try:
                    datadict = parseoptions(message)
                except:
                    print('error parsing message: ' + message)
                else:
                    # print(datadict)
                    datadicts.append(datadict)
    # Serial message
    else:
        messagesplit = data.strip().split('\n')
        # print(messagesplit)
        datadicts = []
        for entry in messagesplit:
            # print(entry)
            datadict = parseoptions(entry)
            # print('datadict')
            # print(datadict)
            # only keep entries that contain at least one recognized field
            if 'node' in datadict or 'cmd' in datadict:
                datadicts.append(datadict)
                messages.append(entry)
    # except:
    #     print('there was an error processing the message')
    #     return
    # else:
    return datadicts, messages
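A minimal usage sketch for the serial branch above. The payload format and the stand-in parseoptions are assumptions inferred from how the function uses them (comma-separated key:value pairs, one message per line); the real implementation lives in iiutilities.datalib.

# Hypothetical serial payload: one 'key:value,...' message per line.
sample_data = 'node:2,cmd:lr\nnode:3,RX_RSSI:-54\n'

def parseoptions(optionstring):
    # Stand-in with the behavior assumed above: 'a:1,b:2' -> {'a': '1', 'b': '2'}.
    options = {}
    for pair in optionstring.split(','):
        if ':' in pair:
            key, value = pair.split(':', 1)
            options[key.strip()] = value.strip()
    return options

# With the real iiutilities package importable:
# datadicts, messages = processserialdata(sample_data)
# datadicts -> [{'node': '2', 'cmd': 'lr'}, {'node': '3', 'RX_RSSI': '-54'}]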
Example #2
0
def handle_unit_tests(**kwargs):

    settings = {
        'notifications':[]
    }
    settings.update(kwargs)

    from iiutilities import utility
    import cupidunittests
    import pilib
    from iiutilities import datalib

    import socket
    hostname = socket.gethostname()   # used in the notification subject below

    system_database = pilib.dbs.system
    notifications_database = pilib.dbs.notifications

    unittestresults = cupidunittests.runalltests()

    # print('** Unit TEST RESULTS ** ')
    # print(unittestresults['totalerrorcount'],unittestresults['totalfailurecount'])
    if unittestresults['totalerrorcount'] > 0 or unittestresults['totalfailurecount'] > 0:
        unitnotify = next((item for item in settings['notifications'] if item['item'] == 'unittests' and int(item['enabled'])),
                          None)

        if unitnotify:
            options = datalib.parseoptions(unitnotify['options'])
            if 'type' in options:
                if options['type'] == 'email' and 'email' in options:
                    currenttime = datalib.gettimestring()
                    lastnotificationtime = unitnotify['lastnotification']
                    # default
                    frequency = 600
                    if 'frequency' in options:
                        try:
                            frequency = float(options['frequency'])
                        except:
                            pass

                    elapsedtime = datalib.timestringtoseconds(currenttime) - datalib.timestringtoseconds(
                        lastnotificationtime)
                    # print(elapsedtime,frequency)
                    if elapsedtime > frequency:
                        # Queue a message indicating we had to restart the systemstatus daemon
                        message = 'CuPID has failed unittests. Details follow:\r\n\r\n'
                        message += unittestresults['stringresult'].replace('\'', '"')
                        # message += '\r\n\r\n'
                        # message +=

                        subject = 'CuPID : ' + hostname + ' : unittests'
                        notifications_database.insert('queuednotifications',
                                                      {'type': 'email', 'message': message,
                                                       'options': 'email:' + options['email'] + ',subject:' + subject,
                                                       'queuedtime': currenttime})
                        system_database.set_single_value('notifications', 'lastnotification', currenttime,
                                                         condition="item='unittests'")
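The frequency gate above only queues a new email when enough time has passed since 'lastnotification'. Below is a standalone sketch of that gate using stdlib datetime; the real code uses datalib.gettimestring and datalib.timestringtoseconds, and the timestamp format here is an assumption.

from datetime import datetime

def should_notify(last_notification, frequency_seconds, fmt='%Y-%m-%d %H:%M:%S'):
    # True when there is no previous notification or it is older than frequency_seconds.
    if not last_notification:
        return True
    elapsed = (datetime.now() - datetime.strptime(last_notification, fmt)).total_seconds()
    return elapsed > frequency_seconds

# e.g. should_notify('2017-01-01 00:00:00', 600) is True once ten minutes have passed.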
Example #3
0
def processnotification(notification):
    from iiutilities import datalib
    from iiutilities import utility
    from iiutilities.netfun import pingstatus

    senttime = datalib.gettimestring()
    result = {'status': 1, 'senttime': senttime}
    if notification['type'] == 'email':

        # Check to make sure we're online.
        pingresult = pingstatus()
        if not pingresult['status']:

            utility.log(dirs.logs.notifications,
                        'WAN access is ok, so processing notification')
            options = datalib.parseoptions(notification['options'])
            message = notification['message']
            if 'subject' in options:
                subject = options['subject']
            else:
                subject = 'CuPID Notification Email'

            message += '\r\n\r\n'
            message += 'Message queued:\t ' + notification[
                'queuedtime'] + '\r\n'
            message += 'Message sent:\t ' + senttime + '\r\n'

            if 'email' in options:
                try:
                    email = options['email']
                    actionmail = utility.gmail(message=message,
                                               subject=subject,
                                               recipient=email)
                    actionmail.send()
                except:
                    pass
                else:
                    result['status'] = 0
        else:
            utility.log(
                dirs.logs.notifications,
                'WAN access does not appear to be ok. Status is: ' +
                str(pingresult['status']))

    return result
Example #4
0
def processnotification(notification):
    from iiutilities import datalib
    from iiutilities import utility
    from iiutilities.netfun import pingstatus

    senttime = datalib.gettimestring()
    result = {'status':1, 'senttime':senttime}
    if notification['type'] == 'email':

        # Check to make sure we're online.
        pingresult = pingstatus()
        if not pingresult['status']:

            utility.log(dirs.logs.notifications, 'WAN access is ok, so processing notification')
            options = datalib.parseoptions(notification['options'])
            message = notification['message']
            if 'subject' in options:
                subject = options['subject']
            else:
                subject = 'CuPID Notification Email'

            message += '\r\n\r\n'
            message += 'Message queued:\t ' + notification['queuedtime'] + '\r\n'
            message += 'Message sent:\t ' + senttime + '\r\n'

            if 'email' in options:
                try:
                    email = options['email']
                    actionmail = utility.gmail(message=message, subject=subject, recipient=email)
                    actionmail.send()
                except:
                    pass
                else:
                    result['status'] = 0
        else:
            utility.log(dirs.logs.notifications, 'WAN access does not appear to be ok. Status is: ' + str(pingresult['status']))

    return result
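The notification dict consumed by processnotification mirrors the rows queued into 'queuednotifications' in the other examples. A hypothetical call, assuming the iiutilities package and the module-level dirs object used above are available; the addresses and times are made up.

queued = {
    'type': 'email',
    'message': 'CuPID has failed unittests. Details follow:\r\n\r\n...',
    'options': 'email:user@example.com,subject:CuPID : cupid1 : unittests',
    'queuedtime': '2017-01-01 00:00:00',
}
# result = processnotification(queued)
# if not result['status']:
#     # status 0 means the send succeeded; record result['senttime'] against the queued row
#     pass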
Example #5
0
def generatehamachipage(hamachidata=None, path=None):
    from iiutilities.netfun import gethamachidata
    from iiutilities.datalib import parseoptions, gettimestring

    if not hamachidata:
        hamachidata = gethamachidata()

    if path:
        file = open(path, 'w')
        htmlstring = (
            '<!DOCTYPE html>' + '<html>' + '<head>' +
            '<title>Hamachi Remotes Home</title>' +
            '<meta name="viewport" content="width=device-width, initial-scale=1">'
            + '<link rel="stylesheet" href="jqm/themes/base.css" />' +
            '<link rel="stylesheet" href="jqm/themes/jquery.mobile.icons.min.css" />'
            +
            '<link rel="stylesheet" href="jqm/jquery.mobile.custom.structure.min.css" />'
            + '<!--jQuery Mobile is 1.4.5-->' +
            '<script src="js/jquery-1.11.1.js"></script>' +
            '<script src="jqm/jquery.mobile.custom.js"></script>' + '<style>' +
            '.online {' + 'background-color:#bbffbb' + '}' + '.offline {' +
            'background-color:#ffbbbb' + '}' + '</style>' + '</head>' +
            '<body>' +
            '<div data-role="page" id="demo-page" class="my-page" data-theme="d">'
            + '<div role="main" class="ui-content">')
        htmlstring += '<ul data-role="listview" data-inset=true><li data-role="list-divider">'
        htmlstring += 'Updated : ' + gettimestring() + '</li></ul>'

        file.write(htmlstring)
    for network in hamachidata:
        if path:
            htmlstring = ('<ul data-role="listview" data-inset="true">' +
                          '<li data-role="list-divider">' + network['name'] +
                          ' : ' + network['id'] + '</li>')

        for client in network['clientlist']:
            # print(client['name'] + ' : ' + client['hamachiip'])
            if path:
                htmlstring += '<li>'
                htmlstring += '<fieldset class="ui-grid-a"><div class="ui-block-a" style="width:50%">'
                # htmlstring += client['name'] + ' : ' + client['hamachiip']
                htmlstring += '<a href="https://' + client[
                    'hamachiip'] + '/">' + client['name'] + '</a> : ' + client[
                        'hamachiip']

                options = parseoptions(client['options'])

                htmlstring += '</div>'

                if client['onlinestatus']:
                    htmlstring += '<div class="online" style="width:60px; float:right; text-shadow:none; text-align:center; border-radius:0.3em; border-width:1.2px; border-style:solid; border-color:#333333">Online</div>'
                else:
                    htmlstring += '<div class="offline" style="width:60px; float:right; text-shadow:none; text-align:center; border-radius:0.3em; border-width:1.2px; border-style:solid; border-color:#333333">Offline</div>'

                if 'monitor' in options:
                    if options['monitor'] == '1':
                        htmlstring += '<div class="online" style="width:70px; float:right; text-align:center; border-radius:0.4em; border-width:1px; border-style:solid; border-color:#333333; margin-right:10px">Daemon</div>'

                htmlstring += '</fieldset></li>\n'

        if path:
            htmlstring += '</ul>'
            file.write(htmlstring)

    if path:
        htmlstring = '</div></div>\n'
        file.write(htmlstring)
        file.close()
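The structure generatehamachipage expects from gethamachidata() can be read off the loops above: a list of networks, each with a clientlist of dicts carrying name, hamachiip, onlinestatus and an options string. A hypothetical call with hand-built data; every value below is made up.

sample_networks = [{
    'name': 'homenet',
    'id': '123-456-789',
    'clientlist': [{
        'name': 'cupid1',
        'hamachiip': '25.1.2.3',
        'onlinestatus': True,
        'options': 'monitor:1',
    }],
}]
# generatehamachipage(hamachidata=sample_networks, path='/var/www/html/hamachi.html')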
Example #6
0
def handle_modify_channel_alarm(d, output):

    import pilib
    from iiutilities import dblib

    output['message'] += 'modifychannelalarm keyword found. '
    required_keywords = ['database', 'valuename', 'value', 'actionname']
    if not all(keyword in d for keyword in required_keywords):
        output['message'] += 'Not all required keywords were found: ' + str(required_keywords) + '. '
        return

    allowed_valuenames = ['enabled', 'PV_low', 'PV_high', 'actiondetail']

    if d['valuename'] not in allowed_valuenames:
        output['message'] += 'Selected valuename is not allowed for this action. '
        return

    dbpath = pilib.dbnametopath(d['database'])
    database = pilib.cupidDatabase(dbpath)

    action_condition = '"name"=\'' + d['actionname'] + "'"
    if d['valuename'] in ['enabled','actiondetail']:

        try:
            if d['valuename'] == 'enabled':
                set_value = int(d['value'])
            else:
                set_value = str(d['value'])
        except:
            output['message'] += 'Missing keys or bad value conversion. '
        else:
            output['message'] += 'Setting value ' + str(set_value) + ' with condition ' + action_condition + '. '
            try:
                database.set_single_value('actions',d['valuename'],set_value, action_condition)
            except:
                output['message'] += 'Query error. '
            else:
                output['message'] += 'That appears to have worked. '

    elif d['valuename'] in ['PV_high', 'PV_low']:
        """
        These values have to be set in the options field.
        So we have to pull it out as a json string, put it into a dict, modify values, and then put it back as a string
        """
        from iiutilities.datalib import parseoptions, dicttojson

        optionstring = database.get_single_value('actions','actiondata',action_condition)
        output['message'] += 'Existing options: ' + optionstring + '. '
        options = parseoptions(optionstring)

        try:
            set_value = float(d['value'])
        except:
            output['message'] += 'Bad value conversion. '
            return

        if d['valuename'] not in options:
            output['message'] += 'Valuename does not exist in options. Creating. '

        options[d['valuename']] = set_value

        # Now rewrite to actions
        optionstring = dicttojson(options)
        output['message'] += 'New optionstring: ' + optionstring + '. '
        try:
            database.set_single_value('actions','actiondata',optionstring, action_condition)
        except:
            output['message'] += 'Query error. '
        else:
            output['message'] += 'That appears to have worked. '

    return
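The PV_low/PV_high branch follows the read-modify-write pattern its docstring describes: parse the stored option string into a dict, change one key, and serialize it back. A stdlib-only sketch of that round trip, assuming the stored actiondata is JSON as dicttojson suggests; the column value is hypothetical.

import json

stored = '{"PV_low": 5.0, "PV_high": 25.0}'  # hypothetical actiondata column value
options = json.loads(stored)                 # pull it out into a dict
options['PV_high'] = 30.0                    # modify the one value we were asked to set
stored = json.dumps(options)                 # put it back as a string for the actions table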
Example #7
0
def handle_modify_channel(d, output):
    import pilib

    """ 
    This is being replaced by class-based functions
    """

    required_keywords = ['database', 'valuename', 'value', 'channelname']
    if not all(keyword in d for keyword in required_keywords):
        output['message'] += 'Not all required keywords were found: ' + str(required_keywords) + '. '
        return

    allowed_valuenames = ['enabled', 'setpointvalue']

    if d['valuename'] not in allowed_valuenames:
        output['message'] += 'Selected valuename is not allowed for this action. '
        return

    dbpath = pilib.dbnametopath(d['database'])
    database = pilib.cupidDatabase(dbpath)

    channel_condition = '"name"=\'' + d['channelname'] + "'"
    if d['valuename'] in allowed_valuenames:

        try:
            if d['valuename'] == 'enabled':
                set_value = int(d['value'])
            else:
                set_value = float(d['value'])
        except:
            output['message'] += 'Missing keys or bad value conversion. '
            return


        """
        For a channel, we will check type. If remote, we set as pending and then initiate processing the channel.

        """
        from iiutilities.datalib import parseoptions, dicttojson

        # Handle error here.
        the_channel = database.read_table('channels',channel_condition)[0]

        if the_channel['type'] == 'remote':
            output['message'] += 'Processing remote channel, setting pending value. '
            print(output['message'])

            if the_channel['pending']:
                pending = parseoptions(the_channel['pending'])
            else:
                pending = {}

            pending[d['valuename']] = set_value
            pending_string = dicttojson(pending)
            try:
                database.set_single_value('channels', 'pending', pending_string, channel_condition)
            except:
                output['message'] +=  'Query error. '
                return
            else:
                output['message'] += 'That appears to have worked. Now running channel processing on channel. '


        else:

            output['message'] += 'Setting local setpoint value. '
            try:
                database.set_single_value('channels', 'setpointvalue', set_value, channel_condition)
            except:
                output['message'] += 'Query error. '
                return
            else:
                output['message'] += 'That appears to have worked. '

        # Process channel now
        from cupid.picontrol import process_channel
        process_channel(channel_name=d['channelname'])

        # Let's also update the input while we're at it



    return
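A hypothetical call into handle_modify_channel: d carries the required keywords the handler checks for, and output accumulates a readable trace in its 'message' field. The database and channel names below are made up.

d = {
    'database': 'controldb',      # hypothetical friendly database name
    'channelname': 'channel 1',
    'valuename': 'setpointvalue',
    'value': '65.5',
}
output = {'message': ''}
# handle_modify_channel(d, output)
# print(output['message'])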
Example #8
0
def generatehamachipage(hamachidata=None, path=None):
    from iiutilities.netfun import gethamachidata
    from iiutilities.datalib import parseoptions, gettimestring

    if not hamachidata:
        hamachidata = gethamachidata()

    if path:
        file = open(path, 'w')
        htmlstring = (
            '<!DOCTYPE html>' +
            '<html>' +
            '<head>' +
            '<title>Hamachi Remotes Home</title>' +
            '<meta name="viewport" content="width=device-width, initial-scale=1">' +
            '<link rel="stylesheet" href="jqm/themes/base.css" />' +

            '<link rel="stylesheet" href="jqm/themes/jquery.mobile.icons.min.css" />' +
            '<link rel="stylesheet" href="jqm/jquery.mobile.custom.structure.min.css" />' +

            '<!--jQuery Mobile is 1.4.5-->' +
            '<script src="js/jquery-1.11.1.js"></script>' +
            '<script src="jqm/jquery.mobile.custom.js"></script>' +

            '<style>' +
                '.online {' +
                    'background-color:#bbffbb' +
                '}' +
                '.offline {' +
                    'background-color:#ffbbbb' +
                '}' +
            '</style>' +
            '</head>' +

            '<body>' +
            '<div data-role="page" id="demo-page" class="my-page" data-theme="d">' +
                '<div role="main" class="ui-content">')
        htmlstring += '<ul data-role="listview" data-inset=true><li data-role="list-divider">'
        htmlstring += 'Updated : ' + gettimestring() + '</li></ul>'

        file.write(htmlstring)
    for network in hamachidata:
        if path:
            htmlstring = ('<ul data-role="listview" data-inset="true">' +
                    '<li data-role="list-divider">' + network['name'] + ' : ' + network['id'] + '</li>')

        for client in network['clientlist']:
            # print(client['name'] + ' : ' + client['hamachiip'])
            if path:
                htmlstring += '<li>'
                htmlstring += '<fieldset class="ui-grid-a"><div class="ui-block-a" style="width:50%">'
                # htmlstring += client['name'] + ' : ' + client['hamachiip']
                htmlstring += '<a href="https://' +  client['hamachiip'] +'/">' + client['name'] + '</a> : ' + client['hamachiip']

                options = parseoptions(client['options'])

                htmlstring+='</div>'

                if client['onlinestatus']:
                    htmlstring += '<div class="online" style="width:60px; float:right; text-shadow:none; text-align:center; border-radius:0.3em; border-width:1.2px; border-style:solid; border-color:#333333">Online</div>'
                else:
                    htmlstring += '<div class="offline" style="width:60px; float:right; text-shadow:none; text-align:center; border-radius:0.3em; border-width:1.2px; border-style:solid; border-color:#333333">Offline</div>'

                if 'monitor' in options:
                    if options['monitor'] == '1':
                        htmlstring += '<div class="online" style="width:70px; float:right; text-align:center; border-radius:0.4em; border-width:1px; border-style:solid; border-color:#333333; margin-right:10px">Daemon</div>'

                htmlstring += '</fieldset></li>\n'

        if path:
            htmlstring+='</ul>'
            file.write(htmlstring)

    if path:
        htmlstring = '</div></div>\n'
        file.write(htmlstring)
        file.close()
Example #9
0
def rundaemon(**kwargs):


    """
    First thing we are going to do is check to see if code is working. We do this first to minimize what we have to
    import to test this -- the script should not crash out before we do this.

    So we need dblib to function to read from the database to see whether we are going to email someone if things are
    broken.
    We need datalib to parse options on the notifications
    We also need utility to send an email
    """

    settings = {
        'startall':False,
        'debug':False,
        'daemon_freq': 60,
        'unit_test_frequency': 3600,  # Once per hour
        'runonce':False
    }

    settings.update(kwargs)

    import os
    FNULL = open(os.devnull, 'w')

    try:
        import socket
        hostname = socket.gethostname()
    except:
        hostname = 'unknown (?!)'

    import importlib

    try:
        import simplejson as json
    except:
        import json

    testmodules = ['iiutilities.dblib', 'iiutilities.utility', 'iiutilities.datalib', 'cupid.pilib']

    # these are the libraries we will need to send notifications that things aren't working.
    # To do this, however, we need some libraries.
    failures = ''
    for testmodule in testmodules:
        try:
            tempmodule = importlib.import_module(testmodule)
        except:
            failures += testmodule + ', '
    if failures:
        # Send an email to indicate that things are horribly broken.
        # gmail lives in iiutilities.utility; if that import itself failed above, this send will fail too.
        from iiutilities.utility import gmail
        subject = 'Hostname: ' + hostname + ' things are broken.'
        message = 'Test import of module(s) ' + failures[:-2] + ' failed. '
        em_gmail = gmail(subject=subject, message=message)
        em_gmail.send()

    from iiutilities import dblib, utility, datalib
    from cupid import pilib
    if settings['debug']:
        print('** DEBUG MODE ** ')
        pilib.set_debug()

    last_unittests = ''

    # Get notifications so we know when to notify
    system_database = pilib.dbs.system
    notification_database = pilib.dbs.notifications

    while True:

        notifications = system_database.read_table('notifications')

        currenttime = datalib.gettimestring()

        run_unit_tests = False
        if not last_unittests:
            run_unit_tests = True
        elif datalib.timestringtoseconds(currenttime) - datalib.timestringtoseconds(last_unittests) > settings['unit_test_frequency']:
            run_unit_tests = True

        if run_unit_tests:
            utility.log(pilib.dirs.logs.daemon, 'Running unit tests. ', 2, pilib.loglevels.daemon)
            handle_unit_tests(notifications=notifications)
            last_unittests = datalib.gettimestring()

        from subprocess import Popen, PIPE
        from time import sleep

        """
        Set up list of enabled statuses (whether to restart if
        we find that the process is not currently running
        from iiutilities import dblib, utility, datalib
        """

        system_status_options = system_database.read_table_row('systemstatus')[0]
        # print('systemstatusoptions')
        # print(system_status_options)

        item_enabled_dict = {'updateio':int(system_status_options['updateioenabled']),
                             'picontrol':int(system_status_options['picontrolenabled']),
                             'systemstatus':int(system_status_options['systemstatusenabled']),
                             'sessioncontrol':int(system_status_options['sessioncontrolenabled']),
                             'serialhandler':int(system_status_options['serialhandlerenabled'])
                             }

        # updateio_enabled = int(system_status_options['updateioenabled'])
        # picontrol_enabled = int(system_status_options['picontrolenabled'])
        # systemstatus_enabled = int(system_status_options['systemstatusenabled'])
        # sessioncontrol_enabled = int(system_status_options['sessioncontrolenabled'])
        # serialhandler_enabled =int( system_status_options['serialhandlerenabled'])

        # enableditemlist = [(int(updateio_enabled)), (int(picontrolenabled)), int(systemstatusenabled), int(sessioncontrolenabled), int(serialhandlerenabled)]

        # These are hard-coded and must match up for now. This should be cleaned up to be more easily modified.
        itemstatuses = utility.find_proc_statuses(pilib.daemonprocs)

        item_status_dict = {}
        for proc_name, status in zip(pilib.daemonprocnames, itemstatuses):
            item_status_dict[proc_name] = status

        """
        Here we check to see if things are running properly and not hung. First here is systemstatus
        """

        if item_enabled_dict['systemstatus'] and item_status_dict['systemstatus']['count'] == 1:
            lastsystemstatus = dblib.getsinglevalue(pilib.dirs.dbs.system, 'systemstatus', 'lastsystemstatuspoll')
            currenttime = datalib.gettimestring()

            timesincelastsystemstatus = datalib.timestringtoseconds(currenttime) - datalib.timestringtoseconds(lastsystemstatus)
            timecriterion = 90
            if timesincelastsystemstatus > timecriterion:
                utility.log(pilib.dirs.logs.daemon, 'Killing systemstatus because it has not run in ' + str(timesincelastsystemstatus) + 's', 1,pilib.loglevels.daemon)
                # utility.log(pilib.dirs.logs.system, 'Killing systemstatus because it has not run in ' + str(timesincelastsystemstatus) + 's',1,1, pilib.loglevels.system)

                killnotify = next((item for item in notifications if item['item'] == 'daemonkillproc' and int(item['enabled'])), None)
                if killnotify:
                    options = datalib.parseoptions(killnotify['options'])
                    if 'type' in options:
                        if options['type'] == 'email' and 'email' in options:
                            # Queue a message indicating we had to restart the systemstatus daemon
                            message = 'Systemstatus is being killed on ' + hostname + ' because it has not run in ' + \
                                str(timesincelastsystemstatus) + 's with a criteria of ' +  \
                                str(timecriterion) + '. This occurred at ' + currenttime
                            subject = 'CuPID : ' + hostname + ' : killnotify'
                            notification_database.insert('queuednotifications',
                                                         {'type': 'email', 'message': message,
                                                          'options': 'email:' + options['email'] + ',subject:' + subject,
                                                          'queuedtime': currenttime})

                utility.kill_proc_by_name('systemstatus')

                # Also kill hamachi, since this is almost always the culprit
                utility.kill_proc_by_name('hamachi')

        # These are hard-coded and must match up for now. This should be cleaned up to be more easily modified.
        hamachi_status = utility.find_proc_statuses(['hamachi'])[0]
        if hamachi_status['count'] > 1:
            utility.log(pilib.dirs.logs.daemon, 'Killing hamachi with proc count of {}'.format(hamachi_status['count']), 0, pilib.loglevels.daemon)
            utility.kill_proc_by_name('hamachi')


        # Set system message
        systemstatusmsg = ''
        for name in pilib.daemonprocnames:
            systemincmessage = name + ' - Enabled: ' + str(item_enabled_dict[name]) + ' Status: ' + str(item_status_dict[name]['count']) + '. '
            systemstatusmsg += systemincmessage
            utility.log(pilib.dirs.logs.daemon, 'Item status message: ' + systemincmessage, 0, pilib.loglevels.daemon)

        system_database.set_single_value('systemstatus', 'systemmessage', systemstatusmsg)

        # Set up list of itemnames in the systemstatus table that
        # we assign the values to when we detect if the process
        # is running or not

        for name, process in zip(pilib.daemonprocnames, pilib.daemonprocs):

            # set status
            if item_status_dict[name]['count'] == 1:
                # Set status variable by name. This is static based on schema
                system_database.set_single_value('systemstatus', name + 'status', 1)
                if pilib.loglevels.daemon > 0:
                    utility.log(pilib.dirs.logs.daemon, 'Process is running: ' + pilib.dirs.baselib + process, 4, pilib.loglevels.daemon)

            elif item_status_dict[name]['count'] > 1:
                # multiple instances are running. This is bad.
                system_database.set_single_value('systemstatus', name + 'status', 0)
                if pilib.loglevels.daemon > 0:
                    utility.log(pilib.dirs.logs.daemon, 'Multiple instances of process {} are running ({}): '.format(pilib.dirs.baselib + process, item_status_dict[name]['count']), 2,
                                pilib.loglevels.daemon)

                utility.kill_proc_by_name(process)

            # Now fire up if we need to.
            if item_status_dict[name]['count'] != 1:
                system_database.set_single_value('systemstatus', name + 'status', 0)
                if pilib.loglevels.daemon > 0:
                    utility.log(pilib.dirs.logs.daemon, 'Process is not running: ' + pilib.dirs.baselib + process, 2, pilib.loglevels.daemon)

                # run if set to enable
                if item_enabled_dict[name]:
                    # print(pilib.dirs.baselib + pilib.daemonprocs[index])
                    if pilib.loglevels.daemon > 0:
                        utility.log(pilib.dirs.logs.daemon, 'Starting ' + pilib.dirs.baselib + process, 2, pilib.loglevels.daemon)

                    # procresult = Popen([pilib.dirs.baselib + process], stdout=PIPE, stderr=PIPE)
                    procresult = Popen([pilib.dirs.baselib + process, '&'], stdout=FNULL, stderr=FNULL)
                    # if pilib.loglevels.daemon > 0:
                    #     pilib.writedatedlogmsg(pilib.dirs.logs.daemonproc, procresult.stdout.read())



        # Time to let things start up
        sleep(3)

        # Refresh after set
        itemstatuses = utility.find_proc_statuses(pilib.daemonprocs)
        item_status_dict = {}
        for name, status in zip(pilib.daemonprocnames, itemstatuses):
            item_status_dict[name] = status

        for name in pilib.daemonprocnames:
            # set status
            if item_status_dict[name]['count'] == 1:
                system_database.set_single_value('systemstatus', name + 'status', 1)
            else:
                system_database.set_single_value('systemstatus', name + 'status', 0)

        """
        Process Actions.
        Careful here. This does not carry out things like indicators, which are set from picontrol. A bit wonky, as we
        would like the indicators to update immediately. On the other hand, we want picontrol to be the master controller
        of IO.
        """

        from cupid.actions import processactions
        utility.log(pilib.dirs.logs.daemon, 'Processing actions', 2, pilib.loglevels.daemon)
        processactions()
        utility.log(pilib.dirs.logs.daemon, 'Done processing actions', 2, pilib.loglevels.daemon)

        systemstatusmsg = ''
        for name in pilib.daemonprocnames:
            systemincmessage = name + ' - Enabled: ' + str(item_enabled_dict[name]) + ' Status: ' + json.dumps(
                item_status_dict[name]) + '. '
            systemstatusmsg += systemincmessage
            if pilib.loglevels.daemon > 0:
                utility.log(pilib.dirs.logs.daemon, 'Item status message: ' + systemincmessage, 2, pilib.loglevels.daemon)

        # print(systemstatusmsg)
        system_database.set_single_value('systemstatus', 'systemmessage', systemstatusmsg)

        # Rotate all logs
        utility.log(pilib.dirs.logs.daemon, 'Rotating logs. ')
        pilib.rotate_all_logs()

        if settings['runonce']:
            return
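The loop above only returns when 'runonce' is set, which is how a single supervised pass can be exercised from a shell or a test. A minimal invocation sketch using the settings keys defined at the top of the function.

if __name__ == '__main__':
    # One pass with verbose logging, then exit.
    rundaemon(debug=True, runonce=True)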
Example #10
0
def application(environ, start_response):
    import json
    import hashlib

    # Set top folder to allow import of modules

    import os, sys, inspect

    top_folder = \
        os.path.split(os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0, top_folder)

    import inventorylib
    from iiutilities import dblib, datalib
    from time import time

    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    request_body = environ['wsgi.input'].read(request_body_size)
    try:
        post = json.loads(request_body.decode('utf-8'))
    except:
        print('Error decoding: ')
        print(request_body.decode('utf-8'))
        post = {}

    output = {'message': ''}
    status = '200 OK'

    try:
        try:
            output['remote_ip'] = environ['HTTP_X_FORWARDED_FOR'].split(
                ',')[-1].strip()
        except KeyError:
            output['remote_ip'] = environ['REMOTE_ADDR']
    except:
        output['remote_ip'] = 'Error getting IP address'
    """
    Here we verify credentials of session data against those in the database.
    While we authenticate in the browser, this does not stop POST queries to the API without the page provided
    So we take the hpass stored in the dictionary and verify.

    * Which databases are available are stored in users table, column accesskeywords
    * Which one is currently in use is stored in table usermeta, data where user=username. data is json-encoded metadata
        pathalias field

    * What path extension this corresponds to is stored in pathaliases

    """

    # I don't think this will be used. We will get pathalias from the database. Let's deal with changing it later.

    # First, let's get our pathalias and translate to a path, using our path reloader

    # if 'pathalias' in post:
    #     output['message'] += inventorylib.reloaddatapaths(pathalias=post['pathalias'])
    # else:
    #     output['message'] += 'No pathalias found in post dictionary. '

    wsgiauth = True
    authverified = False

    if wsgiauth:

        # Verify that session login information is legit: hashed password, with salt and username, matches the
        # hash stored in the database.
        import hashlib

        safe_database = dblib.sqliteDatabase(
            inventorylib.sysvars.dirs.dbs.safe)
        if 'username' in post and post['username']:
            output['message'] += 'Session user is ' + post['username'] + '. '
        else:
            output['message'] += 'No session user found. '
            post['username'] = ''

        if post['username']:
            try:
                condition = "name='" + post['username'] + "'"
                user_data = safe_database.read_table_row(
                    'users', condition=condition)[0]
            except:
                output[
                    'message'] += 'error in user sqlite query for session user "' + post[
                        'username'] + '". '
                user_data = {'accesskeywords': 'demo', 'admin': False}
            else:
                # Get session hpass to verify credentials

                hashedpassword = post['hpass']
                hname = hashlib.new('sha1')
                hname.update(post['username'].encode('utf-8'))
                hashedname = hname.hexdigest()
                hentry = hashlib.new('md5')
                hentry.update((hashedname + inventorylib.sysvars.salt +
                               hashedpassword).encode('utf-8'))
                hashedentry = hentry.hexdigest()
                if hashedentry == user_data['password']:
                    # successful auth
                    output['message'] += 'Password verified. '
                    authverified = True
                    # output['message'] += 'accesskeywords : ' + str(userdata)
                    output['accesskeywords'] = user_data['accesskeywords']
                    if output['accesskeywords'].find(',') >= 0:
                        accesskeywords = output['accesskeywords'].split(',')
                        accesskeywords = [
                            accesskeyword.strip()
                            for accesskeyword in accesskeywords
                        ]
                    else:
                        accesskeywords = [output['accesskeywords'].strip()]

                    path_aliases = safe_database.read_table('pathaliases')

                    # Find usermeta entry and grab which database is selected. If one is not selected, update selection
                    # to first that user is allowed to access
                    try:
                        user_meta_row = safe_database.read_table_row(
                            'usermeta',
                            condition="user='" + post['username'] + "'")[0]
                    except:
                        print('error getting usermeta for username ' +
                              post['username'])
                        output[
                            'message'] += 'error getting usermeta for username ' + post[
                                'username']
                        user_meta_row = []
                        return

                    path_alias = ''
                    if not user_meta_row:
                        output[
                            'message'] += 'User meta entry not found. Attempting to create. '

                        # assign default database
                        default_database = accesskeywords[0]

                        output[
                            'message'] += 'Choosing pathalias from first in keywords: ' + default_database + '. '
                        if any(default_database == path_alias['alias']
                               for path_alias in path_aliases):
                            output[
                                'message'] += 'Verified that default alias exists in pathaliases database. '
                        else:
                            output[
                                'message'] += 'ERROR: first entry in keywords (' + default_database + ') not found in aliases. '

                        # Insert usermeta entry. This should never happen.
                        safe_database.insert(
                            'usermeta', {
                                'user': post['username'],
                                'data': 'pathalias:' + default_database
                            })
                        path_alias = default_database
                    else:
                        output[
                            'message'] += 'User meta entry found with text ' + str(
                                user_meta_row) + '. '

                        # Parse the string into json and ensure that the pathalias is in there
                        user_meta_dict = datalib.parseoptions(
                            user_meta_row['data'])
                        if 'pathalias' in user_meta_dict:
                            path_alias = user_meta_dict['pathalias']
                            output[
                                'message'] += 'pathalias found: ' + user_meta_dict[
                                    'pathalias'] + '. '

                            if any(path_alias == stored_path_alias['alias']
                                   for stored_path_alias in path_aliases):
                                output[
                                    'message'] += 'Verified that default alias exists in pathaliases database. '

                    if path_alias:
                        # reload datapaths with path alias
                        reload_message = inventorylib.reloaddatapaths(
                            pathalias=path_alias)

                        # DEFINITELY COMMENT THIS OUT FOR SECURITY SAKE (absolute paths are secret!!)
                        output['message'] += reload_message

                else:
                    # failed auth
                    output['message'] += 'Failed password check. '
        else:
            # Demo status
            authverified = True
            user_data = {'authlevel': 0}

    else:
        output['message'] += 'WSGI authorization not enabled. '

    if authverified or not wsgiauth:
        output['authorized'] = True
    else:
        output['authorized'] = False

    try:
        action = post['action']
    except KeyError:
        output['message'] += 'No action in request. '
        action = ''

    if output['authorized'] and action:
        output['action_allowed'] = inventorylib.check_action_auths(
            action, user_data['authlevel'])
    else:
        output['action_allowed'] = False

    if output['authorized'] and output['action_allowed']:

        # Stock functions
        if action == 'addeditpart':
            output['message'] += 'addpart keyword found. '
            inventorylib.addeditstockpart(post, output)
            inventorylib.calcstockfromall()
        elif action == 'copypart':
            output['message'] += 'copypart keyword found. '
            inventorylib.copystockpart(post, output)
            inventorylib.calcstockfromall()
        elif action == 'deleteparts':
            output['message'] += 'deleteparts keyword found. '
            inventorylib.deletestockparts(post, output)
            inventorylib.calcstockfromall()
        elif action == 'gettrackedpartdata':
            output['message'] += 'gettrackedpartdata keyword found. '
            output['data'] = inventorylib.calcstockfromall(
                **post)['trackedpart']
        elif action == 'generateorders':
            output['message'] += 'generate orders keyword found. '
            inventorylib.generateandaddorders()

        # Inventory functions
        # Edit and add are separated, as names are autogenerated
        elif action == 'editinventory':
            output['message'] += 'editinventory keyword found. '
            inventorylib.editinventory(post, output)
            inventorylib.calcstockfromall()
        elif action == 'addinventory':
            output['message'] += 'addinventory keyword found. '
            inventorylib.createnewinventory(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteinventories':
            output['message'] += 'deleteinventories keyword found. '
            inventorylib.deleteinventories(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditinventorypart':
            output['message'] += 'addeditinventorypart keyword found. '
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deletepartsfrominventory':
            output['message'] += 'deletepartsfrominventory keyword found. '
            inventorylib.deletepartsfrominventory(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()

        # Order functions
        elif action == 'editorder':
            output['message'] += 'editorder keyword found. '
            inventorylib.editorder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addorder':
            output['message'] += 'addorder keyword found. '
            inventorylib.createneworder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteorders':
            output['message'] += 'deleteorders keyword found. '
            inventorylib.deleteorders(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditorderpart':
            output['message'] += 'addeditorderpart keyword found. '
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditorderparts':
            output['message'] += 'addeditorderparts keyword found. '
            if 'partsdata' in post:
                post['partsdata'] = json.loads(post['partsdata'])
                inventorylib.addeditpartlist(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'deletepartsfromorder':
            output['message'] += 'deletepartsfromorder keyword found. '
            inventorylib.deletepartsfromorder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()

        # BOM functions
        elif action == 'copybom':
            output['message'] += 'copybom keyword found. '
            inventorylib.copybom(post, output)
            inventorylib.makebommetadata()
        elif action == 'addeditbom':
            output['message'] += 'addeditbom keyword found. '
            inventorylib.addeditbom(post, output)
            inventorylib.makebommetadata()
        elif action == 'addeditbomparts':
            output['message'] += 'addeditbomparts keyword found. '
            # Operate on partsdata
            post['partsdata'] = json.loads(post['partsdata'])
            inventorylib.addeditpartlist(post, output)
            inventorylib.makebommetadata()
        elif action == 'getbomcalcs':
            output['message'] += 'getbomcalcs keyword found. '
            inventorylib.calcbomprice(post, output)
        elif action == 'getquotecalcs':
            output['message'] += 'getquotecalcs keyword found. '
            output['message'] += 'function not written yet. '
            # inventorylib.calcbomprice(post, output)
        elif action == 'deletepartsfrombom':
            output['message'] += 'deletepartsfrombom keyword found. '
            inventorylib.deletepartsfrombom(post, output)
            inventorylib.makebommetadata()
        elif action == 'deleteboms':
            output['message'] += 'deleteboms keyword found. '
            inventorylib.deleteboms(post, output)
            inventorylib.makebommetadata()

        # Assembly functions
        elif action == 'copyassembly':
            output['message'] += 'copyassembly keyword found. '
            inventorylib.copyassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'copybomintoassembly':
            output['message'] += 'copybomintoassembly keyword found. '
            inventorylib.copybomintoassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditassembly':
            output['message'] += 'addeditassembly keyword found. '
            inventorylib.addeditassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditassemblyparts':
            output['message'] += 'addeditassemblyparts keyword found. '
            post['partsdata'] = json.loads(post['partsdata'])
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()

        elif action == 'getassemblycalcs':
            output['message'] += 'getassemblycalcs keyword found. '
            inventorylib.calcassemblyprice(post, output)
        elif action == 'deletepartsfromassembly':
            output['message'] += 'deletepartsfromassembly keyword found. '
            inventorylib.deletepartsfromassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteassemblys':
            output['message'] += 'deleteassemblys keyword found. '
            inventorylib.deleteassemblies(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()

        # Quotes
        elif action == 'deletequotes':
            output['message'] += 'deletequotes keyword found. '
            inventorylib.deletequotes(post, output)
            inventorylib.makebommetadata(
                database=inventorylib.sysvars.dbs.quotes)
        elif action == 'copyquotetoboms':
            output['message'] += 'copyquotetoboms keyword found. '
            inventorylib.copyquotetoboms(post, output)
            inventorylib.makebommetadata()

        # Export functions

        elif action == 'exportbomtopdf':
            output['message'] += 'exportbomtopdf keyword found. '
            inventorylib.writepanelbomtopdf(post, output)

            thetime = datalib.gettimestring()
            cleantime = thetime.replace(' ', '_').replace(':', '_')

            # Get bom from boms database
            bom = inventorylib.sysvars.dbs.boms.read_table(post['name'])

            cleanbomname = post['name'].replace(' ', '_').replace(':', '_')
            filename = cleanbomname + '_' + cleantime
            outputroot = '/var/www/html/panelbuilder/data/downloads/'

            weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + filename

            inventorylib.writepanelbomtopdf(
                **{
                    'bomdata': bom,
                    'title': 'Bom generated from ' + post['name'] + ' ' +
                    cleantime,
                    'outputfile': outputroot + filename
                })

            output['data'] = {'weblink': weblink}

        elif action == 'exportassemblytopdf':
            output['message'] += 'exportassemblytopdf keyword found. '

            thetime = datalib.gettimestring()
            cleantime = thetime.replace(' ', '_').replace(':', '_')

            # Get bom from boms database
            assemblydata = inventorylib.sysvars.dbs.assemblies.read_table(
                post['name'])

            cleanname = post['name'].replace(' ', '_').replace(':', '_')
            filename = cleanname + '_' + cleantime + '.pdf'
            outputroot = '/var/www/html/panelbuilder/data/downloads/'

            weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + filename

            inventorylib.writepanelbomtopdf(
                **{
                    'bomdata': assemblydata,
                    'title': 'Bom generated from ' + post['name'] + ' ' +
                    thetime,
                    'format': 'picklist',
                    'outputfile': outputroot + filename
                })

            output['data'] = {'assemblydata': assemblydata}
            output['weblink'] = weblink

        # Panel builder
        elif action in ['panelcalcs', 'panelcalcsgenquote']:
            output['message'] += 'panelcalc keyword found. '
            import panelbuilder
            for key, value in post.items():
                # print(key, value)
                pass

            if 'paneldesc' in post:
                import json
                post['paneldesc'] = json.loads(post['paneldesc'])

            bomresults = panelbuilder.paneltobom(**post)

            output['data'] = {}
            # post needs to have a 'paneldesc' key with the panel spec data in it.
            output['data']['bomdescription'] = bomresults['bomdescription']
            output['data']['options'] = bomresults['options']
            output['data']['bomcalcs'] = inventorylib.calcbomprice(
                {'bomdictarray': bomresults['bom']})['data']
            output['message'] += bomresults['message']

            # We don't actually want to return the full boms by default. We don't want this in the client, and it's
            # lot of data anyway
            if 'returnfullboms' not in post:
                for option, value in output['data']['options'].items():
                    if 'bom' in value:
                        print('Deleting bom from option ' + str(option))

                        del output['data']['options'][option]['bom']
                    if 'flatbom' in value:
                        print('Deleting flatbom from option ' + str(option))
                        del output['data']['options'][option]['flatbom']

            if action == 'panelcalcsgenquote':
                thetime = datalib.gettimestring()
                cleantime = thetime.replace(' ', '_').replace(':', '_')
                outputroot = '/var/www/html/panelbuilder/data/downloads/'

                if 'paneltype' in post['paneldesc'] and post['paneldesc'][
                        'paneltype'] == 'brewpanel':
                    datedquotefilename = 'panelbuilder_brew_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_brew_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_brew_quote.pdf'
                    genericbomfilename = 'panelbuilder_brew_bom.pdf'
                elif 'paneltype' in post['paneldesc'] and post['paneldesc'][
                        'paneltype'] == 'temppanel':
                    datedquotefilename = 'panelbuilder_temp_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_temp_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_temp_quote.pdf'
                    genericbomfilename = 'panelbuilder_temp_bom.pdf'
                else:
                    datedquotefilename = 'panelbuilder_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_quote.pdf'
                    genericbomfilename = 'panelbuilder_bom.pdf'

                weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + datedquotefilename

                # until we can get this to format properly in the pdf, we are going to leave it generic
                # description = output['data']['bomdescription']
                description = 'Control panel quote generated by panelbuilder.'
                datedquotes = True

                # Create quote pdf from BOM
                if datedquotes:

                    inventorylib.writepanelquotetopdf(
                        **{
                            'bomdata':
                            bomresults['bom'],
                            'options':
                            bomresults['options'],
                            'title':
                            'Quote auto-generated by panelbuilder   \t\t' +
                            datalib.gettimestring(),
                            'price':
                            str(output['data']['bomcalcs']['totalprice']),
                            'outputfile':
                            outputroot + datedquotefilename,
                            'description':
                            description
                        })

                inventorylib.writepanelquotetopdf(
                    **{
                        'bomdata': bomresults['bom'],
                        'options': bomresults['options'],
                        'title': 'Quote auto-generated by panelbuilder ' +
                        thetime,
                        'price': output['data']['bomcalcs']['totalprice'],
                        'outputfile': outputroot + genericquotefilename
                    })

                # Create database entry BOM

                # Create table
                # print('** DATABASE')
                # print(panelbuilder.sysvars.dirs.dbs.quotes)

                bomname = 'quote_' + cleantime
                inventorylib.addeditbom(
                    {
                        'bomdata': {
                            'name': bomname
                        },
                        'database': panelbuilder.sysvars.dirs.dbs.quotes
                    }, output)
                # print('** BOM **')
                # print(bomresults['bom'])
                inserts = []
                for part in bomresults['bom']:
                    inserts.append(
                        dblib.makesqliteinsert(bomname,
                                               [part['partid'], part['qty']],
                                               ['partid', 'qty']))
                dblib.sqlitemultquery(inventorylib.sysvars.dirs.dbs.quotes,
                                      inserts)
                inventorylib.makebommetadata(
                    database=inventorylib.sysvars.dbs.quotes)

                # inventorylib.addeditpartlist(post, output)

                # Create pdfs

                if datedquotes:
                    inventorylib.writepanelbomtopdf(
                        **{
                            'bomdata': bomresults['bom'],
                            'options': bomresults['options'],
                            'title': 'Quote auto-generated by panelbuilder ' +
                            thetime,
                            'outputfile': outputroot + datedbomfilename
                        })

                inventorylib.writepanelbomtopdf(
                    **{
                        'bomdata': bomresults['bom'],
                        'title': 'panelbuilder BOM generated ' + thetime,
                        'outputfile': outputroot + genericbomfilename,
                        'totalprice': output['data']['bomcalcs']['totalprice']
                    })

                output['data']['quotelink'] = weblink
                from iiutilities.utility import gmail
                mymail = gmail(subject="Quote generated")
                mymail.message = 'Quote generated at ' + cleantime + '\r\n'

                if 'remote_ip' in output:
                    mymail.message += 'IP address ' + output[
                        'remote_ip'] + '\r\n'

                mymail.message += bomresults['bomdescription']
                mymail.recipient = '*****@*****.**'
                mymail.sender = 'II Panelbuilder'
                mymail.send()

        # Multi-use
        elif action == 'reloaditemdatafromstock':
            output['message'] += 'reloaditemdatafromstock keyword found. '
            inventorylib.refreshpartsfromstock(post, output)
            if 'bomname' in post:
                inventorylib.recalcpartdata(bomname=post['bomname'])
                inventorylib.makebommetadata()
            elif 'assemblyname' in post:
                inventorylib.recalcpartdata(assemblyname=post['assemblyname'])
                inventorylib.makeassemblymetadata()

        # Generic functions
        elif action == 'gettablenames':
            dbpath = inventorylib.dbnametopath(post['database'])
            try:
                output['data'] = dblib.gettablenames(dbpath)
            except:
                output['message'] += 'Error getting table names'
        elif action == 'switchtablerows':
            dbpath = inventorylib.dbnametopath(post['database'])
            dblib.switchtablerows(dbpath, post['tablename'], post['row1'],
                                  post['row2'], post['uniqueindex'])
        elif action == 'modwsgistatus':
            output['processgroup'] = repr(environ['mod_wsgi.process_group'])
            output['multithread'] = repr(environ['wsgi.multithread'])
        elif action == 'gettabledata':
            output['message'] += 'Gettabledata. '
            if 'database' in post:
                dbpath = inventorylib.dbnametopath(post['database'])
                if dbpath:
                    output['message'] += 'Friendly name ' + post[
                        'database'] + ' translated to path ' + dbpath + ' successfully. '

                    if 'tablenames' in post:  # Get multiple tables
                        output['message'] += 'Multiple tables. '
                        data = []
                        if 'start' in post:
                            fixedstart = int(post['start'])
                        else:
                            fixedstart = 0
                        if 'length' in post:
                            fixedlength = int(post['length'])
                        else:
                            fixedlength = 1
                        if 'lengths[]' in post:
                            lengths = list(map(int, post['lengths[]']))
                        else:
                            lengths = []
                        if 'starts' in post:
                            starts = list(map(int, post['starts']))
                        else:
                            starts = []

                        for index, table in enumerate(post['tablenames[]']):
                            try:
                                length = lengths[index]
                            except IndexError:
                                length = fixedlength
                            try:
                                start = starts[index]
                            except IndexError:
                                start = fixedstart

                            data.append(
                                dblib.dynamicsqliteread(
                                    dbpath, table, start, length))
                            output['data'] = data
                    elif 'length' in post:  # Handle table row subset
                        output['message'] += 'Length keyword. '
                        if 'start' not in post:
                            post['start'] = 0
                        thetime = time()
                        output['data'] = dblib.dynamicsqliteread(
                            dbpath, post['tablename'], post['start'],
                            post['length'])
                        output['querytime'] = time() - thetime
                    elif 'row' in post:  # Handle table row
                        output['message'] += 'Row keyword. ' + str(post['row'])
                        thetime = time()
                        output['data'] = dblib.dynamicsqliteread(
                            dbpath, post['tablename'], post['row'])
                        output['querytime'] = time() - thetime
                    elif 'tablename' in post:  # Handle entire table
                        output['message'] += 'Tablename keyword: ' + post[
                            'tablename'] + '. '
                        thetime = time()
                        if 'condition' in post:
                            if not post['condition'] == '':
                                output['data'] = dblib.dynamicsqliteread(
                                    dbpath,
                                    post['tablename'],
                                    condition=post['condition'])
                            else:
                                output['data'] = dblib.dynamicsqliteread(
                                    dbpath, post['tablename'])
                        else:
                            try:
                                output['data'] = dblib.dynamicsqliteread(
                                    dbpath, post['tablename'])
                            except:
                                output['message'] += 'Error retrieving data. '
                            else:
                                output[
                                    'message'] += 'Data query appears successful. '
                        output['querytime'] = time() - thetime
                else:
                    output['message'] += 'Friendly name ' + post[
                        'database'] + ' unsuccessfully translated. '
            else:
                output['message'] += 'No database present in action request'
        else:
            output[
                'message'] = 'no command matched for action "' + action + '"'
    else:
        # status = '403 Forbidden'
        output[
            'message'] += 'Not authorized for this action (or perhaps at all?) '

    if 'data' in output:
        if output['data']:
            newetag = hashlib.md5(str(
                output['data']).encode('utf-8')).hexdigest()
            if 'etag' in post:
                if newetag == post['etag']:
                    status = '304 Not Modified'
                    output['data'] = ''
        else:
            newetag = ''
    else:
        newetag = ''

    if 'datasize' in post:
        output['datasize'] = sys.getsizeof(output.get('data', ''))

    output['etag'] = newetag
    # try:
    foutput = json.dumps(output, indent=1)
    # except:
    #     import csv
    #     w = csv.writer(open("/usr/lib/iicontrollibs/inventory/dumperr.log", "w"))
    #     for key, val in output.items():
    #         w.writerow([key, val])
    response_headers = [('Content-type', 'application/json')]
    response_headers.append(('Etag', newetag))
    start_response(status, response_headers)

    return foutput.encode('utf-8')
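
A note on the response handling above: the handler hashes the serialized payload to produce an ETag and returns 304 Not Modified with an empty body when the client supplies a matching tag. Below is a minimal, standalone sketch of that pattern; the function name and dict-based interface are illustrative, not part of the library.

# Hedged sketch of the ETag/304 convention used by the handler above.
# 'output' and 'post' are plain dicts here; names are illustrative only.
import hashlib
import json

def build_conditional_response(output, post):
    status = '200 OK'
    newetag = ''
    if output.get('data'):
        newetag = hashlib.md5(str(output['data']).encode('utf-8')).hexdigest()
        if post.get('etag') == newetag:
            status = '304 Not Modified'
            output['data'] = ''  # client already holds this payload
    output['etag'] = newetag
    return status, json.dumps(output, indent=1).encode('utf-8')
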
Example #11
def process_channel(**kwargs):

    systemstatus = system_db.read_table_row('systemstatus')[0]
    if 'channel' in kwargs:
        channel = kwargs['channel']
    elif 'channel_name' in kwargs:
        channels = control_db.read_table(
            'channels', '"name"=\'' + kwargs['channel_name'] + "'")
        if len(channels) == 1:
            channel = channels[0]
        else:
            print('wrong number of channels returned. aborting')
            return

    # channelindex = str(int(channel['channelindex']))
    logtablename = 'channel' + '_' + channel['name'] + '_log'
    time = datalib.gettimestring()
    disableoutputs = True

    status_msg = channel['name'] + ': '

    log_tablenames = log_db.get_table_names()

    # Channel enabled means different things for different types of channels

    channel_condition = '"name"=\'{}\''.format(channel['name'])

    # Create log if it doesn't exist
    if logtablename not in log_tablenames:
        log_db.create_table(logtablename, pilib.schema.channel_datalog)

    if channel['type'] == 'local':

        if channel['enabled']:

            status_msg = ''
            try:
                setpoint_value = float(channel['setpoint_value'])
            except:
                channel['enabled'] = 0
                status_msg += 'Error with setpoint. Disabling'
                control_db.set_single_value('channels', 'enabled', 0,
                                            channel_condition)

            # Need to test for age of data. If stale or disconnected, invalidate
            try:
                process_value = float(channel['process_value'])
            except:
                status_msg += 'Invalid control value. Disabling channel. '
                channel['enabled'] = 0
                control_db.set_single_value('channels', 'enabled', 0,
                                            channel_condition)

        # Move forward if still enabled after error-checking
        if channel['enabled']:

            status_msg += 'Channel Enabled. '

            # TODO : look at channel auto mode.
            if channel['mode'] == 'auto':
                status_msg += 'Mode:Auto. '
                # print('running auto sequence')

                # run algorithm on channel

                response = controllib.runalgorithm(pilib.dirs.dbs.control,
                                                   pilib.dirs.dbs.session,
                                                   channel['name'])
                action = response[0]
                message = response[1]

                status_msg += ' ' + response[1] + ' '
                status_msg += 'Action: ' + str(action) + '. '

                # Set action in channel

                controllib.setaction(pilib.dirs.dbs.control, channel['name'],
                                     action)

            elif channel['mode'] == 'manual':
                # print('manual mode')
                status_msg += 'Mode:Manual. '
                action = controllib.getaction(pilib.dirs.dbs.control,
                                              channel['name'])
            else:
                # print('error, mode= ' + mode)
                status_msg += 'Mode:Error. '

            if systemstatus['enableoutputs']:
                status_msg += 'System outputs enabled. '
                if channel['outputs_enabled']:
                    status_msg += 'Channel outputs enabled. '
                    disableoutputs = False

                    # find out whether action is positive or negative or
                    # not at all.

                    # and act. for now, this is binary, but in the future
                    # this will be a duty cycle daemon

                    outputsetnames = []
                    outputresetnames = []
                    if action > 0:
                        print("set positive output on")
                        outputsetnames.append(channel['positive_output'])
                        outputresetnames.append(channel['negative_output'])
                    elif action < 0:
                        print("set negative output on")
                        outputsetnames.append(channel['negative_output'])
                        outputresetnames.append(channel['positive_output'])
                    elif action == 0:
                        status_msg += 'No action. '
                        outputresetnames.append(channel['positive_output'])
                        outputresetnames.append(channel['negative_output'])
                    else:
                        status_msg += 'Algorithm error. Doing nothing.'

                    # Check to see if outputs are ready to enable/disable
                    # If not, pull them from list of set/reset

                    control_algorithm = control_db.read_table(
                        'controlalgorithms',
                        condition='"name"=\'' + channel['controlalgorithm'] +
                        "'")
                    if len(control_algorithm) == 1:
                        algorithm = control_algorithm[0]
                    else:
                        status_msg += 'Algorithm Error: Not found (or multiple?). Using default. '
                        algorithm = default_control_algorithm

                    outputstoset = []
                    for outputname in outputsetnames:
                        offtime = control_db.get_single_value(
                            'outputs',
                            'offtime',
                            condition='"name"=\'' + outputname + "'")
                        if datalib.timestringtoseconds(datalib.gettimestring(
                        )) - datalib.timestringtoseconds(
                                offtime) > algorithm['minofftime']:
                            outputstoset.append(outputname)
                        else:
                            status_msg += 'Output ' + outputname + ' not ready to enable. '

                    outputstoreset = []
                    for outputname in outputresetnames:
                        ontime = control_db.get_single_value(
                            'outputs',
                            'ontime',
                            condition='"name"=\'' + outputname + "'")
                        if datalib.timestringtoseconds(datalib.gettimestring(
                        )) - datalib.timestringtoseconds(
                                ontime) > algorithm['minontime']:
                            outputstoreset.append(outputname)
                        else:
                            status_msg += 'Output ' + outputname + ' not ready to disable. '
                    """ TODO: Change reference to controlinputs to name rather than id. Need to double-check
                    enforcement of no duplicates."""

                    # Find output in list of outputs if we have one to set

                    time = datalib.gettimestring()
                    if len(outputstoset) > 0 or len(outputstoreset) > 0:
                        for output in outputs:
                            id_condition = '"id"=\'' + output['id'] + "'"
                            if output['name'] in outputstoset:

                                # check current status
                                currvalue = output['value']
                                if not currvalue:  # No need to set if otherwise. Will be different for analog out
                                    # set ontime
                                    control_db.set_single_value('outputs',
                                                                'ontime',
                                                                time,
                                                                id_condition,
                                                                queue=True)
                                    # set value
                                    control_db.set_single_value('outputs',
                                                                'value',
                                                                1,
                                                                id_condition,
                                                                queue=True)
                                    status_msg += 'Output ' + output[
                                        'name'] + ' enabled. '
                                else:
                                    status_msg += 'Output ' + output[
                                        'name'] + ' already enabled. '

                            if output['name'] in outputstoreset:
                                # check current status
                                currvalue = output['value']
                                if currvalue:  # No need to set if otherwise. Will be different for analog out
                                    # set ontime
                                    control_db.set_single_value('outputs',
                                                                'offtime',
                                                                time,
                                                                id_condition,
                                                                queue=True)
                                    # set value
                                    control_db.set_single_value('outputs',
                                                                'value',
                                                                0,
                                                                id_condition,
                                                                queue=True)
                                    status_msg += 'Output ' + output[
                                        'name'] + ' disabled. '
                                else:
                                    status_msg += 'Output ' + output[
                                        'name'] + ' already disabled. '

                else:
                    status_msg += 'Channel outputs disabled. '
                    action = 0

            else:
                status_msg += 'System outputs disabled. '
                action = 0

            # Insert entry into control log
            insert = {
                'time': time,
                'process_value': channel['process_value'],
                'setpoint_value': channel['setpoint_value'],
                'action': channel['action'],
                'algorithm': channel['algorithm_name'],
                'enabled': channel['enabled'],
                'status_msg': status_msg
            }
            control_db.insert(logtablename, insert, queue=True)

            log_options = datalib.parseoptions(channel['log_options'])
            log_db.size_table(logtablename, **log_options)
        else:
            # Channel is disabled. Need to do active disable here.
            pass

    elif channel['type'] == 'remote':
        status_msg += 'Remote channel. '

        if channel['pending']:

            from iiutilities.datalib import parseoptions, dicttojson
            status_msg += 'Processing pending action. '
            pending = parseoptions(channel['pending'])

            if 'setpoint_value' in pending:
                status_msg += 'processing setpoint_value. '
                # Get control output and have a look at it.
                input_name = channel['sv_input']

                # try:
                inputs = control_db.read_table('inputs',
                                               '"name"=\'' + input_name + "'")
                # except:
                #     status_msg += 'Inputs query error. '
                #     return status_msg

                if len(inputs) == 1:
                    input = inputs[0]
                else:
                    status_msg += 'wrong number of query items returned, length: ' + str(
                        len(inputs)
                    ) + ' for query on input name: ' + input_name
                    print('ERROR: ' + status_msg)
                    return status_msg

                # write_to_input(input, value)
                if input['type'] == 'MBTCP':

                    input_id = input['id']

                    # Now, using this id, we can determine uniquely which MBTCP entry it came from
                    splits = input_id.split('_')
                    interfaceid = splits[0]
                    register = splits[1]
                    length = splits[2]

                    string_condition = dblib.string_condition_from_lists(
                        ['interfaceid', 'register', 'length'],
                        [interfaceid, register, length])
                    input_mb_entry = control_db.read_table(
                        'modbustcp', string_condition)[0]

                    # Get IP address
                    address = control_db.get_single_value(
                        'interfaces', 'address',
                        '"id"=\'' + input_mb_entry['interfaceid'] + "'")

                    from iiutilities import netfun

                    if input_mb_entry['options']:
                        input_options = parseoptions(input_mb_entry['options'])
                        if 'scale' in input_options:
                            pending['setpoint_value'] = float(
                                pending['setpoint_value']) / float(
                                    input_options['scale'])

                    try:
                        result = netfun.writeMBcodedaddresses(
                            address,
                            register, [float(pending['setpoint_value'])],
                            convert=input_mb_entry['format'])
                    except:
                        status_msg += 'Error in modbus'
                    else:
                        if result['statuscode'] == 0:

                            # Clear pending setpoint_value
                            pending.pop('setpoint_value', None)
                            pending_string = dicttojson(pending)
                            print('setting pending in setpoint_value mbtcp')

                            control_db.set_single_value(
                                'channels', 'pending', pending_string,
                                channel_condition)
                        else:
                            status_msg += 'modbus write operation returned a non-zero status of ' + str(
                                result['statuscode'])

                elif input['type'] == 'MOTE':
                    mote_node = input['address'].split(':')[0]
                    mote_address = input['address'].split(':')[1]
                    if mote_node == '1':
                        message = '~setsv;' + mote_address + ';' + str(
                            pending['setpoint_value'])
                    else:
                        message = '~sendmsg;' + str(
                            mote_node
                        ) + ';;~setsv;' + mote_address + ';' + str(
                            pending['setpoint_value'])

                    motes_db = pilib.cupidDatabase(pilib.dirs.dbs.motes)
                    from time import sleep
                    for i in range(2):
                        time = datalib.gettimestring(
                            datalib.timestringtoseconds(
                                datalib.gettimestring()) + i)
                        motes_db.insert('queued', {
                            'queuedtime': time,
                            'message': message
                        })

                    # Clear pending setpoint_value
                    pending.pop('setpoint_value', None)
                    pending_string = dicttojson(pending)
                    print('setting pending in setpoint_value mote')

                    control_db.set_single_value('channels', 'pending',
                                                pending_string,
                                                channel_condition)

            if 'enabled' in pending:
                status_msg += 'processing enabled value. '

                # Get control output and have a look at it.
                input_name = channel['enabled_input']

                try:
                    inputs = control_db.read_table(
                        'inputs', '"name"=\'' + input_name + "'")
                except:
                    status_msg += 'Inputs query error. '
                    return status_msg

                if len(inputs) == 1:
                    input = inputs[0]
                else:
                    status_msg += 'wrong number of query items returned, length: ' + str(
                        len(inputs)) + '. '
                    return status_msg

                # write_to_input(input, value)
                if input['type'] == 'MBTCP':

                    input_id = input['id']

                    # Now, using this id, we can determine uniquely which MBTCP entry it came from
                    splits = input_id.split('_')
                    interfaceid = splits[0]
                    register = splits[1]
                    length = splits[2]

                    string_condition = dblib.string_condition_from_lists(
                        ['interfaceid', 'register', 'length'],
                        [interfaceid, register, length])
                    input_mb_entry = control_db.read_table(
                        'modbustcp', string_condition)[0]

                    # Get IP address
                    address = control_db.get_single_value(
                        'interfaces', 'address',
                        '"id"=\'' + input_mb_entry['interfaceid'] + "'")

                    from iiutilities import netfun
                    # print(address, register,input_mb_entry['format'], int(pending['enabled']))

                    if input_mb_entry['options']:
                        input_options = parseoptions(input_mb_entry['options'])

                    try:
                        result = netfun.writeMBcodedaddresses(
                            address,
                            register, [int(pending['enabled'])],
                            convert=input_mb_entry['format'])
                    except:
                        status_msg += 'Error in modbus'
                    else:
                        if result['statuscode'] == 0:
                            status_msg += 'That seems to have worked ok?'
                            # Clear pending setpoint_value
                            pending.pop('enabled', None)
                            pending_string = dicttojson(pending)
                            print('setting pending in enabled mbtcp')
                            control_db.set_single_value(
                                'channels', 'pending', pending_string,
                                channel_condition)
                        else:
                            status_msg += 'modbus write operation returned a non-zero status of ' + str(
                                result['statuscode'])

                elif input['type'] == 'MOTE':
                    mote_node = input['address'].split(':')[0]
                    mote_address = input['address'].split(':')[1]
                    if mote_node == '1':
                        message = '~setrun;' + mote_address + ';' + str(
                            pending['enabled'])
                    else:
                        message = '~sendmsg;' + str(
                            mote_node
                        ) + ';;~setrun;' + mote_address + ';' + str(
                            pending['enabled'])

                    motes_db = pilib.cupidDatabase(pilib.dirs.dbs.motes)
                    from time import sleep
                    for i in range(2):
                        time = datalib.gettimestring(
                            datalib.timestringtoseconds(
                                datalib.gettimestring()) + i)
                        motes_db.insert('queued', {
                            'queuedtime': time,
                            'message': message
                        })

                    # Clear pending setpoint_value
                    pending.pop('enabled', None)
                    pending_string = dicttojson(pending)

                    control_db.set_single_value('channels', 'pending',
                                                pending_string,
                                                channel_condition)

        # Insert entry into control log
        insert = {
            'time': time,
            'process_value': channel['process_value'],
            'setpoint_value': channel['setpoint_value'],
            'action': channel['action'],
            'algorithm': channel['control_algorithm'],
            'enabled': channel['enabled'],
            'status_msg': status_msg
        }
        # print(insert)
        log_db.insert(logtablename, insert)

        # Size log
        log_options = datalib.parseoptions(channel['log_options'])
        log_db.size_table(logtablename, **log_options)

    # If active reset and we didn't set channel modes, disable outputs
    # Active reset is not yet explicitly declared, but implied

    if disableoutputs and channel['type'] not in ['remote']:
        status_msg += 'Disabling Outputs. '
        for id in [channel['positive_output'], channel['negative_output']]:
            control_db.set_single_value('outputs',
                                        'value',
                                        0,
                                        '"id"=\'' + id + "'",
                                        queue=True)
            status_msg += 'Outputs disabled for id=' + id + '. '

    # Set status message for channel
    control_db.set_single_value('channels',
                                'status_message',
                                status_msg,
                                channel_condition,
                                queue=True)

    # Set update time for channel
    control_db.set_single_value('channels',
                                'control_updatetime',
                                time,
                                channel_condition,
                                queue=True)

    # Execute query
    control_db.execute_queue()
    return status_msg
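
The output gating in process_channel above only toggles an output after it has spent at least the algorithm's minofftime (before enabling) or minontime (before disabling) in its current state. A small hedged sketch of that check follows, using plain epoch seconds rather than the datalib time strings the real code converts back and forth; the helper name is hypothetical.

# Hedged sketch of the minimum on/off-time gating used in process_channel.
# Works on epoch seconds; the real code converts datalib time strings instead.
import time

def output_ready_to_switch(last_transition_seconds, min_interval_seconds, now=None):
    """True when the output has been in its current state long enough to toggle."""
    if now is None:
        now = time.time()
    return (now - last_transition_seconds) > min_interval_seconds

# e.g. gate an enable the way the loop above gates outputsetnames:
# if output_ready_to_switch(offtime_seconds, algorithm['minofftime']):
#     outputstoset.append(outputname)
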
Example #12
def runboot():
    import subprocess
    from time import sleep

    import pilib
    import spilights
    from iiutilities import utility, dblib, datalib

    try:
        pilib.set_all_wal(False)
    except:
        print('error setting wal mode')

    interfaces = pilib.dbs.control.read_table('interfaces')

    # Clear out status bits, if for no other reason to see the LEDs come on
    for statusvalue in ['systemstatusstatus', 'hamachistatus', 'picontrolstatus', 'updateiostatus', 'serialhandlerstatus' ]:
        dblib.setsinglevalue(pilib.dirs.dbs.system, 'systemstatus', statusvalue, 0)

    systemstatus = dblib.readonedbrow(pilib.dirs.dbs.system, 'systemstatus')[0]


    # Queue a message indicating we are rebooting
    # TODO: Make this an actions option, or put it somewhere.
    # try:
    import socket
    hostname = socket.gethostname()

    message = 'CuPID is booting:\r\n\r\n'
    notifications_email = '*****@*****.**'
    subject = 'CuPID : ' + hostname + ' : booting'
    notification_database = pilib.cupidDatabase(pilib.dirs.dbs.notifications)
    system_database = pilib.cupidDatabase(pilib.dirs.dbs.system)

    currenttime = datalib.gettimestring()
    notification_database.insert('queued',
                                 {'type': 'email', 'message': message,
                                  'options': 'email:' + notifications_email + ',subject:' + subject,
                                  'queuedtime': currenttime})
    system_database.set_single_value('notifications', 'lastnotification', currenttime, condition="item='boot'")

    # except Exception as e:
    #     error_message = 'EXCEPTION in notification: {}'.format(e.message)
    #     print (error_message)
    #     utility.log(pilib.dirs.logs.system, error_message)
    # else:
    #     utility.log(pilib.dirs.logs.system, 'Boot notification complete. ')


    # Start pigpiod

    subprocess.call(['killall','pigpiod'])
    sleep(1)
    utility.log(pilib.dirs.logs.system, 'boot: starting pigpio daemon', 3, pilib.loglevels.system)
    subprocess.call(['/usr/local/bin/pigpiod'])

    # Start webserver

    subprocess.call(['killall','nginx'])
    subprocess.call(['killall','uwsgi'])
    subprocess.call(['killall','apache2'])

    if systemstatus['webserver'] == 'apache':
        utility.log(pilib.dirs.logs.system, 'boot: starting apache', 3, pilib.loglevels.system)
        subprocess.call(['service', 'apache2', 'start'])
    elif systemstatus['webserver'] == 'nginx':
        utility.log(pilib.dirs.logs.system, 'boot: starting nginx', 3, pilib.loglevels.system)
        subprocess.call(['service', 'nginx', 'start'])

    # Run uwsgi daemon if nginx is running

    try:
        result = subprocess.check_output(['service', 'nginx', 'status']).decode('utf-8')
    except subprocess.CalledProcessError as e:
        result = ''
        # print('I AM FAILING')
        # print e.output

    if result:
        utility.log(pilib.dirs.logs.system, 'boot: starting uwsgi based on nginx call', 0)
        subprocess.call(['uwsgi', '--emperor', '/usr/lib/iicontrollibs/wsgi/', '--daemonize', '/var/log/cupid/uwsgi.log'])
    else:
        # print(' I KNOW NGINX IS NOT RUNNING')
        pass
    # Mount 1wire master

    subprocess.call(['killall','owfs'])
    subprocess.call(['killall','owserver'])
    subprocess.call(['killall','owhttpd'])

    runi2cowfs = True
    runusbowfs = False

    temp_unit = 'C'
    for interface in interfaces:
        if interface['enabled']:
            from iiutilities.datalib import parseoptions
            options_dict = parseoptions(interface['options'])
            if 'tempunit' in options_dict:
                if options_dict['tempunit'] in ['F','f','Fahrenheit','fahrenheit']:
                    temp_unit = 'F'

            if interface['interface'] == 'I2C' and interface['type'] == 'DS2483':
                runi2cowfs = True
            if interface['interface'] == 'USB' and interface['type'] == 'DS9490':
                runusbowfs = True

            if interface['interface'] == 'SPI1' and interface['type'] == 'CuPIDlights':
                spilights.updatelightsfromdb(pilib.dirs.dbs.control, 'indicators', 1)
            if interface['interface'] == 'SPI0' and interface['type'] == 'CuPIDlights':
                spilights.updatelightsfromdb(pilib.dirs.dbs.control, 'indicators', 0)

    if runi2cowfs or runusbowfs:
        if runi2cowfs:
            utility.log(pilib.dirs.logs.system, 'boot: Running i2c owserver', 3, pilib.loglevels.system)
            try:
                if temp_unit == 'F':
                    subprocess.call(['/opt/owfs/bin/owserver', '-F', '--i2c=/dev/i2c-1:ALL', '-p', '4304'])
                else:
                    subprocess.call(['/opt/owfs/bin/owserver', '--i2c=/dev/i2c-1:ALL', '-p', '4304'])
            except:
                utility.log(pilib.dirs.logs.system, 'boot: error running i2c owserver', 1, pilib.loglevels.system)
        if runusbowfs:
            utility.log(pilib.dirs.logs.system, 'boot: Running usb owserver', 3, pilib.loglevels.system)
            try:
                if temp_unit == 'F':
                    subprocess.call(['/opt/owfs/bin/owserver', '-F', '-u', '-p', '4304'])
                else:
                    subprocess.call(['/opt/owfs/bin/owserver', '-u', '-p', '4304'])
            except:
                utility.log(pilib.dirs.logs.system, 'error running usb owserver', 1, pilib.loglevels.system)

        utility.log(pilib.dirs.logs.system, 'boot: Running owfs/owserver mount', 3, pilib.loglevels.system)
        try:
            if temp_unit == 'F':
                subprocess.call(['/opt/owfs/bin/owfs', '-F', '-s', '4304', '/var/1wire/'])
            else:
                subprocess.call(['/opt/owfs/bin/owfs', '-s', '4304', '/var/1wire/'])
        except:
            utility.log(pilib.dirs.logs.system, 'boot: error running owfs', 1, pilib.loglevels.system)

        utility.log(pilib.dirs.logs.system, 'boot: Running owhttpd/owserver mount', 3, pilib.loglevels.system)
        try:
            if temp_unit == 'F':
                subprocess.call(['/opt/owfs/bin/owhttpd', '-F', '-s', '4304', '-p', '4305'])
            else:
                subprocess.call(['/opt/owfs/bin/owhttpd', '-s', '4304', '-p', '4305'])
        except:
            utility.log(pilib.dirs.logs.system, 'boot: error running owhttpd', 1, pilib.loglevels.system)

    else:
        utility.log(pilib.dirs.logs.system, 'boot: not running owfs', 3, pilib.loglevels.system)

    # Run netstart script if enabled
    if systemstatus['netconfigenabled']:
        from netconfig import runconfig
        utility.log(pilib.dirs.logs.system, 'boot: running boot netconfig', 2, pilib.loglevels.system)
        runconfig(onboot=True)
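
runboot, like several routines above, relies on datalib.parseoptions to turn option strings such as 'email:...,subject:...' or 'tempunit:F' into a dict. The sketch below only illustrates the comma-separated key:value format those strings appear to use; it is an assumption about the format, not the library's actual implementation.

# Hedged sketch of comma-separated 'key:value' option parsing, inferred from
# strings like 'email:addr,subject:text' and 'tempunit:F' above. Illustrative
# only; datalib.parseoptions is the real parser.
def parse_options_sketch(optionstring):
    options = {}
    for pair in optionstring.split(','):
        if ':' in pair:
            key, _, value = pair.partition(':')  # split on the first colon only
            options[key.strip()] = value.strip()
    return options

# parse_options_sketch('tempunit:F,scale:10') -> {'tempunit': 'F', 'scale': '10'}
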
Example #13
def process_channel(**kwargs):

    systemstatus = system_db.read_table_row('systemstatus')[0]
    if 'channel' in kwargs:
        channel = kwargs['channel']
    elif 'channel_name' in kwargs:
        channels = control_db.read_table('channels', '"name"=\'' + kwargs['channel_name'] + "'")
        if len(channels) == 1:
            channel = channels[0]
        else:
            print('wrong number of channels returned. aborting')
            return


    # channelindex = str(int(channel['channelindex']))
    logtablename = 'channel' + '_' + channel['name'] + '_log'
    time = datalib.gettimestring()
    disableoutputs = True

    status_msg = channel['name'] + ': '

    log_tablenames = log_db.get_table_names()

    # Channel enabled means different things for different types of channels

    channel_condition = '"name"=\'{}\''.format(channel['name'])

    # Create log if it doesn't exist
    if logtablename not in log_tablenames:
        log_db.create_table(logtablename, pilib.schema.channel_datalog)

    if channel['type'] == 'local':

        if channel['enabled']:

            status_msg = ''
            try:
                setpoint_value = float(channel['setpoint_value'])
            except:
                channel['enabled'] = 0
                status_msg += 'Error with setpoint. Disabling'
                control_db.set_single_value('channels', 'enabled', 0, channel_condition)

            # Need to test for age of data. If stale or disconnected, invalidate
            try:
                process_value = float(channel['process_value'])
            except:
                status_msg += 'Invalid control value. Disabling channel. '
                channel['enabled'] = 0
                control_db.set_single_value('channels', 'enabled', 0, channel_condition)

        # Move forward if still enabled after error-checking
        if channel['enabled']:

            status_msg += 'Channel Enabled. '

            # TODO : look at channel auto mode.
            if channel['mode'] == 'auto':
                status_msg += 'Mode:Auto. '
                # print('running auto sequence')

                # run algorithm on channel

                response = controllib.runalgorithm(pilib.dirs.dbs.control, pilib.dirs.dbs.session, channel['name'])
                action = response[0]
                message = response[1]

                status_msg += ' ' + response[1] + ' '
                status_msg += 'Action: ' + str(action) + '. '

                # Set action in channel

                controllib.setaction(pilib.dirs.dbs.control, channel['name'], action)

            elif channel['mode'] == 'manual':
                # print('manual mode')
                status_msg += 'Mode:Manual. '
                action = controllib.getaction(pilib.dirs.dbs.control, channel['name'])
            else:
                # print('error, mode= ' + mode)
                status_msg += 'Mode:Error. '

            if systemstatus['enableoutputs']:
                status_msg += 'System outputs enabled. '
                if channel['outputs_enabled']:
                    status_msg += 'Channel outputs enabled. '
                    disableoutputs = False

                    # find out whether action is positive or negative or
                    # not at all.

                    # and act. for now, this is binary, but in the future
                    # this will be a duty cycle daemon

                    outputsetnames = []
                    outputresetnames = []
                    if action > 0:
                        print("set positive output on")
                        outputsetnames.append(channel['positive_output'])
                        outputresetnames.append(channel['negative_output'])
                    elif action < 0:
                        print("set negative output on")
                        outputsetnames.append(channel['negative_output'])
                        outputresetnames.append(channel['positive_output'])
                    elif action == 0:
                        status_msg += 'No action. '
                        outputresetnames.append(channel['positive_output'])
                        outputresetnames.append(channel['negative_output'])
                    else:
                        status_msg += 'Algorithm error. Doing nothing.'

                    # Check to see if outputs are ready to enable/disable
                    # If not, pull them from list of set/reset

                    control_algorithm = control_db.read_table('controlalgorithms', condition='"name"=\'' + channel['controlalgorithm'] + "'")
                    if len(control_algorithm) == 1:
                        algorithm = control_algorithm[0]
                    else:
                        status_msg += 'Algorithm Error: Not found (or multiple?). Using default. '
                        algorithm = default_control_algorithm

                    outputstoset = []
                    for outputname in outputsetnames:
                        offtime = control_db.get_single_value('outputs', 'offtime',
                                                              condition='"name"=\'' + outputname + "'")
                        if datalib.timestringtoseconds(
                                datalib.gettimestring()) - datalib.timestringtoseconds(offtime) > algorithm[
                            'minofftime']:
                            outputstoset.append(outputname)
                        else:
                            status_msg += 'Output ' + outputname + ' not ready to enable. '

                    outputstoreset = []
                    for outputname in outputresetnames:
                        ontime = control_db.get_single_value('outputs', 'ontime',
                                                             condition='"name"=\'' + outputname + "'")
                        if datalib.timestringtoseconds(
                                datalib.gettimestring()) - datalib.timestringtoseconds(ontime) > algorithm[
                            'minontime']:
                            outputstoreset.append(outputname)
                        else:
                            status_msg += 'Output ' + outputname + ' not ready to disable. '

                    """ TODO: Change reference to controlinputs to name rather than id. Need to double-check
                    enforcement of no duplicates."""

                    # Find output in list of outputs if we have one to set

                    time = datalib.gettimestring()
                    if len(outputstoset) > 0 or len(outputstoreset) > 0:
                        for output in outputs:
                            id_condition = '"id"=\'' + output['id'] + "'"
                            if output['name'] in outputstoset:

                                # check current status
                                currvalue = output['value']
                                if not currvalue:  # No need to set if otherwise. Will be different for analog out
                                    # set ontime
                                    control_db.set_single_value('outputs', 'ontime', time, id_condition, queue=True)
                                    # set value
                                    control_db.set_single_value('outputs', 'value', 1, id_condition, queue=True)
                                    status_msg += 'Output ' + output['name'] + ' enabled. '
                                else:
                                    status_msg += 'Output ' + output['name'] + ' already enabled. '

                            if output['name'] in outputstoreset:
                                # check current status
                                currvalue = output['value']
                                if currvalue:  # No need to set if otherwise. Will be different for analog out
                                    # set ontime
                                    control_db.set_single_value('outputs', 'offtime', time, id_condition,
                                                                queue=True)
                                    # set value
                                    control_db.set_single_value('outputs', 'value', 0, id_condition, queue=True)
                                    status_msg += 'Output ' + output['name'] + ' disabled. '
                                else:
                                    status_msg += 'Output ' + output['name'] + ' already disabled. '

                else:
                    status_msg += 'Channel outputs disabled. '
                    action = 0

            else:
                status_msg += 'System outputs disabled. '
                action = 0

            # Insert entry into control log
            insert = {'time': time, 'process_value': channel['process_value'],
                      'setpoint_value': channel['setpoint_value'],
                      'action': channel['action'], 'algorithm': channel['algorithm_name'],
                      'enabled': channel['enabled'],
                      'status_msg': status_msg}
            control_db.insert(logtablename, insert, queue=True)

            log_options = datalib.parseoptions(channel['log_options'])
            log_db.size_table(logtablename, **log_options)
        else:
            # Channel is disabled. Need to do active disable here.
            pass

    elif channel['type'] == 'remote':
        status_msg += 'Remote channel. '

        if channel['pending']:

            from iiutilities.datalib import parseoptions, dicttojson
            status_msg += 'Processing pending action. '
            pending = parseoptions(channel['pending'])

            if 'setpoint_value' in pending:
                status_msg += 'processing setpoint_value. '
                # Get control output and have a look at it.
                input_name = channel['sv_input']

                # try:
                inputs = control_db.read_table('inputs', '"name"=\'' + input_name + "'")
                # except:
                #     status_msg += 'Inputs query error. '
                #     return status_msg

                if len(inputs) == 1:
                    input = inputs[0]
                else:
                    status_msg += 'wrong number of query items returned, length: ' + str(len(inputs)) + ' for query on input name: ' + input_name
                    print('ERROR: ' + status_msg)
                    return status_msg


                # write_to_input(input, value)
                if input['type'] == 'MBTCP':

                    input_id = input['id']

                    # Now, using this id, we can determine uniquely which MBTCP entry it came from
                    splits = input_id.split('_')
                    interfaceid = splits[0]
                    register = splits[1]
                    length = splits[2]

                    string_condition = dblib.string_condition_from_lists(['interfaceid', 'register', 'length'],
                                                                         [interfaceid, register, length])
                    input_mb_entry = control_db.read_table('modbustcp', string_condition)[0]

                    # Get IP address
                    address = control_db.get_single_value('interfaces', 'address',
                                                          '"id"=\'' + input_mb_entry['interfaceid'] + "'")

                    from iiutilities import netfun

                    if input_mb_entry['options']:
                        input_options = parseoptions(input_mb_entry['options'])
                        if 'scale' in input_options:
                            pending['setpoint_value'] = float(pending['setpoint_value'])/float(input_options['scale'])

                    try:
                        result = netfun.writeMBcodedaddresses(address, register, [float(pending['setpoint_value'])], convert=input_mb_entry['format'])
                    except:
                        status_msg += 'Error in modbus'
                    else:
                        if result['statuscode'] == 0:

                            # Clear pending setpoint_value
                            pending.pop('setpoint_value', None)
                            pending_string = dicttojson(pending)
                            print('setting pending in setpoint_value mbtcp')

                            control_db.set_single_value('channels','pending',pending_string, channel_condition)
                        else:
                            status_msg += 'modbus write operation returned a non-zero status of ' + str(result['statuscode'])

                elif input['type'] == 'MOTE':
                    mote_node = input['address'].split(':')[0]
                    mote_address = input['address'].split(':')[1]
                    if mote_node == '1':
                        message = '~setsv;' + mote_address + ';' + str(pending['setpoint_value'])
                    else:
                        message = '~sendmsg;' + str(mote_node) + ';;~setsv;' + mote_address + ';' + str(pending['setpoint_value'])

                    motes_db = pilib.cupidDatabase(pilib.dirs.dbs.motes)
                    from time import sleep
                    for i in range(2):
                        time = datalib.gettimestring(datalib.timestringtoseconds(datalib.gettimestring()) + i)
                        motes_db.insert('queued', {'queuedtime':time, 'message':message})

                    # Clear pending setpoint_value
                    pending.pop('setpoint_value', None)
                    pending_string = dicttojson(pending)
                    print('setting pending in setpoint_value mote')

                    control_db.set_single_value('channels', 'pending', pending_string, channel_condition)

            if 'enabled' in pending:
                status_msg += 'processing enabled value. '

                # Get control output and have a look at it.
                input_name = channel['enabled_input']

                try:
                    inputs = control_db.read_table('inputs', '"name"=\'' + input_name + "'")
                except:
                    status_msg += 'Inputs query error. '
                    return status_msg

                if len(inputs) == 1:
                    input = inputs[0]
                else:
                    status_msg += 'wrong number of query items returned, length: ' + str(len(inputs)) + '. '
                    return status_msg

                # write_to_input(input, value)
                if input['type'] == 'MBTCP':

                    input_id = input['id']

                    # Now, using this id, we can determine uniquely which MBTCP entry it came from
                    splits = input_id.split('_')
                    interfaceid = splits[0]
                    register = splits[1]
                    length = splits[2]

                    string_condition = dblib.string_condition_from_lists(
                        ['interfaceid', 'register', 'length'],
                        [interfaceid, register, length])
                    input_mb_entry = control_db.read_table('modbustcp', string_condition)[0]

                    # Get IP address
                    address = control_db.get_single_value('interfaces', 'address',
                                                          '"id"=\'' + input_mb_entry['interfaceid'] + "'")

                    from iiutilities import netfun
                    # print(address, register,input_mb_entry['format'], int(pending['enabled']))

                    if input_mb_entry['options']:
                        input_options = parseoptions(input_mb_entry['options'])


                    try:
                        result = netfun.writeMBcodedaddresses(address, register,
                                                              [int(pending['enabled'])],
                                                              convert=input_mb_entry['format'])
                    except:
                        status_msg += 'Error in modbus'
                    else:
                        if result['statuscode'] == 0:
                            status_msg += 'That seems to have worked ok?'
                            # Clear pending setpoint_value
                            pending.pop('enabled', None)
                            pending_string = dicttojson(pending)
                            print('setting pending in enabled mbtcp')
                            control_db.set_single_value('channels', 'pending', pending_string,
                                                        channel_condition)
                        else:
                            status_msg += 'modbus write operation returned a non-zero status of ' + str(
                                result['statuscode'])

                elif input['type'] == 'MOTE':
                    mote_node = input['address'].split(':')[0]
                    mote_address = input['address'].split(':')[1]
                    if mote_node == '1':
                        message = '~setrun;' + mote_address + ';' + str(pending['enabled'])
                    else:
                        message = '~sendmsg;' + str(mote_node) + ';;~setrun;' + mote_address + ';' + str(
                            pending['enabled'])

                    motes_db = pilib.cupidDatabase(pilib.dirs.dbs.motes)
                    from time import sleep
                    for i in range(2):
                        time = datalib.gettimestring(datalib.timestringtoseconds(datalib.gettimestring()) + i)
                        motes_db.insert('queued', {'queuedtime': time, 'message': message})

                    # Clear pending setpoint_value
                    pending.pop('enabled', None)
                    pending_string = dicttojson(pending)

                    control_db.set_single_value('channels', 'pending', pending_string, channel_condition)


        # Insert entry into control log
        insert = {'time': time, 'process_value': channel['process_value'],
                  'setpoint_value': channel['setpoint_value'],
                  'action': channel['action'], 'algorithm': channel['control_algorithm'],
                  'enabled': channel['enabled'],
                  'status_msg': status_msg}
        # print(insert)
        log_db.insert(logtablename, insert)

        # Size log
        log_options = datalib.parseoptions(channel['log_options'])
        log_db.size_table(logtablename, **log_options)


    # If active reset and we didn't set channel modes, disable outputs
    # Active reset is not yet explicitly declared, but implied

    if disableoutputs and channel['type'] not in ['remote']:
        status_msg += 'Disabling Outputs. '
        for id in [channel['positive_output'], channel['negative_output']]:
            control_db.set_single_value('outputs','value',0,'"id"=\'' + id + "'", queue=True)
            status_msg += 'Outputs disabled for id=' + id + '. '

    # Set status message for channel
    control_db.set_single_value('channels', 'status_message', status_msg, channel_condition, queue=True)

    # Set update time for channel
    control_db.set_single_value('channels', 'control_updatetime', time, channel_condition, queue=True)

    # Execute query
    control_db.execute_queue()
    return status_msg
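
The remote-channel branches above build mote commands by hand: '~setsv;<address>;<value>' (or '~setrun;...') goes straight to node 1, and commands for other nodes are wrapped in '~sendmsg;<node>;;...'. A small helper capturing that convention is sketched below; the function is hypothetical, and the production code concatenates these strings inline.

# Hedged sketch of the mote command convention used above. Hypothetical helper;
# the real code builds these strings inline.
def build_mote_command(command, mote_node, mote_address, value):
    body = '~{};{};{}'.format(command, mote_address, value)
    if str(mote_node) == '1':
        return body  # node 1 is the gateway; send directly
    return '~sendmsg;{};;{}'.format(mote_node, body)

# build_mote_command('setsv', 3, 2, 72.5) -> '~sendmsg;3;;~setsv;2;72.5'
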
Example #14
def processremotedata(datadict, stringmessage):
    import cupid.pilib as pilib
    from iiutilities import dblib, datalib, utility

    control_db = pilib.dbs.control
    motes_db = pilib.dbs.motes
    log_db = pilib.dbs.log

    print('PROCESSING REMOTE DATA')
    print(datadict)
    if 'nodeid' in datadict:
        """
        We are going to search for keywords. Message type will not be explicitly declared so
        as not to waste precious message space in transmission. Or we could tack these on in
        the gateway, but we won't yet.

        Then we have to construct a query in which we replace a unique item.
        This will take the form:
          update or replace in remotes where nodeid=3 and msgtype='iovalue' and iopin=3
          update or replace in remotes where nodeid=2 and msgtype='owdev' and owrom='28XXXXXXXXXXXXXX'
                      (and later which IO on this device)
          update or replace in remotes where nodeid=2 and msgtype='chanstat' and channum=1
                      (need to see if all channel variables can fit into one message:
                      channum, sv, pv, mode, state)
        """
        runquery = False
        nodeid = datadict['nodeid']

        # We are going to use this to filter datadict entries into remote channels. More later.
        allowedfieldnames = [
            'nodeid', 'sv', 'pv', 'htcool', 'run', 'treg', 'prop', 'p', 'i',
            'd'
        ]

        control_db = dblib.sqliteDatabase(pilib.dirs.dbs.control)

        # Command responses, including value requests

        # Node status values

        value_types = [
            'vbat', 'vout', 'autoboot', 'output', 'batterylow', 'sigbootok',
            'sigshutoff'
        ]
        # sprintf(buff, "nodeid:1,vbat:%01d.%02d,vout:%01d.%02d,autoboot:%01d,output:%01d", wholevoltage, fractvoltage,
        #        wholevoltage2, fractvoltage2, autobootenabled, outputstate);
        # Serial.println(buff);
        # sprintf(buff, "batterylow:%01d,sigbootok:%01d,sigshutoff:%01d", batteryLow, bootok, sigshutoff);

        for value_type in value_types:
            if value_type in datadict:

                insert = {
                    'nodeid': nodeid,
                    'msgtype': 'nodestatus',
                    'keyvaluename': value_type,
                    'keyvalue': datadict[value_type],
                    'data': stringmessage.replace('\x00', ''),
                    'time': datalib.gettimestring()
                }
                control_db.query(dblib.makedeletesinglevaluequery(
                    'remotes', {
                        'conditionnames': ['nodeid', 'keyvaluename'],
                        'conditionvalues': [nodeid, insert['keyvaluename']]
                    }),
                                 queue=True)
                control_db.insert('remotes', insert, queue=True)

        # Node system events

        if 'event' in datadict:
            insert = {
                'nodeid': nodeid,
                'msgtype': 'event',
                'keyvaluename': datadict['event'],
                'keyvalue': datalib.gettimestring(),
                'data': stringmessage.replace('\x00', ''),
                'time': datalib.gettimestring()
            }
            control_db.query(dblib.makedeletesinglevaluequery(
                'remotes', {
                    'conditionnames': ['nodeid', 'keyvaluename'],
                    'conditionvalues': [nodeid, insert['keyvaluename']]
                }),
                             queue=True)
            control_db.insert('remotes', insert, queue=True)

            # Also queue an email message to cupid_status
            import socket
            hostname = socket.gethostname()

            message = 'CuPID system event : {} \r\n\r\n'.format(
                insert['keyvaluename'])
            notifications_email = '*****@*****.**'
            subject = 'CuPID : {} : {} '.format(hostname,
                                                insert['keyvaluename'])
            notification_database = pilib.cupidDatabase(
                pilib.dirs.dbs.notifications)
            system_database = pilib.cupidDatabase(pilib.dirs.dbs.system)

            currenttime = datalib.gettimestring()
            notification_database.insert(
                'queued', {
                    'type': 'email',
                    'message': message,
                    'options':
                    'email:' + notifications_email + ',subject:' + subject,
                    'queuedtime': currenttime
                })
            system_database.set_single_value('notifications',
                                             'lastnotification',
                                             currenttime,
                                             condition="item='boot'")

        if 'cmd' in datadict:
            if datadict['cmd'] == 'lp':
                # Remove command key and process remaining data
                del datadict['cmd']
                motetablename = 'node_' + nodeid + '_status'

                # Create table if it doesn't exist
                motes_db.create_table(motetablename,
                                      pilib.schema.mote,
                                      queue=True)

                for key in datadict:
                    thetime = datalib.gettimestring()
                    if key in [
                            'iov', 'iov2', 'iov3', 'pv', 'pv2', 'sv', 'sv2',
                            'iomd', 'ioen', 'iordf', 'iorpf', 'chen', 'chmd',
                            'chnf', 'chpf', 'chdb', 'chsv', 'chsv2', 'chpv',
                            'chpv2'
                    ]:
                        # We need to process these specially, going back to the original message
                        values = datadict[key]
                        valuelist = values.split('|')
                        print(valuelist)
                        index = 0
                        if key in ['iov', 'iov2', 'iov3']:
                            base = 'iov_'
                            if key == 'iov2':
                                index = 5
                            elif key == 'iov3':
                                index = 9
                        elif key in ['pv', 'pv2']:
                            base = 'pv_'
                            if key == 'pv2':
                                index = 5
                        elif key in ['sv', 'sv2']:
                            base = 'sv_'
                            if key == 'sv2':
                                index = 5
                        else:
                            base = key + '_'

                        querylist = []
                        for value in valuelist:
                            query = dblib.makesqliteinsert(
                                motetablename,
                                [thetime, base + str(index), value])
                            motes_db.query(query, queue=True)
                            # querylist.append(dblib.makesqliteinsert(motetablename, [thetime, base + str(index), value]))
                            index += 1

                    # Update table entry. Each entry has a unique key
                    # updatetime, keyname, data
                    else:
                        motes_db.insert(motetablename, {
                            'time': thetime,
                            'message': key,
                            'value': datadict[key]
                        },
                                        queue=True)
                        # print('inserted ' + thetime + ' ' + key + ' ' + datadict[key])

                    if motes_db.queued_queries:
                        motes_db.execute_queue()

        # This is for values that are reported by the node
        elif 'ioval' in datadict:
            # check to see if entry exists with node and ionum. Need to generalize these.
            # Might make sense to put then into an ID to compare. Other database, compatible?
            # iovalue type message
            try:
                msgtype = 'iovalue'
                keyvalue = datadict['iopin']
                keyvaluename = 'iopin'
            except:
                print('oops')
            else:
                control_db.insert()

        elif 'owdev' in datadict:
            try:
                msgtype = 'owdev'
                keyvalue = datadict['owrom'][2:]
                keyvaluename = 'owrom'
                if len(keyvalue) != 16:
                    raise NameError('invalid ROM length')
                else:
                    for romchar in keyvalue:
                        hexchars = [
                            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
                            'A', 'B', 'C', 'D', 'E', 'F', 'a', 'b', 'c', 'd',
                            'e', 'f'
                        ]
                        if romchar not in hexchars:
                            raise NameError('Invalid ROM hex character')
            except:
                print("oops")
            else:
                runquery = True

        elif 'chan' in datadict:
            # insert or update remotes database value
            # first need to get existing entry if one exists
            msgtype = 'channel'
            keyvalue = str(int(datadict['chan']))  # Zeroes bad
            keyvaluename = str(int(datadict['chan']))

            # conditions = '"nodeid"=2 and "msgtype"=\'channel\' and "keyvalue"=\'' + keyvalue + '\'"'

            # Should be able to offer all conditions, but it is not working for some reason, so we will
            # iterate over list to find correct entry

            # Here, get all remote entries for the specific node id
            conditions = '"nodeid"=\'' + datadict[
                'nodeid'] + '\' and "msgtype"=\'channel\''
            chanentries = control_db.read_table('remotes', conditions)

            # parse through to get data from newdata
            newdata = {}
            import string
            printable = set(string.printable)
            for key, value in datadict.items():
                if key not in ['chan', 'nodeid']:
                    if key in allowedfieldnames:
                        filteredvalue = ''.join(filter(lambda x: x in printable, value))
                        newdata[key] = filteredvalue

            updateddata = newdata.copy()

            # This does not take time into account. This should not be an issue, as there should only be one entry
            # Now match entry from node. Here, for example, keyvaluename could be channel, and keyvalue representing the
            # channel or controller on the node.

            for chanentry in chanentries:
                if (str(int(chanentry['keyvalue']))) == keyvalue:
                    # print('I FOUND')

                    # newdata  = {'fakedatatype':'fakedata', 'anotherfakedatatype':'morefakedata'}
                    olddata = datalib.parseoptions(chanentry['data'])

                    olddata.update(updateddata)
                    updateddata = olddata.copy()


            # Ok, so here we are. We have either added new data to old data, or we have the new data alone.
            # We take our dictionary and convert it back to json and put it in the text entry

            updatedjsonentry = datalib.dicttojson(updateddata)

            conditions += ' and "keyvalue"=\'' + keyvalue + '\''
            deletequery = dblib.makedeletesinglevaluequery(
                'remotes', conditions)

            # hardcode this for now, should supply valuename list.
            addquery = dblib.makesqliteinsert('remotes', [
                datadict['nodeid'], 'channel', keyvalue, 'channel',
                updatedjsonentry,
                datalib.gettimestring()
            ])
            print(deletequery)
            print(addquery)

            control_db.queries([deletequery, addquery])

        elif 'scalevalue' in datadict:
            # TODO : What is this?
            # querylist.append('create table if not exists scalevalues (value float, time string)')
            # querylist.append(dblib.makesqliteinsert('scalevalues', [datadict['scalevalue'], datalib.gettimestring()], ['value', 'time']))
            # log_db.queries(querylist)
            pass

        if control_db.queued_queries:
            control_db.execute_queue()
        else:
            # print('not running query')
            pass

    return
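
# --- Hedged sketch (not in the original source) --------------------------
# The docstrings above describe an "update or replace" into 'remotes' keyed
# on (nodeid, msgtype, keyvaluename); the code implements it as a queued
# delete followed by an insert. A minimal sqlite3 illustration of that
# pattern with a simplified schema; the real code builds the same pair of
# statements with dblib.makedeletesinglevaluequery and control_db.insert.
import sqlite3

def upsert_remote(conn, nodeid, msgtype, keyvaluename, keyvalue, data, time):
    with conn:  # one transaction, roughly what execute_queue() does
        conn.execute(
            'DELETE FROM remotes WHERE nodeid=? AND msgtype=? AND keyvaluename=?',
            (nodeid, msgtype, keyvaluename))
        conn.execute(
            'INSERT INTO remotes (nodeid, msgtype, keyvaluename, keyvalue, data, time) '
            'VALUES (?, ?, ?, ?, ?, ?)',
            (nodeid, msgtype, keyvaluename, keyvalue, data, time))

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE remotes (nodeid TEXT, msgtype TEXT, keyvaluename TEXT, '
             'keyvalue TEXT, data TEXT, time TEXT)')
upsert_remote(conn, '2', 'nodestatus', 'vbat', '3.71', 'vbat:3.71', '2017-01-01 00:00:00')
upsert_remote(conn, '2', 'nodestatus', 'vbat', '3.68', 'vbat:3.68', '2017-01-01 00:01:00')
print(conn.execute('SELECT keyvalue FROM remotes').fetchall())  # [('3.68',)] -- one row per key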
예제 #15
0
def application(environ, start_response):
    import json
    import hashlib

    # Set top folder to allow import of modules

    import os, sys, inspect

    top_folder = \
        os.path.split(os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0, top_folder)

    import inventorylib
    from iiutilities import dblib, datalib
    from time import time

    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    request_body = environ['wsgi.input'].read(request_body_size)
    try:
        post = json.loads(request_body.decode('utf-8'))
    except:
        print('Error decoding: ')
        print(request_body.decode('utf-8'))
        post = {}

    output = {'message': ''}
    status = '200 OK'

    try:
        try:
            output['remote_ip'] = environ['HTTP_X_FORWARDED_FOR'].split(',')[-1].strip()
        except KeyError:
            output['remote_ip'] = environ['REMOTE_ADDR']
    except:
        output['remote_ip'] = 'Error getting IP address'


    """
    Here we verify credentials of session data against those in the database.
    While we authenticate in the browser, this does not stop POST queries made directly to the API without the page,
    so we take the hpass stored in the dictionary and verify it.

    * Which databases are available are stored in users table, column accesskeywords
    * Which one is currently in use is stored in table usermeta, data where user=username. data is json-encoded metadata
        pathalias field

    * What path extension this corresponds to is stored in pathaliases

    """

    # I don't think this will be used. We will get pathalias from the database. Let's deal with changing it later.

    # First, let's get our pathalias and translate to a path, using our path reloader

    # if 'pathalias' in post:
    #     output['message'] += inventorylib.reloaddatapaths(pathalias=post['pathalias'])
    # else:
    #     output['message'] += 'No pathalias found in post dictionary. '

    wsgiauth = True
    authverified = False

    if wsgiauth:

        # Verify that session login information is legit: the hashed password, with salt and username, must match
        # the hash stored in the database.
        import hashlib

        safe_database = dblib.sqliteDatabase(inventorylib.sysvars.dirs.dbs.safe)
        if 'username' in post and post['username']:
            output['message'] += 'Session user is ' + post['username'] + '. '
        else:
            output['message'] += 'No session user found. '
            post['username'] = ''

        if post['username']:
            try:
                condition = "name='" + post['username'] + "'"
                user_data = safe_database.read_table_row('users', condition=condition)[0]
            except:
                output['message'] += 'error in user sqlite query for session user "' + post['username'] + '". '
                user_data = {'accesskeywords':'demo','admin':False}
            else:
                # Get session hpass to verify credentials

                hashedpassword = post['hpass']
                hname = hashlib.new('sha1')
                hname.update(post['username'].encode('utf-8'))
                hashedname = hname.hexdigest()
                hentry = hashlib.new('md5')
                hentry.update((hashedname + inventorylib.sysvars.salt + hashedpassword).encode('utf-8'))
                hashedentry = hentry.hexdigest()
                if hashedentry == user_data['password']:
                    # successful auth
                    output['message'] += 'Password verified. '
                    authverified = True
                    # output['message'] += 'accesskeywords : ' + str(userdata)
                    output['accesskeywords'] = user_data['accesskeywords']
                    if output['accesskeywords'].find(',') >= 0:
                        accesskeywords = output['accesskeywords'].split(',')
                        accesskeywords = [accesskeyword.strip() for accesskeyword in accesskeywords]
                    else:
                        accesskeywords = output['accesskeywords'].strip()

                    path_aliases = safe_database.read_table('pathaliases')

                    # Find usermeta entry and grab which database is selected. If one is not selected, update selection
                    # to first that user is allowed to access
                    try:
                        user_meta_row = safe_database.read_table_row('usermeta', condition="user='" + post['username'] + "'")[0]
                    except:
                        print('error getting usermeta for username ' + post['username'])
                        output['message'] += 'error getting usermeta for username ' + post['username']
                        user_meta_row = []
                        return

                    path_alias = ''
                    if not user_meta_row:
                        output['message'] += 'User meta entry not found. Attempting to create. '

                        # assign default database
                        default_database = accesskeywords[0]

                        output['message'] += 'Choosing pathalias from first in keywords: ' + default_database + '. '
                        if any(default_database == path_alias['alias'] for path_alias in path_aliases):
                            output['message'] += 'Verified that default alias exists in pathaliases database. '
                        else:
                            output['message'] += 'ERROR: first entry in keywords (' +default_database + ') not found in aliases. '

                        # Insert usermeta entry. This should never happen.
                        safe_database.insert('usermeta', {'user':post['username'], 'data':'pathalias:' + default_database})
                        path_alias = default_database
                    else:
                        output['message'] += 'User meta entry found with text ' + str(user_meta_row) + '. '

                        # Parse the string into json and ensure that the pathalias is in there
                        user_meta_dict = datalib.parseoptions(user_meta_row['data'])
                        if 'pathalias' in user_meta_dict:
                            path_alias = user_meta_dict['pathalias']
                            output['message'] += 'pathalias found: ' + user_meta_dict['pathalias'] + '. '

                            if any(path_alias == stored_path_alias['alias'] for stored_path_alias in path_aliases):
                                output['message'] += 'Verified that default alias exists in pathaliases database. '

                    if path_alias:
                        # reload datapaths with path alias
                        reload_message = inventorylib.reloaddatapaths(pathalias=path_alias)

                        # DEFINITELY COMMENT THIS OUT FOR SECURITY SAKE (absolute paths are secret!!)
                        output['message'] += reload_message

                else:
                    # failed auth
                    output['message'] += 'Failed password check. '
        else:
            # Demo status
            authverified = True
            user_data = {'authlevel':0}

    else:
        output['message'] += 'WSGI authorization not enabled. '

    if authverified or not wsgiauth:
        output['authorized'] = True
    else:
        output['authorized'] = False

    try:
        action = post['action']
    except KeyError:
        output['message'] = 'no action in request'
        action = ''

    if output['authorized'] and action:
        output['action_allowed'] = inventorylib.check_action_auths(action, user_data['authlevel'])
    else:
        output['action_allowed'] = False

    if output['authorized'] and output['action_allowed']:

        # Stock functions
        if action == 'addeditpart':
            output['message'] += 'addpart keyword found. '
            inventorylib.addeditstockpart(post, output)
            inventorylib.calcstockfromall()
        elif action == 'copypart':
            output['message'] += 'copypart keyword found. '
            inventorylib.copystockpart(post, output)
            inventorylib.calcstockfromall()
        elif action == 'deleteparts':
            output['message'] += 'deleteparts keyword found. '
            inventorylib.deletestockparts(post, output)
            inventorylib.calcstockfromall()
        elif action == 'gettrackedpartdata':
            output['message'] += 'gettrackedpartdata keyword found. '
            output['data'] = inventorylib.calcstockfromall(**post)['trackedpart']
        elif action =='generateorders':
            output['message'] += 'generate orders keyword found. '
            inventorylib.generateandaddorders()

        # Inventory functions
        # Edit and add are separated, as names are autogenerated
        elif action == 'editinventory':
            output['message'] += 'editinventory keyword found. '
            inventorylib.editinventory(post, output)
            inventorylib.calcstockfromall()
        elif action == 'addinventory':
            output['message'] += 'addinventory keyword found. '
            inventorylib.createnewinventory(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteinventories':
            output['message'] += 'deleteinventories keyword found. '
            inventorylib.deleteinventories(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditinventorypart':
            output['message'] += 'addeditinventorypart keyword found. '
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deletepartsfrominventory':
            output['message'] += 'deletepartsfrominventory keyword found. '
            inventorylib.deletepartsfrominventory(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()

        # Order functions
        elif action == 'editorder':
            output['message'] += 'editorder keyword found. '
            inventorylib.editorder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addorder':
            output['message'] += 'addorder keyword found. '
            inventorylib.createneworder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteorders':
            output['message'] += 'deleteorders keyword found. '
            inventorylib.deleteorders(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditorderpart':
            output['message'] += 'addeditorderpart keyword found. '
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditorderparts':
            output['message'] += 'addeditorderparts keyword found. '
            if 'partsdata' in post:
                post['partsdata'] = json.loads(post['partsdata'])
                inventorylib.addeditpartlist(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'deletepartsfromorder':
            output['message'] += 'deletepartsfromorder keyword found. '
            inventorylib.deletepartsfromorder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()

        # BOM functions
        elif action == 'copybom':
            output['message'] += 'copybom keyword found. '
            inventorylib.copybom(post, output)
            inventorylib.makebommetadata()
        elif action == 'addeditbom':
            output['message'] += 'addeditbom keyword found. '
            inventorylib.addeditbom(post, output)
            inventorylib.makebommetadata()
        elif action == 'addeditbomparts':
            output['message'] += 'addeditbomparts keyword found. '
            # Operate on partsdata
            post['partsdata'] = json.loads(post['partsdata'])
            inventorylib.addeditpartlist(post, output)
            inventorylib.makebommetadata()
        elif action == 'getbomcalcs':
            output['message'] += 'getbomcalcs keyword found. '
            inventorylib.calcbomprice(post, output)
        elif action == 'getquotecalcs':
            output['message'] += 'getquotecalcs keyword found. '
            output['message'] += 'function not written yet. '
            # inventorylib.calcbomprice(post, output)
        elif action == 'deletepartsfrombom':
            output['message'] += 'deletepartsfrombom keyword found. '
            inventorylib.deletepartsfrombom(post, output)
            inventorylib.makebommetadata()
        elif action == 'deleteboms':
            output['message'] += 'deleteboms keyword found. '
            inventorylib.deleteboms(post, output)
            inventorylib.makebommetadata()

        # Assembly functions
        elif action == 'copyassembly':
            output['message'] += 'copyassembly keyword found. '
            inventorylib.copyassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'copybomintoassembly':
            output['message'] += 'copybomintoassembly keyword found. '
            inventorylib.copybomintoassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditassembly':
            output['message'] += 'addeditassembly keyword found. '
            inventorylib.addeditassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditassemblyparts':
            output['message'] += 'addeditassemblyparts keyword found. '
            post['partsdata'] = json.loads(post['partsdata'])
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
            
        elif action == 'getassemblycalcs':
            output['message'] += 'getassemblycalcs keyword found. '
            inventorylib.calcassemblyprice(post, output)
        elif action == 'deletepartsfromassembly':
            output['message'] += 'deletepartsfromassembly keyword found. '
            inventorylib.deletepartsfromassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteassemblys':
            output['message'] += 'deleteassemblys keyword found. '
            inventorylib.deleteassemblies(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()

        # Quotes
        elif action == 'deletequotes':
            output['message'] += 'deletequotes keyword found. '
            inventorylib.deletequotes(post, output)
            inventorylib.makebommetadata(database=inventorylib.sysvars.dbs.quotes)
        elif action == 'copyquotetoboms':
            output['message'] += 'copyquotetoboms keyword found. '
            inventorylib.copyquotetoboms(post, output)
            inventorylib.makebommetadata()

        # Export functions

        elif action == 'exportbomtopdf':
            output['message'] += 'exportbomtopdf keyword found. '
            inventorylib.writepanelbomtopdf(post, output)

            thetime = datalib.gettimestring()
            cleantime = thetime.replace(' ', '_').replace(':', '_')

            # Get bom from boms database
            bom = inventorylib.sysvars.dbs.boms.read_table(post['name'])

            cleanbomname = post['name'].replace(' ','_').replace(':','_')
            filename = cleanbomname + '_' + cleantime
            outputroot = '/var/www/html/panelbuilder/data/downloads/'

            weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + filename

            inventorylib.writepanelbomtopdf(**{'bomdata': bom,
                                      'title': 'Bom generated from ' + post['name'] + ' ' + cleantime,
                                          'outputfile': outputroot + filename})

            output['data']['weblink'] = weblink

        elif action == 'exportassemblytopdf':
            output['message'] += 'exportassemblytopdf keyword found. '

            thetime = datalib.gettimestring()
            cleantime = thetime.replace(' ', '_').replace(':', '_')

            # Get bom from boms database
            assemblydata = inventorylib.sysvars.dbs.assemblies.read_table(post['name'])

            cleanname = post['name'].replace(' ','_').replace(':','_')
            filename = cleanname + '_' + cleantime + '.pdf'
            outputroot = '/var/www/html/panelbuilder/data/downloads/'

            weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + filename

            inventorylib.writepanelbomtopdf(**{'bomdata': assemblydata,
                                      'title': 'Bom generated from ' + post['name'] + ' ' + thetime,
                                          'format':'picklist','outputfile': outputroot + filename})

            output['data'] = {'assemblydata':assemblydata}
            output['weblink'] = weblink

        # Panel builder
        elif action in ['panelcalcs', 'panelcalcsgenquote']:
            output['message'] += 'panelcalc keyword found. '
            import panelbuilder
            for key,value in post.items():
                # print(key, value)
                pass

            if 'paneldesc' in post:
                import json
                post['paneldesc'] = json.loads(post['paneldesc'])

            bomresults = panelbuilder.paneltobom(**post)

            output['data'] = {}
            # d needs to have a 'paneldesc' key with the panel spec data in it.
            output['data']['bomdescription'] = bomresults['bomdescription']
            output['data']['options'] = bomresults['options']
            output['data']['bomcalcs'] = inventorylib.calcbomprice({'bomdictarray':bomresults['bom']})['data']
            output['message'] += bomresults['message']

            # We don't actually want to return the full boms by default. We don't want this in the client, and it's a
            # lot of data anyway
            if 'returnfullboms' not in post:
                for option, value in output['data']['options'].items():
                    if 'bom' in value:
                        print('Deleting bom from option ' + str(option))

                        del output['data']['options'][option]['bom']
                    if 'flatbom' in value:
                        print('Deleting flatbom from option ' + str(option))
                        del output['data']['options'][option]['flatbom']

            if action == 'panelcalcsgenquote':
                thetime = datalib.gettimestring()
                cleantime = thetime.replace(' ','_').replace(':','_')
                outputroot = '/var/www/html/panelbuilder/data/downloads/'

                if 'paneltype' in post['paneldesc'] and post['paneldesc']['paneltype'] == 'brewpanel':
                    datedquotefilename = 'panelbuilder_brew_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_brew_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_brew_quote.pdf'
                    genericbomfilename = 'panelbuilder_brew_bom.pdf'
                elif 'paneltype' in post['paneldesc'] and post['paneldesc']['paneltype'] == 'temppanel':
                    datedquotefilename = 'panelbuilder_temp_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_temp_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_temp_quote.pdf'
                    genericbomfilename = 'panelbuilder_temp_bom.pdf'
                else:
                    datedquotefilename = 'panelbuilder_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_quote.pdf'
                    genericbomfilename = 'panelbuilder_bom.pdf'

                weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + datedquotefilename

                # until we can get this to format properly in the pdf, we are going to leave it generic
                # description = output['data']['bomdescription']
                description = 'Control panel quote generated by panelbuilder.'
                datedquotes = True

                # Create quote pdf from BOM
                if datedquotes:

                    inventorylib.writepanelquotetopdf(**{'bomdata': bomresults['bom'], 'options': bomresults['options'],
                        'title':'Quote auto-generated by panelbuilder   \t\t' +
                        datalib.gettimestring(), 'price': str(output['data']['bomcalcs']['totalprice']),
                    'outputfile': outputroot + datedquotefilename, 'description':description})

                inventorylib.writepanelquotetopdf(**{'bomdata': bomresults['bom'], 'options': bomresults['options'],
                        'title':'Quote auto-generated by panelbuilder '+ thetime,
                       'price': output['data']['bomcalcs']['totalprice'], 'outputfile':outputroot + genericquotefilename})

                # Create database entry BOM

                # Create table
                # print('** DATABASE')
                # print(panelbuilder.sysvars.dirs.dbs.quotes)

                bomname = 'quote_' + cleantime
                inventorylib.addeditbom({'bomdata':{'name':bomname}, 'database':panelbuilder.sysvars.dirs.dbs.quotes}, output)
                # print('** BOM **')
                # print(bomresults['bom'])
                inserts = []
                for part in bomresults['bom']:
                    inserts.append(dblib.makesqliteinsert(bomname, [part['partid'],part['qty']], ['partid','qty']))
                dblib.sqlitemultquery(inventorylib.sysvars.dirs.dbs.quotes, inserts)
                inventorylib.makebommetadata(database=inventorylib.sysvars.dbs.quotes)

                # inventorylib.addeditpartlist(post, output)


                # Create pdfs

                if datedquotes:
                    inventorylib.writepanelbomtopdf(**{'bomdata': bomresults['bom'], 'options': bomresults['options'],
                        'title':'Quote auto-generated by panelbuilder '
                      + thetime, 'outputfile': outputroot + datedbomfilename})

                inventorylib.writepanelbomtopdf(**{'bomdata': bomresults['bom'], 'title': 'panelbuilder BOM generated ' + thetime,
                                 'outputfile': outputroot + genericbomfilename, 'totalprice': output['data']['bomcalcs']['totalprice']})

                output['data']['quotelink'] = weblink
                from iiutilities.utility import gmail
                mymail = gmail(subject="Quote generated")
                mymail.message = 'Quote generated at ' + cleantime + '\r\n'

                if 'remote_ip' in output:
                    mymail.message += 'IP address ' + output['remote_ip'] + '\r\n'

                mymail.message += bomresults['bomdescription']
                mymail.recipient = '*****@*****.**'
                mymail.sender = 'II Panelbuilder'
                mymail.send()


        # Multi-use
        elif action == 'reloaditemdatafromstock':
            output['message'] += 'reloaditemdatafromstock keyword found. '
            inventorylib.refreshpartsfromstock(post, output)
            if 'bomname' in post:
                inventorylib.recalcpartdata(bomname=post['bomname'])
                inventorylib.makebommetadata()
            elif 'assemblyname' in post:
                inventorylib.recalcpartdata(assemblyname=post['assemblyname'])
                inventorylib.makeassemblymetadata()

        # Generic functions
        elif action == 'gettablenames':
            dbpath = inventorylib.dbnametopath(post['database'])
            try:
                output['data'] = dblib.gettablenames(dbpath)
            except:
                output['message'] += 'Error getting table names'
        elif action == 'switchtablerows':
            dbpath = inventorylib.dbnametopath(post['database'])
            dblib.switchtablerows(dbpath, post['tablename'], post['row1'], post['row2'], post['uniqueindex'])
        elif action == 'modwsgistatus':
            output['processgroup'] = repr(environ['mod_wsgi.process_group'])
            output['multithread'] = repr(environ['wsgi.multithread'])
        elif action == 'gettabledata':
            output['message']+='Gettabledata. '
            if 'database' in post:
                dbpath = inventorylib.dbnametopath(post['database'])
                if dbpath:
                    output['message'] += 'Friendly name ' + post['database'] + ' translated to path ' + dbpath + ' successfully. '

                    if 'tablenames' in post:  # Get multiple tables
                        output['message'] += 'Multiple tables. '
                        data = []
                        if 'start' in post:
                            fixedstart = int(post['start'])
                        else:
                            fixedstart = 0
                        if 'length' in post:
                            fixedlength = int(post['length'])
                        else:
                            fixedlength = 1
                        if 'lengths[]' in post:
                            lengths = list(map(int, post['lengths[]']))
                        else:
                            lengths = []
                        if 'starts' in post:
                            starts = list(map(int, post['starts']))
                        else:
                            starts = []

                        for index, table in enumerate(post['tablenames[]']):
                            try:
                                length = lengths[index]
                            except IndexError:
                                length = fixedlength
                            try:
                                start = starts[index]
                            except IndexError:
                                start = fixedstart

                            data.append(dblib.dynamicsqliteread(dbpath, table, start, length))
                            output['data']=data
                    elif 'length' in post:  # Handle table row subset
                        output['message']+='Length keyword. '
                        if not 'start' in post:
                            post['start'] = 0
                        thetime = time()
                        output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'], post['start'], post['length'])
                        output['querytime'] = time() - thetime
                    elif 'row' in post:  # Handle table row
                        output['message'] += 'Row keyword. ' + str(post['row'])
                        thetime = time()
                        output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'], post['row'])
                        output['querytime'] = time() - thetime
                    elif 'tablename' in post:  # Handle entire table
                        output['message'] += 'Tablename keyword: ' + post['tablename'] + '. '
                        thetime = time()
                        if 'condition' in post:
                            if not post['condition'] == '':
                                output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'], condition=post['condition'])
                            else:
                                output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'])
                        else:
                            try:
                                output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'])
                            except:
                                output['message'] += 'Error retrieving data. '
                            else:
                                output['message'] += 'Data query appears successful. '
                        output['querytime'] = time() - thetime
                else:
                    output['message'] += 'Friendly name ' + post['database'] + ' unsuccessfully translated. '
            else:
                output['message'] += 'No database present in action request'
        else:
            output['message'] = 'no command matched for action "' + action + '"'
    else:
        # status = '403 Forbidden'
        output['message'] += 'Not authorized for this action (or perhaps at all?) '

    if 'data' in output:
        if output['data']:
            newetag = hashlib.md5(str(output['data']).encode('utf-8')).hexdigest()
            if 'etag' in post:
                if newetag == post['etag']:
                    status = '304 Not Modified'
                    output['data'] = ''
        else:
            newetag=''
    else:
        newetag=''

    if 'datasize' in post:
        output['datasize'] = sys.getsizeof(output.get('data', ''))

    output['etag'] = newetag
    # try:
    foutput = json.dumps(output, indent=1)
    # except:
    #     import csv
    #     w = csv.writer(open("/usr/lib/iicontrollibs/inventory/dumperr.log", "w"))
    #     for key, val in output.items():
    #         w.writerow([key, val])
    response_headers = [('Content-type', 'application/json')]
    response_headers.append(('Etag',newetag))
    start_response(status, response_headers)

    return foutput.encode('utf-8')
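
# --- Hedged sketch (not in the original source) --------------------------
# The tail of the handler above implements a simple ETag handshake: hash the
# response data, and if the client supplied a matching 'etag' in the POST
# body, downgrade to '304 Not Modified' and strip the payload. A standalone
# version of that logic:
import hashlib

def apply_etag(output, post):
    status = '200 OK'
    newetag = ''
    if output.get('data'):
        newetag = hashlib.md5(str(output['data']).encode('utf-8')).hexdigest()
        if post.get('etag') == newetag:
            status = '304 Not Modified'
            output['data'] = ''
    output['etag'] = newetag
    return status

# Example round trip: a second request carrying the etag from the first
# response comes back as a 304 with no data.
first = {'data': [{'partid': 'P100', 'qty': 2}], 'message': ''}
print(apply_etag(first, {}))                                        # 200 OK
second = {'data': [{'partid': 'P100', 'qty': 2}], 'message': ''}
print(apply_etag(second, {'etag': first['etag']}), second['data'])  # 304 Not Modified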
예제 #16
0
def runboot():
    import subprocess
    from time import sleep

    import pilib
    import spilights
    from iiutilities import utility, dblib, datalib

    try:
        pilib.set_all_wal(False)
    except:
        print('error setting wal mode')

    interfaces = pilib.dbs.control.read_table('interfaces')

    # Clear out status bits, if for no other reason to see the LEDs come on
    for statusvalue in [
            'systemstatusstatus', 'hamachistatus', 'picontrolstatus',
            'updateiostatus', 'serialhandlerstatus'
    ]:
        dblib.setsinglevalue(pilib.dirs.dbs.system, 'systemstatus',
                             statusvalue, 0)

    systemstatus = dblib.readonedbrow(pilib.dirs.dbs.system, 'systemstatus')[0]

    # Queue a message indicating we are rebooting
    # TODO: Make this an actions option, or put it somewhere.
    # try:
    import socket
    hostname = socket.gethostname()

    message = 'CuPID is booting:\r\n\r\n'
    notifications_email = '*****@*****.**'
    subject = 'CuPID : ' + hostname + ' : booting'
    notification_database = pilib.cupidDatabase(pilib.dirs.dbs.notifications)
    system_database = pilib.cupidDatabase(pilib.dirs.dbs.system)

    currenttime = datalib.gettimestring()
    notification_database.insert(
        'queued', {
            'type': 'email',
            'message': message,
            'options': 'email:' + notifications_email + ',subject:' + subject,
            'queuedtime': currenttime
        })
    system_database.set_single_value('notifications',
                                     'lastnotification',
                                     currenttime,
                                     condition="item='boot'")

    # except Exception as e:
    #     error_message = 'EXCEPTION in notification: {}'.format(e.message)
    #     print (error_message)
    #     utility.log(pilib.dirs.logs.system, error_message)
    # else:
    #     utility.log(pilib.dirs.logs.system, 'Boot notification complete. ')

    # Start pigpiod

    subprocess.call(['killall', 'pigpiod'])
    sleep(1)
    utility.log(pilib.dirs.logs.system, 'boot: starting pigpio daemon', 3,
                pilib.loglevels.system)
    subprocess.call(['/usr/local/bin/pigpiod'])

    # Start webserver

    subprocess.call(['killall', 'nginx'])
    subprocess.call(['killall', 'uwsgi'])
    subprocess.call(['killall', 'apache2'])

    if systemstatus['webserver'] == 'apache':
        utility.log(pilib.dirs.logs.system, 'boot: starting apache', 3,
                    pilib.loglevels.system)
        subprocess.call(['service', 'apache2', 'start'])
    elif systemstatus['webserver'] == 'nginx':
        utility.log(pilib.dirs.logs.system, 'boot: starting nginx', 3,
                    pilib.loglevels.system)
        subprocess.call(['service', 'nginx', 'start'])

    # Run uwsgi daemon if nginx is running

    try:
        result = subprocess.check_output(['service', 'nginx',
                                          'status']).decode('utf-8')
    except subprocess.CalledProcessError as e:
        result = ''
        # print('I AM FAILING')
        # print e.output

    if result:
        utility.log(pilib.dirs.logs.system,
                    'boot: starting uwsgi based on nginx call', 0)
        subprocess.call([
            'uwsgi', '--emperor', '/usr/lib/iicontrollibs/wsgi/',
            '--daemonize', '/var/log/cupid/uwsgi.log'
        ])
    else:
        # print(' I KNOW NGINX IS NOT RUNNING')
        pass
    # Mount 1wire master

    subprocess.call(['killall', 'owfs'])
    subprocess.call(['killall', 'owserver'])
    subprocess.call(['killall', 'owhttpd'])

    runi2cowfs = True
    runusbowfs = False

    temp_unit = 'C'
    for interface in interfaces:
        if interface['enabled']:
            from iiutilities.datalib import parseoptions
            options_dict = parseoptions(interface['options'])
            if 'tempunit' in options_dict:
                if options_dict['tempunit'] in [
                        'F', 'f', 'Fahrenheit', 'fahrenheit'
                ]:
                    temp_unit = 'F'

            if interface['interface'] == 'I2C' and interface[
                    'type'] == 'DS2483':
                runi2cowfs = True
            if interface['interface'] == 'USB' and interface[
                    'type'] == 'DS9490':
                runusbowfs = True

            if interface['interface'] == 'SPI1' and interface['type'] == 'CuPIDlights':
                spilights.updatelightsfromdb(pilib.dirs.dbs.control,
                                             'indicators', 1)
            if interface['interface'] == 'SPI0' and interface['type'] == 'CuPIDlights':
                spilights.updatelightsfromdb(pilib.dirs.dbs.control,
                                             'indicators', 0)

    if runi2cowfs or runusbowfs:
        if runi2cowfs:
            utility.log(pilib.dirs.logs.system, 'boot: Running i2c owserver',
                        3, pilib.loglevels.system)
            try:
                if temp_unit == 'F':
                    subprocess.call([
                        '/opt/owfs/bin/owserver', '-F', '--i2c=/dev/i2c-1:ALL',
                        '-p', '4304'
                    ])
                else:
                    subprocess.call([
                        '/opt/owfs/bin/owserver', '--i2c=/dev/i2c-1:ALL', '-p',
                        '4304'
                    ])
            except:
                utility.log(pilib.dirs.logs.system,
                            'boot: error running i2c owserver', 1,
                            pilib.loglevels.system)
        if runusbowfs:
            utility.log(pilib.dirs.logs.system, 'boot: Running usb owserver',
                        3, pilib.loglevels.system)
            try:
                if temp_unit == 'F':
                    subprocess.call(
                        ['/opt/owfs/bin/owserver', '-F', '-u', '-p', '4304'])
                else:
                    subprocess.call(
                        ['/opt/owfs/bin/owserver', '-u', '-p', '4304'])
            except:
                utility.log(pilib.dirs.logs.system,
                            'error running usb owserver', 1,
                            pilib.loglevels.system)

        utility.log(pilib.dirs.logs.system,
                    'boot: Running owfs/owserver mount', 3,
                    pilib.loglevels.system)
        try:
            if temp_unit == 'F':
                subprocess.call(
                    ['/opt/owfs/bin/owfs', '-F', '-s', '4304', '/var/1wire/'])
            else:
                subprocess.call(
                    ['/opt/owfs/bin/owfs', '-s', '4304', '/var/1wire/'])
        except:
            utility.log(pilib.dirs.logs.system, 'boot: error running owfs', 1,
                        pilib.loglevels.system)

        utility.log(pilib.dirs.logs.system,
                    'boot: Running owhttpd/owserver mount', 3,
                    pilib.loglevels.system)
        try:
            if temp_unit == 'F':
                subprocess.call([
                    '/opt/owfs/bin/owhttpd', '-F', '-s', '4304', '-p', '4305'
                ])
            else:
                subprocess.call(
                    ['/opt/owfs/bin/owhttpd', '-s', '4304', '-p', '4305'])
        except:
            utility.log(pilib.dirs.logs.system, 'boot: error running owhttpd',
                        1, pilib.loglevels.system)

    else:
        utility.log(pilib.dirs.logs.system, 'boot: not running owfs', 3,
                    pilib.loglevels.system)

    # Run netstart script if enabled
    if systemstatus['netconfigenabled']:
        from netconfig import runconfig
        utility.log(pilib.dirs.logs.system, 'boot: running boot netconfig', 2,
                    pilib.loglevels.system)
        runconfig(onboot=True)
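
# --- Hedged sketch (not in the original source) --------------------------
# The interface scan above parses each interface's 'options' string (comma-
# separated key:value pairs, via datalib.parseoptions) to pick a temperature
# unit, then adds '-F' to the owserver/owfs/owhttpd command lines when the
# unit is Fahrenheit. A standalone sketch of that decision; parse_options is
# a simplified stand-in for iiutilities.datalib.parseoptions.
def parse_options(optionstring):
    options = {}
    for pair in optionstring.split(','):
        if ':' in pair:
            key, value = pair.split(':', 1)
            options[key.strip()] = value.strip()
    return options

def owserver_command(interfaces, binary='/opt/owfs/bin/owserver', port='4304'):
    temp_unit = 'C'
    for interface in interfaces:
        if interface.get('enabled'):
            options = parse_options(interface.get('options', ''))
            if options.get('tempunit') in ['F', 'f', 'Fahrenheit', 'fahrenheit']:
                temp_unit = 'F'
    command = [binary]
    if temp_unit == 'F':
        command.append('-F')
    command += ['--i2c=/dev/i2c-1:ALL', '-p', port]
    return command

# ['/opt/owfs/bin/owserver', '-F', '--i2c=/dev/i2c-1:ALL', '-p', '4304']
print(owserver_command([{'enabled': 1, 'options': 'tempunit:F,polltime:30'}]))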
예제 #17
0
def runperiodicio(**kwargs):

    # Imports reflect what the function body uses; in the original module these
    # likely live at module level.
    import pilib
    import updateio
    from iiutilities import utility, datalib
    from time import sleep

    settings = {'force_run': False, 'run_once': False, 'debug': False, 'quiet':True, 'logerrors':True}
    settings.update(kwargs)

    systemdb = pilib.cupidDatabase(pilib.dirs.dbs.system, **settings)
    logdb = pilib.cupidDatabase(pilib.dirs.dbs.log, **settings)
    controldb = pilib.cupidDatabase(pilib.dirs.dbs.control, **settings)


    if settings['force_run']:
        updateioenabled = True
    else:
        updateioenabled = systemdb.get_single_value('systemstatus', 'updateioenabled')


    if settings['debug']:
        pilib.set_debug()
        settings['quiet'] = False

    if updateioenabled:
        import pigpio
        pi = pigpio.pi()
        io_objects = {}
        first_run = True

    # print("quiet : {}, {}, {} ".format(systemdb.settings['quiet'], logdb.settings['quiet'], controldb.settings['quiet']))

    while updateioenabled:

        utility.log(pilib.dirs.logs.io, 'Running periodicupdateio', 3, pilib.loglevels.io)
        utility.log(pilib.dirs.logs.system, 'Running periodicupdateio', 3, pilib.loglevels.system)

        # Set last run time
        systemdb.set_single_value('systemstatus', 'lastupdateiopoll', datalib.gettimestring())
        systemdb.set_single_value('systemstatus', 'updateiostatus', '1')

        # Read and record everything as specified in dirs.dbs.control
        # Update database of inputs with read data

        # DEBUG
        runupdate = True
        if runupdate:
            try:
                reply = updateio.updateiodata(piobject=pi, io_objects=io_objects, first_run=first_run, settings=settings)
            except:
                import traceback
                formatted_lines = traceback.format_exc().splitlines()

                error_mail = utility.gmail()
                import socket
                hostname = socket.gethostname()
                error_mail.subject = 'Error running ioupdate on host ' + hostname + '. '
                error_mail.message = error_mail.subject + ' Traceback: ' + '\n'.join(formatted_lines)
                error_mail.send()

        else:
            utility.log(pilib.dirs.logs.io, 'DEBUG: Update IO disabled', 1, pilib.loglevels.io)
            utility.log(pilib.dirs.logs.system, 'DEBUG: Update IO disabled', 1, pilib.loglevels.system)

        systemsdict = systemdb.read_table_row('systemstatus')[0]

        readtime = systemsdict['updateiofreq']

        """
        Defaults.
        TODO: We need to get these from a db entry that the user can set on the main control panel. These will live in
        the 'defaults' table. Imagine that.

        Then, we can set the logpoints for each input and channel. We'll store them in the ioinfo table
        """

        plotpoints = 20
        logpoints = 1000

        try:
            logsettings = logdb.read_table('logsettings')
            for setting in logsettings:
                if setting['item'] == 'defaultlogpoints':
                    logpoints = int(setting['value'])
                    # print('logpoints found and set to ' + str(logpoints))
        except:
            pass
            # print('not found or other error. oops. ')

        """
        Update process_values in channels
        """

        channels = controldb.read_table('channels')
        for channel in channels:
            # Get pv_input for each channel
            channelname = channel['name']
            pv_input = channel['pv_input']

            # Get the input for the name from inputs info
            # Then get the value and readtime from the input if it
            # can be found

            if pv_input and pv_input not in ['none', 'None']:

                values = controldb.get_values('inputs', ['value', 'polltime'], condition="name='" + pv_input + "'")
                if values:
                    process_value = values['value']
                    process_value_time = values['polltime']

                    # Only update channel value if value was found
                    # print('process_value: ', process_value)
                    if process_value or process_value == 0:
                        # print('control value for channel ' + channelname + ' = ' + str(process_value))
                        controldb.set_single_value('channels', 'process_value', str(process_value), "pv_input='" + pv_input + "'", queue=True)
                        # pilib.sqlitequery(pilib.dirs.dbs.control, 'update channels set process_value=' + str(
                        #     process_value) + ' where pv_input = ' + "'" + pv_input + "'")
                        controldb.set_single_value('channels', 'process_value_time', str(process_value_time), "pv_input='" + pv_input + "'", queue=True)
                        # pilib.sqlitequery(pilib.dirs.dbs.control,
                        #                   'update channels set process_valuetime=\'' + controltime + '\' where pv_input = ' + "'" + pv_input + "'")
                else:
                    print('input not found. ')

            else:  # input is empty
                controldb.set_single_value('channels', 'statusmessage', 'No pv_input found', "name='" + channelname + "'", queue=True)

                # disable channel
                #pilib.sqlitequery(dirs.dbs.control,"update channels set enabled=0 where pv_input = \'" + pv_input + "'")

            if channel['sv_input'] and channel['sv_input'] not in ['none', 'None']:

                # Read the current value of the sv_input, mirroring the pv_input read above
                values = controldb.get_values('inputs', ['value'], condition="name='" + channel['sv_input'] + "'")
                value = values['value'] if values else None

                # Only update channel value if value was found
                if value or value==0:
                    # print('control value for channel ' + channelname + ' = ' + str(process_value))
                    controldb.set_single_value('channels', 'setpoint_value', str(value), "sv_input='" + channel['sv_input'] + "'", queue=True)

            if channel['enabled_input'] and channel['enabled_input'] not in ['none', 'None']:

                # Read the current value of the enabled_input, mirroring the pv_input read above
                values = controldb.get_values('inputs', ['value'], condition="name='" + channel['enabled_input'] + "'")
                value = values['value'] if values else None


                # Only update channel value if value was found
                if value or value==0:
                    # print('control value for channel ' + channelname + ' = ' + str(process_value))
                    controldb.set_single_value('channels', 'enabled', str(value), "enabled_input='" + channel['enabled_input'] + "'", queue=True)

        if controldb.queued_queries:
            controldb.execute_queue()

        """
        Log value into tabled log

        Get data for all sensors online
        """

        inputsdata = controldb.read_table('inputs')
        for inputrow in inputsdata:
            logtablename = 'input_' + inputrow['id'] + '_log'
            utility.log(pilib.dirs.logs.io, 'Logging: ' + logtablename, 5, pilib.loglevels.io)
            # print( 'Logging: ' + logtablename, 5, pilib.loglevels.io)

            if datalib.isvalidtimestring(inputrow['polltime']):
                # Create table if it doesn't exist

                # query = 'create table if not exists \'' + logtablename + '\' ( value real, time text primary key)'
                # print('we are logging')

                log_db = pilib.cupidDatabase(pilib.dirs.dbs.log, **settings)

                # Includes 'if not exists' , so will not overwrite
                log_db.create_table(logtablename, pilib.schema.standard_datalog, dropexisting=False, queue=True)
                # dblib.sqlitequery(pilib.dirs.dbs.log, query)

                # Enter row
                insert = {'time':inputrow['polltime'], 'value':inputrow['value']}
                log_db.insert(logtablename, insert, queue=True)

                # query = dblib.makesqliteinsert(logtablename, [inputrow['value'], inputrow['polltime']],['value', 'time'])
                # dblib.sqlitequery(pilib.dirs.dbs.log, query)

                # print(log_db.queued_queries)
                log_db.execute_queue()

                # Clean log
                log_db.clean_log(logtablename)

                # Size log based on specified size
                log_options = datalib.parseoptions(inputrow['log_options'])
                log_db.size_table(logtablename, **log_options)
            else:
                pass
                # print('invalid poll time')

        """
        log metadata
        """

        pilib.get_and_set_logdb_metadata(pilib.dirs.dbs.log)

        if not settings['run_once']:
            utility.log(pilib.dirs.logs.io, 'Sleeping for ' + str(readtime), 1, pilib.loglevels.io)
            sleep(readtime)
        else:
            break

        if not settings['force_run']:
            # Read from systemstatus to make sure we should be running
            updateioenabled = systemdb.get_single_value('systemstatus', 'updateioenabled')

        # Signal to io reads that we just started.
        first_run = False

    systemdb.set_single_value('systemstatus', 'updateiostatus', '0')
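
# --- Hedged sketch (not in the original source) --------------------------
# The logging loop above creates an 'input_<id>_log' table on demand, inserts
# a {time, value} row, and then sizes the table from the input's log_options.
# A minimal sqlite3 sketch of that rotate-on-insert pattern. The real
# size_table in iiutilities.dblib takes keyword options; keeping only the
# newest 'size' rows here is an assumption about its behavior.
import sqlite3

def log_point(conn, tablename, timestring, value, size=1000):
    with conn:
        conn.execute('CREATE TABLE IF NOT EXISTS "{}" (time TEXT PRIMARY KEY, value REAL)'
                     .format(tablename))
        conn.execute('INSERT OR REPLACE INTO "{}" (time, value) VALUES (?, ?)'
                     .format(tablename), (timestring, value))
        # Trim to the newest 'size' rows; oldest entries are dropped first
        conn.execute('DELETE FROM "{0}" WHERE time NOT IN '
                     '(SELECT time FROM "{0}" ORDER BY time DESC LIMIT ?)'
                     .format(tablename), (size,))

conn = sqlite3.connect(':memory:')
for minute in range(5):
    log_point(conn, 'input_1wire_log', '2017-01-01 00:0{}:00'.format(minute), 20.0 + minute, size=3)
print(conn.execute('SELECT COUNT(*) FROM input_1wire_log').fetchone())  # (3,)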