示例#1
0
def auto_populate_data_agent_send_items(**kwargs):
    """
    Rebuild the data agent's send_items table from the inputs table of the
    control database, including only inputs whose interface is enabled in
    settings['interface_includes'].

    kwargs override any of the defaults in `settings`.
    """
    settings = {
        # BUG FIX: 'debug' appeared twice in this dict literal (False, then
        # True at the bottom); the duplicate is removed, keeping the value
        # that previously took effect (True).
        'debug': True,
        'agent_db_path': '/var/www/data/dataagent.db',
        'inputs_db_path': '/var/www/data/control.db',
        'inputs_table': 'inputs',
        'empty_table': True,
        'interface_includes': {
            'gpio': False,
            'mote': True,
            '1wire': True,
            'lan': True
        },
        'tablename': 'send_items'
    }
    settings.update(kwargs)

    inputs_db = dblib.sqliteDatabase(settings['inputs_db_path'])
    agent_db = dblib.sqliteDatabase(settings['agent_db_path'])
    if settings['empty_table']:
        agent_db.empty_table(settings['tablename'], queue=True)

    for input_entry in inputs_db.read_table('inputs'):
        # Include only entries whose interface is explicitly enabled.
        interface = input_entry['interface'].lower()
        if settings['interface_includes'].get(interface):
            agent_db.insert(settings['tablename'],
                            {'id': input_entry['id'], 'last_transmit': '',
                             'options': json.dumps(da_vars.default_agent_item_options)},
                            queue=True)

    if settings['debug']:
        print(agent_db.queued_queries)
    agent_db.execute_queue()
示例#2
0
def copy_log_to_archive(log_name, **kwargs):
    """
    Copy the named log table out of the main logs database into its own
    archive database file, stamping the archive with creation metadata.

    kwargs override the defaults:
        archive_name:    target filename; defaults to log_name + timestamp + '.db'
        force_extension: if True, ensure archive_name ends with `extension`
        extension:       required filename extension (default '.db')
        directory:       destination directory (default dirs.archive)
    """
    settings = {
        'archive_name': None,
        'force_extension': True,
        'extension': '.db',
        'directory': dirs.archive
    }
    settings.update(kwargs)

    from iiutilities.datalib import gettimestring

    if not settings['archive_name']:
        settings['archive_name'] = log_name + gettimestring() + '.db'

    # BUG FIX: previously read settings['force_suffix'] and settings['suffix'],
    # keys that are never defined (KeyError at runtime). Use the declared
    # force_extension/extension settings, and append the configured extension.
    if settings['force_extension'] and not settings['archive_name'].endswith(settings['extension']):
        settings['archive_name'] += settings['extension']

    # Determine type by log name

    archive_db = dblib.sqliteDatabase(settings['directory'] + settings['archive_name'])
    logs_db = dblib.sqliteDatabase(dirs.dbs.log)

    # Mirror the source table's schema and contents into the archive as 'data'.
    existing_table = logs_db.read_table(log_name)
    existing_schema = logs_db.get_schema(log_name)
    archive_db.create_table('data', existing_schema, queue=True)
    archive_db.insert('data', existing_table, queue=True)

    # Record provenance alongside the data.
    archive_db.create_table('info', schema.data_items, queue=True)
    archive_db.insert('info', {'valuename': 'created', 'value': gettimestring()}, queue=True)
    archive_db.insert('info', {'valuename': 'name', 'value': log_name}, queue=True)

    archive_db.execute_queue()
示例#3
0
def get_and_set_logdb_metadata(database, **kwargs):
    """Scan a log database and rewrite its 'metadata' table with per-table stats."""
    meta = getdatameta(database, **kwargs)

    the_db = dblib.sqliteDatabase(database)
    column_defs = [
        {'name': 'tablename', 'primary': True},
        {'name': 'numpoints', 'type': 'integer'},
        {'name': 'timespan', 'type': 'real'},
        {'name': 'type'},
        {'name': 'subtype'},
        {'name': 'id'}
    ]
    the_schema = dblib.sqliteTableSchema(column_defs)

    # Drop and recreate so the table always reflects the current scan.
    the_db.create_table('metadata', the_schema, dropexisting=True, queue=True)

    fields = ('tablename', 'numpoints', 'timespan', 'type', 'subtype', 'id')
    for meta_item in meta:
        row = {field: meta_item[field] for field in fields}
        the_db.insert('metadata', row, queue=True)

    the_db.execute_queue()
示例#4
0
def writedbtabletopdf(**kwargs):
    """
    Render a database table to PDF via writetabletopdf.

    Required kwargs: 'database', 'tablename', 'outputfile'. Returns a dict
    with 'status' (0 ok, 1 error) and an accumulated 'message' string;
    returns early on the first missing argument or an empty/unreadable table.
    """
    output = {'status': 0, 'message': ''}
    requiredarguments = ['database', 'tablename', 'outputfile']
    for argument in requiredarguments:
        if argument not in kwargs:
            output['message'] += argument + ' argument required. Exiting. '
            output['status'] = 1
            return output

    from iiutilities import dblib

    this_database = dblib.sqliteDatabase(kwargs['database'])
    tabledata = this_database.read_table(kwargs['tablename'])

    # Empty list or None both indicate there is nothing to render.
    if not tabledata:
        output['message'] += 'No tabledata retrieved (Error or empty table). '
        output['status'] = 1
        return output

    # NOTE: the original also built an unused list of column names here;
    # removed as dead code.
    returnstatus = writetabletopdf(tabledata)

    output['message'] += 'Routine finished. '
    output['status'] = returnstatus
    return output
示例#5
0
def init_access_database(path=access_dbpath):
    """Create the 'access_log' table (time-keyed nginx access fields) at path."""
    from iiutilities import dblib

    # 'time' is the primary key; all remaining columns are untyped text fields.
    text_columns = [
        'full', 'remote_address', 'user', 'request_time', 'full_request',
        'request', 'status', 'body_bytes', 'origin_url', 'host', 'referer',
        'http_fwd', 'user_agent'
    ]
    column_defs = [{'name': 'time', 'primary': True}]
    column_defs.extend({'name': column} for column in text_columns)

    schema = dblib.sqliteTableSchema(column_defs)
    the_database = dblib.sqliteDatabase(path)
    the_database.create_table('access_log', schema)
示例#6
0
def writedbtabletopdf(**kwargs):
    """
    Render a database table to PDF via writetabletopdf.

    Requires 'database', 'tablename' and 'outputfile' kwargs; returns a dict
    with 'status' (0 ok, 1 error) and an accumulated 'message'.
    """
    output = {'status': 0, 'message': ''}

    # Bail out on the first missing required argument.
    for required in ('database', 'tablename', 'outputfile'):
        if required not in kwargs:
            output['message'] += required + ' argument required. Exiting. '
            output['status'] = 1
            return output

    from iiutilities import dblib

    this_database = dblib.sqliteDatabase(kwargs['database'])
    tabledata = this_database.read_table(kwargs['tablename'])

    if not tabledata:
        output['message'] += 'No tabledata retrieved (Error or empty table). '
        output['status'] = 1
        return output

    columnames = list(tabledata[0])

    returnstatus = writetabletopdf(tabledata)

    output['message'] += 'Routine finished. '
    output['status'] = returnstatus
    return output
示例#7
0
def rebuild_data_agent_db(**kwargs):
    """
    Rebuild (migrate or recreate) data agent tables and seed default rows.

    kwargs override settings: path, tablelist (tables to rebuild), migrate
    (migrate vs. create), data_loss_ok (passed to migrate_table).
    """
    settings = {
        'path': '/var/www/data/dataagent.db',
        'tablelist': ['send_items'],
        'migrate': True,
        'data_loss_ok': True
    }
    settings.update(kwargs)

    data_agent_db = dblib.sqliteDatabase(settings['path'])

    ### Data Agent table
    tablename = 'send_items'
    if tablename in settings['tablelist']:
        print('rebuilding {}'.format(tablename))

        if settings['migrate']:
            data_agent_db.migrate_table(tablename, schema=da_vars.schema.send_items,
                                        queue=True, data_loss_ok=settings['data_loss_ok'])
        else:
            data_agent_db.create_table(tablename, schema=da_vars.schema.send_items, queue=True)

        # Seed the two default mote items with standard agent options.
        default_options = json.dumps(da_vars.default_agent_item_options)
        for item_id in ('MOTE1_vbat', 'MOTE1_vout'):
            data_agent_db.insert(tablename,
                                 {'id': item_id, 'last_transmit': '', 'options': default_options},
                                 queue=True)

    # TODO: Add more details settings tables here.
    data_agent_db.execute_queue()
示例#8
0
def copy_log_to_archive(log_name, **kwargs):
    """
    Copy the named log table out of the main logs database into its own
    archive database file, stamping the archive with creation metadata.

    kwargs override the defaults:
        archive_name:    target filename; defaults to log_name + timestamp + '.db'
        force_extension: if True, ensure archive_name ends with `extension`
        extension:       required filename extension (default '.db')
        directory:       destination directory (default dirs.archive)
    """
    settings = {
        'archive_name': None,
        'force_extension': True,
        'extension': '.db',
        'directory': dirs.archive
    }
    settings.update(kwargs)

    from iiutilities.datalib import gettimestring

    if not settings['archive_name']:
        settings['archive_name'] = log_name + gettimestring() + '.db'

    # BUG FIX: previously read settings['force_suffix'] and settings['suffix'],
    # keys that are never defined (KeyError at runtime). Use the declared
    # force_extension/extension settings, and append the configured extension.
    if settings['force_extension'] and not settings['archive_name'].endswith(settings['extension']):
        settings['archive_name'] += settings['extension']

    # Determine type by log name

    archive_db = dblib.sqliteDatabase(settings['directory'] +
                                      settings['archive_name'])
    logs_db = dblib.sqliteDatabase(dirs.dbs.log)

    # Mirror the source table's schema and contents into the archive as 'data'.
    existing_table = logs_db.read_table(log_name)
    existing_schema = logs_db.get_schema(log_name)
    archive_db.create_table('data', existing_schema, queue=True)
    archive_db.insert('data', existing_table, queue=True)

    # Record provenance alongside the data.
    archive_db.create_table('info', schema.data_items, queue=True)
    archive_db.insert('info', {'valuename': 'created', 'value': gettimestring()}, queue=True)
    archive_db.insert('info', {'valuename': 'name', 'value': log_name}, queue=True)

    archive_db.execute_queue()
示例#9
0
def insertuser(database, username, password, salt, **kwargs):
    """
    Insert a user row into the 'users' table, hashing the password with salt.

    kwargs may override the table schema (key 'schema') and any default entry
    field (email, accesskeywords, authlevel, temp, admin). Note that kwargs of
    the same name as the positional username/password take precedence, since
    entry.update(kwargs) is applied after seeding the entry.
    """
    from iiutilities import dblib, datalib
    settings = {
        'schema': dblib.sqliteTableSchema([
            {'name': 'id', 'type': 'integer', 'primary': True},
            {'name': 'name', 'unique': True},
            {'name': 'password'},
            {'name': 'accesskeywords'},
            {'name': 'admin'},
            {'name': 'email'},
            {'name': 'temp'},
            {'name': 'authlevel', 'type': 'integer', 'default': 0}
        ])
    }
    settings.update(kwargs)

    this_database = dblib.sqliteDatabase(database)

    entry = {'name': username, 'password': password, 'email': '', 'accesskeywords': '',
             'authlevel': 1, 'temp': '', 'admin': 0}
    entry.update(kwargs)

    existingentries = this_database.read_table('users')

    usercount = len(existingentries)
    # BUG FIX: a second list ('existingnames') duplicated this comprehension
    # but still read 'id' instead of 'name' and was never used; removed.
    existingindices = [existingentry['id'] for existingentry in existingentries]

    print('EXISTING ENTRIES:')
    print(existingentries)

    # Pick the first integer id at or above usercount + 1 not already taken.
    newindex = usercount + 1
    while newindex in existingindices:
        newindex += 1

    hashedentry = datalib.gethashedentry(entry['name'], entry['password'], salt=salt)

    this_database.create_table('users', schema=settings['schema'], queue=True, migrate=True)

    entry['id'] = newindex
    entry['password'] = hashedentry
    this_database.insert('users', entry, queue=True)

    this_database.execute_queue()
示例#10
0
def parse_and_table_nginx_access_log(logpath='/var/log/nginx/access.log'):
    """Parse the nginx access log and queue each request into 'access_log'."""
    requests = parse_nginx_log(logpath)

    from iiutilities import dblib
    access_db = dblib.sqliteDatabase(access_dbpath)

    # Queue all inserts, then run them as a single batch.
    for parsed_request in requests:
        access_db.insert('access_log', parsed_request, queue=True)
    access_db.execute_queue()
示例#11
0
def table_access_histo_data(access_meta):
    """
    Persist analyzed access data into the access database: a '404s' table of
    not-found hits, plus per-domain remote/total hit histogram tables.
    """
    from iiutilities import dblib
    access_db = dblib.sqliteDatabase(access_meta['dbpath'])
    existing_tablenames = access_db.get_table_names()

    # 404s: one row per not-found request, keyed by time.
    not_found_schema = dblib.sqliteTableSchema([
        {'name': 'time', 'primary': True},
        {'name': 'url'}
    ])
    access_db.create_table('404s', not_found_schema)
    for not_found_entry in access_meta['not_found']:
        access_db.insert('404s', not_found_entry)
    access_db.execute_queue()

    # Shared schema for all histogram tables: time -> integer count.
    histo_schema = dblib.sqliteTableSchema([
        {'name': 'time', 'primary': True},
        {'name': 'count', 'type': 'integer'}
    ])

    # Remote hits first, then total hits, each into '<domain>_<suffix>' tables.
    for meta_key, table_suffix in (('remote_hits', '_remotehisto'),
                                   ('total_hits', '_totalhisto')):
        for domain_name, domain_data in access_meta[meta_key].items():
            tablename = domain_name + table_suffix
            if tablename not in existing_tablenames:
                access_db.create_table(tablename, histo_schema, queue=True)
            for histo_time, histo_count in domain_data['histo_data'].items():
                access_db.insert(tablename,
                                 {'time': histo_time, 'count': histo_count},
                                 queue=True)

    if access_db.queued_queries:
        access_db.execute_queue()
示例#12
0
def test_conc(**kwargs):
    """
    Concurrency smoke test: spawn several threads that each repeatedly read
    the same table via read_table_smart, tallying successes and failures.

    NOTE(review): kwargs are currently ignored -- settings.update(kwargs) is
    commented out. Confirm whether that is intentional before enabling.
    """
    settings = {
        'database': dblib.sqliteDatabase(pilib.dirs.dbs.system),
        'tablename': 'logconfig',
        'sessions': 5,
        'timeout': 0,
        'reads': 1000
    }
    # settings.update(kwargs)

    import threading
    from time import sleep

    def read_table(i):
        """Thread worker: read the table settings['reads'] times, counting results."""
        results = {
            'name': threading.currentThread().getName(),
            'success': 0,
            'fail': 0
        }

        for iteration in range(settings['reads']):
            thread_name = threading.currentThread().getName()
            print(thread_name + ' reading table ' + settings['tablename'])
            try:
                the_table = settings['database'].read_table_smart(
                    settings['tablename'])
            except Exception:
                # BUG FIX: was a bare 'except:', which would also swallow
                # KeyboardInterrupt/SystemExit; narrowed to Exception.
                print('Worker ' + thread_name + ' FAIL')
                results['fail'] += 1
            else:
                results['success'] += 1

            # Stagger workers so they do not read in lockstep.
            wait_time = 0.1 + i * 0.01
            sleep(wait_time)

        print(results)
        return

    for i in range(settings['sessions']):
        threading.Thread(target=read_table,
                         args=(i, ),
                         name='Worker ' + str(i)).start()
示例#13
0
def addversionentry(database_path, tablename, entrydict):
    """
    Record a repo version entry (item/version/versiontime/updatetime) into
    the versions database, creating the table on first use.
    """
    import iiutilities.dblib as dblib
    from iiutilities.datalib import gettimestring

    versions_db = dblib.sqliteDatabase(database_path)
    if tablename not in versions_db.get_table_names():
        # First entry for this table: 'item' is the primary key.
        versions_db.create_table(tablename, dblib.sqliteTableSchema([
            {'name': 'item', 'primary': True},
            {'name': 'version'},
            {'name': 'versiontime'},
            {'name': 'updatetime'}
        ]))

    versions_db.insert(tablename, {
        'item': entrydict['repo'],
        'version': entrydict['headcommithexsha'],
        'versiontime': gettimestring(entrydict['headcommitdate']),
        'updatetime': gettimestring()
    })
示例#14
0
def get_and_set_logdb_metadata(database, **kwargs):
    """Scan a log database and rewrite its 'metadata' table with per-table stats."""
    meta = getdatameta(database, **kwargs)

    the_db = dblib.sqliteDatabase(database)
    the_schema = dblib.sqliteTableSchema([
        {'name': 'tablename', 'primary': True},
        {'name': 'numpoints', 'type': 'integer'},
        {'name': 'timespan', 'type': 'real'},
        {'name': 'type'},
        {'name': 'subtype'},
        {'name': 'id'}
    ])
    # Drop and recreate so the table always reflects the current scan.
    the_db.create_table('metadata', the_schema, dropexisting=True, queue=True)

    for meta_item in meta:
        row = {key: meta_item[key]
               for key in ('tablename', 'numpoints', 'timespan',
                           'type', 'subtype', 'id')}
        the_db.insert('metadata', row, queue=True)

    the_db.execute_queue()
示例#15
0
def get_and_log_netstats(path=netstats_dbpath, **kwargs):
    """Run a network speed test and append the result to the 'wired' table."""
    import speedtest
    from iiutilities import dblib
    from iiutilities.datalib import gettimestring

    results = speedtest.call_tester(**kwargs)

    the_database = dblib.sqliteDatabase(path)

    # Convert raw bits/sec to Mbit/s, rounded to two decimals.
    entry = {
        'time': gettimestring(),
        'download': round(results.download / 1000000, 2),
        'upload': round(results.upload / 1000000, 2),
        'ping': round(results.ping, 2)
    }

    if 'wired' not in the_database.get_table_names():
        the_database.create_table('wired', netspeed_schema)
    the_database.insert('wired', entry)
示例#16
0
def test_conc(**kwargs):
    """
    Concurrency smoke test: spawn several threads that each repeatedly read
    the same table via read_table_smart, tallying successes and failures.

    NOTE(review): kwargs are currently ignored -- settings.update(kwargs) is
    commented out. Confirm whether that is intentional before enabling.
    """
    settings = {
        'database': dblib.sqliteDatabase(pilib.dirs.dbs.system),
        'tablename': 'logconfig',
        'sessions': 5,
        'timeout': 0,
        'reads': 1000
    }
    # settings.update(kwargs)

    import threading
    from time import sleep

    def read_table(i):
        """Thread worker: read the table settings['reads'] times, counting results."""
        results = {'name': threading.currentThread().getName(), 'success': 0, 'fail': 0}

        for iteration in range(settings['reads']):
            thread_name = threading.currentThread().getName()
            print(thread_name + ' reading table ' + settings['tablename'])
            try:
                the_table = settings['database'].read_table_smart(settings['tablename'])
            except Exception:
                # BUG FIX: was a bare 'except:', which would also swallow
                # KeyboardInterrupt/SystemExit; narrowed to Exception.
                print('Worker ' + thread_name + ' FAIL')
                results['fail'] += 1
            else:
                results['success'] += 1

            # Stagger workers so they do not read in lockstep.
            wait_time = 0.1 + i * 0.01
            sleep(wait_time)

        print(results)
        return

    for i in range(settings['sessions']):
        threading.Thread(target=read_table, args=(i,), name='Worker ' + str(i)).start()
示例#17
0
def addversionentry(database_path, tablename, entrydict):
    """
    Record a repo version entry (item/version/versiontime/updatetime) into
    the versions database, creating the table on first use.
    """
    import iiutilities.dblib as dblib
    from iiutilities.datalib import gettimestring

    versions_db = dblib.sqliteDatabase(database_path)
    existing_tables = versions_db.get_table_names()
    if tablename not in existing_tables:
        # First entry for this table: 'item' is the primary key, the rest
        # are plain text columns.
        column_defs = [{'name': 'item', 'primary': True}]
        column_defs += [{'name': field} for field in ('version', 'versiontime', 'updatetime')]
        versions_db.create_table(tablename, dblib.sqliteTableSchema(column_defs))

    new_entry = {
        'item': entrydict['repo'],
        'version': entrydict['headcommithexsha'],
        'versiontime': gettimestring(entrydict['headcommitdate']),
        'updatetime': gettimestring()
    }
    versions_db.insert(tablename, new_entry)
示例#18
0
    rotate_result = split_and_trim_db_by_date(netstats_dbpath)

    # Could run a meta on netstats here, using modified results and current results.

    print('NETSTATS', rotate_result)

    print('parsing nginx access logs ...')
    parse_and_table_nginx_access_log()
    print('done parsing logs')

    print('Trimming and sorting entries')
    rotate_result = split_and_trim_db_by_date(access_dbpath)
    print('ACCESS', rotate_result)

    print('Trimming complete.')
    print('Deleting empty tables ...')

    access_db = dblib.sqliteDatabase(access_dbpath)
    access_db.drop_empty_tables()

    modified_dbs = rotate_result['modified_dbs']
    modified_dbs.append(access_dbpath)

    for db in modified_dbs:

        print(db)
        access_meta = analyze_and_histo_access_db(db)
        table_access_histo_data(access_meta)
        metadata = create_access_histo_metadata(access_meta)
        table_access_histo_metadata(metadata, dbpath=db)
示例#19
0
def processremotedata(datadict, stringmessage):
    """
    Dispatch an inbound remote-node message into the local databases.

    datadict is the parsed key/value payload from the message; stringmessage
    is the raw message text (trailing NUL padding is stripped before storage).
    Handles node status values, system events (with email notification),
    'lp' command responses into per-node mote tables, and io/onewire/channel
    reports into the 'remotes' table. Queued control-db queries are executed
    at the end.
    """
    import cupid.pilib as pilib
    from iiutilities import dblib, datalib, utility

    control_db = pilib.dbs.control
    motes_db = pilib.dbs.motes
    log_db = pilib.dbs.log

    print('PROCESSING REMOTE DATA')
    print(datadict)
    if 'nodeid' in datadict:
        """
        We are going to search for keywords. Message type will not be explicitly declared so
        as not to waste precious message space in transmission. Or we could tack these on in
        the gateway, but we won't yet.
        """
        """
        Then we have to construct a query where we will replace a unique item
        This will take the form :
          update or replace in remotes where nodeid=3 and msgtype='iovalue' and iopin=3
          update or repalce in remotes where nodeid=2 and msgtype='owdev' and owrom='28XXXXXXXXXXXXXX'
                      (and later which IO on this device)


          update or replace in remotes where nodeid=2 and msgtype='chanstat' channum=1
        """
        """
                      (need to see if all channel variables can be fit into one message:
                      channum, sv,pv,mode,state
        """
        # NOTE(review): runquery is only ever set True in the 'owdev' branch
        # and is never read afterward -- looks vestigial.
        runquery = False
        nodeid = datadict['nodeid']

        # We are going to use this to filter datadict entries into remote channels. More later.
        allowedfieldnames = [
            'nodeid', 'sv', 'pv', 'htcool', 'run', 'treg', 'prop', 'p', 'i',
            'd'
        ]

        # NOTE(review): rebinds control_db (set from pilib.dbs.control above)
        # to a fresh handle -- presumably the same control database; confirm.
        control_db = dblib.sqliteDatabase(pilib.dirs.dbs.control)

        # Command responses, including value requests

        # Node status values

        value_types = [
            'vbat', 'vout', 'autoboot', 'output', 'batterylow', 'sigbootok',
            'sigshutoff'
        ]
        # sprintf(buff, "nodeid:1,vbat:%01d.%02d,vout:%01d.%02d,autoboot:%01d,output:%01d", wholevoltage, fractvoltage,
        #        wholevoltage2, fractvoltage2, autobootenabled, outputstate);
        # Serial.println(buff);
        # sprintf(buff, "batterylow:%01d,sigbootok:%01d,sigshutoff:%01d", batteryLow, bootok, sigshutoff);

        # Delete-then-insert keeps a single 'nodestatus' row per
        # (nodeid, keyvaluename) pair in 'remotes'.
        for value_type in value_types:
            if value_type in datadict:

                insert = {
                    'nodeid': nodeid,
                    'msgtype': 'nodestatus',
                    'keyvaluename': value_type,
                    'keyvalue': datadict[value_type],
                    'data': stringmessage.replace('\x00', ''),
                    'time': datalib.gettimestring()
                }
                control_db.query(dblib.makedeletesinglevaluequery(
                    'remotes', {
                        'conditionnames': ['nodeid', 'keyvaluename'],
                        'conditionvalues': [nodeid, insert['keyvaluename']]
                    }),
                                 queue=True)
                control_db.insert('remotes', insert, queue=True)

        # Node system events

        if 'event' in datadict:
            insert = {
                'nodeid': nodeid,
                'msgtype': 'event',
                'keyvaluename': datadict['event'],
                'keyvalue': datalib.gettimestring(),
                'data': stringmessage.replace('\x00', ''),
                'time': datalib.gettimestring()
            }
            control_db.query(dblib.makedeletesinglevaluequery(
                'remotes', {
                    'conditionnames': ['nodeid', 'keyvaluename'],
                    'conditionvalues': [nodeid, insert['keyvaluename']]
                }),
                             queue=True)
            control_db.insert('remotes', insert, queue=True)

            # Also queue an email message to cupid_status
            import socket
            hostname = socket.gethostname()

            message = 'CuPID system event : {} \r\n\r\n'.format(
                insert['keyvaluename'])
            notifications_email = '*****@*****.**'
            subject = 'CuPID : {} : {} '.format(hostname,
                                                insert['keyvaluename'])
            notification_database = pilib.cupidDatabase(
                pilib.dirs.dbs.notifications)
            system_database = pilib.cupidDatabase(pilib.dirs.dbs.system)

            currenttime = datalib.gettimestring()
            notification_database.insert(
                'queued', {
                    'type': 'email',
                    'message': message,
                    'options':
                    'email:' + notifications_email + ',subject:' + subject,
                    'queuedtime': currenttime
                })
            system_database.set_single_value('notifications',
                                             'lastnotification',
                                             currenttime,
                                             condition="item='boot'")

        if 'cmd' in datadict:
            if datadict['cmd'] == 'lp':
                # Remove command key and process remaining data
                del datadict['cmd']
                motetablename = 'node_' + nodeid + '_status'

                # Create table if it doesn't exist
                motes_db.create_table(motetablename,
                                      pilib.schema.mote,
                                      queue=True)

                for key in datadict:
                    thetime = datalib.gettimestring()
                    if key in [
                            'iov', 'iov2', 'iov3', 'pv', 'pv2', 'sv', 'sv2',
                            'iomd', 'ioen', 'iordf', 'iorpf', 'chen', 'chmd',
                            'chnf', 'chpf', 'chdb', 'chsv', 'chsv2', 'chpv',
                            'chpv2'
                    ]:
                        # We need to process these specially, going back to the original message
                        values = datadict[key]
                        valuelist = values.split('|')
                        print(valuelist)
                        index = 0
                        # '2'/'3'-suffixed keys are continuation messages;
                        # start their numbering where the prior message ended.
                        if key in ['iov', 'iov2', 'iov3']:
                            base = 'iov_'
                            if key == 'iov2':
                                index = 5
                            elif key == 'iov3':
                                index = 9
                        elif key in ['pv', 'pv2']:
                            base = 'pv_'
                            if key == 'pv2':
                                index = 5
                        elif key in ['sv', 'sv2']:
                            base = 'sv_'
                            if key == 'sv2':
                                index = 5
                        else:
                            base = key + '_'

                        querylist = []
                        for value in valuelist:
                            query = dblib.makesqliteinsert(
                                motetablename,
                                [thetime, base + str(index), value])
                            motes_db.query(query, queue=True)
                            # querylist.append(dblib.makesqliteinsert(motetablename, [thetime, base + str(index), value]))
                            index += 1

                    # Update table entry. Each entry has a unique key
                    # updatetime, keyname, data
                    else:
                        motes_db.insert(motetablename, {
                            'time': thetime,
                            'message': key,
                            'value': datadict[key]
                        },
                                        queue=True)
                        # print('inserted ' + thetime + ' ' + key + ' ' + datadict[key])

                    # NOTE(review): this flush sits inside the per-key loop, so
                    # each key's queries run immediately; confirm it isn't
                    # intended one level out (after the loop).
                    if motes_db.queued_queries:
                        motes_db.execute_queue()

        # This is for values that are reported by the node
        elif 'ioval' in datadict:
            # check to see if entry exists with node and ionum. Need to generalize these.
            # Might make sense to put then into an ID to compare. Other database, compatible?
            # iovalue type message
            try:
                msgtype = 'iovalue'
                keyvalue = datadict['iopin']
                keyvaluename = 'iopin'
            except:
                print('oops')
            else:
                # NOTE(review): insert() is called with no arguments and will
                # raise TypeError -- this branch appears unfinished.
                control_db.insert()

        elif 'owdev' in datadict:
            try:
                msgtype = 'owdev'
                keyvalue = datadict['owrom'][2:]
                keyvaluename = 'owrom'
                if len(keyvalue) != 16:
                    raise NameError('invalid ROM length')
                else:
                    # Validate every ROM character is a hex digit.
                    for romcbar in keyvalue:
                        hexchars = [
                            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
                            'A', 'B', 'C', 'D', 'E', 'F', 'a', 'b', 'c', 'd',
                            'e', 'f'
                        ]
                        if romcbar not in hexchars:
                            raise NameError('Invalid ROM hex character')
            except:
                print("oops")
            else:
                runquery = True

        elif 'chan' in datadict:
            # insert or update remotes database value
            # first need to get existing entry if one exists
            msgtype = 'channel'
            keyvalue = str(int(datadict['chan']))  # Zeroes bad
            # NOTE(review): keyvaluename is set to the channel *number* rather
            # than a field name -- possibly intended to be 'chan'; confirm.
            keyvaluename = str(int(datadict['chan']))

            # conditions = '"nodeid"=2 and "msgtype"=\'channel\' and "keyvalue"=\'' + keyvalue + '\'"'

            # Should be able to offer all conditions, but it is not working for some reason, so we will
            # iterate over list to find correct enty

            # Here, get all remote entries for the specific node id
            conditions = '"nodeid"=\'' + datadict[
                'nodeid'] + '\' and "msgtype"=\'channel\''
            chanentries = control_db.read_table('remotes', conditions)

            # parse through to get data from newdata
            newdata = {}
            import string
            printable = set(string.printable)
            for key, value in datadict.items():
                if key not in ['chan', 'nodeid']:
                    if key in allowedfieldnames:
                        # NOTE(review): on Python 3, filter() returns a lazy
                        # iterator, not a str -- confirm the intended runtime,
                        # since this value is stored and later serialized.
                        filteredvalue = filter(lambda x: x in printable, value)
                        newdata[key] = filteredvalue

            updateddata = newdata.copy()

            # This does not take time into account. This should not be an issue, as there should only be one entry
            # Now match entry from node. Here, for example, keyvaluename could be channel, and keyvalue representing the
            # channel or controller on the node.

            for chanentry in chanentries:
                if (str(int(chanentry['keyvalue']))) == keyvalue:
                    # print('I FOUND')

                    # newdata  = {'fakedatatype':'fakedata', 'anotherfakedatatype':'morefakedata'}
                    olddata = datalib.parseoptions(chanentry['data'])

                    # Merge new fields over the stored ones.
                    olddata.update(updateddata)
                    updateddata = olddata.copy()

                    newqueries = []
                    conditions += ' and "keyvalue"=\'' + keyvalue + "\'"

            # Ok, so here we are. We have either added new data to old data, or we have the new data alone.
            # We take our dictionary and convert it back to json and put it in the text entry

            updatedjsonentry = datalib.dicttojson(updateddata)

            conditions += 'and "keyvalue"=\'' + keyvalue + '\''
            deletequery = dblib.makedeletesinglevaluequery(
                'remotes', conditions)

            # hardcode this for now, should supply valuename list.
            addquery = dblib.makesqliteinsert('remotes', [
                datadict['nodeid'], 'channel', keyvalue, 'channel',
                updatedjsonentry,
                datalib.gettimestring()
            ])
            print(deletequery)
            print(addquery)

            control_db.queries([deletequery, addquery])

        elif 'scalevalue' in datadict:
            # TODO : What is this?
            # querylist.append('create table if not exists scalevalues (value float, time string)')
            # querylist.append(dblib.makesqliteinsert('scalevalues', [datadict['scalevalue'], datalib.gettimestring()], ['value', 'time']))
            # log_db.queries(querylist)
            pass

        # Flush any queued control-db work accumulated above.
        if control_db.queued_queries:
            control_db.execute_queue()

            return
        else:
            # print('not running query')
            pass
    return
示例#20
0
def split_and_trim_db_by_date(logpath, **kwargs):
    """
    Split a log database into per-date archive databases and trim old rows.

    Buckets every table's rows by date (via split_time_db), writes each
    bucket into a sibling database named '<base>_<date><ext>', and then
    deletes from the live database all rows belonging to any date other
    than the most recent one.

    Args:
        logpath: path to the sqlite log database to split.
        **kwargs: settings overrides, e.g. 'division' ('day'),
            'timekey' ('time'), 'remove' ('true').
            NOTE(review): 'remove' is accepted but never consulted here;
            trimming always happens for non-current dates — confirm intent.

    Returns:
        dict with key 'modified_dbs': list of archive database paths written.
    """

    import os
    from iiutilities import dblib
    from iiutilities.datalib import gettimestring
    import time

    settings = {'division': 'day', 'timekey': 'time', 'remove': 'true'}
    settings.update(kwargs)

    data_by_date = split_time_db(logpath, **settings)
    dates = [date for date in data_by_date]
    dates.sort(reverse=True)

    # The most recent date stays in the live database; its rows are not trimmed.
    if dates:
        current_date = dates[0]
    else:
        # No timestamped data found; fall back to 'now' so no date matches it.
        current_date = time.gmtime()

    dates.reverse()

    log_db = dblib.sqliteDatabase(logpath)

    modified_dbs = []

    for date in data_by_date:

        # Prune off the time portion, keeping only the date of the timestamp.
        timestring = gettimestring(time.mktime(date)).split(' ')[0]

        # Build the archive path next to the source database. os.path.splitext
        # splits on the final extension only, so paths containing dots in
        # directory names (e.g. '/var/my.dir/log.db') are handled correctly;
        # the previous naive str.split('.') approach broke on those.
        base_path, extension = os.path.splitext(logpath)
        new_db_path = base_path + '_' + timestring + extension

        modified_dbs.append(new_db_path)
        new_db = dblib.sqliteDatabase(new_db_path)

        # Create any tables that do not yet exist in the archive database,
        # then queue all row inserts and execute in one batch.
        new_db.tablenames = new_db.get_table_names()
        for tablename in data_by_date[date]:
            if tablename not in new_db.tablenames:
                new_db.create_table(tablename,
                                    data_by_date[date][tablename]['schema'],
                                    queue=True)

            new_db.insert(tablename,
                          data_by_date[date][tablename]['data'],
                          queue=True)

        new_db.execute_queue()

        # Queue removal of archived rows from the live db (skip current date).
        if date != current_date:
            for tablename in data_by_date[date]:
                for datum in data_by_date[date][tablename]['data']:
                    log_db.delete(tablename,
                                  '"' + settings['timekey'] + '"=' + "'" +
                                  datum[settings['timekey']] + "'",
                                  queue=True)

    log_db.execute_queue()

    return {'modified_dbs': modified_dbs}
示例#21
0
def table_access_histo_metadata(metadata, dbpath=access_dbpath):
    """
    Persist access-histogram metadata into the access database.

    `metadata` maps a metadata type (used directly as a table name) to a
    dict of domains, each carrying hour/day hit counters, e.g.:

        metadata = {
            'metadata_remote': {
                'somedomain.com': {
                    'prev_day': integer,
                    'prev_hour': integer,
                    'this_day': integer,
                    'this_hour': integer
                },
                'someotherdomain.com': {...}
            },
            'metadata_total': {...}
        }

    A table is created per metadata type if it does not already exist, and
    one row per domain is queued, then everything is written in one batch.
    """
    from iiutilities import dblib

    column_names = ['this_hour', 'prev_hour', 'this_day', 'prev_day']
    schema_entries = [{'name': 'domain', 'primary': True}]
    schema_entries.extend({'name': column} for column in column_names)
    histo_schema = dblib.sqliteTableSchema(schema_entries)

    database = dblib.sqliteDatabase(dbpath)
    existing_tables = database.get_table_names()

    for table_name, domains in metadata.items():
        if table_name not in existing_tables:
            database.create_table(table_name, histo_schema, queue=True)

        for domain_name, counters in domains.items():
            # The domain name doubles as the primary-key column value.
            counters['domain'] = domain_name
            database.insert(table_name, counters, queue=True)

    # Only touch the database if something was actually queued.
    if database.queued_queries:
        database.execute_queue()
示例#22
0
def application(environ, start_response):
    """
    WSGI entry point for the CuPID control web API.

    Reads a JSON-encoded POST body, authenticates the session user against
    the users database (sha1(username) + salt + hpass, md5-hashed, compared
    to the stored password hash), checks the requested 'action' against the
    user's authorization level, and dispatches to the matching handler.
    Responds with a JSON object containing at minimum a 'message' key, plus
    action-specific 'data'.

    NOTE(review): many handlers below build SQL by string concatenation from
    POST data; they should be migrated to parameterized queries.
    """

    import cgi
    import json

    import os, sys, inspect

    # Set top folder to allow import of modules

    top_folder = os.path.split(os.path.realpath(os.path.abspath(os.path.split(inspect.getfile( inspect.currentframe() ))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0,top_folder)

    from cupid import pilib, controllib
    from iiutilities import dblib, utility, datalib

    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    request_body = environ['wsgi.input'].read(request_body_size)
    post = json.loads(request_body.decode('utf-8'))

    output = {}
    output['message'] = ''

    status = '200 OK'
    wsgiauth = True
    authverified = False

    if wsgiauth:
        # Verify that session login information is legit: hashed password,
        # with salt and username, matches hash stored in database.
        import hashlib

        safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.users)
        if 'username' in post and post['username']:
            output['message'] += 'Session user is ' + post['username'] + '. '
        else:
            output['message'] += 'No session user found. '
            post['username'] = ''

        if post['username']:
            try:
                condition = "name='" + post['username'] + "'"
                user_data = safe_database.read_table_row('users', condition=condition)[0]
            except:
                output['message'] += 'Error in user sqlite query for session user "' + post['username'] + '". '
                output['message'] += 'Condition: ' + condition + '. Path: ' + pilib.dirs.dbs.safe
                user_data = {'accesskeywords': 'demo', 'admin': False}
            else:
                # Get session hpass to verify credentials
                hashedpassword = post['hpass']
                hname = hashlib.new('sha1')
                hname.update(post['username'])
                hashedname = hname.hexdigest()
                hentry = hashlib.new('md5')
                hentry.update(hashedname + pilib.salt + hashedpassword)
                hashedentry = hentry.hexdigest()
                if hashedentry == user_data['password']:
                    # successful auth
                    output['message'] += 'Password verified. '
                    authverified = True

                    # TODO: implement usermeta
        else:
            # Demo status
            authverified = True
            user_data = {'authlevel':0}

    else:
        output['message'] += 'WSGI authorization not enabled. '

    if authverified or not wsgiauth:
        output['authorized'] = True
    else:
        # Previously 'authorized' was left unset on failed auth, so the
        # check below raised KeyError instead of returning 401.
        output['authorized'] = False

    try:
        action = post['action']
    except KeyError:
        # Append rather than overwrite so earlier auth messages survive.
        output['message'] += 'no action in request. '
        action = ''
    else:
        output['message'] += '{} action keyword found'.format(action)

    if output['authorized'] and action:
        output['action_allowed'] = pilib.check_action_auths(action, user_data['authlevel'])
    else:
        output['action_allowed'] = False

    if output['authorized'] and output['action_allowed']:

        output['message'] += 'Found action. '

        if action == 'testdbvn':
            from iiutilities.dblib import dbvntovalue
            try:
                output['data'] = dbvntovalue(post['dbvn'])
            except:
                output['message'] += 'Error in dbvn evaluation. '
                output['data'] = 'error'
            else:
                output['message'] += 'Seems to have worked out. '
        elif action == 'testlogical':
            from iiutilities.datalib import evaldbvnformula
            try:
                output['data'] = evaldbvnformula(post['logical'])
            except:
                output['message'] += 'Error in logical evaluation. '
                output['data'] = 'error'
            else:
                output['message'] += 'Seems to have worked out. '

        elif action == 'testmodule':
            output['message'] += 'Testing module: '
            if 'modulename' in post:
                import cupid.cupidunittests
                output['message'] += post['modulename']
                output['data'] = cupid.cupidunittests.testmodule(post['modulename'])
            else:
                output['message'] += 'Modulename not found. '
        elif action == 'testfunction':
            output['message'] += 'Testing function: '
            if 'testname' in post:
                import cupid.cupidunittests
                output['message'] += post['testname']
                output['data'] = cupid.cupidunittests.testfunction(post['testname'])
            else:
                output['message'] += 'Testname not found. '

        elif action == 'modifychannelalarm':
            controllib.handle_modify_channel_alarm(post, output)
            from cupid.actions import processactions

            # process only this action.
            processactions(name=post['actionname'])

        elif action == 'modifychannel':
            controllib.handle_modify_channel(post, output)

        elif action == 'getalarmscount':
            control_db = dblib.sqliteDatabase(pilib.dirs.dbs.control)
            actions = control_db.read_table('actions')
            output['data'] = {'totalalarms':len(actions),'channelalarms':0, 'activealarms':0, 'activechannelalarms':0}
            for action_entry in actions:
                if action_entry['conditiontype'] == 'channel':
                    output['data']['channelalarms'] += 1
                    if action_entry['active']:
                        output['data']['activechannelalarms'] += 1

                if action_entry['active']:
                    output['data']['activealarms'] += 1

        elif action == 'copy_log_to_archive':
            pilib.app_copy_log_to_archive(post, output)

        elif action == 'getlogscount':
            logtablenames = dblib.sqliteDatabase(pilib.dirs.dbs.log).get_table_names()
            output['data'] = {'logscount':len(logtablenames)}

        elif action == 'test_action':
            output['message'] += 'Testing action. '
            controldb = dblib.sqliteDatabase(pilib.dirs.dbs.control)
            actiondict = controldb.read_table('actions',condition='"name"=\'' + post['actionname'] + "'")[0]
            from cupid.actions import action
            test_action = action(actiondict)
            test_action.test()

        elif action == 'update_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            safe_database.set_single_value('wireless', 'password', post['password'], "SSID='" + post['ssid'] + "'")

        elif action == 'add_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            insert = {'SSID':post['ssid'], 'auto':1, 'priority':1}
            if 'password' in post:
                insert['password'] = post['password']
            safe_database.insert('wireless',insert)

        elif action == 'delete_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            safe_database.delete('wireless', "SSID='" + post['ssid'] + "'")

        elif action in ['userdelete', 'useradd', 'usermodify']:
            """
            This needs to be consolidated with the other useradd, modify algorithm written already.
            Probably do this when we update the user permissions interface.
            """
            # Ensure that we are authorized for this action
            if action == 'userdelete':
                try:
                    dblib.sqlitequery(pilib.dirs.dbs.users, "delete from users where name='" + post['usertodelete'] + "'")
                except:
                    output['message'] += 'Error in delete query. '
                else:
                    output['message'] += 'Successful delete query. '
            elif action == 'usermodify':

                if 'usertomodify' in post:
                    querylist=[]
                    if 'newpass' in post:
                        from pilib import salt
                        # Hash the new password the same way login verification does.
                        hashedpassword = post['newpass']
                        hname = hashlib.new('sha1')
                        hname.update(post['usertomodify'])
                        hashedname = hname.hexdigest()
                        hentry = hashlib.new('md5')
                        hentry.update(hashedname + salt + hashedpassword)
                        hashedentry = hentry.hexdigest()
                        # NOTE(review): this statement was corrupted in the
                        # original source; reconstructed to store the computed
                        # hash for the modified user — confirm against history.
                        querylist.append("update users set password='" + hashedentry + "' where name='" + post['usertomodify'] + "'")

                    if 'newemail' in post:
                        querylist.append("update users set email='" + post['newemail'] + "' where name='" + post['usertomodify'] + "'")
                    if 'newauthlevel' in post:
                        querylist.append("update users set authlevel='" + post['newauthlevel'] + "' where name='" + post['usertomodify'] + "'")

                    try:
                        dblib.sqlitemultquery(pilib.dirs.dbs.users, querylist)
                    except:
                        output['message'] += 'Error in modify/add query: ' + ",".join(querylist)
                    else:
                        output['message'] += 'Successful modify/add query. ' + ",".join(querylist)
                else:
                    output['message'] += 'Need usertomodify in query. '
            elif action == 'useradd':
                try:
                    username = post['newusername']
                except:
                    username = '******'
                try:
                    newemail = post['newemail']
                except:
                    newemail = '*****@*****.**'
                try:
                    newauthlevel = post['newauthlevel']
                except:
                    newauthlevel = 0
                # Build the insert unconditionally; previously this assignment
                # lived inside the except branch above, so supplying
                # 'newauthlevel' left `query` undefined and raised NameError.
                query = "insert into users values(NULL,'" + username + "','','" + newemail + "',''," + str(newauthlevel) + ")"
                try:
                    dblib.sqlitequery(pilib.dirs.dbs.users, query)
                except:
                    output['message'] += "Error in useradd sqlite query: " + query + ' . '
                else:
                    output['message'] += "Successful query: " + query + ' . '
        elif action == 'getfiletext':
            try:
                filepath = post['filepath']
                if 'numlines' in post:
                    numlines = int(post['numlines'])
                else:
                    numlines = 9999
                output['message'] += 'Using numlines: ' + str(numlines) + ' for read action. '
                if 'startposition' in post:
                    startposition = post['startposition']
                else:
                    startposition = 'end'
                output['message'] += 'Reading from position ' + startposition + '. '
            except KeyError:
                output['message'] += 'Sufficient keys for action getfile text do not exist. '
            except:
                output['message'] += 'Uncaught error in getfiletext. '
            else:
                try:
                    file = open(filepath)
                    lines = file.readlines()
                except:
                    output['message'] += 'Error reading file in getfiletext action. '
                else:
                    output['data'] = []
                    if startposition == 'end':
                        try:
                            output['data'] = datalib.tail(file, numlines)[0]
                        except:
                            output['message'] += 'Error in tail read. '
                    else:
                        linecount = 0
                        for line in lines:
                            linecount += 1
                            if linecount > numlines:
                                break
                            else:
                                output['data'].append(line)
        elif action == 'getmbtcpdata':
            try:
                clientIP = post['clientIP']
                register = post['register']
                length = post['length']
            except KeyError:
                output['message'] += 'Sufficient keys do not exist for the command. Requires clientIP, register, and length. '
            else:
                from iiutilities.netfun import readMBcodedaddresses
                output['response'] = readMBcodedaddresses(clientIP, int(register), int(length))
        elif action == 'queuemessage':
            output['message'] += 'Queue message. '
            if 'message' in post:
                try:
                    dblib.sqliteinsertsingle(pilib.dirs.dbs.motes, 'queuedmessages', [datalib.gettimestring(), post['message']])
                except:
                    import traceback
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    output['message'] += 'Error in queue insert query: {}. '.format(traceback.format_exc())
                else:
                    output['message'] += 'Message insert successful'
            else:
                output['message'] += 'No message present. '

        elif action == 'setsystemflag' and 'systemflag' in post:
            database = pilib.dirs.dbs.system
            dblib.setsinglevalue(database, 'systemflags', 'value', 1, "name=\'" + post['systemflag'] + "'")
        elif action == 'rundaemon':
            from cupiddaemon import rundaemon
            rundaemon()

        # TODO: Eliminate this scary thing.
        elif action == 'setvalue':
            utility.log(pilib.dirs.logs.control, "Setting value in wsgi", 1, 1)

            # we use the auxiliary 'setsinglecontrolvalue' to add additional actions to update
            if all(k in post for k in ('database', 'table', 'valuename', 'value')):
                dbpath = pilib.dbnametopath(post['database'])
                if dbpath:
                    output['message'] += 'Carrying out setvalue for value ' + post['valuename'] + ' on ' + post['table'] + ' in '  + dbpath
                    if 'condition' in post:
                        pilib.setsinglecontrolvalue(dbpath, post['table'], post['valuename'], post['value'], post['condition'])
                    elif 'index' in post:
                        condition = 'rowid= ' + post['index']
                        pilib.setsinglecontrolvalue(dbpath, post['table'], post['valuename'], post['value'], condition)
                    else:
                        pilib.setsinglecontrolvalue(dbpath, post['table'], post['valuename'], post['value'])
                else:
                    output['message'] += 'Problem translating dbpath from friendly name: ' + post['database']
            else:
                output['message'] += 'Insufficient data for setvalue '
        elif action == 'updateioinfo':
            if all(k in post for k in ['database', 'ioid', 'value']):
                query = dblib.makesqliteinsert('ioinfo', [post['ioid'], post['value']], ['id', 'name'])
                try:
                    dblib.sqlitequery(pilib.dirs.dbs.control, query)
                except:
                    output['message'] += 'Error in updateioinfo query execution: ' + query +'. into database: ' + pilib.dirs.dbs.control
                    output['message'] += 'ioid: ' + post['ioid'] + ' . '
                else:
                    output['message'] += 'Executed updateioinfo query. '
            else:
                output['message'] += 'Insufficient data for updateioinfo query ! '

        # TODO: properly incorporate and test channel class functions here, and then sub it.
        elif action == 'modify_channel':
            controllib.app_modify_channel(post, output)

        elif action == 'deletechannelbyname' and 'database' in post and 'channelname' in post:
            dbpath = pilib.dbnametopath(post['database'])
            dblib.sqlitequery(dbpath, 'delete channelname from channels where name=\"' + post['channelname'] + '\"')
        elif action == 'updatecameraimage':
            output['message'] += 'Take camera image keyword. '
            import cupid.camera
            if 'width' in post:
                width = post['width']
            else:
                width = 800
            try:
                values = cupid.camera.takesnap(width=width)
            except:
                output['message'] += 'Error taking image. '
            else:
                output['message'] += 'Appears successful. Path : ' + values['imagepath'] + '. Timestamp : ' + values['timestamp'] + '. '
                output['data'] = values
        elif action == 'getcurrentcamtimestamp':
            output['message'] += 'getcurrentcamtimestamp keyword found. '
            try:
                with open('/var/www/webcam/images/current.jpg.timestamp') as f:
                    data = f.read()
            except:
                output['message'] += 'Error reading file as requested. '
            else:
                output['data'] = data
        else:
            output['message'] += 'Action keyword present(' + action + '), but not handled. '
    else:
        output['message'] += 'Authentication unsuccessful or action not authorized.'
        status = '401 Not Authorized'

    foutput = json.dumps(output, indent=1)

    response_headers = [('Content-type', 'application/json')]
    start_response(status, response_headers)

    return [foutput]
示例#23
0
def init_netstats_database(path=netstats_dbpath):
    """Create the 'wired' and 'wireless' netstats tables in the database at `path`."""
    from iiutilities import dblib

    netstats_db = dblib.sqliteDatabase(path)
    # Both interface types share the same network-speed schema.
    for interface_table in ('wired', 'wireless'):
        netstats_db.create_table(interface_table, netspeed_schema)
示例#24
0
def application(environ, start_response):
    import json
    import hashlib

    # Set top folder to allow import of modules

    import os, sys, inspect

    top_folder = \
        os.path.split(os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0, top_folder)

    import inventorylib
    from iiutilities import dblib, datalib
    from time import time

    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    request_body = environ['wsgi.input'].read(request_body_size)
    try:
        post = json.loads(request_body.decode('utf-8'))
    except:
        print('Error decoding: ')
        print(request_body.decode('utf-8'))
        post = {}

    output = {'message': ''}
    status = '200 OK'

    try:
        try:
            output['remote_ip'] = environ['HTTP_X_FORWARDED_FOR'].split(
                ',')[-1].strip()
        except KeyError:
            output['remote_ip'] = environ['REMOTE_ADDR']
    except:
        output['remote_ip'] = 'Error getting IP address'
    """
    Here we verify credentials of session data against those in the database.
    While we authenticate in the browser, this does not stop POST queries to the API without the page provided
    So we take the hpass stored in the dictionary and verify.

    * Which databases are available are stored in users table, column accesskeywords
    * Which one is currently in use is stored in table usermeta, data where user=username. data is json-encoded metadata
        pathalias field

    * What path extension this corresponds to is stored in pathaliases

    """

    # I dont' think this will be used. We will get pathalias from database. Let's deal with changing it later.

    # First, let's get our pathalias and translate to a path, using our path reloader

    # if 'pathalias' in post:
    #     output['message'] += inventorylib.reloaddatapaths(pathalias=post['pathalias'])
    # else:
    #     output['message'] += 'No pathalias found in postictionary. '

    wsgiauth = True
    authverified = False

    if wsgiauth:

        # Verfiy that session login information is legit: hashed password, with salt and username, match
        # hash stored in postatabase.
        import hashlib

        safe_database = dblib.sqliteDatabase(
            inventorylib.sysvars.dirs.dbs.safe)
        if 'username' in post and post['username']:
            output['message'] += 'Session user is ' + post['username'] + '. '
        else:
            output['message'] += 'No session user found. '
            post['username'] = ''

        if post['username']:
            try:
                condition = "name='" + post['username'] + "'"
                user_data = safe_database.read_table_row(
                    'users', condition=condition)[0]
            except:
                output[
                    'message'] += 'error in user sqlite query for session user "' + post[
                        'username'] + '". '
                user_data = {'accesskeywords': 'demo', 'admin': False}
            else:
                # Get session hpass to verify credentials

                hashedpassword = post['hpass']
                hname = hashlib.new('sha1')
                hname.update(post['username'].encode('utf-8'))
                hashedname = hname.hexdigest()
                hentry = hashlib.new('md5')
                hentry.update((hashedname + inventorylib.sysvars.salt +
                               hashedpassword).encode('utf-8'))
                hashedentry = hentry.hexdigest()
                if hashedentry == user_data['password']:
                    # successful auth
                    output['message'] += 'Password verified. '
                    authverified = True
                    # output['message'] += 'accesskeywords : ' + str(userdata)
                    output['accesskeywords'] = user_data['accesskeywords']
                    if output['accesskeywords'].find(',') >= 0:
                        accesskeywords = output['accesskeywords'].split(',')
                        accesskeywords = [
                            accesskeyword.strip()
                            for accesskeyword in accesskeywords
                        ]
                    else:
                        accesskeywords = output['accesskeywords'].strip()

                    path_aliases = safe_database.read_table('pathaliases')

                    # Find usermeta entry and grab which database is selected. If one is not selected, update selection
                    # to first that user is allowed to access
                    try:
                        user_meta_row = safe_database.read_table_row(
                            'usermeta',
                            condition="user='******'username'] + "'")[0]
                    except:
                        print('error getting usermeta for username ' +
                              post['username'])
                        output[
                            'message'] += 'error getting usermeta for username ' + post[
                                'username']
                        user_meta_row = []
                        return

                    path_alias = ''
                    if not user_meta_row:
                        output[
                            'message'] += 'User meta entry not found. Attempting to create. '

                        # assign default database
                        default_database = accesskeywords[0]

                        output[
                            'message'] += 'Choosing pathalias from first in keywords: ' + default_database + '. '
                        if any(default_database == path_alias['alias']
                               for path_alias in path_aliases):
                            output[
                                'message'] += 'Verified that default alias exists in pathaliases database. '
                        else:
                            output[
                                'message'] += 'ERROR: first entry in keywords (' + default_database + ') not found in aliases. '

                        # Insert usermeta entry. This should never happen.
                        safe_database.insert(
                            'usermeta', {
                                'user': post['username'],
                                'data': 'pathalias:' + default_database
                            })
                        path_alias = default_database
                    else:
                        output[
                            'message'] += 'User meta entry found with text ' + str(
                                user_meta_row) + '. '

                        # Parse the string into json and ensure that the pathalias is in there
                        user_meta_dict = datalib.parseoptions(
                            user_meta_row['data'])
                        if 'pathalias' in user_meta_dict:
                            path_alias = user_meta_dict['pathalias']
                            output[
                                'message'] += 'pathalias found: ' + user_meta_dict[
                                    'pathalias'] + '. '

                            if any(path_alias == stored_path_alias['alias']
                                   for stored_path_alias in path_aliases):
                                output[
                                    'message'] += 'Verified that default alias exists in pathaliases database. '

                    if path_alias:
                        # reload datapaths with path alias
                        reload_message = inventorylib.reloaddatapaths(
                            pathalias=path_alias)

                        # DEFINITELY COMMENT THIS OUT FOR SECURITY SAKE (absolute paths are secret!!)
                        output['message'] += reload_message

                else:
                    # successful auth
                    output['message'] += 'Failed password check. '
        else:
            # Demo status
            authverified = True
            user_data = {'authlevel': 0}

    else:
        output['message'] += 'WSGI authorization not enabled. '

    if authverified or not wsgiauth:
        output['authorized'] = True
    else:
        output['authorized'] = False

    try:
        action = post['action']
    except KeyError:
        output['message'] = 'no action in request'
        action = ''

    if output['authorized'] and action:
        output['action_allowed'] = inventorylib.check_action_auths(
            action, user_data['authlevel'])
    else:
        output['action_allowed'] = False

    if output['authorized'] and output['action_allowed']:

        # Stock functions
        if action == 'addeditpart':
            output['message'] += 'addpart keyword found. '
            inventorylib.addeditstockpart(post, output)
            inventorylib.calcstockfromall()
        elif action == 'copypart':
            output['message'] += 'copypart keyword found. '
            inventorylib.copystockpart(post, output)
            inventorylib.calcstockfromall()
        elif action == 'deleteparts':
            output['message'] += 'deleteparts keyword found. '
            inventorylib.deletestockparts(post, output)
            inventorylib.calcstockfromall()
        elif action == 'gettrackedpartdata':
            output['message'] += 'gettrackedpartdata keyword found. '
            output['data'] = inventorylib.calcstockfromall(
                **post)['trackedpart']
        elif action == 'generateorders':
            output['message'] += 'generate orders keyword found. '
            inventorylib.generateandaddorders()

        # Inventory functions
        # Edit and add are separated, as names are autogenerated
        elif action == 'editinventory':
            output['message'] += 'editinventory keyword found. '
            inventorylib.editinventory(post, output)
            inventorylib.calcstockfromall()
        elif action == 'addinventory':
            output['message'] += 'addinventory keyword found. '
            inventorylib.createnewinventory(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteinventories':
            output['message'] += 'deleteinventories keyword found. '
            inventorylib.deleteinventories(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditinventorypart':
            output['message'] += 'addeditinventorypart keyword found. '
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deletepartsfrominventory':
            output['message'] += 'deletepartsfrominventory keyword found. '
            inventorylib.deletepartsfrominventory(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()

        # Order functions
        elif action == 'editorder':
            output['message'] += 'editorder keyword found. '
            inventorylib.editorder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addorder':
            output['message'] += 'addorder keyword found. '
            inventorylib.createneworder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteorders':
            output['message'] += 'deleteorders keyword found. '
            inventorylib.deleteorders(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditorderpart':
            output['message'] += 'addeditorderpart keyword found. '
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditorderparts':
            output['message'] += 'addeditorderparts keyword found. '
            if 'partsdata' in post:
                post['partsdata'] = json.loads(post['partsdata'])
                inventorylib.addeditpartlist(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'deletepartsfromorder':
            output['message'] += 'deletepartsfromorder keyword found. '
            inventorylib.deletepartsfromorder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()

        # BOM functions
        elif action == 'copybom':
            output['message'] += 'copybom keyword found. '
            inventorylib.copybom(post, output)
            inventorylib.makebommetadata()
        elif action == 'addeditbom':
            output['message'] += 'addeditbom keyword found. '
            inventorylib.addeditbom(post, output)
            inventorylib.makebommetadata()
        elif action == 'addeditbomparts':
            output['message'] += 'addeditbomparts keyword found. '
            # Operate on partsdata
            post['partsdata'] = json.loads(post['partsdata'])
            inventorylib.addeditpartlist(post, output)
            inventorylib.makebommetadata()
        elif action == 'getbomcalcs':
            output['message'] += 'getbomcalcs keyword found. '
            inventorylib.calcbomprice(post, output)
        elif action == 'getquotecalcs':
            output['message'] += 'getquotecalcs keyword found. '
            output['message'] += 'function not written yet. '
            # inventorylib.calcbomprice(post, output)
        elif action == 'deletepartsfrombom':
            output['message'] += 'deletepartsfrombom keyword found. '
            inventorylib.deletepartsfrombom(post, output)
            inventorylib.makebommetadata()
        elif action == 'deleteboms':
            output['message'] += 'deleteboms keyword found. '
            inventorylib.deleteboms(post, output)
            inventorylib.makebommetadata()

        # Assembly functions
        elif action == 'copyassembly':
            output['message'] += 'copyassembly keyword found. '
            inventorylib.copyassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'copybomintoassembly':
            output['message'] += 'copybomintoassembly keyword found. '
            inventorylib.copybomintoassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditassembly':
            output['message'] += 'addeditassembly keyword found. '
            inventorylib.addeditassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditassemblyparts':
            output['message'] += 'addeditassemblypart keyword found. '
            post['partsdata'] = json.loads(post['partsdata'])
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()

        elif action == 'getassemblycalcs':
            output['message'] += 'getassemblycalcs keyword found. '
            inventorylib.calcassemblyprice(post, output)
        elif action == 'deletepartsfromassembly':
            output['message'] += 'deletepartsfromassembly keyword found. '
            inventorylib.deletepartsfromassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteassemblys':
            output['message'] += 'deleteassemblys keyword found. '
            inventorylib.deleteassemblies(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()

        # Quotes
        elif action == 'deletequotes':
            output['message'] += 'deletequotes keyword found. '
            inventorylib.deletequotes(post, output)
            inventorylib.makebommetadata(
                database=inventorylib.sysvars.dbs.quotes)
        elif action == 'copyquotetoboms':
            output['message'] += 'copyquotetoboms keyword found. '
            inventorylib.copyquotetoboms(post, output)
            inventorylib.makebommetadata()

        # Export functions

        elif action == 'exportbomtopdf':
            output['message'] += 'exportbomtopdf keyword found. '
            inventorylib.writepanelbomtopdf(post, output)

            thetime = datalib.gettimestring()
            cleantime = thetime.replace(' ', '_').replace(':', '_')

            # Get bom from boms database
            bom = inventorylib.sysvars.dbs.boms.read_table(post['name'])

            cleanbomname = post['name'].replace(' ', '_').replace(':', '_')
            filename = cleanbomname + '_' + cleantime
            outputroot = '/var/www/html/panelbuilder/data/downloads/'

            weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + filename

            inventorylib.writepanelbomtopdf(
                **{
                    'bomdata': bom,
                    'title': 'Bom generated from ' + post['name'] + ' ' +
                    cleantime,
                    'outputfile': outputroot + filename
                })

            output['data']['weblink'] = weblink

        elif action == 'exportassemblytopdf':
            output['message'] += 'exportassemblytopdf keyword found. '

            thetime = datalib.gettimestring()
            cleantime = thetime.replace(' ', '_').replace(':', '_')

            # Get bom from boms database
            assemblydata = inventorylib.sysvars.dbs.assemblies.read_table(
                post['name'])

            cleanname = post['name'].replace(' ', '_').replace(':', '_')
            filename = cleanname + '_' + cleantime + '.pdf'
            outputroot = '/var/www/html/panelbuilder/data/downloads/'

            weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + filename

            inventorylib.writepanelbomtopdf(
                **{
                    'bomdata': assemblydata,
                    'title': 'Bom generated from ' + post['name'] + ' ' +
                    thetime,
                    'format': 'picklist',
                    'outputfile': outputroot + filename
                })

            output['data'] = {'assemblydata': assemblydata}
            output['weblink'] = weblink

        # Panel builder
        elif action in ['panelcalcs', 'panelcalcsgenquote']:
            output['message'] += 'panelcalc keyword found. '
            import panelbuilder
            for key, value in post.items():
                # print(key, value)
                pass

            if 'paneldesc' in post:
                import json
                post['paneldesc'] = json.loads(post['paneldesc'])

            bomresults = panelbuilder.paneltobom(**post)

            output['data'] = {}
            # d needs to have a 'paneldesc' key with the panel spec data in it.
            output['data']['bomdescription'] = bomresults['bomdescription']
            output['data']['options'] = bomresults['options']
            output['data']['bomcalcs'] = inventorylib.calcbomprice(
                {'bomdictarray': bomresults['bom']})['data']
            output['message'] += bomresults['message']

            # We don't actually want to return the full boms by default. We don't want this in the client, and it's
            # lot of data anyway
            if 'returnfullboms' not in post:
                for option, value in output['data']['options'].items():
                    if 'bom' in value:
                        print('Deleting bom from option ' + str(option))

                        del output['data']['options'][option]['bom']
                    if 'flatbom' in value:
                        print('Deleting flatbom from option ' + str(option))
                        del output['data']['options'][option]['flatbom']

            if action == 'panelcalcsgenquote':
                thetime = datalib.gettimestring()
                cleantime = thetime.replace(' ', '_').replace(':', '_')
                outputroot = '/var/www/html/panelbuilder/data/downloads/'

                if 'paneltype' in post['paneldesc'] and post['paneldesc'][
                        'paneltype'] == 'brewpanel':
                    datedquotefilename = 'panelbuilder_brew_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_brew_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_brew_quote.pdf'
                    genericbomfilename = 'panelbuilder_brew_bom.pdf'
                elif 'paneltype' in post['paneldesc'] and post['paneldesc'][
                        'paneltype'] == 'temppanel':
                    datedquotefilename = 'panelbuilder_temp_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_temp_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_temp_quote.pdf'
                    genericbomfilename = 'panelbuilder_temp_bom.pdf'
                else:
                    datedquotefilename = 'panelbuilder_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_quote.pdf'
                    genericbomfilename = 'panelbuilder_bom.pdf'

                weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + datedquotefilename

                # until we can get this to format properly in the pdf, we are going to leave it generic
                # description = output['data']['bomdescription']
                description = 'Control panel quote generated by panelbuilder.'
                datedquotes = True

                # Create quote pdf from BOM
                if datedquotes:

                    inventorylib.writepanelquotetopdf(
                        **{
                            'bomdata':
                            bomresults['bom'],
                            'options':
                            bomresults['options'],
                            'title':
                            'Quote auto-generated by panelbuilder   \t\t' +
                            datalib.gettimestring(),
                            'price':
                            str(output['data']['bomcalcs']['totalprice']),
                            'outputfile':
                            outputroot + datedquotefilename,
                            'description':
                            description
                        })

                inventorylib.writepanelquotetopdf(
                    **{
                        'bomdata': bomresults['bom'],
                        'options': bomresults['options'],
                        'title': 'Quote auto-generated by panelbuilder ' +
                        thetime,
                        'price': output['data']['bomcalcs']['totalprice'],
                        'outputfile': outputroot + genericquotefilename
                    })

                # Create database entry BOM

                # Create table
                # print('** DATABASE')
                # print(panelbuilder.sysvars.dirs.dbs.quotes)

                bomname = 'quote_' + cleantime
                inventorylib.addeditbom(
                    {
                        'bomdata': {
                            'name': bomname
                        },
                        'database': panelbuilder.sysvars.dirs.dbs.quotes
                    }, output)
                # print('** BOM **')
                # print(bomresults['bom'])
                inserts = []
                for part in bomresults['bom']:
                    inserts.append(
                        dblib.makesqliteinsert(bomname,
                                               [part['partid'], part['qty']],
                                               ['partid', 'qty']))
                dblib.sqlitemultquery(inventorylib.sysvars.dirs.dbs.quotes,
                                      inserts)
                inventorylib.makebommetadata(
                    database=inventorylib.sysvars.dbs.quotes)

                # inventorylib.addeditpartlist(post, output)

                # Create pdfs

                if datedquotes:
                    inventorylib.writepanelbomtopdf(
                        **{
                            'bomdata': bomresults['bom'],
                            'options': bomresults['options'],
                            'title': 'Quote auto-generated by panelbuilder ' +
                            thetime,
                            'outputfile': outputroot + datedbomfilename
                        })

                inventorylib.writepanelbomtopdf(
                    **{
                        'bomdata': bomresults['bom'],
                        'title': 'panelbuilder BOM generated ' + thetime,
                        'outputfile': outputroot + genericbomfilename,
                        'totalprice': output['data']['bomcalcs']['totalprice']
                    })

                output['data']['quotelink'] = weblink
                from iiutilities.utility import gmail
                mymail = gmail(subject="Quote generated")
                mymail.message = 'Quote generated at ' + cleantime + '\r\n'

                if 'remote_ip' in output:
                    mymail.message = 'IP address ' + output[
                        'remote_ip'] + '\r\n'

                mymail.message += bomresults['bomdescription']
                mymail.recipient = '*****@*****.**'
                mymail.sender = 'II Panelbuilder'
                mymail.send()

        # Multi-use
        elif action == 'reloaditemdatafromstock':
            output['message'] += 'reloaditemdatafromstock keyword found. '
            inventorylib.refreshpartsfromstock(post, output)
            if 'bomname' in post:
                inventorylib.recalcpartdata(bomname=post['bomname'])
                inventorylib.makebommetadata()
            elif 'assemblyame' in post:
                inventorylib.recalcpartdata(assemblyname=post['assemblyname'])
                inventorylib.makeassemblymetadata()

        # Generic functions
        elif action == 'gettablenames':
            dbpath = inventorylib.dbnametopath(post['database'])
            try:
                output['data'] = dblib.gettablenames(dbpath)
            except:
                output['message'] += 'Error getting table names'
        elif action == 'switchtablerows':
            dbpath = inventorylib.dbnametopath(post['database'])
            dblib.switchtablerows(dbpath, post['tablename'], post['row1'],
                                  post['row2'], post['uniqueindex'])
        elif action == 'modwsgistatus':
            output['processgroup'] = repr(environ['mod_wsgi.process_group'])
            output['multithread'] = repr(environ['wsgi.multithread'])
        elif action == 'gettabledata':
            output['message'] += 'Gettabledata. '
            if 'database' in post:
                dbpath = inventorylib.dbnametopath(post['database'])
                if dbpath:
                    output['message'] += 'Friendly name ' + post[
                        'database'] + ' translated to path ' + dbpath + ' successfully. '

                    if 'tablenames' in post:  # Get multiple tables
                        output['message'] += 'Multiple tables. '
                        data = []
                        if 'start' in post:
                            fixedstart = int(post['start'])
                        else:
                            fixedstart = 0
                        if 'length' in post:
                            fixedlength = int(post['length'])
                        else:
                            fixedlength = 1
                        if 'lengths' in post:
                            lengths = map(int, post['lengths[]'])
                        else:
                            lengths = []
                        if 'starts' in post:
                            starts = map(int, post['starts'])
                        else:
                            starts = []

                        for index, table in enumerate(post['tablenames[]']):
                            try:
                                length = lengths[index]
                            except IndexError:
                                length = fixedlength
                            try:
                                start = starts[index]
                            except IndexError:
                                start = fixedstart

                            data.append(
                                dblib.dynamicsqliteread(
                                    dbpath, table, start, length))
                            output['data'] = data
                    elif 'length' in post:  # Handle table row subset
                        output['message'] += 'Length keyword. '
                        if not 'start' in post:
                            post['start'] = 0
                        thetime = time()
                        output['data'] = dblib.dynamicsqliteread(
                            dbpath, post['tablename'], post['start'],
                            post['length'])
                        output['querytime'] = time() - thetime
                    elif 'row' in post:  # Handle table row
                        output['message'] += 'Row keyword. ' + str(post['row'])
                        thetime = time()
                        output['data'] = dblib.dynamicsqliteread(
                            dbpath, post['tablename'], post['row'])
                        output['querytime'] = time() - thetime
                    elif 'tablename' in post:  # Handle entire table
                        output['message'] += 'Tablename keyword: ' + post[
                            'tablename'] + '. '
                        thetime = time()
                        if 'condition' in post:
                            if not post['condition'] == '':
                                output['data'] = dblib.dynamicsqliteread(
                                    dbpath,
                                    post['tablename'],
                                    condition=post['condition'])
                            else:
                                output['data'] = dblib.dynamicsqliteread(
                                    dbpath, post['tablename'])
                        else:
                            try:
                                output['data'] = dblib.dynamicsqliteread(
                                    dbpath, post['tablename'])
                            except:
                                output['message'] += 'Error retrieving data. '
                            else:
                                output[
                                    'message'] += 'Data query appears successful. '
                        output['querytime'] = time() - thetime
                else:
                    output['message'] += 'Friendly name ' + post[
                        'database'] + ' unsuccessfully translated. '
            else:
                output['message'] += 'No database present in action request'
        else:
            output[
                'message'] = 'no command matched for action "' + action + '"'
    else:
        # status = '403 Forbidden'
        output[
            'message'] += 'Not authorized for this action (or perhaps at all?) '

    if 'data' in output:
        if output['data']:
            newetag = hashlib.md5(str(
                output['data']).encode('utf-8')).hexdigest()
            if 'etag' in post:
                if newetag == post['etag']:
                    status = '304 Not Modified'
                    output['data'] = ''
        else:
            newetag = ''
    else:
        newetag = ''

    if 'datasize' in post:
        output['datasize'] = sys.getsizeof(output['data'])

    output['etag'] = newetag
    # try:
    foutput = json.dumps(output, indent=1)
    # except:
    #     import csv
    #     w = csv.writer(open("/usr/lib/iicontrollibs/inventory/dumperr.log", "w"))
    #     for key, val in output.items():
    #         w.writerow([key, val])
    response_headers = [('Content-type', 'application/json')]
    response_headers.append(('Etag', newetag))
    start_response(status, response_headers)

    return foutput.encode('utf-8')
示例#25
0
def _histo_hourly_hits(domain_data):
    """Bin a domain's hit timestrings into one-hour buckets, in place.

    Sorts domain_data['times'] and attaches a 'histo_data' dict mapping the
    midpoint timestring of each hour (bucket start + 30 min) to the number of
    hits that fell inside that hour.

    Args:
        domain_data: dict with a 'times' key holding a list of timestrings
            parseable by datalib.timestringtoseconds.
    """
    from iiutilities import datalib

    domain_data['times'].sort()

    # Round the first hit down to the start of its hour; every hit is then
    # within the hour following some bucket start.
    first_time = datalib.timestringtoseconds(domain_data['times'][0])
    first_hour_time_seconds = first_time - first_time % 3600

    last_time = datalib.timestringtoseconds(domain_data['times'][-1])
    last_hour_time_seconds = last_time - last_time % 3600

    # BUGFIX: use floor division. The original expression
    # int(last - first) / 3600 + 1 yields a float under Python 3 true
    # division, and range(float) raises TypeError.
    num_bins = int(last_hour_time_seconds - first_hour_time_seconds) // 3600 + 1

    bin_times = [first_hour_time_seconds + i * 3600 for i in range(num_bins)]
    bin_values = [0] * num_bins

    for time_string in domain_data['times']:
        time_seconds = datalib.timestringtoseconds(time_string)
        for index in range(num_bins):
            # A hit belongs to this bucket if it precedes the next bucket's
            # start; the last bucket catches everything remaining.
            if index == num_bins - 1 or time_seconds < bin_times[index + 1]:
                bin_values[index] += 1
                break

    # Key each bucket by its midpoint so plots center on the hour.
    domain_data['histo_data'] = {
        datalib.gettimestring(bin_time + 1800): bin_value
        for bin_time, bin_value in zip(bin_times, bin_values)
    }


def analyze_and_histo_access_db(dbpath=access_dbpath):
    """Summarize an access-log database into per-domain hourly histograms.

    Reads every row of the 'access_log' table, runs analyze_access_entry on
    each record, collects hit times per domain (all hits and remote-only
    hits), records 404 responses, then bins each domain's hits into hourly
    buckets via _histo_hourly_hits.

    Args:
        dbpath: path to the sqlite access-log database. Defaults to the
            module-level access_dbpath.

    Returns:
        dict with keys 'total_hits', 'remote_hits', 'hourly_hits',
        'not_found', 'dbpath', and 'tablename'. Each entry under
        'total_hits'/'remote_hits' maps a domain to
        {'times': [...], 'histo_data': {timestring: count}}.
        NOTE(review): 'hourly_hits' is initialized but never populated here —
        presumably consumed/filled elsewhere; confirm before removing.
    """
    from iiutilities import dblib
    from iiutilities import datalib

    tablename = 'access_log'
    access_db = dblib.sqliteDatabase(dbpath)
    access_records = access_db.read_table(tablename)

    access_meta = {
        'total_hits': {},
        'remote_hits': {},
        'hourly_hits': {},
        'not_found': [],
        'dbpath': dbpath,
        'tablename': tablename
    }
    for record in access_records:
        # analyze_access_entry mutates the record in place (sets 'domain',
        # 'local', etc.) — assumed from usage below; defined elsewhere.
        analyze_access_entry(record)

        # Records without a domain still fall through and are grouped under
        # their (falsy) domain key, matching the original behavior.
        if record['domain'] in access_meta['total_hits']:
            access_meta['total_hits'][record['domain']]['times'].append(
                record['time'])
        else:
            access_meta['total_hits'][record['domain']] = {
                'times': [record['time']]
            }

        if not record['local']:
            if record['domain'] in access_meta['remote_hits']:
                access_meta['remote_hits'][record['domain']]['times'].append(
                    record['time'])
            else:
                access_meta['remote_hits'][record['domain']] = {
                    'times': [record['time']]
                }

        if record['status'] == '404':
            access_meta['not_found'].append({
                'url': record['full_request'],
                'time': record['time']
            })

    # Bin time-resolved data into hourly histograms. The original duplicated
    # this loop body verbatim for both maps; the shared helper keeps it DRY.
    for domain_data in access_meta['total_hits'].values():
        _histo_hourly_hits(domain_data)

    for domain_data in access_meta['remote_hits'].values():
        _histo_hourly_hits(domain_data)

    if access_db.queued_queries:
        access_db.execute_queue()

    return access_meta
示例#26
0
def application(environ, start_response):
    """WSGI entry point for the database-query API.

    Reads a JSON POST body, verifies the posted session credentials
    (salted sha1/md5 scheme) against the users database, then dispatches
    on post['action'] to read table names, table data, or archive log
    metadata. Responds with JSON plus an Etag so clients presenting a
    matching 'etag' receive '304 Not Modified' with an empty data field.
    """
    import json
    import hashlib

    # Set top folder to allow import of modules
    import os, sys, inspect

    top_folder = \
        os.path.split(os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0, top_folder)

    from cupid import pilib
    from iiutilities import dblib
    from time import time

    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    request_body = environ['wsgi.input'].read(request_body_size)
    try:
        post = json.loads(request_body.decode('utf-8'))
    except (ValueError, UnicodeDecodeError):
        print('Error decoding: ')
        # errors='replace' so the diagnostic print cannot raise a second time
        print(request_body.decode('utf-8', errors='replace'))
        post = {}

    output = {'message': ''}

    status = '200 OK'
    # Run stuff as requested
    # We use the dynamic function to allow various
    # types of queries
    output['data'] = []
    output['message'] = ''

    wsgiauth = True
    authverified = False

    if wsgiauth:
        # Verify that session login information is legit: hashed password,
        # with salt and username, must match hash stored in database.
        safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.users)
        if 'username' in post and post['username']:
            output['message'] += 'Session user is ' + post['username'] + '. '
        else:
            output['message'] += 'No session user found. '
            post['username'] = ''

        if post['username']:
            # NOTE(security): condition is built by string concatenation from
            # client input and is injectable; the dblib API should grow a
            # parameterized-condition form.
            condition = "name='" + post['username'] + "'"
            try:
                # Fix: the original ran this query once unguarded and then a
                # second time inside try/except, so an unknown user crashed
                # before the fallback could run. Query only inside the try.
                user_data = safe_database.read_table_row(
                    'users', condition=condition)[0]
            except:
                output[
                    'message'] += 'Error in user sqlite query for session user "' + post[
                        'username'] + '". '
                # output['message'] += 'Condition {}'.format(condition)
                output[
                    'message'] += 'Condition: ' + condition + '. Path: ' + pilib.dirs.dbs.safe
                user_data = {'accesskeywords': 'demo', 'admin': False}
            else:
                # Get session hpass to verify credentials
                hashedpassword = post['hpass']
                hname = hashlib.new('sha1')
                # Fix: hashlib.update() requires bytes on Python 3; encode
                # consistently with the other handlers in this project.
                hname.update(post['username'].encode('utf-8'))
                hashedname = hname.hexdigest()
                hentry = hashlib.new('md5')
                hentry.update((hashedname + pilib.salt + hashedpassword).encode('utf-8'))
                hashedentry = hentry.hexdigest()
                if hashedentry == user_data['password']:
                    # successful auth
                    output['message'] += 'Password verified. '
                    authverified = True

                    # TODO: implement usermeta

    else:
        output['message'] += 'WSGI authorization not enabled. '
    if authverified or not wsgiauth:
        try:
            action = post['action']
        except KeyError:
            output['message'] = 'no action in request'
        else:
            if action == 'gettablenames':
                dbpath = pilib.dbnametopath(post['database'])
                try:
                    output['data'] = dblib.gettablenames(dbpath)
                except:
                    output['message'] += 'Error getting table names'
            elif action == 'modwsgistatus':
                output['processgroup'] = repr(
                    environ['mod_wsgi.process_group'])
                output['multithread'] = repr(environ['wsgi.multithread'])
            elif action == 'gettabledata':
                output['message'] += 'Gettabledata. '
                if 'database' in post:
                    dbpath = pilib.dbnametopath(post['database'])
                    if dbpath:
                        output['message'] += 'Friendly name ' + post[
                            'database'] + ' translated to path ' + dbpath + ' successfully. '

                        the_database = dblib.sqliteDatabase(dbpath)
                        if 'tablenames' in post:  # Get multiple tables
                            output['message'] += 'Multiple tables. '
                            data = []
                            fixedstart = int(post['start']) if 'start' in post else 0
                            fixedlength = int(post['length']) if 'length' in post else 0
                            # Fix: read the same key we tested for (original
                            # tested 'lengths' but read 'lengths[]'), and
                            # materialize lists so indexing below raises
                            # IndexError — a py3 map object is not
                            # subscriptable and raised TypeError instead.
                            lengths = [int(value) for value in post['lengths']] if 'lengths' in post else []
                            starts = [int(value) for value in post['starts']] if 'starts' in post else []

                            for index, table in enumerate(post['tablenames']):
                                output[
                                    'message'] += 'Reading table {}. '.format(
                                        table)
                                try:
                                    length = lengths[index]
                                except IndexError:
                                    length = fixedlength
                                try:
                                    start = starts[index]
                                except IndexError:
                                    start = fixedstart
                                # Fix: honor the per-table length (the
                                # original tested the fixed default only).
                                if not length:  # get all rows if length not specified
                                    db_data = the_database.read_table(table)
                                else:
                                    db_data = the_database.read_table_rows(
                                        table, start, length)
                                output[
                                    'message'] += 'Read {} rows of data. '.format(
                                        len(db_data))
                                data.append(db_data)
                                output['data'] = data

                        elif 'length' in post:  # Handle table row subset
                            output['message'] += 'Length keyword. '
                            if 'start' not in post:
                                post['start'] = 0
                            thetime = time()
                            output['data'] = the_database.read_table_rows(
                                post['tablename'], post['start'],
                                post['length'])
                            output['querytime'] = time() - thetime
                        elif 'row' in post:  # Handle table row
                            output['message'] += 'Row keyword. ' + str(
                                post['row'])
                            thetime = time()
                            output['data'] = the_database.read_table_rows(
                                post['tablename'], post['row'])
                            output['querytime'] = time() - thetime
                        elif 'tablename' in post:  # Handle entire table
                            output[
                                'message'] += 'Tablename keyword: {}. '.format(
                                    post['tablename'])
                            thetime = time()
                            if 'condition' in post:
                                if not post['condition'] == '':
                                    output[
                                        'message'] += 'Condition : "{}" .'.format(
                                            post['condition'])
                                    output['data'] = the_database.read_table(
                                        post['tablename'],
                                        condition=post['condition'])
                                else:
                                    output['data'] = the_database.read_table(
                                        post['tablename'])
                            else:
                                try:
                                    output['data'] = the_database.read_table(
                                        post['tablename'])
                                except:
                                    output[
                                        'message'] += 'Error retrieving data. '
                                else:
                                    output[
                                        'message'] += 'Data query appears successful. '
                                    # output['message'] += str(output['data'][0])
                            output['querytime'] = time() - thetime

                    else:
                        output['message'] += 'Friendly name ' + post[
                            'database'] + ' unsuccessfully translated. '
                else:
                    output[
                        'message'] += 'No database present in action request'

            elif action == 'get_archive_info':
                from iiutilities.utility import get_directory_listing
                directory_list = get_directory_listing(pilib.dirs.archive)
                output['data'] = {}
                output['data']['lognames'] = []
                for filename in directory_list['filenames']:
                    if filename[-3:] == '.db':
                        output['data']['lognames'].append(filename[:-3])
                    else:
                        directory_list['filenames'].remove(filename)

                output['data']['metadata'] = []
                output['message'] += 'Retrieved db logs {}. '.format(
                    directory_list['filenames'])
                for filename, logname in zip(directory_list['filenames'],
                                             output['data']['lognames']):

                    archive_db = dblib.sqliteDatabase(pilib.dirs.archive +
                                                      filename)
                    try:
                        metadata = archive_db.read_table('metadata')[0]
                    except:
                        output[
                            'message'] += 'Error retrieving metadata for log table {}. '.format(
                                filename)
                        output['data']['metadata'].append({})
                    else:
                        metadata['name'] = logname
                        output['data']['metadata'].append(metadata)

            else:
                output['message'] = 'no command matched for action ' + action
    else:
        output['message'] += 'Authentication unsuccessful'

    if output['data']:
        # Fix: hashlib.md5 requires bytes on Python 3.
        newetag = hashlib.md5(str(output['data']).encode('utf-8')).hexdigest()
        if 'etag' in post:
            if newetag == post['etag']:
                status = '304 Not Modified'
                output['data'] = ''
    else:
        newetag = ''

    if 'datasize' in post:
        output['datasize'] = sys.getsizeof(output['data'])

    output['etag'] = newetag

    try:
        foutput = json.dumps(output, indent=1)
    except:
        print('*** THERE WAS AN ERROR DECODING DATA ***')
        print(output)
        foutput = json.dumps({'message': 'Error in json dumps'})

    response_headers = [('Content-type', 'application/json')]
    response_headers.append(('Etag', newetag))
    start_response(status, response_headers)

    # Fix: PEP 3333 requires the response iterable to yield bytes.
    return [foutput.encode('utf-8')]
示例#27
0
def run_data_agent(**kwargs):
    """Gather enabled data-agent items that are due and post them to the server.

    Reads the send_items table from the agent database and the inputs
    table from the controls database, decides per item whether its
    transmit_period has elapsed (or is within bunch_period of elapsing),
    posts the assembled payload via post_client_data, and on success
    stamps last_transmit for each sent item.

    Keyword settings (with defaults): debug (False), agent_db_path,
    inputs_db_path, inputs_table, send_all (False — force-send all
    enabled items).

    Returns the post response on success, or {'status': 1, 'message':
    traceback} on post failure.
    """
    from iiutilities.datalib import gettimestring, timestringtoseconds

    settings = {
        'debug':False,
        'agent_db_path':'/var/www/data/dataagent.db',
        'inputs_db_path':'/var/www/data/control.db',
        'inputs_table':'inputs',
        'send_all':False
    }
    settings.update(kwargs)

    data_agent_db = dblib.sqliteDatabase(settings['agent_db_path'])
    inputs_db = dblib.sqliteDatabase(settings['inputs_db_path'])

    # get data_agent items
    data_agent_entries = data_agent_db.read_table('send_items')

    # Index inputs by id for O(1) lookup per agent entry.
    inputs = inputs_db.read_table('inputs')
    inputs_dict = {input_entry['id']: input_entry for input_entry in inputs}

    current_time = gettimestring()

    """ 
    Loop through to find things that definitely need to be transmitted. 
    Also, find if there are things that should be transmitted within a fixed window (bunch_period)
    If we are going to transmit anyway, attach these items. This way if we barely miss a transmit event, we will
    still send it and not waste data on two sets of headers.
    """

    """ 
    Data has following format:
    post_data = 
    {
      'post_time':current_time,
      'data': [
        {
          id : data_id,
          name : common name (optional)
          data : [
            data entry,
            data entry,
            ...
        }
      ],
      ...
    }
    """
    post_data = {
        'post_time': current_time,
        'data': []
    }
    maybe_xmit = []

    for agent_entry in data_agent_entries:
        # Fix: initialize per iteration. The original assigned these only
        # inside the enabled branch, so a disabled entry raised NameError on
        # the first pass or silently re-sent the previous entry's stale data.
        send = False
        maybe_send = False
        send_entry = None

        if agent_entry['enabled']:
            if settings['debug']:
                print('{} Enabled '.format(agent_entry['id']))
            options = json.loads(agent_entry['options'])

            # Layer stored options over the defaults.
            da_vars.default_agent_item_options.update(options)
            options = da_vars.default_agent_item_options

            # TODO: Build in other modes besides single.
            # Build in modularity for other ordinates.

            # Create the entry
            if agent_entry['id'] not in inputs_dict:
                if settings['debug']:
                    print('input id {} not found '.format(agent_entry['id']))
                continue

            inputs_entry = inputs_dict[agent_entry['id']]

            send_entry = {
                'id': agent_entry['id']
            }
            if 'name' in inputs_dict[agent_entry['id']]:
                send_entry['name'] = inputs_entry['name']

            if options['full_entry']:
                send_entry['data'] = [inputs_entry]
            else:
                send_entry['data'] = [{'id': agent_entry['id'], 'polltime':inputs_entry['polltime'],
                                   'value': inputs_entry['value']}]

            if not agent_entry['last_transmit'] or settings['send_all']:
                send = True
            else:
                elapsed_since_xmit = timestringtoseconds(current_time) - timestringtoseconds(agent_entry['last_transmit'])
                if elapsed_since_xmit > options['transmit_period']:
                    send = True
                elif (elapsed_since_xmit + options['bunch_period']) > options['transmit_period']:
                    # Not due yet, but due within the bunch window.
                    maybe_send = True

        else:
            if settings['debug']:
                print('{} Disabled '.format(agent_entry['id']))

        if send:
            if settings['debug']:
                print('Sending "{}"'.format(agent_entry['id']))
            post_data['data'].append(send_entry)

        elif maybe_send:
            if settings['debug']:
                print('Queueing "{}" for bunched send'.format(agent_entry['id']))
            # Fix: append to the maybe_xmit list. The original appended to
            # the maybe_send boolean, which raised AttributeError.
            maybe_xmit.append(send_entry)

        else:
            if settings['debug']:
                print('Not sending {}'.format(agent_entry['id']))
    """
    Now determine whether we have data that definitely needs to be sent. If so, throw the bunch data in.
    """

    if post_data['data']:
        post_data['data'].extend(maybe_xmit)
        if settings['debug']:
            print('TIME TO SEND THIS STUFF')
            print(post_data)

    try:
        response = post_client_data(**{'post_data':post_data})
    except:

        import traceback
        trace_message = traceback.format_exc()
        if settings['debug']:
            print('Error, traceback: \n{}'.format(trace_message))
        return {'status':1, 'message':trace_message}
    else:
        if settings['debug']:
            print('SUCCESS')

        # Now we need to mark entries as sent
        for entry in post_data['data']:
            data_agent_db.set_single_value('send_items', 'last_transmit', current_time, condition="id='{}'".format(entry['id']), queue=True)

        data_agent_db.execute_queue()

    return response
示例#28
0
def application(environ, start_response):
    import json
    import hashlib

    # Set top folder to allow import of modules

    import os, sys, inspect

    top_folder = \
        os.path.split(os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0, top_folder)

    import inventorylib
    from iiutilities import dblib, datalib
    from time import time

    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    request_body = environ['wsgi.input'].read(request_body_size)
    try:
        post = json.loads(request_body.decode('utf-8'))
    except:
        print('Error decoding: ')
        print(request_body.decode('utf-8'))
        post = {}

    output = {'message': ''}
    status = '200 OK'

    try:
        try:
            output['remote_ip'] = environ['HTTP_X_FORWARDED_FOR'].split(',')[-1].strip()
        except KeyError:
            output['remote_ip'] = environ['REMOTE_ADDR']
    except:
        output['remote_ip'] = 'Error getting IP address'


    """
    Here we verify credentials of session data against those in the database.
    While we authenticate in the browser, this does not stop POST queries to the API without the page provided
    So we take the hpass stored in the dictionary and verify.

    * Which databases are available are stored in users table, column accesskeywords
    * Which one is currently in use is stored in table usermeta, data where user=username. data is json-encoded metadata
        pathalias field

    * What path extension this corresponds to is stored in pathaliases

    """

    # I dont' think this will be used. We will get pathalias from database. Let's deal with changing it later.

    # First, let's get our pathalias and translate to a path, using our path reloader

    # if 'pathalias' in post:
    #     output['message'] += inventorylib.reloaddatapaths(pathalias=post['pathalias'])
    # else:
    #     output['message'] += 'No pathalias found in postictionary. '

    wsgiauth = True
    authverified = False

    if wsgiauth:

        # Verfiy that session login information is legit: hashed password, with salt and username, match
        # hash stored in postatabase.
        import hashlib

        safe_database = dblib.sqliteDatabase(inventorylib.sysvars.dirs.dbs.safe)
        if 'username' in post and post['username']:
            output['message'] += 'Session user is ' + post['username'] + '. '
        else:
            output['message'] += 'No session user found. '
            post['username'] = ''

        if post['username']:
            try:
                condition = "name='" + post['username'] + "'"
                user_data = safe_database.read_table_row('users', condition=condition)[0]
            except:
                output['message'] += 'error in user sqlite query for session user "' + post['username'] + '". '
                user_data = {'accesskeywords':'demo','admin':False}
            else:
                # Get session hpass to verify credentials

                hashedpassword = post['hpass']
                hname = hashlib.new('sha1')
                hname.update(post['username'].encode('utf-8'))
                hashedname = hname.hexdigest()
                hentry = hashlib.new('md5')
                hentry.update((hashedname + inventorylib.sysvars.salt + hashedpassword).encode('utf-8'))
                hashedentry = hentry.hexdigest()
                if hashedentry == user_data['password']:
                    # successful auth
                    output['message'] += 'Password verified. '
                    authverified = True
                    # output['message'] += 'accesskeywords : ' + str(userdata)
                    output['accesskeywords'] = user_data['accesskeywords']
                    if output['accesskeywords'].find(',') >= 0:
                        accesskeywords = output['accesskeywords'].split(',')
                        accesskeywords = [accesskeyword.strip() for accesskeyword in accesskeywords]
                    else:
                        accesskeywords = output['accesskeywords'].strip()

                    path_aliases = safe_database.read_table('pathaliases')

                    # Find usermeta entry and grab which database is selected. If one is not selected, update selection
                    # to first that user is allowed to access
                    try:
                        user_meta_row = safe_database.read_table_row('usermeta', condition="user='******'username'] + "'")[0]
                    except:
                        print('error getting usermeta for username ' + post['username'])
                        output['message'] += 'error getting usermeta for username ' + post['username']
                        user_meta_row = []
                        return

                    path_alias = ''
                    if not user_meta_row:
                        output['message'] += 'User meta entry not found. Attempting to create. '

                        # assign default database
                        default_database = accesskeywords[0]

                        output['message'] += 'Choosing pathalias from first in keywords: ' + default_database + '. '
                        if any(default_database == path_alias['alias'] for path_alias in path_aliases):
                            output['message'] += 'Verified that default alias exists in pathaliases database. '
                        else:
                            output['message'] += 'ERROR: first entry in keywords (' +default_database + ') not found in aliases. '

                        # Insert usermeta entry. This should never happen.
                        safe_database.insert('usermeta', {'user':post['username'], 'data':'pathalias:' + default_database})
                        path_alias = default_database
                    else:
                        output['message'] += 'User meta entry found with text ' + str(user_meta_row) + '. '

                        # Parse the string into json and ensure that the pathalias is in there
                        user_meta_dict = datalib.parseoptions(user_meta_row['data'])
                        if 'pathalias' in user_meta_dict:
                            path_alias = user_meta_dict['pathalias']
                            output['message'] += 'pathalias found: ' + user_meta_dict['pathalias'] + '. '

                            if any(path_alias == stored_path_alias['alias'] for stored_path_alias in path_aliases):
                                output['message'] += 'Verified that default alias exists in pathaliases database. '

                    if path_alias:
                        # reload datapaths with path alias
                        reload_message = inventorylib.reloaddatapaths(pathalias=path_alias)

                        # DEFINITELY COMMENT THIS OUT FOR SECURITY SAKE (absolute paths are secret!!)
                        output['message'] += reload_message

                else:
                    # successful auth
                    output['message'] += 'Failed password check. '
        else:
            # Demo status
            authverified = True
            user_data = {'authlevel':0}

    else:
        output['message'] += 'WSGI authorization not enabled. '

    if authverified or not wsgiauth:
        output['authorized'] = True
    else:
        output['authorized'] = False

    try:
        action = post['action']
    except KeyError:
        output['message'] = 'no action in request'
        action = ''

    if output['authorized'] and action:
        output['action_allowed'] = inventorylib.check_action_auths(action, user_data['authlevel'])
    else:
        output['action_allowed'] = False

    if output['authorized'] and output['action_allowed']:

        # Stock functions
        if action == 'addeditpart':
            output['message'] += 'addpart keyword found. '
            inventorylib.addeditstockpart(post, output)
            inventorylib.calcstockfromall()
        elif action == 'copypart':
            output['message'] += 'copypart keyword found. '
            inventorylib.copystockpart(post, output)
            inventorylib.calcstockfromall()
        elif action == 'deleteparts':
            output['message'] += 'deleteparts keyword found. '
            inventorylib.deletestockparts(post, output)
            inventorylib.calcstockfromall()
        elif action == 'gettrackedpartdata':
            output['message'] += 'gettrackedpartdata keyword found. '
            output['data'] = inventorylib.calcstockfromall(**post)['trackedpart']
        elif action =='generateorders':
            output['message'] += 'generate orders keyword found. '
            inventorylib.generateandaddorders()

        # Inventory functions
        # Edit and add are separated, as names are autogenerated
        elif action == 'editinventory':
            output['message'] += 'editinventory keyword found. '
            inventorylib.editinventory(post, output)
            inventorylib.calcstockfromall()
        elif action == 'addinventory':
            output['message'] += 'addinventory keyword found. '
            inventorylib.createnewinventory(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteinventories':
            output['message'] += 'deleteinventories keyword found. '
            inventorylib.deleteinventories(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditinventorypart':
            output['message'] += 'addeditinventorypart keyword found. '
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deletepartsfrominventory':
            output['message'] += 'deletepartsfrominventory keyword found. '
            inventorylib.deletepartsfrominventory(post, output)
            inventorylib.makeinventorymetadata()
            inventorylib.calcstockfromall()

        # Order functions
        elif action == 'editorder':
            output['message'] += 'editorder keyword found. '
            inventorylib.editorder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addorder':
            output['message'] += 'addorder keyword found. '
            inventorylib.createneworder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteorders':
            output['message'] += 'deleteorders keyword found. '
            inventorylib.deleteorders(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditorderpart':
            output['message'] += 'addeditorderpart keyword found. '
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditorderparts':
            output['message'] += 'addeditorderparts keyword found. '
            if 'partsdata' in post:
                post['partsdata'] = json.loads(post['partsdata'])
                inventorylib.addeditpartlist(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()
        elif action == 'deletepartsfromorder':
            output['message'] += 'deletepartsfromorder keyword found. '
            inventorylib.deletepartsfromorder(post, output)
            inventorylib.makeordermetadata()
            inventorylib.calcstockfromall()

        # BOM functions
        elif action == 'copybom':
            output['message'] += 'copybom keyword found. '
            inventorylib.copybom(post, output)
            inventorylib.makebommetadata()
        elif action == 'addeditbom':
            output['message'] += 'addeditbom keyword found. '
            inventorylib.addeditbom(post, output)
            inventorylib.makebommetadata()
        elif action == 'addeditbomparts':
            output['message'] += 'addeditbomparts keyword found. '
            # Operate on partsdata
            post['partsdata'] = json.loads(post['partsdata'])
            inventorylib.addeditpartlist(post, output)
            inventorylib.makebommetadata()
        elif action == 'getbomcalcs':
            output['message'] += 'getbomcalcs keyword found. '
            inventorylib.calcbomprice(post, output)
        elif action == 'getquotecalcs':
            output['message'] += 'getquotecalcs keyword found. '
            output['message'] += 'function not written yet. '
            # inventorylib.calcbomprice(post, output)
        elif action == 'deletepartsfrombom':
            output['message'] += 'deletepartsfrombom keyword found. '
            inventorylib.deletepartsfrombom(post, output)
            inventorylib.makebommetadata()
        elif action == 'deleteboms':
            output['message'] += 'deleteboms keyword found. '
            inventorylib.deleteboms(post, output)
            inventorylib.makebommetadata()

        # Assembly functions
        elif action == 'copyassembly':
            output['message'] += 'copyassembly keyword found. '
            inventorylib.copyassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'copybomintoassembly':
            output['message'] += 'copybomintoassembly keyword found. '
            inventorylib.copybomintoassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditassembly':
            output['message'] += 'addeditassembly keyword found. '
            inventorylib.addeditassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'addeditassemblyparts':
            output['message'] += 'addeditassemblypart keyword found. '
            post['partsdata'] = json.loads(post['partsdata'])
            inventorylib.addeditpartlist(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
            
        elif action == 'getassemblycalcs':
            output['message'] += 'getassemblycalcs keyword found. '
            inventorylib.calcassemblyprice(post, output)
        elif action == 'deletepartsfromassembly':
            output['message'] += 'deletepartsfromassembly keyword found. '
            inventorylib.deletepartsfromassembly(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()
        elif action == 'deleteassemblys':
            output['message'] += 'deleteassemblys keyword found. '
            inventorylib.deleteassemblies(post, output)
            inventorylib.makeassemblymetadata()
            inventorylib.calcstockfromall()

        # Quotes
        elif action == 'deletequotes':
            output['message'] += 'deletequotes keyword found. '
            inventorylib.deletequotes(post, output)
            inventorylib.makebommetadata(database=inventorylib.sysvars.dbs.quotes)
        elif action == 'copyquotetoboms':
            output['message'] += 'copyquotetoboms keyword found. '
            inventorylib.copyquotetoboms(post, output)
            inventorylib.makebommetadata()

        # Export functions

        elif action == 'exportbomtopdf':
            output['message'] += 'exportbomtopdf keyword found. '
            inventorylib.writepanelbomtopdf(post, output)

            thetime = datalib.gettimestring()
            cleantime = thetime.replace(' ', '_').replace(':', '_')

            # Get bom from boms database
            bom = inventorylib.sysvars.dbs.boms.read_table(post['name'])

            cleanbomname = post['name'].replace(' ','_').replace(':','_')
            filename = cleanbomname + '_' + cleantime
            outputroot = '/var/www/html/panelbuilder/data/downloads/'

            weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + filename

            inventorylib.writepanelbomtopdf(**{'bomdata': bom,
                                      'title': 'Bom generated from ' + post['name'] + ' ' + cleantime,
                                          'outputfile': outputroot + filename})

            output['data']['weblink'] = weblink

        elif action == 'exportassemblytopdf':
            output['message'] += 'exportassemblytopdf keyword found. '

            thetime = datalib.gettimestring()
            cleantime = thetime.replace(' ', '_').replace(':', '_')

            # Get bom from boms database
            assemblydata = inventorylib.sysvars.dbs.assemblies.read_table(post['name'])

            cleanname = post['name'].replace(' ','_').replace(':','_')
            filename = cleanname + '_' + cleantime + '.pdf'
            outputroot = '/var/www/html/panelbuilder/data/downloads/'

            weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + filename

            inventorylib.writepanelbomtopdf(**{'bomdata': assemblydata,
                                      'title': 'Bom generated from ' + post['name'] + ' ' + thetime,
                                          'format':'picklist','outputfile': outputroot + filename})

            output['data'] = {'assemblydata':assemblydata}
            output['weblink'] = weblink

        # Panel builder
        elif action in ['panelcalcs', 'panelcalcsgenquote']:
            output['message'] += 'panelcalc keyword found. '
            import panelbuilder
            for key,value in post.items():
                # print(key, value)
                pass

            if 'paneldesc' in post:
                import json
                post['paneldesc'] = json.loads(post['paneldesc'])

            bomresults = panelbuilder.paneltobom(**post)

            output['data'] = {}
            # d needs to have a 'paneldesc' key with the panel spec data in it.
            output['data']['bomdescription'] = bomresults['bomdescription']
            output['data']['options'] = bomresults['options']
            output['data']['bomcalcs'] = inventorylib.calcbomprice({'bomdictarray':bomresults['bom']})['data']
            output['message'] += bomresults['message']

            # We don't actually want to return the full boms by default. We don't want this in the client, and it's
            # lot of data anyway
            if 'returnfullboms' not in post:
                for option, value in output['data']['options'].items():
                    if 'bom' in value:
                        print('Deleting bom from option ' + str(option))

                        del output['data']['options'][option]['bom']
                    if 'flatbom' in value:
                        print('Deleting flatbom from option ' + str(option))
                        del output['data']['options'][option]['flatbom']

            if action == 'panelcalcsgenquote':
                thetime = datalib.gettimestring()
                cleantime = thetime.replace(' ','_').replace(':','_')
                outputroot = '/var/www/html/panelbuilder/data/downloads/'

                if 'paneltype' in post['paneldesc'] and post['paneldesc']['paneltype'] == 'brewpanel':
                    datedquotefilename = 'panelbuilder_brew_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_brew_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_brew_quote.pdf'
                    genericbomfilename = 'panelbuilder_brew_bom.pdf'
                elif 'paneltype' in post['paneldesc'] and post['paneldesc']['paneltype'] == 'temppanel':
                    datedquotefilename = 'panelbuilder_temp_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_temp_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_temp_quote.pdf'
                    genericbomfilename = 'panelbuilder_temp_bom.pdf'
                else:
                    datedquotefilename = 'panelbuilder_quote_' + cleantime + '.pdf'
                    datedbomfilename = 'panelbuilder_bom_' + cleantime + '.pdf'
                    genericquotefilename = 'panelbuilder_quote.pdf'
                    genericbomfilename = 'panelbuilder_bom.pdf'

                weblink = 'https://panelbuilder.interfaceinnovations.org/data/downloads/' + datedquotefilename

                # until we can get this to format properly in the pdf, we are going to leave it generic
                # description = output['data']['bomdescription']
                description = 'Control panel quote generated by panelbuilder.'
                datedquotes = True

                # Create quote pdf from BOM
                if datedquotes:

                    inventorylib.writepanelquotetopdf(**{'bomdata': bomresults['bom'], 'options': bomresults['options'],
                        'title':'Quote auto-generated by panelbuilder   \t\t' +
                        datalib.gettimestring(), 'price': str(output['data']['bomcalcs']['totalprice']),
                    'outputfile': outputroot + datedquotefilename, 'description':description})

                inventorylib.writepanelquotetopdf(**{'bomdata': bomresults['bom'], 'options': bomresults['options'],
                        'title':'Quote auto-generated by panelbuilder '+ thetime,
                       'price': output['data']['bomcalcs']['totalprice'], 'outputfile':outputroot + genericquotefilename})

                # Create database entry BOM

                # Create table
                # print('** DATABASE')
                # print(panelbuilder.sysvars.dirs.dbs.quotes)

                bomname = 'quote_' + cleantime
                inventorylib.addeditbom({'bomdata':{'name':bomname}, 'database':panelbuilder.sysvars.dirs.dbs.quotes}, output)
                # print('** BOM **')
                # print(bomresults['bom'])
                inserts = []
                for part in bomresults['bom']:
                    inserts.append(dblib.makesqliteinsert(bomname, [part['partid'],part['qty']], ['partid','qty']))
                dblib.sqlitemultquery(inventorylib.sysvars.dirs.dbs.quotes, inserts)
                inventorylib.makebommetadata(database=inventorylib.sysvars.dbs.quotes)

                # inventorylib.addeditpartlist(post, output)


                # Create pdfs

                if datedquotes:
                    inventorylib.writepanelbomtopdf(**{'bomdata': bomresults['bom'], 'options': bomresults['options'],
                        'title':'Quote auto-generated by panelbuilder '
                      + thetime, 'outputfile': outputroot + datedbomfilename})

                inventorylib.writepanelbomtopdf(**{'bomdata': bomresults['bom'], 'title': 'panelbuilder BOM generated ' + thetime,
                                 'outputfile': outputroot + genericbomfilename, 'totalprice': output['data']['bomcalcs']['totalprice']})

                output['data']['quotelink'] = weblink
                from iiutilities.utility import gmail
                mymail = gmail(subject="Quote generated")
                mymail.message = 'Quote generated at ' + cleantime + '\r\n'

                if 'remote_ip' in output:
                    mymail.message = 'IP address ' + output['remote_ip'] + '\r\n'

                mymail.message += bomresults['bomdescription']
                mymail.recipient = '*****@*****.**'
                mymail.sender = 'II Panelbuilder'
                mymail.send()


        # Multi-use
        elif action == 'reloaditemdatafromstock':
            output['message'] += 'reloaditemdatafromstock keyword found. '
            inventorylib.refreshpartsfromstock(post, output)
            if 'bomname' in post:
                inventorylib.recalcpartdata(bomname=post['bomname'])
                inventorylib.makebommetadata()
            elif 'assemblyame' in post:
                inventorylib.recalcpartdata(assemblyname=post['assemblyname'])
                inventorylib.makeassemblymetadata()

        # Generic functions
        elif action == 'gettablenames':
            dbpath = inventorylib.dbnametopath(post['database'])
            try:
                output['data'] = dblib.gettablenames(dbpath)
            except:
                output['message'] += 'Error getting table names'
        elif action == 'switchtablerows':
            dbpath = inventorylib.dbnametopath(post['database'])
            dblib.switchtablerows(dbpath, post['tablename'], post['row1'], post['row2'], post['uniqueindex'])
        elif action == 'modwsgistatus':
            output['processgroup'] = repr(environ['mod_wsgi.process_group'])
            output['multithread'] = repr(environ['wsgi.multithread'])
        elif action == 'gettabledata':
            output['message']+='Gettabledata. '
            if 'database' in post:
                dbpath = inventorylib.dbnametopath(post['database'])
                if dbpath:
                    output['message'] += 'Friendly name ' + post['database'] + ' translated to path ' + dbpath + ' successfully. '

                    if 'tablenames' in post:  # Get multiple tables
                        output['message'] += 'Multiple tables. '
                        data = []
                        if 'start' in post:
                            fixedstart = int(post['start'])
                        else:
                            fixedstart = 0
                        if 'length' in post:
                            fixedlength = int(post['length'])
                        else:
                            fixedlength = 1
                        if 'lengths' in post:
                            lengths = map(int, post['lengths[]'])
                        else:
                            lengths = []
                        if 'starts' in post:
                            starts = map(int, post['starts'])
                        else:
                            starts = []

                        for index, table in enumerate(post['tablenames[]']):
                            try:
                                length = lengths[index]
                            except IndexError:
                                length = fixedlength
                            try:
                                start = starts[index]
                            except IndexError:
                                start = fixedstart

                            data.append(dblib.dynamicsqliteread(dbpath, table, start, length))
                            output['data']=data
                    elif 'length' in post:  # Handle table row subset
                        output['message']+='Length keyword. '
                        if not 'start' in post:
                            post['start'] = 0
                        thetime = time()
                        output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'], post['start'], post['length'])
                        output['querytime'] = time() - thetime
                    elif 'row' in post:  # Handle table row
                        output['message'] += 'Row keyword. ' + str(post['row'])
                        thetime = time()
                        output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'], post['row'])
                        output['querytime'] = time() - thetime
                    elif 'tablename' in post:  # Handle entire table
                        output['message'] += 'Tablename keyword: ' + post['tablename'] + '. '
                        thetime = time()
                        if 'condition' in post:
                            if not post['condition'] == '':
                                output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'], condition=post['condition'])
                            else:
                                output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'])
                        else:
                            try:
                                output['data'] = dblib.dynamicsqliteread(dbpath, post['tablename'])
                            except:
                                output['message'] += 'Error retrieving data. '
                            else:
                                output['message'] += 'Data query appears successful. '
                        output['querytime'] = time() - thetime
                else:
                    output['message'] += 'Friendly name ' + post['database'] + ' unsuccessfully translated. '
            else:
                output['message'] += 'No database present in action request'
        else:
            output['message'] = 'no command matched for action "' + action + '"'
    else:
        # status = '403 Forbidden'
        output['message'] += 'Not authorized for this action (or perhaps at all?) '

    if 'data' in output:
        if output['data']:
            newetag = hashlib.md5(str(output['data']).encode('utf-8')).hexdigest()
            if 'etag' in post:
                if newetag == post['etag']:
                    status = '304 Not Modified'
                    output['data'] = ''
        else:
            newetag=''
    else:
        newetag=''

    if 'datasize' in post:
        output['datasize'] = sys.getsizeof(output['data'])

    output['etag'] = newetag
    # try:
    foutput = json.dumps(output, indent=1)
    # except:
    #     import csv
    #     w = csv.writer(open("/usr/lib/iicontrollibs/inventory/dumperr.log", "w"))
    #     for key, val in output.items():
    #         w.writerow([key, val])
    response_headers = [('Content-type', 'application/json')]
    response_headers.append(('Etag',newetag))
    start_response(status, response_headers)

    return foutput.encode('utf-8')
示例#29
0
def split_time_db(path, **kwargs):
    """
    Group every table of the sqlite database at ``path`` by date.

    Reads each table that has a well-formed time column (named by
    ``settings['timekey']``), splits its rows into per-date buckets via
    ``split_time_log``, then inverts the result so it is keyed by date.
    Tables without the time column are skipped with a console notice.

    kwargs:
        division: split granularity (default 'day').
        timekey:  name of the time column (default 'time').

    Returns:
        dict mapping each date key (as produced by split_time_log) to
        {tablename: {'data': [rows for that date], 'schema': sqliteTableSchema}}.
    """
    settings = {
        'division': 'day',
        'timekey': 'time'
    }
    settings.update(kwargs)
    # NOTE(review): settings['division'] and settings['timekey'] are not
    # forwarded to split_time_log below -- confirm its defaults match.

    from iiutilities import dblib

    database = dblib.sqliteDatabase(path)

    tablenames = database.get_table_names()

    # Master list of every date key seen across all tables.
    all_dates = []

    # sorted_data[tablename] = {'data': {date_key: [rows]}, 'schema': sqliteTableSchema}
    sorted_data = {}

    for tablename in tablenames:
        # raw_data is a list of row dicts, e.g. [{timekey: 'timestring', 'value': ...}, ...]
        raw_data = database.read_table(tablename)
        schema = database.get_schema(tablename)

        if raw_data:
            if settings['timekey'] in raw_data[0]:
                sorted_data[tablename] = {'data': split_time_log(raw_data), 'schema': schema}
            else:
                print('Table ' + tablename + ' does not have a time column. Skipping. ')
                continue

            # add date tuples to master list
            for key in sorted_data[tablename]['data']:
                if key not in all_dates:
                    all_dates.append(key)

    # Invert: data_by_date[date][tablename] = {'data': rows, 'schema': schema}
    data_by_date = {}

    for date in all_dates:
        data_by_date[date] = {}
        for tablename in sorted_data:
            if date in sorted_data[tablename]['data']:
                data_by_date[date][tablename] = {'data': sorted_data[tablename]['data'][date],
                                                 'schema': sorted_data[tablename]['schema']}

    # print('dates: ' + str(len(all_dates)))
    return data_by_date
示例#30
0
def application(environ, start_response):
    """
    WSGI entry point serving network-statistics queries.

    Parses a JSON request body and dispatches on its 'action' key:
    'getnetstatsdata' (wired history, optionally limited by day and period),
    'gettraffichistodata' (remote-histogram/metadata tables from the access
    db), and 'postwirelessdata' (placeholder). Responds with JSON plus an
    Etag; returns 304 with empty data when the client's etag matches.
    """
    import json
    import hashlib

    # Set top folder to allow import of modules

    import os, sys, inspect

    top_folder = \
        os.path.split(os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0, top_folder)

    import ii_netstats

    from iiutilities import dblib, datalib
    from time import time

    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    request_body = environ['wsgi.input'].read(request_body_size)
    post = json.loads(request_body.decode('utf-8'))

    status = '200 OK'
    output = {'data': [], 'message': ''}

    d = post

    wsgiauth = False
    authverified = False

    if wsgiauth:
        # Verify that session login information is legit: hashed password, with salt and username,
        # must match hash stored in database.
        if 'sessionuser' in d:
            output['message'] += 'Session user is ' + d['sessionuser'] + '. '
        else:
            output['message'] += 'No session user found. '
            d['sessionuser'] = ''

        try:
            # NOTE(review): inventorylib is not imported in this handler; if
            # wsgiauth is ever enabled this lookup needs a proper import.
            condition = "name='" + d['sessionuser'] + "'"
            userdata = dblib.readonedbrow(inventorylib.sysvars.dirs.dbs.safe, 'users', condition=condition)[0]
        except:
            output['message'] += 'error in user sqlite query for session user "' + d['sessionuser'] + '". '
            userdata = {'accesskeywords':'demo','admin':False}
        else:
            # Get session hpass to verify credentials.
            # hashlib.update() requires bytes on Python 3, hence the encodes.
            hashedpassword = d['sessionhpass']
            hname = hashlib.new('sha1')
            hname.update(d['sessionuser'].encode('utf-8'))
            hashedname = hname.hexdigest()
            hentry = hashlib.new('md5')
            # Fixed: the module is imported as ii_netstats; 'netstats' was an
            # undefined name here.
            hentry.update((hashedname + ii_netstats.salt + hashedpassword).encode('utf-8'))
            hashedentry = hentry.hexdigest()
            if hashedentry == userdata['password']:
                # successful auth
                output['message'] += 'Password verified. '
                authverified = True
    else:
        output['message'] += 'WSGI authorization not enabled. '

    if authverified or not wsgiauth:
        try:
            action = d['action']
        except KeyError:
            output['message'] = 'no action in request'
        else:
            # Stock functions
            if action == 'getnetstatsdata':
                output['message'] += 'getting netstats keyword found. '
                import datetime
                the_day = datetime.date.today()
                if 'day' in d:
                    # We will pass in a day in format yyyy-mm-dd or keywords, like 'today'
                    # Unrecognized keywords (including 'today') leave the_day as today.
                    day_offsets = {'prev_day': 1, 'prev_2_day': 2, 'prev_3_day': 3, 'prev_4_day': 4}
                    the_day = datetime.date.today() - datetime.timedelta(days=day_offsets.get(d['day'], 0))

                if the_day == datetime.date.today():
                    # Live database holds today's data; archives are date-suffixed.
                    db_path = ii_netstats.netstats_dbpath
                else:
                    db_path_root = ii_netstats.netstats_dbpath.split('.db')[0]
                    date_string = '{}-{:02d}-{:02d}'.format(the_day.year, the_day.month, the_day.day)
                    db_path = '{}_{}.db'.format(db_path_root, date_string)

                print('** DBPATH: {} '.format(db_path))
                netstats_db = dblib.sqliteDatabase(db_path)

                output['message'] += 'db path {} chosen. '.format(db_path)

                wired_history = netstats_db.read_table('wired')
                if 'dataperiod' in d:
                    output['message'] += 'Limiting returned time to ' + d['dataperiod'] + '. '
                    # default 6hrs
                    period_map = {
                        '6_hrs': 6 * 3600,
                        '12_hrs': 12 * 3600,
                        '24_hrs': 24 * 3600,
                        '48_hrs': 48 * 3600,
                        '7_days': 7 * 24 * 3600,
                    }
                    period = period_map.get(d['dataperiod'], 6 * 3600)

                    unmodified_length = len(wired_history)

                    # return only data within last period
                    from operator import itemgetter
                    from iiutilities.datalib import timestringtoseconds
                    new_list = sorted(wired_history, key=itemgetter('time'), reverse=True)

                    new_history = []
                    if new_list:
                        output['message'] += 'Most recent data point: ' + new_list[0]['time'] + '. '
                        most_recent_time_in_seconds = timestringtoseconds(new_list[0]['time'])
                        output['message'] += 'Most recent time in seconds ' + str(most_recent_time_in_seconds) + '. '

                        output['message'] += 'Oldest time in seconds ' + str(timestringtoseconds(new_list[-1]['time']))
                        output['message'] += 'Span of ' + str(most_recent_time_in_seconds - timestringtoseconds(new_list[-1]['time']))  + '. '
                        output['message'] += 'Period of ' + str(period) + '. '

                        for item in new_list:
                            if most_recent_time_in_seconds - timestringtoseconds(item['time']) < period:
                                new_history.append(item)
                    output['data'] = new_history
                    # Fixed: previously measured len(wired_history), which never
                    # changes, so the message always reported an unshortened size.
                    modified_length = len(new_history)

                    output['message'] += 'Shortened data from ' + str(unmodified_length) + ' to ' + str(modified_length)
                else:
                    output['data'] = wired_history
                try:
                    # urllib2 exists only on Python 2; fall back to urllib.request.
                    try:
                        from urllib2 import urlopen
                    except ImportError:
                        from urllib.request import urlopen
                    my_ip = urlopen('http://ip.42.pl/raw').read().decode('utf-8')
                except:
                    my_ip = 'unknown'
                output['host'] = my_ip
            elif action == 'gettraffichistodata':
                output['message'] += 'gettraffic histo keyword found. '
                access_db = dblib.sqliteDatabase(ii_netstats.access_dbpath)

                access_db_tablenames = access_db.get_table_names()

                # Fetch only the remote-histogram and metadata tables.
                tables_to_fetch = []
                for tablename in access_db_tablenames:
                    if tablename.find('remotehisto') >= 0 or tablename.find('metadata') >= 0:
                        tables_to_fetch.append(tablename)
                output['data'] = {}
                for table_to_fetch in tables_to_fetch:
                    output['data'][table_to_fetch] = access_db.read_table(table_to_fetch)

            elif action == 'postwirelessdata':
                output['message'] += 'postwirelessdata keyword found. '

                # nothing here yet

    # Etag handling: return 304 with empty data when the client already has
    # this exact payload.
    if 'data' in output:
        if output['data']:
            # Fixed: md5 requires bytes on Python 3 (matches the encoding used
            # by the inventory handler in this file).
            newetag = hashlib.md5(str(output['data']).encode('utf-8')).hexdigest()
            if 'etag' in d:
                if newetag == d['etag']:
                    status = '304 Not Modified'
                    output['data'] = ''
        else:
            newetag=''
    else:
        newetag=''

    if 'datasize' in d:
        output['datasize'] = sys.getsizeof(output['data'])

    output['etag'] = newetag
    try:
        foutput = json.dumps(output, indent=1)
    except:
        import csv
        w = csv.writer(open("/usr/lib/iicontrollibs/inventory/dumperr.log", "w"))
        for key, val in output.items():
            w.writerow([key, val])
        # Fixed: foutput was left undefined on this path, causing a NameError
        # at the return below.
        foutput = json.dumps({'message': 'Output not serializable. See dump log. '})

    response_headers = [('Content-type', 'application/json')]
    response_headers.append(('Etag',newetag))
    start_response(status, response_headers)

    # WSGI (PEP 3333) requires byte strings in the response iterable.
    return [foutput.encode('utf-8')]
示例#31
0
def insertuser(database, username, password, salt, **kwargs):
    """
    Ensure the 'users' table exists in ``database`` and insert a new user row.

    The stored password is the salted hash produced by
    ``datalib.gethashedentry(name, password, salt=salt)``. A free integer id
    is chosen starting at (number of existing users + 1).

    Args:
        database: path to the sqlite database file.
        username: login name for the new user.
        password: plaintext password (hashed before storage).
        salt:     salt string passed to the hash function.

    kwargs:
        schema: optional sqliteTableSchema overriding the default users schema.
        Any other keyword overrides a field of the inserted row
        (email, accesskeywords, authlevel, temp, admin).
    """
    from iiutilities import dblib, datalib
    settings = {
        'schema':
        dblib.sqliteTableSchema([{
            'name': 'id',
            'type': 'integer',
            'primary': True
        }, {
            'name': 'name',
            'unique': True
        }, {
            'name': 'password'
        }, {
            'name': 'accesskeywords'
        }, {
            'name': 'admin'
        }, {
            'name': 'email'
        }, {
            'name': 'temp'
        }, {
            'name': 'authlevel',
            'type': 'integer',
            'default': 0
        }])
    }
    settings.update(kwargs)

    this_database = dblib.sqliteDatabase(database)

    entry = {
        'name': username,
        'password': password,
        'email': '',
        'accesskeywords': '',
        'authlevel': 1,
        'temp': '',
        'admin': 0
    }
    # Fixed: only copy row-field overrides into the entry; 'schema' is a table
    # option, and previously leaked into the inserted row dict.
    entry.update({key: value for key, value in kwargs.items() if key != 'schema'})

    # NOTE(review): this reads 'users' before the create_table call below
    # ensures it exists -- presumably dblib tolerates a missing table; verify.
    existingentries = this_database.read_table('users')

    usercount = len(existingentries)
    existingindices = [
        existingentry['id'] for existingentry in existingentries
    ]
    # (Removed unused 'existingnames' list, which also incorrectly read the
    # 'id' field instead of 'name'.)

    print('EXISTING ENTRIES:')
    print(existingentries)

    # Pick the first free integer id at or above usercount + 1.
    newindex = usercount + 1
    while newindex in existingindices:
        newindex += 1

    hashedentry = datalib.gethashedentry(entry['name'],
                                         entry['password'],
                                         salt=salt)

    # migrate=True brings an existing table up to the target schema in place.
    this_database.create_table('users',
                               schema=settings['schema'],
                               queue=True,
                               migrate=True)

    entry['id'] = newindex
    entry['password'] = hashedentry
    this_database.insert('users', entry, queue=True)

    this_database.execute_queue()
示例#32
0
def split_time_db(path, **kwargs):
    """
    Partition every time-stamped table in a database into per-date buckets.

    Walks all tables in the sqlite database at *path*; any table whose rows
    carry the configured time key is split with split_time_log. Returns a
    mapping of date key -> {tablename: {'data': rows, 'schema': schema}},
    covering every date that appears in any table.

    Keyword options (via **kwargs):
        division -- bucket granularity label, default 'day'
        timekey  -- name of the time column, default 'time'
    """
    options = {'division': 'day', 'timekey': 'time'}
    options.update(kwargs)

    from iiutilities import dblib
    from iiutilities.datalib import gettimestring
    import time

    database = dblib.sqliteDatabase(path)

    all_dates = []          # every date key seen across all tables
    sorted_data = {}        # tablename -> {'data': {date: rows}, 'schema': schema}

    for name in database.get_table_names():
        rows = database.read_table(name)
        table_schema = database.get_schema(name)

        # Tables with no rows are silently skipped; tables without the time
        # column are skipped with a notice.
        if not rows:
            continue

        if options['timekey'] not in rows[0]:
            print('Table ' + name +
                  ' does not have a time column. Skipping. ')
            continue

        sorted_data[name] = {
            'data': split_time_log(rows),
            'schema': table_schema
        }

        # Accumulate the union of date keys across tables.
        for date_key in sorted_data[name]['data']:
            if date_key not in all_dates:
                all_dates.append(date_key)

    # Invert: group by date first, then by table, carrying each table's
    # schema along so per-date databases can be recreated.
    data_by_date = {}
    for the_date in all_dates:
        tables_for_date = {}
        for name, table_info in sorted_data.items():
            if the_date in table_info['data']:
                tables_for_date[name] = {
                    'data': table_info['data'][the_date],
                    'schema': table_info['schema']
                }
        data_by_date[the_date] = tables_for_date

    return data_by_date
示例#33
0
def application(environ, start_response):
    """
    WSGI entry point for the CuPID control web API.

    Parses a JSON request body, authenticates the session user against the
    users database (md5(sha1(username) + salt + hashed_password) must match
    the stored hash), checks the requested 'action' against the user's
    authorization level, and dispatches to the matching handler.  The
    response is always a JSON object with at least a 'message' key;
    handlers add 'data' and other keys as appropriate.

    Returns a one-element list holding the JSON response string, per WSGI.
    """

    import json

    import os, sys, inspect

    # Put the project top folder on sys.path so project modules import.
    top_folder = os.path.split(
        os.path.realpath(
            os.path.abspath(
                os.path.split(inspect.getfile(inspect.currentframe()))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0, top_folder)

    from cupid import pilib, controllib
    from iiutilities import dblib, utility, datalib

    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    # The request body is a single JSON object of keyword arguments.
    request_body = environ['wsgi.input'].read(request_body_size)
    post = json.loads(request_body.decode('utf-8'))

    output = {}
    output['message'] = ''

    status = '200 OK'
    wsgiauth = True
    authverified = False

    # BUGFIX: initialize these up front. They were previously assigned only
    # on the success path, so a failed login raised KeyError on
    # output['authorized'] (and NameError on user_data) further down.
    output['authorized'] = False
    user_data = {'accesskeywords': 'demo', 'admin': False, 'authlevel': 0}

    if wsgiauth:
        # Verify that session login information is legit: hashed password,
        # with salt and username, match hash stored in database.
        import hashlib

        safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.users)
        if 'username' in post and post['username']:
            output['message'] += 'Session user is ' + post['username'] + '. '
        else:
            output['message'] += 'No session user found. '
            post['username'] = ''

        if post['username']:
            try:
                # NOTE(review): condition is assembled by concatenation and is
                # SQL-injectable; dblib should accept parameterized conditions.
                condition = "name='" + post['username'] + "'"
                user_data = safe_database.read_table_row(
                    'users', condition=condition)[0]
            except:
                output['message'] += 'Error in user sqlite query for session user "' + post['username'] + '". '
                output['message'] += 'Condition: ' + condition + '. Path: ' + pilib.dirs.dbs.safe
                user_data = {'accesskeywords': 'demo', 'admin': False}
            else:
                # Get session hpass to verify credentials.
                hashedpassword = post['hpass']
                hname = hashlib.new('sha1')
                # NOTE(review): hashlib update() requires bytes on Python 3;
                # this module appears to target Python 2 -- confirm before
                # porting.
                hname.update(post['username'])
                hashedname = hname.hexdigest()
                hentry = hashlib.new('md5')
                hentry.update(hashedname + pilib.salt + hashedpassword)
                hashedentry = hentry.hexdigest()
                if hashedentry == user_data['password']:
                    # successful auth
                    output['message'] += 'Password verified. '
                    authverified = True

                    # TODO: implement usermeta
        else:
            # No username posted: treat as demo access with minimal rights.
            authverified = True
            user_data = {'authlevel': 0}

    else:
        output['message'] += 'WSGI authorization not enabled. '

    if authverified or not wsgiauth:
        output['authorized'] = True

    try:
        action = post['action']
    except KeyError:
        output['message'] = 'no action in request'
        action = ''
    else:
        output['message'] += '{} action keyword found'.format(action)

    # Authorization level gate: the action must be permitted for this user.
    if output['authorized'] and action:
        output['action_allowed'] = pilib.check_action_auths(
            action, user_data['authlevel'])
    else:
        output['action_allowed'] = False

    if output['authorized'] and output['action_allowed']:

        output['message'] += 'Found action. '

        if action == 'testdbvn':
            from iiutilities.dblib import dbvntovalue
            try:
                output['data'] = dbvntovalue(post['dbvn'])
            except:
                output['message'] += 'Error in dbvn evaluation. '
                output['data'] = 'error'
            else:
                output['message'] += 'Seems to have worked out. '
        elif action == 'testlogical':
            from iiutilities.datalib import evaldbvnformula
            try:
                output['data'] = evaldbvnformula(post['logical'])
            except:
                output['message'] += 'Error in logical evaluation. '
                output['data'] = 'error'
            else:
                output['message'] += 'Seems to have worked out. '

        elif action == 'testmodule':
            output['message'] += 'Testing module: '
            if 'modulename' in post:
                import cupid.cupidunittests
                output['message'] += post['modulename']
                output['data'] = cupid.cupidunittests.testmodule(
                    post['modulename'])
            else:
                output['message'] += 'Modulename not found. '
        elif action == 'testfunction':
            output['message'] += 'Testing function: '
            if 'testname' in post:
                import cupid.cupidunittests
                output['message'] += post['testname']
                output['data'] = cupid.cupidunittests.testfunction(
                    post['testname'])
            else:
                output['message'] += 'Testname not found. '

        elif action == 'modifychannelalarm':
            controllib.handle_modify_channel_alarm(post, output)
            from cupid.actions import processactions

            # process only this action.
            processactions(name=post['actionname'])

        elif action == 'modifychannel':
            controllib.handle_modify_channel(post, output)

        elif action == 'getalarmscount':
            control_db = dblib.sqliteDatabase(pilib.dirs.dbs.control)
            actions = control_db.read_table('actions')
            output['data'] = {
                'totalalarms': len(actions),
                'channelalarms': 0,
                'activealarms': 0,
                'activechannelalarms': 0
            }
            # BUGFIX: loop variable renamed so it no longer shadows the
            # request 'action' string.
            for action_entry in actions:
                if action_entry['conditiontype'] == 'channel':
                    output['data']['channelalarms'] += 1
                    if action_entry['active']:
                        output['data']['activechannelalarms'] += 1

                if action_entry['active']:
                    output['data']['activealarms'] += 1

        elif action == 'copy_log_to_archive':
            pilib.app_copy_log_to_archive(post, output)

        elif action == 'getlogscount':
            logtablenames = dblib.sqliteDatabase(
                pilib.dirs.dbs.log).get_table_names()
            output['data'] = {'logscount': len(logtablenames)}

        elif action == 'test_action':
            output['message'] += 'Testing action. '
            controldb = dblib.sqliteDatabase(pilib.dirs.dbs.control)
            actiondict = controldb.read_table('actions',
                                              condition='"name"=\'' +
                                              post['actionname'] + "'")[0]
            # BUGFIX: import aliased so the class no longer shadows the
            # request 'action' string.
            from cupid.actions import action as action_class
            test_action = action_class(actiondict)
            test_action.test()

        elif action == 'update_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            safe_database.set_single_value('wireless', 'password',
                                           post['password'],
                                           "SSID='" + post['ssid'] + "'")

        elif action == 'add_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            insert = {'SSID': post['ssid'], 'auto': 1, 'priority': 1}
            if 'password' in post:
                insert['password'] = post['password']
            safe_database.insert('wireless', insert)

        elif action == 'delete_network':
            safe_database = dblib.sqliteDatabase(pilib.dirs.dbs.safe)
            safe_database.delete('wireless', "SSID='" + post['ssid'] + "'")

        elif action in ['userdelete', 'useradd', 'usermodify']:
            # TODO: consolidate with the other useradd/modify algorithm
            # written already, probably when the user permissions interface
            # is updated.
            if action == 'userdelete':
                try:
                    dblib.sqlitequery(
                        pilib.dirs.dbs.users,
                        "delete from users where name='" +
                        post['usertodelete'] + "'")
                except:
                    output['message'] += 'Error in delete query. '
                else:
                    output['message'] += 'Successful delete query. '
            elif action == 'usermodify':

                if 'usertomodify' in post:
                    querylist = []
                    if 'newpass' in post:
                        from pilib import salt
                        # Recompute the stored password hash from the new
                        # credentials, exactly as the login check does.
                        hashedpassword = post['newpass']
                        hname = hashlib.new('sha1')
                        hname.update(post['usertomodify'])
                        hashedname = hname.hexdigest()
                        hentry = hashlib.new('md5')
                        hentry.update(hashedname + salt + hashedpassword)
                        hashedentry = hentry.hexdigest()
                        # BUGFIX: this statement was corrupted in the source
                        # ('******') and never used the hashedentry computed
                        # above; reconstructed as a standard update query.
                        querylist.append("update users set password='" +
                                         hashedentry + "' where name='" +
                                         post['usertomodify'] + "'")

                    if 'newemail' in post:
                        querylist.append("update users set email='" +
                                         post['newemail'] + "' where name='" +
                                         post['usertomodify'] + "'")
                    if 'newauthlevel' in post:
                        querylist.append("update users set authlevel='" +
                                         post['newauthlevel'] +
                                         "' where name='" +
                                         post['usertomodify'] + "'")

                    try:
                        dblib.sqlitemultquery(pilib.dirs.dbs.users, querylist)
                    except:
                        output['message'] += 'Error in modify/add query: ' + ",".join(querylist)
                    else:
                        output['message'] += 'Successful modify/add query. ' + ",".join(querylist)
                else:
                    output['message'] += 'Need usertomodify in query. '
            elif action == 'useradd':
                try:
                    username = post['newusername']
                except:
                    username = '******'
                try:
                    newemail = post['newemail']
                except:
                    newemail = '*****@*****.**'
                try:
                    newauthlevel = post['newauthlevel']
                except:
                    newauthlevel = 0
                # BUGFIX: the insert query used to be built only inside the
                # except branch above, so supplying newauthlevel raised
                # NameError on 'query' below. Build it unconditionally.
                query = "insert into users values(NULL,'" + username + "','','" + newemail + "',''," + str(
                    newauthlevel) + ")"
                try:
                    dblib.sqlitequery(pilib.dirs.dbs.users, query)
                except:
                    output['message'] += "Error in useradd sqlite query: " + query + ' . '
                else:
                    output['message'] += "Successful query: " + query + ' . '
        elif action == 'getfiletext':
            try:
                filepath = post['filepath']
                if 'numlines' in post:
                    numlines = int(post['numlines'])
                else:
                    numlines = 9999
                output['message'] += 'Using numlines: ' + str(
                    numlines) + ' for read action. '
                if 'startposition' in post:
                    startposition = post['startposition']
                else:
                    startposition = 'end'
                output['message'] += 'Reading from position ' + startposition + '. '
            except KeyError:
                output['message'] += 'Sufficient keys for action getfile text do not exist. '
            except:
                output['message'] += 'Uncaught error in getfiletext. '
            else:
                try:
                    file = open(filepath)
                    lines = file.readlines()
                except:
                    output['message'] += 'Error reading file in getfiletext action. '
                else:
                    output['data'] = []
                    if startposition == 'end':
                        try:
                            output['data'] = datalib.tail(file, numlines)[0]
                        except:
                            output['message'] += 'Error in tail read. '
                    else:
                        linecount = 0
                        for line in lines:
                            linecount += 1
                            if linecount > numlines:
                                break
                            else:
                                output['data'].append(line)
                    # BUGFIX: close the file handle (was leaked).
                    file.close()
        elif action == 'getmbtcpdata':
            try:
                clientIP = post['clientIP']
                register = post['register']
                length = post['length']
            except KeyError:
                output['message'] += 'Sufficient keys do not exist for the command. Requires clientIP, register, and length. '
            else:
                from iiutilities.netfun import readMBcodedaddresses
                output['response'] = readMBcodedaddresses(
                    clientIP, int(register), int(length))
        elif action == 'queuemessage':
            output['message'] += 'Queue message. '
            if 'message' in post:
                try:
                    dblib.sqliteinsertsingle(
                        pilib.dirs.dbs.motes, 'queuedmessages',
                        [datalib.gettimestring(), post['message']])
                except:
                    import traceback
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    output['message'] += 'Error in queue insert query: {}. '.format(
                        traceback.format_exc())
                else:
                    output['message'] += 'Message insert successful'
            else:
                output['message'] += 'No message present. '

        elif action == 'setsystemflag' and 'systemflag' in post:
            database = pilib.dirs.dbs.system
            dblib.setsinglevalue(database, 'systemflags', 'value', 1,
                                 "name=\'" + post['systemflag'] + "'")
        elif action == 'rundaemon':
            from cupiddaemon import rundaemon
            rundaemon()

        # TODO: Eliminate this scary thing.
        elif action == 'setvalue':
            utility.log(pilib.dirs.logs.control, "Setting value in wsgi", 1, 1)

            # we use the auxiliary 'setsinglecontrolvalue' to add additional actions to update
            if all(k in post
                   for k in ('database', 'table', 'valuename', 'value')):
                dbpath = pilib.dbnametopath(post['database'])
                if dbpath:
                    output['message'] += 'Carrying out setvalue for value ' + post['valuename'] + ' on ' + post['table'] + ' in ' + dbpath
                    if 'condition' in post:
                        pilib.setsinglecontrolvalue(dbpath, post['table'],
                                                    post['valuename'],
                                                    post['value'],
                                                    post['condition'])
                    elif 'index' in post:
                        condition = 'rowid= ' + post['index']
                        pilib.setsinglecontrolvalue(dbpath, post['table'],
                                                    post['valuename'],
                                                    post['value'], condition)
                    else:
                        pilib.setsinglecontrolvalue(dbpath, post['table'],
                                                    post['valuename'],
                                                    post['value'])
                else:
                    output['message'] += 'Problem translating dbpath from friendly name: ' + post['database']
            else:
                output['message'] += 'Insufficient data for setvalue '
        elif action == 'updateioinfo':
            if all(k in post for k in ['database', 'ioid', 'value']):
                query = dblib.makesqliteinsert('ioinfo',
                                               [post['ioid'], post['value']],
                                               ['id', 'name'])
                try:
                    dblib.sqlitequery(pilib.dirs.dbs.control, query)
                except:
                    output['message'] += 'Error in updateioinfo query execution: ' + query + '. into database: ' + pilib.dirs.dbs.control
                    output['message'] += 'ioid: ' + post['ioid'] + ' . '
                else:
                    output['message'] += 'Executed updateioinfo query. '
            else:
                output['message'] += 'Insufficient data for updateioinfo query ! '

        # TODO: properly incorporate and test channel class functions here, and then sub it.
        elif action == 'modify_channel':
            controllib.app_modify_channel(post, output)

        elif action == 'deletechannelbyname' and 'database' in post and 'channelname' in post:
            dbpath = pilib.dbnametopath(post['database'])
            # BUGFIX: previous statement read 'delete channelname from
            # channels', which is not valid SQL.
            dblib.sqlitequery(
                dbpath, 'delete from channels where name=\"' +
                post['channelname'] + '\"')
        elif action == 'updatecameraimage':
            output['message'] += 'Take camera image keyword. '
            import cupid.camera
            if 'width' in post:
                width = post['width']
            else:
                width = 800
            try:
                values = cupid.camera.takesnap(width=width)
            except:
                output['message'] += 'Error taking image. '
            else:
                output['message'] += 'Appears successful. Path : ' + values[
                    'imagepath'] + '. Timestamp : ' + values['timestamp'] + '. '
                output['data'] = values
        elif action == 'getcurrentcamtimestamp':
            output['message'] += 'getcurrentcamtimestamp keyword found. '
            try:
                with open('/var/www/webcam/images/current.jpg.timestamp') as f:
                    data = f.read()
            except:
                output['message'] += 'Error reading file as requested. '
            else:
                output['data'] = data
        else:
            output['message'] += 'Action keyword present(' + action + '), but not handled. '
    else:
        output['message'] += 'Authentication unsuccessful or action not authorized.'
        status = '401 Not Authorized'

    foutput = json.dumps(output, indent=1)

    response_headers = [('Content-type', 'application/json')]
    start_response(status, response_headers)

    # NOTE(review): WSGI on Python 3 expects bytes in the response iterable;
    # returning a str only works on Python 2 -- confirm target interpreter.
    return [foutput]
示例#34
0
def application(environ, start_response):
    """
    WSGI entry point for the ii_netstats web API.

    Parses a JSON request body and dispatches on the 'action' keyword to
    return network statistics history ('getnetstatsdata'), access/traffic
    histogram tables ('gettraffichistodata'), or accept posted wireless
    data ('postwirelessdata', currently a stub).  The response is a JSON
    object carrying 'message', 'data' and an 'etag' (md5 of the data) used
    to answer repeat requests with 304 Not Modified.

    Returns a one-element list holding the JSON response string, per WSGI.
    """
    import json
    import hashlib

    # Put the project top folder on sys.path so project modules import.
    import os, sys, inspect

    top_folder = \
        os.path.split(os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0])))[0]
    if top_folder not in sys.path:
        sys.path.insert(0, top_folder)

    import ii_netstats

    from iiutilities import dblib, datalib

    try:
        request_body_size = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        request_body_size = 0

    # The request body is a single JSON object of keyword arguments.
    request_body = environ['wsgi.input'].read(request_body_size)
    post = json.loads(request_body.decode('utf-8'))

    status = '200 OK'
    output = {'data': [], 'message': ''}

    d = post

    wsgiauth = False
    authverified = False

    if wsgiauth:
        # Verify that session login information is legit: hashed password,
        # with salt and username, match hash stored in database.
        # NOTE(review): this path is currently disabled (wsgiauth = False)
        # and references names that are not defined in this function
        # ('inventorylib', 'netstats'); it will raise NameError if enabled
        # as-is -- confirm intended modules before enabling.
        import hashlib

        if 'sessionuser' in d:
            output['message'] += 'Session user is ' + d['sessionuser'] + '. '
        else:
            output['message'] += 'No session user found. '
            d['sessionuser'] = ''

        try:
            condition = "name='" + d['sessionuser'] + "'"
            userdata = dblib.readonedbrow(inventorylib.sysvars.dirs.dbs.safe,
                                          'users',
                                          condition=condition)[0]
        except:
            output['message'] += 'error in user sqlite query for session user "' + d['sessionuser'] + '". '
            userdata = {'accesskeywords': 'demo', 'admin': False}
        else:
            # Get session hpass to verify credentials
            hashedpassword = d['sessionhpass']
            hname = hashlib.new('sha1')
            hname.update(d['sessionuser'])
            hashedname = hname.hexdigest()
            hentry = hashlib.new('md5')
            hentry.update(hashedname + netstats.salt + hashedpassword)
            hashedentry = hentry.hexdigest()
            if hashedentry == userdata['password']:
                # successful auth
                output['message'] += 'Password verified. '
                authverified = True
    else:
        output['message'] += 'WSGI authorization not enabled. '

    if authverified or not wsgiauth:
        try:
            action = d['action']
        except KeyError:
            output['message'] = 'no action in request'
        else:
            # Stock functions
            if action == 'getnetstatsdata':
                output['message'] += 'getting netstats keyword found. '
                import datetime
                the_day = datetime.date.today()
                if 'day' in d:
                    # We will pass in a day in format yyyy-mm-dd or keywords, like 'today'
                    import datetime, time
                    today = datetime.date.today()
                    if d['day'] == 'today':
                        pass
                    elif d['day'] == 'prev_day':
                        the_day = today - datetime.timedelta(days=1)
                    elif d['day'] == 'prev_2_day':
                        the_day = today - datetime.timedelta(days=2)
                    elif d['day'] == 'prev_3_day':
                        the_day = today - datetime.timedelta(days=3)
                    elif d['day'] == 'prev_4_day':
                        the_day = today - datetime.timedelta(days=4)

                # Today's data lives in the live database; previous days are
                # read from dated archive files alongside it.
                if the_day == datetime.date.today():
                    db_path = ii_netstats.netstats_dbpath
                else:
                    db_path_root = ii_netstats.netstats_dbpath.split('.db')[0]
                    date_string = '{}-{:02d}-{:02d}'.format(
                        the_day.year, the_day.month, the_day.day)
                    db_path = '{}_{}.db'.format(db_path_root, date_string)

                print('** DBPATH: {} '.format(db_path))
                netstats_db = dblib.sqliteDatabase(db_path)

                output['message'] += 'db path {} chosen. '.format(db_path)

                wired_history = netstats_db.read_table('wired')
                if 'dataperiod' in d:
                    output['message'] += 'Limiting returned time to ' + d['dataperiod'] + '. '
                    # default 6hrs
                    period = 6 * 3600
                    if d['dataperiod'] == '6_hrs':
                        period = 6 * 3600
                    elif d['dataperiod'] == '12_hrs':
                        period = 12 * 3600
                    elif d['dataperiod'] == '24_hrs':
                        period = 24 * 3600
                    elif d['dataperiod'] == '48_hrs':
                        period = 48 * 3600
                    elif d['dataperiod'] == '7_days':
                        period = 7 * 24 * 3600

                    unmodified_length = len(wired_history)

                    # return only data within last period
                    from operator import itemgetter
                    from iiutilities.datalib import timestringtoseconds
                    new_list = sorted(wired_history,
                                      key=itemgetter('time'),
                                      reverse=True)

                    output['message'] += 'Most recent data point: ' + new_list[0]['time'] + '. '
                    new_history = []
                    most_recent_time_in_seconds = timestringtoseconds(
                        new_list[0]['time'])
                    output['message'] += 'Most recent time in seconds ' + str(
                        most_recent_time_in_seconds) + '. '

                    output['message'] += 'Oldest time in seconds ' + str(
                        timestringtoseconds(new_list[-1]['time']))
                    output['message'] += 'Span of ' + str(
                        most_recent_time_in_seconds -
                        timestringtoseconds(new_list[-1]['time'])) + '. '
                    output['message'] += 'Period of ' + str(period) + '. '

                    for item in new_list:
                        if most_recent_time_in_seconds - timestringtoseconds(
                                item['time']) < period:
                            new_history.append(item)
                    output['data'] = new_history
                    # BUGFIX: report the trimmed list's length. This used to
                    # measure the untrimmed wired_history, so the message
                    # always showed identical before/after counts.
                    modified_length = len(new_history)

                    output['message'] += 'Shortened data from ' + str(
                        unmodified_length) + ' to ' + str(modified_length)
                else:
                    output['data'] = wired_history
                try:
                    # Best-effort external IP lookup; 'unknown' on any
                    # failure (including ImportError of Python 2-only urllib2).
                    from urllib2 import urlopen
                    my_ip = urlopen('http://ip.42.pl/raw').read()
                except:
                    my_ip = 'unknown'
                output['host'] = my_ip
            elif action == 'gettraffichistodata':
                output['message'] += 'gettraffic histo keyword found. '
                access_db = dblib.sqliteDatabase(ii_netstats.access_dbpath)

                access_db_tablenames = access_db.get_table_names()

                # Fetch every remote-histogram table plus metadata tables.
                tables_to_fetch = []
                for tablename in access_db_tablenames:
                    if tablename.find('remotehisto') >= 0 or tablename.find(
                            'metadata') >= 0:
                        tables_to_fetch.append(tablename)
                output['data'] = {}
                for table_to_fetch in tables_to_fetch:
                    output['data'][table_to_fetch] = access_db.read_table(
                        table_to_fetch)

            elif action == 'postwirelessdata':
                output['message'] += 'postwirelessdata keyword found. '

                # nothing here yet

    # Etag handling: hash the payload so a client supplying a matching etag
    # gets 304 Not Modified with an empty data field.
    if 'data' in output:
        if output['data']:
            # NOTE(review): hashlib.md5 requires bytes on Python 3; passing
            # str only works on Python 2 -- confirm target interpreter.
            newetag = hashlib.md5(str(output['data'])).hexdigest()
            if 'etag' in d:
                if newetag == d['etag']:
                    status = '304 Not Modified'
                    output['data'] = ''
        else:
            newetag = ''
    else:
        newetag = ''

    if 'datasize' in d:
        output['datasize'] = sys.getsizeof(output['data'])

    output['etag'] = newetag
    try:
        foutput = json.dumps(output, indent=1)
    except:
        import csv
        w = csv.writer(
            open("/usr/lib/iicontrollibs/inventory/dumperr.log", "w"))
        for key, val in output.items():
            w.writerow([key, val])
        # BUGFIX: foutput was left unbound on serialization failure, so the
        # return below raised NameError; provide a minimal valid response.
        foutput = json.dumps({'message': 'error serializing output'})

    response_headers = [('Content-type', 'application/json')]
    response_headers.append(('Etag', newetag))
    start_response(status, response_headers)

    return [foutput]
示例#35
0
def split_and_trim_db_by_date(logpath, **kwargs):
    """Split a sqlite log database into per-date archive databases and trim the original.

    Rows are grouped by date via ``split_time_db`` and copied into sibling
    databases named ``<root>_<YYYY-MM-DD><ext>``. Rows belonging to every date
    except the most recent one are then deleted from the original database, so
    the live log keeps only the current division's data.

    Args:
        logpath: Path to the sqlite database to split.
        **kwargs: Overrides for settings passed through to ``split_time_db``:
            'division' (default 'day'), 'timekey' (default 'time'),
            'remove' (default 'true').

    Returns:
        dict: {'modified_dbs': [list of archive db paths written to]}
    """

    from iiutilities import dblib
    from iiutilities.datalib import gettimestring
    import os
    import time

    settings = {
        'division': 'day',
        'timekey': 'time',
        'remove': 'true'
    }
    settings.update(kwargs)

    # Keys of data_by_date are time tuples (consumed by time.mktime below);
    # values map tablename -> {'schema': ..., 'data': [rows]}.
    data_by_date = split_time_db(logpath, **settings)

    # Most recent date first; its rows stay in the live database.
    dates = sorted(data_by_date, reverse=True)
    if dates:
        current_date = dates[0]
    else:
        # No data yet: fall back to "now" so the prune loop below is a no-op.
        current_date = time.gmtime()

    log_db = dblib.sqliteDatabase(logpath)

    modified_dbs = []

    # splitext handles dotted directory names and multi-dot filenames correctly,
    # unlike naive str.split('.') indexing.
    root, extension = os.path.splitext(logpath)

    for date in data_by_date:

        # Keep only the date portion of the timestamp string, dropping time-of-day.
        timestring = gettimestring(time.mktime(date)).split(' ')[0]

        new_db_path = root + '_' + timestring + extension
        modified_dbs.append(new_db_path)
        new_db = dblib.sqliteDatabase(new_db_path)

        # Create any tables that don't already exist in the archive db.
        new_db.tablenames = new_db.get_table_names()
        for tablename in data_by_date[date]:
            if tablename not in new_db.tablenames:
                new_db.create_table(tablename, data_by_date[date][tablename]['schema'], queue=True)

            new_db.insert(tablename, data_by_date[date][tablename]['data'], queue=True)

        new_db.execute_queue()

        # Remove migrated rows from the live db, but never the current date's.
        if date != current_date:
            for tablename in data_by_date[date]:
                for datum in data_by_date[date][tablename]['data']:
                    # NOTE(review): condition is built by string concatenation from
                    # row data; safe only if timestamps never contain quotes.
                    # Parameterized deletes would be preferable if dblib supports them.
                    log_db.delete(tablename, '"' + settings['timekey'] + '"=' + "'" + datum[settings['timekey']] + "'", queue=True)

    log_db.execute_queue()

    return {'modified_dbs': modified_dbs}