Example #1
def update_cache():
    start_time = time.time()

    cur.execute('''
        SELECT
            Cache.id, Types.type, Cache.instance, Properties.property,
            FacilityTypes.facility_type, Addresses.facility, Addresses.address,
            Cache.update_timestamp, Cache.access_timestamp
        FROM Cache
            LEFT JOIN Types ON Cache.type_id=Types.id
            LEFT JOIN Properties ON Cache.property_id=Properties.id
            LEFT JOIN Addresses ON Cache.address_id=Addresses.id
            LEFT JOIN FacilityTypes ON Addresses.facility_type_id=FacilityTypes.id
    ''')

    rows = cur.fetchall()

    n_deleted = 0
    n_updated = 0

    for row in rows:

        # Pause between entries (interval set by the -s command-line argument)
        time.sleep(args.sleep_interval)

        # Calculate time intervals
        now = datetime.datetime.now()
        stale = now - datetime.datetime.fromtimestamp(row[7])
        idle = now - datetime.datetime.fromtimestamp(row[8])

        if idle > idle_max:

            # Entry has not been used; delete it
            delete_entry(row[0])

            n_deleted += 1

        elif stale > stale_max:

            # Entry is stale; post request to update it
            post_request(row[1], row[2], row[3], row[4], row[5], row[6])

            n_updated += 1

    if n_deleted or n_updated:
        db_util.log(
            logpath,
            'Deleted ' + str(n_deleted) + ' and updated ' + str(n_updated) +
            ' of ' + str(len(rows)) + ' entries.  Elapsed time: ' +
            str(datetime.timedelta(seconds=int(time.time() - start_time))))
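
The delete_entry and post_request helpers invoked above are defined elsewhere in this project. A minimal sketch of what they might look like, assuming the same global sqlite3 connection/cursor and a hypothetical HTTP endpoint on the host and port supplied via -h and -p:

# Hypothetical sketches only; the project's actual helpers are not shown in this excerpt.
import requests

def delete_entry(cache_id):
    # Remove a cache row by its primary key (uses the same global conn/cur as update_cache)
    cur.execute('DELETE FROM Cache WHERE id=?;', (cache_id,))
    conn.commit()

def post_request(type_name, instance, prop, facility_type, facility, address):
    # Ask the web service to refresh this value; the /refresh path is an assumption
    requests.post(
        'http://{0}:{1}/refresh'.format(args.hostname, args.port),
        json={
            'type': type_name,
            'instance': instance,
            'property': prop,
            'facility_type': facility_type,
            'facility': facility,
            'address': address,
        })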
Example #2
def report_baseline( view_id, column_id, row_id, row_label, value, units, facility, oid ):

    # Retrieve sorted, distinct timestamps from database for specific attribute
    cur.execute('''
        SELECT DISTINCT
            timestamp
        FROM Baselines
            LEFT JOIN Timestamps ON Baselines.timestamp_id=Timestamps.id
        WHERE view_id=? AND column_id=? AND row_id=?
        ORDER BY timestamp ASC;
    ''', ( view_id, column_id, row_id ) )
    timestamp_rows = cur.fetchall()

    # Load timestamps into dataframe
    df = pd.DataFrame( timestamp_rows, columns=['timestamp'] )

    # Extract dates (timestamps are stored in seconds; convert to milliseconds first)
    df['timestamp'] = df['timestamp'] * 1000
    df['datetime'] = pd.to_datetime( df['timestamp'], unit='ms' )
    df['date'] = pd.DatetimeIndex( df['datetime'] ).normalize()

    # Calculate statistics
    stats = ''
    stats += '\nSaved: {0},{1},{2}'.format( row_label, value, units )
    stats += '\nDates found for: {0}, {1}'.format( facility, oid )
    stats += '\n First: ' + str( df['date'].iloc[0].date() )
    stats += '\n Last: ' + str( df['date'].iloc[len(df)-1].date() )
    stats += '\n Total: ' + str( len( df ) )
    stats += '\nGaps:'

    # Look for gaps
    df['diff'] = df['date'].diff()
    df = df.iloc[1:]
    df = df[ df['diff'].ne( pd.Timedelta(days=1) ) ]

    if len( df ):
        df = df[ [ 'date', 'diff' ] ]
        df = df.rename( index=str, columns={ 'date': 'Before', 'diff': 'Gap'  } )
        stats += '\n ' + df.to_string( index=False )
    else:
        stats += '\n No gaps found.'

    stats += '\n---'

    # Report statistics
    db_util.log( logpath, stats )

    return
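
The gap detection above boils down to differencing consecutive dates and keeping the rows whose difference is not exactly one day. A self-contained illustration on made-up dates (not data from the database):

# Standalone illustration of the gap-detection step, using invented dates
import pandas as pd

dates = pd.to_datetime( ['2020-01-01', '2020-01-02', '2020-01-05', '2020-01-06'] )
df = pd.DataFrame( {'date': dates} )
df['diff'] = df['date'].diff()
df = df.iloc[1:]
df = df[ df['diff'].ne( pd.Timedelta(days=1) ) ]
print( df )   # flags the 2020-01-05 row, whose difference from the previous date is 3 days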
Example #3
def report_missing_dates():

    # Retrieve sorted, distinct timestamps from database
    cur.execute('''
        SELECT DISTINCT
            timestamp
        FROM Baselines
            LEFT JOIN Timestamps ON Baselines.timestamp_id=Timestamps.id
        ORDER BY timestamp ASC;
    ''')
    timestamp_rows = cur.fetchall()

    # Load timestamps into dataframe
    df = pd.DataFrame( timestamp_rows, columns=['timestamp'] )

    # Extract dates (timestamps are stored in seconds; convert to milliseconds first)
    df['timestamp'] = df['timestamp'] * 1000
    df['datetime'] = pd.to_datetime( df['timestamp'], unit='ms' )
    df['date'] = pd.DatetimeIndex( df['datetime'] ).normalize()

    # Calculate statistics
    stats = '\n'
    stats += '\nDates found in database'
    stats += '\n First: ' + str( df['date'].iloc[0].date() )
    stats += '\n Last: ' + str( df['date'].iloc[len(df)-1].date() )
    stats += '\n Total: ' + str( len( df ) )
    stats += '\nGaps:'

    # Look for gaps
    df['diff'] = df['date'].diff()
    df = df.iloc[1:]
    df = df[ df['diff'].ne( pd.Timedelta(days=1) ) ]

    if len( df ):
        df = df[ [ 'date', 'diff' ] ]
        df = df.rename( index=str, columns={ 'date': 'Before', 'diff': 'Gap'  } )
        stats += '\n ' + df.to_string( index=False )
    else:
        stats += '\n No gaps found.'

    # Report statistics
    db_util.log( logpath, stats )

    return
Example #4
def save_baselines( baselines_row ):

    # Read spreadsheet into a dataframe.
    # Each row contains the following:
    #   - Facility
    #   - Instance ID of power meter
    #   - Instance ID of energy meter
    csv_filename = baselines_row[0]
    df = pd.read_csv( '../csv/' + csv_filename + '.csv', na_filter=False, comment='#' )

    # Log the name of the CSV file being processed
    db_util.log( logpath, '---' )
    db_util.log( logpath, 'CSV file: ' + csv_filename )
    db_util.log( logpath, '---' )

    # Column associated with this CSV file (second field of the baselines row)
    column_name = baselines_row[1]

    # Iterate over the rows of the dataframe, getting values for each row
    for index, oid_row in df.iterrows():
        save_baseline( csv_filename, column_name, oid_row )
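
For reference, a hypothetical example of one of these per-facility CSV files. The real files and their column headings are not shown in this excerpt; only the row contents described in the comment above (facility name plus power- and energy-meter instance IDs) are known, so the header names below are invented:

Facility,Power Meter,Energy Meter
Building A,3001234,3005678
Building B,3002345,3006789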
Example #5
    # Find out how many occurrences of this script are running
    dups = ps.count(__file__)

    # If no other occurrences of this script are running, proceed to update cache
    if dups <= 1:

        db = '../../bg_db/cache.sqlite'

        # If database exists...
        if os.path.exists(db):

            # Open database
            conn = sqlite3.connect(db)
            cur = conn.cursor()

            # Get command line arguments
            parser = argparse.ArgumentParser(
                description='Update cache of recently requested values',
                add_help=False)
            parser.add_argument('-h', dest='hostname')
            parser.add_argument('-p', dest='port')
            parser.add_argument('-s', dest='sleep_interval', type=int)
            args = parser.parse_args()

            logpath, notused = db_util.new_logs('../../bg_db/update_cache')
            db_util.log(logpath, os.path.basename(__file__) + ' starting')

            # Update cache continuously
            while True:
                update_cache()
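
The ps.count call above is a project-local helper that counts how many copies of the script are running. A rough sketch of how such a check could be written with the psutil package (an assumption; not the project's actual implementation):

# Hypothetical stand-in for ps.count: counts processes whose command line mentions the script
import psutil

def count(script_path):
    n = 0
    for proc in psutil.process_iter(['cmdline']):
        cmdline = proc.info['cmdline'] or []
        if any(script_path in arg for arg in cmdline):
            n += 1
    return n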
Example #6
    parser = argparse.ArgumentParser( description='Save baseline values in database', add_help=False )
    parser.add_argument( '-h', dest='hostname' )
    parser.add_argument( '-p', dest='port' )
    parser.add_argument( '-r', dest='remove', action='store_true' )
    args = parser.parse_args()

    logpath, savepath = db_util.new_logs( '../../bgt_db/save_baselines' )

    try:

        # Open the database
        conn, cur = baselines_db.open_db( remove=args.remove )

        # Save timestamp of this operation
        timestamp_id, timestamp_text = baselines_db.save_timestamp()
        db_util.log( logpath, 'Saving new baselines on ' + timestamp_text )

        # Update the baselines
        with open( 'baselines.csv', newline='' ) as csvfile:

            reader = csv.reader( csvfile )

            for baselines_row in reader:
                save_baselines( baselines_row )

        # Commit changes
        baselines_db.commit()

        # Report missing dates, from the first database entry until today
        report_missing_dates()