'''
To test the execution of kpi calculations defined for the entity type locally
use this function.

A local test will not update the server job log or write kpi data to the AS data
lake. Instead kpi data is written to the local filesystem in csv form.
'''

# Run the full kpi pipeline locally; results are written as csv files.
entity.exec_local_pipeline()

'''
view entity data
'''
# Entity metadata could supply the schema and table name automatically:
#
# TODO, for some reason this returns data but also throws exception.
'''
meta = db.get_entity_type(entity_name)
db_schema = meta['schemaName']
table_name = meta['metricTableName']
'''
# Until the TODO above is resolved, fall back to configured values with
# hard-coded defaults.
db_schema = settings.DB_SCHEMA or "BLUADMIN"
table_name = settings.TABLE_NAME or "kalbuildingstest"

# Preview the entity's time series table.
df = db.read_table(table_name=table_name, schema=db_schema)
print(df.head())


# Register the preload function with the server and report the outcome.
registration_result = db.register_functions([MaximoAssetHTTP])
print('Done registering  entity %s ' % registration_result)
# Example #2
                                # NOTE(review): truncated fragment — the opening of this
                                # call (and its preceding statements) is missing from this file.
                                '_db_schema': db_schema
                            })

# Dimension columns describing each asset; all are free-text, max 50 chars.
dimension_columns = [
    Column(col_name, String(50))
    for col_name in ('business', 'site', 'equipment_type',
                     'train', 'service', 'asset_id')
]

# Register the entity type (don't raise if it already exists server-side)
# and its custom preload function.
entity.register(raise_error=False)
db.register_functions([TurbineHTTPPreload])

# Sanity check: read back the freshly written time series data.
df = db.read_table(schema=db_schema, table_name=entity_name)
print(df.head())

meta = db.get_entity_type(entityType)

# Run over the last 10 days, stopping 1 day before now.
start_override = dt.datetime.utcnow() - dt.timedelta(days=10)
end_override = dt.datetime.utcnow() - dt.timedelta(days=1)  # .strftime('%Y-%m-%d %H:%M:%S'),
jobsettings = {
    '_production_mode': False,
    '_start_ts_override': start_override,
    '_end_ts_override': end_override,
    '_db_schema': 'BLUADMIN',
    'save_trace_to_file': True
}

logging.info('Instantiated create compressor job')
# Example #3
['firmware', 'firmware_version']: ['1.0', '1.12', '1.13', '2.1']
['manufacturer']: ['Rentech', 'GHI Industries']
['zone']: ['27A', '27B', '27C']
['status', 'status_code']: ['inactive', 'active']
['operator', 'operator_id', 'person', 'employee']: ['Fred', 'Joe', 'Mary', 'Steve', 'Henry', 'Jane', 'Hillary', 'Justin', 'Rod']

3.3 Other dimension names
Any dimension name other than those listed above will generate random values
'''
# Rebuild the dimension table from scratch so generated data starts clean.
db.drop_table(dim_table_name, schema=schema)
# Pass schema as a plain keyword argument instead of the unidiomatic
# **{'schema': schema} dict splat.
entity_type.make_dimension(dim_table_name,
                           Column('company', String(50)),
                           Column('status', String(50)),
                           Column('operator', String(50)),
                           schema=schema)

entity_type.register()
'''
To test the execution of kpi calculations defined for the entity type locally
use this function.

A local test will not update the server job log or write kpi data to the AS data
lake. Instead kpi data is written to the local filesystem in csv form.
'''
# entity_type.exec_local_pipeline(_production_mode=False)

# Generate dimension values for every entity id present in the time series table.
ef = db.read_table(entity_type.logical_name,
                   schema=schema,
                   columns=[entity_type._entity_id])
ids = set(ef[entity_type._entity_id].unique())
entity_type.generate_dimension_data(entities=ids)
import datetime as dt
import json
import os
import pandas as pd
from sqlalchemy import Column, Integer, String, Float, DateTime, Boolean, func
from iotfunctions.preprocessor import BaseTransformer
from iotfunctions.bif import IoTExpression
from iotfunctions.metadata import EntityType, make_sample_entity
from iotfunctions.db import Database

# replace with a credentials dictionary or provide a credentials file
with open('credentials.json', encoding='utf-8') as F:
    credentials = json.load(F)  # idiomatic: stream-parse instead of read()+loads()

# create a sample entity to work with
db_schema = None  # set if you are not using the default schema
db = Database(credentials=credentials)
entity = make_sample_entity(db=db, schema=db_schema)

# examine the sample entity
df = db.read_table(entity.name, schema=db_schema)
df.head(1).transpose()

# configure an expression function: a derived metric computed from raw metrics
expression = 'df["throttle"]/df["grade"]'
fn = IoTExpression(expression=expression, output_name='expression_out')
df = entity.exec_pipeline(fn)
df.head(1).transpose()


# Example #5
class IotEntity(object):
    '''
    Read entity time series and dimension data from the AS data lake.

    Parameters:
    -----------
    entity_type_name: str
        Entity type name; upper-cased to derive the time series table name
        ("IOT_<NAME>") and the dimension table name ("DM_<NAME>")
    entity_name: str
        Name of an individual entity
    '''

    def __init__(self, entity_type_name=None, entity_name=None):
        # replace with valid table and column names
        self.entity_type_name = entity_type_name
        self.entity_name = entity_name
        self.db_schema = "public"  # only required if you are not using the default
        # NOTE(review): the None default is unusable here (None.upper() raises
        # AttributeError) — confirm callers always supply a name.
        self.table_name = entity_type_name.upper(
        )  # change to a valid entity time series table name
        self.dim_table_name = "DM_" + self.table_name  # change to a entity dimension table name
        self.timestamp = 'evt_timestamp'
        self.credentials = settings.CREDENTIALS
        self.db = Database(credentials=self.credentials)

    def query(self,
              metrics=None,
              timestamp='evt_timestamp',
              agg_dict=None,
              to_csv=True):
        '''
        Retrieve aggregated data items using standard aggregation functions.

        Parameters:
        -----------
        metrics: list of strs
            Data item names (currently unused; kept for interface stability)
        timestamp: str
            Name of the timestamp column used for aggregation
        agg_dict: dict
            Map of data item name to list of aggregation function names,
            e.g. {'temperature': ['mean']}
        to_csv: bool
            Also write the result to csv
        '''
        logging.info("Query %s and output format is %s" % (agg_dict, to_csv))
        # Fixed: the original passed the undefined name `agg` (NameError) and
        # ignored the `timestamp` parameter; use the arguments actually passed.
        df = self.db.read_agg(table_name=self.table_name,
                              schema=self.db_schema,
                              timestamp=timestamp,
                              agg_dict=agg_dict,
                              to_csv=to_csv)
        return df

    # Works
    def query_entity_data(self, columns=None, start_ts=None, end_ts=None):
        '''
        Read the entity time series table and return the chosen metrics for
        the selected time window, serialized as json.

        Parameters
        -----------
        columns: list of strs
            Projection list; RCV_TIMESTAMP_UTC is always included
        start_ts: datetime
            Retrieve data from this date
        end_ts: datetime
            Retrieve data up until date

        Returns
        -------
        str: the result dataframe serialized with DataFrame.to_json()
        '''
        logging.info('table_name %s' % self.table_name)
        logging.info('db_schema %s' % self.db_schema)
        # Copy before appending so the caller's list is not mutated, and
        # tolerate the documented default of columns=None.
        columns = list(columns) if columns else []
        columns.append("RCV_TIMESTAMP_UTC")
        logging.info('columns %s' % columns)
        df = self.db.read_table(table_name="IOT_" + self.table_name,
                                schema=self.db_schema,
                                parse_dates=None,
                                columns=columns,
                                timestamp_col='RCV_TIMESTAMP_UTC',
                                start_ts=start_ts,
                                end_ts=end_ts)
        logging.info(df)
        return df.to_json()
To also register the functions and constants associated with the entity type, specify
'publish_kpis' = True.
'''
entity.register(raise_error=False)
# When creating a custom preload function you can register it by uncommenting the following lines
# You must unregister_functions if you change the method signature or required inputs.
db.unregister_functions(['Issue455HTTPPreload'])
db.register_functions([Issue455HTTPPreload])

'''
Run the kpi calculations defined for the entity type locally.

A local run does not update the server job log or write kpi data to the AS
data lake; instead kpi data is written to the local filesystem in csv form.
'''
entity.exec_local_pipeline(_production_mode=False)

'''
view entity data
'''
print("Read Table of new entity")
print(db.read_table(table_name=entity_name, schema=db_schema).head())

print("Read Table of new dimension")
print(entity_dimension)
print(db.read_table(table_name=entity_dimension, schema=db_schema).head())

print("Done registering  entity")