class IotEntity(object):
    '''
    Query IoT entity time-series data through an Analytics Service Database
    connection, using table names derived from the entity type.

    Parameters:
    -----------
    entity_type_name: str
        Entity type name; upper-cased to derive the time series table name
        (the raw table additionally carries an "IOT_" prefix).
    entity_name: str (optional)
        Name of a specific entity instance.
    '''

    def __init__(self, entity_type_name=None, entity_name=None):
        # A valid entity type name is required to derive the table names;
        # fail fast with a clear message instead of AttributeError on .upper().
        if entity_type_name is None:
            raise ValueError('entity_type_name is required')
        self.entity_type_name = entity_type_name
        self.entity_name = entity_name
        self.db_schema = "public"  # only required if you are not using the default
        # Entity time series table name.
        self.table_name = entity_type_name.upper()
        # Entity dimension table name.
        self.dim_table_name = "DM_" + self.table_name
        self.timestamp = 'evt_timestamp'
        self.credentials = settings.CREDENTIALS
        self.db = Database(credentials=self.credentials)

    def query(self, metrics=None, timestamp='evt_timestamp', agg_dict=None, to_csv=True):
        '''
        Retrieve aggregated data for this entity type.

        Parameters
        -----------
        metrics: list of strs (optional)
            When agg_dict is not given, the mean of the first metric is
            aggregated by default.
        timestamp: str
            Name of the timestamp column used for aggregation.
        agg_dict: dict (optional)
            Mapping of column name -> list of aggregation function names.
        to_csv: bool
            Output format flag passed through to Database.read_agg.

        Returns
        -------
        Result of Database.read_agg (dataframe or CSV, per to_csv).
        '''
        # Default aggregation: mean of the first requested metric.
        # (Fixes NameError: the previous code passed an undefined local `agg`.)
        if agg_dict is None and metrics:
            agg_dict = {metrics[0]: ['mean']}
        logging.info("Query %s and output format is %s", agg_dict, to_csv)
        # Pass the caller's timestamp column through instead of hard-coding it.
        df = self.db.read_agg(table_name=self.table_name,
                              schema=self.db_schema,
                              timestamp=timestamp,
                              agg_dict=agg_dict,
                              to_csv=to_csv)
        return df

    def query_entity_data(self, columns=None, start_ts=None, end_ts=None):
        '''
        Read the raw entity table and return the chosen metrics for the
        selected time window, serialized as JSON.

        Parameters
        -----------
        columns: list of strs (optional)
            Projection list; 'RCV_TIMESTAMP_UTC' is always included.
        start_ts: datetime (optional)
            Retrieve data from this date.
        end_ts: datetime (optional)
            Retrieve data up until this date.

        Returns
        -------
        str
            JSON serialization of the result dataframe.
        '''
        logging.info('table_name %s', self.table_name)
        logging.info('db_schema %s', self.db_schema)
        # Copy the projection list so the caller's list is not mutated,
        # and tolerate the default columns=None (previously crashed on .append).
        projection = list(columns) if columns else []
        projection.append("RCV_TIMESTAMP_UTC")
        logging.info('columns %s', projection)
        df = self.db.read_table(table_name="IOT_" + self.table_name,
                                schema=self.db_schema,
                                parse_dates=None,
                                columns=projection,
                                timestamp_col='RCV_TIMESTAMP_UTC',
                                start_ts=start_ts,
                                end_ts=end_ts)
        logging.info(df)
        return df.to_json()
dim_table_name = 'test_packaging_hopper_june_19_dimension' # change to a entity dimenstion table name timestamp = 'evt_timestamp' with open('credentials_as_dev.json', encoding='utf-8') as F: credentials = json.loads(F.read()) db = Database(credentials=credentials) now = dt.datetime.utcnow() # Retrieve a single data item using a standard aggregation function agg = {'ambient_temp': ['mean']} df = db.read_agg(table_name=table_name, schema=db_schema, timestamp='evt_timestamp', agg_dict=agg, to_csv=True) print(df) # Calculate average for 30 days worth of data df = db.read_agg(table_name=table_name, schema=db_schema, timestamp='evt_timestamp', agg_dict=agg, to_csv=True, end_ts=now, period_type='days', period_count=30) print(df)