def store(self, column_names, fields, raw_data_rows):
    """Persist trend rows, writing one raw datapackage per timestamp group.

    Rows are triples (dn, timestamp, values); they are grouped by their
    timestamp and each group is stored against the matching trend store,
    which is created on first use.  `fields` is accepted for interface
    compatibility but not used here.
    """
    timestamp_key = operator.itemgetter(1)

    for timestamp, group in grouped_by(raw_data_rows, timestamp_key):
        # Drop the timestamp component; keep (dn, values) pairs only.
        rows = [(dn, values) for dn, _, values in group]

        entity_ref = EntityDnRef(rows[0][0])

        with closing(self.conn.cursor()) as cursor:
            datasource = DataSource.from_name(cursor, self.datasource)
            entitytype = entity_ref.get_entitytype(cursor)

            trendstore = TrendStore.get(
                cursor, datasource, entitytype, self.granularity
            )

            # First sighting of this datasource/entitytype/granularity
            # combination: create the backing trend store.
            if not trendstore:
                partition_size = 86400
                trendstore = TrendStore(
                    datasource, entitytype, self.granularity,
                    partition_size, "table"
                ).create(cursor)

        self.conn.commit()

        utc_timestamp = timestamp.astimezone(pytz.utc)
        utc_timestamp_str = self.offset(utc_timestamp).strftime(
            "%Y-%m-%dT%H:%M:%S"
        )

        raw_datapackage = RawDataPackage(
            self.granularity, utc_timestamp_str, column_names, rows
        )

        trendstore.store_raw(raw_datapackage).run(self.conn)
def store(self, column_names, fields, raw_data_rows):
    """Persist attribute rows as a single raw datapackage.

    The attribute set is deduced from the package itself; the target
    attribute store is resolved (or created) from the first row's DN.
    `fields` is accepted for interface compatibility but not used here.
    """
    rows = list(raw_data_rows)

    raw_datapackage = RawDataPackage(column_names, rows)
    attributes = raw_datapackage.deduce_attributes()

    # The first row's DN determines the entity type for the whole batch.
    entity_ref = EntityDnRef(rows[0][0])

    with closing(self.conn.cursor()) as cursor:
        datasource = DataSource.from_name(cursor, self.datasource)
        entitytype = entity_ref.get_entitytype(cursor)

        attributestore = AttributeStore.from_attributes(
            cursor, datasource, entitytype, attributes
        )

    self.conn.commit()

    attributestore.store_raw(raw_datapackage).run(self.conn)