def store(self, column_names, fields, raw_data_rows):
    """Store raw trend data rows, grouped per timestamp, into the trend store.

    :param column_names: names of the value columns in each row.
    :param fields: NOTE(review): not referenced anywhere in this body —
        possibly kept for interface compatibility; confirm with callers.
    :param raw_data_rows: iterable of ``(dn, timestamp, values)`` tuples.

    Side effects: may create a TrendStore (and commit) on first use for a
    datasource/entitytype/granularity combination, then writes each
    timestamp group via ``store_raw(...).run(self.conn)``.
    """
    # Rows are (dn, timestamp, values); index 1 is the timestamp used
    # to partition the input into per-timestamp packages.
    get_timestamp = operator.itemgetter(1)

    for timestamp, grouped_rows in grouped_by(raw_data_rows, get_timestamp):
        # Strip the timestamp: the package carries it once, separately.
        rows = [
            (dn, values)
            for dn, _, values in grouped_rows
        ]

        # All rows in a group are assumed to belong to the same entity
        # type; the first dn is representative — TODO confirm upstream
        # guarantees homogeneous groups.
        entity_ref = EntityDnRef(rows[0][0])

        with closing(self.conn.cursor()) as cursor:
            datasource = DataSource.from_name(cursor, self.datasource)
            entitytype = entity_ref.get_entitytype(cursor)

            trendstore = TrendStore.get(
                cursor, datasource, entitytype, self.granularity
            )

            # Lazily create the trend store on first sight of this
            # datasource/entitytype/granularity combination.
            if not trendstore:
                partition_size = 86400
                trendstore = TrendStore(datasource, entitytype,
                    self.granularity, partition_size, "table").create(cursor)

        # Commit so a freshly created trend store is visible before
        # data is written to it.
        self.conn.commit()

        # Normalize to UTC, then apply the configured offset before
        # formatting — presumably to align timestamps to the target
        # granularity; verify against self.offset's definition.
        utc_timestamp = timestamp.astimezone(pytz.utc)
        utc_timestamp_str = self.offset(utc_timestamp).strftime("%Y-%m-%dT%H:%M:%S")

        raw_datapackage = RawDataPackage(
            self.granularity, utc_timestamp_str, column_names, rows)

        trendstore.store_raw(raw_datapackage).run(self.conn)
def merge_packages(packages):
    """Merge packages sharing the same key into one package per key.

    :param packages: iterable of packages, groupable by
        ``RawDataPackage.get_key``.
    :return: list with one merged package (via ``package_group``) per key.
    """
    return [
        package_group(key, list(members))
        for key, members in grouped_by(packages, RawDataPackage.get_key)
    ]
def records_to_packages(records):
    """Convert records into packages, one package per record key.

    Records whose ``get_key()`` returns ``None`` are dropped.

    :param records: iterable of objects exposing ``get_key()``.
    :return: iterator of packages built by ``package`` from each key group.
    """
    # Pair each record with its key so we can filter and group on it.
    keyed = ((record.get_key(), record) for record in records)

    # Discard records that have no key.
    with_key = (pair for pair in keyed if pair[0] is not None)

    return map(expand_args(package), grouped_by(with_key, first))
def rows_to_packages(self, column_names, rows):
    """Yield one ``DefaultPackage`` per distinct timestamp in *rows*.

    :param column_names: names of the value columns in each row.
    :param rows: iterable of ``(dn, timestamp, values)`` tuples.
    """
    # Rows are (dn, timestamp, values); group on the timestamp at index 1.
    by_timestamp = operator.itemgetter(1)

    for ts, group in grouped_by(rows, by_timestamp):
        # Drop the per-row timestamp; the package records it once.
        entries = [(dn, vals) for dn, _, vals in group]

        yield DefaultPackage(
            self.granularity, self.offset(ts), column_names, entries
        )