def store(self, column_names, fields, raw_data_rows):
    """
    Store raw rows grouped per timestamp into the matching trendstore.

    :param column_names: names for the value columns in each row.
    :param fields: unused here; presumably kept for interface
        compatibility with other store implementations — TODO confirm.
    :param raw_data_rows: iterable of (dn, timestamp, values) tuples.
    """
    # Rows are keyed by their second element: the timestamp.
    get_timestamp = operator.itemgetter(1)

    for timestamp, grouped_rows in grouped_by(raw_data_rows, get_timestamp):
        # Drop the timestamp from each row; it is carried by the group key.
        rows = [
            (dn, values)
            for dn, _, values in grouped_rows
        ]

        # Derive the entity type from the first row's distinguished name.
        # NOTE(review): assumes every group is non-empty and homogeneous
        # in entity type — TODO confirm against callers.
        entity_ref = EntityDnRef(rows[0][0])

        with closing(self.conn.cursor()) as cursor:
            datasource = DataSource.from_name(cursor, self.datasource)
            entitytype = entity_ref.get_entitytype(cursor)

            trendstore = TrendStore.get(
                cursor, datasource, entitytype, self.granularity
            )

            if not trendstore:
                # No trendstore yet for this combination; create a
                # table-backed one with a one-day (86400 s) partition size.
                partition_size = 86400

                trendstore = TrendStore(
                    datasource, entitytype, self.granularity,
                    partition_size, "table").create(cursor)

        # Persist any trendstore creation before storing data.
        self.conn.commit()

        utc_timestamp = timestamp.astimezone(pytz.utc)

        # self.offset is applied before formatting; presumably a
        # configurable timestamp shift — TODO confirm its contract.
        utc_timestamp_str = self.offset(utc_timestamp).strftime(
            "%Y-%m-%dT%H:%M:%S")

        raw_datapackage = RawDataPackage(
            self.granularity, utc_timestamp_str, column_names, rows)

        trendstore.store_raw(raw_datapackage).run(self.conn)
def retrieve_related_trend(conn, database_srid, region, region_srid,
        datasource, entitytype, attribute_name, granularity_str, timestamp,
        limit=None):
    """
    Retrieve one trend attribute for entities related to cells whose site
    lies inside a bounding box.

    :param conn: database connection.
    :param database_srid: SRID used by the gis.site position column.
    :param region: mapping with "left", "bottom", "right" and "top" keys.
    :param region_srid: SRID in which the region coordinates are expressed.
    :param attribute_name: trend column to select from the partition table.
    :param limit: unused here — TODO confirm whether it should cap results.
    :return: dict mapping entity_id -> {related_entity_id: value}.
    """
    granularity = create_granularity(granularity_str)

    with closing(conn.cursor()) as cursor:
        trendstore = TrendStore.get(cursor, datasource, entitytype,
                granularity)

        # Resolve the concrete partition table for this timestamp.
        partition = trendstore.partition(timestamp)
        table = partition.table()
        full_base_tbl_name = table.render()

    relation_name = get_relation_name(conn, "Cell", entitytype.name)
    relation_cell_site_name = get_relation_name(conn, "Cell", "Site")

    # Transform the requested region into the database's SRID so it can be
    # compared against site positions with the && (bbox overlap) operator.
    bbox2d = transform_srid(
        set_srid(make_box_2d(region), region_srid), database_srid)

    # Identifiers (table/column names) are interpolated with format();
    # only the timestamp travels as a bound query parameter.
    query = (
        "SELECT r.source_id, r.target_id, base_table.\"{0}\" "
        "FROM {1} base_table "
        "JOIN relation.\"{2}\" r ON r.target_id = base_table.entity_id "
        "JOIN relation.\"{3}\" site_rel on site_rel.source_id = r.source_id "
        "JOIN gis.site site ON site.entity_id = site_rel.target_id "
        "AND site.position && {4} "
        "WHERE base_table.\"timestamp\" = %(timestamp)s").format(
            attribute_name, full_base_tbl_name, relation_name,
            relation_cell_site_name, bbox2d)

    # The region keys are passed along as well; psycopg2 ignores unused
    # dict parameters, so only %(timestamp)s is actually bound.
    args = {
        "left": region["left"],
        "bottom": region["bottom"],
        "right": region["right"],
        "top": region["top"],
        "timestamp": timestamp}

    with closing(conn.cursor()) as cursor:
        try:
            cursor.execute(query, args)
        except psycopg2.ProgrammingError:
            # Typically a missing partition table for this timestamp;
            # roll back the failed statement and return no rows.
            conn.rollback()
            rows = []
        else:
            rows = cursor.fetchall()

    # Pivot the flat rows into a nested entity -> related-entity mapping.
    result = {}

    for entity_id, related_entity_id, value in rows:
        if entity_id not in result:
            result[entity_id] = {}

        result[entity_id][related_entity_id] = value

    return result
def get_table_names_v4(cursor, datasources, granularity, entitytype, start, end):
    """
    Variant of get_table_names that supports both v3 and v4 trendstores.

    Accepts the granularity either as a Granularity object or as a plain
    integer number of seconds.
    """
    # Promote a bare integer granularity to a proper Granularity object.
    if isinstance(granularity, int):
        granularity = create_granularity(granularity)

    trendstores = []

    for datasource in datasources:
        trendstores.append(
            TrendStore.get(cursor, datasource, entitytype, granularity))

    return get_table_names(trendstores, start, end)
def test_get_trendstore(conn, dataset):
    """A freshly created trendstore should be retrievable via TrendStore.get."""
    expected_partition_size = 3600

    with closing(conn.cursor()) as cursor:
        TrendStore(
            dataset.datasource, dataset.entitytype, dataset.granularity,
            expected_partition_size, "table").create(cursor)

        retrieved = TrendStore.get(
            cursor, dataset.datasource, dataset.entitytype,
            dataset.granularity)

        eq_(retrieved.datasource.id, dataset.datasource.id)
        eq_(retrieved.partition_size, expected_partition_size)
        assert retrieved.id is not None, "trendstore.id is None"
        eq_(retrieved.version, 4)
def get_or_create_trendstore(cursor, datasource, entitytype, granularity):
    """
    Return the trendstore for the given combination, creating a
    table-backed one (with the configured partition size) if none exists.

    Raises Exception when no partition size is configured for the
    granularity.
    """
    existing = TrendStore.get(cursor, datasource, entitytype, granularity)

    if existing is not None:
        return existing

    partition_size = PARTITION_SIZES.get(granularity.name)

    if partition_size is None:
        raise Exception("unsupported granularity size '{}'".format(
            granularity.name))

    return TrendStore(
        datasource, entitytype, granularity, partition_size,
        "table").create(cursor)
def test_get(self):
    """Creating a trendstore and fetching it back should round-trip."""
    granularity = create_granularity("900")
    store_partition_size = 3600
    store_type = "table"  # renamed from 'type' to avoid shadowing the builtin

    with closing(self.conn.cursor()) as cursor:
        TrendStore(
            self.datasource, self.entitytype, granularity,
            store_partition_size, store_type).create(cursor)

        fetched = TrendStore.get(
            cursor, self.datasource, self.entitytype, granularity)

        eq_(fetched.datasource.id, self.datasource.id)
        eq_(fetched.partition_size, store_partition_size)
        assert fetched.id is not None, "trendstore.id is None"
        eq_(fetched.version, 4)
def load(self, cursor):
    """
    Load this test set: resolve datasource/entitytype, materialize the
    entities, and store a generated datapackage in the trendstore
    (creating the trendstore if needed).

    Sets self.datasource, self.entitytype, self.entities, self.trendstore
    and self.partition.
    """
    self.datasource = name_to_datasource(cursor, "testset1")
    self.entitytype = name_to_entitytype(cursor, self.entitytype_name)

    # list(...) so self.entities can be iterated more than once; a bare
    # map() would be a single-use iterator on Python 3 and would be
    # exhausted after generate_datapackage_a consumed it.
    self.entities = list(map(partial(dn_to_entity, cursor), self.dns))

    datapackage = generate_datapackage_a(
        self.granularity, self.timestamp, self.entities)

    self.trendstore = TrendStore.get(
        cursor, self.datasource, self.entitytype, self.granularity)

    if not self.trendstore:
        # Create a table-backed trendstore with one-day partitions.
        self.trendstore = TrendStore(
            self.datasource, self.entitytype, self.granularity,
            partition_size=86400, type="table").create(cursor)

    self.partition = store_datapackage(
        cursor, self.trendstore, datapackage, self.modified)
def test_create_view(conn):
    """
    Create a view-backed trendstore with a single hard-coded row and check
    that the v4 trend plugin retrieves exactly that one row.
    """
    testset_small = TestSet1Small()

    with closing(conn.cursor()) as cursor:
        testset_small.load(cursor)

        datasource = name_to_datasource(cursor, "view-test")

        trendstore = TrendStore.get(
            cursor, datasource, testset_small.entitytype,
            testset_small.granularity)

        if not trendstore:
            # View-backed trendstore (type="view") for this datasource.
            trendstore = TrendStore(
                datasource, testset_small.entitytype,
                testset_small.granularity, partition_size=86400,
                type="view").create(cursor)

        # The view yields exactly one fixed row: entity 999 at
        # 2013-08-26 13:00:00+02:00 with CntrA = 10.
        view_sql = (
            "SELECT "
            "999 AS entity_id, "
            "'2013-08-26 13:00:00+02:00'::timestamp with time zone AS timestamp, "
            '10 AS "CntrA"')

        view = View(trendstore, view_sql).define(cursor).create(cursor)

    conn.commit()

    plugin = get_plugin("trend")

    instance_v4 = plugin(conn, api_version=4)

    # Localize the naive datetime in the test set's datasource timezone;
    # the single-row view is queried for exactly that instant.
    start = testset_small.datasource.tzinfo.localize(
        datetime.datetime(2013, 8, 26, 13, 0, 0))
    end = start

    result = instance_v4.retrieve(trendstore, ["CntrA"], None, start, end)

    eq_(len(result), 1)
def get_trendstore(self, datasource, entitytype, granularity):
    """
    Look up the trendstore for the given datasource, entitytype and
    granularity using a short-lived cursor.
    """
    cursor = self.conn.cursor()

    with closing(cursor):
        trendstore = TrendStore.get(
            cursor, datasource, entitytype, granularity)

    return trendstore