class NormalSet(DataSet):
    """Test dataset fixture for view materialization.

    ``load`` builds a table trendstore with one smallint trend ("cntr"),
    stores a small data package into one partition, and defines a view
    trendstore that selects from the base table.  ``update_type`` then
    re-stores larger values after widening the column to bigint.
    """

    def load(self, cursor):
        # Resolve the dummy entity type and the source/view datasources.
        entitytype = name_to_entitytype(cursor, "materialize_dummytype001")
        self.datasource = name_to_datasource(cursor, "materialize_src_normal001")
        view_datasource = name_to_datasource(cursor, "vmaterialize_normal001")
        granularity = create_granularity('900')

        # Fixed timestamp, localized to the datasource's timezone.
        self.timestamp = self.datasource.tzinfo.localize(
            datetime.datetime(2013, 8, 26, 22, 0, 0))

        trend_names = ["cntr"]

        # Values that fit in a smallint column.
        rows_small = [
            (1234, (55,)),
            (1235, (56,))]
        self.small_datapackage = DataPackage(granularity, self.timestamp,
                                             trend_names, rows_small)

        # Values that overflow smallint; used by update_type() after the
        # column has been widened to bigint.
        rows_large = [
            (1234, (55243444334,)),
            (1235, (56242343242,))]
        self.large_datapackage = DataPackage(granularity, self.timestamp,
                                             trend_names, rows_large)

        self.trendstore = TrendStore(self.datasource, entitytype, granularity,
                                     86400, 'table')
        self.trendstore.create(cursor)
        partition = self.trendstore.partition(self.timestamp)
        partition.create(cursor)

        # check_columns_exist returns a callable that is applied to the cursor.
        self.trendstore.check_columns_exist(trend_names, ["smallint"])(cursor)

        modified = self.datasource.tzinfo.localize(datetime.datetime.now())

        store_copy_from(cursor, partition.table(), self.small_datapackage,
                        modified)
        mark_modified(cursor, partition.table(), self.timestamp, modified)

        # A zero-partition-size 'view' trendstore backs the materialized view.
        view_trendstore = TrendStore(view_datasource, entitytype, granularity,
                                     0, 'view').create(cursor)
        sql = (
            "SELECT "
            "entity_id, "
            "timestamp, "
            'cntr FROM {}').format(self.trendstore.base_table().render())
        self.view = View(view_trendstore, sql).define(cursor).create(cursor)

    def update_type(self, cursor):
        """Clear the stored timestamp, widen "cntr" to bigint and re-store."""
        # clear_timestamp returns a callable that is applied to the cursor.
        self.trendstore.clear_timestamp(self.timestamp)(cursor)
        names = ["cntr"]
        types = ["bigint"]
        self.trendstore.check_column_types(names, types)(cursor)
        partition = self.trendstore.partition(self.timestamp)
        modified = self.datasource.tzinfo.localize(datetime.datetime.now())
        store_copy_from(cursor, partition.table(), self.large_datapackage,
                        modified)
def test_store_copy_from_2(conn):
    """Copy a single row into a fresh partition and verify the row count
    and the maximum of the ``modified`` column."""
    tz = timezone("Europe/Amsterdam")
    ts = tz.localize(datetime(2013, 1, 2, 10, 45, 0))
    mod_time = tz.localize(datetime.now())
    gran = create_granularity("900")

    counter_names = ['CCR', 'CCRatts', 'Drops']
    counter_types = ['integer', 'smallint', 'smallint']
    rows = [
        (10023, ('0.9919', '2105', '17'))
    ]

    with closing(conn.cursor()) as cursor:
        src = name_to_datasource(cursor, "test-src010")
        etype = name_to_entitytype(cursor, "test-type002")

        tstore = TrendStore(src, etype, gran, 86400, "table").create(cursor)
        part = tstore.partition(ts)
        part.create(cursor)
        part.check_columns_exist(counter_names, counter_types)(cursor)
        table = part.table()

        store_copy_from(conn, SCHEMA, table.name, counter_names, ts,
                        mod_time, rows)
        conn.commit()

        eq_(row_count(cursor, table), 1)

        table.select(Call("max", Column("modified"))).execute(cursor)
        eq_(first(cursor.fetchone()), mod_time)
def get_partition(cursor, datasource_name, entitytype_name, granularity,
                  timestamp):
    """Resolve names to objects and return the partition for *timestamp*.

    *granularity* is given as a granularity specification (e.g. "900") and
    is converted via ``create_granularity``.
    """
    datasource = get_datasource(cursor, datasource_name)
    entitytype = get_entitytype(cursor, entitytype_name)
    return TrendStore(
        datasource, entitytype, create_granularity(granularity)
    ).partition(timestamp)
def test_store_alter_column(conn, dataset):
    """Storing a value of a different type into column "c" must alter the
    column (integer -> timestamp) and preserve the new value."""
    tstore = TrendStore(dataset.datasource, dataset.entitytype,
                        dataset.granularity, 86400, "table")

    with closing(conn.cursor()) as cursor:
        tstore.create(cursor)
    conn.commit()

    ts = dataset.datasource.tzinfo.localize(
        datetime.datetime(2013, 4, 25, 11, 00))

    # First store: plain integers.
    tstore.store(
        DataPackage(dataset.granularity, ts, ["a", "b", "c"],
                    [(1234, [1, 2, 3]),
                     (2345, [4, 5, 6])])
    ).run(conn)

    table = tstore.partition(ts).table()
    where = And(
        Eq(Column("entity_id"), 2345),
        Eq(Column("timestamp"), ts))
    query = table.select(Column("c")).where_(where)

    with closing(conn.cursor()) as cursor:
        query.execute(cursor)
        value, = cursor.fetchone()
    eq_(value, 6)

    # Second store: "c" now holds a timestamp string, forcing a column
    # type change.
    tstore.store(
        DataPackage(dataset.granularity, ts, ["a", "b", "c"],
                    [(2345, [4, 5, "2013-04-25 11:00:00"])])
    ).run(conn)

    with closing(conn.cursor()) as cursor:
        query.execute(cursor)
        value, = cursor.fetchone()
    eq_(value, datetime.datetime(2013, 4, 25, 11, 0, 0))
def test_update_and_modify_columns_fractured(conn):
    """Store a full row set, then re-store a subset of columns and verify
    that both ``modified`` and the "Drops" value changed.

    Fix: ``check_columns`` was built with ``map(Column, ...)``, which on
    Python 3 returns a one-shot iterator.  The first
    ``table.select(check_columns)`` exhausted it, so the second select was
    built with no columns.  It is now materialized as a list.
    """
    curr_timezone = timezone("Europe/Amsterdam")
    granularity = create_granularity("900")
    timestamp = curr_timezone.localize(datetime(2013, 1, 2, 10, 45, 0))
    entity_ids = range(1023, 1023 + 100)

    trend_names_a = ["CellID", "CCR", "Drops"]
    data_rows_a = [(i, ("10023", "0.9919", "17")) for i in entity_ids]
    data_types_a = extract_data_types(data_rows_a)

    # Second batch covers only a subset of the columns.
    trend_names_b = ["CellID", "Drops"]
    data_rows_b = [(i, ("10023", "19")) for i in entity_ids]
    data_types_b = extract_data_types(data_rows_b)

    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "test-src009")
        entitytype = name_to_entitytype(cursor, "test-type001")
        trendstore = TrendStore(datasource, entitytype, granularity, 86400,
                                "table").create(cursor)
        partition = trendstore.partition(timestamp)
        table = partition.table()
        partition.create(cursor)
        partition.check_columns_exist(trend_names_a, data_types_a)(cursor)
        conn.commit()

    store(conn, SCHEMA, table.name, trend_names_a, timestamp, data_rows_a)
    # Ensure a measurable gap between the two `modified` timestamps.
    time.sleep(0.2)

    # List, not map(): the columns are used to build two separate queries.
    check_columns = [Column(name) for name in ("modified", "Drops")]

    query = table.select(check_columns)
    with closing(conn.cursor()) as cursor:
        query.execute(cursor)
        row_before = cursor.fetchone()

    store(conn, SCHEMA, table.name, trend_names_b, timestamp, data_rows_b)

    query = table.select(check_columns)
    with closing(conn.cursor()) as cursor:
        query.execute(cursor)
        row_after = cursor.fetchone()

    assert_not_equal(row_before[0], row_after[0])
    assert_not_equal(row_before[1], row_after[1])
def test_store_copy_from_1(conn):
    """Copy eleven rows into a fresh partition and verify the row count
    and the maximum of the ``modified`` column."""
    counter_names = ['CellID', 'CCR', 'CCRatts', 'Drops']
    rows = [
        (10023, ('10023', '0.9919', '2105', '17')),
        (10047, ('10047', '0.9963', '4906', '18')),
        (10048, ('10048', '0.9935', '2448', '16')),
        (10049, ('10049', '0.9939', '5271', '32')),
        (10050, ('10050', '0.9940', '3693', '22')),
        (10051, ('10051', '0.9944', '3753', '21')),
        (10052, ('10052', '0.9889', '2168', '24')),
        (10053, ('10053', '0.9920', '2372', '19')),
        (10085, ('10085', '0.9987', '2282', '3')),
        (10086, ('10086', '0.9972', '1763', '5')),
        (10087, ('10087', '0.9931', '1453', '10'))
    ]

    tz = timezone("Europe/Amsterdam")
    counter_types = extract_data_types(rows)
    ts = tz.localize(datetime(2013, 1, 2, 10, 45, 0))
    gran = create_granularity("900")
    mod_time = tz.localize(datetime.now())

    with closing(conn.cursor()) as cursor:
        src = name_to_datasource(cursor, "test-src009")
        etype = name_to_entitytype(cursor, "test-type001")

        tstore = TrendStore(src, etype, gran, 86400, "table").create(cursor)
        part = tstore.partition(ts)
        table = part.table()
        part.create(cursor)
        part.check_columns_exist(counter_names, counter_types)(cursor)

        store_copy_from(conn, SCHEMA, table.name, counter_names, ts,
                        mod_time, rows)
        conn.commit()

        eq_(row_count(cursor, table), 11)

        table.select(Call("max", Column("modified"))).execute(cursor)
        eq_(first(cursor.fetchone()), mod_time)
def test_update_modified_column(conn):
    """Re-storing a row must bump its ``modified`` timestamp and the
    partition's entry in the modified bookkeeping table."""
    tz = timezone("Europe/Amsterdam")

    counter_names = ['CellID', 'CCR', 'Drops']
    rows = [
        (10023, ('10023', '0.9919', '17')),
        (10047, ('10047', '0.9963', '18'))
    ]
    counter_types = extract_data_types(rows)
    rows_update = [(10023, ('10023', '0.9919', '17'))]

    ts = tz.localize(datetime.now())
    gran = create_granularity("900")

    with closing(conn.cursor()) as cursor:
        src = name_to_datasource(cursor, "test-src009")
        etype = name_to_entitytype(cursor, "test-type001")

        tstore = TrendStore(src, etype, gran, 86400, "table").create(cursor)
        part = tstore.partition(ts)
        table = part.table()
        part.create(cursor)
        part.check_columns_exist(counter_names, counter_types)(cursor)

        store(conn, SCHEMA, table.name, counter_names, ts, rows)
        # Guarantee a visible gap between the two `modified` values.
        time.sleep(1)
        store(conn, SCHEMA, table.name, counter_names, ts, rows_update)
        conn.commit()

        table.select([Column("modified")]).execute(cursor)
        modified_values = [row for row in cursor.fetchall()]
        assert_not_equal(modified_values[0], modified_values[1])

        table.select(Call("max", Column("modified"))).execute(cursor)
        max_modified = first(cursor.fetchone())

        modified_table.select(Column("end")).where_(
            Eq(Column("table_name"), table.name)).execute(cursor)
        eq_(first(cursor.fetchone()), max_modified)
def test_create_trendstore_with_children(self):
    """Creating a partition under a freshly created trendstore must work
    and the trendstore must receive an id."""
    gran = create_granularity("900")

    with closing(self.conn.cursor()) as cursor:
        tstore = TrendStore(self.datasource, self.entitytype, gran,
                            3600, "table").create(cursor)
        assert tstore.id is not None

        ts = self.datasource.tzinfo.localize(
            datetime.datetime(2013, 5, 6, 14, 45))
        tstore.partition(ts).create(cursor)
def test_create_trendstore_with_children(conn, dataset):
    """Fixture-based variant: a new trendstore gets an id and its partition
    for a given timestamp can be created."""
    tstore = TrendStore(dataset.datasource, dataset.entitytype,
                        dataset.granularity, 3600, "table")

    with closing(conn.cursor()) as cursor:
        tstore.create(cursor)
        assert tstore.id is not None

        ts = dataset.datasource.tzinfo.localize(
            datetime.datetime(2013, 5, 6, 14, 45))
        tstore.partition(ts).create(cursor)
def test_update(conn):
    """Updating an existing row must change both its ``modified`` timestamp
    and the updated "CCR" value."""
    counter_names = ["CellID", "CCR", "Drops"]
    rows = [
        (10023, ("10023", "0.9919", "17")),
        (10047, ("10047", "0.9963", "18"))
    ]
    counter_types = extract_data_types(rows)
    rows_update = [(10023, ("10023", "0.5555", "17"))]

    # NOTE(review): naive timestamp here, unlike the localized timestamps in
    # the sibling tests — confirm this is intended.
    ts = datetime.now()
    gran = create_granularity("900")

    with closing(conn.cursor()) as cursor:
        src = name_to_datasource(cursor, "test-src009")
        etype = name_to_entitytype(cursor, "test-type001")

        tstore = TrendStore(src, etype, gran, 86400, "table").create(cursor)
        part = tstore.partition(ts)
        table = part.table()
        part.create(cursor)
        part.check_columns_exist(counter_names, counter_types)(cursor)

        store(conn, SCHEMA, table.name, counter_names, ts, rows)
        store(conn, SCHEMA, table.name, counter_names, ts, rows_update)
        conn.commit()

    query = table.select([Column("modified"), Column("CCR")])
    with closing(conn.cursor()) as cursor:
        query.execute(cursor)
        result = cursor.fetchall()
        assert_not_equal(result[0][0], result[1][0])
        assert_not_equal(result[0][1], result[1][1])
def test_run(conn):
    """End-to-end transformation test: store two source trendstores, run a
    Transformation over them, and verify the processed max-modified state.

    NOTE(review): this function references several names not visible in
    this chunk (`dns`, `modified_a`, `timestamp`, `dummy_type_name`,
    `create_source_1`, `create_source_2`, `state_table`, `modified_table`,
    `k`, `reset_db`) — presumably module-level fixtures/helpers; confirm
    against the full file.
    """
    with closing(conn.cursor()) as cursor:
        reset_db(cursor)
    conn.commit()

    # Same connection used for both roles of the context.
    minerva_context = MinervaContext(conn, conn)

    source_granularity = create_granularity("900")
    dest_granularity = create_granularity("900")

    with closing(conn.cursor()) as cursor:
        source_datasource_1 = get_dummy_datasource(cursor, "dummy-src-1")
        source_datasource_2 = get_dummy_datasource(cursor, "dummy-src-2")
        dest_datasource = get_dummy_datasource(cursor, "dummy-transform-src")

        entitytype = get_dummy_entitytype(cursor, dummy_type_name)

        partition_size = 86400

        # Two source trendstores plus one destination trendstore.
        trendstore_1 = TrendStore(
            source_datasource_1, entitytype, source_granularity,
            partition_size, "table")
        trendstore_1.create(cursor)
        trendstore_2 = TrendStore(
            source_datasource_2, entitytype, source_granularity,
            partition_size, "table")
        trendstore_2.create(cursor)
        result_trendstore = TrendStore(
            dest_datasource, entitytype, dest_granularity, partition_size,
            "table")
        result_trendstore.create(cursor)

        # Identity mappings for the three counters plus two derived columns.
        function_mappings = [
            add_function_mapping(cursor, None, ["counter_a"], "identity_a"),
            add_function_mapping(cursor, None, ["counter_b"], "identity_b"),
            add_function_mapping(cursor, None, ["counter_c"], "identity_c"),
            add_function_mapping(cursor, "add", ["counter_a", "counter_b"], "add_a_b"),
            add_function_mapping(cursor, "multiply", ["counter_a", "300"], "a_times_300")]

        function_mapping_ids = [fm.id for fm in function_mappings]

        function_set_qtr = add_function_set(
            cursor, "test_set", "", function_mapping_ids,
            [source_datasource_1.id, source_datasource_2.id], entitytype.id,
            source_granularity.name, dest_datasource.id, entitytype.id,
            dest_granularity.name, None, [], None, True)

        # NOTE(review): on Python 3 this is a one-shot iterator consumed by
        # create_source_1 below; create_source_2 would then see it empty —
        # confirm the intended Python version.
        entities = map(partial(get_or_create_entity, cursor), dns)

    conn.commit()

    source_1 = create_source_1(source_granularity, entities)

    def store_modified_at(trendstore, datapackage, modified):
        # Build a DbTransaction that stores `datapackage` with a fixed
        # `modified` timestamp injected into the transaction state.
        def set_modified(state):
            state["modified"] = modified

        partition = trendstore.partition(datapackage.timestamp)
        set_modified_action = UpdateState(set_modified)
        copy_from = CopyFrom(k(partition), k(datapackage))

        return DbTransaction(set_modified_action, copy_from)

    transaction = store_modified_at(trendstore_1, source_1, modified_a)
    transaction.run(conn)

    source_2 = create_source_2(source_granularity, entities)

    transaction = store_modified_at(trendstore_2, source_2, modified_a)
    transaction.run(conn)

    result_partition = result_trendstore.partition(timestamp)
    result_table = result_partition.table()

    conn.commit()

    logging.debug("source_1")
    logging.debug(unlines(render_datapackage(source_1)))
    logging.debug("source_2")
    logging.debug(unlines(render_datapackage(source_2)))

    dest_timestamp = timestamp

    transformation = Transformation(function_set_qtr, dest_timestamp)
    transformation.execute(minerva_context)

    columns = map(Column, ["entity_id", "identity_a", "identity_b",
                           "add_a_b", "a_times_300"])
    query = result_table.select(columns)

    with closing(conn.cursor()) as cursor:
        query.execute(cursor)
        logging.debug(unlines(render_result(cursor)))

        # Max modified of each source partition.
        src_table_1 = trendstore_1.partition(timestamp).table()
        query = src_table_1.select(Call("max", Column("modified")))
        query.execute(cursor)
        src1_max_modified = first(cursor.fetchone())

        src_table_2 = trendstore_2.partition(timestamp).table()
        query = src_table_2.select(Call("max", Column("modified")))
        query.execute(cursor)
        src2_max_modified = first(cursor.fetchone())

        query = modified_table.select(Column("end")).where_(
            Eq(Column("table_name"), result_table.name))
        query.execute(cursor)

        # NOTE(review): Eq(...) here is given only a column; the value is
        # bound via the execute() parameter tuple — confirm this matches the
        # Eq API used elsewhere in this file (two-argument form).
        query = state_table.select(Column("processed_max_modified")).where_(
            Eq(Column("function_set_id")))
        query.execute(cursor, (function_set_qtr.id,))
        processed_max_modified = first(cursor.fetchone())

    eq_(max(src1_max_modified, src2_max_modified), processed_max_modified)