def test_store_raw_fractured_large(conn):
    plugin = TrendPlugin(conn)

    with closing(conn.cursor()) as cursor:
        clear_database(cursor)
        datasource = name_to_datasource(cursor, "test_source003")

    conn.commit()

    granularity = 3600
    timestamp = "2012-04-19T11:00:00"

    trend_names_part_1 = ["PART1_COUNTER1", "PART1_COUNTER2", "PART1_COUNTER3"]
    dn_template = "Network=dummy,Subnetwork=test,Element={}"
    raw_data_rows_part_1 = [
        (dn_template.format(i), ("1", "2", "3")) for i in range(100)]

    plugin.store_raw(
        datasource, granularity, timestamp, trend_names_part_1,
        raw_data_rows_part_1)

    trend_names_part_2 = ["PART2_COUNTER1", "PART2_COUNTER2", "PART2_COUNTER3"]
    raw_data_rows_part_2 = [
        (dn_template.format(i), ("4", "5", "6")) for i in range(100)]

    plugin.store_raw(
        datasource, granularity, timestamp, trend_names_part_2,
        raw_data_rows_part_2)
def test_store_raw_fractured_small(conn):
    plugin = TrendPlugin(conn)

    with closing(conn.cursor()) as cursor:
        clear_database(cursor)
        datasource = name_to_datasource(cursor, "test_source002")

    conn.commit()

    granularity = 3600
    timestamp = "2012-04-19T11:00:00"

    trend_names_part_1 = ["PART1_COUNTER1", "PART1_COUNTER2", "PART1_COUNTER3"]
    raw_data_rows = [
        ("Network=dummy,Subnetwork=test,Element=1", ("1", "2", "3"))]

    plugin.store_raw(
        datasource, granularity, timestamp, trend_names_part_1, raw_data_rows)

    trend_names_part_2 = ["PART2_COUNTER1", "PART2_COUNTER2", "PART2_COUNTER3"]
    raw_data_rows = [
        ("Network=dummy,Subnetwork=test,Element=1", ("4", "5", "6"))]

    plugin.store_raw(
        datasource, granularity, timestamp, trend_names_part_2, raw_data_rows)
def setup(self):
    self.conn = connect()

    clear_database(self.conn)

    with closing(self.conn.cursor()) as cursor:
        self.datasource = name_to_datasource(cursor, "test-source")
        self.entitytype = name_to_entitytype(cursor, "test_type")

    self.conn.commit()
def test_get_function_set_2(conn):
    clear_database(conn)

    with closing(conn.cursor()) as cursor:
        entitytype = get_dummy_entitytype(cursor, 42, "dummy_type")
        src_datasource = get_dummy_datasource(cursor, 3, "dummy-src-1")
        dst_datasource = get_dummy_datasource(cursor, 4, "dummy-src-2")

        args = (
            14, "test_set", [1, 2, 3], [3], 42, 900, 4, entitytype.id, 900,
            None, [])
        add_function_set(cursor, *args)

        function_set = get_function_set(cursor, 14)

    eq_(function_set.name, 'test_set')
def test_store_raw1(conn):
    plugin = TrendPlugin(conn)

    with closing(conn.cursor()) as cursor:
        clear_database(cursor)
        datasource = name_to_datasource(cursor, "test_source001")

    conn.commit()

    granularity = 3600
    timestamp = "2012-04-19T11:00:00"
    trend_names = ["COUNTER1", "COUNTER2", "COUNTER3"]
    rows = [
        ("Network=dummy,Subnetwork=test,Element=1", ("1", "2", "3"))]

    plugin.store_raw(datasource, granularity, timestamp, trend_names, rows)
def test_retrieve_from_v4_trendstore(conn):
    plugin = get_plugin("trend")

    data = TestData()

    plugin_obj = plugin(conn, api_version=3)

    with closing(conn.cursor()) as cursor:
        clear_database(cursor)
        data.load(cursor)

    start = data.timestamp_1
    end = data.timestamp_1

    entity = data.entities[1]
    entities = [entity.id]

    column_names = ["CellID", "CCR", "CCRatts", "Drops"]

    datasources = [data.datasource_a]
    entitytype = data.entitytype
    granularity = 900

    r = plugin_obj.retrieve(
        datasources, granularity, entitytype, column_names, entities, start,
        end)

    eq_(len(r), 1)

    first_result = head(r)

    entity_id, timestamp, c1, c2, c3, c4 = first_result

    eq_(entity_id, entity.id)
    eq_(c4, 18)
def test_run(conn):
    clear_database(conn)

    plugin = load_plugin()

    minerva_context = MinervaContext(conn, conn)

    instance = plugin(minerva_context)

    job_id = 67

    description = {
        "function_set_id": 42,
        "dest_timestamp": timestamp.isoformat(),
        "processed_max_modified": "2012-12-11 14:03:29+01:00"}

    config = {}

    job = instance.create_job(job_id, description, config)

    assert_not_equal(job, None)

    dest_granularity = 900

    function_mapping_table = Table("transform", "function_mapping")

    with closing(conn.cursor()) as cursor:
        state_table.truncate().execute(cursor)
        function_set_table.truncate(cascade=True).execute(cursor)

        source_datasource_1 = get_dummy_datasource(cursor, "dummy-src-1")
        source_datasource_2 = get_dummy_datasource(cursor, "dummy-src-2")
        dest_datasource = get_dummy_datasource(cursor, "dummy-transform-src")

        dest_entitytype = get_dummy_entitytype(cursor, "dummy_type_standard")

        function_mapping_table.truncate().execute(cursor)

        # Register the function mappings and the function set under test.
        get_function_mapping(cursor, 1, None, ["counter_a"], "identity_a")
        get_function_mapping(cursor, 2, None, ["counter_b"], "identity_b")
        get_function_mapping(cursor, 3, None, ["counter_c"], "identity_c")
        get_function_mapping(
            cursor, 4, "add", ["counter_a", "counter_b"], "add_a_b")
        get_function_mapping(
            cursor, 5, "multiply", ["counter_a", "300"], "a_times_300")

        get_function_set(
            cursor, 42, "test_set", [1, 2, 3, 4, 5], [3, 4], 42, 900, 6,
            dest_entitytype.id, dest_granularity, None, [], True)

        args = 1, "unittest", "transform", ""
        add_job_source(cursor, *args)

        size = 233
        job_source_id = 1
        args = (
            job_id, "transform", "", size, "2012-12-11 14:34:00", None, None,
            None, job_source_id, "running")
        add_job(cursor, *args)

        args = (
            42, description["dest_timestamp"],
            description["processed_max_modified"], "2012-12-11 13:03:00",
            job_id)
        add_state(cursor, *args)

        # Create and populate the two source trend tables.
        table_name = "dummy-src-1_dummy_type_standard_qtr_20121211"
        columns = [
            Column("entity_id"),
            Column("timestamp", type_=SqlType("timestamp with time zone")),
            Column("modified", type_=SqlType("timestamp with time zone")),
            Column("counter_a"),
            Column("counter_b")]

        src_table_1 = Table("trend", table_name, columns=columns)

        if table_exists(cursor, src_table_1):
            src_table_1.drop().execute(cursor)

        src_table_1.create().execute(cursor)

        entities = map(partial(get_or_create_entity, cursor), dns)

        source_1 = create_source_1(entities)

        store(cursor, src_table_1, source_1)

        table_name = "dummy-src-2_dummy_type_standard_qtr_20121211"
        columns = [
            Column("entity_id"),
            Column("timestamp", type_=SqlType("timestamp with time zone")),
            Column("modified", type_=SqlType("timestamp with time zone")),
            Column("counter_c")]

        src_table_2 = Table("trend", table_name, columns=columns)

        if table_exists(cursor, src_table_2):
            src_table_2.drop().execute(cursor)

        src_table_2.create().execute(cursor)

        source_2 = create_source_2(entities)

        store(cursor, src_table_2, source_2)

        result_table = Table(
            "trend", "dummy-transform-src_dummy_type_standard_qtr_20121211")

        if table_exists(cursor, result_table):
            result_table.truncate().execute(cursor)

    conn.commit()

    print("source_1")
    print(unlines(render_source(source_1)))

    print("source_2")
    print(unlines(render_source(source_2)))

    job.execute()

    columns = map(Column, [
        "entity_id", "identity_a", "identity_b", "add_a_b", "a_times_300"])

    query = result_table.select(columns)

    with closing(conn.cursor()) as cursor:
        query.execute(cursor, args)

        print(unlines(render_result(cursor)))

        query = src_table_1.select(Call("max", Column("modified")))
        query.execute(cursor)
        src1_max_modified = first(cursor.fetchone())

        query = src_table_2.select(Call("max", Column("modified")))
        query.execute(cursor)
        src2_max_modified = first(cursor.fetchone())

        query = modified_table.select(Column("end")).where_(
            Eq(Column("table_name"), result_table.name))
        query.execute(cursor)

        query = state_table.select(Column("processed_max_modified")).where_(
            Eq(Column("function_set_id")))
        query.execute(cursor, (42,))
        processed_max_modified = first(cursor.fetchone())

    # The state should record the newest modified timestamp of the sources.
    eq_(max(src1_max_modified, src2_max_modified), processed_max_modified)
def test_get_function_set_1(conn):
    clear_database(conn)

    with closing(conn.cursor()) as cursor:
        get_function_set(cursor, 12)
def prepare_database(conn):
    with closing(conn.cursor()) as cursor:
        cursor.execute("DELETE FROM directory.entity")
        cursor.execute("DELETE FROM directory.existence")

    clear_database(conn)
def test_run(conn):
    clear_database(conn)

    plugin = load_plugin()

    minerva_context = MinervaContext(conn, conn)

    instance = plugin(minerva_context)

    job_id = 67

    description = {
        "function_set_id": 43,
        "dest_timestamp": "2012-12-11 13:00:00",
        "processed_max_modified": "2012-12-11 13:03:29",
    }

    config = {}

    job = instance.create_job(job_id, description, config)

    assert_not_equal(job, None)

    dest_granularity = 3600

    with closing(conn.cursor()) as cursor:
        source_datasource_1 = get_dummy_datasource(cursor, 5, "dummy-src-5")
        dest_datasource = get_dummy_datasource(cursor, 6, "dummy-transform-src")

        dest_entitytype = get_dummy_entitytype(
            cursor, 45, "dummy_type_aggregate")

        # Aggregation function set: sum counter_a into sum_a.
        get_function_mapping(cursor, 11, "sum", ["counter_a"], "sum_a")

        get_function_set(
            cursor, 43, "test_set_agg", [11], [5], 45, 900, 6,
            dest_entitytype.id, dest_granularity, None, ["entity_id"])

        args = 1, "unittest", "transform", ""
        add_job_source(cursor, *args)

        size = 233
        job_source_id = 1
        args = (
            job_id, "transform", "", size, "2012-12-11 14:34:00", None, None,
            None, job_source_id, "running")
        add_job(cursor, *args)

        args = (
            43, description["dest_timestamp"],
            description["processed_max_modified"], "2012-12-11 13:03:00",
            job_id)
        add_state(cursor, *args)

        table_name = "dummy-src-5_dummy_type_aggregate_qtr_20121211"
        columns = [
            Column("entity_id"),
            Column("timestamp", type_=SqlType("timestamp with time zone")),
            Column("modified", type_=SqlType("timestamp with time zone")),
            Column("counter_a"),
            Column("counter_b"),
        ]

        table = Table("trend", table_name, columns=columns)

        if table_exists(cursor, table):
            table.drop().execute(cursor)

        table.create().execute(cursor)

        store(cursor, table, source_1)

        result_table = Table(
            "trend", "dummy-transform-src_dummy_type_aggregate_hr_20121207")

        if table_exists(cursor, result_table):
            result_table.truncate().execute(cursor)

    conn.commit()

    print("source_1")
    print("\n".join(render_source(source_1)))

    job.execute()

    columns = map(Column, ["entity_id", "sum_a"])

    query = result_table.select(columns)

    with closing(conn.cursor()) as cursor:
        query.execute(cursor, args)

        print("\n".join(render_result(cursor)))

        query.execute(cursor, args)
        row = cursor.fetchone()

    # The first result row should hold the aggregated sum_a value.
    eq_(row[1], 21)