def test_retrieve_ordered_by_time(self):
    """Retrieving a zero-length time window for one entity yields exactly
    one time-ordered row with the expected trailing counter value."""
    table_a = self.data.partition_a.table()

    # Sanity check: the fixture partition holds three rows.
    with closing(self.conn.cursor()) as cursor:
        eq_(row_count(cursor, table_a), 3)

    table_names = [table_a.name]

    # Both window bounds are the same timestamp, so only rows at
    # exactly timestamp_1 qualify.
    start = self.data.timestamp_1
    end = self.data.timestamp_1

    entity = self.data.entities[1]
    entities = [entity.id]

    columns = [
        Column(table_a, "CellID"),
        Column(table_a, "CCR"),
        Column(table_a, "CCRatts"),
        Column(table_a, "Drops"),
    ]

    results = retrieve_orderedby_time(
        self.conn, SCHEMA, table_names, columns, entities, start, end)

    eq_(len(results), 1)

    row = head(results)
    entity_id, timestamp, c1, c2, c3, c4 = row

    eq_(entity_id, entity.id)
    # The last selected column ("Drops") for this entity is 18.
    eq_(c4, 18)
def test_store_copy_from_2(conn):
    """store_copy_from writes a single row into a fresh partition and
    stamps it with the supplied `modified` timestamp."""
    trend_names = ['CCR', 'CCRatts', 'Drops']
    data_rows = [(10023, ('0.9919', '2105', '17'))]
    # Explicit column types; no inference from the row values here.
    data_types = ['integer', 'smallint', 'smallint']

    amsterdam = timezone("Europe/Amsterdam")
    timestamp = amsterdam.localize(datetime(2013, 1, 2, 10, 45, 0))
    modified = amsterdam.localize(datetime.now())
    granularity = create_granularity("900")

    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "test-src010")
        entitytype = name_to_entitytype(cursor, "test-type002")

        trendstore = TrendStore(
            datasource, entitytype, granularity, 86400, "table").create(cursor)

        partition = trendstore.partition(timestamp)
        partition.create(cursor)
        partition.check_columns_exist(trend_names, data_types)(cursor)
        table = partition.table()

        store_copy_from(
            conn, SCHEMA, table.name, trend_names, timestamp, modified,
            data_rows)
        conn.commit()

        eq_(row_count(cursor, table), 1)

        # The stored row must carry the exact `modified` value.
        table.select(Call("max", Column("modified"))).execute(cursor)
        max_modified = first(cursor.fetchone())
        eq_(max_modified, modified)
def test_store_insert_rows(conn):
    """store_insert_rows appends rows for new timestamps but does not
    duplicate rows when the same timestamp is stored again."""
    table = Table(SCHEMA, 'storage_tmp_test_table')
    trend_names = ['CellID', 'CCR', 'Drops']
    data_rows = [
        (10023, ('10023', '0.9919', '17')),
        (10047, ('10047', '0.9963', '18')),
    ]

    amsterdam = timezone("Europe/Amsterdam")
    modified = amsterdam.localize(datetime.now())
    time1 = amsterdam.localize(datetime.now())
    time2 = time1 - timedelta(days=1)

    data_types = extract_data_types(data_rows)

    with closing(conn.cursor()) as cursor:
        # Start from a clean table.
        table.drop().if_exists().execute(cursor)
        create_trend_table(conn, SCHEMA, table.name, trend_names, data_types)

        # First store at time1: both rows inserted.
        store_insert_rows(
            conn, SCHEMA, table.name, trend_names, time1, modified, data_rows)
        conn.commit()
        eq_(row_count(cursor, table), 2)

        # Different timestamp: two more rows.
        store_insert_rows(
            conn, SCHEMA, table.name, trend_names, time2, modified, data_rows)
        conn.commit()
        eq_(row_count(cursor, table), 4)

        # Same timestamp as before: count must stay unchanged (idempotent).
        store_insert_rows(
            conn, SCHEMA, table.name, trend_names, time1, modified, data_rows)
        conn.commit()
        eq_(row_count(cursor, table), 4)

        table.select(Call("max", Column("modified"))).execute(cursor)
        max_modified = first(cursor.fetchone())
        eq_(max_modified, modified)
def test_store_copy_from_1(conn):
    """store_copy_from bulk-loads eleven rows into a fresh partition and
    stamps each with the supplied `modified` timestamp."""
    trend_names = ['CellID', 'CCR', 'CCRatts', 'Drops']
    data_rows = [
        (10023, ('10023', '0.9919', '2105', '17')),
        (10047, ('10047', '0.9963', '4906', '18')),
        (10048, ('10048', '0.9935', '2448', '16')),
        (10049, ('10049', '0.9939', '5271', '32')),
        (10050, ('10050', '0.9940', '3693', '22')),
        (10051, ('10051', '0.9944', '3753', '21')),
        (10052, ('10052', '0.9889', '2168', '24')),
        (10053, ('10053', '0.9920', '2372', '19')),
        (10085, ('10085', '0.9987', '2282', '3')),
        (10086, ('10086', '0.9972', '1763', '5')),
        (10087, ('10087', '0.9931', '1453', '10')),
    ]

    amsterdam = timezone("Europe/Amsterdam")
    # Here the column types are inferred from the data itself.
    data_types = extract_data_types(data_rows)
    timestamp = amsterdam.localize(datetime(2013, 1, 2, 10, 45, 0))
    granularity = create_granularity("900")
    modified = amsterdam.localize(datetime.now())

    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "test-src009")
        entitytype = name_to_entitytype(cursor, "test-type001")

        trendstore = TrendStore(
            datasource, entitytype, granularity, 86400, "table").create(cursor)

        partition = trendstore.partition(timestamp)
        table = partition.table()
        partition.create(cursor)
        partition.check_columns_exist(trend_names, data_types)(cursor)

        store_copy_from(
            conn, SCHEMA, table.name, trend_names, timestamp, modified,
            data_rows)
        conn.commit()

        eq_(row_count(cursor, table), 11)

        table.select(Call("max", Column("modified"))).execute(cursor)
        max_modified = first(cursor.fetchone())
        eq_(max_modified, modified)
def test_store_using_tmp(conn):
    """store_using_tmp de-duplicates on entity id within one batch: eleven
    input rows containing a duplicate entity (10023) yield ten stored rows."""
    table = Table(SCHEMA, 'storage_tmp_test_table')
    trend_names = ['CellID', 'CCR', 'RadioFail', 'RFOldHo', 'AbisFailCall']
    data_rows = [
        (10023, ('10023', '0.9919', '10', '3', '3')),
        (10047, ('10047', '0.9963', '11', '5', '0')),
        (10048, ('10048', '0.9935', '12', '3', '0')),
        (10049, ('10049', '0.9939', '20', '3', '4')),
        (10050, ('10050', '0.9940', '18', '3', '0')),
        (10051, ('10051', '0.9944', '17', '2', '2')),
        (10052, ('10052', '0.9889', '18', '2', '0')),
        (10053, ('10053', '0.9920', '15', '3', '1')),
        (10023, ('10023', '0.9931', '9', '0', '1')),  # duplicate entity id
        (10085, ('10085', '0.9987', '3', '0', '0')),
        (10086, ('10086', '0.9972', '3', '2', '0')),
    ]

    data_types = extract_data_types(data_rows)

    with closing(conn.cursor()) as cursor:
        # Start from a clean table.
        table.drop().if_exists().execute(cursor)
        create_trend_table(conn, SCHEMA, table.name, trend_names, data_types)

        amsterdam = timezone("Europe/Amsterdam")
        timestamp = amsterdam.localize(datetime(2013, 1, 2, 10, 45, 0))
        modified = amsterdam.localize(datetime.now())

        store_using_tmp(
            conn, SCHEMA, table.name, trend_names, timestamp, modified,
            data_rows)
        conn.commit()

        # 11 input rows minus the duplicate 10023 entry.
        eq_(row_count(cursor, table), 10)

        table.select(Call("max", Column("modified"))).execute(cursor)
        max_modified = first(cursor.fetchone())
        eq_(max_modified, modified)