def test_simple(conn):
    with closing(conn.cursor()) as cursor:
        attribute_names = ['CellID', 'CCR', 'Drops']

        datasource = name_to_datasource(cursor, "integration-test")
        entitytype = name_to_entitytype(cursor, "UtranCell")

        timestamp = pytz.utc.localize(datetime.utcnow())
        data_rows = [(10023, timestamp, ('10023', '0.9919', '17'))]

        datapackage = DataPackage(attribute_names, data_rows)

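        # deduce_attributes derives an Attribute (name plus data type) for
        # each attribute from the values in the packaged rows.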
        attributes = datapackage.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        attributestore.store_txn(datapackage).run(conn)

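        # Materialize the current-pointer table so the "curr" view reflects
        # the rows that were just stored.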
        query = (
            "SELECT attribute_directory.materialize_curr_ptr(attributestore) "
            "FROM attribute_directory.attributestore "
            "WHERE id = %s")

        cursor.execute(query, (attributestore.id,))

        query = (
            "SELECT timestamp "
            "FROM {0} "
            "LIMIT 1").format(attributestore.table.render())

        cursor.execute(query)
        stored_timestamp, = cursor.fetchone()

        eq_(stored_timestamp.toordinal(), timestamp.toordinal())
def test_retrieve(conn):
    with closing(conn.cursor()) as cursor:
        time1 = pytz.utc.localize(datetime.utcnow())
        trend_names = ['CellID', 'CCR', 'Drops']
        data_rows = [
            (10023, time1, ('10023', '0.9919', '17')),
            (10047, time1, ('10047', '0.9963', '18'))
        ]
        datapackage = DataPackage(trend_names, data_rows)

        entitytype = name_to_entitytype(cursor, "UtranCell")
        datasource = name_to_datasource(cursor, "integration-test")

        data_types = ["text", "real", "smallint"]

        attributes = [Attribute(name, datatype) for name, datatype in
                      zip(trend_names, data_types)]

        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        attributestore.store_txn(datapackage).run(conn)
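        # Wait so the update below is stored with a measurably later
        # modification time.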
        time.sleep(5)

        time2 = pytz.utc.localize(datetime.utcnow())
        update_data_rows = [(10023, time2, ('10023', '0.9919', '18'))]
        update_datapackage = DataPackage(trend_names, update_data_rows)
        attributestore.store_txn(update_datapackage).run(conn)
        conn.commit()

        data = retrieve(conn, attributestore.table, trend_names, [10023])
        assert_not_equal(data, None)
def test_changing_datatype(conn):
    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "storagetest")
        entitytype = name_to_entitytype(cursor, "UtranCell")
        timestamp = pytz.utc.localize(datetime.utcnow())
        attribute_names = ['site_nr', 'height']

        datapackage_a = DataPackage(
            attribute_names=attribute_names,
            rows=[
                (10023, timestamp, ('10023', '15'))
            ]
        )

        datapackage_b = DataPackage(
            attribute_names=attribute_names,
            rows=[
                (10023, timestamp, ('10023', '25.6'))
            ]
        )

        attributes = datapackage_a.deduce_attributes()

        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        conn.commit()

        attributestore.store_txn(datapackage_a).run(conn)
        attributestore.store_txn(datapackage_b).run(conn)

        conn.commit()
        column_names = get_column_names(conn, "attribute_history",
                                        attributestore.table_name())
        eq_(len(column_names), 6)
def test_extra_column(conn):
    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "storagetest")
        entitytype = name_to_entitytype(cursor, "UtranCell")
        timestamp = pytz.utc.localize(datetime.utcnow())

        datapackage_a = DataPackage(
            attribute_names=['test0', 'test1'],
            rows=[
                (10023, timestamp, ('10023', '0.9919'))
            ]
        )

        datapackage_b = DataPackage(
            attribute_names=['test0', 'test1', 'test2'],
            rows=[
                (10023, timestamp, ('10023', '0.9919', '17'))
            ]
        )

        attributes = datapackage_a.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        conn.commit()

        attributestore.store_txn(datapackage_a).run(conn)
        attributestore.store_txn(datapackage_b).run(conn)

        conn.commit()
        column_names = get_column_names(conn, 'attribute_history',
                                        attributestore.table_name())
        eq_(len(column_names), 7)
def test_store_copy_from_2(conn):
    trend_names = ['CCR', 'CCRatts', 'Drops']
    data_rows = [
        (10023, ('0.9919', '2105', '17'))
    ]

    data_types = ['integer', 'smallint', 'smallint']

    curr_timezone = timezone("Europe/Amsterdam")
    timestamp = curr_timezone.localize(datetime(2013, 1, 2, 10, 45, 0))
    modified = curr_timezone.localize(datetime.now())
    granularity = create_granularity("900")

    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "test-src010")
        entitytype = name_to_entitytype(cursor, "test-type002")
        trendstore = TrendStore(datasource, entitytype, granularity, 86400, "table").create(cursor)
        partition = trendstore.partition(timestamp)
        partition.create(cursor)
        partition.check_columns_exist(trend_names, data_types)(cursor)
        table = partition.table()

        store_copy_from(conn, SCHEMA, table.name, trend_names, timestamp,
                modified, data_rows)

        conn.commit()

        eq_(row_count(cursor, table), 1)

        table.select(Call("max", Column("modified"))).execute(cursor)

        max_modified = first(cursor.fetchone())

        eq_(max_modified, modified)
    def test_get_aggregate_shard(self):
        awacs_query = [{"type": "C", "value": ["dummy_type"]}]

        granularity = create_granularity("900")

        formula = "SUM(Drops)"

        shard_indexes = [15680]

        with closing(self.conn.cursor()) as cursor:
            entitytype_cell = name_to_entitytype(cursor, 'dummy_type')

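            # compile_sql is assumed to translate the query structure into a
            # SQL fragment, bind parameters and the entity id column name.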
            sql, args, entity_id_column = compile_sql(awacs_query, None)

            select_statement = "SELECT {} AS id {}".format(entity_id_column, sql)

            entities_query = cursor.mogrify(select_statement, args)

        get_shard = partial(get_aggregate_shard, self.conn, entities_query,
                entitytype_cell.id, granularity, formula)

        shards = list(map(get_shard, shard_indexes))

        for shard in shards:
            logging.debug("{} - {}".format(shard[0], shard[-1]))

        eq_(len(shards), len(shard_indexes))
    def setup(self):
        self.conn = connect()

        clear_database(self.conn)

        with closing(self.conn.cursor()) as cursor:
            self.datasource = name_to_datasource(cursor, "test-source")
            self.entitytype = name_to_entitytype(cursor, "test_type")

        self.conn.commit()
    def load(self, cursor):
        self.entitytype = name_to_entitytype(cursor, self.entitytype_name)

        self.entities = list(map(partial(dn_to_entity, cursor), self.dns))

        granularity = create_granularity("900")

        # Data a

        self.datasource_a = name_to_datasource(cursor, "test-source-a")
        self.trendstore_a = TrendStore(self.datasource_a, self.entitytype,
                granularity, partition_size=86400, type="table").create(cursor)
        datapackage = generate_datapackage_a(granularity, self.timestamp_1,
                self.entities)
        self.partition_a = store_datapackage(cursor, self.trendstore_a,
                datapackage, self.modified)

        # Data b

        self.datasource_b = name_to_datasource(cursor, "test-source-b")
        self.trendstore_b = TrendStore(self.datasource_b, self.entitytype,
                granularity, partition_size=86400, type="table").create(cursor)
        datapackage = generate_datapackage_b(granularity, self.timestamp_1,
                self.entities)
        self.partition_b = store_datapackage(cursor, self.trendstore_b,
                datapackage, self.modified)

        # Data c

        self.datasource_c = name_to_datasource(cursor, "test-source-c")
        self.trendstore_c = TrendStore(self.datasource_c, self.entitytype,
                granularity, partition_size=86400, type="table").create(cursor)
        datapackage = generate_datapackage_c(granularity, self.timestamp_1,
                self.entities)
        self.partition_c = store_datapackage(cursor, self.trendstore_c,
                datapackage, self.modified)

        # Data d

        self.datasource_d = name_to_datasource(cursor, "test-source-d")
        self.trendstore_d = TrendStore(self.datasource_d, self.entitytype,
                granularity, partition_size=86400, type="table").create(cursor)
        datapackage_1 = generate_datapackage_d(granularity, self.timestamp_1,
                self.entities)
        self.partition_d_1 = store_datapackage(cursor, self.trendstore_d,
                datapackage_1, self.modified)

        datapackage_2 = generate_datapackage_d(granularity, self.timestamp_2,
                self.entities)
        self.partition_d_2 = store_datapackage(cursor, self.trendstore_d,
                datapackage_2, self.modified)
def test_update_modified_column(conn):
    curr_timezone = timezone("Europe/Amsterdam")

    trend_names = ['CellID', 'CCR', 'Drops']
    data_rows = [
        (10023, ('10023', '0.9919', '17')),
        (10047, ('10047', '0.9963', '18'))
    ]
    data_types = extract_data_types(data_rows)

    update_data_rows = [(10023, ('10023', '0.9919', '17'))]
    timestamp = curr_timezone.localize(datetime.now())
    granularity = create_granularity("900")

    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "test-src009")
        entitytype = name_to_entitytype(cursor, "test-type001")

        trendstore = TrendStore(datasource, entitytype, granularity, 86400, "table").create(cursor)
        partition = trendstore.partition(timestamp)

        table = partition.table()

        partition.create(cursor)

        partition.check_columns_exist(trend_names, data_types)(cursor)

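        # Store all rows, then store an update for one entity; its modified
        # value should differ from that of the untouched row.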
        store(conn, SCHEMA, table.name, trend_names, timestamp, data_rows)
        time.sleep(1)
        store(conn, SCHEMA, table.name, trend_names, timestamp, update_data_rows)
        conn.commit()

        query = table.select([Column("modified")])

        query.execute(cursor)
        modified_list = cursor.fetchall()
        assert_not_equal(modified_list[0], modified_list[1])

        table.select(Call("max", Column("modified"))).execute(cursor)

        max_modified = first(cursor.fetchone())

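        # modified_table is assumed to be defined at module level; it tracks
        # the modification window ("end") per partition table.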
        modified_table.select(Column("end")).where_(
                Eq(Column("table_name"), table.name)).execute(cursor)

        end = first(cursor.fetchone())

        eq_(end, max_modified)
def test_store_copy_from_1(conn):
    trend_names = ['CellID', 'CCR', 'CCRatts', 'Drops']

    data_rows = [
        (10023, ('10023', '0.9919', '2105', '17')),
        (10047, ('10047', '0.9963', '4906', '18')),
        (10048, ('10048', '0.9935', '2448', '16')),
        (10049, ('10049', '0.9939', '5271', '32')),
        (10050, ('10050', '0.9940', '3693', '22')),
        (10051, ('10051', '0.9944', '3753', '21')),
        (10052, ('10052', '0.9889', '2168', '24')),
        (10053, ('10053', '0.9920', '2372', '19')),
        (10085, ('10085', '0.9987', '2282', '3')),
        (10086, ('10086', '0.9972', '1763', '5')),
        (10087, ('10087', '0.9931', '1453', '10'))
    ]

    curr_timezone = timezone("Europe/Amsterdam")
    data_types = extract_data_types(data_rows)
    timestamp = curr_timezone.localize(datetime(2013, 1, 2, 10, 45, 0))
    granularity = create_granularity("900")
    modified = curr_timezone.localize(datetime.now())

    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "test-src009")
        entitytype = name_to_entitytype(cursor, "test-type001")

        trendstore = TrendStore(datasource, entitytype, granularity, 86400, "table").create(cursor)
        partition = trendstore.partition(timestamp)

        table = partition.table()

        partition.create(cursor)

        partition.check_columns_exist(trend_names, data_types)(cursor)

        store_copy_from(conn, SCHEMA, table.name, trend_names, timestamp,
                modified, data_rows)

        conn.commit()

        eq_(row_count(cursor, table), 11)

        table.select(Call("max", Column("modified"))).execute(cursor)

        max_modified = first(cursor.fetchone())

        eq_(max_modified, modified)
def test_update_and_modify_columns_fractured(conn):
    curr_timezone = timezone("Europe/Amsterdam")
    granularity = create_granularity("900")
    timestamp = curr_timezone.localize(datetime(2013, 1, 2, 10, 45, 0))
    entity_ids = range(1023, 1023 + 100)

    trend_names_a = ["CellID", "CCR", "Drops"]
    data_rows_a = [(i, ("10023", "0.9919", "17")) for i in entity_ids]
    data_types_a = extract_data_types(data_rows_a)

    trend_names_b = ["CellID", "Drops"]
    data_rows_b = [(i, ("10023", "19")) for i in entity_ids]
    data_types_b = extract_data_types(data_rows_b)

    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "test-src009")
        entitytype = name_to_entitytype(cursor, "test-type001")

        trendstore = TrendStore(datasource, entitytype, granularity, 86400, "table").create(cursor)
        partition = trendstore.partition(timestamp)

        table = partition.table()

        partition.create(cursor)

        partition.check_columns_exist(trend_names_a, data_types_a)(cursor)
        conn.commit()

    store(conn, SCHEMA, table.name, trend_names_a, timestamp, data_rows_a)
    time.sleep(0.2)

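    # Select the same columns before and after the partial update.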
    check_columns = list(map(Column, ["modified", "Drops"]))
    query = table.select(check_columns)

    with closing(conn.cursor()) as cursor:
        query.execute(cursor)
        row_before = cursor.fetchone()

    store(conn, SCHEMA, table.name, trend_names_b, timestamp, data_rows_b)

    query = table.select(check_columns)

    with closing(conn.cursor()) as cursor:
        query.execute(cursor)
        row_after = cursor.fetchone()

    assert_not_equal(row_before[0], row_after[0])
    assert_not_equal(row_before[1], row_after[1])
def test_update_modified_column(conn):
    attribute_names = ['CCR', 'Drops']

    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "integration-test")
        entitytype = name_to_entitytype(cursor, "UtranCell")
        timestamp = datasource.tzinfo.localize(datetime.now())

        rows = [
            (10023, timestamp, ('0.9919', '17')),
            (10047, timestamp, ('0.9963', '18'))
        ]

        datapackage_a = DataPackage(
            attribute_names=attribute_names,
            rows=rows)

        datapackage_b = DataPackage(
            attribute_names=attribute_names,
            rows=rows[:1])

        attributes = datapackage_a.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)
        conn.commit()

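    # Inspect the history table directly: "modified" tracks delivery time and
    # "hash" fingerprints the attribute values.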
    query = Query((
        "SELECT modified, hash "
        "FROM {0} "
        "WHERE entity_id = 10023").format(attributestore.history_table.render()))

    attributestore.store_txn(datapackage_a).run(conn)

    with closing(conn.cursor()) as cursor:
        modified_a, hash_a = query.execute(cursor).fetchone()

    attributestore.store_txn(datapackage_b).run(conn)

    with closing(conn.cursor()) as cursor:
        modified_b, hash_b = query.execute(cursor).fetchone()

    # modified should be updated when the same data is delivered again
    ok_(modified_a < modified_b)

    # hash should remain the same when the same data is delivered again
    eq_(hash_a, hash_b)
def test_get_trendstore(conn):
    plugin = get_plugin("trend")

    instance = plugin(conn, api_version=4)

    granularity = create_granularity("900")

    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "test-src")
        entitytype = name_to_entitytype(cursor, "test-type")

        instance.TrendStore(datasource, entitytype, granularity, 86400,
                "table").create(cursor)

    trendstore = instance.get_trendstore(datasource, entitytype, granularity)

    assert_not_equal(trendstore, None)
    def load(self, cursor):
        entitytype = name_to_entitytype(cursor, "materialize_dummytype001")
        self.datasource = name_to_datasource(cursor,
                                             "materialize_src_normal001")
        view_datasource = name_to_datasource(cursor, "vmaterialize_normal001")
        granularity = create_granularity('900')

        self.timestamp = self.datasource.tzinfo.localize(
            datetime.datetime(2013, 8, 26, 22, 0, 0))
        trend_names = ["cntr"]
        rows_small = [
            (1234, (55,)),
            (1235, (56,))]

        self.small_datapackage = DataPackage(granularity, self.timestamp,
                                             trend_names, rows_small)

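        # Values beyond the smallint range; storing these presumably forces
        # the trend column to be upgraded to a wider type.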
        rows_large = [
            (1234, (55243444334,)),
            (1235, (56242343242,))]

        self.large_datapackage = DataPackage(granularity, self.timestamp,
                                             trend_names, rows_large)

        self.trendstore = TrendStore(self.datasource, entitytype, granularity,
                                     86400, 'table')
        self.trendstore.create(cursor)
        partition = self.trendstore.partition(self.timestamp)
        partition.create(cursor)
        self.trendstore.check_columns_exist(trend_names, ["smallint"])(cursor)
        modified = self.datasource.tzinfo.localize(datetime.datetime.now())
        store_copy_from(cursor, partition.table(), self.small_datapackage,
                        modified)
        mark_modified(cursor, partition.table(), self.timestamp, modified)

        view_trendstore = TrendStore(view_datasource, entitytype, granularity,
                                     0, 'view').create(cursor)
        sql = (
            "SELECT "
            "entity_id, "
            "timestamp, "
            "cntr FROM {}").format(self.trendstore.base_table().render())
        self.view = View(view_trendstore, sql).define(cursor).create(cursor)
    def load(self, cursor):
        self.datasource = name_to_datasource(cursor, "testset1")

        self.entitytype = name_to_entitytype(cursor, self.entitytype_name)

        self.entities = list(map(partial(dn_to_entity, cursor), self.dns))

        datapackage = generate_datapackage_a(self.granularity,
                self.timestamp, self.entities)

        self.trendstore = TrendStore.get(cursor, self.datasource, self.entitytype,
                self.granularity)

        if not self.trendstore:
            self.trendstore = TrendStore(self.datasource, self.entitytype,
                    self.granularity, partition_size=86400, type="table").create(cursor)

        self.partition = store_datapackage(cursor, self.trendstore,
                datapackage, self.modified)
    def test_retrieve_aggregate(self):
        granularity = create_granularity("900")

        with closing(self.conn.cursor()) as cursor:
            datasource = name_to_datasource(cursor, "test")
            entitytype = name_to_entitytype(cursor, "Cell")

            TrendStore(datasource, entitytype, granularity, 86400,
                    "table").create(cursor)

        column_expressions = ["COUNT(entity_id)"]

        start = datasource.tzinfo.localize(datetime(2012, 12, 6, 14, 15))
        end = datasource.tzinfo.localize(datetime(2012, 12, 6, 14, 15))

        interval = start, end

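        # retrieve_aggregated evaluates the aggregate expressions over the
        # interval, grouped per entity.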
        retrieve_aggregated(self.conn, datasource, granularity, entitytype,
            column_expressions, interval, group_by="entity_id")
def test_array(conn):
    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "integration-test")
        entitytype = name_to_entitytype(cursor, "UtranCell")
        timestamp = pytz.utc.localize(datetime.utcnow())

        datapackage = DataPackage(
            attribute_names=['channel', 'pwr'],
            rows=[
                (10023, timestamp, ('7', '0,0,0,2,5,12,87,34,5,0,0')),
                (10024, timestamp, ('9', '0,0,0,1,11,15,95,41,9,0,0'))
            ]
        )

        attributes = datapackage.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        conn.commit()

        attributestore.store_txn(datapackage).run(conn)
def test_update(conn):
    with closing(conn.cursor()) as cursor:
        attribute_names = ['CellID', 'CCR', 'Drops']

        datasource = name_to_datasource(cursor, "integration-test")
        entitytype = name_to_entitytype(cursor, "UtranCell")
        time1 = datasource.tzinfo.localize(datetime.now())

        data_rows = [
            (10023, time1, ('10023', '0.9919', '17')),
            (10047, time1, ('10047', '0.9963', '18'))
        ]
        update_data_rows = [
            (10023, time1, ('10023', '0.5555', '17'))
        ]

        datapackage = DataPackage(attribute_names, data_rows)
        attributes = datapackage.deduce_attributes()

        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        attributestore.store_txn(datapackage).run(conn)

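        # Short pause so the update is stored with a later modified timestamp.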
        time.sleep(1)

        datapackage = DataPackage(attribute_names, update_data_rows)
        attributestore.store_txn(datapackage).run(conn)

        conn.commit()

        query = (
            'SELECT modified, "CCR" '
            'FROM {0}').format(attributestore.history_table.render())

        cursor.execute(query)
        rows = cursor.fetchall()
        assert_not_equal(rows[0][0], rows[1][0])
        assert_not_equal(rows[0][1], rows[1][1])
def test_update(conn):
    trend_names = ["CellID", "CCR", "Drops"]
    data_rows = [
        (10023, ("10023", "0.9919", "17")),
        (10047, ("10047", "0.9963", "18"))
    ]
    data_types = extract_data_types(data_rows)
    update_data_rows = [(10023, ("10023", "0.5555", "17"))]
    timestamp = datetime.now()
    granularity = create_granularity("900")

    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "test-src009")
        entitytype = name_to_entitytype(cursor, "test-type001")

        trendstore = TrendStore(datasource, entitytype, granularity, 86400, "table").create(cursor)
        partition = trendstore.partition(timestamp)

        table = partition.table()

        partition.create(cursor)

        partition.check_columns_exist(trend_names, data_types)(cursor)

    store(conn, SCHEMA, table.name, trend_names, timestamp, data_rows)

    store(conn, SCHEMA, table.name, trend_names, timestamp, update_data_rows)
    conn.commit()

    query = table.select([Column("modified"), Column("CCR")])

    with closing(conn.cursor()) as cursor:
        query.execute(cursor)
        rows = cursor.fetchall()

    assert_not_equal(rows[0][0], rows[1][0])
    assert_not_equal(rows[0][1], rows[1][1])
def test_name_to_entitytype(conn):
    with closing(conn.cursor()) as cursor:
        entitytype = helpers_v4.name_to_entitytype(cursor, "test_name_to_entitytype")

    assert entitytype is not None
    assert entitytype.name == "test_name_to_entitytype"
    def load(self, cursor):
        self.datasource = name_to_datasource(cursor, "test-source")
        self.entitytype = name_to_entitytype(cursor, "test_type")