Example 1
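All of the examples below assume roughly the following imports. The minerva module paths are assumptions and may differ per version; name_to_datasource, name_to_entitytype, get_column_names and Query are helpers from the project's test utilities, and TIMESTAMP is a fixed timezone-aware datetime defined in the test module.

from contextlib import closing
from datetime import datetime, timedelta
import time
import json

import pytz
from nose.tools import eq_, ok_, assert_not_equal

# Assumed module paths -- adjust to the minerva version in use.
from minerva.storage.attribute.datapackage import DataPackage
from minerva.storage.attribute.attributestore import AttributeStore
from minerva.directory import DataSource, EntityType, EntityDnRef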
def test_changing_datatype(conn):
    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "storagetest")
        entitytype = name_to_entitytype(cursor, "UtranCell")
        timestamp = pytz.utc.localize(datetime.utcnow())
        attribute_names = ['site_nr', 'height']

        datapackage_a = DataPackage(
            attribute_names=attribute_names,
            rows=[
                (10023, timestamp, ('10023', '15'))
            ]
        )

        # Same attribute names, but 'height' now carries a decimal value,
        # which should trigger a datatype change in the attribute store.
        datapackage_b = DataPackage(
            attribute_names=attribute_names,
            rows=[
                (10023, timestamp, ('10023', '25.6'))
            ]
        )

        attributes = datapackage_a.deduce_attributes()

        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        conn.commit()

        attributestore.store_txn(datapackage_a).run(conn)
        attributestore.store_txn(datapackage_b).run(conn)

        conn.commit()
        column_names = get_column_names(conn, "attribute_history",
                                        attributestore.table_name())
        eq_(len(column_names), 6)
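The count of six presumably reflects minerva's bookkeeping columns (entity_id, timestamp, modified and hash, all of which appear in queries in later examples) plus the two attributes; Example 3 reaches seven the same way with three attributes.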
Example 2
def test_simple(conn):
    with closing(conn.cursor()) as cursor:
        attribute_names = ['CellID', 'CCR', 'Drops']

        datasource = name_to_datasource(cursor, "integration-test")
        entitytype = name_to_entitytype(cursor, "UtranCell")

        timestamp = pytz.utc.localize(datetime.utcnow())
        data_rows = [(10023, timestamp, ('10023', '0.9919', '17'))]

        datapackage = DataPackage(attribute_names, data_rows)

        attributes = datapackage.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        attributestore.store_txn(datapackage).run(conn)

        # Materialize the current-value pointer before querying the
        # attribute store's table.
        query = (
            "SELECT attribute_directory.materialize_curr_ptr(attributestore) "
            "FROM attribute_directory.attributestore "
            "WHERE id = %s")

        cursor.execute(query, (attributestore.id,))

        query = (
            "SELECT timestamp "
            "FROM {0} "
            "LIMIT 1").format(attributestore.table.render())

        cursor.execute(query)
        stored_timestamp, = cursor.fetchone()

        eq_(stored_timestamp.toordinal(), timestamp.toordinal())
Example 3
def test_extra_column(conn):
    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "storagetest")
        entitytype = name_to_entitytype(cursor, "UtranCell")
        timestamp = pytz.utc.localize(datetime.utcnow())

        datapackage_a = DataPackage(
            attribute_names=['test0', 'test1'],
            rows=[
                (10023, timestamp, ('10023', '0.9919'))
            ]
        )

        # The second package carries an extra attribute, 'test2', which
        # should add a column to the attribute store.
        datapackage_b = DataPackage(
            attribute_names=['test0', 'test1', 'test2'],
            rows=[
                (10023, timestamp, ('10023', '0.9919', '17'))
            ]
        )

        attributes = datapackage_a.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        conn.commit()

        attributestore.store_txn(datapackage_a).run(conn)
        attributestore.store_txn(datapackage_b).run(conn)

        conn.commit()
        column_names = get_column_names(conn, 'attribute_history',
                                        attributestore.table_name())
        eq_(len(column_names), 7)
Example 4
def test_store_batch_update(conn):
    """Test batch wise storing with updates using staging table."""
    with closing(conn.cursor()) as cursor:
        attribute_names = ['CCR', 'Drops']
        timestamp = pytz.utc.localize(datetime.utcnow())

        datasource = DataSource.from_name(cursor, "integration-test")
        entitytype = EntityType.from_name(cursor, "UtranCell")

        datapackage = DataPackage(
            attribute_names,
            [(10023 + i, timestamp, ('0.9919', '17')) for i in range(100)]
        )

        update_datapackage = DataPackage(
            attribute_names,
            [(10023 + i, timestamp, ('0.9918', '18')) for i in range(100)]
        )

        attributes = datapackage.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        attributestore.store_batch(cursor, datapackage)
        conn.commit()
        modified_query = (
            'SELECT modified FROM {0} '
            'WHERE entity_id = 10023').format(
            attributestore.history_table.render())

        cursor.execute(modified_query)
        modified_a, = cursor.fetchone()

        attributestore.store_batch(cursor, update_datapackage)
        conn.commit()

        cursor.execute(modified_query)
        modified_b, = cursor.fetchone()

        assert modified_b > modified_a

        cursor.execute(
            "SELECT attribute_directory.materialize_curr_ptr(attributestore) "
            "FROM attribute_directory.attributestore "
            "WHERE id = %s", (attributestore.id,))

        query = (
            'SELECT timestamp, "Drops" '
            'FROM {0}').format(attributestore.table.render())

        cursor.execute(query)
        # Row count should be the same as the stored batch size
        eq_(cursor.rowcount, len(datapackage.rows))

        stored_timestamp, drops = cursor.fetchone()

        # Timestamp should be the same as the stored batch timestamp
        eq_(stored_timestamp, timestamp)
        eq_(drops, 18)
Example 5
def test_deduce_datatypes_empty():
    datapackage = DataPackage(
        attribute_names=('height', 'power', 'refs'),
        rows=[]
    )

    data_types = datapackage.deduce_data_types()

    # With no rows to inspect, deduce_data_types falls back to the
    # smallest type for every attribute.
    assert data_types == ['smallint', 'smallint', 'smallint']
Example 6
def test_compact(conn):
    """Test compacting of redundant data."""
    def make_rows(timestamp):
        return [
            (10023 + i, timestamp, ('0.9919', '17'))
            for i in range(100)
        ]

    with closing(conn.cursor()) as cursor:
        attribute_names = ['CCR', 'Drops']

        datasource = DataSource.from_name(cursor, "integration-test")
        entitytype = EntityType.from_name(cursor, "UtranCell")
        timestamp = pytz.utc.localize(datetime.utcnow())

        datapackage_a = DataPackage(
            attribute_names=attribute_names,
            rows=make_rows(timestamp)
        )

        datapackage_b = DataPackage(
            attribute_names=attribute_names,
            rows=make_rows(timestamp + timedelta(10))
        )

        attributes = datapackage_a.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        attributestore.store_batch(cursor, datapackage_a)
        conn.commit()

        attributestore.store_batch(cursor, datapackage_b)
        conn.commit()

        count_query = (
            "SELECT count(*) "
            "FROM {0}").format(attributestore.history_table.render())

        cursor.execute(count_query)

        count, = cursor.fetchone()
        # Row count should be the same as the stored batch sizes summed
        eq_(count, len(datapackage_b.rows) + len(datapackage_a.rows))

        # Both batches contain identical values (only the timestamps differ),
        # so compacting collapses them back to one record per entity.
        attributestore.compact(cursor)
        conn.commit()

        cursor.execute(count_query)

        count, = cursor.fetchone()
        # Row count should be the same as the first stored batch size
        eq_(count, len(datapackage_a.rows))
Example 7
def test_store_empty_rows(conn):
    """Test storing of empty datapackage."""
    with closing(conn.cursor()) as cursor:
        attribute_names = ['CCR', 'Drops']
        data_rows = []

        datasource = DataSource.from_name(cursor, "integration-test")
        entitytype = EntityType.from_name(cursor, "UtranCell")

        datapackage = DataPackage(attribute_names, data_rows)

        attributes = datapackage.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        attributestore.store_txn(datapackage).run(conn)
        conn.commit()
Example 8
def test_update_modified_column(conn):
    attribute_names = ['CCR', 'Drops']

    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "integration-test")
        entitytype = name_to_entitytype(cursor, "UtranCell")
        timestamp = datasource.tzinfo.localize(datetime.now())

        rows = [
            (10023, timestamp, ('0.9919', '17')),
            (10047, timestamp, ('0.9963', '18'))
        ]

        datapackage_a = DataPackage(
            attribute_names=attribute_names,
            rows=rows)

        datapackage_b = DataPackage(
            attribute_names=attribute_names,
            rows=rows[:1])

        attributes = datapackage_a.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)
        conn.commit()

    query = Query((
        "SELECT modified, hash "
        "FROM {0} "
        "WHERE entity_id = 10023").format(attributestore.history_table.render()))

    attributestore.store_txn(datapackage_a).run(conn)

    with closing(conn.cursor()) as cursor:
        modified_a, hash_a = query.execute(cursor).fetchone()

    attributestore.store_txn(datapackage_b).run(conn)

    with closing(conn.cursor()) as cursor:
        modified_b, hash_b = query.execute(cursor).fetchone()

    # modified should be updated when same data is delivered again
    ok_(modified_a < modified_b)

    # hash should remain the same when the same data is delivered again
    eq_(hash_a, hash_b)
Example 9
def test_store_empty_attributes(conn):
    """Test storing of empty datapackage."""
    with closing(conn.cursor()) as cursor:
        attribute_names = []
        timestamp = pytz.utc.localize(datetime.utcnow())
        rows = [(10023 + i, timestamp, tuple()) for i in range(100)]

        datasource = DataSource.from_name(cursor, "integration-test")
        entitytype = EntityType.from_name(cursor, "UtranCell")

        datapackage = DataPackage(attribute_names, rows)

        attributes = datapackage.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        attributestore.store_txn(datapackage).run(conn)
        conn.commit()
Example 10
    def store(self, column_names, fields, raw_data_rows):
        rows = list(raw_data_rows)
        raw_datapackage = DataPackage(column_names, rows)
        attributes = raw_datapackage.deduce_attributes()

        # The first field of the first row is expected to hold the
        # entity's distinguished name.
        entity_ref = EntityDnRef(rows[0][0])

        with closing(self.conn.cursor()) as cursor:
            datasource = DataSource.from_name(cursor, self.datasource)

            entitytype = entity_ref.get_entitytype(cursor)

            attributestore = AttributeStore.from_attributes(
                cursor, datasource, entitytype, attributes)

        self.conn.commit()

        attributestore.store_raw(raw_datapackage).run(self.conn)
Example 11
def test_to_dict():
    datapackage = DataPackage(
        attribute_names=('height', 'power'),
        rows=[
            (10034, TIMESTAMP, ['15.6', '68'])
        ]
    )

    json_data = datapackage.to_dict()

    expected_json = (
        '{"attribute_names": ["height", "power"], '
        '"rows": ['
        '[10034, "2013-08-30T15:30:00+00:00", ["15.6", "68"]]'
        ']'
        '}')

    eq_(json.dumps(json_data), expected_json)
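Note that from_dict (Example 18) accepts a different shape than to_dict produces here: a single top-level timestamp, with rows that omit per-row timestamps.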
Example 12
(Identical to the store method shown in Example 10.)
Example 13
def test_store_txn_with_empty(conn):
    """Test transactional storing with empty value."""
    with closing(conn.cursor()) as cursor:
        datasource = DataSource.from_name(cursor, "integration-test")
        entitytype = EntityType.from_name(cursor, "UtranCell")
        timestamp = pytz.utc.localize(datetime.utcnow())
        datapackage = DataPackage(
            attribute_names=['freeText'],
            rows=[
                (10023, timestamp, ('',))
            ]
        )

        attributes = datapackage.deduce_attributes()
        # An empty value is deduced as the smallest type, smallint.
        eq_(attributes[0].datatype, 'smallint')
        attributestore = AttributeStore.from_attributes(
            cursor, datasource, entitytype, attributes)
        conn.commit()

        attributestore.store_txn(datapackage).run(conn)
Example 14
def test_store_batch_with_list_c(conn):
    """Test batch wise storing using staging table."""
    attribute_names = ['height', 'refs']
    timestamp = pytz.utc.localize(datetime.utcnow())
    data_rows = [
        (10023, timestamp, ('19.5', ['', '', '', ''])),
        (10024, timestamp, ('19.3', ['', '', '', '']))
    ]

    datapackage = DataPackage(attribute_names, data_rows)
    attributes = datapackage.deduce_attributes()

    with closing(conn.cursor()) as cursor:
        datasource = DataSource.from_name(cursor, "integration-test")
        entitytype = EntityType.from_name(cursor, "UtranCell")

        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        attributestore.store_batch(cursor, datapackage)
        conn.commit()
Example 15
def test_array(conn):
    with closing(conn.cursor()) as cursor:
        datasource = name_to_datasource(cursor, "integration-test")
        entitytype = name_to_entitytype(cursor, "UtranCell")
        timestamp = pytz.utc.localize(datetime.utcnow())

        datapackage = DataPackage(
            attribute_names=['channel', 'pwr'],
            rows=[
                (10023, timestamp, ('7', '0,0,0,2,5,12,87,34,5,0,0')),
                (10024, timestamp, ('9', '0,0,0,1,11,15,95,41,9,0,0'))
            ]
        )

        attributes = datapackage.deduce_attributes()
        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        conn.commit()

        attributestore.store_txn(datapackage).run(conn)
Example 16
def test_update(conn):
    with closing(conn.cursor()) as cursor:
        attribute_names = ['CellID', 'CCR', 'Drops']

        datasource = name_to_datasource(cursor, "integration-test")
        entitytype = name_to_entitytype(cursor, "UtranCell")
        time1 = datasource.tzinfo.localize(datetime.now())

        data_rows = [
            (10023, time1, ('10023', '0.9919', '17')),
            (10047, time1, ('10047', '0.9963', '18'))
        ]
        update_data_rows = [
            (10023, time1, ('10023', '0.5555', '17'))
        ]

        datapackage = DataPackage(attribute_names, data_rows)
        attributes = datapackage.deduce_attributes()

        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        attributestore.store_txn(datapackage).run(conn)

        # Ensure the update gets a measurably later 'modified' timestamp.
        time.sleep(1)

        datapackage = DataPackage(attribute_names, update_data_rows)
        attributestore.store_txn(datapackage).run(conn)

        conn.commit()

        query = (
            'SELECT modified, "CCR" '
            'FROM {0}').format(attributestore.history_table.render())

        cursor.execute(query)
        test_list = cursor.fetchall()
        assert_not_equal(test_list[0][0], test_list[1][0])
        assert_not_equal(test_list[0][1], test_list[1][1])
Example 17
def test_store_batch_with_list_a(conn):
    """Test batch wise storing using staging table."""
    attribute_names = ['height', 'refs']
    timestamp = pytz.utc.localize(datetime.utcnow())
    data_rows = [
        (10023 + i, timestamp, ('19.5', ['r34', 'r23', 'r33']))
        for i in range(100)]
    datapackage = DataPackage(attribute_names, data_rows)
    attributes = datapackage.deduce_attributes()

    with closing(conn.cursor()) as cursor:
        datasource = DataSource.from_name(cursor, "integration-test")
        entitytype = EntityType.from_name(cursor, "UtranCell")

        attributestore = AttributeStore(datasource, entitytype, attributes)
        attributestore.create(cursor)

        attributestore.store_batch(cursor, datapackage)
        conn.commit()

        cursor.execute(
            "SELECT attribute_directory.materialize_curr_ptr(attributestore) "
            "FROM attribute_directory.attributestore "
            "WHERE id = %s", (attributestore.id,))

        query = (
            "SELECT timestamp, height "
            "FROM {0}").format(attributestore.table.render())

        cursor.execute(query)

        # Row count should be the same as the stored batch size
        eq_(cursor.rowcount, len(datapackage.rows))

        stored_timestamp, height = cursor.fetchone()
        # Timestamp should be the same as the stored batch timestamp
        eq_(stored_timestamp, timestamp)
        eq_(height, 19.5)
Example 18
def test_from_dict():
    json_data = {
        "timestamp": "2013-09-16T16:55:00+00:00",
        "attribute_names": ["tilt", "azimuth"],
        "rows": [
            [13403, ["4", "180"]]
        ]
    }

    datapackage = DataPackage.from_dict(json_data)

    eq_(datapackage.attribute_names[1], "azimuth")
    eq_(datapackage.rows[0][0], 13403)
    eq_(datapackage.rows[0][1][1], "180")
Example 19
def load_datapackage(stream):
    return DataPackage.from_dict(json.load(stream))
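A usage sketch for the loader above (the file name is hypothetical):

with open('datapackage.json') as stream:
    datapackage = load_datapackage(stream)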