Example #1
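Stores a data package for trends a, b and c, verifies the stored value of "c", then stores a second package in which "c" holds a timestamp string; the second store is expected to alter the column type, so the same query afterwards returns a datetime. As in all snippets on this page, the imports and the conn/dataset fixtures come from the surrounding test module.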
def test_store_alter_column(conn, dataset):
    partition_size = 86400
    trendstore = TrendStore(dataset.datasource, dataset.entitytype, dataset.granularity,
            partition_size, "table")

    with closing(conn.cursor()) as cursor:
        trendstore.create(cursor)

    conn.commit()

    timestamp = dataset.datasource.tzinfo.localize(
            datetime.datetime(2013, 4, 25, 11, 0))

    trends = ["a", "b", "c"]

    rows = [
        (1234, [1, 2, 3]),
        (2345, [4, 5, 6])]

    datapackage = DataPackage(dataset.granularity, timestamp, trends, rows)

    transaction = trendstore.store(datapackage)
    transaction.run(conn)

    table = trendstore.partition(timestamp).table()

    condition = And(
        Eq(Column("entity_id"), 2345),
        Eq(Column("timestamp"), timestamp))

    query = table.select(Column("c")).where_(condition)

    with closing(conn.cursor()) as cursor:
        query.execute(cursor)

        c, = cursor.fetchone()

    eq_(c, 6)

    # Store the same trends again, but now with a timestamp string for "c";
    # the store transaction should alter the column to a timestamp type.
    trends = ["a", "b", "c"]

    rows = [
        (2345, [4, 5, "2013-04-25 11:00:00"])]

    datapackage = DataPackage(dataset.granularity, timestamp, trends, rows)

    transaction = trendstore.store(datapackage)
    transaction.run(conn)

    with closing(conn.cursor()) as cursor:
        query.execute(cursor)

        c, = cursor.fetchone()

    eq_(c, datetime.datetime(2013, 4, 25, 11, 0, 0))
Example #2
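A DataSet fixture that creates a table-based TrendStore, fills one partition with small smallint values, and defines a view-based TrendStore on top of it; update_type then clears the timestamp, widens the cntr column to bigint and reloads values that would overflow a smallint.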
class NormalSet(DataSet):
    def load(self, cursor):
        entitytype = name_to_entitytype(cursor, "materialize_dummytype001")
        self.datasource = name_to_datasource(cursor,
                                             "materialize_src_normal001")
        view_datasource = name_to_datasource(cursor, "vmaterialize_normal001")
        granularity = create_granularity('900')

        self.timestamp = self.datasource.tzinfo.localize(
            datetime.datetime(2013, 8, 26, 22, 0, 0))
        trend_names = ["cntr"]
        rows_small = [
            (1234, (55,)),
            (1235, (56,))]

        self.small_datapackage = DataPackage(granularity, self.timestamp,
                                             trend_names, rows_small)

        rows_large = [
            (1234, (55243444334,)),
            (1235, (56242343242,))]

        self.large_datapackage = DataPackage(granularity, self.timestamp,
                                             trend_names, rows_large)

        self.trendstore = TrendStore(self.datasource, entitytype, granularity,
                                     86400, 'table')
        self.trendstore.create(cursor)
        partition = self.trendstore.partition(self.timestamp)
        partition.create(cursor)
        self.trendstore.check_columns_exist(trend_names, ["smallint"])(cursor)
        modified = self.datasource.tzinfo.localize(datetime.datetime.now())
        store_copy_from(cursor, partition.table(), self.small_datapackage,
                        modified)
        mark_modified(cursor, partition.table(), self.timestamp, modified)

        view_trendstore = TrendStore(view_datasource, entitytype, granularity,
                                     0, 'view').create(cursor)
        sql = (
            "SELECT "
            "entity_id, "
            "timestamp, "
            "cntr FROM {}").format(self.trendstore.base_table().render())
        self.view = View(view_trendstore, sql).define(cursor).create(cursor)

    def update_type(self, cursor):
        # Clear the previously stored values, widen the column type and
        # reload data that does not fit in a smallint.
        self.trendstore.clear_timestamp(self.timestamp)(cursor)
        names = ["cntr"]
        types = ["bigint"]
        self.trendstore.check_column_types(names, types)(cursor)
        partition = self.trendstore.partition(self.timestamp)
        modified = self.datasource.tzinfo.localize(datetime.datetime.now())
        store_copy_from(cursor, partition.table(), self.large_datapackage,
                        modified)
Example #3
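The minimal creation case: after create(cursor) the TrendStore should be persisted and have a non-None id.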
def test_create_trendstore(conn, dataset):
    partition_size = 3600

    trendstore = TrendStore(dataset.datasource, dataset.entitytype,
            dataset.granularity, partition_size, "table")

    with closing(conn.cursor()) as cursor:
        trendstore.create(cursor)

    assert isinstance(trendstore, TrendStore)

    assert trendstore.id is not None
Example #4
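A helper that looks up the partition size for a granularity and creates a table-based TrendStore. Note that create() runs inside the with block but nothing commits here, so committing the transaction is the caller's responsibility.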
    def create_trendstore(self, datasource, entitytype, granularity):
        partition_size = PARTITION_SIZES[str(granularity)]
        trendstore = TrendStore(datasource, entitytype, granularity,
                partition_size, 'table')

        with closing(self.conn.cursor()) as cursor:
            return trendstore.create(cursor)
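A minimal call-site sketch, assuming the method above sits on a test-suite class holding self.conn and that PARTITION_SIZES maps granularity names to partition sizes (the class name, the mapping's contents and the fixture variables here are hypothetical; only the names PARTITION_SIZES and create_trendstore come from the snippet):

PARTITION_SIZES = {"900": 3600, "86400": 86400}  # hypothetical values

suite = TrendStoreTestSuite(conn)  # hypothetical class defining create_trendstore
trendstore = suite.create_trendstore(datasource, entitytype,
        create_granularity("900"))
suite.conn.commit()  # create() does not commit, so the caller must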
Example #5
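Extends the creation test by also creating a partition for a localized timestamp, exercising the partition_size-based partitioning.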
def test_create_trendstore_with_children(conn, dataset):
    partition_size = 3600

    trendstore = TrendStore(dataset.datasource, dataset.entitytype,
            dataset.granularity, partition_size, "table")

    with closing(conn.cursor()) as cursor:
        trendstore.create(cursor)

        assert trendstore.id is not None

        timestamp = dataset.datasource.tzinfo.localize(
                datetime.datetime(2013, 5, 6, 14, 45))

        partition = trendstore.partition(timestamp)

        partition.create(cursor)
Example #6
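Demonstrates the column-management API: check_columns_exist and check_column_types each return a callable that is applied to a cursor, and the second call should migrate counter1/counter2 from smallint to integer and text.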
    def test_check_column_types(self):
        granularity = create_granularity("900")
        partition_size = 3600

        trendstore = TrendStore(self.datasource, self.entitytype, granularity,
                partition_size, "table")

        with closing(self.conn.cursor()) as cursor:
            trendstore.create(cursor)

            column_names = ["counter1", "counter2"]
            initial_data_types = ["smallint", "smallint"]
            data_types = ["integer", "text"]

            check_columns_exist = trendstore.check_columns_exist(column_names, initial_data_types)
            check_columns_exist(cursor)

            check_column_types = trendstore.check_column_types(column_names, data_types)
            check_column_types(cursor)
Example #7
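Stores two packages for consecutive timestamps, where the second package carries an extra trend "d"; the store transaction is expected to add the missing column on the fly.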
def test_store_copy_from_missing_column(conn, dataset):
    partition_size = 86400
    trendstore = TrendStore(dataset.datasource, dataset.entitytype, dataset.granularity,
            partition_size, "table")

    with closing(conn.cursor()) as cursor:
        trendstore.create(cursor)

    conn.commit()

    timestamp = dataset.datasource.tzinfo.localize(
            datetime.datetime(2013, 4, 25, 9, 45))

    trends = ["a", "b", "c"]

    def make_row_x(index):
        return (1234 + index, [1, 2, 3 + index])

    rows = [make_row_x(index) for index in range(100)]

    datapackage = DataPackage(dataset.granularity, timestamp, trends, rows)

    transaction = trendstore.store(datapackage)
    transaction.run(conn)

    # Store a second part with one extra column ("d")

    timestamp = dataset.datasource.tzinfo.localize(
            datetime.datetime(2013, 4, 25, 10, 0))

    trends = ["a", "b", "c", "d"]

    def make_row_y(index):
        return (1234 + index, [1, 2, 3, 4 + index])

    rows = [make_row_y(index) for index in range(100)]

    datapackage = DataPackage(dataset.granularity, timestamp, trends, rows)

    transaction = trendstore.store(datapackage)
    transaction.run(conn)
Example #8
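An end-to-end transformation test: two source trendstores are filled, a function set (identity, add and multiply mappings) is executed as a Transformation, and the test asserts that the recorded processed_max_modified equals the newest modified value of both sources. Names such as dns, dummy_type_name, modified_a, timestamp, create_source_1/2, k, first, unlines, render_datapackage, render_result, modified_table and state_table are module-level helpers and fixtures of the original test file and are not defined in this snippet.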
def test_run(conn):
    with closing(conn.cursor()) as cursor:
        reset_db(cursor)

    conn.commit()

    minerva_context = MinervaContext(conn, conn)

    source_granularity = create_granularity("900")
    dest_granularity = create_granularity("900")

    with closing(conn.cursor()) as cursor:
        source_datasource_1 = get_dummy_datasource(cursor, "dummy-src-1")
        source_datasource_2 = get_dummy_datasource(cursor, "dummy-src-2")
        dest_datasource = get_dummy_datasource(cursor, "dummy-transform-src")

        entitytype = get_dummy_entitytype(cursor, dummy_type_name)

        partition_size = 86400

        trendstore_1 = TrendStore(
            source_datasource_1, entitytype, source_granularity,
            partition_size, "table")
        trendstore_1.create(cursor)
        trendstore_2 = TrendStore(
            source_datasource_2, entitytype, source_granularity,
            partition_size, "table")
        trendstore_2.create(cursor)
        result_trendstore = TrendStore(
            dest_datasource, entitytype, dest_granularity, partition_size,
            "table")
        result_trendstore.create(cursor)

        function_mappings = [
            add_function_mapping(cursor, None, ["counter_a"], "identity_a"),
            add_function_mapping(cursor, None, ["counter_b"], "identity_b"),
            add_function_mapping(cursor, None, ["counter_c"], "identity_c"),
            add_function_mapping(
                cursor, "add", ["counter_a", "counter_b"], "add_a_b"),
            add_function_mapping(
                cursor, "multiply", ["counter_a", "300"], "a_times_300")]

        function_mapping_ids = [fm.id for fm in function_mappings]

        function_set_qtr = add_function_set(
            cursor, "test_set", "", function_mapping_ids,
            [source_datasource_1.id, source_datasource_2.id], entitytype.id,
            source_granularity.name, dest_datasource.id, entitytype.id,
            dest_granularity.name, None, [], None, True)

        entities = [get_or_create_entity(cursor, dn) for dn in dns]

        conn.commit()

        source_1 = create_source_1(source_granularity, entities)

        def store_modified_at(trendstore, datapackage, modified):
            # Build a transaction that stores the package with a fixed
            # 'modified' value; k(...) presumably wraps its argument as a
            # constant function for the transaction actions.
            def set_modified(state):
                state["modified"] = modified

            partition = trendstore.partition(datapackage.timestamp)
            set_modified_action = UpdateState(set_modified)
            copy_from = CopyFrom(k(partition), k(datapackage))

            return DbTransaction(set_modified_action, copy_from)

        transaction = store_modified_at(trendstore_1, source_1, modified_a)
        transaction.run(conn)

        source_2 = create_source_2(source_granularity, entities)

        transaction = store_modified_at(trendstore_2, source_2, modified_a)
        transaction.run(conn)

        result_partition = result_trendstore.partition(timestamp)

        result_table = result_partition.table()

    conn.commit()

    logging.debug("source_1")
    logging.debug(unlines(render_datapackage(source_1)))

    logging.debug("source_2")
    logging.debug(unlines(render_datapackage(source_2)))

    dest_timestamp = timestamp

    transformation = Transformation(function_set_qtr, dest_timestamp)

    transformation.execute(minerva_context)

    column_names = [
        "entity_id", "identity_a", "identity_b", "add_a_b", "a_times_300"]
    columns = [Column(name) for name in column_names]

    query = result_table.select(columns)

    with closing(conn.cursor()) as cursor:
        query.execute(cursor)

        logging.debug(unlines(render_result(cursor)))

        src_table_1 = trendstore_1.partition(timestamp).table()
        query = src_table_1.select(Call("max", Column("modified")))
        query.execute(cursor)
        src1_max_modified = first(cursor.fetchone())

        src_table_2 = trendstore_2.partition(timestamp).table()
        query = src_table_2.select(Call("max", Column("modified")))
        query.execute(cursor)
        src2_max_modified = first(cursor.fetchone())

        query = modified_table.select(Column("end")).where_(
            Eq(Column("table_name"), result_table.name))
        query.execute(cursor)

        query = state_table.select(Column("processed_max_modified")).where_(
            Eq(Column("function_set_id")))
        query.execute(cursor, (function_set_qtr.id,))
        processed_max_modified = first(cursor.fetchone())

        eq_(max(src1_max_modified, src2_max_modified), processed_max_modified)