Code Example #1
import datetime
import logging

# Header and Metadata come from the project's datablock module;
# the exact import path below is assumed for this excerpt.
from decisionengine.framework.dataspace.datablock import Header, Metadata


def load_sample_data_into_datasource(schema_only_db):
    """
    Load our sample test data into a datasource.
    This is a function, not a fixture, so it can be run against any
    datasource that provides the right API.
    """
    logging.getLogger().debug("Loading Sample data for tests")

    _pk = schema_only_db.store_taskmanager(
        "taskmanager1",
        "11111111-1111-1111-1111-111111111111",
        datetime.datetime(2016, 3, 14),
    )  # _pk=1 probably
    header = Header(_pk)
    metadata = Metadata(_pk)
    schema_only_db.insert(_pk, 1, "my_test_key", b"my_test_value", header,
                          metadata)
    schema_only_db.insert(_pk, 1, "a_test_key", b"a_test_value", header,
                          metadata)

    _pk = schema_only_db.store_taskmanager(
        "taskmanager2",
        "22222222-2222-2222-2222-222222222222")  # _pk=2 probably
    header = Header(_pk)
    metadata = Metadata(_pk)
    schema_only_db.insert(_pk, 2, "other_test_key", b"other_test_value",
                          header, metadata)

    # return the connection now that it isn't just the schema
    return schema_only_db
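
The loader above is deliberately a plain function rather than a fixture so it can be reused against any datasource implementation. A minimal sketch of how it might be wired into a pytest fixture named datasource (this wiring is an assumption for illustration, not the project's actual conftest):

import pytest

@pytest.fixture
def datasource(schema_only_db):
    # Hypothetical fixture: populate the schema-only database with the
    # sample rows above and hand the loaded datasource to each test.
    return load_sample_data_into_datasource(schema_only_db)
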
Code Example #2
def test_update_bad(datasource):  # noqa: F811
    """Do updates fail to work on bogus taskmanager as expected"""
    metadata_row = datasource.get_metadata(
        taskmanager_id=1,
        generation_id=1,
        key="my_test_key",
    )
    header_row = datasource.get_header(
        taskmanager_id=1,
        generation_id=1,
        key="my_test_key",
    )
    with pytest.raises(Exception):
        datasource.update(
            taskmanager_id=100,
            generation_id=1,
            key="my_test_key",
            value=b"I changed IT",
            header=Header(
                header_row[0],
                create_time=header_row[4],
                expiration_time=header_row[5],
                scheduled_create_time=header_row[6],
                creator=header_row[7],
                schema_id=header_row[8],
            ),
            metadata=Metadata(
                metadata_row[0],
                state=metadata_row[4],
                generation_id=metadata_row[2],
                generation_time=metadata_row[5],
                missed_update_count=metadata_row[6],
            ),
        )
Code Example #3
def test_insert(datasource):  # noqa: F811
    """Can we insert new elements"""
    primary_key = datasource.store_taskmanager("taskmanager3", "33333333-3333-3333-3333-333333333333")
    assert primary_key > 1

    header = Header(primary_key)
    metadata = Metadata(primary_key)
    datasource.insert(
        primary_key,
        1,
        "sample_test_key",
        b"sample_test_value",
        header,
        metadata,
    )

    result1 = datasource.get_dataproducts(taskmanager_id=primary_key)

    assert result1 == [
        {
            "key": "sample_test_key",
            "taskmanager_id": primary_key,
            "generation_id": 1,
            "value": b"sample_test_value",
        }
    ]

    result2 = datasource.get_dataproducts(taskmanager_id=primary_key, key="sample_test_key")

    assert result1 == result2
Code Example #4
def test_update(datasource):  # noqa: F811
    """Do updates work as expected"""
    metadata_row = datasource.get_metadata(
        taskmanager_id=1,
        generation_id=1,
        key="my_test_key",
    )
    header_row = datasource.get_header(
        taskmanager_id=1,
        generation_id=1,
        key="my_test_key",
    )
    datasource.update(
        taskmanager_id=1,
        generation_id=1,
        key="my_test_key",
        value=b"I changed IT",
        header=Header(
            header_row[0],
            create_time=header_row[4],
            expiration_time=header_row[5],
            scheduled_create_time=header_row[6],
            creator=header_row[7],
            schema_id=header_row[8],
        ),
        metadata=Metadata(
            metadata_row[0],
            state=metadata_row[4],
            generation_id=metadata_row[2],
            generation_time=metadata_row[5],
            missed_update_count=metadata_row[6],
        ),
    )

    result1 = datasource.get_dataproduct(
        taskmanager_id=1,
        generation_id=1,
        key="my_test_key",
    )

    assert result1 == b"I changed IT"
Code Example #5
def header(data):
    return Header(
        data["taskmanager"][0]["taskmanager_id"]
    )
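
A quick usage sketch for this helper; the shape of the data argument is inferred from the lookup above, and the literal values are illustrative only:

sample = {"taskmanager": [{"taskmanager_id": 1}]}
hdr = header(sample)  # builds a Header keyed to taskmanager_id 1
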
Code Example #6
    def test_datablock(self):
        print "Hello test_datablock"
        if config['dataspace']['db_driver']['name'] == 'SQLite3DB':
            filename = config['dataspace']['db_driver']['config']['filename']
            if os.path.exists(filename):
                os.unlink(filename)

        dataspace = DataSpace(config)

        taskmanager_id = 'E0B9A7F5-B55E-47F6-B5EF-DCCB8B977AFE'
        generation_id = 9999

        datablock = DataBlock(dataspace, taskmanager_id, generation_id)

        timestamp = time.time()
        key = 'aKey'
        value = {"m1": "v1"}
        header = Header(taskmanager_id,
                        create_time=timestamp,
                        scheduled_create_time=timestamp + 600,
                        schema_id=0)
        metadata = Metadata(taskmanager_id,
                            generation_time=timestamp,
                            generation_id=generation_id)

        print('Doing put:\nkey=%s\nvalue=%s\n\nheader=%s\n\nmetadata=%s\n\n' % (
            key, value, header, metadata))
        datablock.put(key, value, header, metadata)
        datablock.put('zKey', {'mz': 'vz'}, header, metadata)

        print('Doing get: key=%s ...\n' % key)
        db_value = datablock.get(key)
        print(db_value)
        print('Doing get_header: key=%s ...\n' % key)
        db_header = datablock.get_header(key)
        print(db_header)
        print('Doing get_metadata: key=%s ...\n' % key)
        db_metadata = datablock.get_metadata(key)
        print(db_metadata)

        print('Performing comparison of value, header and metadata ...')
        if (are_dicts_same(value, db_value)
                and are_dicts_same(header, db_header)
                and are_dicts_same(metadata, db_metadata)):
            print('DICTS CONSISTENCY CHECK PASSED\n')
        else:
            print('DICTS CONSISTENCY CHECK FAILED\n')
        assert (are_dicts_same(value, db_value)
                and are_dicts_same(header, db_header)
                and are_dicts_same(metadata, db_metadata))

        # TEST: Insert new value for same key
        new_value = {"m2": "v2"}
        print('Doing put:\nkey=%s\nvalue=%s\nheader=%s\nmetadata=%s\n' % (
            key, new_value, header, metadata))
        datablock.put(key, new_value, header, metadata)
        print('Doing get: key=%s ...\n' % key)
        print(datablock.get(key))
        db_new_value = datablock.get(key)
        db_header = datablock.get_header(key)
        db_metadata = datablock.get_metadata(key)
        assert are_dicts_same(new_value, db_new_value)
        assert (are_dicts_same(new_value, db_new_value)
                and are_dicts_same(header, db_header)
                and are_dicts_same(metadata, db_metadata))

        # TEST: Duplicate functionality

        print('-----------------------')
        print('Duplicating datablock ...\n')
        dup_datablock = datablock.duplicate()

        print('---')
        print('datablock.generation_id =', datablock.generation_id)
        print('Doing get: key=%s ...\n' % key)
        print(datablock.get(key))
        print('Doing get_header: key=%s ...\n' % key)
        print(datablock.get_header(key))
        print('Doing get_metadata: key=%s ...\n' % key)
        print(datablock.get_metadata(key))
        print('---')
        print('dup_datablock.generation_id =', dup_datablock.generation_id)
        print('Doing get on dup_datablock: key=%s\n' % key)
        print(dup_datablock.get(key))
        print('Doing get_header on dup_datablock: key=%s ...\n' % key)
        print(dup_datablock.get_header(key))
        print('Doing get_metadata on dup_datablock: key=%s ...\n' % key)
        print(dup_datablock.get_metadata(key))
        print('---')

        # TEST: Insert new value on duplicated datablock
        new_value3 = {"m3": "v3"}
        dup_datablock.put(key, new_value3, dup_datablock.get_header(key),
                          dup_datablock.get_metadata(key))

        print(dup_datablock.get(key))
        print(dup_datablock.get_header(key))
        print(dup_datablock.get_metadata(key))

        db_dup_value = dup_datablock.get(key)
        db_dup_header = dup_datablock.get_header(key)
        db_dup_metadata = dup_datablock.get_metadata(key)

        assert (are_dicts_same(new_value3, db_dup_value)
                and are_dicts_same(header, db_dup_header)
                and are_dicts_same(metadata, db_dup_metadata))

        metadata1 = Metadata(taskmanager_id,
                             generation_time=timestamp,
                             generation_id=(generation_id + 1))

        db_orig_value = datablock.get(key)
        db_orig_header = datablock.get_header(key)
        db_orig_metadata = datablock.get_metadata(key)

        assert (are_dicts_same(new_value, db_orig_value)
                and are_dicts_same(header, db_orig_header)
                and are_dicts_same(metadata1, db_orig_metadata))

        dataspace.close()
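
The are_dicts_same helper used throughout this test is not shown in the excerpt. A minimal sketch, assuming it only needs to check that two mapping-like objects (plain dicts, or Header/Metadata instances that behave like dicts) hold the same key/value pairs:

def are_dicts_same(dict1, dict2):
    # Assumed behavior: coerce both arguments to plain dicts and compare.
    return dict(dict1) == dict(dict2)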