Example #1
def test_transaction_execute_aql(db, col, docs):
    with db.begin_transaction(return_result=True,
                              read=[col.name],
                              write=[col.name]) as txn_db:
        job1 = txn_db.aql.execute('INSERT @data IN @@collection',
                                  bind_vars={
                                      'data': docs[0],
                                      '@collection': col.name
                                  })
        job2 = txn_db.aql.execute('INSERT @data IN @@collection',
                                  bind_vars={
                                      'data': docs[1],
                                      '@collection': col.name
                                  })
        job3 = txn_db.aql.execute('RETURN DOCUMENT(@@collection, @key)',
                                  bind_vars={
                                      'key': docs[1]['_key'],
                                      '@collection': col.name
                                  })
        jobs = txn_db.queued_jobs()
        assert jobs == [job1, job2, job3]
        assert all(job.status() == 'pending' for job in jobs)

    assert txn_db.queued_jobs() == [job1, job2, job3]
    assert all(job.status() == 'done' for job in txn_db.queued_jobs())
    assert extract('_key', col.all()) == extract('_key', docs[:2])

    # Test successful results
    assert extract('_key', job3.result()) == [docs[1]['_key']]
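
A minimal, non-test sketch of the same pattern follows. It assumes a database handle db and an existing collection named 'students' (stand-ins for the db and col fixtures above); the context manager commits the queued jobs on exit, after which each job's result() becomes available.

# Sketch only: 'db' and the 'students' collection are assumed to exist,
# analogous to the fixtures used in the test above.
with db.begin_transaction(return_result=True,
                          read=['students'],
                          write=['students']) as txn_db:
    job = txn_db.aql.execute('INSERT @data IN @@collection',
                             bind_vars={'data': {'_key': 'alice'},
                                        '@collection': 'students'})
    assert job.status() == 'pending'  # nothing has been committed yet

# Exiting the block commits the transaction; the result is now available.
assert job.status() == 'done'
result_cursor = job.result()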
Example #2
def test_delete_index(icol, bad_col):
    old_indexes = set(extract("id", icol.indexes()))
    icol.add_hash_index(["attr3", "attr4"], unique=True)
    icol.add_skiplist_index(["attr3", "attr4"], unique=True)
    icol.add_fulltext_index(fields=["attr3"], min_length=10)

    new_indexes = set(extract("id", icol.indexes()))
    assert new_indexes.issuperset(old_indexes)

    indexes_to_delete = new_indexes - old_indexes
    for index_id in indexes_to_delete:
        assert icol.delete_index(index_id) is True

    new_indexes = set(extract("id", icol.indexes()))
    assert new_indexes == old_indexes

    # Test delete missing indexes
    for index_id in indexes_to_delete:
        assert icol.delete_index(index_id, ignore_missing=True) is False
    for index_id in indexes_to_delete:
        with assert_raises(IndexDeleteError) as err:
            icol.delete_index(index_id, ignore_missing=False)
        assert err.value.error_code == 1212

    # Test delete indexes with bad collection
    for index_id in indexes_to_delete:
        with assert_raises(IndexDeleteError) as err:
            bad_col.delete_index(index_id, ignore_missing=False)
        assert err.value.error_code in {11, 1228}
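
As a rough sketch of the create/inspect/delete cycle exercised above, assuming icol is any existing collection handle:

# Sketch only: 'icol' is assumed to be an existing collection handle.
index = icol.add_hash_index(fields=['email'], unique=True)

# Every index created on the collection shows up in icol.indexes().
assert index['id'] in [idx['id'] for idx in icol.indexes()]

# delete_index() returns True on success; with ignore_missing=True it
# returns False instead of raising IndexDeleteError for unknown IDs.
assert icol.delete_index(index['id']) is True
assert icol.delete_index(index['id'], ignore_missing=True) is False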
Example #3
def test_batch_job_result_not_ready(db, col, docs):
    batch_db = db.begin_batch_execution(return_result=True)
    job = batch_db.collection(col.name).insert_many(docs)

    # Test get job result before commit
    with pytest.raises(BatchJobResultError) as err:
        job.result()
    assert str(err.value) == "result not available yet"

    # Test commit to make sure it still works after the errors
    assert batch_db.commit() == [job]
    assert len(job.result()) == len(docs)
    assert extract("_key", col.all()) == extract("_key", docs)
Example #4
def test_batch_job_result_not_ready(db, col, docs):
    batch_db = db.begin_batch_execution(return_result=True)
    job = batch_db.collection(col.name).insert_many(docs)

    # Test get job result before commit
    with pytest.raises(BatchJobResultError) as err:
        job.result()
    assert str(err.value) == 'result not available yet'

    # Test commit to make sure it still works after the errors
    assert batch_db.commit() == [job]
    assert len(job.result()) == len(docs)
    assert extract('_key', col.all()) == extract('_key', docs)
Example #5
def test_transaction_job_result_not_ready(db, col, docs):
    txn_db = db.begin_transaction(return_result=True)
    job = txn_db.collection(col.name).insert_many(docs)

    # Test get job result before commit
    with pytest.raises(TransactionJobResultError) as err:
        job.result()
    assert str(err.value) == 'result not available yet'

    # Test commit to make sure it still works after the errors
    assert list(txn_db.commit()) == [job]
    assert len(job.result()) == len(docs)
    assert extract('_key', col.all()) == extract('_key', docs)
Example #6
def test_add_hash_index(icol):
    fields = ['attr1', 'attr2']
    result = icol.add_hash_index(fields=fields,
                                 unique=True,
                                 sparse=True,
                                 deduplicate=True,
                                 name='hash_index',
                                 in_background=False)

    expected_index = {
        'sparse': True,
        'type': 'hash',
        'fields': ['attr1', 'attr2'],
        'unique': True,
        'deduplicate': True,
        'name': 'hash_index'
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result['id'] in extract('id', icol.indexes())

    # Clean up the index
    icol.delete_index(result['id'])
Example #7
def test_transaction_commit(db, col, docs):
    txn_db = db.begin_transaction(read=col.name,
                                  write=col.name,
                                  exclusive=[],
                                  sync=True,
                                  allow_implicit=False,
                                  lock_timeout=1000,
                                  max_size=10000)
    txn_col = txn_db.collection(col.name)

    assert '_rev' in txn_col.insert(docs[0])
    assert '_rev' in txn_col.delete(docs[0])
    assert '_rev' in txn_col.insert(docs[1])
    assert '_rev' in txn_col.delete(docs[1])
    assert '_rev' in txn_col.insert(docs[2])
    txn_db.commit_transaction()

    assert extract('_key', col.all()) == [docs[2]['_key']]
    assert txn_db.transaction_status() == 'committed'

    # Test commit_transaction with an illegal transaction ID
    txn_db._executor._id = 'illegal'
    with pytest.raises(TransactionCommitError) as err:
        txn_db.commit_transaction()
    assert err.value.error_code in {10, 1655}
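
The explicit begin/commit flow above can be sketched as follows, assuming db and a collection named 'accounts' already exist; abort_transaction() (see the later abort example) follows the same shape.

# Sketch only: 'db' and the 'accounts' collection are assumed to exist.
txn_db = db.begin_transaction(write='accounts', sync=True)
txn_col = txn_db.collection('accounts')
try:
    txn_col.insert({'_key': 'acc1', 'balance': 100})
    txn_col.insert({'_key': 'acc2', 'balance': 200})
    txn_db.commit_transaction()
except Exception:
    # Roll back if anything inside the transaction failed.
    txn_db.abort_transaction()
    raise

assert txn_db.transaction_status() in ('committed', 'aborted')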
Example #8
def test_add_fulltext_index(icol):
    # Test add fulltext index with one attribute
    result = icol.add_fulltext_index(fields=['attr1'],
                                     min_length=10,
                                     name='fulltext_index',
                                     in_background=True)
    expected_index = {
        'sparse': True,
        'type': 'fulltext',
        'fields': ['attr1'],
        'min_length': 10,
        'unique': False,
        'name': 'fulltext_index'
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result['id'] in extract('id', icol.indexes())

    # Test add fulltext index with two attributes (should fail)
    with assert_raises(IndexCreateError) as err:
        icol.add_fulltext_index(fields=['attr1', 'attr2'])
    assert err.value.error_code == 10

    # Clean up the index
    icol.delete_index(result['id'])
Example #9
def test_add_fulltext_index(icol):
    # Test add fulltext index with one attribute
    result = icol.add_fulltext_index(
        fields=["attr1"], min_length=10, name="fulltext_index", in_background=True
    )
    expected_index = {
        "sparse": True,
        "type": "fulltext",
        "fields": ["attr1"],
        "min_length": 10,
        "unique": False,
        "name": "fulltext_index",
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result["id"] in extract("id", icol.indexes())

    # Test add fulltext index with two attributes (should fail)
    with assert_raises(IndexCreateError) as err:
        icol.add_fulltext_index(fields=["attr1", "attr2"])
    assert err.value.error_code == 10

    # Clean up the index
    icol.delete_index(result["id"])
Example #10
def test_add_skiplist_index(icol):
    fields = ["attr1", "attr2"]
    result = icol.add_skiplist_index(
        fields=fields,
        unique=True,
        sparse=True,
        deduplicate=True,
        name="skiplist_index",
        in_background=False,
    )

    expected_index = {
        "sparse": True,
        "type": "skiplist",
        "fields": ["attr1", "attr2"],
        "unique": True,
        "deduplicate": True,
        "name": "skiplist_index",
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result["id"] in extract("id", icol.indexes())

    # Clean up the index
    icol.delete_index(result["id"])
Example #11
def test_user_create_with_new_database(client, sys_db, cluster):
    if cluster:
        pytest.skip("Not tested in a cluster setup")

    db_name = generate_db_name()

    username1 = generate_username()
    username2 = generate_username()
    username3 = generate_username()

    password1 = generate_string()
    password2 = generate_string()
    password3 = generate_string()

    result = sys_db.create_database(
        name=db_name,
        users=[
            {
                "username": username1,
                "password": password1,
                "active": True
            },
            {
                "username": username2,
                "password": password2,
                "active": True
            },
            {
                "username": username3,
                "password": password3,
                "active": False
            },
        ],
    )
    assert result is True

    sys_db.update_permission(username1, permission="rw", database=db_name)
    sys_db.update_permission(username2, permission="rw", database=db_name)
    sys_db.update_permission(username3, permission="rw", database=db_name)

    # Test if the users were created properly
    usernames = extract("username", sys_db.users())
    assert all(u in usernames for u in [username1, username2, username3])

    # Test if the first user has access to the database
    db = client.db(db_name, username1, password1)
    db.properties()

    # Test if the second user also has access to the database
    db = client.db(db_name, username2, password2)
    db.properties()

    # Test if the third user has access to the database (should not)
    db = client.db(db_name, username3, password3)
    with assert_raises(DatabasePropertiesError) as err:
        db.properties()
    assert err.value.http_code == 401
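
A condensed sketch of the same flow, assuming a system database handle sys_db and a client instance analogous to the fixtures: the users listed in create_database() are created along with the database, and update_permission() grants them access.

# Sketch only: 'sys_db' (the _system database) and 'client' are assumed
# to exist, analogous to the fixtures above.
sys_db.create_database(
    name='shop',
    users=[{'username': 'jane', 'password': 'secret', 'active': True}],
)
sys_db.update_permission('jane', permission='rw', database='shop')

# The new user can now open the database.
shop_db = client.db('shop', username='jane', password='secret')
shop_db.properties()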
Example #12
def test_transaction_execute_with_result(db, col, docs):
    with db.begin_transaction(return_result=True) as txn_db:
        txn_col = txn_db.collection(col.name)
        job1 = txn_col.insert(docs[0])
        job2 = txn_col.insert(docs[1])
        job3 = txn_col.get(docs[1])
        jobs = txn_db.queued_jobs()
        assert jobs == [job1, job2, job3]
        assert all(job.status() == 'pending' for job in jobs)

    assert txn_db.queued_jobs() == [job1, job2, job3]
    assert all(job.status() == 'done' for job in txn_db.queued_jobs())
    assert extract('_key', col.all()) == extract('_key', docs[:2])

    # Test successful results
    assert job1.result()['_key'] == docs[0]['_key']
    assert job2.result()['_key'] == docs[1]['_key']
    assert job3.result()['_key'] == docs[1]['_key']
Example #13
def test_add_geo_index(icol):
    # Test add geo index with one attribute
    result = icol.add_geo_index(fields=['attr1'],
                                ordered=False,
                                name='geo_index',
                                in_background=True)

    expected_index = {
        'sparse': True,
        'type': 'geo',
        'fields': ['attr1'],
        'unique': False,
        'geo_json': False,
        'name': 'geo_index'
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result['id'] in extract('id', icol.indexes())

    # Test add geo index with two attributes
    result = icol.add_geo_index(
        fields=['attr1', 'attr2'],
        ordered=False,
    )
    expected_index = {
        'sparse': True,
        'type': 'geo',
        'fields': ['attr1', 'attr2'],
        'unique': False,
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result['id'] in extract('id', icol.indexes())

    # Test add geo index with more than two attributes (should fail)
    with assert_raises(IndexCreateError) as err:
        icol.add_geo_index(fields=['attr1', 'attr2', 'attr3'])
    assert err.value.error_code == 10

    # Clean up the index
    icol.delete_index(result['id'])
Example #14
def test_add_geo_index(icol):
    # Test add geo index with one attribute
    result = icol.add_geo_index(
        fields=["attr1"], ordered=False, name="geo_index", in_background=True
    )

    expected_index = {
        "sparse": True,
        "type": "geo",
        "fields": ["attr1"],
        "unique": False,
        "geo_json": False,
        "name": "geo_index",
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result["id"] in extract("id", icol.indexes())

    # Test add geo index with two attributes
    result = icol.add_geo_index(
        fields=["attr1", "attr2"],
        ordered=False,
    )
    expected_index = {
        "sparse": True,
        "type": "geo",
        "fields": ["attr1", "attr2"],
        "unique": False,
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result["id"] in extract("id", icol.indexes())

    # Test add geo index with more than two attributes (should fail)
    with assert_raises(IndexCreateError) as err:
        icol.add_geo_index(fields=["attr1", "attr2", "attr3"])
    assert err.value.error_code == 10

    # Clean up the index
    icol.delete_index(result["id"])
Example #15
def test_user_create_with_new_database(client, sys_db):
    db_name = generate_db_name()

    username1 = generate_username()
    username2 = generate_username()
    username3 = generate_username()

    password1 = generate_string()
    password2 = generate_string()
    password3 = generate_string()

    result = sys_db.create_database(name=db_name,
                                    users=[
                                        {
                                            'username': username1,
                                            'password': password1,
                                            'active': True
                                        },
                                        {
                                            'username': username2,
                                            'password': password2,
                                            'active': True
                                        },
                                        {
                                            'username': username3,
                                            'password': password3,
                                            'active': False
                                        },
                                    ])
    assert result is True

    sys_db.update_permission(username1, permission='rw', database=db_name)
    sys_db.update_permission(username2, permission='rw', database=db_name)
    sys_db.update_permission(username3, permission='rw', database=db_name)

    # Test if the users were created properly
    usernames = extract('username', sys_db.users())
    assert all(u in usernames for u in [username1, username2, username3])

    # Test if the first user has access to the database
    db = client.db(db_name, username1, password1)
    db.properties()

    # Test if the second user also has access to the database
    db = client.db(db_name, username2, password2)
    db.properties()

    # Test if the third user has access to the database (should not)
    db = client.db(db_name, username3, password3)
    with assert_raises(DatabasePropertiesError) as err:
        db.properties()
    assert err.value.http_code == 401
Example #16
def test_async_execute_without_result(db, col, docs):
    # Insert test documents asynchronously with return_result set to False
    async_col = db.begin_async_execution(return_result=False).collection(
        col.name)

    # Ensure that no jobs were returned
    assert async_col.insert(docs[0]) is None
    assert async_col.insert(docs[1]) is None
    assert async_col.insert(docs[2]) is None

    # Ensure that the operations went through
    wait_on_jobs(db)
    assert extract('_key', col.all()) == ['1', '2', '3']
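
A minimal fire-and-forget sketch of the pattern above, assuming db and a collection named 'events' exist; with return_result=False the API returns None instead of job objects, so there is nothing to poll.

# Sketch only: 'db' and the 'events' collection are assumed to exist.
async_db = db.begin_async_execution(return_result=False)
async_col = async_db.collection('events')

# Each call queues the operation server-side and returns None immediately.
assert async_col.insert({'type': 'signup'}) is None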
Example #17
def test_batch_execute_with_result(db, col, docs):
    with db.begin_batch_execution(return_result=True) as batch_db:
        batch_col = batch_db.collection(col.name)
        job1 = batch_col.insert(docs[0])
        job2 = batch_col.insert(docs[1])
        job3 = batch_col.insert(docs[1])  # duplicate
        jobs = batch_db.queued_jobs()
        assert jobs == [job1, job2, job3]
        assert all(job.status() == 'pending' for job in jobs)

    assert batch_db.queued_jobs() == [job1, job2, job3]
    assert all(job.status() == 'done' for job in batch_db.queued_jobs())
    assert extract('_key', col.all()) == extract('_key', docs[:2])

    # Test successful results
    assert job1.result()['_key'] == docs[0]['_key']
    assert job2.result()['_key'] == docs[1]['_key']

    # Test insert error result
    with pytest.raises(DocumentInsertError) as err:
        job3.result()
    assert err.value.error_code == 1210
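
The batch pattern above, reduced to a sketch (db and a collection 'students' assumed; DocumentInsertError is assumed to come from arango.exceptions as in python-arango): jobs stay pending until the block commits, and a failed operation only surfaces when its result() is read.

# Sketch only: 'db' and the 'students' collection are assumed to exist.
from arango.exceptions import DocumentInsertError  # assumed import path

with db.begin_batch_execution(return_result=True) as batch_db:
    batch_col = batch_db.collection('students')
    ok_job = batch_col.insert({'_key': 'bob'})
    dup_job = batch_col.insert({'_key': 'bob'})  # duplicate key, fails on commit

print(ok_job.result()['_key'])  # 'bob'
try:
    dup_job.result()            # the failure surfaces only here
except DocumentInsertError as exc:
    print(exc.error_code)       # 1210 (unique constraint violated)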
Example #18
def test_batch_execute_with_result(db, col, docs):
    with db.begin_batch_execution(return_result=True) as batch_db:
        batch_col = batch_db.collection(col.name)
        job1 = batch_col.insert(docs[0])
        job2 = batch_col.insert(docs[1])
        job3 = batch_col.insert(docs[1])  # duplicate
        jobs = batch_db.queued_jobs()
        assert jobs == [job1, job2, job3]
        assert all(job.status() == 'pending' for job in jobs)

    assert batch_db.queued_jobs() == [job1, job2, job3]
    assert all(job.status() == 'done' for job in batch_db.queued_jobs())
    assert extract('_key', col.all()) == extract('_key', docs[:2])

    # Test successful results
    assert job1.result()['_key'] == docs[0]['_key']
    assert job2.result()['_key'] == docs[1]['_key']

    # Test insert error result
    with pytest.raises(DocumentInsertError) as err:
        job3.result()
    assert err.value.error_code == 1210
Example #19
def test_vertex_collection_management(db, graph, bad_graph):
    # Test create valid "from" vertex collection
    fvcol_name = generate_col_name()
    assert not graph.has_vertex_collection(fvcol_name)
    assert not db.has_collection(fvcol_name)

    fvcol = graph.create_vertex_collection(fvcol_name)
    assert graph.has_vertex_collection(fvcol_name)
    assert db.has_collection(fvcol_name)
    assert fvcol.name == fvcol_name
    assert fvcol.graph == graph.name
    assert fvcol_name in repr(fvcol)
    assert fvcol_name in graph.vertex_collections()
    assert fvcol_name in extract('name', db.collections())

    # Test create duplicate vertex collection
    with assert_raises(VertexCollectionCreateError) as err:
        graph.create_vertex_collection(fvcol_name)
    assert err.value.error_code == 1938
    assert fvcol_name in graph.vertex_collections()
    assert fvcol_name in extract('name', db.collections())

    # Test create valid "to" vertex collection
    tvcol_name = generate_col_name()
    assert not graph.has_vertex_collection(tvcol_name)
    assert not db.has_collection(tvcol_name)

    tvcol = graph.create_vertex_collection(tvcol_name)
    assert graph.has_vertex_collection(tvcol_name)
    assert db.has_collection(tvcol_name)
    assert tvcol.name == tvcol_name
    assert tvcol.graph == graph.name
    assert tvcol_name in repr(tvcol)
    assert tvcol_name in graph.vertex_collections()
    assert tvcol_name in extract('name', db.collections())

    # Test list vertex collection via bad fabric
    with assert_raises(VertexCollectionListError) as err:
        bad_graph.vertex_collections()
    assert err.value.error_code == 1228

    # Test delete missing vertex collection
    with assert_raises(VertexCollectionDeleteError) as err:
        graph.delete_vertex_collection(generate_col_name())
    assert err.value.error_code == 1926

    # Test delete "to" vertex collection with purge option
    assert graph.delete_vertex_collection(tvcol_name, purge=True) is True
    assert tvcol_name not in graph.vertex_collections()
    assert fvcol_name in extract('name', db.collections())
    assert tvcol_name not in extract('name', db.collections())
    assert not graph.has_vertex_collection(tvcol_name)

    # Test delete "from" vertex collection without purge option
    assert graph.delete_vertex_collection(fvcol_name, purge=False) is True
    assert fvcol_name not in graph.vertex_collections()
    assert fvcol_name in extract('name', db.collections())
    assert not graph.has_vertex_collection(fvcol_name)
Example #20
def test_vertex_collection_management(db, graph, bad_graph):
    # Test create valid "from" vertex collection
    fvcol_name = generate_col_name()
    assert not graph.has_vertex_collection(fvcol_name)
    assert not db.has_collection(fvcol_name)

    fvcol = graph.create_vertex_collection(fvcol_name)
    assert graph.has_vertex_collection(fvcol_name)
    assert db.has_collection(fvcol_name)
    assert fvcol.name == fvcol_name
    assert fvcol.graph == graph.name
    assert fvcol_name in repr(fvcol)
    assert fvcol_name in graph.vertex_collections()
    assert fvcol_name in extract('name', db.collections())

    # Test create duplicate vertex collection
    with assert_raises(VertexCollectionCreateError) as err:
        graph.create_vertex_collection(fvcol_name)
    assert err.value.error_code == 1938
    assert fvcol_name in graph.vertex_collections()
    assert fvcol_name in extract('name', db.collections())

    # Test create valid "to" vertex collection
    tvcol_name = generate_col_name()
    assert not graph.has_vertex_collection(tvcol_name)
    assert not db.has_collection(tvcol_name)

    tvcol = graph.create_vertex_collection(tvcol_name)
    assert graph.has_vertex_collection(tvcol_name)
    assert db.has_collection(tvcol_name)
    assert tvcol.name == tvcol_name
    assert tvcol.graph == graph.name
    assert tvcol_name in repr(tvcol)
    assert tvcol_name in graph.vertex_collections()
    assert tvcol_name in extract('name', db.collections())

    # Test list vertex collection via bad database
    with assert_raises(VertexCollectionListError) as err:
        bad_graph.vertex_collections()
    assert err.value.error_code in {11, 1228}

    # Test delete missing vertex collection
    with assert_raises(VertexCollectionDeleteError) as err:
        graph.delete_vertex_collection(generate_col_name())
    assert err.value.error_code in {1926, 1928}

    # Test delete "to" vertex collection with purge option
    assert graph.delete_vertex_collection(tvcol_name, purge=True) is True
    assert tvcol_name not in graph.vertex_collections()
    assert fvcol_name in extract('name', db.collections())
    assert tvcol_name not in extract('name', db.collections())
    assert not graph.has_vertex_collection(tvcol_name)

    # Test delete "from" vertex collection without purge option
    assert graph.delete_vertex_collection(fvcol_name, purge=False) is True
    assert fvcol_name not in graph.vertex_collections()
    assert fvcol_name in extract('name', db.collections())
    assert not graph.has_vertex_collection(fvcol_name)
Example #21
def test_batch_execute_without_result(db, col, docs):
    with db.begin_batch_execution(return_result=False) as batch_db:
        batch_col = batch_db.collection(col.name)

        # Ensure that no jobs are returned
        assert batch_col.insert(docs[0]) is None
        assert batch_col.delete(docs[0]) is None
        assert batch_col.insert(docs[1]) is None
        assert batch_col.delete(docs[1]) is None
        assert batch_col.insert(docs[2]) is None
        assert batch_col.get(docs[2]) is None
        assert batch_db.queued_jobs() is None

    # Ensure that the operations went through
    assert batch_db.queued_jobs() is None
    assert extract("_key", col.all()) == [docs[2]["_key"]]
Example #22
def test_batch_execute_without_result(db, col, docs):
    with db.begin_batch_execution(return_result=False) as batch_db:
        batch_col = batch_db.collection(col.name)

        # Ensure that no jobs are returned
        assert batch_col.insert(docs[0]) is None
        assert batch_col.delete(docs[0]) is None
        assert batch_col.insert(docs[1]) is None
        assert batch_col.delete(docs[1]) is None
        assert batch_col.insert(docs[2]) is None
        assert batch_col.get(docs[2]) is None
        assert batch_db.queued_jobs() is None

    # Ensure that the operations went through
    assert batch_db.queued_jobs() is None
    assert extract('_key', col.all()) == [docs[2]['_key']]
Example #23
def test_transaction_execute_without_result(db, col, docs):
    with db.begin_transaction(return_result=False) as txn_db:
        txn_col = txn_db.collection(col.name)

        # Ensure that no jobs are returned
        assert txn_col.insert(docs[0]) is None
        assert txn_col.delete(docs[0]) is None
        assert txn_col.insert(docs[1]) is None
        assert txn_col.delete(docs[1]) is None
        assert txn_col.insert(docs[2]) is None
        assert txn_col.get(docs[2]) is None
        assert txn_db.queued_jobs() is None

    # Ensure that the operations went through
    assert txn_db.queued_jobs() is None
    assert extract('_key', col.all()) == [docs[2]['_key']]
Example #24
def test_transaction_abort(db, col, docs):
    txn_db = db.begin_transaction(write=col.name)
    txn_col = txn_db.collection(col.name)

    assert '_rev' in txn_col.insert(docs[0])
    assert '_rev' in txn_col.delete(docs[0])
    assert '_rev' in txn_col.insert(docs[1])
    assert '_rev' in txn_col.delete(docs[1])
    assert '_rev' in txn_col.insert(docs[2])
    txn_db.abort_transaction()

    assert extract('_key', col.all()) == []
    assert txn_db.transaction_status() == 'aborted'

    txn_db._executor._id = 'illegal'
    with pytest.raises(TransactionAbortError) as err:
        txn_db.abort_transaction()
    assert err.value.error_code in {10, 1655}
Example #25
def test_add_ttl_index(icol):
    # Test add TTL index with one attribute
    result = icol.add_ttl_index(
        fields=["attr1"], expiry_time=1000, name="ttl_index", in_background=True
    )
    expected_index = {
        "type": "ttl",
        "fields": ["attr1"],
        "expiry_time": 1000,
        "name": "ttl_index",
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result["id"] in extract("id", icol.indexes())

    # Clean up the index
    icol.delete_index(result["id"])
Example #26
def test_add_ttl_index(icol):
    # Test add TTL index with one attribute
    result = icol.add_ttl_index(fields=['attr1'],
                                expiry_time=1000,
                                name='ttl_index',
                                in_background=True)
    expected_index = {
        'type': 'ttl',
        'fields': ['attr1'],
        'expiry_time': 1000,
        'name': 'ttl_index'
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result['id'] in extract('id', icol.indexes())

    # Clean up the index
    icol.delete_index(result['id'])
Example #27
def test_user_create_with_new_database(client, sys_db):
    db_name = generate_db_name()

    username1 = generate_username()
    username2 = generate_username()
    username3 = generate_username()

    password1 = generate_string()
    password2 = generate_string()
    password3 = generate_string()

    result = sys_db.create_database(
        name=db_name,
        users=[
            {'username': username1, 'password': password1, 'active': True},
            {'username': username2, 'password': password2, 'active': True},
            {'username': username3, 'password': password3, 'active': False},
        ]
    )
    assert result is True

    sys_db.update_permission(username1, permission='rw', database=db_name)
    sys_db.update_permission(username2, permission='rw', database=db_name)
    sys_db.update_permission(username3, permission='rw', database=db_name)

    # Test if the users were created properly
    usernames = extract('username', sys_db.users())
    assert all(u in usernames for u in [username1, username2, username3])

    # Test if the first user has access to the database
    db = client.db(db_name, username1, password1)
    db.properties()

    # Test if the second user also has access to the database
    db = client.db(db_name, username2, password2)
    db.properties()

    # Test if the third user has access to the database (should not)
    db = client.db(db_name, username3, password3)
    with assert_raises(DatabasePropertiesError) as err:
        db.properties()
    assert err.value.http_code == 401
Example #28
def test_add_persistent_index(icol):
    # Test add persistent index with two attributes
    result = icol.add_persistent_index(fields=['attr1', 'attr2'],
                                       unique=True,
                                       sparse=True,
                                       name='persistent_index',
                                       in_background=True)
    expected_index = {
        'sparse': True,
        'type': 'persistent',
        'fields': ['attr1', 'attr2'],
        'unique': True,
        'name': 'persistent_index'
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result['id'] in extract('id', icol.indexes())

    # Clean up the index
    icol.delete_index(result['id'])
Example #29
def test_add_persistent_index(icol):
    # Test add persistent index with two attributes
    result = icol.add_persistent_index(
        fields=["attr1", "attr2"],
        unique=True,
        sparse=True,
        name="persistent_index",
        in_background=True,
    )
    expected_index = {
        "sparse": True,
        "type": "persistent",
        "fields": ["attr1", "attr2"],
        "unique": True,
        "name": "persistent_index",
    }
    for key, value in expected_index.items():
        assert result[key] == value

    assert result["id"] in extract("id", icol.indexes())

    # Clean up the index
    icol.delete_index(result["id"])
Example #30
def test_graph_management(db, bad_db):
    # Test create graph
    graph_name = generate_graph_name()
    assert db.has_graph(graph_name) is False

    graph = db.create_graph(graph_name)
    assert db.has_graph(graph_name) is True
    assert graph.name == graph_name
    assert graph.db_name == db.name

    # Test create duplicate graph
    with assert_raises(GraphCreateError) as err:
        db.create_graph(graph_name)
    assert err.value.error_code == 1925

    # Test get graph
    result = db.graph(graph_name)
    assert result.name == graph.name
    assert result.db_name == graph.db_name

    # Test get graphs
    result = db.graphs()
    for entry in result:
        assert 'revision' in entry
        assert 'edge_definitions' in entry
        assert 'orphan_collections' in entry
    assert graph_name in extract('name', db.graphs())

    # Test get graphs with bad fabric
    with assert_raises(GraphListError) as err:
        bad_db.graphs()
    assert err.value.error_code == 1228

    # Test delete graph
    assert db.delete_graph(graph_name) is True
    assert graph_name not in extract('name', db.graphs())

    # Test delete missing graph
    with assert_raises(GraphDeleteError) as err:
        db.delete_graph(graph_name)
    assert err.value.error_code == 1924
    assert db.delete_graph(graph_name, ignore_missing=True) is False

    # Create a graph with vertex and edge collections and delete the graph
    graph = db.create_graph(graph_name)
    ecol_name = generate_col_name()
    fvcol_name = generate_col_name()
    tvcol_name = generate_col_name()

    graph.create_vertex_collection(fvcol_name)
    graph.create_vertex_collection(tvcol_name)
    graph.create_edge_definition(
        edge_collection=ecol_name,
        from_vertex_collections=[fvcol_name],
        to_vertex_collections=[tvcol_name]
    )
    collections = extract('name', db.collections())
    assert fvcol_name in collections
    assert tvcol_name in collections
    assert ecol_name in collections

    db.delete_graph(graph_name)
    collections = extract('name', db.collections())
    assert fvcol_name in collections
    assert tvcol_name in collections
    assert ecol_name in collections

    # Create a graph with vertex and edge collections and delete all
    graph = db.create_graph(graph_name)
    graph.create_edge_definition(
        edge_collection=ecol_name,
        from_vertex_collections=[fvcol_name],
        to_vertex_collections=[tvcol_name]
    )
    db.delete_graph(graph_name, drop_collections=True)
    collections = extract('name', db.collections())
    assert fvcol_name not in collections
    assert tvcol_name not in collections
    assert ecol_name not in collections
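
A condensed sketch of the graph lifecycle above, assuming db exists; delete_graph(..., drop_collections=True) also removes the underlying vertex and edge collections.

# Sketch only: 'db' is assumed to be an existing database handle.
graph = db.create_graph('school')
graph.create_edge_definition(
    edge_collection='teaches',
    from_vertex_collections=['teachers'],
    to_vertex_collections=['lectures'],
)

# Dropping with drop_collections=True removes 'teaches', 'teachers'
# and 'lectures' along with the graph definition itself.
db.delete_graph('school', drop_collections=True)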
Example #31
def test_edge_definition_management(db, graph, bad_graph):
    ecol_name = generate_col_name()
    assert not graph.has_edge_definition(ecol_name)
    assert not graph.has_edge_collection(ecol_name)
    assert not db.has_collection(ecol_name)

    ecol = graph.create_edge_definition(ecol_name, [], [])
    assert graph.has_edge_definition(ecol_name)
    assert graph.has_edge_collection(ecol_name)
    assert db.has_collection(ecol_name)
    assert isinstance(ecol, EdgeCollection)

    ecol = graph.edge_collection(ecol_name)
    assert ecol.name == ecol_name
    assert ecol.name in repr(ecol)
    assert ecol.graph == graph.name
    assert {
               'edge_collection': ecol_name,
               'from_vertex_collections': [],
               'to_vertex_collections': []
           } in graph.edge_definitions()
    assert ecol_name in extract('name', db.collections())

    # Test create duplicate edge definition
    with assert_raises(EdgeDefinitionCreateError) as err:
        graph.create_edge_definition(ecol_name, [], [])
    assert err.value.error_code == 1920

    # Test create edge definition with existing vertex collection
    fvcol_name = generate_col_name()
    tvcol_name = generate_col_name()
    ecol_name = generate_col_name()
    ecol = graph.create_edge_definition(
        edge_collection=ecol_name,
        from_vertex_collections=[fvcol_name],
        to_vertex_collections=[tvcol_name]
    )
    assert ecol.name == ecol_name
    assert {
               'edge_collection': ecol_name,
               'from_vertex_collections': [fvcol_name],
               'to_vertex_collections': [tvcol_name]
           } in graph.edge_definitions()
    assert ecol_name in extract('name', db.collections())

    vertex_collections = graph.vertex_collections()
    assert fvcol_name in vertex_collections
    assert tvcol_name in vertex_collections

    # Test create edge definition with missing vertex collection
    bad_vcol_name = generate_col_name()
    ecol_name = generate_col_name()
    ecol = graph.create_edge_definition(
        edge_collection=ecol_name,
        from_vertex_collections=[bad_vcol_name],
        to_vertex_collections=[bad_vcol_name]
    )
    assert graph.has_edge_definition(ecol_name)
    assert graph.has_edge_collection(ecol_name)
    assert ecol.name == ecol_name
    assert {
               'edge_collection': ecol_name,
               'from_vertex_collections': [bad_vcol_name],
               'to_vertex_collections': [bad_vcol_name]
           } in graph.edge_definitions()
    assert bad_vcol_name in graph.vertex_collections()
    assert bad_vcol_name in extract('name', db.collections())

    # Test list edge definition with bad fabric
    with assert_raises(EdgeDefinitionListError) as err:
        bad_graph.edge_definitions()
    assert err.value.error_code == 1228

    # Test replace edge definition (happy path)
    ecol = graph.replace_edge_definition(
        edge_collection=ecol_name,
        from_vertex_collections=[tvcol_name],
        to_vertex_collections=[fvcol_name]
    )
    assert isinstance(ecol, EdgeCollection)
    assert ecol.name == ecol_name
    assert {
               'edge_collection': ecol_name,
               'from_vertex_collections': [tvcol_name],
               'to_vertex_collections': [fvcol_name]
           } in graph.edge_definitions()

    # Test replace missing edge definition
    bad_ecol_name = generate_col_name()
    with assert_raises(EdgeDefinitionReplaceError):
        graph.replace_edge_definition(
            edge_collection=bad_ecol_name,
            from_vertex_collections=[],
            to_vertex_collections=[fvcol_name]
        )

    # Test delete missing edge definition
    with assert_raises(EdgeDefinitionDeleteError) as err:
        graph.delete_edge_definition(bad_ecol_name)
    assert err.value.error_code == 1930

    # Test delete existing edge definition with purge
    assert graph.delete_edge_definition(ecol_name, purge=True) is True
    assert {
               'edge_collection': ecol_name,
               'from_vertex_collections': [tvcol_name],
               'to_vertex_collections': [fvcol_name]
           } not in graph.edge_definitions()
    assert ecol_name not in extract('name', db.collections())
    assert not graph.has_edge_definition(ecol_name)
    assert not graph.has_edge_collection(ecol_name)
Example #32
def test_foxx_service_management(db, bad_db):
    service_mount = generate_service_mount()
    missing_mount = generate_service_mount()

    # Test list services
    for service in db.foxx.services():
        assert 'development' in service
        assert 'legacy' in service
        assert 'mount' in service
        assert 'name' in service
        assert 'provides' in service
        assert 'version' in service

    # Test list services with bad database
    with assert_raises(FoxxServiceListError) as err:
        bad_db.foxx.services()
    assert err.value.error_code in {11, 1228}

    # Test create service
    service = db.foxx.create_service(
        mount=service_mount,
        source=service_file,
        config={},
        dependencies={},
        development=True,
        setup=True,
        legacy=True
    )
    assert service['mount'] == service_mount
    assert service['name'] == 'test'
    assert service['development'] is True
    assert service['legacy'] is True
    assert service['manifest']['configuration'] == {}
    assert service['manifest']['dependencies'] == {}

    # Test create duplicate service
    with assert_raises(FoxxServiceCreateError) as err:
        db.foxx.create_service(service_mount, 'service.zip')
    assert err.value.error_code == 3011

    # Test get service
    service = db.foxx.service(service_mount)
    assert service['mount'] == service_mount
    assert service['name'] == 'test'
    assert service['development'] is True
    assert service['manifest']['configuration'] == {}
    assert service['manifest']['dependencies'] == {}
    assert 'checksum' in service
    assert 'options' in service
    assert 'path' in service
    assert 'version' in service

    # Test get missing service
    with assert_raises(FoxxServiceGetError) as err:
        db.foxx.service(missing_mount)
    assert err.value.error_code == 3009

    # Test update service
    service = db.foxx.update_service(
        mount=service_mount,
        source=service_file,
        config={},
        dependencies={},
        teardown=True,
        setup=True,
        legacy=False
    )
    assert service['mount'] == service_mount
    assert service['name'] == 'test'
    assert service['legacy'] is False

    # Test update missing service
    with assert_raises(FoxxServiceUpdateError) as err:
        db.foxx.update_service(missing_mount, 'service.zip')
    assert err.value.error_code == 3009

    # Test replace service
    service = db.foxx.replace_service(
        mount=service_mount,
        source=service_file,
        config={},
        dependencies={},
        teardown=True,
        setup=True,
        legacy=True,
        force=False
    )
    assert service['mount'] == service_mount
    assert service['name'] == 'test'
    assert service['legacy'] is True

    # Test replace missing service
    with assert_raises(FoxxServiceReplaceError) as err:
        db.foxx.replace_service(missing_mount, 'service.zip')
    assert err.value.error_code == 3009

    assert db.foxx.delete_service(service_mount, teardown=False) is True
    assert service_mount not in extract('mount', db.foxx.services())

    # Test delete missing service
    with assert_raises(FoxxServiceDeleteError) as err:
        db.foxx.delete_service(missing_mount, teardown=False)
    assert err.value.error_code == 3009
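
A hedged sketch of the basic install/uninstall cycle exercised above; db is assumed to exist, and the '/example' mount point and 'service.zip' source path are placeholder values.

# Sketch only: 'db' is assumed to exist; the mount point and the path to
# the service bundle are placeholders.
service = db.foxx.create_service(mount='/example', source='service.zip')
print(service['mount'], service['name'])

db.foxx.delete_service('/example', teardown=True)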
Example #33
def test_aql_query_management(db, bad_db, col, docs):
    plan_fields = [
        'estimatedNrItems',
        'estimatedCost',
        'rules',
        'variables',
        'collections',
    ]
    # Test explain invalid query
    with assert_raises(AQLQueryExplainError) as err:
        db.aql.explain('INVALID QUERY')
    assert err.value.error_code == 1501

    # Test explain valid query with all_plans set to False
    plan = db.aql.explain('FOR d IN {} RETURN d'.format(col.name),
                          all_plans=False,
                          opt_rules=['-all', '+use-index-range'])
    assert all(field in plan for field in plan_fields)

    # Test explain valid query with all_plans set to True
    plans = db.aql.explain('FOR d IN {} RETURN d'.format(col.name),
                           all_plans=True,
                           opt_rules=['-all', '+use-index-range'],
                           max_plans=10)
    for plan in plans:
        assert all(field in plan for field in plan_fields)
    assert len(plans) < 10

    # Test validate invalid query
    with assert_raises(AQLQueryValidateError) as err:
        db.aql.validate('INVALID QUERY')
    assert err.value.error_code == 1501

    # Test validate valid query
    result = db.aql.validate('FOR d IN {} RETURN d'.format(col.name))
    assert 'ast' in result
    assert 'bind_vars' in result
    assert 'collections' in result
    assert 'parsed' in result

    # Test execute invalid AQL query
    with assert_raises(AQLQueryExecuteError) as err:
        db.aql.execute('INVALID QUERY')
    assert err.value.error_code == 1501

    # Test execute valid query
    db.collection(col.name).import_bulk(docs)
    cursor = db.aql.execute('''
        FOR d IN {col}
            UPDATE {{_key: d._key, _val: @val }} IN {col}
            RETURN NEW
        '''.format(col=col.name),
                            count=True,
                            batch_size=1,
                            ttl=10,
                            bind_vars={'val': 42},
                            full_count=True,
                            max_plans=1000,
                            optimizer_rules=['+all'],
                            cache=True,
                            memory_limit=1000000,
                            fail_on_warning=False,
                            profile=True,
                            max_transaction_size=100000,
                            max_warning_count=10,
                            intermediate_commit_count=1,
                            intermediate_commit_size=1000,
                            satellite_sync_wait=False,
                            write_collections=[col.name],
                            read_collections=[col.name])
    if db.context == 'transaction':
        assert cursor.id is None
        assert cursor.type == 'cursor'
        assert cursor.batch() is not None
        assert cursor.has_more() is False
        assert cursor.count() == len(col)
        assert cursor.cached() is None
        assert cursor.statistics() is None
        assert cursor.profile() is None
        assert cursor.warnings() is None
        assert extract('_key', cursor) == extract('_key', docs)
        assert cursor.close() is None
    else:
        assert cursor.id is not None
        assert cursor.type == 'cursor'
        assert cursor.batch() is not None
        assert cursor.has_more() is True
        assert cursor.count() == len(col)
        assert cursor.cached() is False
        assert cursor.statistics() is not None
        assert cursor.profile() is not None
        assert cursor.warnings() == []
        assert extract('_key', cursor) == extract('_key', docs)
        assert cursor.close(ignore_missing=True) is False

    # Test get tracking properties with bad database
    with assert_raises(AQLQueryTrackingGetError) as err:
        bad_db.aql.tracking()
    assert err.value.error_code == 1228

    # Test get tracking properties
    tracking = db.aql.tracking()
    assert isinstance(tracking['enabled'], bool)
    assert isinstance(tracking['max_query_string_length'], int)
    assert isinstance(tracking['max_slow_queries'], int)
    assert isinstance(tracking['slow_query_threshold'], int)
    assert isinstance(tracking['track_bind_vars'], bool)
    assert isinstance(tracking['track_slow_queries'], bool)

    # Test set tracking properties with bad database
    with assert_raises(AQLQueryTrackingSetError) as err:
        bad_db.aql.set_tracking(enabled=not tracking['enabled'])
    assert err.value.error_code == 1228
    assert db.aql.tracking()['enabled'] == tracking['enabled']

    # Test set tracking properties
    new_tracking = db.aql.set_tracking(
        enabled=not tracking['enabled'],
        max_query_string_length=4000,
        max_slow_queries=60,
        slow_query_threshold=15,
        track_bind_vars=not tracking['track_bind_vars'],
        track_slow_queries=not tracking['track_slow_queries'])
    assert new_tracking['enabled'] != tracking['enabled']
    assert new_tracking['max_query_string_length'] == 4000
    assert new_tracking['max_slow_queries'] == 60
    assert new_tracking['slow_query_threshold'] == 15
    assert new_tracking['track_bind_vars'] != tracking['track_bind_vars']
    assert new_tracking['track_slow_queries'] != tracking['track_slow_queries']

    # Make sure to revert the properties
    new_tracking = db.aql.set_tracking(enabled=True,
                                       track_bind_vars=True,
                                       track_slow_queries=True)
    assert new_tracking['enabled'] is True
    assert new_tracking['track_bind_vars'] is True
    assert new_tracking['track_slow_queries'] is True

    # Kick off some long lasting queries in the background
    db.begin_async_execution().aql.execute('RETURN SLEEP(100)')
    db.begin_async_execution().aql.execute('RETURN SLEEP(50)')

    # Test list queries
    queries = db.aql.queries()
    for query in queries:
        assert 'id' in query
        assert 'query' in query
        assert 'started' in query
        assert 'state' in query
        assert 'bind_vars' in query
        assert 'runtime' in query
    assert len(queries) == 2

    # Test list queries with bad database
    with assert_raises(AQLQueryListError) as err:
        bad_db.aql.queries()
    assert err.value.error_code == 1228

    # Test kill queries
    query_id_1, query_id_2 = extract('id', queries)
    assert db.aql.kill(query_id_1) is True

    while len(queries) > 1:
        queries = db.aql.queries()
    assert query_id_1 not in extract('id', queries)

    assert db.aql.kill(query_id_2) is True
    while len(queries) > 0:
        queries = db.aql.queries()
    assert query_id_2 not in extract('id', queries)

    # Test kill missing queries
    with assert_raises(AQLQueryKillError) as err:
        db.aql.kill(query_id_1)
    assert err.value.error_code == 1591
    with assert_raises(AQLQueryKillError) as err:
        db.aql.kill(query_id_2)
    assert err.value.error_code == 1591

    # Test list slow queries
    assert db.aql.slow_queries() == []

    # Test list slow queries with bad database
    with assert_raises(AQLQueryListError) as err:
        bad_db.aql.slow_queries()
    assert err.value.error_code == 1228

    # Test clear slow queries
    assert db.aql.clear_slow_queries() is True

    # Test clear slow queries with bad database
    with assert_raises(AQLQueryClearError) as err:
        bad_db.aql.clear_slow_queries()
    assert err.value.error_code == 1228
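
For everyday use the execute() call above reduces to a much smaller sketch (db and a collection named 'students' assumed); the returned cursor is iterable and fetches further batches lazily.

# Sketch only: 'db' and the 'students' collection are assumed to exist.
cursor = db.aql.execute(
    'FOR d IN students FILTER d.age >= @min_age RETURN d._key',
    bind_vars={'min_age': 18},
    count=True,
    batch_size=100,
)
print(cursor.count())
for key in cursor:
    print(key)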
Example #34
def test_traverse(db):
    # Create test graph, vertex and edge collections
    school = db.create_graph(generate_graph_name())
    profs = school.create_vertex_collection(generate_col_name())
    classes = school.create_vertex_collection(generate_col_name())
    teaches = school.create_edge_definition(
        edge_collection=generate_col_name(),
        from_vertex_collections=[profs.name],
        to_vertex_collections=[classes.name])
    # Insert test vertices into the graph
    profs.insert({'_key': 'anna', 'name': 'Professor Anna'})
    profs.insert({'_key': 'andy', 'name': 'Professor Andy'})
    classes.insert({'_key': 'CSC101', 'name': 'Introduction to CS'})
    classes.insert({'_key': 'MAT223', 'name': 'Linear Algebra'})
    classes.insert({'_key': 'STA201', 'name': 'Statistics'})
    classes.insert({'_key': 'MAT101', 'name': 'Calculus I'})
    classes.insert({'_key': 'MAT102', 'name': 'Calculus II'})

    # Insert test edges into the graph
    teaches.insert({
        '_from': '{}/anna'.format(profs.name),
        '_to': '{}/CSC101'.format(classes.name)
    })
    teaches.insert({
        '_from': '{}/anna'.format(profs.name),
        '_to': '{}/STA201'.format(classes.name)
    })
    teaches.insert({
        '_from': '{}/anna'.format(profs.name),
        '_to': '{}/MAT223'.format(classes.name)
    })
    teaches.insert({
        '_from': '{}/andy'.format(profs.name),
        '_to': '{}/MAT101'.format(classes.name)
    })
    teaches.insert({
        '_from': '{}/andy'.format(profs.name),
        '_to': '{}/MAT102'.format(classes.name)
    })
    teaches.insert({
        '_from': '{}/andy'.format(profs.name),
        '_to': '{}/MAT223'.format(classes.name)
    })

    # Traverse the graph with default settings
    result = school.traverse('{}/anna'.format(profs.name))
    visited = extract('_key', result['vertices'])
    assert visited == ['CSC101', 'MAT223', 'STA201', 'anna']

    for path in result['paths']:
        for vertex in path['vertices']:
            assert set(vertex) == {'_id', '_key', '_rev', 'name'}
        for edge in path['edges']:
            assert set(edge) == {'_id', '_key', '_rev', '_to', '_from'}

    result = school.traverse('{}/andy'.format(profs.name))
    visited = extract('_key', result['vertices'])
    assert visited == ['MAT101', 'MAT102', 'MAT223', 'andy']

    # Traverse the graph with an invalid start vertex
    with assert_raises(GraphTraverseError):
        school.traverse('invalid')

    with assert_raises(GraphTraverseError):
        bad_col_name = generate_col_name()
        school.traverse('{}/hanna'.format(bad_col_name))

    with assert_raises(GraphTraverseError):
        school.traverse('{}/anderson'.format(profs.name))

    # Traverse the graph with max iterations of 0
    with assert_raises(GraphTraverseError):
        school.traverse('{}/andy'.format(profs.name), max_iter=0)

    # Traverse the graph with max depth of 0
    result = school.traverse('{}/andy'.format(profs.name), max_depth=0)
    assert extract('_key', result['vertices']) == ['andy']

    result = school.traverse('{}/anna'.format(profs.name), max_depth=0)
    assert extract('_key', result['vertices']) == ['anna']

    # Traverse the graph with min depth of 2
    result = school.traverse('{}/andy'.format(profs.name), min_depth=2)
    assert extract('_key', result['vertices']) == []

    result = school.traverse('{}/anna'.format(profs.name), min_depth=2)
    assert extract('_key', result['vertices']) == []

    # Traverse the graph with DFS and BFS
    result = school.traverse(
        {'_id': '{}/anna'.format(profs.name)},
        strategy='dfs',
        direction='any',
    )
    dfs_vertices = extract('_key', result['vertices'])

    result = school.traverse({'_id': '{}/anna'.format(profs.name)},
                             strategy='bfs',
                             direction='any')
    bfs_vertices = extract('_key', result['vertices'])

    assert sorted(dfs_vertices) == sorted(bfs_vertices)

    # Traverse the graph with filter function
    result = school.traverse(
        {'_id': '{}/andy'.format(profs.name)},
        filter_func='if (vertex._key == "MAT101") {return "exclude";} return;')
    assert extract('_key', result['vertices']) == ['MAT102', 'MAT223', 'andy']

    # Traverse the graph with global uniqueness (should be same as before)
    result = school.traverse(
        {'_id': '{}/andy'.format(profs.name)},
        vertex_uniqueness='global',
        edge_uniqueness='global',
        filter_func='if (vertex._key == "MAT101") {return "exclude";} return;')
    assert extract('_key', result['vertices']) == ['MAT102', 'MAT223', 'andy']

    with assert_raises(DocumentParseError) as err:
        school.traverse({})
    assert err.value.message == 'field "_id" required'
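
The traversal call itself can be sketched independently of the setup above, assuming the same school graph and profs collection; direction, depth bounds and the traversal strategy are optional keyword arguments shown in the test.

# Sketch only: 'school' and 'profs' are assumed to be the graph and vertex
# collection created earlier in the test.
result = school.traverse(
    '{}/anna'.format(profs.name),
    direction='outbound',
    strategy='bfs',
    max_depth=2,
)
print([v['_key'] for v in result['vertices']])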
Example #35
def test_user_management(sys_db, bad_db):
    # Test create user
    username = generate_username()
    password = generate_string()
    assert not sys_db.has_user(username)

    new_user = sys_db.create_user(
        username=username,
        password=password,
        active=True,
        extra={'foo': 'bar'},
    )
    assert new_user['username'] == username
    assert new_user['active'] is True
    assert new_user['extra'] == {'foo': 'bar'}
    assert sys_db.has_user(username)

    # Test create duplicate user
    with assert_raises(UserCreateError) as err:
        sys_db.create_user(username=username, password=password)
    assert err.value.error_code == 1702

    # Test list users
    for user in sys_db.users():
        assert isinstance(user['username'], string_types)
        assert isinstance(user['active'], bool)
        assert isinstance(user['extra'], dict)
    assert sys_db.user(username) == new_user

    # Test list users with bad fabric
    with assert_raises(UserListError) as err:
        bad_db.users()
    assert err.value.error_code == 1228

    # Test get user
    users = sys_db.users()
    for user in users:
        assert 'active' in user
        assert 'extra' in user
        assert 'username' in user
    assert username in extract('username', sys_db.users())

    # Test get missing user
    with assert_raises(UserGetError) as err:
        sys_db.user(generate_username())
    assert err.value.error_code == 1703

    # Update existing user
    new_user = sys_db.update_user(
        username=username,
        password=password,
        active=False,
        extra={'bar': 'baz'},
    )
    assert new_user['username'] == username
    assert new_user['active'] is False
    assert new_user['extra'] == {'bar': 'baz'}
    assert sys_db.user(username) == new_user

    # Update missing user
    with assert_raises(UserUpdateError) as err:
        sys_db.update_user(username=generate_username(),
                           password=generate_string())
    assert err.value.error_code == 1703

    # Replace existing user
    new_user = sys_db.replace_user(
        username=username,
        password=password,
        active=False,
        extra={'baz': 'qux'},
    )
    assert new_user['username'] == username
    assert new_user['active'] is False
    assert new_user['extra'] == {'baz': 'qux'}
    assert sys_db.user(username) == new_user

    # Replace missing user
    with assert_raises(UserReplaceError) as err:
        sys_db.replace_user(username=generate_username(),
                            password=generate_string())
    assert err.value.error_code == 1703

    # Delete an existing user
    assert sys_db.delete_user(username) is True

    # Delete a missing user
    with assert_raises(UserDeleteError) as err:
        sys_db.delete_user(username, ignore_missing=False)
    assert err.value.error_code == 1703
    assert sys_db.delete_user(username, ignore_missing=True) is False
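
A condensed sketch of the user lifecycle above, assuming sys_db is the _system database handle:

# Sketch only: 'sys_db' is assumed to be the _system database handle.
sys_db.create_user(username='jane', password='secret', active=True,
                   extra={'team': 'qa'})
sys_db.update_user(username='jane', password='secret', active=False)
assert sys_db.has_user('jane')
sys_db.delete_user('jane')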
Example #36
def test_aql_query_management(db, bad_db, col, docs):
    plan_fields = [
        'estimatedNrItems',
        'estimatedCost',
        'rules',
        'variables',
        'collections',
    ]
    # Test explain invalid query
    with assert_raises(AQLQueryExplainError) as err:
        db.aql.explain('INVALID QUERY')
    assert err.value.error_code == 1501

    # Test explain valid query with all_plans set to False
    plan = db.aql.explain(
        'FOR d IN {} RETURN d'.format(col.name),
        all_plans=False,
        opt_rules=['-all', '+use-index-range']
    )
    assert all(field in plan for field in plan_fields)

    # Test explain valid query with all_plans set to True
    plans = db.aql.explain(
        'FOR d IN {} RETURN d'.format(col.name),
        all_plans=True,
        opt_rules=['-all', '+use-index-range'],
        max_plans=10
    )
    for plan in plans:
        assert all(field in plan for field in plan_fields)
    assert len(plans) < 10

    # Test validate invalid query
    with assert_raises(AQLQueryValidateError) as err:
        db.aql.validate('INVALID QUERY')
    assert err.value.error_code == 1501

    # Test validate valid query
    result = db.aql.validate('FOR d IN {} RETURN d'.format(col.name))
    assert 'ast' in result
    assert 'bind_vars' in result
    assert 'collections' in result
    assert 'parsed' in result

    # Test execute invalid AQL query
    with assert_raises(AQLQueryExecuteError) as err:
        db.aql.execute('INVALID QUERY')
    assert err.value.error_code == 1501

    # Test execute valid query
    db.collection(col.name).import_bulk(docs)
    cursor = db.aql.execute(
        '''
        FOR d IN {col}
            UPDATE {{_key: d._key, _val: @val }} IN {col}
            RETURN NEW
        '''.format(col=col.name),
        count=True,
        batch_size=1,
        ttl=10,
        bind_vars={'val': 42},
        full_count=True,
        max_plans=1000,
        optimizer_rules=['+all'],
        cache=True,
        memory_limit=1000000,
        fail_on_warning=False,
        profile=True,
        max_transaction_size=100000,
        max_warning_count=10,
        intermediate_commit_count=1,
        intermediate_commit_size=1000,
        satellite_sync_wait=False,
        write_collections=[col.name],
        read_collections=[col.name],
        stream=False,
        skip_inaccessible_cols=True
    )
    if db.context == 'transaction':
        assert cursor.id is None
        assert cursor.type == 'cursor'
        assert cursor.batch() is not None
        assert cursor.has_more() is False
        assert cursor.count() == len(col)
        assert cursor.cached() is None
        assert cursor.statistics() is None
        assert cursor.profile() is None
        assert cursor.warnings() is None
        assert extract('_key', cursor) == extract('_key', docs)
        assert cursor.close() is None
    else:
        assert cursor.id is not None
        assert cursor.type == 'cursor'
        assert cursor.batch() is not None
        assert cursor.has_more() is True
        assert cursor.count() == len(col)
        assert cursor.cached() is False
        assert cursor.statistics() is not None
        assert cursor.profile() is not None
        assert cursor.warnings() == []
        assert extract('_key', cursor) == extract('_key', docs)
        assert cursor.close(ignore_missing=True) is False

    # Test get tracking properties with bad database
    with assert_raises(AQLQueryTrackingGetError) as err:
        bad_db.aql.tracking()
    assert err.value.error_code in {11, 1228}

    # Test get tracking properties
    tracking = db.aql.tracking()
    assert isinstance(tracking['enabled'], bool)
    assert isinstance(tracking['max_query_string_length'], int)
    assert isinstance(tracking['max_slow_queries'], int)
    assert isinstance(tracking['slow_query_threshold'], int)
    assert isinstance(tracking['track_bind_vars'], bool)
    assert isinstance(tracking['track_slow_queries'], bool)

    # Test set tracking properties with bad database
    with assert_raises(AQLQueryTrackingSetError) as err:
        bad_db.aql.set_tracking(enabled=not tracking['enabled'])
    assert err.value.error_code in {11, 1228}
    assert db.aql.tracking()['enabled'] == tracking['enabled']

    # Test set tracking properties
    new_tracking = db.aql.set_tracking(
        enabled=not tracking['enabled'],
        max_query_string_length=4000,
        max_slow_queries=60,
        slow_query_threshold=15,
        track_bind_vars=not tracking['track_bind_vars'],
        track_slow_queries=not tracking['track_slow_queries']
    )
    assert new_tracking['enabled'] != tracking['enabled']
    assert new_tracking['max_query_string_length'] == 4000
    assert new_tracking['max_slow_queries'] == 60
    assert new_tracking['slow_query_threshold'] == 15
    assert new_tracking['track_bind_vars'] != tracking['track_bind_vars']
    assert new_tracking['track_slow_queries'] != tracking['track_slow_queries']

    # Make sure to revert the properties
    new_tracking = db.aql.set_tracking(
        enabled=True,
        track_bind_vars=True,
        track_slow_queries=True
    )
    assert new_tracking['enabled'] is True
    assert new_tracking['track_bind_vars'] is True
    assert new_tracking['track_slow_queries'] is True

    # Kick off some long lasting queries in the background
    db.begin_async_execution().aql.execute('RETURN SLEEP(100)')
    db.begin_async_execution().aql.execute('RETURN SLEEP(50)')

    # Test list queries
    queries = db.aql.queries()
    for query in queries:
        assert 'id' in query
        assert 'query' in query
        assert 'started' in query
        assert 'state' in query
        assert 'bind_vars' in query
        assert 'runtime' in query
    assert len(queries) == 2

    # Test list queries with bad database
    with assert_raises(AQLQueryListError) as err:
        bad_db.aql.queries()
    assert err.value.error_code in {11, 1228}

    # Test kill queries
    query_id_1, query_id_2 = extract('id', queries)
    assert db.aql.kill(query_id_1) is True

    while len(queries) > 1:
        queries = db.aql.queries()
    assert query_id_1 not in extract('id', queries)

    assert db.aql.kill(query_id_2) is True
    while len(queries) > 0:
        queries = db.aql.queries()
    assert query_id_2 not in extract('id', queries)

    # Test kill missing queries
    with assert_raises(AQLQueryKillError) as err:
        db.aql.kill(query_id_1)
    assert err.value.error_code == 1591
    with assert_raises(AQLQueryKillError) as err:
        db.aql.kill(query_id_2)
    assert err.value.error_code == 1591

    # Test list slow queries
    assert db.aql.slow_queries() == []

    # Test list slow queries with bad database
    with assert_raises(AQLQueryListError) as err:
        bad_db.aql.slow_queries()
    assert err.value.error_code in {11, 1228}

    # Test clear slow queries
    assert db.aql.clear_slow_queries() is True

    # Test clear slow queries with bad database
    with assert_raises(AQLQueryClearError) as err:
        bad_db.aql.clear_slow_queries()
    assert err.value.error_code in {11, 1228}
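
A condensed sketch of the AQL helpers exercised above, assuming `db` is an already-connected StandardDatabase handle and `students` is an existing collection (both placeholders):

# Explain without running, then execute with bind variables.
plan = db.aql.explain('FOR d IN students RETURN d', all_plans=False)
print(plan['estimatedCost'])

cursor = db.aql.execute(
    'FOR d IN students FILTER d.age >= @min RETURN d._key',
    bind_vars={'min': 18},
    count=True,
    batch_size=100,
)
keys = list(cursor)

# Inspect and adjust query tracking, then list currently running queries.
print(db.aql.tracking())
db.aql.set_tracking(track_slow_queries=True, slow_query_threshold=5)
for q in db.aql.queries():
    print(q['id'], q['query'], q['runtime'])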
Example #37
def test_collection_management(db, bad_db, cluster):
    # Test create collection
    col_name = generate_col_name()
    assert db.has_collection(col_name) is False

    col = db.create_collection(name=col_name,
                               sync=True,
                               compact=False,
                               journal_size=7774208,
                               system=False,
                               volatile=False,
                               key_generator='traditional',
                               user_keys=False,
                               key_increment=9,
                               key_offset=100,
                               edge=True,
                               shard_count=2,
                               shard_fields=['test_attr'],
                               index_bucket_count=10,
                               replication_factor=1,
                               shard_like='',
                               sync_replication=False,
                               enforce_replication_factor=False,
                               sharding_strategy='community-compat',
                               smart_join_attribute='test')
    assert db.has_collection(col_name) is True

    properties = col.properties()
    if col.context != 'transaction':
        assert 'id' in properties
    assert properties['name'] == col_name
    assert properties['sync'] is True
    assert properties['system'] is False
    assert properties['key_generator'] == 'traditional'
    assert properties['user_keys'] is False

    # Test create duplicate collection
    with assert_raises(CollectionCreateError) as err:
        db.create_collection(col_name)
    assert err.value.error_code == 1207

    # Test list collections
    assert all(entry['name'].startswith('test_collection')
               or entry['name'].startswith('_') for entry in db.collections())

    # Test list collections with bad database
    with assert_raises(CollectionListError) as err:
        bad_db.collections()
    assert err.value.error_code in {11, 1228}

    # Test get collection object
    test_col = db.collection(col.name)
    assert isinstance(test_col, StandardCollection)
    assert test_col.name == col.name

    test_col = db[col.name]
    assert isinstance(test_col, StandardCollection)
    assert test_col.name == col.name

    # Test delete collection
    assert db.delete_collection(col_name, system=False) is True
    assert col_name not in extract('name', db.collections())

    # Test drop missing collection
    with assert_raises(CollectionDeleteError) as err:
        db.delete_collection(col_name)
    assert err.value.error_code == 1203
    assert db.delete_collection(col_name, ignore_missing=True) is False

    if not cluster:
        # Test rename collection
        new_name = generate_col_name()
        col = db.create_collection(new_name)
        assert col.rename(new_name) is True
        assert col.name == new_name
        assert repr(col) == '<StandardCollection {}>'.format(new_name)

        # Try again (the operation should be idempotent)
        assert col.rename(new_name) is True
        assert col.name == new_name
        assert repr(col) == '<StandardCollection {}>'.format(new_name)

        # Test rename with bad collection
        with assert_raises(CollectionRenameError) as err:
            bad_db.collection(new_name).rename(new_name)
        assert err.value.error_code in {11, 1228}
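
For everyday use, the collection APIs shown above reduce to a few calls; a minimal sketch, assuming `db` is a StandardDatabase handle and the collection name is a placeholder:

name = 'inventory_example'   # placeholder collection name
if not db.has_collection(name):
    col = db.create_collection(name, sync=False)
else:
    col = db.collection(name)

print(col.properties()['name'])
print([c['name'] for c in db.collections() if not c['name'].startswith('_')])
db.delete_collection(name, ignore_missing=True)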
Example #38
def test_task_management(sys_db, db, bad_db):
    test_command = 'require("@arangodb").print(params);'

    # Test create task with random ID
    task_name = generate_task_name()
    new_task = db.create_task(
        name=task_name,
        command=test_command,
        params={'foo': 1, 'bar': 2},
        offset=1,
    )
    assert new_task['name'] == task_name
    assert 'print(params)' in new_task['command']
    assert new_task['type'] == 'timed'
    assert new_task['database'] == db.name
    assert isinstance(new_task['created'], float)
    assert isinstance(new_task['id'], string_types)

    # Test get existing task
    assert db.task(new_task['id']) == new_task

    # Test create task with specific ID
    task_name = generate_task_name()
    task_id = generate_task_id()
    new_task = db.create_task(
        name=task_name,
        command=test_command,
        params={'foo': 1, 'bar': 2},
        offset=1,
        period=10,
        task_id=task_id
    )
    assert new_task['name'] == task_name
    assert new_task['id'] == task_id
    assert 'print(params)' in new_task['command']
    assert new_task['type'] == 'periodic'
    assert new_task['database'] == db.name
    assert isinstance(new_task['created'], float)
    assert db.task(new_task['id']) == new_task

    # Test create duplicate task
    with assert_raises(TaskCreateError) as err:
        db.create_task(
            name=task_name,
            command=test_command,
            params={'foo': 1, 'bar': 2},
            task_id=task_id
        )
    assert err.value.error_code == 1851

    # Test list tasks
    for task in sys_db.tasks():
        assert task['database'] in db.databases()
        assert task['type'] in {'periodic', 'timed'}
        assert isinstance(task['id'], string_types)
        assert isinstance(task['name'], string_types)
        assert isinstance(task['created'], float)
        assert isinstance(task['command'], string_types)

    # Test list tasks with bad database
    with assert_raises(TaskListError) as err:
        bad_db.tasks()
    assert err.value.error_code in {11, 1228}

    # Test get missing task
    with assert_raises(TaskGetError) as err:
        db.task(generate_task_id())
    assert err.value.error_code == 1852

    # Test delete existing task
    assert task_id in extract('id', db.tasks())
    assert db.delete_task(task_id) is True
    assert task_id not in extract('id', db.tasks())
    with assert_raises(TaskGetError) as err:
        db.task(task_id)
    assert err.value.error_code == 1852

    # Test delete missing task
    with assert_raises(TaskDeleteError) as err:
        db.delete_task(generate_task_id(), ignore_missing=False)
    assert err.value.error_code == 1852
    assert db.delete_task(task_id, ignore_missing=True) is False
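
A minimal sketch of the task API, assuming `db` is a StandardDatabase handle; the task name and timing values are placeholders, and the JavaScript command runs inside arangod:

# The command executes in arangod's JavaScript context; params are passed to it.
task = db.create_task(
    name='log-params-example',              # placeholder name
    command='require("@arangodb").print(params);',
    params={'foo': 1, 'bar': 2},
    offset=2,        # start after 2 seconds
    period=10,       # repeat every 10 seconds -> a 'periodic' task
)
print(task['id'], task['type'])
print(db.task(task['id']))
db.delete_task(task['id'], ignore_missing=True)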
Example #39
def test_permission_management(client, sys_db, bad_db):
    username = generate_username()
    password = generate_string()
    db_name = generate_db_name()
    col_name_1 = generate_col_name()
    col_name_2 = generate_col_name()

    sys_db.create_database(
        name=db_name,
        users=[{
            'username': username,
            'password': password,
            'active': True
        }]
    )
    db = client.db(db_name, username, password)
    assert isinstance(sys_db.permissions(username), dict)

    # Test list permissions with bad database
    with assert_raises(PermissionListError) as err:
        bad_db.permissions(username)
    assert err.value.error_code in {11, 1228}

    # Test get permission with bad database
    with assert_raises(PermissionGetError) as err:
        bad_db.permission(username, db_name)
    assert err.value.error_code in {11, 1228}

    # The user should not have read and write permissions
    assert sys_db.permission(username, db_name) == 'none'
    assert sys_db.permission(username, db_name, col_name_1) == 'none'
    with assert_raises(CollectionCreateError) as err:
        db.create_collection(col_name_1)
    assert err.value.http_code == 401
    with assert_raises(CollectionListError) as err:
        db.collections()
    assert err.value.http_code == 401

    # Test update permission (database level) with bad database
    with assert_raises(PermissionUpdateError):
        bad_db.update_permission(username, 'ro', db_name)
    assert sys_db.permission(username, db_name) == 'none'

    # Test update permission (database level) to read only and verify access
    assert sys_db.update_permission(username, 'ro', db_name) is True
    assert sys_db.permission(username, db_name) == 'ro'
    with assert_raises(CollectionCreateError) as err:
        db.create_collection(col_name_2)
    assert err.value.http_code == 403
    assert col_name_1 not in extract('name', db.collections())
    assert col_name_2 not in extract('name', db.collections())

    # Test reset permission (database level) with bad database
    with assert_raises(PermissionResetError) as err:
        bad_db.reset_permission(username, db_name)
    assert err.value.error_code in {11, 1228}
    assert sys_db.permission(username, db_name) == 'ro'

    # Test reset permission (database level) and verify access
    assert sys_db.reset_permission(username, db_name) is True
    assert sys_db.permission(username, db_name) == 'none'
    with assert_raises(CollectionCreateError) as err:
        db.create_collection(col_name_1)
    assert err.value.http_code == 401
    with assert_raises(CollectionListError) as err:
        db.collections()
    assert err.value.http_code == 401

    # Test update permission (database level) and verify access
    assert sys_db.update_permission(username, 'rw', db_name) is True
    assert sys_db.permission(username, db_name, col_name_2) == 'rw'
    assert db.create_collection(col_name_1) is not None
    assert db.create_collection(col_name_2) is not None
    assert col_name_1 in extract('name', db.collections())
    assert col_name_2 in extract('name', db.collections())

    col_1 = db.collection(col_name_1)
    col_2 = db.collection(col_name_2)

    # Verify that user has read and write access to both collections
    assert isinstance(col_1.properties(), dict)
    assert isinstance(col_1.insert({}), dict)
    assert isinstance(col_2.properties(), dict)
    assert isinstance(col_2.insert({}), dict)

    # Test update permission (collection level) to read only and verify access
    assert sys_db.update_permission(username, 'ro', db_name, col_name_1)
    assert sys_db.permission(username, db_name, col_name_1) == 'ro'
    assert isinstance(col_1.properties(), dict)
    with assert_raises(DocumentInsertError) as err:
        col_1.insert({})
    assert err.value.http_code == 403
    assert isinstance(col_2.properties(), dict)
    assert isinstance(col_2.insert({}), dict)

    # Test update permission (collection level) to none and verify access
    assert sys_db.update_permission(username, 'none', db_name, col_name_1)
    assert sys_db.permission(username, db_name, col_name_1) == 'none'
    with assert_raises(CollectionPropertiesError) as err:
        col_1.properties()
    assert err.value.http_code == 403
    with assert_raises(DocumentInsertError) as err:
        col_1.insert({})
    assert err.value.http_code == 403
    assert isinstance(col_2.properties(), dict)
    assert isinstance(col_2.insert({}), dict)

    # Test reset permission (collection level)
    assert sys_db.reset_permission(username, db_name, col_name_1) is True
    assert sys_db.permission(username, db_name, col_name_1) == 'rw'
    assert isinstance(col_1.properties(), dict)
    assert isinstance(col_1.insert({}), dict)
    assert isinstance(col_2.properties(), dict)
    assert isinstance(col_2.insert({}), dict)
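
A short sketch of typical permission calls, assuming `sys_db` is a handle on the _system database and that the user, database, and collection named below already exist (all placeholders):

# Grant read-only access to a database, read-write to one collection, then revert.
sys_db.update_permission('jane', 'ro', 'inventory_db')              # database level
sys_db.update_permission('jane', 'rw', 'inventory_db', 'orders')    # collection level
print(sys_db.permission('jane', 'inventory_db'))             # 'ro'
print(sys_db.permission('jane', 'inventory_db', 'orders'))   # 'rw'
sys_db.reset_permission('jane', 'inventory_db', 'orders')    # fall back to the database default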
Example #40
def test_user_management(sys_db, bad_db):
    # Test create user
    username = generate_username()
    password = generate_string()
    assert not sys_db.has_user(username)

    new_user = sys_db.create_user(
        username=username,
        password=password,
        active=True,
        extra={'foo': 'bar'},
    )
    assert new_user['username'] == username
    assert new_user['active'] is True
    assert new_user['extra'] == {'foo': 'bar'}
    assert sys_db.has_user(username)

    # Test create duplicate user
    with assert_raises(UserCreateError) as err:
        sys_db.create_user(
            username=username,
            password=password
        )
    assert err.value.error_code == 1702

    # Test list users
    for user in sys_db.users():
        assert isinstance(user['username'], string_types)
        assert isinstance(user['active'], bool)
        assert isinstance(user['extra'], dict)
    assert sys_db.user(username) == new_user

    # Test list users with bad database
    with assert_raises(UserListError) as err:
        bad_db.users()
    assert err.value.error_code in {11, 1228}

    # Test get user
    users = sys_db.users()
    for user in users:
        assert 'active' in user
        assert 'extra' in user
        assert 'username' in user
    assert username in extract('username', sys_db.users())

    # Test get missing user
    with assert_raises(UserGetError) as err:
        sys_db.user(generate_username())
    assert err.value.error_code == 1703

    # Update existing user
    new_user = sys_db.update_user(
        username=username,
        password=password,
        active=False,
        extra={'bar': 'baz'},
    )
    assert new_user['username'] == username
    assert new_user['active'] is False
    assert new_user['extra'] == {'bar': 'baz'}
    assert sys_db.user(username) == new_user

    # Update missing user
    with assert_raises(UserUpdateError) as err:
        sys_db.update_user(
            username=generate_username(),
            password=generate_string()
        )
    assert err.value.error_code == 1703

    # Replace existing user
    new_user = sys_db.replace_user(
        username=username,
        password=password,
        active=False,
        extra={'baz': 'qux'},
    )
    assert new_user['username'] == username
    assert new_user['active'] is False
    assert new_user['extra'] == {'baz': 'qux'}
    assert sys_db.user(username) == new_user

    # Replace missing user
    with assert_raises(UserReplaceError) as err:
        sys_db.replace_user(
            username=generate_username(),
            password=generate_string()
        )
    assert err.value.error_code == 1703

    # Delete an existing user
    assert sys_db.delete_user(username) is True

    # Delete a missing user
    with assert_raises(UserDeleteError) as err:
        sys_db.delete_user(username, ignore_missing=False)
    assert err.value.error_code == 1703
    assert sys_db.delete_user(username, ignore_missing=True) is False
Example #41
def test_traverse(db):
    # Create test graph, vertex and edge collections
    school = db.create_graph(generate_graph_name())
    profs = school.create_vertex_collection(generate_col_name())
    classes = school.create_vertex_collection(generate_col_name())
    teaches = school.create_edge_definition(
        edge_collection=generate_col_name(),
        from_vertex_collections=[profs.name],
        to_vertex_collections=[classes.name]
    )
    # Insert test vertices into the graph
    profs.insert({'_key': 'anna', 'name': 'Professor Anna'})
    profs.insert({'_key': 'andy', 'name': 'Professor Andy'})
    classes.insert({'_key': 'CSC101', 'name': 'Introduction to CS'})
    classes.insert({'_key': 'MAT223', 'name': 'Linear Algebra'})
    classes.insert({'_key': 'STA201', 'name': 'Statistics'})
    classes.insert({'_key': 'MAT101', 'name': 'Calculus I'})
    classes.insert({'_key': 'MAT102', 'name': 'Calculus II'})

    # Insert test edges into the graph
    teaches.insert({
        '_from': '{}/anna'.format(profs.name),
        '_to': '{}/CSC101'.format(classes.name)
    })
    teaches.insert({
        '_from': '{}/anna'.format(profs.name),
        '_to': '{}/STA201'.format(classes.name)
    })
    teaches.insert({
        '_from': '{}/anna'.format(profs.name),
        '_to': '{}/MAT223'.format(classes.name)
    })
    teaches.insert({
        '_from': '{}/andy'.format(profs.name),
        '_to': '{}/MAT101'.format(classes.name)
    })
    teaches.insert({
        '_from': '{}/andy'.format(profs.name),
        '_to': '{}/MAT102'.format(classes.name)
    })
    teaches.insert({
        '_from': '{}/andy'.format(profs.name),
        '_to': '{}/MAT223'.format(classes.name)
    })

    # Traverse the graph with default settings
    result = school.traverse('{}/anna'.format(profs.name))
    visited = extract('_key', result['vertices'])
    assert visited == ['CSC101', 'MAT223', 'STA201', 'anna']

    for path in result['paths']:
        for vertex in path['vertices']:
            assert set(vertex) == {'_id', '_key', '_rev', 'name'}
        for edge in path['edges']:
            assert set(edge) == {'_id', '_key', '_rev', '_to', '_from'}

    result = school.traverse('{}/andy'.format(profs.name))
    visited = extract('_key', result['vertices'])
    assert visited == ['MAT101', 'MAT102', 'MAT223', 'andy']

    # Traverse the graph with an invalid start vertex
    with assert_raises(GraphTraverseError):
        school.traverse('invalid')

    with assert_raises(GraphTraverseError):
        bad_col_name = generate_col_name()
        school.traverse('{}/hanna'.format(bad_col_name))

    with assert_raises(GraphTraverseError):
        school.traverse('{}/anderson'.format(profs.name))

    # Traverse the graph with max iteration of 0
    with assert_raises(GraphTraverseError):
        school.traverse('{}/andy'.format(profs.name), max_iter=0)

    # Traverse the graph with max depth of 0
    result = school.traverse('{}/andy'.format(profs.name), max_depth=0)
    assert extract('_key', result['vertices']) == ['andy']

    result = school.traverse('{}/anna'.format(profs.name), max_depth=0)
    assert extract('_key', result['vertices']) == ['anna']

    # Traverse the graph with min depth of 2
    result = school.traverse('{}/andy'.format(profs.name), min_depth=2)
    assert extract('_key', result['vertices']) == []

    result = school.traverse('{}/anna'.format(profs.name), min_depth=2)
    assert extract('_key', result['vertices']) == []

    # Traverse the graph with DFS and BFS
    result = school.traverse(
        {'_id': '{}/anna'.format(profs.name)},
        strategy='dfs',
        direction='any',
    )
    dfs_vertices = extract('_key', result['vertices'])

    result = school.traverse(
        {'_id': '{}/anna'.format(profs.name)},
        strategy='bfs',
        direction='any'
    )
    bfs_vertices = extract('_key', result['vertices'])

    assert sorted(dfs_vertices) == sorted(bfs_vertices)

    # Traverse the graph with filter function
    result = school.traverse(
        {'_id': '{}/andy'.format(profs.name)},
        filter_func='if (vertex._key == "MAT101") {return "exclude";} return;'
    )
    assert extract('_key', result['vertices']) == ['MAT102', 'MAT223', 'andy']

    # Traverse the graph with global uniqueness (should be same as before)
    result = school.traverse(
        {'_id': '{}/andy'.format(profs.name)},
        vertex_uniqueness='global',
        edge_uniqueness='global',
        filter_func='if (vertex._key == "MAT101") {return "exclude";} return;'
    )
    assert extract('_key', result['vertices']) == ['MAT102', 'MAT223', 'andy']

    with assert_raises(DocumentParseError) as err:
        school.traverse({})
    assert err.value.message == 'field "_id" required'
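
The traversal options used above can be combined freely; a small sketch reusing the `school` graph and `profs` collection built in this test:

# Breadth-first, outbound-only traversal limited to depth 1, excluding one vertex.
result = school.traverse(
    '{}/anna'.format(profs.name),
    strategy='bfs',
    direction='outbound',
    max_depth=1,
    vertex_uniqueness='global',
    filter_func='if (vertex._key == "MAT223") {return "exclude";} return;',
)
print([v['_key'] for v in result['vertices']])
for path in result['paths']:
    print([v['_key'] for v in path['vertices']])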
Example #42
def test_edge_definition_management(db, graph, bad_graph):
    ecol_name = generate_col_name()
    assert not graph.has_edge_definition(ecol_name)
    assert not graph.has_edge_collection(ecol_name)
    assert not db.has_collection(ecol_name)

    ecol = graph.create_edge_definition(ecol_name, [], [])
    assert graph.has_edge_definition(ecol_name)
    assert graph.has_edge_collection(ecol_name)
    assert db.has_collection(ecol_name)
    assert isinstance(ecol, EdgeCollection)

    ecol = graph.edge_collection(ecol_name)
    assert ecol.name == ecol_name
    assert ecol.name in repr(ecol)
    assert ecol.graph == graph.name
    assert {
        'edge_collection': ecol_name,
        'from_vertex_collections': [],
        'to_vertex_collections': []
    } in graph.edge_definitions()
    assert ecol_name in extract('name', db.collections())

    # Test create duplicate edge definition
    with assert_raises(EdgeDefinitionCreateError) as err:
        graph.create_edge_definition(ecol_name, [], [])
    assert err.value.error_code == 1920

    # Test create edge definition with existing vertex collections
    fvcol_name = generate_col_name()
    tvcol_name = generate_col_name()
    ecol_name = generate_col_name()
    ecol = graph.create_edge_definition(
        edge_collection=ecol_name,
        from_vertex_collections=[fvcol_name],
        to_vertex_collections=[tvcol_name]
    )
    assert ecol.name == ecol_name
    assert {
        'edge_collection': ecol_name,
        'from_vertex_collections': [fvcol_name],
        'to_vertex_collections': [tvcol_name]
    } in graph.edge_definitions()
    assert ecol_name in extract('name', db.collections())

    vertex_collections = graph.vertex_collections()
    assert fvcol_name in vertex_collections
    assert tvcol_name in vertex_collections

    # Test create edge definition with missing vertex collection
    bad_vcol_name = generate_col_name()
    ecol_name = generate_col_name()
    ecol = graph.create_edge_definition(
        edge_collection=ecol_name,
        from_vertex_collections=[bad_vcol_name],
        to_vertex_collections=[bad_vcol_name]
    )
    assert graph.has_edge_definition(ecol_name)
    assert graph.has_edge_collection(ecol_name)
    assert ecol.name == ecol_name
    assert {
        'edge_collection': ecol_name,
        'from_vertex_collections': [bad_vcol_name],
        'to_vertex_collections': [bad_vcol_name]
    } in graph.edge_definitions()
    assert bad_vcol_name in graph.vertex_collections()
    assert bad_vcol_name in extract('name', db.collections())

    # Test list edge definition with bad database
    with assert_raises(EdgeDefinitionListError) as err:
        bad_graph.edge_definitions()
    assert err.value.error_code in {11, 1228}

    # Test replace edge definition (happy path)
    ecol = graph.replace_edge_definition(
        edge_collection=ecol_name,
        from_vertex_collections=[tvcol_name],
        to_vertex_collections=[fvcol_name]
    )
    assert isinstance(ecol, EdgeCollection)
    assert ecol.name == ecol_name
    assert {
        'edge_collection': ecol_name,
        'from_vertex_collections': [tvcol_name],
        'to_vertex_collections': [fvcol_name]
    } in graph.edge_definitions()

    # Test replace missing edge definition
    bad_ecol_name = generate_col_name()
    with assert_raises(EdgeDefinitionReplaceError):
        graph.replace_edge_definition(
            edge_collection=bad_ecol_name,
            from_vertex_collections=[],
            to_vertex_collections=[fvcol_name]
        )

    # Test delete missing edge definition
    with assert_raises(EdgeDefinitionDeleteError) as err:
        graph.delete_edge_definition(bad_ecol_name)
    assert err.value.error_code == 1930

    # Test delete existing edge definition with purge
    assert graph.delete_edge_definition(ecol_name, purge=True) is True
    assert ecol_name not in \
        extract('edge_collection', graph.edge_definitions())
    assert not graph.has_edge_definition(ecol_name)
    assert not graph.has_edge_collection(ecol_name)
    assert ecol_name not in extract('name', db.collections())
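
A minimal sketch of managing an edge definition over its lifetime, assuming `graph` is an existing Graph handle; all collection names are placeholders:

# Register an edge collection linking two vertex collections.
teaches = graph.create_edge_definition(
    edge_collection='teaches_example',
    from_vertex_collections=['teachers_example'],
    to_vertex_collections=['courses_example'],
)

# Later, widen the definition to also allow teacher-to-teacher edges.
graph.replace_edge_definition(
    edge_collection='teaches_example',
    from_vertex_collections=['teachers_example'],
    to_vertex_collections=['courses_example', 'teachers_example'],
)
print(graph.edge_definitions())
graph.delete_edge_definition('teaches_example', purge=False)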
Example #43
def test_graph_management(db, bad_db):
    # Test create graph
    graph_name = generate_graph_name()
    assert db.has_graph(graph_name) is False

    graph = db.create_graph(graph_name)
    assert db.has_graph(graph_name) is True
    assert graph.name == graph_name
    assert graph.db_name == db.name

    # Test create duplicate graph
    with assert_raises(GraphCreateError) as err:
        db.create_graph(graph_name)
    assert err.value.error_code == 1925

    # Test get graph
    result = db.graph(graph_name)
    assert result.name == graph.name
    assert result.db_name == graph.db_name

    # Test get graphs
    result = db.graphs()
    for entry in result:
        assert 'revision' in entry
        assert 'edge_definitions' in entry
        assert 'orphan_collections' in entry
    assert graph_name in extract('name', db.graphs())

    # Test get graphs with bad database
    with assert_raises(GraphListError) as err:
        bad_db.graphs()
    assert err.value.error_code in {11, 1228}

    # Test delete graph
    assert db.delete_graph(graph_name) is True
    assert graph_name not in extract('name', db.graphs())

    # Test delete missing graph
    with assert_raises(GraphDeleteError) as err:
        db.delete_graph(graph_name)
    assert err.value.error_code == 1924
    assert db.delete_graph(graph_name, ignore_missing=True) is False

    # Create a graph with vertex and edge collections and delete the graph
    graph = db.create_graph(graph_name)
    ecol_name = generate_col_name()
    fvcol_name = generate_col_name()
    tvcol_name = generate_col_name()

    graph.create_vertex_collection(fvcol_name)
    graph.create_vertex_collection(tvcol_name)
    graph.create_edge_definition(
        edge_collection=ecol_name,
        from_vertex_collections=[fvcol_name],
        to_vertex_collections=[tvcol_name]
    )
    collections = extract('name', db.collections())
    assert fvcol_name in collections
    assert tvcol_name in collections
    assert ecol_name in collections

    db.delete_graph(graph_name)
    collections = extract('name', db.collections())
    assert fvcol_name in collections
    assert tvcol_name in collections
    assert ecol_name in collections

    # Create a graph with vertex and edge collections and delete all
    graph = db.create_graph(graph_name)
    graph.create_edge_definition(
        edge_collection=ecol_name,
        from_vertex_collections=[fvcol_name],
        to_vertex_collections=[tvcol_name]
    )
    db.delete_graph(graph_name, drop_collections=True)
    collections = extract('name', db.collections())
    assert fvcol_name not in collections
    assert tvcol_name not in collections
    assert ecol_name not in collections
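
Putting the graph-management calls together, a minimal sketch assuming `db` is a StandardDatabase handle; graph and collection names are placeholders:

# Create a graph, give it one edge definition, then remove it with its collections.
g = db.create_graph('school_example')
g.create_edge_definition(
    edge_collection='teaches_example',
    from_vertex_collections=['teachers_example'],
    to_vertex_collections=['courses_example'],
)
print([entry['name'] for entry in db.graphs()])
db.delete_graph('school_example', drop_collections=True, ignore_missing=True)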