Example #1
class testExample():
    '''
    run all tests on a single env without taking
    env down between tests
    '''
    def __init__(self):
        self.env = Env()

    def setUp(self):
        self.env.debugPrint('setUp', True)
        self.env.cmd('set', 'foo', 'bar')

    def tearDown(self):
        self.env.debugPrint('tearDown', True)
        self.env.expect('get', 'foo').equal('bar')
        self.env.cmd('flushall')

    def testExample(self):
        con = self.env.getConnection()
        con.set('x', 1)
        self.env.assertEqual(con.get('x'), '1')

    def testExample1(self):
        con = self.env.getConnection()
        con.set('x', 1)
        self.env.assertEqual(con.get('x'), '1')
        self.env.assertFalse(True)  # check failure

    def testExample2(self):
        con = self.env.getConnection()
        con.set('x', 1)
        self.env.assertEqual(con.get('x'), '1')
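
# Illustration only (not part of the original example): a minimal sketch of the
# lifecycle implied by the class above -- __init__ builds the Env once, setUp and
# tearDown wrap each test method, and the env itself is never taken down between
# tests. This is not RLTest's actual runner, just the pattern it follows.
def run_test_class(cls):
    instance = cls()  # the Env is created exactly once here
    for name in sorted(n for n in dir(instance) if n.startswith('test')):
        if hasattr(instance, 'setUp'):
            instance.setUp()
        try:
            getattr(instance, name)()  # run the test against the shared env
        finally:
            if hasattr(instance, 'tearDown'):
                instance.tearDown()  # per-test cleanup; the env keeps running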
Example #2
class testQueryTimeout(FlowTestsBase):
    def __init__(self):
        # skip test if we're running under Valgrind
        if Env().envRunner.debugger is not None:
            Env().skip()  # queries will be much slower under Valgrind

        self.env = Env(decodeResponses=True)
        global redis_con
        redis_con = self.env.getConnection()

    def test_read_query_timeout(self):
        query = "UNWIND range(0,100000) AS x WITH x AS x WHERE x = 10000 RETURN x"
        response = redis_con.execute_command("GRAPH.QUERY", "g", query,
                                             "timeout", 1)
        error = response[-1]
        self.env.assertTrue(isinstance(error, ResponseError))
        self.env.assertContains("Query timed out", str(error))

        response = redis_con.execute_command("GRAPH.QUERY", "g", query,
                                             "timeout", 100)
        self.env.assertFalse(isinstance(response[-1], ResponseError))

    def test_write_query_timeout(self):
        query = "create ()"
        try:
            redis_con.execute_command("GRAPH.QUERY", "g", query, "timeout", 1)
        except ResponseError:
            # Expecting an error.
            pass
        else:
            assert False, "query should have timed out"
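
    # A possible helper (illustration only, not in the original test): the same
    # check as above, but reusing the Env assertions from the read-timeout test
    # so the error message is verified as well. Assumes the same redis-py
    # ResponseError import used there.
    def assert_write_query_times_out(self, query):
        try:
            redis_con.execute_command("GRAPH.QUERY", "g", query, "timeout", 1)
        except ResponseError as e:
            self.env.assertContains("Query timed out", str(e))
        else:
            self.env.assertTrue(False)  # no error raised -- the query did not time out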
Example #3
class testPendingQueryLimit():
    def __init__(self):
        # skip test if we're running under Valgrind
        if Env().envRunner.debugger is not None:
            Env().skip() # valgrind is not working correctly with multi process

        self.env = Env(decodeResponses=True)
        self.conn = self.env.getConnection()

    def test_01_query_limit_config(self):
        # read max queued queries config
        result = self.conn.execute_command("GRAPH.CONFIG", "GET", "MAX_QUEUED_QUERIES")
        max_queued_queries = result[1]
        self.env.assertEquals(max_queued_queries, 4294967295)

        # update configuration, set max queued queries
        self.conn.execute_command("GRAPH.CONFIG", "SET", "MAX_QUEUED_QUERIES", 10)

        # re-read configuration
        result = self.conn.execute_command("GRAPH.CONFIG", "GET", "MAX_QUEUED_QUERIES")
        max_queued_queries = result[1]
        self.env.assertEquals(max_queued_queries, 10)

    def stress_server(self):
        threadpool_size = self.conn.execute_command("GRAPH.CONFIG", "GET", "THREAD_COUNT")[1]
        thread_count = threadpool_size * 5
        qs = [SLOW_QUERY] * thread_count
        connections = []
        pool = Pool(nodes=thread_count)

        # init connections
        for i in range(thread_count):
            connections.append(self.env.getConnection())

        # invoke queries
        result = pool.map(issue_query, connections, qs)

        # return if error encountered
        return any(result)

    def test_02_overflow_no_limit(self):
        # no limit on number of pending queries
        limit = 4294967295
        self.conn.execute_command("GRAPH.CONFIG", "SET", "MAX_QUEUED_QUERIES", limit)

        error_encountered = self.stress_server()

        self.env.assertFalse(error_encountered)

    def test_03_overflow_with_limit(self):
        # limit number of pending queries
        limit = 1
        self.conn.execute_command("GRAPH.CONFIG", "SET", "MAX_QUEUED_QUERIES", limit)

        error_encountered = self.stress_server()

        self.env.assertTrue(error_encountered)
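
# SLOW_QUERY and issue_query are defined elsewhere in the original test file.
# A plausible sketch of the worker handed to pool.map() in stress_server() above
# (hypothetical, for illustration only, assuming redis-py's ResponseError is
# imported): it runs one query on one connection and reports whether the server
# rejected it.
def issue_query(conn, query):
    try:
        conn.execute_command("GRAPH.QUERY", "g", query)
        return False
    except ResponseError:
        return True  # e.g. rejected because the pending-query queue is full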
Example #4
class testExample():
    '''
    run all tests on a single env without taking
    env down between tests
    '''
    def __init__(self):
        self.env = Env()

    def testExample(self):
        con = self.env.getConnection()
        con.set('x', 1)
        self.env.assertEqual(con.get('x'), '1')

    def testExample1(self):
        con = self.env.getConnection()
        con.set('x', 1)
        self.env.assertEqual(con.get('x'), '1')
        self.env.assertFalse(True)  # check failure

    def testExample2(self):
        con = self.env.getConnection()
        con.set('x', 1)
        self.env.assertEqual(con.get('x'), '1')
Example #5
class testGraphMergeFlow(FlowTestsBase):
    def __init__(self):
        self.env = Env()
        global redis_graph
        global graph_2
        redis_con = self.env.getConnection()
        redis_graph = Graph("G", redis_con)
        graph_2 = Graph("H", redis_con)

    # Create a single node without any labels or properties.
    def test01_single_node_with_label(self):
        global redis_graph
        query = """MERGE (robert:Critic)"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 1)
        self.env.assertEquals(result.nodes_created, 1)
        self.env.assertEquals(result.properties_set, 0)

    # Retry to create an existing entity.
    def test02_existing_single_node_with_label(self):
        global redis_graph
        query = """MERGE (robert:Critic)"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 0)

    # Create a single node with two properties and no labels.
    def test03_single_node_with_properties(self):
        global redis_graph
        query = """MERGE (charlie { name: 'Charlie Sheen', age: 10 })"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 1)
        self.env.assertEquals(result.properties_set, 2)

    # Retry to create an existing entity.
    def test04_existing_single_node_with_properties(self):
        global redis_graph
        query = """MERGE (charlie { name: 'Charlie Sheen', age: 10 })"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 0)

    # Create a single node with both label and property.
    def test05_single_node_both_label_and_property(self):
        global redis_graph
        query = """MERGE (michael:Person { name: 'Michael Douglas' })"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 1)
        self.env.assertEquals(result.nodes_created, 1)
        self.env.assertEquals(result.properties_set, 1)

    # Retry to create an existing entity.
    def test06_existing_single_node_both_label_and_property(self):
        global redis_graph
        query = """MERGE (michael:Person { name: 'Michael Douglas' })"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 0)

    # Create a single edge and additional two nodes.
    def test07_merge_on_relationship(self):
        global redis_graph
        query = """MERGE (charlie:ACTOR)-[r:ACTED_IN]->(wallStreet:MOVIE)"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 2)
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.properties_set, 0)
        self.env.assertEquals(result.relationships_created, 1)

    # Retry to create a single edge and additional two nodes.
    def test08_existing_merge_on_relationship(self):
        global redis_graph
        query = """MERGE (charlie:ACTOR)-[r:ACTED_IN]->(wallStreet:MOVIE)"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 0)
        self.env.assertEquals(result.relationships_created, 0)

    # Update existing entity
    def test09_update_existing_node(self):
        global redis_graph
        query = """MERGE (charlie { name: 'Charlie Sheen' }) SET charlie.age = 11, charlie.lastname='Sheen' """
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 2)
        self.env.assertEquals(result.relationships_created, 0)

        query = """MATCH (charlie { name: 'Charlie Sheen' }) RETURN charlie.age, charlie.name, charlie.lastname"""
        actual_result = redis_graph.query(query)
        expected_result = [[11, 'Charlie Sheen', 'Sheen']]
        self.env.assertEquals(actual_result.result_set, expected_result)

    # Update new entity
    def test10_update_new_node(self):
        global redis_graph
        query = """MERGE (tamara:ACTOR { name: 'tamara tunie' }) SET tamara.age = 59, tamara.name = 'Tamara Tunie' """
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 1)
        self.env.assertEquals(result.properties_set, 3)
        self.env.assertEquals(result.relationships_created, 0)

        query = """MATCH (tamara:ACTOR { name: 'Tamara Tunie' }) RETURN tamara.name, tamara.age"""
        actual_result = redis_graph.query(query)
        expected_result = [['Tamara Tunie', 59]]
        self.env.assertEquals(actual_result.result_set, expected_result)

    # Create a single edge and additional two nodes.
    def test11_update_new_relationship(self):
        global redis_graph
        query = """MERGE (franklin:ACTOR { name: 'Franklin Cover' })-[r:ACTED_IN {rate:5.7}]->(almostHeroes:MOVIE) SET r.date=1998, r.rate=5.8"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.properties_set, 4)
        self.env.assertEquals(result.relationships_created, 1)

    # Update existing relation
    def test12_update_existing_edge(self):
        global redis_graph
        query = """MERGE (franklin:ACTOR { name: 'Franklin Cover' })-[r:ACTED_IN {rate:5.8, date:1998}]->(almostHeroes:MOVIE) SET r.date=1998, r.rate=5.9"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 2)
        self.env.assertEquals(result.relationships_created, 0)

        query = """MATCH (franklin:ACTOR { name: 'Franklin Cover' })-[r:ACTED_IN {rate:5.9, date:1998}]->(almostHeroes:MOVIE) RETURN franklin.name, franklin.age, r.rate, r.date"""
        actual_result = redis_graph.query(query)
        expected_result = [['Franklin Cover', None, 5.9, 1998]]
        self.env.assertEquals(actual_result.result_set, expected_result)

    # Update multiple nodes
    def test13_update_multiple_nodes(self):
        global redis_graph
        query = """CREATE (:person {age:31}),(:person {age:31}),(:person {age:31}),(:person {age:31})"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 1)
        self.env.assertEquals(result.nodes_created, 4)
        self.env.assertEquals(result.properties_set, 4)

        query = """MERGE (p:person {age:31}) SET p.newprop=100"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 4)

        query = """MATCH (p:person) RETURN p.age, p.newprop"""
        actual_result = redis_graph.query(query)
        expected_result = [[31, 100], [31, 100], [31, 100], [31, 100]]
        self.env.assertEquals(actual_result.result_set, expected_result)

    # Update multiple nodes
    def test14_merge_unbounded_pattern(self):
        global redis_graph
        query = """MERGE (p:person {age:31})-[:owns]->(d:dog {name:'max'})"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 1)
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.properties_set, 2)
        self.env.assertEquals(result.relationships_created, 1)

        # Although a person with age 31 and a dog named max already exist,
        # the specified pattern does not, so the entire pattern is created.
        # If this were a MATCH followed by MERGE, 'p' and 'd' would be bound
        # by the MATCH clause and would not be duplicated.
        query = """MERGE (p:person {age:31})-[:owns]->(d:dog {name:'max'})-[:eats]->(f:food {name:'Royal Canin'})"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 1)
        self.env.assertEquals(result.nodes_created, 3)
        self.env.assertEquals(result.properties_set, 3)
        self.env.assertEquals(result.relationships_created, 2)

    # Add node that matches pre-existing index
    def test15_merge_indexed_entity(self):
        global redis_graph
        # Create index
        query = """CREATE INDEX ON :person(age)"""
        redis_graph.query(query)

        count_query = """MATCH (p:person) WHERE p.age > 0 RETURN COUNT(p)"""
        result = redis_graph.query(count_query)
        original_count = result.result_set[0][0]

        # Add one new person
        merge_query = """MERGE (p:person {age:40})"""
        result = redis_graph.query(merge_query)
        self.env.assertEquals(result.nodes_created, 1)
        self.env.assertEquals(result.properties_set, 1)
        # Verify that one indexed node has been added
        result = redis_graph.query(count_query)
        updated_count = result.result_set[0][0]
        self.env.assertEquals(updated_count, original_count + 1)

        # Perform another merge that does not create an entity
        result = redis_graph.query(merge_query)
        self.env.assertEquals(result.nodes_created, 0)

        # Verify that indexed node count is unchanged
        result = redis_graph.query(count_query)
        updated_count = result.result_set[0][0]
        self.env.assertEquals(updated_count, original_count + 1)

    # Update nodes based on non-constant inlined properties
    def test16_merge_dynamic_properties(self):
        global redis_graph
        # Create and verify a new node
        query = """MERGE (q:dyn {name: toUpper('abcde')}) RETURN q.name"""
        expected = [['ABCDE']]

        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 1)
        self.env.assertEquals(result.nodes_created, 1)
        self.env.assertEquals(result.properties_set, 1)

        self.env.assertEquals(result.result_set, expected)

        # Repeat the query and verify that no changes were introduced
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 0)

        # Verify that MATCH...MERGE on the same entity does not introduce changes
        query = """MATCH (q {name: 'ABCDE'}) MERGE (r {name: q.name}) RETURN r.name"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 0)
        self.env.assertEquals(result.result_set, expected)

    def test17_complex_merge_queries(self):
        # Beginning with an empty graph
        global graph_2
        # Create a new pattern
        query = """MERGE (a:Person {name: 'a'}) MERGE (b:Person {name: 'b'}) MERGE (a)-[e:FRIEND {val: 1}]->(b) RETURN a.name, e.val, b.name"""
        result = graph_2.query(query)
        expected = [['a', 1, 'b']]

        # Verify the results
        self.env.assertEquals(result.labels_added, 1)
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.relationships_created, 1)
        self.env.assertEquals(result.properties_set, 3)
        self.env.assertEquals(result.result_set, expected)

        # Repeat the query and verify that no changes were introduced
        result = graph_2.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.relationships_created, 0)
        self.env.assertEquals(result.properties_set, 0)
        self.env.assertEquals(result.result_set, expected)

        # Verify that these entities are accessed properly with MATCH...MERGE queries
        query = """MATCH (a:Person {name: 'a'}), (b:Person {name: 'b'}) MERGE (a)-[e:FRIEND {val: 1}]->(b) RETURN a.name, e.val, b.name"""
        result = graph_2.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.relationships_created, 0)
        self.env.assertEquals(result.properties_set, 0)
        self.env.assertEquals(result.result_set, expected)

        # Verify that we can bind entities properly in variable-length traversals
        query = """MATCH (a)-[*]->(b) MERGE (a)-[e:FRIEND {val: 1}]->(b) RETURN a.name, e.val, b.name"""
        result = graph_2.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.relationships_created, 0)
        self.env.assertEquals(result.properties_set, 0)
        self.env.assertEquals(result.result_set, expected)

        # Verify UNWIND...MERGE does not recreate existing entities
        query = """UNWIND ['a', 'b'] AS names MERGE (a:Person {name: names}) RETURN a.name"""
        expected = [['a'], ['b']]

        result = graph_2.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.relationships_created, 0)
        self.env.assertEquals(result.properties_set, 0)
        self.env.assertEquals(result.result_set, expected)

        # Merging entities from an UNWIND list
        query = """UNWIND ['a', 'b', 'c'] AS names MERGE (a:Person {name: names}) ON MATCH SET a.set_by = 'match' ON CREATE SET a.set_by = 'create' RETURN a.name, a.set_by ORDER BY a.name"""
        expected = [['a', 'match'], ['b', 'match'], ['c', 'create']]

        result = graph_2.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 1)
        self.env.assertEquals(result.properties_set, 4)
        self.env.assertEquals(result.result_set, expected)

        # Connect 'c' to both 'a' and 'b' via a FRIEND relation.
        # Note that both `c` and `x` are bound, so the MERGE duplicate check
        # inspects the created edge only by its relationship type, properties
        # and bound endpoints; the first created edge therefore differs from
        # the second one (the destination node changes).
        query = """MATCH (c:Person {name: 'c'}) MATCH (x:Person) WHERE x.name in ['a', 'b'] WITH c, x MERGE(c)-[:FRIEND]->(x)"""
        result = graph_2.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 0)
        self.env.assertEquals(result.relationships_created, 2)

        # Verify function calls in MERGE do not recreate existing entities
        query = """UNWIND ['A', 'B'] AS names MERGE (a:Person {name: toLower(names)}) RETURN a.name"""
        expected = [['a'], ['b']]

        result = graph_2.query(query)
        self.env.assertEquals(result.labels_added, 0)
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.relationships_created, 0)
        self.env.assertEquals(result.properties_set, 0)
        self.env.assertEquals(result.result_set, expected)

        query = """MERGE (a:Person {name: 'a'}) ON MATCH SET a.set_by = 'match' ON CREATE SET a.set_by = 'create' MERGE (b:Clone {name: a.name + '_clone'}) ON MATCH SET b.set_by = 'match' ON CREATE SET b.set_by = 'create' RETURN a.name, a.set_by, b.name, b.set_by"""
        result = graph_2.query(query)
        expected = [['a', 'match', 'a_clone', 'create']]

        # Verify the results
        self.env.assertEquals(result.labels_added, 1)
        self.env.assertEquals(result.nodes_created, 1)
        self.env.assertEquals(result.properties_set, 3)
        self.env.assertEquals(result.result_set, expected)

    def test18_merge_unique_creations(self):
        global graph_2
        # Create a new pattern with non-unique entities.
        query = """UNWIND ['newprop1', 'newprop2'] AS x MERGE ({v:x})-[:e]->(n {v:'newprop1'})"""
        result = graph_2.query(query)

        # Verify that every entity was created in both executions.
        self.env.assertEquals(result.nodes_created, 4)
        self.env.assertEquals(result.relationships_created, 2)
        self.env.assertEquals(result.properties_set, 4)

        # Repeat the query.
        result = graph_2.query(query)

        # Verify that no data was modified.
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.relationships_created, 0)
        self.env.assertEquals(result.properties_set, 0)

    def test19_merge_dependency(self):
        redis_con = self.env.getConnection()
        graph = Graph("M", redis_con)

        # Starting with an empty graph.
        # Create 2 nodes and connect them to one another.
        self.env.flush()
        query = """MERGE (a:Person {name: 'a'}) MERGE (b:Person {name: 'b'}) MERGE (a)-[:FRIEND]->(b) MERGE (b)-[:FRIEND]->(a)"""
        result = graph.query(query)

        # Verify that every entity was created.
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.relationships_created, 2)
        self.env.assertEquals(result.properties_set, 2)

        # Repeat the query.
        result = graph.query(query)

        # Verify that no data was modified.
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.relationships_created, 0)
        self.env.assertEquals(result.properties_set, 0)

    def test20_merge_edge_dependency(self):
        redis_con = self.env.getConnection()
        graph = Graph("M", redis_con)

        # Starting with an empty graph.
        # Make sure the pattern ()-[:KNOWS]->()-[:KNOWS]->() exists.
        self.env.flush()
        query = """MERGE (a {v:1}) MERGE (b {v:2}) MERGE (a)-[:KNOWS]->(b) MERGE ()-[:KNOWS]->()-[:KNOWS]->()"""
        result = graph.query(query)

        # Verify that every entity was created.
        self.env.assertEquals(result.nodes_created, 5)
        self.env.assertEquals(result.relationships_created, 3)
        self.env.assertEquals(result.properties_set, 2)

        # Repeat the query.
        result = graph.query(query)

        # Verify that no data was modified.
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.relationships_created, 0)
        self.env.assertEquals(result.properties_set, 0)

    def test21_merge_scan(self):
        redis_con = self.env.getConnection()
        graph = Graph("M", redis_con)

        # Starting with an empty graph.
        # All node scan should see created nodes.
        self.env.flush()
        query = """MERGE (a {v:1}) WITH a MATCH (n) MERGE (n)-[:KNOWS]->(m)"""
        result = graph.query(query)

        # Verify that every entity was created.
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.relationships_created, 1)
        self.env.assertEquals(result.properties_set, 1)

        # Starting with an empty graph.
        # Label scan should see created nodes.
        self.env.flush()
        query = """MERGE (a:L {v:1}) WITH a MATCH (n:L) MERGE (n)-[:KNOWS]->(m)"""
        result = graph.query(query)

        # Verify that every entity was created.
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.relationships_created, 1)
        self.env.assertEquals(result.properties_set, 1)

    def test22_merge_label_scan(self):
        redis_con = self.env.getConnection()
        graph = Graph("M", redis_con)

        # Starting with an empty graph.
        # Merge the pattern (a)-[:KNOWS]->(b), then match it back and MERGE a LIKES edge between the same endpoints.
        self.env.flush()
        query = """MERGE (a {v:1}) MERGE (b {v:2}) MERGE (a)-[:KNOWS]->(b) WITH a AS c, b AS d MATCH (c)-[:KNOWS]->(d) MERGE (c)-[:LIKES]->(d)"""
        result = graph.query(query)

        # Verify that every entity was created.
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.relationships_created, 2)
        self.env.assertEquals(result.properties_set, 2)

        # Repeat the query.
        result = graph.query(query)

        # Verify that no data was modified.
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.relationships_created, 0)
        self.env.assertEquals(result.properties_set, 0)

    def test23_merge_var_traverse(self):
        redis_con = self.env.getConnection()
        graph = Graph("M", redis_con)

        # Starting with an empty graph.
        # Merge the pattern (a)-[:KNOWS]->(b), then match it via a variable-length traversal and MERGE a LIKES edge between the same endpoints.
        self.env.flush()
        query = """MERGE (a {v:1}) MERGE (b {v:2}) MERGE (a)-[:KNOWS]->(b) WITH a AS c, b AS d MATCH (c)-[:KNOWS*]->(d) MERGE (c)-[:LIKES]->(d)"""
        result = graph.query(query)

        # Verify that every entity was created.
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.relationships_created, 2)
        self.env.assertEquals(result.properties_set, 2)

        # Repeat the query.
        result = graph.query(query)

        # Verify that no data was modified.
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.relationships_created, 0)
        self.env.assertEquals(result.properties_set, 0)

    def test24_merge_merge_delete(self):
        redis_con = self.env.getConnection()
        graph = Graph("M", redis_con)

        # A MERGE followed by an additional MERGE and ending with a DELETE
        # that has no data to operate on. This used to trigger a forced lock
        # release, as the delete never tried to acquire/release the lock.
        self.env.flush()
        query = """MERGE (user:User {name:'Sceat'}) WITH user UNWIND [1,2,3] AS sessionHash MERGE (user)-[:HAS_SESSION]->(newSession:Session {hash:sessionHash}) WITH DISTINCT user, collect(newSession.hash) as newSessionHash MATCH (user)-->(s:Session) WHERE NOT s.hash IN newSessionHash DELETE s"""
        result = graph.query(query)

        # Verify that every entity was created.
        self.env.assertEquals(result.nodes_created, 4)
        self.env.assertEquals(result.properties_set, 4)
        self.env.assertEquals(result.relationships_created, 3)

        # Repeat the query.
        result = graph.query(query)

        # Verify that no data was modified.
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 0)
        self.env.assertEquals(result.relationships_created, 0)

    def test25_merge_with_where(self):
        redis_con = self.env.getConnection()
        graph = Graph("M", redis_con)

        # Index the :L(prop) combination so that the MERGE tree will not have a Filter op.
        query = """CREATE INDEX ON :L(prop)"""
        graph.query(query)

        query = """MERGE (n:L {prop:1}) WITH n WHERE n.prop < 1 RETURN n.prop"""
        result = graph.query(query)
        plan = graph.execution_plan(query)

        # Verify that the Filter op follows a Project op.
        self.env.assertTrue(re.search(r'Project\s+Filter', plan))

        # Verify that there is no Filter op after the Merge op.
        self.env.assertFalse(re.search(r'Merge\s+Filter', plan))

        # Verify that the entity was created and no results were returned.
        self.env.assertEquals(result.nodes_created, 1)
        self.env.assertEquals(result.properties_set, 1)

        # Repeat the query.
        result = graph.query(query)

        # Verify that no data was modified and no results were returned.
        self.env.assertEquals(result.nodes_created, 0)
        self.env.assertEquals(result.properties_set, 0)

    def test26_merge_set_invalid_property(self):
        redis_con = self.env.getConnection()
        graph = Graph("M", redis_con)

        query = """MATCH p=() MERGE () ON MATCH SET p.prop4 = 5"""
        result = graph.query(query)
        self.env.assertEquals(result.properties_set, 0)
Example #6
class TestRedisBloom():
    def __init__(self):
        self.env = Env()
        self.rb = RedisBloom(port=6379)

    def testCreate(self):
        '''Test CREATE/RESERVE calls'''
        self.env.cmd("flushall")
        rb = self.rb
        self.env.assertTrue(rb.bfCreate('bloom', 0.01, 1000))
        self.env.assertTrue(rb.cfCreate('cuckoo', 1000))
        self.env.assertTrue(rb.cmsInitByDim('cmsDim', 100, 5))
        self.env.assertTrue(rb.cmsInitByProb('cmsProb', 0.01, 0.01))
        self.env.assertTrue(rb.topkReserve('topk', 5, 100, 5, 0.9))

    ################### Test Bloom Filter ###################
    def testBFAdd(self):
        self.env.cmd("flushall")
        rb = self.rb
        self.env.assertTrue(rb.bfCreate('bloom', 0.01, 1000))
        self.env.assertEqual(1, rb.bfAdd('bloom', 'foo'))
        self.env.assertEqual(0, rb.bfAdd('bloom', 'foo'))
        self.env.assertEqual([0], i(rb.bfMAdd('bloom', 'foo')))
        self.env.assertEqual([0, 1], rb.bfMAdd('bloom', 'foo', 'bar'))
        self.env.assertEqual([0, 0, 1], rb.bfMAdd('bloom', 'foo', 'bar',
                                                  'baz'))
        self.env.assertEqual(1, rb.bfExists('bloom', 'foo'))
        self.env.assertEqual(0, rb.bfExists('bloom', 'noexist'))
        self.env.assertEqual([1, 0], i(rb.bfMExists('bloom', 'foo',
                                                    'noexist')))

    def testBFInsert(self):
        self.env.cmd("flushall")
        rb = self.rb
        self.env.assertTrue(rb.bfCreate('bloom', 0.01, 1000))
        self.env.assertEqual([1], i(rb.bfInsert('bloom', ['foo'])))
        self.env.assertEqual([0, 1], i(rb.bfInsert('bloom', ['foo', 'bar'])))
        self.env.assertEqual([1],
                             i(rb.bfInsert('captest', ['foo'], capacity=1000)))
        self.env.assertEqual([1], i(rb.bfInsert('errtest', ['foo'],
                                                error=0.01)))
        self.env.assertEqual(1, rb.bfExists('bloom', 'foo'))
        self.env.assertEqual(0, rb.bfExists('bloom', 'noexist'))
        self.env.assertEqual([1, 0], i(rb.bfMExists('bloom', 'foo',
                                                    'noexist')))

    def testBFDumpLoad(self):
        self.env.cmd("flushall")
        rb = self.rb
        # Store a filter
        rb.bfCreate('myBloom', '0.0001', '1000')

        # This test is probabilistic and might fail. It is OK to change the
        # variables if you are certain it will not break anything.
        def do_verify():
            res = 0
            for x in range(1000):
                rb.bfAdd('myBloom', x)
                rv = rb.bfExists('myBloom', x)
                self.env.assertTrue(rv)
                rv = rb.bfExists('myBloom', 'nonexist_{}'.format(x))
                res += (rv == x)
            self.env.assertLess(res, 5)

        do_verify()
        cmds = []
        cur = rb.bfScandump('myBloom', 0)
        first = cur[0]
        cmds.append(cur)

        while True:
            cur = rb.bfScandump('myBloom', first)
            first = cur[0]
            if first == 0:
                break
            else:
                cmds.append(cur)
        prev_info = rb.execute_command('bf.debug', 'myBloom')

        # Remove the filter
        rb.execute_command('del', 'myBloom')

        # Now, load all the commands:
        for cmd in cmds:
            rb.bfLoadChunk('myBloom', *cmd)

        cur_info = rb.execute_command('bf.debug', 'myBloom')
        self.env.assertEqual(prev_info, cur_info)
        do_verify()

        rb.execute_command('del', 'myBloom')
        rb.bfCreate('myBloom', '0.0001', '10000000')

    ################### Test Cuckoo Filter ###################
    def testCFAddInsert(self):
        self.env.cmd("flushall")
        rb = self.rb
        self.env.assertTrue(rb.cfCreate('cuckoo', 1000))
        self.env.assertTrue(rb.cfAdd('cuckoo', 'filter'))
        self.env.assertFalse(rb.cfAddNX('cuckoo', 'filter'))
        self.env.assertEqual(1, rb.cfAddNX('cuckoo', 'newItem'))
        self.env.assertEqual([1], rb.cfInsert('captest', ['foo']))
        self.env.assertEqual([1], rb.cfInsert('captest', ['foo'],
                                              capacity=1000))
        self.env.assertEqual([1], rb.cfInsertNX('captest', ['bar']))
        self.env.assertEqual([0, 0, 1],
                             rb.cfInsertNX('captest', ['foo', 'bar', 'baz']))
        self.env.assertEqual([0],
                             rb.cfInsertNX('captest', ['bar'], capacity=1000))
        self.env.assertEqual([1], rb.cfInsert('empty1', ['foo'],
                                              capacity=1000))
        self.env.assertEqual([1],
                             rb.cfInsertNX('empty2', ['bar'], capacity=1000))

    def testCFExistsDel(self):
        self.env.cmd("flushall")
        rb = self.rb
        self.env.assertTrue(rb.cfCreate('cuckoo', 1000))
        self.env.assertTrue(rb.cfAdd('cuckoo', 'filter'))
        self.env.assertTrue(rb.cfExists('cuckoo', 'filter'))
        self.env.assertFalse(rb.cfExists('cuckoo', 'notexist'))
        self.env.assertEqual(1, rb.cfCount('cuckoo', 'filter'))
        self.env.assertEqual(0, rb.cfCount('cuckoo', 'notexist'))
        self.env.assertTrue(rb.cfDel('cuckoo', 'filter'))
        self.env.assertEqual(0, rb.cfCount('cuckoo', 'filter'))

    ################### Test Count-Min Sketch ###################
    def testCMS(self):
        self.env.cmd("flushall")
        rb = self.rb
        self.env.assertTrue(rb.cmsInitByDim('dim', 1000, 5))
        self.env.assertTrue(rb.cmsInitByProb('prob', 0.01, 0.01))
        self.env.assertTrue(rb.cmsIncrBy('dim', ['foo'], [5]))
        self.env.assertEqual([0], rb.cmsQuery('dim', 'notexist'))
        self.env.assertEqual([5], rb.cmsQuery('dim', 'foo'))
        self.env.assertTrue(rb.cmsIncrBy('dim', ['foo', 'bar'], [5, 15]))
        self.env.assertEqual([10, 15], rb.cmsQuery('dim', 'foo', 'bar'))
        info = rb.cmsInfo('dim')
        self.env.assertEqual(1000, info.width)
        self.env.assertEqual(5, info.depth)
        self.env.assertEqual(25, info.count)

    def testCMSMerge(self):
        self.env.cmd("flushall")
        rb = self.rb
        self.env.assertTrue(rb.cmsInitByDim('A', 1000, 5))
        self.env.assertTrue(rb.cmsInitByDim('B', 1000, 5))
        self.env.assertTrue(rb.cmsInitByDim('C', 1000, 5))
        self.env.assertTrue(rb.cmsIncrBy('A', ['foo', 'bar', 'baz'],
                                         [5, 3, 9]))
        self.env.assertTrue(rb.cmsIncrBy('B', ['foo', 'bar', 'baz'],
                                         [2, 3, 1]))
        self.env.assertEqual([5, 3, 9], rb.cmsQuery('A', 'foo', 'bar', 'baz'))
        self.env.assertEqual([2, 3, 1], rb.cmsQuery('B', 'foo', 'bar', 'baz'))
        self.env.assertTrue(rb.cmsMerge('C', 2, ['A', 'B']))
        self.env.assertEqual([7, 6, 10], rb.cmsQuery('C', 'foo', 'bar', 'baz'))
        self.env.assertTrue(rb.cmsMerge('C', 2, ['A', 'B'], ['1', '2']))
        self.env.assertEqual([9, 9, 11], rb.cmsQuery('C', 'foo', 'bar', 'baz'))
        self.env.assertTrue(rb.cmsMerge('C', 2, ['A', 'B'], ['2', '3']))
        self.env.assertEqual([16, 15, 21], rb.cmsQuery('C', 'foo', 'bar',
                                                       'baz'))

    ################### Test Top-K ###################
    def testTopK(self):
        self.env.cmd("flushall")
        rb = self.rb
        # test list with empty buckets
        self.env.assertTrue(rb.topkReserve('topk', 10, 50, 3, 0.9))
        self.env.assertTrue(
            rb.topkAdd('topk', 'A', 'B', 'C', 'D', 'E', 'A', 'A', 'B', 'C',
                       'G', 'D', 'B', 'D', 'A', 'E', 'E'))
        self.env.assertEqual([1, 1, 1, 1, 1, 0, 1],
                             rb.topkQuery('topk', 'A', 'B', 'C', 'D', 'E', 'F',
                                          'G'))
        self.env.assertEqual([4, 3, 2, 3, 3, 0, 1],
                             rb.topkCount('topk', 'A', 'B', 'C', 'D', 'E', 'F',
                                          'G'))

        # test full list
        self.env.assertTrue(rb.topkReserve('topklist', 3, 50, 3, 0.9))
        self.env.assertTrue(
            rb.topkAdd('topklist', 'A', 'B', 'C', 'D', 'E', 'A', 'A', 'B', 'C',
                       'G', 'D', 'B', 'D', 'A', 'E', 'E'))
        self.env.assertEqual(['D', 'A', 'B'], rb.topkList('topklist'))
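
# The i() helper used in the assertions above is defined elsewhere in the original
# test module. A plausible sketch (hypothetical): it normalizes a reply into a
# list of ints so boolean-style replies compare cleanly against literal lists.
def i(reply):
    return [int(v) for v in reply]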
Example #7
class testCache(FlowTestsBase):
    def __init__(self):
        # Have only one thread handling queries
        self.env = Env(
            moduleArgs='THREAD_COUNT 1 CACHE_SIZE {CACHE_SIZE}'.format(
                CACHE_SIZE=CACHE_SIZE))
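        # CACHE_SIZE is a module-level constant defined elsewhere in the original
        # test file; it is the execution-plan cache capacity passed to the module
        # and reused by test_sanity_check below.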
        global redis_con
        redis_con = self.env.getConnection()

    def compare_uncached_to_cached_query_plans(self, query):
        global redis_con
        plan_graph = Graph('Cache_Test_plans', redis_con)
        uncached_plan = plan_graph.execution_plan(query)
        cached_plan = plan_graph.execution_plan(query)
        self.env.assertEqual(uncached_plan, cached_plan)
        plan_graph.delete()

    def test_sanity_check(self):
        graph = Graph('Cache_Sanity_Check', redis_con)
        for i in range(CACHE_SIZE + 1):
            result = graph.query(
                "MATCH (n) WHERE n.value = {val} RETURN n".format(val=i))
            self.env.assertFalse(result.cached_execution)

        for i in range(1, CACHE_SIZE + 1):
            result = graph.query(
                "MATCH (n) WHERE n.value = {val} RETURN n".format(val=i))
            self.env.assertTrue(result.cached_execution)

        result = graph.query("MATCH (n) WHERE n.value = 0 RETURN n")
        self.env.assertFalse(result.cached_execution)

        graph.delete()

    def test01_test_create(self):
        # Both queries do exactly the same operations
        graph = Graph('Cache_Test_Create', redis_con)
        query = "CREATE ()"
        self.compare_uncached_to_cached_query_plans(query)
        uncached_result = graph.query(query)
        cached_result = graph.query(query)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual(uncached_result.nodes_created,
                             cached_result.nodes_created)
        graph.delete()

    def test02_test_create_with_params(self):
        # Both queries do exactly the same operations
        graph = Graph('Cache_Test_Create_With_Params', redis_con)
        params = {'val': 1}
        query = "CREATE ({val:$val})"
        self.compare_uncached_to_cached_query_plans(query)
        uncached_result = graph.query(query, params)
        params = {'val': 2}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual(uncached_result.nodes_created,
                             cached_result.nodes_created)
        graph.delete()

    def test03_test_delete(self):
        # Both queries do exactly the same operations
        graph = Graph('Cache_Test_Delete', redis_con)
        for i in range(2):
            params = {'val': i}
            query = "CREATE ({val:$val})-[:R]->()"
            graph.query(query, params)

        params = {'val': 0}
        query = "MATCH (n {val:$val}) DELETE n"
        self.compare_uncached_to_cached_query_plans(query)
        uncached_result = graph.query(query, params)
        params = {'val': 1}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual(uncached_result.relationships_deleted,
                             cached_result.relationships_deleted)
        self.env.assertEqual(uncached_result.nodes_deleted,
                             cached_result.nodes_deleted)
        graph.delete()

    def test04_test_merge(self):
        # Different outcome, same execution plan.
        graph = Graph('Cache_Test_Merge', redis_con)
        params = {'create_val': 0, 'match_val': 1}
        query = "MERGE (n) ON CREATE SET n.val = $create_val ON MATCH SET n.val = $match_val RETURN n.val"
        self.compare_uncached_to_cached_query_plans(query)
        uncached_result = graph.query(query, params)
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual(uncached_result.properties_set,
                             cached_result.properties_set)
        self.env.assertEqual([[0]], uncached_result.result_set)
        self.env.assertEqual(1, uncached_result.nodes_created)
        self.env.assertEqual([[1]], cached_result.result_set)
        self.env.assertEqual(0, cached_result.nodes_created)

        graph.delete()

    def test05_test_branching_with_path_filter(self):
        # Different outcome, same execution plan.
        graph = Graph('Cache_Test_Path_Filter', redis_con)
        query = "CREATE ({val:1})-[:R]->({val:2})-[:R2]->({val:3})"
        graph.query(query)
        query = "MATCH (n) WHERE (n)-[:R]->({val:$val}) OR (n)-[:R2]->({val:$val}) RETURN n.val"
        self.compare_uncached_to_cached_query_plans(query)
        params = {'val': 2}
        uncached_result = graph.query(query, params)
        params = {'val': 3}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual([[1]], uncached_result.result_set)
        self.env.assertEqual([[2]], cached_result.result_set)
        graph.delete()

    def test06_test_optimizations_index(self):
        graph = Graph('Cache_Test_Index', redis_con)
        graph.query("CREATE INDEX ON :N(val)")
        query = "CREATE (:N{val:1}), (:N{val:2})"
        graph.query(query)
        query = "MATCH (n:N{val:$val}) RETURN n.val"
        self.compare_uncached_to_cached_query_plans(query)
        params = {'val': 1}
        uncached_result = graph.query(query, params)
        params = {'val': 2}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual([[1]], uncached_result.result_set)
        self.env.assertEqual([[2]], cached_result.result_set)
        graph.delete()

    def test07_test_optimizations_id_scan(self):
        graph = Graph('Cache_Test_ID_Scan', redis_con)
        query = "CREATE (), ()"
        graph.query(query)
        query = "MATCH (n) WHERE ID(n)=$id RETURN id(n)"
        self.compare_uncached_to_cached_query_plans(query)
        params = {'id': 0}
        uncached_result = graph.query(query, params)
        params = {'id': 1}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual([[0]], uncached_result.result_set)
        self.env.assertEqual([[1]], cached_result.result_set)
        graph.delete()

    def test08_test_join(self):
        graph = Graph('Cache_Test_Join', redis_con)
        query = "CREATE ({val:1}), ({val:2}), ({val:3}),({val:4})"
        graph.query(query)
        query = "MATCH (a {val:$val}), (b) WHERE a.val = b.val-1 RETURN a.val, b.val "
        self.compare_uncached_to_cached_query_plans(query)
        params = {'val': 1}
        uncached_result = graph.query(query, params)
        params = {'val': 3}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual([[1, 2]], uncached_result.result_set)
        self.env.assertEqual([[3, 4]], cached_result.result_set)
        graph.delete()

    def test09_test_edge_merge(self):
        # In this scenario, the same query is executed twice.
        # The first time, the relationship `leads` is unknown to the graph, so it is created.
        # The second time, the relationship is already known to the graph, so it is returned by the match.
        # The test validates that a valid edge is returned.
        graph = Graph('Cache_Test_Edge_Merge', redis_con)
        query = "CREATE ({val:1}), ({val:2})"
        graph.query(query)
        query = "MATCH (a {val:1}), (b {val:2}) MERGE (a)-[e:leads]->(b) RETURN e"
        self.compare_uncached_to_cached_query_plans(query)
        uncached_result = graph.query(query)
        self.env.assertEqual(1, uncached_result.relationships_created)
        cached_result = graph.query(query)
        self.env.assertEqual(0, cached_result.relationships_created)
        self.env.assertEqual(uncached_result.result_set,
                             cached_result.result_set)

    def test10_test_labelscan_update(self):
        # In this scenario a label scan is made for a non-existing label,
        # then the label is created and the label-scan query is reused.
        graph = Graph('Cache_test_labelscan_update', redis_con)
        query = "MATCH (n:Label) return n"
        result = graph.query(query)
        self.env.assertEqual(0, len(result.result_set))
        query = "MERGE (n:Label)"
        result = graph.query(query)
        self.env.assertEqual(1, result.nodes_created)
        query = "MATCH (n:Label) return n"
        result = graph.query(query)
        self.env.assertEqual(1, len(result.result_set))
        self.env.assertEqual("Label", result.result_set[0][0].label)

    def test11_test_skip_limit(self):
        # Test using parameters for skip and limit values,
        # ensuring cached executions always use the parameterized values.
        graph = Graph('Cache_Empty_Key', redis_con)
        query = "UNWIND [1,2,3,4] AS arr RETURN arr SKIP $s LIMIT $l"
        params = {'s': 1, 'l': 1}
        uncached_result = graph.query(query, params)
        expected_result = [[2]]
        cached_result = graph.query(query, params)
        self.env.assertEqual(expected_result, cached_result.result_set)
        self.env.assertEqual(uncached_result.result_set,
                             cached_result.result_set)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)

        # Update the params
        params = {'s': 2, 'l': 2}
        # The new result should respect the new skip and limit.
        expected_result = [[3], [4]]
        cached_result = graph.query(query, params)
        self.env.assertEqual(expected_result, cached_result.result_set)
        self.env.assertTrue(cached_result.cached_execution)
Example #8
class testGraphVersioning(FlowTestsBase):
    def __init__(self):
        self.env = Env(decodeResponses=True)

    # Make sure graph version changes once a new label is created
    def test01_version_update_on_label_creation(self):
        global VERSION
        con = self.env.getConnection()

        # Adding a node without a label shouldn't update graph version.
        q = """CREATE ()"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertFalse(isinstance(res[0], ResponseError))

        # Adding a labeled node should update graph version.
        q = """CREATE (:L)"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertFalse(isinstance(res[0], ResponseError))

        q = """RETURN 1"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertTrue(isinstance(res[0], ResponseError))

        # Update version
        VERSION = int(res[1])

        # Adding a node with an existing label shouldn't update graph version
        q = """CREATE (:L)"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertFalse(isinstance(res[0], ResponseError))

        q = """RETURN 1"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertFalse(isinstance(res[0], ResponseError))

    # Make sure graph version changes once a new relationship type is created
    def test02_version_update_on_relation_creation(self):
        global VERSION
        con = self.env.getConnection()

        # Adding edge with a new relationship type should update graph version
        q = """CREATE ()-[:R]->()"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertFalse(isinstance(res[0], ResponseError))

        q = """RETURN 1"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertTrue(isinstance(res[0], ResponseError))

        # Update version
        VERSION = int(res[1])

        # Adding edge with existing relationship type shouldn't update graph version
        q = """CREATE ()-[:R]->()"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertFalse(isinstance(res[0], ResponseError))

        q = """RETURN 1"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertFalse(isinstance(res[0], ResponseError))

    # Make sure graph version changes once a new attribute is created
    def test03_version_update_on_attribute_creation(self):
        global VERSION
        con = self.env.getConnection()

        # Adding a new attribute should update graph version
        q = """CREATE ({v:1})"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertFalse(isinstance(res[0], ResponseError))

        q = """RETURN 1"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertTrue(isinstance(res[0], ResponseError))

        # Update version
        VERSION = int(res[1])

        # Adding a new node with existing attribute shouldn't update graph version
        q = """CREATE ({v:1})"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertFalse(isinstance(res[0], ResponseError))

        # Adding a new edge with a new attribute should update graph version
        q = """CREATE ()-[:R {q:1}]->()"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertFalse(isinstance(res[0], ResponseError))

        q = """RETURN 1"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertTrue(isinstance(res[0], ResponseError))

        # Update version
        VERSION = int(res[1])

        # Adding a new edge with existing attribute shouldn't update graph version
        q = """CREATE ()-[:R {v:1}]->()"""
        res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version",
                                  VERSION)
        self.env.assertFalse(isinstance(res[0], ResponseError))
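
# A sketch of the client-side pattern the version tests above rely on (hypothetical
# helper, mirroring how VERSION is refreshed from res[1] whenever the server
# reports a version mismatch as a ResponseError):
def query_with_version(con, graph_id, q, version):
    res = con.execute_command("GRAPH.QUERY", graph_id, q, "version", version)
    if isinstance(res[0], ResponseError):
        version = int(res[1])  # the server sends back the current graph version
    return res, version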
Example #9
class testProcedures(FlowTestsBase):
    def __init__(self):
        self.env = Env(decodeResponses=True)
        global redis_con
        global redis_graph
        redis_con = self.env.getConnection()
        redis_graph = Graph(GRAPH_ID, redis_con)
        self.populate_graph()

    def populate_graph(self):
        if redis_con.exists(GRAPH_ID):
            return
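
        # node1..node5 are module-level Node objects defined elsewhere in the
        # original test file (fruit nodes with 'name' and 'value' properties,
        # judging by the queries below); they are not shown in this excerpt.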

        edge = Edge(node1, 'goWellWith', node5)
        redis_graph.add_node(node1)
        redis_graph.add_node(node2)
        redis_graph.add_node(node3)
        redis_graph.add_node(node4)
        redis_graph.add_node(node5)
        redis_graph.add_edge(edge)
        redis_graph.commit()

        # Create full-text index.
        redis_graph.call_procedure("db.idx.fulltext.createNodeIndex", 'fruit', 'name')

    # Compares two nodes based on their properties.
    def _compareNodes(self, a, b):
        return a.properties == b.properties

    # Make sure the given item is found within the result set.
    def _inResultSet(self, item, resultset):
        for i in range(len(resultset)):
            result = resultset[i][0]
            if self._compareNodes(item, result):
                return True
        return False

    # Issue a query and validate its result set.
    def queryAndValidate(self, query, expected_results, query_params={}):
        actual_resultset = redis_graph.query(query, query_params).result_set
        self.env.assertEquals(len(actual_resultset), len(expected_results))
        for i in range(len(actual_resultset)):
            self.env.assertTrue(self._inResultSet(expected_results[i], actual_resultset))
    
    # Call a procedure without YIELD, expecting all of the procedure's outputs
    # to be included in the result set.
    def test01_no_yield(self):
        actual_result = redis_graph.call_procedure("db.idx.fulltext.queryNodes", "fruit", "Orange1")
        assert(len(actual_result.result_set) == 1)

        header = actual_result.header
        data = actual_result.result_set[0]
        assert(header[0][1] == 'node')
        assert(data[0] is not None)

    # Call a procedure, specifying which outputs to yield.
    def test02_yield(self):
        actual_result = redis_graph.call_procedure("db.idx.fulltext.queryNodes", "fruit", "Orange1", y=["node"])
        assert(len(actual_result.result_set) == 1)

        header = actual_result.header
        data = actual_result.result_set[0]
        assert(header[0][1] == 'node')
        assert(data[0] is not None)

        # Yield an unknown output.
        # Expect an error when trying to use an unknown procedure output.
        try:
            redis_graph.call_procedure("db.idx.fulltext.queryNodes", "fruit", "Orange1", y=["unknown"])
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass
        
        # Yield the same output multiple times.
        # Expect an error when trying to use the same output multiple times.
        try:
            redis_graph.call_procedure("db.idx.fulltext.queryNodes", "fruit", "Orange1", y=["node", "node"])
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass
    
    def test03_arguments(self):
        # Omit arguments.
        # Expect an error when trying to omit arguments.
        try:
            redis_graph.call_procedure("db.idx.fulltext.queryNodes")
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass
        
        # Provide too few arguments: queryNodes expects 2 arguments, only 1 is given.
        # Expect an error.
        try:
            redis_graph.call_procedure("db.idx.fulltext.queryNodes", "arg1")
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass

        # Provide too many arguments.
        # Expect an error when sending more arguments than the procedure accepts.
        try:
            redis_graph.call_procedure("db.idx.fulltext.queryNodes", "fruit", "query", "fruit", "query", y=["node"])
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass

    # Test procedure calls mixed with a number of additional clauses.
    def test04_mix_clauses(self):
        query_params = {'prefix': 'Orange*'}
        # CALL + RETURN.

        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    RETURN node"""
        expected_results = [node4, node2, node3, node1]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN.
        # (Combining CALL and WHERE used to trigger a syntax error in libcypher-parser.)
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    """
        expected_results = [node3, node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + LIMIT.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    LIMIT 2"""
        expected_results = [node3, node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + RETURN + ORDER.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    RETURN node
                    ORDER BY node.value
                    """
        expected_results = [node1, node2, node3, node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + RETURN + ORDER + SKIP.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    RETURN node
                    ORDER BY node.value
                    SKIP 1
                    """
        expected_results = [node2, node3, node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + RETURN + ORDER + LIMIT.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    RETURN node
                    ORDER BY node.value
                    LIMIT 2
                    """
        expected_results = [node1, node2]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + RETURN + ORDER + SKIP + LIMIT.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    RETURN node
                    ORDER BY node.value
                    SKIP 1
                    LIMIT 1
                    """
        expected_results = [node2]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + ORDER.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    ORDER BY node.value"""
        expected_results = [node3, node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + ORDER + SKIP.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    ORDER BY node.value
                    SKIP 1"""
        expected_results = [node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + ORDER + LIMIT.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    ORDER BY node.value
                    LIMIT 1"""
        expected_results = [node3]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + ORDER + SKIP + LIMIT.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    ORDER BY node.value
                    SKIP 1
                    LIMIT 1"""
        expected_results = [node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)

        # CALL + MATCH + RETURN.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
            MATCH (node)-[]->(z)
            RETURN z"""
        expected_results = [node5]
        self.queryAndValidate(query, expected_results, query_params=query_params)

        # UNWIND + CALL + RETURN.
        query = """UNWIND([1,2]) AS x CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node RETURN node"""
        expected_results = [node4, node2, node3, node1, node4, node2, node3, node1]
        self.queryAndValidate(query, expected_results, query_params=query_params)

    def test05_procedure_labels(self):
        actual_resultset = redis_graph.call_procedure("db.labels").result_set
        expected_results = [["fruit"]]        
        self.env.assertEquals(actual_resultset, expected_results)
    
    def test06_procedure_relationshipTypes(self):
        actual_resultset = redis_graph.call_procedure("db.relationshipTypes").result_set
        expected_results = [["goWellWith"]]
        self.env.assertEquals(actual_resultset, expected_results)
    
    def test07_procedure_propertyKeys(self):
        actual_resultset = redis_graph.call_procedure("db.propertyKeys").result_set
        expected_results = [["name"], ["value"]]
        self.env.assertEquals(actual_resultset, expected_results)

    def test08_procedure_fulltext_syntax_error(self):
        try:
            query = """CALL db.idx.fulltext.queryNodes('fruit', 'Orange || Apple') YIELD node RETURN node"""
            redis_graph.query(query)
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass

    def test09_procedure_lookup(self):
        try:
            redis_graph.call_procedure("dB.LaBeLS")
        except redis.exceptions.ResponseError:
            # Procedure names are case-insensitive, so this should not raise an error.
            self.env.assertFalse(1)

        try:
            # Look up a non-existent procedure.
            redis_graph.call_procedure("db.nonExistingProc")
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass

        try:
            redis_graph.call_procedure("db.IDX.FulLText.QueRyNoDes", "fruit", "or")
        except redis.exceptions.ResponseError:
            # Procedure name lookup is case-insensitive, so this should not raise an error.
            self.env.assertFalse(1)

    def test10_procedure_get_all_procedures(self):
        actual_resultset = redis_graph.call_procedure("dbms.procedures").result_set

        # The following procedures are expected to be part of the results
        expected_result = [["db.labels", "READ"], ["db.idx.fulltext.createNodeIndex", "WRITE"],
                           ["db.propertyKeys", "READ"], ["dbms.procedures", "READ"], ["db.relationshipTypes", "READ"],
                           ["algo.BFS", "READ"], ["algo.pageRank", "READ"], ["db.idx.fulltext.queryNodes", "READ"],
                           ["db.idx.fulltext.drop", "WRITE"]]
        for res in expected_result:
            self.env.assertContains(res, actual_resultset)

    def test11_procedure_indexes(self):
        # Verify that the full-text index is reported properly.
        actual_resultset = redis_graph.query("CALL db.indexes() YIELD type, label, properties").result_set
        expected_results = [["full-text", "fruit", ["name"]]]
        self.env.assertEquals(actual_resultset, expected_results)

        # Add an exact-match index on a different property of the same label.
        result = redis_graph.query("CREATE INDEX ON :fruit(other_property)")
        self.env.assertEquals(result.indices_created, 1)

        # Verify that all indexes are reported.
        actual_resultset = redis_graph.query("CALL db.indexes() YIELD type, label, properties RETURN type, label, properties ORDER BY type").result_set
        expected_results = [["exact-match", "fruit", ["other_property"]],
                            ["full-text", "fruit", ["name"]]]
        self.env.assertEquals(actual_resultset, expected_results)

        # Add an exact-match index on the property that is already full-text indexed, on the same label.
        result = redis_graph.query("CREATE INDEX ON :fruit(name)")
        self.env.assertEquals(result.indices_created, 1)

        # Verify that all indexes are reported.
        actual_resultset = redis_graph.query("CALL db.indexes() YIELD type, label, properties RETURN type, label, properties ORDER BY type").result_set
        expected_results = [["exact-match", "fruit", ["other_property", "name"]],
                            ["full-text", "fruit", ["name"]]]
        self.env.assertEquals(actual_resultset, expected_results)

        # Validate the results when yielding only one element.
        actual_resultset = redis_graph.query("CALL db.indexes() YIELD label").result_set
        expected_results = [["fruit"],
                            ["fruit"]]
        self.env.assertEquals(actual_resultset, expected_results)
class testQueryMemoryLimit():
    def __init__(self):
        global g
        self.env = Env(decodeResponses=True)
        self.conn = self.env.getConnection()
        g = Graph(GRAPH_NAME, self.conn)
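
    # MEM_HOG_QUERY and MEM_THRIFTY_QUERY are presumably defined earlier in this
    # example (not shown here). A memory-hogging query is typically one that
    # materializes a very large intermediate result, e.g. (hypothetical):
    #   UNWIND range(0, 1000000) AS x RETURN collect(x)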

    def issue_query(self, conn, q, should_fail):
        try:
            # issue the query through this thread's own connection
            Graph(GRAPH_NAME, conn).query(q)
            self.env.assertFalse(should_fail)
        except Exception as e:
            assert "Query's mem consumption exceeded capacity" in str(e)
            self.env.assertTrue(should_fail)

    def stress_server(self, queries):
        threads = []
        connections = []
        threadpool_size = self.conn.execute_command("GRAPH.CONFIG", "GET",
                                                    "THREAD_COUNT")[1]

        # init connections
        for t in range(threadpool_size):
            connections.append(self.env.getConnection())

        # init circular iterator
        connections_pool = cycle(connections)

        # invoke queries
        for q in queries:
            con = next(connections_pool)
            t = threading.Thread(target=self.issue_query,
                                 args=(con, q[0], q[1]))
            t.daemon = True
            threads.append(t)
            t.start()

        # wait for threads to return
        while len(threads) > 0:
            t = threads.pop()
            t.join()

    def test_01_read_memory_limit_config(self):
        # read configuration, test default value, expecting unlimited memory cap
        result = self.conn.execute_command("GRAPH.CONFIG", "GET",
                                           "QUERY_MEM_CAPACITY")
        query_mem_capacity = result[1]
        self.env.assertEquals(query_mem_capacity, 0)

        # update configuration, set memory limit to 1MB
        MB = 1024 * 1024
        self.conn.execute_command("GRAPH.CONFIG", "SET", "QUERY_MEM_CAPACITY",
                                  MB)

        # re-read configuration
        result = self.conn.execute_command("GRAPH.CONFIG", "GET",
                                           "QUERY_MEM_CAPACITY")
        query_mem_capacity = result[1]
        self.env.assertEquals(query_mem_capacity, MB)
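
        # The same change can be made directly from redis-cli (sketch):
        #   GRAPH.CONFIG SET QUERY_MEM_CAPACITY 1048576
        #   GRAPH.CONFIG GET QUERY_MEM_CAPACITY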

    def test_02_overflow_no_limit(self):
        # execute query on each one of the threads
        n_queries_to_execute = self.conn.execute_command(
            "GRAPH.CONFIG", "GET", "THREAD_COUNT")[1]

        # set query memory limit as UNLIMITED
        limit = 0
        self.conn.execute_command("GRAPH.CONFIG", "SET", "QUERY_MEM_CAPACITY",
                                  limit)

        self.stress_server([(MEM_HOG_QUERY, False)] * n_queries_to_execute)

    def test_03_no_overflow_with_limit(self):
        # execute query on each one of the threads
        n_queries_to_execute = self.conn.execute_command(
            "GRAPH.CONFIG", "GET", "THREAD_COUNT")[1]

        # set query memory limit to 1GB
        limit = 1024 * 1024 * 1024
        self.conn.execute_command("GRAPH.CONFIG", "SET", "QUERY_MEM_CAPACITY",
                                  limit)

        self.stress_server([(MEM_HOG_QUERY, False)] * n_queries_to_execute)

    def test_04_overflow_with_limit(self):
        # execute query on each one of the threads
        n_queries_to_execute = self.conn.execute_command(
            "GRAPH.CONFIG", "GET", "THREAD_COUNT")[1]

        # set query memory limit to 1MB
        limit = 1024 * 1024
        self.conn.execute_command("GRAPH.CONFIG", "SET", "QUERY_MEM_CAPACITY",
                                  limit)

        self.stress_server([(MEM_HOG_QUERY, True)] * n_queries_to_execute)

    def test_05_test_mixed_queries(self):
        queries = []
        total_query_count = 100

        # Query the threadpool_size
        threadpool_size = self.conn.execute_command("GRAPH.CONFIG", "GET",
                                                    "THREAD_COUNT")[1]

        # set query memory limit to 1MB
        limit = 1024 * 1024
        self.conn.execute_command("GRAPH.CONFIG", "SET", "QUERY_MEM_CAPACITY",
                                  limit)

        for i in range(total_query_count):
            should_fail = False
            q = MEM_THRIFTY_QUERY
            r = random.randint(0, 100)

            if r <= 10:  # roughly 10% of the queries should exceed the memory cap
                q = MEM_HOG_QUERY
                should_fail = True

            queries.append((q, should_fail))

        self.stress_server(queries)
Beispiel #11
0
class testCache(FlowTestsBase):

    def __init__(self):
        # Have only one thread handling queries
        self.env = Env(moduleArgs='THREAD_COUNT 1 CACHE_SIZE {CACHE_SIZE}'.format(CACHE_SIZE = CACHE_SIZE))
        global redis_con
        redis_con = self.env.getConnection()

    def compare_uncached_to_cached_query_plans(self, query):
        global redis_con
        plan_graph = Graph('Cache_Test_plans', redis_con)
        uncached_plan = plan_graph.execution_plan(query)
        cached_plan = plan_graph.execution_plan(query)
        self.env.assertEqual(uncached_plan, cached_plan)
        plan_graph.delete()
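
    # execution_plan() presumably maps to GRAPH.EXPLAIN under the hood; building
    # the plan twice for the same query text compares the freshly-built plan
    # against the one that should be served from the execution-plan cache.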

    def test_sanity_check(self):
        graph = Graph('Cache_Sanity_Check', redis_con)
        for i in range(CACHE_SIZE + 1):
            result = graph.query("MATCH (n) WHERE n.value = {val} RETURN n".format(val=i))
            self.env.assertFalse(result.cached_execution)
        
        for i in range(1,CACHE_SIZE + 1):
            result = graph.query("MATCH (n) WHERE n.value = {val} RETURN n".format(val=i))
            self.env.assertTrue(result.cached_execution)
        
        # the first query (val = 0) should have been evicted once the cache
        # exceeded CACHE_SIZE, so running it again is not a cache hit
        result = graph.query("MATCH (n) WHERE n.value = 0 RETURN n")
        self.env.assertFalse(result.cached_execution)

        graph.delete()

    def test01_test_create(self):
        # Both queries do exactly the same operations
        graph = Graph('Cache_Test_Create', redis_con)
        query = "CREATE ()"
        self.compare_uncached_to_cached_query_plans(query)
        uncached_result = graph.query(query)
        cached_result = graph.query(query)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual(uncached_result.nodes_created, cached_result.nodes_created)
        graph.delete()
        
    def test02_test_create_with_params(self):
        # Both queries do exactly the same operations
        graph = Graph('Cache_Test_Create_With_Params', redis_con)
        params = {'val' : 1}
        query = "CREATE ({val:$val})"
        self.compare_uncached_to_cached_query_plans(query)
        uncached_result = graph.query(query, params)
        params = {'val' : 2}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual(uncached_result.nodes_created, cached_result.nodes_created)
        graph.delete()

    def test03_test_delete(self):
        # Both queries do exactly the same operations
        graph = Graph('Cache_Test_Delete', redis_con)
        for i in range(2):
            params = {'val' : i}
            query = "CREATE ({val:$val})-[:R]->()"
            graph.query(query, params)
        
        params = {'val': 0}
        query = "MATCH (n {val:$val}) DELETE n"
        self.compare_uncached_to_cached_query_plans(query)
        uncached_result = graph.query(query, params)
        params = {'val': 1}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual(uncached_result.relationships_deleted, cached_result.relationships_deleted)
        self.env.assertEqual(uncached_result.nodes_deleted, cached_result.nodes_deleted)
        graph.delete()

    def test04_test_merge(self):
        # Different outcome, same execution plan.
        graph = Graph('Cache_Test_Merge', redis_con)    
        params = {'create_val': 0, 'match_val':1}
        query = "MERGE (n) ON CREATE SET n.val = $create_val ON MATCH SET n.val = $match_val RETURN n.val"
        self.compare_uncached_to_cached_query_plans(query)
        uncached_result = graph.query(query, params)
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual(uncached_result.properties_set, cached_result.properties_set)
        self.env.assertEqual([[0]], uncached_result.result_set)
        self.env.assertEqual(1, uncached_result.nodes_created)
        self.env.assertEqual([[1]], cached_result.result_set)
        self.env.assertEqual(0, cached_result.nodes_created)

        graph.delete()

    def test05_test_branching_with_path_filter(self):
        # Different outcome, same execution plan.
        graph = Graph('Cache_Test_Path_Filter', redis_con) 
        query = "CREATE ({val:1})-[:R]->({val:2})-[:R2]->({val:3})"
        graph.query(query)
        query = "MATCH (n) WHERE (n)-[:R]->({val:$val}) OR (n)-[:R2]->({val:$val}) RETURN n.val"
        self.compare_uncached_to_cached_query_plans(query)
        params = {'val':2}
        uncached_result = graph.query(query, params)
        params = {'val':3}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual([[1]], uncached_result.result_set)
        self.env.assertEqual([[2]], cached_result.result_set)
        graph.delete()


    def test06_test_optimizations_index(self):
        graph = Graph('Cache_Test_Index', redis_con)
        graph.query("CREATE INDEX ON :N(val)")
        query = "CREATE (:N{val:1}), (:N{val:2})"
        graph.query(query)
        query = "MATCH (n:N{val:$val}) RETURN n.val"
        self.compare_uncached_to_cached_query_plans(query)
        params = {'val':1}
        uncached_result = graph.query(query, params)
        params = {'val':2}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual([[1]], uncached_result.result_set)
        self.env.assertEqual([[2]], cached_result.result_set)
        graph.delete()


    def test07_test_optimizations_id_scan(self):
        graph = Graph('Cache_Test_ID_Scan', redis_con)
        query = "CREATE (), ()"
        graph.query(query)
        query = "MATCH (n) WHERE ID(n)=$id RETURN id(n)"
        self.compare_uncached_to_cached_query_plans(query)
        params = {'id':0}
        uncached_result = graph.query(query, params)
        params = {'id':1}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual([[0]], uncached_result.result_set)
        self.env.assertEqual([[1]], cached_result.result_set)
        graph.delete()


    def test08_test_join(self):
        graph = Graph('Cache_Test_Join', redis_con)
        query = "CREATE ({val:1}), ({val:2}), ({val:3}),({val:4})"
        graph.query(query)
        query = "MATCH (a {val:$val}), (b) WHERE a.val = b.val-1 RETURN a.val, b.val "
        self.compare_uncached_to_cached_query_plans(query)
        params = {'val':1}
        uncached_result = graph.query(query, params)
        params = {'val':3}
        cached_result = graph.query(query, params)
        self.env.assertFalse(uncached_result.cached_execution)
        self.env.assertTrue(cached_result.cached_execution)
        self.env.assertEqual([[1, 2]], uncached_result.result_set)
        self.env.assertEqual([[3, 4]], cached_result.result_set)
        graph.delete()
Beispiel #12
0
class testPendingQueryLimit():
    def __init__(self):
        self.env = Env(decodeResponses=True)
        self.conn = self.env.getConnection()

    def test_01_query_limit_config(self):
        # read max queued queries config
        result = self.conn.execute_command("GRAPH.CONFIG", "GET",
                                           "MAX_QUEUED_QUERIES")
        max_queued_queries = result[1]
        self.env.assertEquals(max_queued_queries, 4294967295)

        # update configuration, set max queued queries
        self.conn.execute_command("GRAPH.CONFIG", "SET", "MAX_QUEUED_QUERIES",
                                  10)

        # re-read configuration
        result = self.conn.execute_command("GRAPH.CONFIG", "GET",
                                           "MAX_QUEUED_QUERIES")
        max_queued_queries = result[1]
        self.env.assertEquals(max_queued_queries, 10)

    def stress_server(self):
        threads = []
        connections = []
        threadpool_size = self.conn.execute_command("GRAPH.CONFIG", "GET",
                                                    "THREAD_COUNT")[1]
        thread_count = threadpool_size * 5

        # init connections
        for i in range(thread_count):
            connections.append(self.env.getConnection())

        # invoke queries
        for i in range(thread_count):
            con = connections.pop()
            t = threading.Thread(target=issue_query, args=(con, SLOW_QUERY))
            t.daemon = True
            threads.append(t)
            t.start()

        # wait for threads to return
        for i in range(thread_count):
            t = threads[i]
            t.join()

    def test_02_overflow_no_limit(self):
        global error_encountered
        error_encountered = False

        # no limit on number of pending queries
        limit = 4294967295
        self.conn.execute_command("GRAPH.CONFIG", "SET", "MAX_QUEUED_QUERIES",
                                  limit)

        self.stress_server()

        self.env.assertFalse(error_encountered)

    def test_03_overflow_with_limit(self):
        global error_encountered
        error_encountered = False

        # limit number of pending queries
        limit = 1
        self.conn.execute_command("GRAPH.CONFIG", "SET", "MAX_QUEUED_QUERIES",
                                  limit)

        self.stress_server()

        self.env.assertTrue(error_encountered)
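
        # With MAX_QUEUED_QUERIES set to 1, most of the concurrent SLOW_QUERY
        # submissions are expected to be rejected with a "max pending queries
        # exceeded" style error, which the issue_query helper (defined earlier)
        # records in the global error_encountered flag.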