Exemplo n.º 1
0
def testDependenciesBasicExportImport():
    """Export an installed requirement, restart, and re-import it.

    Verifies that RG.PYEXPORTREQ produces a payload which RG.PYIMPORTREQ can
    restore on a freshly restarted instance, and that the requirement reports
    downloaded + installed on every shard afterwards.
    """
    env = Env(moduleArgs='CreateVenv 1')
    getConnectionByEnv(env)

    # Disable RDB persistence so the restart below comes up empty.
    env.cmd(
        'RG.PYEXECUTE',
        "GB('ShardsIDReader').foreach(lambda x: execute('config', 'set', 'save', '')).run()"
    )

    # Install the requirement, then export its serialized form.
    env.expect('RG.PYEXECUTE', "import redisgraph", 'REQUIREMENTS',
               'redisgraph').ok()
    metadata, payload = env.cmd('RG.PYEXPORTREQ', 'redisgraph')
    env.assertEqual(metadata[5], 'yes')
    env.assertEqual(metadata[7], 'yes')

    # After a restart (no RDB) the requirement list must be empty.
    env.stop()
    env.start()
    getConnectionByEnv(env)
    env.expect('RG.PYDUMPREQS').equal([])

    # Re-import the exported payload and confirm every shard has it.
    env.expect('RG.PYIMPORTREQ', *payload).equal('OK')
    results, errors = env.cmd(
        'RG.PYEXECUTE',
        "GB('ShardsIDReader').flatmap(lambda x: execute('RG.PYDUMPREQS')).run()"
    )
    env.assertEqual(len(errors), 0)
    env.assertEqual(len(results), env.shardsCount)
    for reply in results:
        env.assertContains("'IsDownloaded', 'yes', 'IsInstalled', 'yes'", reply)
Exemplo n.º 2
0
class testIndexDeletionFlow():
    """Exercise index creation and deletion on node and edge attributes."""

    def __init__(self):
        self.env = Env(decodeResponses=True)
        global redis_graph
        global redis_con
        redis_con = self.env.getConnection()
        redis_graph = Graph(GRAPH_ID, redis_con)

    def test01_drop_index(self):
        # Dropping an index that was never created must raise an error.
        try:
            redis_graph.query("DROP INDEX ON :person(age)")
            self.env.assertTrue(False)
        except ResponseError as e:
            self.env.assertContains(
                "Unable to drop index on :person(age): no such index.", str(e))

        # Node index over person.age: create, then drop.
        creation = redis_graph.query("CREATE INDEX FOR (p:person) ON (p.age)")
        self.env.assertEquals(creation.indices_created, 1)

        deletion = redis_graph.query("DROP INDEX ON :person(age)")
        self.env.assertEquals(deletion.indices_deleted, 1)

        # Edge index over follow.created_at: create, then drop.
        creation = redis_graph.query(
            "CREATE INDEX FOR ()-[r:follow]-() ON (r.created_at)")
        self.env.assertEquals(creation.indices_created, 1)

        deletion = redis_graph.query("DROP INDEX ON :follow(created_at)")
        self.env.assertEquals(deletion.indices_deleted, 1)
Exemplo n.º 3
0
def testAof(env):
    """Installed requirements must survive an AOF reload on every shard.

    NOTE(review): the injected `env` argument is immediately replaced by a
    fresh Env configured with AOF, so the parameter is intentionally unused.
    """
    env = Env(moduleArgs='CreateVenv 1', useAof=True)
    getConnectionByEnv(env)
    env.expect('RG.PYEXECUTE', "import redisgraph", 'REQUIREMENTS',
               'redisgraph').ok()

    def verify_requirement_on_all_shards():
        # Every shard must report the requirement as downloaded + installed.
        results, errors = env.cmd(
            'RG.PYEXECUTE',
            "GB('ShardsIDReader').flatmap(lambda x: execute('RG.PYDUMPREQS')).run()"
        )
        env.assertEqual(len(errors), 0)
        env.assertEqual(len(results), env.shardsCount)
        for reply in results:
            env.assertContains("'IsDownloaded', 'yes', 'IsInstalled', 'yes'",
                               reply)

    # Check before and after replaying the append-only file.
    verify_requirement_on_all_shards()
    env.broadcast('debug', 'loadaof')
    verify_requirement_on_all_shards()
Exemplo n.º 4
0
class testGetExecutionErrorReporting:
    """Errors from a failed execution must surface via RG.GETEXECUTION."""

    def __init__(self):
        self.env = Env()
        conn = getConnectionByEnv(self.env)
        conn.execute_command('set', '0', 'falsE')
        conn.execute_command('set', '1', 'truE')
        conn.execute_command('set', '', 'mebbE')

    def _run_failing_execution(self):
        # Launch an execution guaranteed to fail (calls an undefined name),
        # give it time to finish, and return (execution id, error list).
        exec_id = self.env.cmd(
            'RG.PYEXECUTE',
            'GearsBuilder().repartition(lambda x: notexists(x)).repartition(lambda x: notexists(x)).collect().run()',
            'UNBLOCKING')
        time.sleep(1)
        execution = self.env.cmd('RG.GETEXECUTION', exec_id)
        return exec_id, execution[0][3][9]

    def testErrorShouldBeReportedWithTracebackAttempted(self):
        self.env.cmd('RG.CONFIGSET', 'PythonAttemptTraceback', 1)
        exec_id, errors = self._run_failing_execution()
        for error in errors:
            self.env.assertContains("name \'notexists\' is not defined", error)
        self.env.cmd('RG.DROPEXECUTION', exec_id)

    def testErrorShouldBeReportedWithTracebackNotAttempted(self):
        self.env.cmd('RG.CONFIGSET', 'PythonAttemptTraceback', 0)
        exec_id, errors = self._run_failing_execution()
        for error in errors:
            self.env.assertContains("name 'notexists' is not defined", error)
        self.env.cmd('RG.DROPEXECUTION', exec_id)
        # Restore the default traceback setting for subsequent tests.
        self.env.cmd('RG.CONFIGSET', 'PythonAttemptTraceback', 1)
Exemplo n.º 5
0
def testExt():
    """Load an example extension and verify its custom scorer is applied."""
    if not os.path.exists(EXTPATH):
        raise Exception(
            "Path ({}) does not exist. "
            "Run from the build directory or set EXT_TEST_PATH in the environment"
            .format(EXTPATH))

    env = Env(moduleArgs='EXTLOAD %s' % EXTPATH)

    # An extension cannot be loaded into a pre-existing server.
    if env.env == 'existing-env':
        env.skip()

    num_docs = 100
    env.assertOk(
        env.execute_command('ft.create', 'idx', 'ON', 'HASH', 'schema', 'f',
                            'text'))
    for doc_num in range(num_docs):
        env.assertOk(
            env.execute_command('ft.add', 'idx', 'doc%d' % doc_num, 1.0,
                                'fields', 'f', 'hello world'))

    # Default scorer: every document matches.
    reply = env.execute_command('ft.search', 'idx', 'hello world')
    env.assertEqual(num_docs, reply[0])

    # The extension's scorer filters every document out.
    reply = env.execute_command('ft.search', 'idx', 'hello world', 'scorer',
                                'filterout_scorer')
    env.assertEqual(0, reply[0])

    if not env.isCluster():
        ext_load = env.cmd('ft.config', 'get', 'EXTLOAD')[0][1]
        env.assertContains('libexample_extension', ext_load)
Exemplo n.º 6
0
class testQueryTimeout(FlowTestsBase):
    """Validate the GRAPH.QUERY "timeout" argument for read and write queries."""

    def __init__(self):
        # Skip test if we're running under Valgrind: queries will be much
        # slower and would time out spuriously.
        if Env().envRunner.debugger is not None:
            Env().skip()  # queries will be much slower under Valgrind

        self.env = Env(decodeResponses=True)
        global redis_con
        redis_con = self.env.getConnection()

    def test_read_query_timeout(self):
        query = "UNWIND range(0,100000) AS x WITH x AS x WHERE x = 10000 RETURN x"
        # A 1 ms timeout is expected to abort the query.
        response = redis_con.execute_command("GRAPH.QUERY", "g", query,
                                             "timeout", 1)
        error = response[-1]
        self.env.assertTrue(isinstance(error, ResponseError))
        self.env.assertContains("Query timed out", str(error))

        # A 100 ms timeout should allow the query to complete.
        response = redis_con.execute_command("GRAPH.QUERY", "g", query,
                                             "timeout", 100)
        self.env.assertFalse(isinstance(response[-1], ResponseError))

    def test_write_query_timeout(self):
        query = "create ()"
        try:
            redis_con.execute_command("GRAPH.QUERY", "g", query, "timeout", 1)
            assert (False)
        except ResponseError:
            # Expecting an error. Catching ResponseError instead of the
            # original bare `except:` ensures the AssertionError raised above
            # (when the command unexpectedly succeeds) propagates and fails
            # the test rather than being silently swallowed.
            pass
Exemplo n.º 7
0
def testDependenciesInstall():
    """A REQUIREMENTS clause must make the package importable on all shards."""
    env = Env(moduleArgs='CreateVenv 1')
    getConnectionByEnv(env)
    results, errors = env.cmd('RG.PYEXECUTE', "GB('ShardsIDReader')."
                            "map(lambda x: str(__import__('redisgraph')))."
                            "collect().distinct().run()", 'REQUIREMENTS', 'redisgraph')
    env.assertEqual(len(results), env.shardsCount)
    env.assertEqual(len(errors), 0)
    env.assertContains("<module 'redisgraph'", results[0])
Exemplo n.º 8
0
class testQueryTimeout(FlowTestsBase):
    """Validate per-query and module-configured query timeouts."""

    def __init__(self):
        # Skip test if we're running under Valgrind: queries will be much
        # slower and would time out spuriously.
        if Env().envRunner.debugger is not None:
            Env().skip()  # queries will be much slower under Valgrind

        self.env = Env(decodeResponses=True)
        global redis_con
        global redis_graph
        redis_con = self.env.getConnection()
        redis_graph = Graph("timeout", redis_con)

    def test01_read_query_timeout(self):
        query = "UNWIND range(0,100000) AS x WITH x AS x WHERE x = 10000 RETURN x"
        try:
            # The query is expected to time out
            redis_graph.query(query, timeout=1)
            assert (False)
        except ResponseError as error:
            self.env.assertContains("Query timed out", str(error))

        try:
            # The query is expected to succeed
            redis_graph.query(query, timeout=100)
        except ResponseError:
            # Narrowed from a bare `except:` so that unrelated exceptions
            # (e.g. KeyboardInterrupt, SystemExit) are not masked as an
            # ordinary assertion failure.
            assert (False)

    def test02_configured_timeout(self):
        # Verify that the module-level timeout is set to the default of 0
        response = redis_con.execute_command("GRAPH.CONFIG GET timeout")
        self.env.assertEquals(response[1], 0)
        # Set a default timeout of 1 millisecond
        redis_con.execute_command("GRAPH.CONFIG SET timeout 1")
        response = redis_con.execute_command("GRAPH.CONFIG GET timeout")
        self.env.assertEquals(response[1], 1)

        # Validate that a read query times out under the configured default.
        query = "UNWIND range(0,100000) AS x WITH x AS x WHERE x = 10000 RETURN x"
        try:
            redis_graph.query(query)
            assert (False)
        except ResponseError as error:
            self.env.assertContains("Query timed out", str(error))

    def test03_write_query_ignore_timeout(self):
        # Verify that the timeout argument is ignored by write queries
        query = "CREATE (a:M) WITH a UNWIND range(1,10000) AS ctr SET a.v = ctr"
        try:
            # The query should complete successfully
            actual_result = redis_graph.query(query, timeout=1)
            # The query should have taken longer than the timeout value
            self.env.assertGreater(actual_result.run_time_ms, 1)
            # The query should have updated properties 10,000 times
            self.env.assertEquals(actual_result.properties_set, 10000)
        except ResponseError:
            assert (False)
Exemplo n.º 9
0
def testDependenciesSavedToRDB():
    """Installed requirements must survive an RDB save/load cycle."""
    env = Env(moduleArgs='CreateVenv 1')
    getConnectionByEnv(env)
    env.expect('RG.PYEXECUTE', "import redisgraph", 'REQUIREMENTS', 'redisgraph').ok()
    # reloading_iterator runs the body both before and after an RDB reload.
    for _ in env.reloading_iterator():
        results, errors = env.cmd('RG.PYEXECUTE', "GB('ShardsIDReader').flatmap(lambda x: execute('RG.PYDUMPREQS')).run()")
        env.assertEqual(len(errors), 0)
        env.assertEqual(len(results), env.shardsCount)
        for reply in results:
            env.assertContains("'IsDownloaded', 'yes', 'IsInstalled', 'yes'", reply)
Exemplo n.º 10
0
def testDependenciesWithRegister():
    """Requirements attached to a registration must survive an RDB reload."""
    env = Env(moduleArgs='CreateVenv 1')
    env.skipOnCluster()
    env.expect('RG.PYEXECUTE', "GB()."
                               "map(lambda x: __import__('redisgraph'))."
                               "collect().distinct().register()", 'REQUIREMENTS', 'redisgraph').ok()

    # reloading_iterator runs the body both before and after an RDB reload.
    for _ in env.reloading_iterator():
        results, errors = env.cmd('RG.PYEXECUTE', "GB('ShardsIDReader')."
                                      "map(lambda x: str(__import__('redisgraph')))."
                                      "collect().distinct().run()")
        env.assertEqual(len(results), env.shardsCount)
        env.assertEqual(len(errors), 0)
        env.assertContains("<module 'redisgraph'", results[0])
Exemplo n.º 11
0
def testDelReplicate():
    """Document additions and deletions must replicate from master to slave."""
    env = Env(useSlaves=True, forceTcp=True)
    env.skipOnCluster()

    master = env.getConnection()
    slave = env.getSlaveConnection()
    # Both endpoints must be reachable before the test proceeds.
    env.assertContains("PONG", master.execute_command("ping"))
    env.assertContains("PONG", slave.execute_command("ping"))
    env.assertOk(
        master.execute_command('ft.create', 'idx', 'schema', 'f', 'text'))

    checkSlaveSynced(env, slave, ('exists', 'idx:idx'), 1, time_out=20)

    doc_ids = ['doc%d' % i for i in range(10)]
    for doc_id in doc_ids:
        master.execute_command('ft.add', 'idx', doc_id, 1.0, 'fields',
                               'f', 'hello world')

    checkSlaveSynced(env,
                     slave, ('ft.get', 'idx', 'doc9'), ['f', 'hello world'],
                     time_out=20)

    for doc_id in doc_ids:
        # Each document must be visible on both master and slave...
        env.assertEqual(['f', 'hello world'],
                        master.execute_command('ft.get', 'idx', doc_id))
        env.assertEqual(['f', 'hello world'],
                        slave.execute_command('ft.get', 'idx', doc_id))

        # ...then deleted (with document data) on the master.
        env.assertEqual(
            1, master.execute_command('ft.del', 'idx', doc_id, 'DD'))

    checkSlaveSynced(env, slave, ('ft.get', 'idx', 'doc9'), None, time_out=20)

    for doc_id in doc_ids:
        # The deletions must have replicated to the slave.
        env.assertEqual(None,
                        master.execute_command('ft.get', 'idx', doc_id))
        env.assertEqual(None,
                        slave.execute_command('ft.get', 'idx', doc_id))
Exemplo n.º 12
0
class TestIGet():
    """Tests for the `iget` command of the intervals module."""

    def __init__(self):
        self.env = Env()

    def _reset(self, *members):
        # Start from an empty keyspace, then add each (spec, name) interval.
        self.env.cmd('FLUSHALL')
        for spec, name in members:
            self.env.cmd('iadd', 'intervals', spec, name)

    def test_iget_should_return_error_when_missing_key(self):
        self._reset()
        self.env.expect('iget').error()

    def test_iget_should_return_error_when_missing_parameters(self):
        self._reset()
        self.env.expect('iget', 'intervals').error()

    def test_iget_should_return_empty_when_member_not_exists(self):
        self._reset()
        self.env.expect('iget', 'intervals', 'i1').equal([])

    def test_iget_should_return_i1(self):
        self._reset(('0,1', 'i1'))
        reply = self.env.cmd('iget', 'intervals', 'i1')
        self.env.assertEqual(len(reply), 1)
        self.env.assertContains([b'i1', b'1', b'0', b'1', b'1'], reply)

    def test_iget_should_return_i2(self):
        self._reset(('0,1', 'i1'), ('2,3', 'i2'))
        reply = self.env.cmd('iget', 'intervals', 'i2')
        self.env.assertEqual(len(reply), 1)
        self.env.assertContains([b'i2', b'1', b'2', b'1', b'3'], reply)

    def test_iget_should_return_i1_and_i2(self):
        self._reset(('0,1', 'i1'), ('2,3', 'i2'))
        reply = self.env.cmd('iget', 'intervals', 'i1', 'i2')
        self.env.assertEqual(len(reply), 2)
        self.env.assertContains([b'i1', b'1', b'0', b'1', b'1'], reply)
        self.env.assertContains([b'i2', b'1', b'2', b'1', b'3'], reply)

    def test_iget_should_return_only_existing_member(self):
        self._reset(('0,1', 'i1'), ('2,3', 'i2'))
        self.env.expect('iget', 'intervals', 'i1', 'i3').equal([[b'i1', b'1', b'0', b'1', b'1']])

    def test_iget_should_return_empty_because_no_members_exists(self):
        self._reset(('0,1', 'i1'), ('2,3', 'i2'))
        self.env.expect('iget', 'intervals', 'i3', 'i4').equal([])
Exemplo n.º 13
0
class testFunctionCallsFlow(FlowTestsBase):
    """Function-call semantics over a small fully-connected person graph.

    Builds a graph of `people` nodes (fully connected by :know and
    :works_with edges, with :know edges duplicated once) and then exercises
    scalar functions, aggregations, comparisons, CASE expressions, toJSON,
    and label predicates against it. Tests share graph state, so ordering
    matters.
    """

    def __init__(self):
        self.env = Env(decodeResponses=True)
        global graph
        global redis_con
        redis_con = self.env.getConnection()
        graph = Graph("G", redis_con)
        self.populate_graph()

    def populate_graph(self):
        """Create one :person node per entry in `people` (val = index),
        fully connect them with :know and :works_with edges, then duplicate
        every :know edge via a CREATE query."""
        global graph
        nodes = {}
        # Create entities
        for idx, p in enumerate(people):
            node = Node(label="person", properties={"name": p, "val": idx})
            graph.add_node(node)
            nodes[p] = node

        # Fully connected graph
        for src in nodes:
            for dest in nodes:
                if src != dest:
                    edge = Edge(nodes[src], "know", nodes[dest])
                    graph.add_edge(edge)

        for src in nodes:
            for dest in nodes:
                if src != dest:
                    edge = Edge(nodes[src], "works_with", nodes[dest])
                    graph.add_edge(edge)

        graph.commit()
        query = """MATCH (a)-[:know]->(b) CREATE (a)-[:know]->(b)"""
        graph.query(query)

    def expect_type_error(self, query):
        """Run `query` and assert it fails with a 'Type mismatch' error."""
        try:
            graph.query(query)
            assert (False)
        except redis.exceptions.ResponseError as e:
            # Expecting a type error.
            self.env.assertIn("Type mismatch", str(e))

    def expect_error(self, query, expected_err_msg):
        """Run `query` and assert it fails with `expected_err_msg`."""
        try:
            graph.query(query)
            assert (False)
        except redis.exceptions.ResponseError as e:
            # Expecting a type error.
            self.env.assertIn(expected_err_msg, str(e))

    # Validate capturing of errors prior to query execution.
    def test01_compile_time_errors(self):
        query = """RETURN toUpper(5)"""
        self.expect_type_error(query)

        query = """RETURN 'a' * 2"""
        self.expect_type_error(query)

        query = """RETURN max(1 + min(2))"""
        self.expect_error(
            query,
            "Can't use aggregate functions inside of aggregate functions")

    def test02_boolean_comparisons(self):
        # Cross-type equality/inequality evaluate rather than error;
        # any comparison against NULL yields NULL.
        query = """RETURN true = 5"""
        actual_result = graph.query(query)
        expected_result = [[False]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = """RETURN true <> 'str'"""
        actual_result = graph.query(query)
        expected_result = [[True]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = """RETURN 'anything' <> NULL"""
        actual_result = graph.query(query)
        expected_result = [[None]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = """RETURN 'anything' = NULL"""
        actual_result = graph.query(query)
        expected_result = [[None]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = """RETURN 10 >= 1.5"""
        actual_result = graph.query(query)
        expected_result = [[True]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = """RETURN -1 < 1"""
        actual_result = graph.query(query)
        expected_result = [[True]]
        self.env.assertEquals(actual_result.result_set, expected_result)

    def test03_boolean_errors(self):
        # Ordered comparisons between incompatible types are type errors.
        query = """RETURN 'str' < 5.5"""
        self.expect_type_error(query)

        query = """RETURN true > 5"""
        self.expect_type_error(query)

        query = """MATCH (a) RETURN a < 'anything' LIMIT 1"""
        self.expect_type_error(query)

    def test04_entity_functions(self):
        query = "RETURN ID(5)"
        self.expect_type_error(query)

        query = "MATCH (a) RETURN ID(a) ORDER BY ID(a) LIMIT 3"
        actual_result = graph.query(query)
        expected_result = [[0], [1], [2]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = "MATCH (a)-[e]->() RETURN ID(e) ORDER BY ID(e) LIMIT 3"
        actual_result = graph.query(query)
        expected_result = [[0], [1], [2]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = "RETURN EXISTS(null)"
        actual_result = graph.query(query)
        expected_result = [[False]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = "RETURN EXISTS('anything')"
        actual_result = graph.query(query)
        expected_result = [[True]]
        self.env.assertEquals(actual_result.result_set, expected_result)

    # NOTE(review): numbering jumps from test04 to test07 — tests 05/06 are
    # absent from this excerpt or were removed upstream.
    def test07_nonmap_errors(self):
        query = """MATCH (a) WITH a.name AS scalar RETURN scalar.name"""
        self.expect_type_error(query)

    def test08_apply_all_function(self):
        query = "MATCH () RETURN COUNT(*)"
        actual_result = graph.query(query)
        expected_result = [[4]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = "UNWIND [1, 2] AS a RETURN COUNT(*)"
        actual_result = graph.query(query)
        expected_result = [[2]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # COLLECT should associate false and 'false' to different groups.
        query = "UNWIND [false,'false',0,'0'] AS a RETURN a, count(a)"
        actual_result = graph.query(query)
        expected_result = [[0, 1], [False, 1], ["false", 1], ['0', 1]]
        self.env.assertEquals(actual_result.result_set, expected_result)

    def test09_static_aggregation(self):
        # Aggregations with no matched records still produce a single row.
        query = "RETURN count(*)"
        actual_result = graph.query(query)
        expected_result = [[1]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = "RETURN max(2)"
        actual_result = graph.query(query)
        expected_result = [[2]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = "RETURN min(3)"
        actual_result = graph.query(query)
        expected_result = [[3]]
        self.env.assertEquals(actual_result.result_set, expected_result)

    def test10_modulo_inputs(self):
        # Validate modulo with integer inputs.
        query = "RETURN 5 % 2"
        actual_result = graph.query(query)
        expected_result = [[1]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Validate modulo with a floating-point dividend.
        query = "RETURN 5.5 % 2"
        actual_result = graph.query(query)
        expected_result = [[1.5]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Validate modulo with a floating-point divisor.
        query = "RETURN 5 % 2.5"
        actual_result = graph.query(query)
        expected_result = [[0]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Validate modulo with both a floating-point dividen and a floating-point divisor.
        query = "RETURN 5.5 % 2.5"
        actual_result = graph.query(query)
        expected_result = [[0.5]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Validate modulo with negative integer inputs.
        query = "RETURN -5 % -2"
        actual_result = graph.query(query)
        expected_result = [[-1]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Validate modulo with negative floating-point inputs.
        query = "RETURN -5.5 % -2.5"
        actual_result = graph.query(query)
        expected_result = [[-0.5]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Validate modulo by 0
        # NOTE(review): if the query succeeds, no assertion fires — this
        # check passes silently unless a "Division by zero" error is raised.
        query = "RETURN 3 % 0"
        try:
            actual_result = graph.query(query)
        except redis.ResponseError as e:
            self.env.assertContains("Division by zero", str(e))

    # Aggregate functions should handle null inputs appropriately.
    def test11_null_aggregate_function_inputs(self):
        # SUM should sum all non-null inputs.
        query = """UNWIND [1, NULL, 3] AS a RETURN sum(a)"""
        actual_result = graph.query(query)
        expected_result = [[4]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # SUM should return 0 given a fully NULL input.
        query = """WITH NULL AS a RETURN sum(a)"""
        actual_result = graph.query(query)
        expected_result = [[0]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # COUNT should count all non-null inputs.
        query = """UNWIND [1, NULL, 3] AS a RETURN count(a)"""
        actual_result = graph.query(query)
        expected_result = [[2]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # COUNT should return 0 given a fully NULL input.
        query = """WITH NULL AS a RETURN count(a)"""
        actual_result = graph.query(query)
        expected_result = [[0]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # COLLECT should ignore null inputs.
        query = """UNWIND [1, NULL, 3] AS a RETURN collect(a)"""
        actual_result = graph.query(query)
        expected_result = [[[1, 3]]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # COLLECT should return an empty array on all null inputs.
        query = """WITH NULL AS a RETURN collect(a)"""
        actual_result = graph.query(query)
        expected_result = [[[]]]
        self.env.assertEquals(actual_result.result_set, expected_result)

    # Verify that nested functions that perform heap allocations return properly.
    def test12_nested_heap_functions(self):
        query = """MATCH p = (n) WITH head(nodes(p)) AS node RETURN node.name ORDER BY node.name"""
        actual_result = graph.query(query)
        expected_result = [['Ailon'], ['Alon'], ['Boaz'], ['Roi']]
        self.env.assertEquals(actual_result.result_set, expected_result)

    # CASE...WHEN statements should properly handle NULL, false, and true evaluations.
    def test13_case_when_inputs(self):
        # Simple case form: single value evaluation.
        query = """UNWIND [NULL, true, false] AS v RETURN v, CASE v WHEN true THEN v END"""
        actual_result = graph.query(query)
        expected_result = [[None, None], [True, True], [False, None]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = """UNWIND [NULL, true, false] AS v RETURN v, CASE v WHEN true THEN v WHEN false THEN v END"""
        actual_result = graph.query(query)
        expected_result = [[None, None], [True, True], [False, False]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Generic case form: evaluation for each case.
        query = """UNWIND [NULL, true, false] AS v RETURN v, CASE WHEN v THEN v END"""
        actual_result = graph.query(query)
        # Only the true value should return non-NULL.
        expected_result = [[None, None], [True, True], [False, None]]
        self.env.assertEquals(actual_result.result_set, expected_result)

        query = """UNWIND [NULL, true, false] AS v RETURN v, CASE WHEN v IS NOT NULL THEN v END"""
        actual_result = graph.query(query)
        # The true and false values should both return non-NULL.
        expected_result = [[None, None], [True, True], [False, False]]
        self.env.assertEquals(actual_result.result_set, expected_result)

    # CASE...WHEN statements should manage allocated values properly.
    def test14_case_when_memory_management(self):
        # Simple case form: single value evaluation.
        query = """WITH 'A' AS a WITH CASE a WHEN 'A' THEN toString(a) END AS key RETURN toLower(key)"""
        actual_result = graph.query(query)
        expected_result = [['a']]
        self.env.assertEquals(actual_result.result_set, expected_result)
        # Generic case form: evaluation for each case.
        query = """WITH 'A' AS a WITH CASE WHEN true THEN toString(a) END AS key RETURN toLower(key)"""
        actual_result = graph.query(query)
        expected_result = [['a']]
        self.env.assertEquals(actual_result.result_set, expected_result)

    def test15_aggregate_error_handling(self):
        functions = [
            "avg", "collect", "count", "max", "min", "sum", "percentileDisc",
            "percentileCont", "stDev"
        ]
        # Test all functions for invalid argument counts.
        for function in functions:
            query = """UNWIND range(0, 10) AS val RETURN %s(val, val, val)""" % (
                function)
            self.expect_error(query, "Received 3 arguments")

        # Test numeric functions for invalid input types.
        numeric_functions = ["avg", "sum", "stDev"]
        for function in numeric_functions:
            query = """UNWIND ['a', 'b', 'c'] AS val RETURN %s(val)""" % (
                function)
            self.expect_type_error(query)

        # Test invalid numeric input for percentile function.
        query = """UNWIND range(0, 10) AS val RETURN percentileDisc(val, -1)"""
        self.expect_error(query, "must be a number in the range 0.0 to 1.0")

    # startNode and endNode calls should return the appropriate nodes.
    def test16_edge_endpoints(self):
        query = """MATCH (a)-[e]->(b) RETURN a.name, startNode(e).name, b.name, endNode(e).name"""
        actual_result = graph.query(query)
        for row in actual_result.result_set:
            self.env.assertEquals(row[0], row[1])
            self.env.assertEquals(row[2], row[3])

    def test17_to_json(self):
        # Test JSON literal values in an array.
        query = """RETURN toJSON([1, 'str', true, NULL])"""
        actual_result = graph.query(query)
        parsed = json.loads(actual_result.result_set[0][0])
        self.env.assertEquals(parsed, [1, "str", True, None])

        # Test JSON an empty array value.
        query = """WITH [] AS arr RETURN toJSON(arr)"""
        actual_result = graph.query(query)
        parsed = json.loads(actual_result.result_set[0][0])
        self.env.assertEquals(parsed, [])

        # Test JSON an empty map value.
        query = """WITH {} AS map RETURN toJSON(map)"""
        actual_result = graph.query(query)
        parsed = json.loads(actual_result.result_set[0][0])
        self.env.assertEquals(parsed, {})

        # Test converting a map projection.
        query = """MATCH (n {val: 1}) RETURN toJSON(n {.val, .name})"""
        actual_result = graph.query(query)
        parsed = json.loads(actual_result.result_set[0][0])
        self.env.assertEquals(parsed, {"name": "Alon", "val": 1})

        # Test converting a full node.
        query = """MATCH (n {val: 1}) RETURN toJSON(n)"""
        actual_result = graph.query(query)
        parsed = json.loads(actual_result.result_set[0][0])
        self.env.assertEquals(
            parsed, {
                "type": "node",
                "id": 1,
                "labels": ["person"],
                "properties": {
                    "name": "Alon",
                    "val": 1
                }
            })

        # Test converting a full edge.
        query = """MATCH ({val: 0})-[e:works_with]->({val: 1}) RETURN toJSON(e)"""
        actual_result = graph.query(query)
        start = {
            "id": 0,
            "labels": ["person"],
            "properties": {
                "name": "Roi",
                "val": 0
            }
        }
        end = {
            "id": 1,
            "labels": ["person"],
            "properties": {
                "name": "Alon",
                "val": 1
            }
        }
        parsed = json.loads(actual_result.result_set[0][0])
        self.env.assertEquals(
            parsed, {
                "type": "relationship",
                "id": 12,
                "relationship": "works_with",
                "properties": {},
                "start": start,
                "end": end
            })

        # Test converting a path.
        query = """MATCH path=({val: 0})-[e:works_with]->({val: 1}) RETURN toJSON(path)"""
        actual_result = graph.query(query)
        expected = [{
            'type': 'node',
            'id': 0,
            'labels': ['person'],
            'properties': {
                'name': 'Roi',
                'val': 0
            }
        }, {
            'type': 'relationship',
            'id': 12,
            'relationship': 'works_with',
            'properties': {},
            'start': {
                'id': 0,
                'labels': ['person'],
                'properties': {
                    'name': 'Roi',
                    'val': 0
                }
            },
            'end': {
                'id': 1,
                'labels': ['person'],
                'properties': {
                    'name': 'Alon',
                    'val': 1
                }
            }
        }, {
            'type': 'node',
            'id': 1,
            'labels': ['person'],
            'properties': {
                'name': 'Alon',
                'val': 1
            }
        }]
        parsed = json.loads(actual_result.result_set[0][0])
        self.env.assertEquals(parsed, expected)

    # Memory should be freed properly when the key values are heap-allocated.
    def test18_allocated_keys(self):
        query = """UNWIND ['str1', 'str1', 'str2', 'str1'] AS key UNWIND [1, 2, 3] as agg RETURN toUpper(key) AS key, collect(DISTINCT agg) ORDER BY key"""
        actual_result = graph.query(query)
        expected_result = [['STR1', [1, 2, 3]], ['STR2', [1, 2, 3]]]
        self.env.assertEquals(actual_result.result_set, expected_result)

    def test19_has_labels(self):
        # Test existing label
        query = """MATCH (n) WHERE n:person RETURN n.name"""
        actual_result = graph.query(query)
        expected_result = [['Roi'], ['Alon'], ['Ailon'], ['Boaz']]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Test not existing label
        query = """MATCH (n) WHERE n:L RETURN n.name"""
        actual_result = graph.query(query)
        expected_result = []
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Test multi label
        query = """MATCH (n) WHERE n:person:L RETURN n.name"""
        actual_result = graph.query(query)
        expected_result = []
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Test or between different labels label
        query = """MATCH (n) WHERE n:person OR n:L RETURN n.name"""
        actual_result = graph.query(query)
        expected_result = [['Roi'], ['Alon'], ['Ailon'], ['Boaz']]
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Test multi label using functions
        query = """MATCH (n) WHERE hasLabels(n, ['person', 'L']) RETURN n.name"""
        actual_result = graph.query(query)
        expected_result = []
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Test has labels using functions mismatch type
        # NOTE(review): like test10, a silently-successful query here passes
        # without any assertion firing.
        query = """MATCH (n) WHERE hasLabels(n, ['person', 1]) RETURN n.name"""
        try:
            graph.query(query)
        except redis.ResponseError as e:
            self.env.assertContains(
                "Type mismatch: expected String but was Integer", str(e))
Exemplo n.º 14
0
class testEntityUpdate(FlowTestsBase):
    """SET-clause update semantics: scalar attributes, property-map
    assignment/merge, attribute-set reassignment between entities, and
    the index maintenance that must accompany those updates.

    Two module-level graphs are used:
      * ``graph`` -- a single node, exercised by most tests.
      * ``multiple_entity_graph`` -- two :L nodes joined by an :R edge,
        with indices on v1/v2 to validate index updates.

    NOTE(review): tests are order-dependent -- each relies on the
    property state left behind by earlier tests on these graphs.
    """

    def __init__(self):
        global graph
        global multiple_entity_graph
        self.env = Env(decodeResponses=True)
        # create a graph with a single node with attribute 'v'
        graph = Graph('update', self.env.getConnection())
        graph.query("CREATE ({v:1})")

        # create a graph with a two nodes connected by an edge
        multiple_entity_graph = Graph('multiple_entity_update',
                                      self.env.getConnection())
        multiple_entity_graph.query(
            "CREATE (:L {v1: 1})-[:R {v1: 3}]->(:L {v2: 2})")
        # index both properties so later tests can assert index consistency
        multiple_entity_graph.query("CREATE INDEX ON :L(v1)")
        multiple_entity_graph.query("CREATE INDEX ON :L(v2)")

    def test01_update_attribute(self):
        """Overwriting an existing attribute counts as one property set."""
        # update existing attribute 'v'
        result = graph.query("MATCH (n) SET n.v = 2")
        self.env.assertEqual(result.properties_set, 1)

    def test02_update_none_existing_attr(self):
        """Introducing a brand-new attribute counts as one property set."""
        # introduce a new attribute 'x'
        result = graph.query("MATCH (n) SET n.x = 1")
        self.env.assertEqual(result.properties_set, 1)

    def test03_update_no_change(self):
        """Re-assigning the current value must not count as an update."""
        # setting 'x' to its current value
        result = graph.query("MATCH (n) SET n.x = 1")
        self.env.assertEqual(result.properties_set, 0)

        # setting both 'v' and 'x' to their current values
        result = graph.query("MATCH (n) SET n.v = 2, n.x = 1")
        self.env.assertEqual(result.properties_set, 0)

        # update 'v' to a new value, 'x' remains the same
        result = graph.query("MATCH (n) SET n.v = 1, n.x = 1")
        self.env.assertEqual(result.properties_set, 1)

        # update 'x' to a new value, 'v' remains the same
        result = graph.query("MATCH (n) SET n.v = 1, n.x = 2")
        self.env.assertEqual(result.properties_set, 1)

    def test04_update_remove_attribute(self):
        """SET attr = NULL removes the attribute (counted as one set)."""
        # remove the 'x' attribute
        result = graph.query("MATCH (n) SET n.x = NULL")
        self.env.assertEqual(result.properties_set, 1)

    def test05_update_from_projection(self):
        """SET may consume a value projected by UNWIND."""
        result = graph.query(
            "MATCH (n) UNWIND ['Calgary'] as city_name SET n.name = city_name RETURN n.v, n.name"
        )
        expected_result = [[1, 'Calgary']]
        self.env.assertEqual(result.properties_set, 1)
        self.env.assertEqual(result.result_set, expected_result)

    # Set the entity's properties to an empty map
    def test06_replace_property_map(self):
        """SET n = {} clears every property on the node."""
        empty_node = Node()
        result = graph.query("MATCH (n) SET n = {} RETURN n")
        expected_result = [[empty_node]]
        # The node originally had 2 properties, 'name' and 'city_name'
        self.env.assertEqual(result.properties_set, 2)
        self.env.assertEqual(result.result_set, expected_result)

    # Update the entity's properties by setting a specific property and merging property maps
    def test07_update_property_map(self):
        """Combine a scalar SET with a += map merge in one clause."""
        node = Node(properties={"v": 1, "v2": 2})
        result = graph.query("MATCH (n) SET n.v = 1, n += {v2: 2} RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.properties_set, 2)
        self.env.assertEqual(result.result_set, expected_result)

    # Replacement maps overwrite existing properties and previous SETs but do not modify subsequent non-replacement SETs
    def test08_multiple_updates_to_property_map(self):
        """n = {...} discards earlier SETs; later SETs still apply."""
        node = Node(properties={"v": 1, "v2": 2, "v4": 4})
        result = graph.query(
            "MATCH (n) SET n.v3 = 3, n = {v: 1}, n += {v2: 2}, n.v4 = 4 RETURN n"
        )
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

    # MERGE updates should support the same operations as SET updates
    def test09_merge_update_map(self):
        """ON MATCH SET supports map replacement followed by a scalar SET."""
        node = Node(properties={"v": 5})
        result = graph.query(
            "MERGE (n {v: 1}) ON MATCH SET n = {}, n.v = 5 RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

    # Update properties with a map retrieved by alias
    def test10_property_map_from_identifier(self):
        """SET n = / n += accept a map bound to an alias via WITH."""
        # Overwrite existing properties
        node = Node(properties={"v2": 10})
        result = graph.query(
            "WITH {v2: 10} as props MATCH (n) SET n = props RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

        # Merge property maps
        node = Node(properties={"v1": True, "v2": 10})
        result = graph.query(
            "WITH {v1: True} as props MATCH (n) SET n += props RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

    # Update properties with a map retrieved from a parameter
    def test11_property_map_from_parameter(self):
        """SET n = / n += accept a map passed as a CYPHER parameter."""
        # Overwrite existing properties
        node = Node(properties={"v2": 10})
        result = graph.query(
            "CYPHER props={v2: 10} MATCH (n) SET n = $props RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

        # Merge property maps
        node = Node(properties={"v1": True, "v2": 10})
        result = graph.query(
            "CYPHER props={v1: true} MATCH (n) SET n += $props RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

    # Fail update an entity property when left hand side is not alias
    def test12_fail_update_property_of_non_alias_entity(self):
        """Non-alias left-hand sides of SET must be rejected."""
        try:
            graph.query("MATCH P=() SET nodes(P).prop = 1 RETURN nodes(P)")
            self.env.assertTrue(False)
        except ResponseError as e:
            self.env.assertContains(
                "RedisGraph does not currently support non-alias references on the left-hand side of SET expressions",
                str(e))

    # Fail when a property is a complex type nested within an array type
    def test13_invalid_complex_type_in_array(self):
        """Arrays holding non-primitive values must be rejected."""
        # Test combinations of invalid types with nested and top-level arrays
        # Invalid types are NULL, maps, nodes, edges, and paths
        queries = [
            "MATCH (a) SET a.v = [a]",
            "MATCH (a) SET a = {v: ['str', [1, NULL]]}",
            "MATCH (a) SET a += [[{k: 'v'}]]",
            "CREATE (a:L)-[e:R]->(:L) SET a.v = [e]"
        ]
        for query in queries:
            try:
                graph.query(query)
                self.env.assertTrue(False)
            except ResponseError as e:
                self.env.assertContains(
                    "Property values can only be of primitive types or arrays of primitive types",
                    str(e))

    # fail when attempting to perform invalid map assignment
    def test14_invalid_map_assignment(self):
        """A map is not a valid value for a single property."""
        try:
            graph.query("MATCH (a) SET a.v = {f: true}")
            self.env.assertTrue(False)
        except ResponseError as e:
            self.env.assertContains(
                "Property values can only be of primitive types or arrays of primitive types",
                str(e))

    # update properties by attribute set reassignment
    def test15_assign_entity_properties(self):
        """Assign one entity's attribute set to another, verifying indices."""
        # merge attribute set of a node with existing properties
        node = Node(label="L", properties={"v1": 1, "v2": 2})
        result = multiple_entity_graph.query(
            "MATCH (n1 {v1: 1}), (n2 {v2: 2}) SET n1 += n2 RETURN n1")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)
        # validate index updates
        result = multiple_entity_graph.query(
            "MATCH (n:L) WHERE n.v1 > 0 RETURN n.v1 ORDER BY n.v1")
        expected_result = [[1]]
        self.env.assertEqual(result.result_set, expected_result)
        result = multiple_entity_graph.query(
            "MATCH (n:L) WHERE n.v2 > 0 RETURN n.v2 ORDER BY n.v2")
        expected_result = [[2], [2]]
        self.env.assertEqual(result.result_set, expected_result)

        # overwrite attribute set of node with attribute set of edge
        node = Node(label="L", properties={"v1": 3})
        result = multiple_entity_graph.query(
            "MATCH (n {v1: 1})-[e]->() SET n = e RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)
        # validate index updates
        result = multiple_entity_graph.query(
            "MATCH (n:L) WHERE n.v1 > 0 RETURN n.v1 ORDER BY n.v1")
        expected_result = [[3]]
        self.env.assertEqual(result.result_set, expected_result)
        result = multiple_entity_graph.query(
            "MATCH (n:L) WHERE n.v2 > 0 RETURN n.v2 ORDER BY n.v2")
        expected_result = [[2]]
        self.env.assertEqual(result.result_set, expected_result)

    # repeated attribute set reassignment
    def test16_assign_entity_properties(self):
        """Repeated attribute-set assignments; last write wins per row."""
        # repeated merges to the attribute set of a node
        node = Node(label="L", properties={"v1": 3, "v2": 2})
        result = multiple_entity_graph.query(
            "MATCH (n), (x) WHERE ID(n) = 0 WITH n, x ORDER BY ID(x) SET n += x RETURN n"
        )
        expected_result = [[node], [node]]
        self.env.assertEqual(result.result_set, expected_result)
        # validate index updates
        result = multiple_entity_graph.query(
            "MATCH (n:L) WHERE n.v1 > 0 RETURN n.v1 ORDER BY n.v1")
        expected_result = [[3]]
        self.env.assertEqual(result.result_set, expected_result)
        result = multiple_entity_graph.query(
            "MATCH (n:L) WHERE n.v2 > 0 RETURN n.v2 ORDER BY n.v2")
        expected_result = [[2], [2]]
        self.env.assertEqual(result.result_set, expected_result)

        # repeated updates to the attribute set of a node
        node = Node(label="L", properties={"v2": 2})
        result = multiple_entity_graph.query(
            "MATCH (n), (x) WHERE ID(n) = 0 WITH n, x ORDER BY ID(x) SET n = x RETURN n"
        )
        expected_result = [[node], [node]]
        self.env.assertEqual(result.result_set, expected_result)
        # validate index updates
        result = multiple_entity_graph.query(
            "MATCH (n:L) WHERE n.v1 > 0 RETURN n.v1 ORDER BY n.v1")
        expected_result = []
        self.env.assertEqual(result.result_set, expected_result)
        result = multiple_entity_graph.query(
            "MATCH (n:L) WHERE n.v2 > 0 RETURN n.v2 ORDER BY n.v2")
        expected_result = [[2], [2]]
        self.env.assertEqual(result.result_set, expected_result)

        # repeated multiple updates to the attribute set of a node
        node = Node(label="L", properties={"v2": 2})
        result = multiple_entity_graph.query(
            "MATCH (n), (x) WHERE ID(n) = 0 WITH n, x ORDER BY ID(x) SET n = x, n += x RETURN n"
        )
        expected_result = [[node], [node]]
        self.env.assertEqual(result.result_set, expected_result)
        # validate index updates
        result = multiple_entity_graph.query(
            "MATCH (n:L) WHERE n.v1 > 0 RETURN n.v1 ORDER BY n.v1")
        expected_result = []
        self.env.assertEqual(result.result_set, expected_result)
        result = multiple_entity_graph.query(
            "MATCH (n:L) WHERE n.v2 > 0 RETURN n.v2 ORDER BY n.v2")
        expected_result = [[2], [2]]
        self.env.assertEqual(result.result_set, expected_result)

    # fail when attempting to perform invalid entity assignment
    def test17_invalid_entity_assignment(self):
        """Assigning non-entity / non-map values to an entity must fail."""
        queries = [
            "MATCH (a) SET a.v = [a]", "MATCH (a) SET a = a.v",
            "MATCH (a) SET a = NULL"
        ]
        for query in queries:
            try:
                graph.query(query)
                self.env.assertTrue(False)
            except ResponseError as e:
                self.env.assertContains(
                    "Property values can only be of primitive types or arrays of primitive types",
                    str(e))
Exemplo n.º 15
0
class testQueryTimeout(FlowTestsBase):
    """Query timeout behavior: the per-query TIMEOUT argument, the
    module-level timeout configuration, write-query exemption, and
    timeouts during the various scan operations.

    NOTE(review): tests are order-dependent -- test02 lowers the
    module-level timeout to 1 ms and never restores it, so test03/test04
    run under that configuration.
    """

    def __init__(self):
        self.env = Env(decodeResponses=True)
        # skip test if we're running under Valgrind
        if self.env.envRunner.debugger is not None or os.getenv('COV') == '1':
            self.env.skip()  # queries will be much slower under Valgrind

        global redis_con
        global redis_graph
        redis_con = self.env.getConnection()
        redis_graph = Graph("timeout", redis_con)

    def test01_read_query_timeout(self):
        """A slow read query times out at 1 ms but succeeds at 200 ms."""
        query = "UNWIND range(0,100000) AS x WITH x AS x WHERE x = 10000 RETURN x"
        try:
            # The query is expected to timeout
            redis_graph.query(query, timeout=1)
            assert (False)
        except ResponseError as error:
            self.env.assertContains("Query timed out", str(error))

        try:
            # The query is expected to succeed
            redis_graph.query(query, timeout=200)
        except:
            assert (False)

    def test02_configured_timeout(self):
        """Module-level GRAPH.CONFIG timeout applies without a TIMEOUT arg."""
        # Verify that the module-level timeout is set to the default of 0
        response = redis_con.execute_command("GRAPH.CONFIG GET timeout")
        self.env.assertEquals(response[1], 0)
        # Set a default timeout of 1 millisecond
        # NOTE(review): this setting is intentionally left in place for the
        # remaining tests in this class -- confirm that is the intent.
        redis_con.execute_command("GRAPH.CONFIG SET timeout 1")
        response = redis_con.execute_command("GRAPH.CONFIG GET timeout")
        self.env.assertEquals(response[1], 1)

        # Validate that a read query times out
        query = "UNWIND range(0,100000) AS x WITH x AS x WHERE x = 10000 RETURN x"
        try:
            redis_graph.query(query)
            assert (False)
        except ResponseError as error:
            self.env.assertContains("Query timed out", str(error))

    def test03_write_query_ignore_timeout(self):
        """Write queries and index create/drop must ignore timeouts."""
        #----------------------------------------------------------------------
        # verify that the timeout argument is ignored by write queries
        #----------------------------------------------------------------------
        write_queries = [
            # create query
            "UNWIND range(0, 10000) AS x CREATE (a:M)",
            # update query
            "MATCH (a:M) SET a.v = 2",
            # delete query
            "MATCH (a:M) DELETE a"
        ]

        # queries should complete successfully
        for q in write_queries:
            try:
                result = redis_graph.query(q, timeout=1)
                # the query should have taken longer than the timeout value
                self.env.assertGreater(result.run_time_ms, 2)
            except ResponseError:
                assert (False)

        #----------------------------------------------------------------------
        # index creation should ignore timeouts
        #----------------------------------------------------------------------
        query = "UNWIND range (0, 100000) AS x CREATE (:M {v:x})"
        redis_graph.query(query)

        # create index
        query = "CREATE INDEX ON :M(v)"
        try:
            # the query should complete successfully
            result = redis_graph.query(query, timeout=1)
            self.env.assertEquals(result.indices_created, 1)
        except ResponseError:
            assert (False)

        #----------------------------------------------------------------------
        # index deletion should ignore timeouts
        #----------------------------------------------------------------------
        query = "DROP INDEX ON :M(v)"
        try:
            # the query should complete successfully
            result = redis_graph.query(query, timeout=1)
            self.env.assertEquals(result.indices_deleted, 1)
        except ResponseError:
            assert (False)

    def test04_timeout_index_scan(self):
        """Every scan type must honor the timeout; LIMIT lets it finish."""
        # construct a graph and create multiple indices
        query = """UNWIND range(0, 100000) AS x CREATE (p:Person {age: x%90, height: x%200, weight: x%80})"""
        redis_graph.query(query)

        query = """CREATE INDEX ON :Person(age, height, weight)"""
        redis_graph.query(query)

        queries = [
            # full scan
            "MATCH (a) RETURN a",
            # ID scan
            "MATCH (a) WHERE ID(a) > 20 RETURN a",
            # label scan
            "MATCH (a:Person) RETURN a",
            # single index scan
            "MATCH (a:Person) WHERE a.age > 40 RETURN a",
            # index scan + full scan
            "MATCH (a:Person), (b) WHERE a.age > 40 RETURN a, b",
            # index scan + ID scan
            "MATCH (a:Person), (b) WHERE a.age > 40 AND ID(b) > 20 RETURN a, b",
            # index scan + label scan
            "MATCH (a:Person), (b:Person) WHERE a.age > 40 RETURN a, b",
            # multi full and index scans
            "MATCH (a:Person), (b:Person), (c), (d) WHERE a.age > 40 AND b.height < 150 RETURN a,b,c,d",
            # multi ID and index scans
            "MATCH (a:Person), (b:Person), (c:Person), (d) WHERE a.age > 40 AND b.height < 150 AND ID(c) > 20 AND ID(d) > 30 RETURN a,b,c,d",
            # multi label and index scans
            "MATCH (a:Person), (b:Person), (c:Person), (d:Person) WHERE a.age > 40 AND b.height < 150 RETURN a,b,c,d",
            # multi index scans
            "MATCH (a:Person), (b:Person), (c:Person) WHERE a.age > 40 AND b.height < 150 AND c.weight = 50 RETURN a,b,c"
        ]

        for q in queries:
            try:
                # query is expected to timeout
                redis_graph.query(q, timeout=2)
                assert (False)
            except ResponseError as error:
                self.env.assertContains("Query timed out", str(error))

        # validate that server didn't crash
        redis_con.ping()

        # rerun each query with timeout and limit
        # expecting queries to run to completion
        for q in queries:
            q += " LIMIT 2"
            redis_graph.query(q, timeout=10)

        # validate that server didn't crash
        redis_con.ping()
Exemplo n.º 16
0
class testBasic:
    """Basic RedisGears pipeline tests over a keyspace of 100 string keys.

    ``__init__`` seeds keys '0'..'99', each holding its own name as its
    value; every test then builds a GearsBuilder pipeline over that data.
    """

    def __init__(self):
        self.env = Env()
        conn = getConnectionByEnv(self.env)
        # seed: key str(i) -> value str(i) for i in 0..99
        for i in range(100):
            conn.execute_command('set', str(i), str(i))

    def testShardsGB(self):
        """Sum dbsize across shards; all 100 keys must be accounted for."""
        self.env.expect(
            'rg.pyexecute', "GB('ShardsIDReader')."
            "map(lambda x:int(execute('dbsize')))."
            "aggregate(0, lambda r, x: x, lambda r, x:r + x).run()").contains(
                ['100'])

    def testKeysOnlyGB(self):
        """KeysOnlyReader: fetch each value by key and sum (0+..+99 = 4950)."""
        self.env.expect(
            'rg.pyexecute', "GearsBuilder('KeysOnlyReader')."
            "map(lambda x:int(execute('get', x)))."
            "aggregate(0, lambda r, x: r + x, lambda r, x:r + x).run()"
        ).contains(['4950'])

    def testAvg(self):
        """avg() over the values 0..99 is 49.5."""
        self.env.expect(
            'rg.pyexecute', "GearsBuilder()."
            "map(lambda x:int(x['value']))."
            "avg().run()").contains(['49.5'])

    def testAggregate(self):
        """aggregate() summing locally and globally yields 4950."""
        self.env.expect(
            'rg.pyexecute', "GearsBuilder()."
            "map(lambda x:int(x['value']))."
            "aggregate(0, lambda r, x: x + r, lambda r, x: x + r).run()"
        ).contains(['4950'])

    def testCount(self):
        """count() reports all 100 records."""
        self.env.expect('rg.pyexecute',
                        "GearsBuilder().count().run()").contains(['100'])

    def testSort(self):
        """sort() returns the integer values in ascending order."""
        self.env.expect(
            'rg.pyexecute',
            "GearsBuilder().map(lambda x:int(x['value'])).sort().run()"
        ).contains([str(i) for i in range(100)])

    def testCountBy(self):
        """countby() buckets values into 0/100 groups of 50 records each."""
        res = self.env.cmd(
            'rg.pyexecute', 'GearsBuilder().'
            'map(lambda x: {"key":x["key"], "value": 0 if int(x["value"]) < 50 else 100}).'
            'countby(lambda x: x["value"]).collect().run()')
        # replies are python-dict reprs produced by our own pipeline;
        # eval() is acceptable for this trusted test payload
        a = [eval(r) for r in res[0]]
        self.env.assertContains({'key': '100', 'value': 50}, a)
        self.env.assertContains({'value': 50, 'key': '0'}, a)

    def testLocalAggregate(self):
        """__localAggregateby__ counts per bucket on a single shard."""
        self.env.skipOnCluster()
        res = self.env.cmd(
            'rg.pyexecute', 'GearsBuilder().'
            'map(lambda x: {"key":x["key"], "value": 0 if int(x["value"]) < 50 else 100}).'
            '__localAggregateby__(lambda x:x["value"], 0, lambda k, a, x: 1 + a).'
            'map(lambda x:(x["key"], x["value"])).run()')
        a = [eval(r) for r in res[0]]
        self.env.assertContains(('100', 50), a)
        self.env.assertContains(('0', 50), a)

    def testBasicQuery(self):
        """UNBLOCKING run + getresultsblocking round-trips all 100 records."""
        exec_id = self.env.cmd(
            'rg.pyexecute',
            "GearsBuilder().map(lambda x:str(x)).collect().run()",
            'UNBLOCKING')
        res = self.env.cmd('rg.getresultsblocking', exec_id)
        # yaml.load() without an explicit Loader is deprecated since
        # PyYAML 5.1 and an error in PyYAML 6; safe_load parses these
        # plain scalar/dict payloads identically.
        res = [yaml.safe_load(r) for r in res[0]]
        for i in range(100):
            self.env.assertContains(
                {
                    'value': str(i),
                    'type': 'string',
                    'event': 'None',
                    'key': str(i)
                }, res)
        self.env.cmd('rg.dropexecution', exec_id)

    def testBasicFilterQuery(self):
        """filter() keeps only the records whose value is >= 50."""
        exec_id = self.env.cmd(
            'rg.pyexecute',
            'GearsBuilder().filter(lambda x: int(x["value"]) >= 50).map(lambda x:str(x)).collect().run()',
            'UNBLOCKING')
        res = self.env.cmd('rg.getresultsblocking', exec_id)
        res = [yaml.safe_load(r) for r in res[0]]
        for i in range(50, 100):
            self.env.assertContains(
                {
                    'value': str(i),
                    'type': 'string',
                    'event': 'None',
                    'key': str(i)
                }, res)
        self.env.cmd('rg.dropexecution', exec_id)

    def testBasicMapQuery(self):
        """map() projects the raw values; YAML parses them back to ints."""
        exec_id = self.env.cmd(
            'rg.pyexecute',
            'GearsBuilder().map(lambda x: x["value"]).map(lambda x:str(x)).collect().run()',
            'UNBLOCKING')
        res = self.env.cmd('rg.getresultsblocking', exec_id)
        res = [yaml.safe_load(r) for r in res[0]]
        self.env.assertEqual(set(res), set(range(100)))
        self.env.cmd('rg.dropexecution', exec_id)

    def testBasicGroupByQuery(self):
        """groupby() counts the 0/100 buckets, 50 records apiece."""
        exec_id = self.env.cmd(
            'rg.pyexecute', 'GearsBuilder().'
            'map(lambda x: {"key":x["key"], "value": 0 if int(x["value"]) < 50 else 100}).'
            'groupby(lambda x: str(x["value"]), lambda key, a, vals: 1 + (a if a else 0)).'
            'map(lambda x:str(x)).collect().run()', 'UNBLOCKING')
        res = self.env.cmd('rg.getresultsblocking', exec_id)
        self.env.assertContains("{'key': '100', 'value': 50}", res[0])
        self.env.assertContains("{'key': '0', 'value': 50}", res[0])
        self.env.cmd('rg.dropexecution', exec_id)

    def testBasicAccumulate(self):
        """accumulate() before and after collect() sums to 0+..+99."""
        exec_id = self.env.cmd(
            'rg.pyexecute', 'GearsBuilder().'
            'map(lambda x: int(x["value"])).'
            'accumulate(lambda a,x: x + (a if a else 0)).'
            'collect().'
            'accumulate(lambda a,x: x + (a if a else 0)).'
            'map(lambda x:str(x)).run()', 'UNBLOCKING')
        res = self.env.cmd('rg.getresultsblocking', exec_id)[0]
        self.env.assertEqual(sum(range(100)), int(res[0]))
        self.env.cmd('rg.dropexecution', exec_id)
Exemplo n.º 17
0
class TestIContains():
    """Tests for the ICONTAINS command of the interval-set module.

    Interval syntax as exercised below: 'a,b' is inclusive on both ends,
    'a,b[' excludes the upper bound, ']a,b' excludes the lower bound.
    Member replies have the shape [name, b'1', lower, b'1', upper];
    the b'1' flags presumably encode bound inclusiveness -- TODO confirm
    against the module's documentation.

    Each test starts with FLUSHALL so it is independent of the others.
    """

    def __init__(self):
        self.env = Env()

    def test_icontains_should_return_an_error_with_0_parameters(self):
        self.env.cmd('FLUSHALL')
        self.env.expect('icontains').error()

    def test_icontains_should_return_an_error_with_1_parameter(self):
        self.env.cmd('FLUSHALL')
        self.env.expect('icontains', 'intervals').error()

    def test_icontains_should_return_an_error_when_second_parameter_is_not_a_number(
            self):
        self.env.cmd('FLUSHALL')
        self.env.expect('icontains', 'intervals', 'test').error()

    def test_icontains_should_return_an_error_when_count_is_incomplete(self):
        self.env.cmd('FLUSHALL')
        self.env.expect('icontains', 'intervals', '1', 'COUNT').error()

    def test_icontains_should_return_an_error_when_count_amount_is_negative(
            self):
        self.env.cmd('FLUSHALL')
        self.env.expect('icontains', 'intervals', '1', 'COUNT', '-1').error()

    def test_icontains_should_return_an_error_when_count_amount_is_not_a_number(
            self):
        self.env.cmd('FLUSHALL')
        self.env.expect('icontains', 'intervals', '1', 'COUNT', 'test').error()

    def test_icontains_should_return_empty_when_key_is_empty(self):
        self.env.cmd('FLUSHALL')
        self.env.expect('icontains', 'intervals', '0').equal([])

    def test_icontains_should_return_one_member(self):
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', '0,1', 'i1')
        self.env.expect('icontains', 'intervals',
                        '0').equal([[b'i1', b'1', b'0', b'1', b'1']])

    def test_icontains_should_return_empty_when_value_is_out_of_interval(self):
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', '0,1', 'i1')
        self.env.expect('icontains', 'intervals', '2').equal([])

    def test_icontains_should_return_empty_when_value_is_equals_to_upper_and_upper_is_excluded(
            self):
        # '0,1[' -- trailing '[' excludes the upper bound
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', '0,1[', 'i1')
        self.env.expect('icontains', 'intervals', '1').equal([])

    def test_icontains_should_return_empty_when_value_is_equals_to_lower_and_lower_is_excluded(
            self):
        # ']0,1' -- leading ']' excludes the lower bound
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', ']0,1', 'i1')
        self.env.expect('icontains', 'intervals', '0').equal([])

    def test_icontains_should_return_2_members(self):
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', '0,3', 'i1')
        self.env.cmd('iadd', 'intervals', '0,5', 'i2')
        res = self.env.cmd('icontains', 'intervals', '2')
        self.env.assertEqual(len(res), 2)
        self.env.assertContains([b'i2', b'1', b'0', b'1', b'5'], res)
        self.env.assertContains([b'i1', b'1', b'0', b'1', b'3'], res)

    def test_icontains_should_return_one_interval(self):
        # COUNT limits the number of returned members
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', '0,10', 'i1')
        self.env.cmd('iadd', 'intervals', '5,15', 'i2')
        res = self.env.cmd('icontains', 'intervals', '7', 'COUNT', '1')
        self.env.assertEqual(len(res), 1)

    def test_icontains_should_return_two_intervals(self):
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', '0,10', 'i1')
        self.env.cmd('iadd', 'intervals', '5,15', 'i2')
        self.env.cmd('iadd', 'intervals', '5,20', 'i3')
        res = self.env.cmd('icontains', 'intervals', '7', 'COUNT', '2')
        self.env.assertEqual(len(res), 2)

    def test_icontains_should_store_result_in_an_other_key(self):
        # STORE writes the matching members into the destination set
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', '0,1', 'i1')
        self.env.cmd('iadd', 'intervals', '0,2', 'i2')
        self.env.cmd('iadd', 'intervals', '0,3', 'i3')
        self.env.cmd('iadd', 'intervals', '0,4', 'i4')
        self.env.cmd('icontains', 'intervals', '3', 'STORE', 'destination')
        res = self.env.cmd('icard', 'destination')
        self.env.assertEqual(res, 2)

    def test_icontains_should_store_result_in_an_other_key_only_1_member(self):
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', '0,1', 'i1')
        self.env.cmd('iadd', 'intervals', '0,2', 'i2')
        self.env.cmd('iadd', 'intervals', '0,3', 'i3')
        self.env.cmd('iadd', 'intervals', '0,4', 'i4')
        self.env.cmd('icontains', 'intervals', '3', 'COUNT', '1', 'STORE',
                     'destination')
        res = self.env.cmd('icard', 'destination')
        self.env.assertEqual(res, 1)

    def test_icontains_should_return_an_error_if_store_key_is_not_an_interval_set(
            self):
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', '0,1', 'i1')
        self.env.cmd('set', 'foo', 'bar')
        self.env.expect('icontains', 'intervals', '3', 'COUNT', '1', 'STORE',
                        'foo').error()

    def test_icontains_should_store_result_in_an_other_not_empty_interval_set(
            self):
        # STORE merges into an existing destination (i2 kept, i1 added)
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', '0,1', 'i1')
        self.env.cmd('iadd', 'destination', '2,3', 'i2')
        self.env.cmd('icontains', 'intervals', '1', 'STORE', 'destination')
        res = self.env.cmd('icard', 'destination')
        self.env.assertEqual(res, 2)

    def test_icontains_should_update_key_in_other_interval_set(self):
        # storing a member that already exists in the destination updates
        # its interval in place rather than duplicating it
        self.env.cmd('FLUSHALL')
        self.env.cmd('iadd', 'intervals', '0,1', 'i1')
        self.env.cmd('iadd', 'destination', '2,3', 'i1')
        self.env.expect('icontains', 'intervals', '1', 'STORE',
                        'destination').equal(1)
        res = self.env.cmd('iget', 'destination', 'i1')
        self.env.assertEqual(res, [[b'i1', b'1', b'0', b'1', b'1']])
Exemplo n.º 18
0
class testStepsWrongArgs:
    """Verify that every RedisGears builder step and reader rejects malformed
    arguments with a descriptive error rather than crashing the module.

    Each test sends a deliberately invalid RG.PYEXECUTE script and asserts
    either a generic error or a specific error-message substring.

    Fix applied: the original class defined ``testStreamReaderBatchBadValue``
    twice with identical bodies; the second definition silently shadowed the
    first, so the duplicate has been removed.
    """

    def __init__(self):
        self.env = Env()
        self.conn = getConnectionByEnv(self.env)

    def testRegisterWithWrongRegexType(self):
        self.env.expect('rg.pyexecute', 'GB().register(1)').error().contains(
            'regex argument must be a string')

    def testRegisterWithWrongEventKeysTypesList(self):
        # Non-iterable and wrongly-typed element cases for both keyword args.
        self.env.expect(
            'rg.pyexecute',
            'GB().register(regex="*", eventTypes=1)').error().contains(
                'not iterable')
        self.env.expect(
            'rg.pyexecute',
            'GB().register(regex="*", keyTypes=1)').error().contains(
                'not iterable')
        self.env.expect(
            'rg.pyexecute',
            'GB().register(regex="*", eventTypes=[1, 2, 3])').error().contains(
                'type is not string')
        self.env.expect(
            'rg.pyexecute',
            'GB().register(regex="*", keyTypes=[1, 2, 3])').error().contains(
                'type is not string')

    def testGearsBuilderWithWrongBuilderArgType(self):
        self.env.expect(
            'rg.pyexecute',
            'GB(1).run()').error().contains('reader argument must be a string')

    def testExecuteWithWrongCommandArgType(self):
        self.env.expect('rg.pyexecute', 'execute(1)').error().contains(
            'the given command must be a string')

    def testTimeEventWithWrongCallbackArg(self):
        self.env.expect(
            'rg.pyexecute',
            'registerTE(2, 2)').error().contains('callback must be a function')

    def testTimeEventWithWrongTimeArg(self):
        self.env.expect('rg.pyexecute',
                        'registerTE("2", lambda x: str(x))').error().contains(
                            'time argument must be a long')

    def testMapWrongArgs(self):
        self.env.expect(
            'rg.pyexecute',
            'GB().map(1, 2).run()').error().contains('wrong number of args')
        self.env.expect('rg.pyexecute', 'GB().map(1).run()').error().contains(
            'argument must be a function')

    def testFilterWrongArgs(self):
        self.env.expect(
            'rg.pyexecute',
            'GB().filter(1, 2).run()').error().contains('wrong number of args')
        self.env.expect('rg.pyexecute',
                        'GB().filter(1).run()').error().contains(
                            'argument must be a function')

    def testGroupByWrongArgs(self):
        self.env.expect('rg.pyexecute',
                        'GB().groupby(1, 2, 3).run()').error().contains(
                            'wrong number of args')
        self.env.expect('rg.pyexecute',
                        'GB().groupby(1, 2).run()').error().contains(
                            'argument must be a function')

    def testBatchGroupByWrongArgs(self):
        self.env.expect('rg.pyexecute',
                        'GB().batchgroupby(1, 2, 3).run()').error().contains(
                            'wrong number of args')
        self.env.expect('rg.pyexecute',
                        'GB().batchgroupby(1, 2).run()').error().contains(
                            'argument must be a function')

    def testCollectWrongArgs(self):
        self.env.expect('rg.pyexecute',
                        'GB().collect(1, 2, 3).run()').error().contains(
                            'wrong number of args')

    def testForEachWrongArgs(self):
        self.env.expect('rg.pyexecute',
                        'GB().foreach(1, 2).run()').error().contains(
                            'wrong number of args')
        self.env.expect('rg.pyexecute',
                        'GB().foreach(1).run()').error().contains(
                            'argument must be a function')

    def testRepartitionWrongArgs(self):
        self.env.expect('rg.pyexecute',
                        'GB().repartition(1, 2).run()').error().contains(
                            'wrong number of args')
        self.env.expect('rg.pyexecute',
                        'GB().repartition(1).run()').error().contains(
                            'argument must be a function')

    def testLimitWrongArgs(self):
        self.env.expect(
            'rg.pyexecute',
            'GB().limit().run()').error().contains('wrong number of args')
        self.env.expect('rg.pyexecute',
                        'GB().limit(1, 2, 3).run()').error().contains(
                            'wrong number of args')
        self.env.expect('rg.pyexecute',
                        'GB().limit("awdwada").run()').error().contains(
                            'argument must be a number')
        self.env.expect('rg.pyexecute',
                        'GB().limit(1, "kakaka").run()').error().contains(
                            'argument must be a number')

    def testAccumulateWrongArgs(self):
        self.env.expect('rg.pyexecute',
                        'GB().accumulate(1, 2).run()').error().contains(
                            'wrong number of args')
        self.env.expect('rg.pyexecute',
                        'GB().accumulate(1).run()').error().contains(
                            'argument must be a function')

    def testAvgWrongArgs(self):
        self.env.expect('rg.pyexecute', 'GB().avg(1).run()').error().contains(
            'argument must be a function')

    def testPyReaderWithWrongArgument(self):
        # The truncated substring 'functio' deliberately matches the module's
        # error message via contains().
        self.env.expect('rg.pyexecute',
                        'GB("PythonReader").run("*")').error().contains(
                            'pyreader argument must be a functio')
        self.env.expect('rg.pyexecute',
                        'GB("PythonReader").run()').error().contains(
                            'pyreader argument must be a functio')
        self.env.expect('rg.pyexecute',
                        'GB("PythonReader", "*").run()').error().contains(
                            'pyreader argument must be a functio')
        self.env.expect(
            'rg.pyexecute', 'GB("PythonReader", shardReaderCallback).run("*")'
        ).error().contains('pyreader argument must be a functio')

    def testStreamReaderBadFromIdFormat(self):
        # A malformed stream ID is reported inside the execution's error list,
        # not as a command-level error.
        self.conn.execute_command('XADD', 's', '*', 'foo', 'bar', 'foo1',
                                  'bar1')
        self.env.expect(
            'rg.pyexecute',
            'GearsBuilder("StreamReader").run("s", fromId="test")').equal([
                [],
                ['ERR Invalid stream ID specified as stream command argument']
            ])

    def testStreamReaderBadFromId(self):
        self.env.expect(
            'rg.pyexecute',
            'GearsBuilder("StreamReader").run("s", fromId=1)').error()

    def testKeysReaderNoScanBadValue(self):
        self.env.expect('rg.pyexecute', 'GearsBuilder().run(noScan=1)').error()

    def testKeysReaderReadValueBadValue(self):
        # readValue on run() — see testKeysReadeReadValueBadValue for the
        # register() variant.
        self.env.expect('rg.pyexecute',
                        'GearsBuilder().run(readValue=1)').error()

    def testOnRegisteredBadValue(self):
        self.env.expect('rg.pyexecute',
                        'GearsBuilder().register(onRegistered=1)').error()

    def testRegisterModeBadValue(self):
        self.env.expect('rg.pyexecute',
                        'GearsBuilder().register(mode=1)').error()
        self.env.expect('rg.pyexecute',
                        'GearsBuilder().register(mode="test")').error()

    def testRegisterPrefixBadValue(self):
        self.env.expect('rg.pyexecute',
                        'GearsBuilder().register(prefix=1)').error()

    def testStreamReaderBatchBadValue(self):
        self.env.expect(
            'rg.pyexecute',
            'GearsBuilder("StreamReader").register(batch="test")').error()

    def testStreamReaderDurationBadValue(self):
        self.env.expect(
            'rg.pyexecute',
            'GearsBuilder("StreamReader").register(duration="test")').error()

    def testStreamReaderOnFailedPolicyBadValue(self):
        self.env.expect(
            'rg.pyexecute',
            'GearsBuilder("StreamReader").register(onFailedPolicy="test")'
        ).error()
        self.env.expect(
            'rg.pyexecute',
            'GearsBuilder("StreamReader").register(onFailedPolicy=1)').error()

    def testStreamReaderOnFailedRetryIntervalBadValue(self):
        self.env.expect(
            'rg.pyexecute',
            'GearsBuilder("StreamReader").register(onFailedRetryInterval="test")'
        ).error()

    def testStreamReaderTrimStreamBadValue(self):
        self.env.expect(
            'rg.pyexecute',
            'GearsBuilder("StreamReader").register(trimStream="test")').error(
            )

    def testKeysReadeReadValueBadValue(self):
        # readValue on register() — counterpart of the run() case above.
        self.env.expect('rg.pyexecute',
                        'GearsBuilder().register(readValue=1)').error()

    def testKeysOnlyReadeBadCount(self):
        res = self.env.cmd(
            'rg.pyexecute',
            'GearsBuilder("KeysOnlyReader").run(count="noNunber")')
        self.env.assertContains('value is not an integer', res[1][0])

    def testKeysOnlyReadeBadPatternGenerator(self):
        res = self.env.cmd(
            'rg.pyexecute',
            'GearsBuilder("KeysOnlyReader").run(patternGenerator="adwaw")')
        self.env.assertContains('object is not callable', res[1][0])
Exemplo n.º 19
0
class testProcedures(FlowTestsBase):
    """Exercises RedisGraph's CALL procedure machinery: invocation with and
    without YIELD, argument validation, case-insensitive procedure lookup and
    the built-in db.* / dbms.* introspection procedures.

    NOTE(review): relies on module-level globals (GRAPH_ID, node1..node5,
    Graph, Edge, redis) defined elsewhere in this file — confirm against the
    full module when reviewing this class in isolation.
    """

    def __init__(self):
        self.env = Env(decodeResponses=True)
        # Rebind the module-level connection/graph globals so sibling helpers
        # operate on this environment's instance.
        global redis_con
        global redis_graph
        redis_con = self.env.getConnection()
        redis_graph = Graph(GRAPH_ID, redis_con)
        self.populate_graph()

    def populate_graph(self):
        # Graph already populated by an earlier run — keep it as-is.
        if redis_con.exists(GRAPH_ID):
            return

        edge = Edge(node1, 'goWellWith', node5)
        redis_graph.add_node(node1)
        redis_graph.add_node(node2)
        redis_graph.add_node(node3)
        redis_graph.add_node(node4)
        redis_graph.add_node(node5)
        redis_graph.add_edge(edge)
        redis_graph.commit()

        # Create full-text index.
        redis_graph.call_procedure("db.idx.fulltext.createNodeIndex", 'fruit', 'name')

    # Compares two nodes based on their properties.
    def _compareNodes(self, a, b):
        return a.properties == b.properties

    # Make sure given item is found within resultset.
    def _inResultSet(self, item, resultset):
        for i in range(len(resultset)):
            result = resultset[i][0]
            if self._compareNodes(item, result):
                return True
        return False

    # Issue query and validates resultset.
    # NOTE(review): this is an order-insensitive containment check — each
    # expected item only needs to appear somewhere in the actual result-set.
    # The mutable default query_params={} is safe here because it is never
    # mutated.
    def queryAndValidate(self, query, expected_results, query_params={}):
        actual_resultset = redis_graph.query(query, query_params).result_set
        self.env.assertEquals(len(actual_resultset), len(expected_results))
        for i in range(len(actual_resultset)):
            self.env.assertTrue(self._inResultSet(expected_results[i], actual_resultset))

    # Call procedure, omit yield, expecting all procedure outputs to
    # be included in result-set.
    def test01_no_yield(self):
        actual_result = redis_graph.call_procedure("db.idx.fulltext.queryNodes", "fruit", "Orange1")
        assert(len(actual_result.result_set) == 1)

        header = actual_result.header
        data = actual_result.result_set[0]
        assert(header[0][1] == 'node')
        assert(data[0] is not None)

    # Call procedure specify different outputs.
    def test02_yield(self):
        actual_result = redis_graph.call_procedure("db.idx.fulltext.queryNodes", "fruit", "Orange1", y=["node"])
        assert(len(actual_result.result_set) == 1)

        header = actual_result.header
        data = actual_result.result_set[0]
        assert(header[0][1] == 'node')
        assert(data[0] is not None)

        # Yield an unknown output.
        # Expect an error when trying to use an unknown procedure output.
        try:
            redis_graph.call_procedure("db.idx.fulltext.queryNodes", "fruit", "Orange1", y=["unknown"])
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass

        # Yield the same output multiple times.
        # Expect an error when trying to use the same output multiple times.
        try:
            redis_graph.call_procedure("db.idx.fulltext.queryNodes", "fruit", "Orange1", y=["node", "node"])
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass

    def test03_arguments(self):
        """Procedure arity validation: too few and too many arguments fail."""
        # Omit arguments.
        # Expect an error when trying to omit arguments.
        try:
            redis_graph.call_procedure("db.idx.fulltext.queryNodes")
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass

        # Omit arguments, queryNodes expecting 2 argument, provide 1.
        # Expect an error when trying to omit arguments.
        try:
            redis_graph.call_procedure("db.idx.fulltext.queryNodes", "arg1")
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass

        # Overload arguments.
        # Expect an error when trying to send too many arguments.
        try:
            redis_graph.call_procedure("db.idx.fulltext.queryNodes", "fruit", "query", "fruit", "query", y=["node"])
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass

    # Test procedure call while mixing a number of addition clauses.
    def test04_mix_clauses(self):
        query_params = {'prefix': 'Orange*'}
        # CALL + RETURN.

        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    RETURN node"""
        expected_results = [node4, node2, node3, node1]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN.
        # NOTE(review): CALL followed by WHERE historically produced a syntax
        # error in libcypher-parser; this case verifies the combination is now
        # accepted and filters as expected.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    """
        expected_results = [node3, node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + ORDER + SKIP.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    ORDER BY node.value
                    SKIP 1"""
        expected_results = [node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + LIMIT.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    LIMIT 2"""
        expected_results = [node3, node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + ORDER + SKIP + LIMIT.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    ORDER BY node.value
                    SKIP 1
                    LIMIT 1"""
        expected_results = [node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)

        # CALL + RETURN + ORDER.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    RETURN node
                    ORDER BY node.value
                    """
        expected_results = [node1, node2, node3, node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + RETURN + ORDER + SKIP.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    RETURN node
                    ORDER BY node.value
                    SKIP 1
                    """
        expected_results = [node2, node3, node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + RETURN + ORDER + LIMIT.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    RETURN node
                    ORDER BY node.value
                    LIMIT 2
                    """
        expected_results = [node1, node2]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + RETURN + ORDER + SKIP + LIMIT.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    RETURN node
                    ORDER BY node.value
                    SKIP 1
                    LIMIT 1
                    """
        expected_results = [node2]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + ORDER.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    ORDER BY node.value"""
        expected_results = [node3, node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + ORDER + SKIP.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    ORDER BY node.value
                    SKIP 1"""
        expected_results = [node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + ORDER + LIMIT.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    ORDER BY node.value
                    LIMIT 1"""
        expected_results = [node3]
        self.queryAndValidate(query, expected_results, query_params=query_params)


        # CALL + WHERE + RETURN + ORDER + SKIP + LIMIT.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
                    WHERE node.value > 2
                    RETURN node
                    ORDER BY node.value
                    SKIP 1
                    LIMIT 1"""
        expected_results = [node4]
        self.queryAndValidate(query, expected_results, query_params=query_params)

        # CALL + MATCH + RETURN.
        query = """CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node
            MATCH (node)-[]->(z)
            RETURN z"""
        expected_results = [node5]
        self.queryAndValidate(query, expected_results, query_params=query_params)

        # UNWIND + CALL + RETURN.
        query = """UNWIND([1,2]) AS x CALL db.idx.fulltext.queryNodes('fruit', $prefix) YIELD node RETURN node"""
        expected_results = [node4, node2, node3, node1, node4, node2, node3, node1]
        self.queryAndValidate(query, expected_results, query_params=query_params)

    def test05_procedure_labels(self):
        """db.labels reports the single label created by populate_graph."""
        actual_resultset = redis_graph.call_procedure("db.labels").result_set
        expected_results = [["fruit"]]
        self.env.assertEquals(actual_resultset, expected_results)

    def test06_procedure_relationshipTypes(self):
        """db.relationshipTypes reports the single relationship type."""
        actual_resultset = redis_graph.call_procedure("db.relationshipTypes").result_set
        expected_results = [["goWellWith"]]
        self.env.assertEquals(actual_resultset, expected_results)

    def test07_procedure_propertyKeys(self):
        """db.propertyKeys reports every property key seen by the graph."""
        actual_resultset = redis_graph.call_procedure("db.propertyKeys").result_set
        expected_results = [["name"], ["value"]]
        self.env.assertEquals(actual_resultset, expected_results)

    def test08_procedure_fulltext_syntax_error(self):
        """An invalid full-text query string must surface a ResponseError."""
        try:
            query = """CALL db.idx.fulltext.queryNodes('fruit', 'Orange || Apple') YIELD node RETURN node"""
            redis_graph.query(query)
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass

    def test09_procedure_lookup(self):
        """Procedure name lookup is case-insensitive; unknown names fail."""
        try:
            redis_graph.call_procedure("dB.LaBeLS")
        except redis.exceptions.ResponseError:
            # This should not cause an error
            self.env.assertFalse(1)
            pass

        try:
            # looking for a non existing procedure
            redis_graph.call_procedure("db.nonExistingProc")
            self.env.assertFalse(1)
        except redis.exceptions.ResponseError:
            # Expecting an error.
            pass

        try:
            redis_graph.call_procedure("db.IDX.FulLText.QueRyNoDes", "fruit", "or")
        except redis.exceptions.ResponseError:
            # This should not cause an error
            self.env.assertFalse(1)
            pass

    def test10_procedure_get_all_procedures(self):
        """dbms.procedures must list at least the known built-ins with their
        READ/WRITE mode; extra procedures are tolerated."""
        actual_resultset = redis_graph.call_procedure("dbms.procedures").result_set

        # The following two procedure are a part of the expected results
        expected_result = [["db.labels", "READ"], ["db.idx.fulltext.createNodeIndex", "WRITE"],
                           ["db.propertyKeys", "READ"], ["dbms.procedures", "READ"], ["db.relationshipTypes", "READ"],
                           ["algo.BFS", "READ"], ["algo.pageRank", "READ"], ["db.idx.fulltext.queryNodes", "READ"],
                           ["db.idx.fulltext.drop", "WRITE"]]
        for res in expected_result:
            self.env.assertContains(res, actual_resultset)

    def test11_procedure_indexes(self):
        """db.indexes reports full-text and exact-match indexes, including
        multiple index types coexisting on the same label."""
        # Verify that the full-text index is reported properly.
        actual_resultset = redis_graph.query("CALL db.indexes() YIELD type, label, properties").result_set
        expected_results = [["full-text", "fruit", ["name"]]]
        self.env.assertEquals(actual_resultset, expected_results)

        # Add an exact-match index to a different property on the same label..
        result = redis_graph.query("CREATE INDEX ON :fruit(other_property)")
        self.env.assertEquals(result.indices_created, 1)

        # Verify that all indexes are reported.
        actual_resultset = redis_graph.query("CALL db.indexes() YIELD type, label, properties RETURN type, label, properties ORDER BY type").result_set
        expected_results = [["exact-match", "fruit", ["other_property"]],
                            ["full-text", "fruit", ["name"]]]
        self.env.assertEquals(actual_resultset, expected_results)

        # Add an exact-match index to the full-text indexed property on the same label..
        result = redis_graph.query("CREATE INDEX ON :fruit(name)")
        self.env.assertEquals(result.indices_created, 1)

        # Verify that all indexes are reported.
        actual_resultset = redis_graph.query("CALL db.indexes() YIELD type, label, properties RETURN type, label, properties ORDER BY type").result_set
        expected_results = [["exact-match", "fruit", ["other_property", "name"]],
                            ["full-text", "fruit", ["name"]]]
        self.env.assertEquals(actual_resultset, expected_results)

        # Validate the results when yielding only one element.
        actual_resultset = redis_graph.query("CALL db.indexes() YIELD label").result_set
        expected_results = [["fruit"],
                            ["fruit"]]
        self.env.assertEquals(actual_resultset, expected_results)
Exemplo n.º 20
0
class testConfig(FlowTestsBase):
    """Exercises GRAPH.CONFIG GET/SET: reading single and all configurations,
    setting single and multiple values, rejecting invalid names/values, and
    restoring defaults.

    NOTE(review): tests are order-dependent — earlier tests mutate
    configuration that test08 restores; do not run methods in isolation.
    """

    def __init__(self):
        self.env = Env(decodeResponses=True)
        # Rebind module-level globals so all tests share one connection/graph.
        global redis_con
        global redis_graph
        redis_con = self.env.getConnection()
        redis_graph = Graph("config", redis_con)

    def test01_config_get(self):
        global redis_graph

        # Try reading 'QUERY_MEM_CAPACITY' from config
        config_name = "QUERY_MEM_CAPACITY"
        response = redis_con.execute_command("GRAPH.CONFIG GET " + config_name)
        expected_response = [config_name, 0] # capacity=QUERY_MEM_CAPACITY_UNLIMITED
        self.env.assertEqual(response, expected_response)

        # Try reading all configurations
        config_name = "*"
        response = redis_con.execute_command("GRAPH.CONFIG GET " + config_name)
        # At least 9 configurations should be reported
        self.env.assertGreaterEqual(len(response), 9)

    def test02_config_get_invalid_name(self):
        global redis_graph

        # Ensure that getter fails on invalid parameters appropriately
        fake_config_name = "FAKE_CONFIG_NAME"

        try:
            redis_con.execute_command("GRAPH.CONFIG GET " + fake_config_name)
            assert(False)
        except redis.exceptions.ResponseError as e:
            # Expecting an error.
            assert("Unknown configuration field" in str(e))
            pass

    def test03_config_set(self):
        global redis_graph

        config_name = "RESULTSET_SIZE"
        config_value = 3

        # Set configuration
        response = redis_con.execute_command("GRAPH.CONFIG SET %s %d" % (config_name, config_value))
        self.env.assertEqual(response, "OK")

        # Make sure config been updated.
        response = redis_con.execute_command("GRAPH.CONFIG GET " + config_name)
        expected_response = [config_name, config_value]
        self.env.assertEqual(response, expected_response)

        config_name = "QUERY_MEM_CAPACITY"
        config_value = 1<<20 # 1MB

        # Set configuration
        response = redis_con.execute_command("GRAPH.CONFIG SET %s %d" % (config_name, config_value))
        self.env.assertEqual(response, "OK")

        # Make sure config been updated.
        response = redis_con.execute_command("GRAPH.CONFIG GET " + config_name)
        expected_response = [config_name, config_value]
        self.env.assertEqual(response, expected_response)

    def test04_config_set_multi(self):
        # Set multiple configuration values
        response = redis_con.execute_command("GRAPH.CONFIG SET RESULTSET_SIZE 3 QUERY_MEM_CAPACITY 100")
        self.env.assertEqual(response, "OK")

        # Make sure both values been updated
        names = ["RESULTSET_SIZE", "QUERY_MEM_CAPACITY"]
        values = [3, 100]
        for name, val in zip(names, values):
            response = redis_con.execute_command("GRAPH.CONFIG GET %s" % name)
            expected_response = [name, val]
            self.env.assertEqual(response, expected_response)

    def test05_config_set_invalid_multi(self):
        """A multi-SET containing any invalid entry must be rejected
        atomically, leaving the whole configuration untouched."""
        # Get current configuration
        prev_conf = redis_con.execute_command("GRAPH.CONFIG GET *")

        try:
            # Set multiple configuration values, VKEY_MAX_ENTITY_COUNT is NOT
            # a runtime configuration, expecting this command to fail
            response = redis_con.execute_command("GRAPH.CONFIG SET QUERY_MEM_CAPACITY 150 VKEY_MAX_ENTITY_COUNT 40")
            assert(False)
        except redis.exceptions.ResponseError as e:
            # Expecting an error.
            assert("Field can not be re-configured" in str(e))

        try:
            # Set multiple configuration values, FAKE_CONFIG_NAME is NOT a valid
            # configuration, expecting this command to fail
            response = redis_con.execute_command("GRAPH.CONFIG SET QUERY_MEM_CAPACITY 150 FAKE_CONFIG_NAME 40")
            assert(False)
        except redis.exceptions.ResponseError as e:
            # Expecting an error.
            assert("Unknown configuration field" in str(e))

        try:
            # Set multiple configuration values, -1 is not a valid value for
            # MAX_QUEUED_QUERIES, expecting this command to fail
            response = redis_con.execute_command("GRAPH.CONFIG SET QUERY_MEM_CAPACITY 150 MAX_QUEUED_QUERIES -1")
            assert(False)
        except redis.exceptions.ResponseError as e:
            # Expecting an error.
            assert("Failed to set config value" in str(e))

        # make sure configuration wasn't modified
        current_conf = redis_con.execute_command("GRAPH.CONFIG GET *")
        self.env.assertEqual(prev_conf, current_conf)

    def test06_config_set_invalid_name(self):

        # Ensure that setter fails on unknown configuration field
        fake_config_name = "FAKE_CONFIG_NAME"

        try:
            redis_con.execute_command("GRAPH.CONFIG SET " + fake_config_name + " 5")
            assert(False)
        except redis.exceptions.ResponseError as e:
            # Expecting an error.
            assert("Unknown configuration field" in str(e))
            pass

    def test07_config_invalid_subcommand(self):

        # Ensure failure on invalid sub-command, e.g. GRAPH.CONFIG DREP...
        config_name = "RESULTSET_SIZE"
        try:
            response = redis_con.execute_command("GRAPH.CONFIG DREP " + config_name + " 3")
            assert(False)
        except redis.exceptions.ResponseError as e:
            assert("Unknown subcommand for GRAPH.CONFIG" in str(e))
            pass

    def test08_config_reset_to_defaults(self):
        """Restore defaults mutated by earlier tests and verify each setting
        takes effect (timeout abort, result-set truncation)."""
        # Revert memory limit to default
        response = redis_con.execute_command("GRAPH.CONFIG SET QUERY_MEM_CAPACITY 0")
        self.env.assertEqual(response, "OK")

        # Change timeout value from default
        response = redis_con.execute_command("GRAPH.CONFIG SET TIMEOUT 10")
        self.env.assertEqual(response, "OK")

        # Make sure config been updated.
        response = redis_con.execute_command("GRAPH.CONFIG GET TIMEOUT")
        expected_response = ["TIMEOUT", 10]
        self.env.assertEqual(response, expected_response)

        query = """UNWIND range(1,1000000) AS v RETURN COUNT(v)"""
        # Ensure long-running query triggers a timeout
        try:
            result = redis_graph.query(query)
            assert(False)
        except redis.exceptions.ResponseError as e:
            self.env.assertContains("Query timed out", str(e))

        # Revert timeout to unlimited
        response = redis_con.execute_command("GRAPH.CONFIG SET TIMEOUT 0")
        self.env.assertEqual(response, "OK")

        # Make sure config been updated.
        response = redis_con.execute_command("GRAPH.CONFIG GET TIMEOUT")
        expected_response = ["TIMEOUT", 0]
        self.env.assertEqual(response, expected_response)

        # Issue long-running query to validate the reconfiguration
        result = redis_graph.query(query)
        self.env.assertEqual(result.result_set[0][0], 1000000)

        # Change resultset_size from default
        response = redis_con.execute_command("GRAPH.CONFIG SET RESULTSET_SIZE 2")
        self.env.assertEqual(response, "OK")

        # Validate modified resultset_size
        result = redis_graph.query("UNWIND range(1, 10) AS v RETURN v")
        self.env.assertEqual(len(result.result_set), 2)

        # Revert resultset_size to unlimited with a negative argument
        response = redis_con.execute_command("GRAPH.CONFIG SET RESULTSET_SIZE -100")
        self.env.assertEqual(response, "OK")

        # Make sure resultset_size has been updated to unlimited.
        # NOTE(review): any negative input appears to normalize to -1 —
        # confirm against GRAPH.CONFIG documentation.
        response = redis_con.execute_command("GRAPH.CONFIG GET RESULTSET_SIZE")
        expected_response = ["RESULTSET_SIZE", -1]
        self.env.assertEqual(response, expected_response)

    def test09_set_invalid_values(self):
        # The run-time configurations supported by RedisGraph are:
        # MAX_QUEUED_QUERIES
        # TIMEOUT
        # QUERY_MEM_CAPACITY
        # DELTA_MAX_PENDING_CHANGES
        # RESULTSET_SIZE

        # Validate that attempting to set these configurations to
        # invalid values fails
        try:
            # MAX_QUEUED_QUERIES must be a positive value
            redis_con.execute_command("GRAPH.CONFIG SET MAX_QUEUED_QUERIES 0")
            assert(False)
        except redis.exceptions.ResponseError as e:
            assert("Failed to set config value MAX_QUEUED_QUERIES to 0" in str(e))
            pass

        # TIMEOUT, QUERY_MEM_CAPACITY, and DELTA_MAX_PENDING_CHANGES must be
        # non-negative values, 0 resets to default
        for config in ["TIMEOUT", "QUERY_MEM_CAPACITY", "DELTA_MAX_PENDING_CHANGES"]:
            try:
                redis_con.execute_command("GRAPH.CONFIG SET %s -1" % config)
                assert(False)
            except redis.exceptions.ResponseError as e:
                assert("Failed to set config value %s to -1" % config in str(e))
                pass

        # No configuration can be set to a string
        for config in ["MAX_QUEUED_QUERIES", "TIMEOUT", "QUERY_MEM_CAPACITY",
                       "DELTA_MAX_PENDING_CHANGES", "RESULTSET_SIZE"]:
            try:
                redis_con.execute_command("GRAPH.CONFIG SET %s invalid" % config)
                assert(False)
            except redis.exceptions.ResponseError as e:
                assert(("Failed to set config value %s to invalid" % config) in str(e))
Exemplo n.º 21
0
class testGraphCreationFlow(FlowTestsBase):
    """Flow tests for the CREATE clause: entity creation, property
    projection, NULL property handling and invalid property types.

    Tests are order-dependent: later tests rely on the entities created
    by earlier ones (all operate on the shared module-level graph).
    """

    def __init__(self):
        self.env = Env(decodeResponses=True)
        global redis_graph
        # Shared module-level graph handle used by every test in this class.
        redis_con = self.env.getConnection()
        redis_graph = Graph(GRAPH_ID, redis_con)

    def test01_create_return(self):
        """CREATE standalone nodes, then CREATE from a MATCH and RETURN the new entities."""
        query = """CREATE (a:person {name:'A'}), (b:person {name:'B'})"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.nodes_created, 2)

        # One (dest) node and one [e] edge are created per matched src
        # (2 matches); LIMIT applies to the result set, not to creation.
        query = """MATCH (src:person) CREATE (src)-[e:knows]->(dest {name:'C'}) RETURN src,e,dest ORDER BY ID(src) DESC LIMIT 1"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.relationships_created, 2)
        self.env.assertEquals(len(result.result_set), 1)
        self.env.assertEquals(result.result_set[0][0].properties['name'], 'B')

    def test02_create_from_prop(self):
        """Create nodes whose properties are computed from matched entities."""
        query = """MATCH (p:person)-[e:knows]->() CREATE (c:clone {doublename: p.name + toLower(p.name), source_of: TYPE(e)}) RETURN c.doublename, c.source_of ORDER BY c.doublename"""
        result = redis_graph.query(query)
        expected_result = [['Aa', 'knows'], ['Bb', 'knows']]

        self.env.assertEquals(result.labels_added, 1)
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.properties_set, 4)
        self.env.assertEquals(result.result_set, expected_result)

    def test03_create_from_projection(self):
        """Create nodes from UNWIND-projected scalar values."""
        query = """UNWIND [10,20,30] AS x CREATE (p:person {age:x}) RETURN p.age ORDER BY p.age"""
        result = redis_graph.query(query)
        expected_result = [[10], [20], [30]]
        self.env.assertEquals(result.nodes_created, 3)
        self.env.assertEquals(result.properties_set, 3)
        self.env.assertEquals(result.result_set, expected_result)

        query = """UNWIND ['Vancouver', 'Portland', 'Calgary'] AS city CREATE (p:person {birthplace: city}) RETURN p.birthplace ORDER BY p.birthplace"""
        result = redis_graph.query(query)
        expected_result = [['Calgary'], ['Portland'], ['Vancouver']]
        self.env.assertEquals(result.nodes_created, 3)
        self.env.assertEquals(result.properties_set, 3)
        self.env.assertEquals(result.result_set, expected_result)

    def test04_create_with_null_properties(self):
        """NULL-valued properties are dropped rather than stored."""
        query = """CREATE (a:L {v1: NULL, v2: 'prop'}) RETURN a"""
        result = redis_graph.query(query)
        node = Node(label="L", properties={"v2": "prop"})
        expected_result = [[node]]

        self.env.assertEquals(result.labels_added, 1)
        self.env.assertEquals(result.nodes_created, 1)
        # Only v2 is set; v1 is NULL and therefore skipped.
        self.env.assertEquals(result.properties_set, 1)
        self.env.assertEquals(result.result_set, expected_result)

        # Create 2 new nodes, one with no properties and one with a property 'v'
        query = """CREATE (:M), (:M {v: 1})"""
        redis_graph.query(query)

        # Verify that a MATCH...CREATE accesses the property correctly.
        # Only the node whose m.v is non-NULL contributes a property.
        query = """MATCH (m:M) WITH m ORDER BY m.v DESC CREATE ({v: m.v})"""
        result = redis_graph.query(query)
        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.properties_set, 1)

    def test05_create_with_property_reference(self):
        """Referencing a property of an entity created in the same clause is an error."""
        # Skip this test if running under Valgrind, as it causes a memory leak.
        if self.env.envRunner.debugger is not None:
            self.env.skip()

        # Queries that reference properties before they have been created should emit an error.
        try:
            query = """CREATE (a {val: 2}), (b {val: a.val})"""
            redis_graph.query(query)
            self.env.assertTrue(False)
        except redis.exceptions.ResponseError as e:
            self.env.assertIn("undefined property", str(e))

    def test06_create_project_volatile_value(self):
        """Volatile values (paths, aggregations) remain projectable after CREATE."""
        # The path e is volatile; verify that it can be projected after entity creation.
        query = """MATCH ()-[e*]->() CREATE (:L) WITH e RETURN 5"""
        result = redis_graph.query(query)
        expected_result = [[5], [5]]

        self.env.assertEquals(result.nodes_created, 2)
        self.env.assertEquals(result.result_set, expected_result)

        query = """UNWIND [1, 2] AS val WITH collect(val) AS arr CREATE (:L) RETURN arr"""
        result = redis_graph.query(query)
        expected_result = [[[1, 2]]]

        self.env.assertEquals(result.nodes_created, 1)
        self.env.assertEquals(result.result_set, expected_result)

    # Fail when a property is a complex type nested within an array type
    def test07_create_invalid_complex_type_in_array(self):
        """Complex types (NULL/map/node/edge/path) inside array properties are rejected."""
        # Test combinations of invalid types with nested and top-level arrays
        # Invalid types are NULL, maps, nodes, edges, and paths
        queries = ["CREATE (a), (b) SET a.v = [b]",
                   "CREATE (a {v: ['str', [1, NULL]]})",
                   "CREATE (a {v: [[{k: 'v'}]]})",
                   "CREATE (a:L {v: [e]})-[e:R]->(:L)"]
        for query in queries:
            try:
                redis_graph.query(query)
                self.env.assertTrue(False)
            except redis.exceptions.ResponseError as e:
                self.env.assertContains("Property values can only be of primitive types or arrays of primitive types", str(e))
Exemplo n.º 22
0
class testConcurrentQueryFlow(FlowTestsBase):
    """Concurrency tests driven by a pathos process Pool.

    CLIENT_COUNT connections/graph handles issue queries in parallel while
    another connection deletes, renames or replaces the graph key. The
    helpers run_concurrent / thread_run_query / query_* and the globals
    CLIENT_COUNT, GRAPH_ID, people are defined elsewhere in this file.
    """

    def __init__(self):
        # skip test if we're running under Valgrind
        if Env().envRunner.debugger is not None:
            Env().skip(
            )  # valgrind is not working correctly with multi processing

        self.env = Env(decodeResponses=True)
        global graphs
        # One Graph handle per simulated client, all on the same key.
        graphs = []
        for i in range(0, CLIENT_COUNT):
            redis_con = self.env.getConnection()
            graphs.append(Graph(GRAPH_ID, redis_con))
        self.populate_graph()

    def populate_graph(self):
        """(Re)build a fully connected 'person' graph via the first handle."""
        nodes = {}
        graph = graphs[0]

        # Create entities
        for p in people:
            node = Node(label="person", properties={"name": p})
            graph.add_node(node)
            nodes[p] = node

        # Fully connected graph
        for src in nodes:
            for dest in nodes:
                if src != dest:
                    edge = Edge(nodes[src], "know", nodes[dest])
                    graph.add_edge(edge)

        graph.commit()

    # Count number of nodes in the graph
    def test01_concurrent_aggregation(self):
        q = """MATCH (p:person) RETURN count(p)"""
        queries = [q] * CLIENT_COUNT
        run_concurrent(self.env, queries, query_aggregate)

    # Concurrently get neighbors of every node.
    def test02_retrieve_neighbors(self):
        q = """MATCH (p:person)-[know]->(n:person) RETURN n.name"""
        queries = [q] * CLIENT_COUNT
        run_concurrent(self.env, queries, query_neighbors)

    # Concurrent writes
    def test_03_concurrent_write(self):
        # Each client writes a distinct country node.
        queries = [
            """CREATE (c:country {id:"%d"})""" % i for i in range(CLIENT_COUNT)
        ]
        run_concurrent(self.env, queries, query_write)

    # Try to delete graph multiple times.
    def test_04_concurrent_delete(self):
        pool = Pool(nodes=CLIENT_COUNT)

        # invoke queries
        assertions = pool.map(delete_graph, graphs)

        # Exactly one thread should have successfully deleted the graph.
        self.env.assertEquals(assertions.count(True), 1)

    # Try to delete a graph while multiple queries are executing.
    def test_05_concurrent_read_delete(self):
        """Delete the graph (DEL / FLUSHALL / GRAPH.DELETE) mid-read;
        readers must still complete with correct results."""
        redis_con = self.env.getConnection()

        ##############################################################################################
        # Delete graph via Redis DEL key.
        ##############################################################################################
        self.populate_graph()
        pool = Pool(nodes=CLIENT_COUNT)

        # Long-running read: yields x in [900, 999] (integer division).
        q = """UNWIND (range(0, 10000)) AS x WITH x AS x WHERE (x / 900) = 1 RETURN x"""
        queries = [q] * CLIENT_COUNT
        # invoke queries
        m = pool.amap(thread_run_query, graphs, queries)

        redis_con.delete(GRAPH_ID)

        # wait for processes to return
        m.wait()

        # get the results
        result = m.get()

        # validate result.
        self.env.assertTrue(all([r.result_set[0][0] == 900 for r in result]))

        # Make sure Graph is empty, e.g. graph was deleted.
        resultset = graphs[0].query("MATCH (n) RETURN count(n)").result_set
        self.env.assertEquals(resultset[0][0], 0)

        ##############################################################################################
        # Delete graph via Redis FLUSHALL.
        ##############################################################################################
        self.populate_graph()
        q = """UNWIND (range(0, 10000)) AS x WITH x AS x WHERE (x / 900) = 1 RETURN x"""
        queries = [q] * CLIENT_COUNT
        # invoke queries
        m = pool.amap(thread_run_query, graphs, queries)

        redis_con.flushall()

        # wait for processes to return
        m.wait()

        # get the results
        result = m.get()

        # validate result.
        self.env.assertTrue(all([r.result_set[0][0] == 900 for r in result]))

        # Make sure Graph is empty, e.g. graph was deleted.
        resultset = graphs[0].query("MATCH (n) RETURN count(n)").result_set
        self.env.assertEquals(resultset[0][0], 0)

        ##############################################################################################
        # Delete graph via GRAPH.DELETE.
        ##############################################################################################
        self.populate_graph()
        q = """UNWIND (range(0, 10000)) AS x WITH x AS x WHERE (x / 900) = 1 RETURN x"""
        queries = [q] * CLIENT_COUNT
        # invoke queries
        m = pool.amap(thread_run_query, graphs, queries)

        graphs[-1].delete()

        # wait for processes to return
        m.wait()

        # get the results
        result = m.get()

        # validate result.
        self.env.assertTrue(all([r.result_set[0][0] == 900 for r in result]))

        # Make sure Graph is empty, e.g. graph was deleted.
        resultset = graphs[0].query("MATCH (n) RETURN count(n)").result_set
        self.env.assertEquals(resultset[0][0], 0)

    def test_06_concurrent_write_delete(self):
        """DEL the graph key while a heavy write runs; accept either a
        clean completion or one of the known open-key errors."""
        # Test setup - validate that graph exists and possible results are None
        graphs[0].query("MATCH (n) RETURN n")

        pool = Pool(nodes=1)
        redis_con = self.env.getConnection()
        heavy_write_query = """UNWIND(range(0,999999)) as x CREATE(n)"""
        writer = pool.apipe(thread_run_query, graphs[0], heavy_write_query)
        redis_con.delete(GRAPH_ID)
        writer.wait()
        possible_exceptions = [
            "Encountered different graph value when opened key " + GRAPH_ID,
            "Encountered an empty key when opened key " + GRAPH_ID
        ]
        result = writer.get()
        # thread_run_query returns an error string on failure, otherwise
        # a query result object - TODO confirm against its definition.
        if isinstance(result, str):
            self.env.assertContains(result, possible_exceptions)
        else:
            self.env.assertEquals(1000000, result.nodes_created)

    def test_07_concurrent_write_rename(self):
        """RENAME the graph key while a heavy write runs."""
        # Test setup - validate that graph exists and possible results are None
        graphs[0].query("MATCH (n) RETURN n")

        pool = Pool(nodes=1)
        redis_con = self.env.getConnection()
        new_graph = GRAPH_ID + "2"
        # Create new empty graph with id GRAPH_ID + "2"
        redis_con.execute_command("GRAPH.QUERY", new_graph,
                                  """MATCH (n) return n""", "--compact")
        heavy_write_query = """UNWIND(range(0,999999)) as x CREATE(n)"""
        writer = pool.apipe(thread_run_query, graphs[0], heavy_write_query)
        redis_con.rename(GRAPH_ID, new_graph)
        writer.wait()
        # Possible scenarios:
        # 1. Rename is done before query is sent. The name in the graph context is new_graph, so when upon commit, when trying to open new_graph key, it will encounter an empty key since new_graph is not a valid key.
        #    Note: As from https://github.com/RedisGraph/RedisGraph/pull/820 this may not be valid since the rename event handler might actually rename the graph key, before the query execution.
        # 2. Rename is done during query executing, so when commiting and comparing stored graph context name (GRAPH_ID) to the retrived value graph context name (new_graph), the identifiers are not the same, since new_graph value is now stored at GRAPH_ID value.

        possible_exceptions = [
            "Encountered different graph value when opened key " + GRAPH_ID,
            "Encountered an empty key when opened key " + new_graph
        ]

        result = writer.get()
        if isinstance(result, str):
            self.env.assertContains(result, possible_exceptions)
        else:
            self.env.assertEquals(1000000, result.nodes_created)

    def test_08_concurrent_write_replace(self):
        """SET the graph key to a plain string while a heavy write runs."""
        # Test setup - validate that graph exists and possible results are None
        graphs[0].query("MATCH (n) RETURN n")

        pool = Pool(nodes=1)
        redis_con = self.env.getConnection()
        heavy_write_query = """UNWIND(range(0,999999)) as x CREATE(n)"""
        writer = pool.apipe(thread_run_query, graphs[0], heavy_write_query)
        set_result = redis_con.set(GRAPH_ID, "1")
        writer.wait()
        possible_exceptions = [
            "Encountered a non-graph value type when opened key " + GRAPH_ID,
            "WRONGTYPE Operation against a key holding the wrong kind of value"
        ]

        result = writer.get()
        if isinstance(result, str):
            # If the SET command attempted to execute while the CREATE query was running,
            # an exception should have been issued.
            self.env.assertContains(result, possible_exceptions)
        else:
            # Otherwise, both the CREATE query and the SET command should have succeeded.
            self.env.assertEquals(1000000, result.nodes_created)
            self.env.assertEquals(set_result, True)

    def test_09_concurrent_multiple_readers_after_big_write(self):
        """Many readers traverse a freshly written 1000-edge graph (issue #890)."""
        # Test issue #890
        redis_graphs = []
        for i in range(0, CLIENT_COUNT):
            redis_con = self.env.getConnection()
            redis_graphs.append(Graph("G890", redis_con))
        redis_graphs[0].query(
            """UNWIND(range(0,999)) as x CREATE()-[:R]->()""")
        read_query = """MATCH (n)-[r:R]->(m) RETURN n, r, m"""

        queries = [read_query] * CLIENT_COUNT
        pool = Pool(nodes=CLIENT_COUNT)

        # invoke queries
        m = pool.amap(thread_run_query, redis_graphs, queries)

        # wait for processes to return
        m.wait()

        # get the results
        result = m.get()

        for i in range(CLIENT_COUNT):
            if isinstance(result[i], str):
                # A string result is an error message; none is expected here.
                self.env.assertIsNone(result[i])
            else:
                self.env.assertEquals(1000, len(result[i].result_set))
Exemplo n.º 23
0
class testConcurrentQueryFlow(FlowTestsBase):
    """Concurrency tests driven by Python threads.

    CLIENT_COUNT threads issue queries against per-client Graph handles
    (all bound to the same GRAPH_ID key) while another connection deletes,
    renames or replaces the graph key. The worker functions
    (query_aggregate, query_neighbors, query_write, delete_graph,
    thread_run_query) and the globals assertions/exceptions they report
    into are defined elsewhere in this file.
    """

    def __init__(self):
        self.env = Env()
        global graphs
        # One Graph handle per simulated client, all on the same key.
        graphs = []
        for i in range(0, CLIENT_COUNT):
            redis_con = self.env.getConnection()
            graphs.append(Graph(GRAPH_ID, redis_con))
        self.populate_graph()

    def populate_graph(self):
        """(Re)build a fully connected 'person' graph via the first handle."""
        nodes = {}
        graph = graphs[0]

        # Create entities
        for p in people:
            node = Node(label="person", properties={"name": p})
            graph.add_node(node)
            nodes[p] = node

        # Fully connected graph
        for src in nodes:
            for dest in nodes:
                if src != dest:
                    edge = Edge(nodes[src], "know", nodes[dest])
                    graph.add_edge(edge)

        graph.commit()

    # Count number of nodes in the graph
    def test01_concurrent_aggregation(self):
        q = """MATCH (p:person) RETURN count(p)"""
        threads = []
        for i in range(CLIENT_COUNT):
            graph = graphs[i]
            t = threading.Thread(target=query_aggregate, args=(graph, q, i))
            # Thread.setDaemon() is deprecated since Python 3.10;
            # assign the daemon attribute directly instead.
            t.daemon = True
            threads.append(t)
            t.start()

        # Wait for threads to return.
        for i in range(CLIENT_COUNT):
            t = threads[i]
            t.join()
            self.env.assertTrue(assertions[i])

    # Concurrently get neighbors of every node.
    def test02_retrieve_neighbors(self):
        q = """MATCH (p:person)-[know]->(n:person) RETURN n.name"""
        threads = []
        for i in range(CLIENT_COUNT):
            graph = graphs[i]
            t = threading.Thread(target=query_neighbors, args=(graph, q, i))
            t.daemon = True
            threads.append(t)
            t.start()

        # Wait for threads to return.
        for i in range(CLIENT_COUNT):
            t = threads[i]
            t.join()
            self.env.assertTrue(assertions[i])

    # Concurrent writes
    def test_03_concurrent_write(self):
        # Each client writes a distinct country node.
        threads = []
        for i in range(CLIENT_COUNT):
            graph = graphs[i]
            q = """CREATE (c:country {id:"%d"})""" % i
            t = threading.Thread(target=query_write, args=(graph, q, i))
            t.daemon = True
            threads.append(t)
            t.start()

        # Wait for threads to return.
        for i in range(CLIENT_COUNT):
            t = threads[i]
            t.join()
            self.env.assertTrue(assertions[i])

    # Try to delete graph multiple times.
    def test_04_concurrent_delete(self):
        threads = []
        for i in range(CLIENT_COUNT):
            graph = graphs[i]
            t = threading.Thread(target=delete_graph, args=(graph, i))
            t.daemon = True
            threads.append(t)
            t.start()

        # Wait for threads to return.
        for i in range(CLIENT_COUNT):
            t = threads[i]
            t.join()

        # Exactly one thread should have successfully deleted the graph.
        self.env.assertEquals(assertions.count(True), 1)

    # Try to delete a graph while multiple queries are executing.
    def test_05_concurrent_read_delete(self):
        """Delete the graph (DEL / FLUSHALL / GRAPH.DELETE) mid-read;
        readers must still complete with correct results."""
        redis_con = self.env.getConnection()

        ##############################################################################################
        # Delete graph via Redis DEL key.
        ##############################################################################################
        self.populate_graph()
        # Long-running read: yields x in [900, 999] (integer division).
        q = """UNWIND (range(0, 10000)) AS x WITH x AS x WHERE (x / 900) = 1 RETURN x"""
        threads = []
        for i in range(CLIENT_COUNT):
            graph = graphs[i]
            t = threading.Thread(target=thread_run_query, args=(graph, q, i))
            t.daemon = True
            threads.append(t)
            t.start()

        redis_con.delete(GRAPH_ID)

        # Wait for threads to return.
        for i in range(CLIENT_COUNT):
            t = threads[i]
            t.join()
            self.env.assertEquals(assertions[i].result_set[0][0], 900)

        # Make sure Graph is empty, e.g. graph was deleted.
        resultset = graphs[0].query("MATCH (n) RETURN count(n)").result_set
        self.env.assertEquals(resultset[0][0], 0)

        ##############################################################################################
        # Delete graph via Redis FLUSHALL.
        ##############################################################################################
        self.populate_graph()
        q = """UNWIND (range(0, 10000)) AS x WITH x AS x WHERE (x / 900) = 1 RETURN x"""
        threads = []
        for i in range(CLIENT_COUNT):
            graph = graphs[i]
            t = threading.Thread(target=thread_run_query, args=(graph, q, i))
            t.daemon = True
            threads.append(t)
            t.start()

        redis_con.flushall()

        # Wait for threads to return.
        for i in range(CLIENT_COUNT):
            t = threads[i]
            t.join()
            self.env.assertEquals(assertions[i].result_set[0][0], 900)

        # Make sure Graph is empty, e.g. graph was deleted.
        resultset = graphs[0].query("MATCH (n) RETURN count(n)").result_set
        self.env.assertEquals(resultset[0][0], 0)

        ##############################################################################################
        # Delete graph via GRAPH.DELETE.
        ##############################################################################################
        self.populate_graph()
        q = """UNWIND (range(0, 10000)) AS x WITH x AS x WHERE (x / 900) = 1 RETURN x"""
        threads = []
        for i in range(CLIENT_COUNT):
            graph = graphs[i]
            t = threading.Thread(target=thread_run_query, args=(graph, q, i))
            t.daemon = True
            threads.append(t)
            t.start()

        # Delete via the last handle (previously relied on the stale loop
        # variable `i` leaking from the for-loop above; same handle, explicit).
        graphs[-1].delete()

        # Wait for threads to return.
        for i in range(CLIENT_COUNT):
            t = threads[i]
            t.join()
            self.env.assertEquals(assertions[i].result_set[0][0], 900)

        # Make sure Graph is empty, e.g. graph was deleted.
        resultset = graphs[0].query("MATCH (n) RETURN count(n)").result_set
        self.env.assertEquals(resultset[0][0], 0)

    def test_06_concurrent_write_delete(self):
        """DEL the graph key while a heavy write runs; accept either a
        clean completion or the known empty-key error."""
        # Test setup - validate that graph exists and possible results are None
        graphs[0].query("MATCH (n) RETURN n")
        assertions[0] = None
        exceptions[0] = None

        redis_con = self.env.getConnection()
        heavy_write_query = """UNWIND(range(0,999999)) as x CREATE(n)"""
        writer = threading.Thread(target=thread_run_query,
                                  args=(graphs[0], heavy_write_query, 0))
        writer.daemon = True
        writer.start()
        redis_con.delete(GRAPH_ID)
        writer.join()
        if exceptions[0] is not None:
            self.env.assertEquals(
                exceptions[0],
                "Encountered an empty key when opened key " + GRAPH_ID)
        else:
            self.env.assertEquals(1000000, assertions[0].nodes_created)

    def test_07_concurrent_write_rename(self):
        """RENAME the empty graph key onto GRAPH_ID while a heavy write runs."""
        # Test setup - validate that graph exists and possible results are None
        graphs[0].query("MATCH (n) RETURN n")
        assertions[0] = None
        exceptions[0] = None

        redis_con = self.env.getConnection()
        new_graph = GRAPH_ID + "2"
        # Create new empty graph with id GRAPH_ID + "2"
        redis_con.execute_command("GRAPH.QUERY", new_graph,
                                  """MATCH (n) return n""", "--compact")
        heavy_write_query = """UNWIND(range(0,999999)) as x CREATE(n)"""
        writer = threading.Thread(target=thread_run_query,
                                  args=(graphs[0], heavy_write_query, 0))
        writer.daemon = True
        writer.start()
        # NOTE(review): this renames new_graph onto GRAPH_ID (the opposite
        # direction from the pool-based variant of this test) - confirm
        # the intended direction against the scenarios described below.
        redis_con.rename(new_graph, GRAPH_ID)
        writer.join()
        # Possible scenarios:
        # 1. Rename is done before query is sent. The name in the graph context is new_graph, so when upon commit, when trying to open new_graph key, it will encounter an empty key since new_graph is not a valid key.
        #    Note: As from https://github.com/RedisGraph/RedisGraph/pull/820 this may not be valid since the rename event handler might actually rename the graph key, before the query execution.
        # 2. Rename is done during query executing, so when commiting and comparing stored graph context name (GRAPH_ID) to the retrived value graph context name (new_graph), the identifiers are not the same, since new_graph value is now stored at GRAPH_ID value.

        possible_exceptions = [
            "Encountered different graph value when opened key " + GRAPH_ID,
            "Encountered an empty key when opened key " + new_graph
        ]
        if exceptions[0] is not None:
            self.env.assertContains(exceptions[0], possible_exceptions)
        else:
            self.env.assertEquals(1000000, assertions[0].nodes_created)

    def test_08_concurrent_write_replace(self):
        """SET the graph key to a plain string while a heavy write runs."""
        # Test setup - validate that graph exists and possible results are None
        graphs[0].query("MATCH (n) RETURN n")
        assertions[0] = None
        exceptions[0] = None

        redis_con = self.env.getConnection()
        heavy_write_query = """UNWIND(range(0,999999)) as x CREATE(n)"""
        writer = threading.Thread(target=thread_run_query,
                                  args=(graphs[0], heavy_write_query, 0))
        writer.daemon = True
        writer.start()
        set_result = redis_con.set(GRAPH_ID, "1")
        writer.join()
        if exceptions[0] is not None:
            # If the SET command attempted to execute while the CREATE query was running,
            # an exception should have been issued.
            self.env.assertEquals(
                exceptions[0],
                "Encountered a non-graph value type when opened key " +
                GRAPH_ID)
        else:
            # Otherwise, both the CREATE query and the SET command should have succeeded.
            self.env.assertEquals(1000000, assertions[0].nodes_created)
            self.env.assertEquals(set_result, True)

    def test_09_concurrent_multiple_readers_after_big_write(self):
        """Many readers traverse a freshly written 1000-edge graph (issue #890)."""
        # Test issue #890
        global assertions
        global exceptions
        redis_con = self.env.getConnection()
        redis_graph = Graph("G890", redis_con)
        redis_graph.query("""UNWIND(range(0,999)) as x CREATE()-[:R]->()""")
        read_query = """MATCH (n)-[r:R]->(m) RETURN n, r, m"""
        assertions = [True] * CLIENT_COUNT
        exceptions = [None] * CLIENT_COUNT
        threads = []
        for i in range(CLIENT_COUNT):
            t = threading.Thread(target=thread_run_query,
                                 args=(redis_graph, read_query, i))
            t.daemon = True
            threads.append(t)
            t.start()

        for i in range(CLIENT_COUNT):
            t = threads[i]
            t.join()

        for i in range(CLIENT_COUNT):
            self.env.assertIsNone(exceptions[i])
            self.env.assertEquals(1000, len(assertions[i].result_set))
Exemplo n.º 24
0
class testEntityUpdate(FlowTestsBase):
    """Flow tests for SET/MERGE property updates on a single node.

    Tests are strictly order-dependent: each one mutates the one shared
    node created in __init__ and relies on the state left by its
    predecessors.
    """

    def __init__(self):
        global graph
        self.env = Env(decodeResponses=True)
        graph = Graph('update', self.env.getConnection())

        # create a single node with attribute 'v'
        graph.query("CREATE ({v:1})")

    def test01_update_attribute(self):
        """Overwriting an existing attribute counts as one property set."""
        # update existing attribute 'v'
        result = graph.query("MATCH (n) SET n.v = 2")
        self.env.assertEqual(result.properties_set, 1)

    def test02_update_none_existing_attr(self):
        """Adding a brand-new attribute counts as one property set."""
        # introduce a new attribute 'x'
        result = graph.query("MATCH (n) SET n.x = 1")
        self.env.assertEqual(result.properties_set, 1)

    def test03_update_no_change(self):
        """Setting an attribute to its current value is not counted."""
        # setting 'x' to its current value
        result = graph.query("MATCH (n) SET n.x = 1")
        self.env.assertEqual(result.properties_set, 0)

        # setting both 'v' and 'x' to their current values
        result = graph.query("MATCH (n) SET n.v = 2, n.x = 1")
        self.env.assertEqual(result.properties_set, 0)

        # update 'v' to a new value, 'x' remains the same
        result = graph.query("MATCH (n) SET n.v = 1, n.x = 1")
        self.env.assertEqual(result.properties_set, 1)

        # update 'x' to a new value, 'v' remains the same
        result = graph.query("MATCH (n) SET n.v = 1, n.x = 2")
        self.env.assertEqual(result.properties_set, 1)

    def test04_update_remove_attribute(self):
        """Setting an attribute to NULL removes it (counted as one set)."""
        # remove the 'x' attribute
        result = graph.query("MATCH (n) SET n.x = NULL")
        self.env.assertEqual(result.properties_set, 1)

    def test05_update_from_projection(self):
        """SET may consume values projected by UNWIND."""
        result = graph.query(
            "MATCH (n) UNWIND ['Calgary'] as city_name SET n.name = city_name RETURN n.v, n.name"
        )
        expected_result = [[1, 'Calgary']]
        self.env.assertEqual(result.properties_set, 1)
        self.env.assertEqual(result.result_set, expected_result)

    # Set the entity's properties to an empty map
    def test06_replace_property_map(self):
        empty_node = Node()
        result = graph.query("MATCH (n) SET n = {} RETURN n")
        expected_result = [[empty_node]]
        # The node originally had 2 properties, 'name' and 'city_name'
        self.env.assertEqual(result.properties_set, 2)
        self.env.assertEqual(result.result_set, expected_result)

    # Update the entity's properties by setting a specific property and merging property maps
    def test07_update_property_map(self):
        node = Node(properties={"v": 1, "v2": 2})
        result = graph.query("MATCH (n) SET n.v = 1, n += {v2: 2} RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.properties_set, 2)
        self.env.assertEqual(result.result_set, expected_result)

    # Replacement maps overwrite existing properties and previous SETs but do not modify subsequent non-replacement SETs
    def test08_multiple_updates_to_property_map(self):
        node = Node(properties={"v": 1, "v2": 2, "v4": 4})
        result = graph.query(
            "MATCH (n) SET n.v3 = 3, n = {v: 1}, n += {v2: 2}, n.v4 = 4 RETURN n"
        )
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

    # MERGE updates should support the same operations as SET updates
    def test09_merge_update_map(self):
        node = Node(properties={"v": 5})
        result = graph.query(
            "MERGE (n {v: 1}) ON MATCH SET n = {}, n.v = 5 RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

    # Update properties with a map retrieved by alias
    def test10_property_map_from_identifier(self):
        # Overwrite existing properties
        node = Node(properties={"v2": 10})
        result = graph.query(
            "WITH {v2: 10} as props MATCH (n) SET n = props RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

        # Merge property maps
        node = Node(properties={"v1": True, "v2": 10})
        result = graph.query(
            "WITH {v1: True} as props MATCH (n) SET n += props RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

    # Update properties with a map retrieved from a parameter
    def test11_property_map_from_parameter(self):
        # Overwrite existing properties
        node = Node(properties={"v2": 10})
        result = graph.query(
            "CYPHER props={v2: 10} MATCH (n) SET n = $props RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

        # Merge property maps
        node = Node(properties={"v1": True, "v2": 10})
        result = graph.query(
            "CYPHER props={v1: true} MATCH (n) SET n += $props RETURN n")
        expected_result = [[node]]
        self.env.assertEqual(result.result_set, expected_result)

    # Fail update an entity property when left hand side is not alias
    def test12_fail_update_property_of_non_alias_enetity(self):
        try:
            graph.query("MATCH P=() SET nodes(P).prop = 1 RETURN nodes(P)")
            self.env.assertTrue(False)
        except ResponseError as e:
            self.env.assertContains(
                "RedisGraph does not currently support non-alias references on the left-hand side of SET expressions",
                str(e))
Exemplo n.º 25
0
class testMultiLabel():
    def __init__(self):
        """Start the test environment and (re)build the shared fixture graph."""
        self.env = Env(decodeResponses=True)
        self.redis_con = self.env.getConnection()
        # The module-level `graph` handle is shared by all tests in this class.
        global graph
        graph = Graph(GRAPH_ID, self.redis_con)
        self.populate_graph()

    def populate_graph(self):
        """Create the fixture path:
        (v1:L0:L1)-[:E]->(v2:L1)-[:E]->(v3:L1:L2)
        """
        creation = "CREATE (v1:L0:L1 {v: 1})-[:E]->(v2:L1 {v: 2})-[:E]->(v3:L1:L2 {v: 3})"
        graph.query(creation)

    # Validate basic multi-label scans.
    def test01_multilabel_scan(self):
        """Label scans must honor every label on a node, in any written order."""
        v1_labels = [[['L0', 'L1']]]
        cases = [
            # Only v1 carries :L0.
            ("MATCH (a:L0) RETURN LABELS(a)", v1_labels),
            # Conjunction of v1's labels, in both orders.
            ("MATCH (a:L0:L1) RETURN LABELS(a)", v1_labels),
            ("MATCH (a:L1:L0) RETURN LABELS(a)", v1_labels),
            # :L1 is present on all three fixture nodes.
            ("MATCH (a:L1) RETURN LABELS(a) ORDER BY LABELS(a)",
             [[['L0', 'L1']], [['L1']], [['L1', 'L2']]]),
            # No node carries both :L0 and :L2 — empty result.
            ("MATCH (a:L0:L2) RETURN LABELS(a)", []),
        ]
        for cypher, expected in cases:
            actual = graph.query(cypher)
            self.env.assertEquals(actual.result_set, expected)

    # Validate basic multi-label traversals.
    def test02_multilabel_traversal(self):
        """Traversals with multi-labeled sources all reach the same edge.

        Fixture: (v1:L0:L1)-[:E]->(v2:L1)-[:E]->(v3:L1:L2)
        """
        expected = [[['L0', 'L1'], ['L1']]]
        for cypher in (
            "MATCH (a:L0:L1)-[]->(b) RETURN LABELS(a), LABELS(b)",
            "MATCH (a:L0:L1)-[:E]->(b) RETURN LABELS(a), LABELS(b)",
            "MATCH (a:L0:L1)-[:E]->(b:L1) RETURN LABELS(a), LABELS(b)",
            "MATCH (b:L1) WITH (b) MATCH (a:L0:L1)-[:E]->(b) RETURN LABELS(a), LABELS(b)",
            "MATCH (b:L1) WITH (b) MATCH (a:L0)-[:E]->(b) RETURN LABELS(a), LABELS(b)",
        ):
            self.env.assertEquals(graph.query(cypher).result_set, expected)

    # Validate that the graph properly handles label counts greater than its default.
    def test03_large_label_count(self):
        """Nodes may carry more labels than the graph's default capacity."""
        # 18 labels (L10..L27) — presumably enough to force internal resizes.
        many_labels = ['L' + str(x) for x in range(10, 28)]
        cypher = "CREATE (n :" + ':'.join(many_labels) + ") RETURN LABELS(n)"
        actual = graph.query(cypher)
        self.env.assertEquals(actual.result_set, [[many_labels]])

    def test04_label_scan_optimization(self):
        """The planner should always scan the least-populated label (:A) first."""
        # Populate labels with increasing cardinality: |A| < |B| < |C|.
        for setup in ("UNWIND range(0, 10) AS x CREATE (:A)",
                      "UNWIND range(0, 100) AS x CREATE (:B)",
                      "UNWIND range(0, 1000) AS x CREATE (:C)"):
            graph.query(setup)

        templates = [
            "MERGE (n:{ls}) RETURN n", "MATCH (n:{ls}) RETURN n",
            "MATCH (n:{ls})-[e:R]->(m) RETURN n",
            "MATCH (n:{ls})<-[e:R]-(m) RETURN n",
            "MATCH (n:{ls})-[e:R]-(m) RETURN n",
            "MATCH (n:{ls}) WHERE n.v = 1 RETURN n",
            "MATCH (n:{ls})-[e:R]->(m) WHERE n.v = 1 RETURN n",
            "MATCH (n:{ls})<-[e:R]-(m) WHERE n.v = 1 RETURN n",
            "MATCH (n:{ls})-[e:R]-(m) WHERE n.v = 1 RETURN n",
            "MATCH (a) WITH a AS a MATCH (n:{ls}) RETURN n",
            "CREATE (a) WITH a AS a MATCH (n:{ls}) RETURN n",
            "MERGE (a) WITH a AS a MATCH (n:{ls}) RETURN n"
        ]

        import itertools
        # Whatever order the labels are written in, :A must be chosen.
        for ordering in itertools.permutations(['A', 'B', 'C']):
            label_seq = ':'.join(ordering)
            for template in templates:
                plan = graph.execution_plan(template.format(ls=label_seq))
                self.env.assertContains("Node By Label Scan | (n:A)", plan)

    # Validate behavior of index scans on multi-labeled nodes
    def test05_index_scan(self):
        """Index scans remain usable when the indexed label is one of several."""
        result = graph.query("""CREATE INDEX ON :L1(v)""")
        self.env.assertEquals(result.indices_created, 1)

        # Single-label query should go through the new index.
        single = """MATCH (a:L1) WHERE a.v > 0 RETURN a.v ORDER BY a.v"""
        self.env.assertContains("Index Scan", graph.execution_plan(single))
        self.env.assertEquals(graph.query(single).result_set, [[1], [2], [3]])

        # Multi-label queries (either label order) should also use the index;
        # only v3 carries both :L1 and :L2.
        for cypher in (
            "MATCH (a:L1:L2) WHERE a.v > 0 RETURN a.v ORDER BY a.v",
            "MATCH (a:L2:L1) WHERE a.v > 0 RETURN a.v ORDER BY a.v"
        ):
            self.env.assertContains("Index Scan", graph.execution_plan(cypher))
            self.env.assertEquals(graph.query(cypher).result_set, [[3]])

    # Validate the creation of multi-labeled nodes with the MERGE clause
    def test06_multi_label_merge(self):
        """MERGE creates a multi-labeled node once, then matches it."""
        cypher = """MERGE (a:L2:L3 {v: 4}) RETURN labels(a)"""
        expected = [[["L2", "L3"]]]

        # First run: the node and the previously-unseen :L3 label are created.
        first = graph.query(cypher)
        self.env.assertEquals(first.nodes_created, 1)
        self.env.assertEquals(first.labels_added, 1)
        self.env.assertEquals(first.result_set, expected)

        # Second run: MERGE matches the existing node; nothing is created.
        second = graph.query(cypher)
        self.env.assertEquals(second.nodes_created, 0)
        self.env.assertEquals(second.labels_added, 0)
        self.env.assertEquals(second.result_set, expected)

    # Validate that OPTIONAL MATCH enforces multi-label constraints
    def test07_multi_label_optional_match(self):
        """OPTIONAL MATCH must respect multi-label constraints."""
        # Multi-label destination: only v2 has a neighbor carrying :L2 and :L1.
        cypher = """MATCH (a:L1) OPTIONAL MATCH (a)-[]->(b:L2:L1) RETURN labels(a) AS la, labels(b) AS lb ORDER BY la, lb"""
        expected = [[["L0", "L1"], None], [["L1"], ["L1", "L2"]],
                    [["L1", "L2"], None]]
        self.env.assertEquals(graph.query(cypher).result_set, expected)

        # Extra label constraint on the source inside the OPTIONAL MATCH.
        cypher = """MATCH (a:L0) OPTIONAL MATCH (a:L1)-[]->(b:L1) RETURN labels(a) AS la, labels(b) AS lb ORDER BY la, lb"""
        expected = [[["L0", "L1"], ["L1"]]]
        self.env.assertEquals(graph.query(cypher).result_set, expected)

    # Validate multi-labeled sources and destinations in variable-length traversals
    def test08_multi_label_variable_length_traversal(self):
        """Variable-length paths with multi-labeled endpoints all match v1->v3."""
        expected = [[["L0", "L1"], ["L1", "L2"]]]
        # NOTE(review): the third query repeats the second verbatim in the
        # original test; kept as-is to preserve behavior exactly.
        for cypher in (
            """MATCH (a {v: 1})-[*]->(b:L1:L2 {v: 3}) RETURN labels(a), labels(b)""",
            """MATCH (a:L0 {v: 1})-[*]->(b:L1:L2 {v: 3}) RETURN labels(a), labels(b)""",
            """MATCH (a:L0 {v: 1})-[*]->(b:L1:L2 {v: 3}) RETURN labels(a), labels(b)""",
            """MATCH (a:L0:L1 {v: 1})-[*]->(b:L2 {v: 3}) RETURN labels(a), labels(b)""",
            """MATCH (a:L0:L1 {v: 1})-[*]->(b {v: 3}) RETURN labels(a), labels(b)""",
            """MATCH (a:L0)-[*]->(b:L1:L2 {v: 3}) RETURN labels(a), labels(b)""",
            """MATCH (a:L0:L1)-[*]->(b:L2 {v: 3}) RETURN labels(a), labels(b)""",
            """MATCH (a:L0:L1)-[*]->(b {v: 3}) RETURN labels(a), labels(b)""",
        ):
            self.env.assertEquals(graph.query(cypher).result_set, expected)

    def test09_test_query_graph_populate_nodes_labels(self):
        graph = Graph('G', self.redis_con)

        # create node with label L1 for the test in the next query
        # we need to make sure we replace the starting point of the traversal
        # from all nodes with label L1 to all nodes with label L2
        query = """CREATE (a:L1 {v:0})-[:R1]->()"""
        query_result = graph.query(query)
        self.env.assertEquals(query_result.labels_added, 1)
        self.env.assertEquals(query_result.nodes_created, 2)
        self.env.assertEquals(query_result.relationships_created, 1)

        # node 'a' is mentioned twice in the following pattern
        # each time with a different label, when extracting a sub query-graph
        # we need to make sure all labels mentioned in the extracted pattern
        # are extracted.
        query = """MERGE ()-[:R2]->(a:L1)-[:R1]->(a:L2) RETURN *"""
        plan = graph.execution_plan(query)
        self.env.assertContains("Node By Label Scan | (a:L2)", plan)
        query_result = graph.query(query)
        self.env.assertEquals(query_result.nodes_created, 2)
        self.env.assertEquals(query_result.relationships_created, 2)