def test07_transposed_multi_hop(self):
    """Variable-length traversal that needs transposed matrices.

    Builds (a)-[R]->(b)-[R]->(c)<-[R]-(d)<-[R]-(e) and matches 2-hop
    paths converging on (and diverging from) the shared node (c).
    """
    redis_con = self.env.getConnection()
    g = Graph("tran_multi_hop", redis_con)

    # Create one node per value, addressable by that value.
    vertices = {}
    for val in ('a', 'b', 'c', 'd', 'e'):
        vertices[val] = Node(properties={"val": val})
        g.add_node(vertices[val])

    # (a)-[R]->(b)-[R]->(c)<-[R]-(d)<-[R]-(e)
    for src, dest in (('a', 'b'), ('b', 'c'), ('e', 'd'), ('d', 'c')):
        g.add_edge(Edge(vertices[src], "R", vertices[dest]))
    g.flush()

    q = """MATCH (a)-[*2]->(b)<-[*2]-(c) RETURN a.val, b.val, c.val ORDER BY a.val, b.val, c.val"""
    actual_result = g.query(q)
    expected_result = [['a', 'c', 'a'],
                       ['a', 'c', 'e'],
                       ['e', 'c', 'a'],
                       ['e', 'c', 'e']]
    self.env.assertEquals(actual_result.result_set, expected_result)
def test14_post_deletion_traversal_directions(self):
    """Traversals must keep working in both directions after a deletion."""
    self.env.flush()
    redis_con = self.env.getConnection()
    redis_graph = Graph("G", redis_con)

    # Build (Src)-[R]->(Dest)<-[R]-(Src2); labels double as lookup keys.
    nodes = {}
    for idx, label in enumerate(["Dest", "Src", "Src2"]):
        node = Node(label=label, properties={"val": idx})
        redis_graph.add_node(node)
        nodes[label] = node
    redis_graph.add_edge(Edge(nodes["Src"], "R", nodes["Dest"]))
    redis_graph.add_edge(Edge(nodes["Src2"], "R", nodes["Dest"]))
    redis_graph.commit()

    # Delete a node.
    actual_result = redis_graph.query("""MATCH (n:Src2) DELETE n""")
    self.env.assertEquals(actual_result.nodes_deleted, 1)
    self.env.assertEquals(actual_result.relationships_deleted, 1)

    expected_result = [[1]]

    query = """MATCH (n1:Src)-[*]->(n2:Dest) RETURN COUNT(*)"""
    actual_result = redis_graph.query(query)
    self.env.assertEquals(actual_result.result_set, expected_result)

    # Perform the same traversal, this time traveling from destination to source.
    query = """MATCH (n1:Src)-[*]->(n2:Dest {val: 0}) RETURN COUNT(*)"""
    actual_result = redis_graph.query(query)
    self.env.assertEquals(actual_result.result_set, expected_result)
def create_simple(r: Redis):
    """Build and return the 'simple' graph: a 5-node chain with
    alternating r0/r1 edges, each entity's 'name' matching its label/type.
    """
    g = Graph('simple', r)

    # v0 .. v4, each labeled and named after its index.
    vertices = []
    for i in range(5):
        name = 'v%d' % i
        vertex = Node(label=name, properties={'name': name})
        g.add_node(vertex)
        vertices.append(vertex)

    # Chain them: v0-[r0]->v1-[r1]->v2-[r0]->v3-[r1]->v4.
    for i in range(4):
        rel = 'r%d' % (i % 2)
        g.add_edge(Edge(vertices[i], rel, vertices[i + 1], properties={'name': rel}))

    g.commit()
    return g
def test04_repeated_edges(self):
    """Parallel edges of the same type must survive an RDB save/reload."""
    graphname = "repeated_edges"
    g = Graph(graphname, redis_con)

    src = Node(label='p', properties={'name': 'src'})
    dest = Node(label='p', properties={'name': 'dest'})
    g.add_node(src)
    g.add_node(dest)

    # Two parallel 'e' edges distinguished only by their 'val' property.
    edges = [Edge(src, 'e', dest, properties={'val': v}) for v in (1, 2)]
    for edge in edges:
        g.add_edge(edge)
    g.commit()

    # Verify the new edge
    q = """MATCH (a)-[e]->(b) RETURN e.val, a.name, b.name ORDER BY e.val"""
    expected_result = [[edge.properties['val'],
                        src.properties['name'],
                        dest.properties['name']] for edge in edges]
    actual_result = g.query(q)
    assert (actual_result.result_set == expected_result)

    # Save RDB & Load from RDB
    redis_con.execute_command("DEBUG", "RELOAD")

    # Verify that the latest edge was properly saved and loaded
    actual_result = g.query(q)
    assert (actual_result.result_set == expected_result)
def test15_update_deleted_entities(self):
    """SET on entities deleted within the same query must set nothing."""
    self.env.flush()
    redis_con = self.env.getConnection()
    redis_graph = Graph("delete_test", redis_con)

    # Minimal graph: ()-[:R]->()
    src = Node()
    dest = Node()
    redis_graph.add_node(src)
    redis_graph.add_node(dest)
    redis_graph.add_edge(Edge(src, "R", dest))
    redis_graph.flush()

    # Attempt to update entities after deleting them.
    query = """MATCH (a)-[e]->(b) DELETE a, b SET a.v = 1, e.v = 2, b.v = 3"""
    actual_result = redis_graph.query(query)
    self.env.assertEquals(actual_result.nodes_deleted, 2)
    self.env.assertEquals(actual_result.relationships_deleted, 1)
    # No properties should be set.
    # (Note that this behavior is left unspecified by Cypher.)
    self.env.assertEquals(actual_result.properties_set, 0)

    # Validate that the graph is empty.
    actual_result = redis_graph.query("""MATCH (a) RETURN a""")
    self.env.assertEquals(actual_result.result_set, [])
def test11_bidirectional_multiple_edge_type(self):
    """Bidirectional pattern with alternate relationship types."""
    # Construct a simple graph:
    # (a)-[E1]->(b), (c)-[E2]->(d)
    g = Graph("multi_edge_type", redis_con)

    nodes = {}
    for val in ('a', 'b', 'c', 'd'):
        nodes[val] = Node(properties={'val': val})
        g.add_node(nodes[val])
    g.add_edge(Edge(nodes['a'], "E1", nodes['b']))
    g.add_edge(Edge(nodes['c'], "E2", nodes['d']))
    g.flush()

    query = """MATCH (a)-[:E1|:E2]-(z) RETURN a.val, z.val ORDER BY a.val, z.val"""
    actual_result = g.query(query)
    # Each pair appears twice — once per traversal direction.
    expected_result = [['a', 'b'], ['b', 'a'], ['c', 'd'], ['d', 'c']]
    self.env.assertEquals(actual_result.result_set, expected_result)
def populate_graph(self, graph_name):
    """Build (or reuse) a graph of people and countries joined by 'visit' edges.

    A few nodes are deleted afterwards to introduce deleted items within
    the datablock, and composite indices are created on both labels.

    Fix: the original early-return executed `return redis_graph` before
    `redis_graph` was assigned; because the name is assigned later in the
    function it is a local, so the early path raised UnboundLocalError
    whenever the graph key already existed. The Graph wrapper is now
    constructed before the existence check.

    :param graph_name: Redis key under which the graph is stored.
    :returns: the Graph client object for `graph_name`.
    """
    redis_graph = Graph(graph_name, redis_con)

    # quick return if graph already exists
    if redis_con.exists(graph_name):
        return redis_graph

    people = ["Roi", "Alon", "Ailon", "Boaz", "Tal", "Omri", "Ori"]
    visits = [("Roi", "USA"), ("Alon", "Israel"),
              ("Ailon", "Japan"), ("Boaz", "United Kingdom")]
    countries = ["Israel", "USA", "Japan", "United Kingdom"]
    personNodes = {}
    countryNodes = {}

    # create nodes
    for p in people:
        person = Node(label="person", properties={
            "name": p,
            "height": random.randint(160, 200)
        })
        redis_graph.add_node(person)
        personNodes[p] = person

    for c in countries:
        country = Node(label="country", properties={
            "name": c,
            "population": random.randint(100, 400)
        })
        redis_graph.add_node(country)
        countryNodes[c] = country

    # create edges
    for person_name, country_name in visits:
        edge = Edge(personNodes[person_name], 'visit',
                    countryNodes[country_name],
                    properties={'purpose': 'pleasure'})
        redis_graph.add_edge(edge)
    redis_graph.commit()

    # delete nodes, to introduce deleted item within our datablock
    query = """MATCH (n:person) WHERE n.name = 'Roi' or n.name = 'Ailon' DELETE n"""
    redis_graph.query(query)
    query = """MATCH (n:country) WHERE n.name = 'USA' DELETE n"""
    redis_graph.query(query)

    # create indices (results intentionally ignored)
    redis_con.execute_command(
        "GRAPH.QUERY", graph_name, "CREATE INDEX ON :person(name, height)")
    redis_con.execute_command(
        "GRAPH.QUERY", graph_name, "CREATE INDEX ON :country(name, population)")

    return redis_graph
def test_CRUD_replication(self):
    """CRUD operations and index creation on the master must replicate.

    Creates a small graph plus an index, mutates it, then verifies the
    replica's execution plan and query results match the master's.
    """
    # create a simple graph
    env = self.env
    source_con = env.getConnection()
    replica_con = env.getSlaveConnection()

    # enable write commands on slave, required as all RedisGraph
    # commands are registered as write commands
    replica_con.config_set("slave-read-only", "no")

    # perform CRUD operations
    # create a simple graph
    graph = Graph(GRAPH_ID, source_con)
    replica = Graph(GRAPH_ID, replica_con)
    node_a = Node(label='L', properties={'id': 0, 'name': 'a'})
    node_b = Node(label='L', properties={'id': 1, 'name': 'b'})
    graph.add_node(node_a)
    graph.add_node(node_b)
    graph.add_edge(Edge(node_a, 'R', node_b))
    graph.flush()

    # create index
    graph.query("CREATE INDEX ON :L(id)")

    # update entity
    graph.query("MATCH (n:L {id:0}) SET n.id = 2, n.name = 'c'")

    # delete entity
    graph.query("MATCH (n:L {id:1}) DELETE n")

    # give replica some time to catch up
    time.sleep(1)

    # make sure index is available on replica
    q = "MATCH (s:L {id:2}) RETURN s.name"
    plan = graph.execution_plan(q)
    replica_plan = replica.execution_plan(q)
    env.assertIn("Index Scan", plan)
    self.env.assertEquals(replica_plan, plan)

    # issue query on both source and replica
    # make sure results are the same
    result = graph.query(q).result_set
    replica_result = replica.query(q).result_set
    self.env.assertEquals(replica_result, result)
def populate_dense_graph(self, dense_graph_name):
    """Return a graph of 10 'n'-labeled nodes with one 'connected' edge
    between every ordered pair (later -> earlier).

    The graph is only built when the key does not already exist.
    """
    dense_graph = Graph(dense_graph_name, redis_con)
    if not redis_con.exists(dense_graph_name):
        nodes = []
        for i in range(10):
            node = Node(label="n", properties={"val": i})
            dense_graph.add_node(node)
            nodes.append(node)

        # Connect every node to all nodes created before it.
        for n_idx, n in enumerate(nodes):
            for m in nodes[:n_idx]:
                dense_graph.add_edge(Edge(n, "connected", m))

        dense_graph.flush()
    return dense_graph
def test_stringify_query_result(self):
    """Validate str() of nodes/edges, both locally built and query-returned."""
    redis_graph = Graph('stringify', self.r)

    john = Node(alias='a', label='person', properties={
        'name': 'John Doe',
        'age': 33,
        'gender': 'male',
        'status': 'single'
    })
    japan = Node(alias='b', label='country', properties={'name': 'Japan'})
    edge = Edge(john, 'visited', japan, properties={'purpose': 'pleasure'})
    redis_graph.add_node(john)
    redis_graph.add_node(japan)
    redis_graph.add_edge(edge)

    # Local entities stringify with their aliases.
    john_str = """(a:person{age:33,gender:"male",name:"John Doe",status:"single"})"""
    japan_str = """(b:country{name:"Japan"})"""
    self.assertEqual(str(john), john_str)
    self.assertEqual(
        str(edge),
        john_str + """-[:visited{purpose:"pleasure"}]->""" + japan_str)
    self.assertEqual(str(japan), japan_str)

    redis_graph.commit()

    # Entities returned by a query stringify without aliases.
    query = """MATCH (p:person)-[v:visited {purpose:"pleasure"}]->(c:country) RETURN p, v, c"""
    person, visit, country = redis_graph.query(query).result_set[0]
    self.assertEqual(
        str(person),
        """(:person{age:33,gender:"male",name:"John Doe",status:"single"})""")
    self.assertEqual(str(visit), """()-[:visited{purpose:"pleasure"}]->()""")
    self.assertEqual(str(country), """(:country{name:"Japan"})""")

    redis_graph.delete()
def create_friends(r: Redis):
    """Build and return the 'friends' graph:
    Adam -[FRIEND]-> Pernilla -[FRIEND]-> David.
    """
    g = Graph('friends', r)

    adam = Node(label='User', properties={'name': 'Adam', 'mail': 'f****r'})
    pernilla = Node(label='User', properties={'name': 'Pernilla'})
    david = Node(label='User', properties={'name': 'David'})
    for user in (adam, pernilla, david):
        g.add_node(user)

    g.add_edge(Edge(adam, 'FRIEND', pernilla))
    g.add_edge(Edge(pernilla, 'FRIEND', david))

    g.commit()
    return g
def populate_acyclic_graph(self):
    """(Re)build the module-global acyclic graph."""
    global acyclic_graph
    acyclic_graph = Graph("G", redis_con)

    # Construct a graph with the form:
    # (v1)-[:E]->(v2)-[:E]->(v3)
    nodes = []
    for v in ('v1', 'v2', 'v3'):
        node = Node(label="L", properties={"val": v})
        nodes.append(node)
        acyclic_graph.add_node(node)

    acyclic_graph.add_edge(Edge(nodes[0], "E", nodes[1]))
    acyclic_graph.add_edge(Edge(nodes[1], "E", nodes[2]))
    acyclic_graph.commit()
def test13_delete_path_elements(self):
    """Deleting entities projected out of a path expression must work."""
    self.env.flush()
    redis_con = self.env.getConnection()
    redis_graph = Graph("delete_test", redis_con)

    # Minimal graph: ()-[:R]->()
    src = Node()
    dest = Node()
    redis_graph.add_node(src)
    redis_graph.add_node(dest)
    redis_graph.add_edge(Edge(src, "R", dest))
    redis_graph.flush()

    # Delete projected
    # Unwind path nodes.
    query = """MATCH p = (src)-[e]->(dest) WITH nodes(p)[0] AS node, relationships(p)[0] as edge DELETE node, edge"""
    actual_result = redis_graph.query(query)
    self.env.assertEquals(actual_result.nodes_deleted, 1)
    self.env.assertEquals(actual_result.relationships_deleted, 1)
def main():
    """Populate the 'social' demo graph and print visits grouped by purpose."""
    r = redis.Redis(decode_responses=True)
    r.delete('social')
    redis_graph = Graph('social', r)

    # (person properties, country name, visit purpose) triples.
    visits = [
        ({'name': 'John Doe', 'age': 33, 'gender': 'male', 'status': 'single'},
         'Japan', 'pleasure'),
        ({'name': 'Pearl White', 'age': 25, 'gender': 'female', 'status': 'married'},
         'Australia', 'business'),
        ({'name': 'Mary Mueller', 'age': 45, 'gender': 'divers', 'status': 'divers'},
         'Germany', 'business'),
    ]
    for person_props, country_name, purpose in visits:
        person = Node(label='person', properties=person_props)
        redis_graph.add_node(person)
        country = Node(label='country', properties={'name': country_name})
        redis_graph.add_node(country)
        redis_graph.add_edge(
            Edge(person, 'visited', country, properties={'purpose': purpose}))

    redis_graph.commit()

    # Report each purpose's visitors.
    for i in ['pleasure', 'business']:
        print('==== Purpose: {} ===='.format(i))
        query = '''MATCH (p:person)-[v:visited {{purpose:"{}"}}]->(c:country) RETURN p.name, p.age, p.status, c.name'''.format(i)
        result = redis_graph.query(query)
        print_res(result)
def test_graph_creation(self):
    """End-to-end: build a graph, query it back, and round-trip scalars."""
    redis_graph = Graph('social', self.r)

    john = Node(label='person', properties={
        'name': 'John Doe',
        'age': 33,
        'gender': 'male',
        'status': 'single'
    })
    japan = Node(label='country', properties={'name': 'Japan'})
    edge = Edge(john, 'visited', japan, properties={'purpose': 'pleasure'})
    redis_graph.add_node(john)
    redis_graph.add_node(japan)
    redis_graph.add_edge(edge)
    redis_graph.commit()

    query = (
        'MATCH (p:person)-[v:visited {purpose:"pleasure"}]->(c:country) '
        'RETURN p, v, c')
    person, visit, country = redis_graph.query(query).result_set[0]
    self.assertEqual(person, john)
    self.assertEqual(visit.properties, edge.properties)
    self.assertEqual(country, japan)

    # Scalars of every supported type should round-trip intact.
    query = """RETURN [1, 2.3, "4", true, false, null]"""
    result = redis_graph.query(query)
    self.assertEqual([1, 2.3, "4", True, False, None],
                     result.result_set[0][0])

    # All done, remove graph.
    redis_graph.delete()
def worstcase(r: Redis):
    """Build and return graph 'simple_2': an A-cycle v0->v1->v2->v0 plus a
    B two-cycle between v0 and v3 (v4 stays isolated).
    """
    g = Graph('simple_2', r)

    vertices = []
    for i in range(5):
        v = Node(properties={'name': 'v%d' % i})
        g.add_node(v)
        vertices.append(v)

    # (src index, relation type, dest index, edge name) — names skip 'r4'.
    edge_specs = [
        (0, 'A', 1, 'r1'),
        (1, 'A', 2, 'r2'),
        (2, 'A', 0, 'r3'),
        (0, 'B', 3, 'r5'),
        (3, 'B', 0, 'r6'),
    ]
    for src, rel, dst, name in edge_specs:
        g.add_edge(Edge(vertices[src], rel, vertices[dst],
                        properties={'name': name}))

    g.commit()
    return g
def test_optional_match(self):
    """OPTIONAL MATCH should pad unmatched columns with NULLs."""
    redis_graph = Graph('optional', self.r)

    # Build a graph of form (a)-[R]->(b)
    node0 = Node(node_id=0, label="L1", properties={'value': 'a'})
    node1 = Node(node_id=1, label="L1", properties={'value': 'b'})
    edge01 = Edge(node0, "R", node1, edge_id=0)
    redis_graph.add_node(node0)
    redis_graph.add_node(node1)
    redis_graph.add_edge(edge01)
    redis_graph.flush()

    # Issue a query that collects all outgoing edges from both nodes (the second has none).
    query = """MATCH (a) OPTIONAL MATCH (a)-[e]->(b) RETURN a, e, b ORDER BY a.value"""
    expected_results = [[node0, edge01, node1],
                        [node1, None, None]]
    self.assertEqual(expected_results, redis_graph.query(query).result_set)

    redis_graph.delete()
def test_path(self):
    """Paths returned by the server must equal locally-built Path objects."""
    redis_graph = Graph('social', self.r)

    node0 = Node(node_id=0, label="L1")
    node1 = Node(node_id=1, label="L1")
    edge01 = Edge(node0, "R1", node1, edge_id=0, properties={'value': 1})
    redis_graph.add_node(node0)
    redis_graph.add_node(node1)
    redis_graph.add_edge(edge01)
    redis_graph.flush()

    # Build the expected path by hand: node0 -[edge01]-> node1.
    expected_path = (Path.new_empty_path()
                     .add_node(node0)
                     .add_edge(edge01)
                     .add_node(node1))

    query = "MATCH p=(:L1)-[:R1]->(:L1) RETURN p ORDER BY p"
    result = redis_graph.query(query)
    self.assertEqual([[expected_path]], result.result_set)

    # All done, remove graph.
    redis_graph.delete()
def test04_repeated_edges(self):
    """Parallel same-type edges must survive dump-and-reload for both
    plain and hash-tagged graph keys.
    """
    graph_names = ["repeated_edges", "{tag}_repeated_edges"]
    for graph_name in graph_names:
        graph = Graph(graph_name, redis_con)

        src = Node(label='p', properties={'name': 'src'})
        dest = Node(label='p', properties={'name': 'dest'})
        graph.add_node(src)
        graph.add_node(dest)

        # Two parallel 'e' edges distinguished only by 'val'.
        edges = [Edge(src, 'e', dest, properties={'val': v}) for v in (1, 2)]
        for edge in edges:
            graph.add_edge(edge)
        graph.flush()

        # Verify the new edge
        q = """MATCH (a)-[e]->(b) RETURN e.val, a.name, b.name ORDER BY e.val"""
        expected_result = [[edge.properties['val'],
                            src.properties['name'],
                            dest.properties['name']] for edge in edges]
        actual_result = graph.query(q)
        self.env.assertEquals(actual_result.result_set, expected_result)

        # Save RDB & Load from RDB
        self.env.dumpAndReload()

        # Verify that the latest edge was properly saved and loaded
        actual_result = graph.query(q)
        self.env.assertEquals(actual_result.result_set, expected_result)
def test():
    """Demo: build the 'social' graph, query it, and walk the result set."""
    r = redis.Redis(host='localhost', port=6379)
    redis_graph = Graph('social', r)

    john = Node(label='person', properties={
        'name': 'John Doe',
        'age': 33,
        'gender': 'male',
        'status': 'single'
    })
    japan = Node(label='country', properties={'name': 'Japan'})
    redis_graph.add_node(john)
    redis_graph.add_node(japan)
    redis_graph.add_edge(
        Edge(john, 'visited', japan, properties={'purpose': 'pleasure'}))
    redis_graph.commit()

    query = """MATCH (p:person)-[v:visited {purpose:"pleasure"}]->(c:country) RETURN p.name, p.age, v.purpose, c.name"""
    result = redis_graph.query(query)

    # Print resultset
    result.pretty_print()

    # Iterate through resultset, skip header row at position 0
    for record in result.result_set[1:]:
        person_name, person_age, visit_purpose, country_name = record
def dump_data(n_records):
    """Load n_records generated file-activity records into the
    'file_activity1' graph: one 'file' node per record, one edge per
    recorded download/execute/remove activity.
    """
    r = redis.Redis(host='localhost', port=6379)
    data = data_gen(n_records)
    redis_graph = Graph('file_activity1', r)
    nodes = {}
    pprint(data)

    # First pass: one 'file' node per record, keyed by record id.
    for rec in data:
        file_node = Node(label='file', properties={
            'fid': rec['_id'],
            'name': rec['name'],
            'date_added': rec['date_added'],
            'platform': rec['platform']
        })
        # Remember the node alias under the record id for later lookups.
        r.set(rec['_id'], file_node.alias)
        redis_graph.add_node(file_node)
        nodes[rec['_id']] = file_node

    # Second pass: one edge per activity entry; the three activity kinds
    # only differ in the record field name and the edge label.
    activity_specs = (('downloaded', 'DOWNLOADED'),
                      ('executed', 'EXECUTED'),
                      ('removed', 'REMOVED'))
    for rec in data:
        for activity, rel_label in activity_specs:
            for fileid, time_stamp in rec[activity]:
                redis_graph.add_edge(
                    Edge(nodes[rec['_id']], rel_label, nodes[fileid],
                         properties={
                             'time': time_stamp,
                             'activity': activity
                         }))

    redis_graph.commit()
    print("Graph created")
def populate_cyclic_graph(self):
    """(Re)build the module-global cyclic graph."""
    global graph_with_cycle
    graph_with_cycle = Graph("H", redis_con)

    # Construct a graph with the form:
    # (v1)-[:E]->(v2)-[:E]->(v3), (v2)-[:E]->(v1)
    nodes = []
    for v in ('v1', 'v2', 'v3'):
        node = Node(label="L", properties={"val": v})
        nodes.append(node)
        graph_with_cycle.add_node(node)

    # Edge (1, 0) introduces the cycle between v2 and v1.
    for src, dest in ((0, 1), (1, 2), (1, 0)):
        graph_with_cycle.add_edge(Edge(nodes[src], "E", nodes[dest]))

    graph_with_cycle.commit()
class testConcurrentQueryFlow(FlowTestsBase):
    """Exercise RedisGraph under concurrent clients: parallel reads and
    writes, plus races between a long-running query and key deletion,
    rename, and replacement.

    NOTE(review): timing-sensitive — several tests rely on a concurrent
    DEL/RENAME/SET landing while a heavy query runs; do not restructure
    the operation order.
    """

    def __init__(self):
        self.env = Env(decodeResponses=True)
        # skip test if we're running under Valgrind
        if self.env.envRunner.debugger is not None:
            self.env.skip()  # valgrind is not working correctly with multi processing
        self.conn = self.env.getConnection()
        self.graph = Graph(GRAPH_ID, self.conn)
        self.populate_graph()

    def populate_graph(self):
        """Build a fully-connected 'know' graph over the module-level `people`."""
        nodes = {}
        # Create entities
        for p in people:
            node = Node(label="person", properties={"name": p})
            self.graph.add_node(node)
            nodes[p] = node
        # Fully connected graph
        for src in nodes:
            for dest in nodes:
                if src != dest:
                    edge = Edge(nodes[src], "know", nodes[dest])
                    self.graph.add_edge(edge)
        self.graph.commit()

    # Count number of nodes in the graph
    def test01_concurrent_aggregation(self):
        """CLIENT_COUNT clients run the same count query concurrently."""
        q = """MATCH (p:person) RETURN count(p)"""
        queries = [q] * CLIENT_COUNT
        results = run_concurrent(queries, thread_run_query)
        for result in results:
            person_count = result["result_set"][0][0]
            self.env.assertEqual(person_count, len(people))

    # Concurrently get neighbors of every node.
    def test02_retrieve_neighbors(self):
        """Each concurrent client retrieves all neighbor names."""
        q = """MATCH (p:person)-[know]->(n:person) RETURN n.name"""
        queries = [q] * CLIENT_COUNT
        results = run_concurrent(queries, thread_run_query)
        # Fully connected graph + header row.
        expected_resultset_size = len(people) * (len(people) - 1)
        for result in results:
            self.env.assertEqual(len(result["result_set"]),
                                 expected_resultset_size)

    # Concurrent writes
    def test_03_concurrent_write(self):
        """Each client creates one distinct node; all writes must land."""
        queries = [
            """CREATE (c:country {id:"%d"})""" % i
            for i in range(CLIENT_COUNT)
        ]
        results = run_concurrent(queries, thread_run_query)
        for result in results:
            self.env.assertEqual(result["nodes_created"], 1)
            self.env.assertEqual(result["properties_set"], 1)

    # Try to delete graph multiple times.
    def test_04_concurrent_delete(self):
        pool = Pool(nodes=CLIENT_COUNT)
        # invoke queries
        assertions = pool.map(delete_graph, [GRAPH_ID] * CLIENT_COUNT)
        # Exactly one thread should have successfully deleted the graph.
        self.env.assertEquals(assertions.count(True), 1)

    # Try to delete a graph while multiple queries are executing.
    def test_05_concurrent_read_delete(self):
        """Delete the graph three different ways (DEL, FLUSHALL,
        GRAPH.DELETE) while CLIENT_COUNT readers are mid-query; readers
        must still complete with correct results.
        """
        ##############################################################################################
        # Delete graph via Redis DEL key.
        ##############################################################################################
        self.populate_graph()
        pool = Pool(nodes=CLIENT_COUNT)
        manager = pathos_multiprocess.Manager()
        # Barrier synchronizes all workers so their queries overlap the delete.
        barrier = manager.Barrier(CLIENT_COUNT)
        barriers = [barrier] * CLIENT_COUNT

        q = """UNWIND (range(0, 10000)) AS x WITH x AS x WHERE (x / 900) = 1 RETURN x"""
        queries = [q] * CLIENT_COUNT

        # invoke queries
        m = pool.amap(thread_run_query, queries, barriers)

        self.conn.delete(GRAPH_ID)

        # wait for processes to return
        m.wait()

        # get the results
        results = m.get()

        # validate result.
        self.env.assertTrue(
            all([r["result_set"][0][0] == 900 for r in results]))

        # Make sure Graph is empty, e.g. graph was deleted.
        resultset = self.graph.query("MATCH (n) RETURN count(n)").result_set
        self.env.assertEquals(resultset[0][0], 0)

        ##############################################################################################
        # Delete graph via Redis FLUSHALL.
        ##############################################################################################
        self.populate_graph()
        q = """UNWIND (range(0, 10000)) AS x WITH x AS x WHERE (x / 900) = 1 RETURN x"""
        queries = [q] * CLIENT_COUNT
        barrier = manager.Barrier(CLIENT_COUNT)
        barriers = [barrier] * CLIENT_COUNT

        # invoke queries
        m = pool.amap(thread_run_query, queries, barriers)

        self.conn.flushall()

        # wait for processes to return
        m.wait()

        # get the results
        results = m.get()

        # validate result.
        self.env.assertTrue(
            all([r["result_set"][0][0] == 900 for r in results]))

        # Make sure Graph is empty, e.g. graph was deleted.
        resultset = self.graph.query("MATCH (n) RETURN count(n)").result_set
        self.env.assertEquals(resultset[0][0], 0)

        ##############################################################################################
        # Delete graph via GRAPH.DELETE.
        ##############################################################################################
        self.populate_graph()
        q = """UNWIND (range(0, 10000)) AS x WITH x AS x WHERE (x / 900) = 1 RETURN x"""
        queries = [q] * CLIENT_COUNT
        barrier = manager.Barrier(CLIENT_COUNT)
        barriers = [barrier] * CLIENT_COUNT

        # invoke queries
        m = pool.amap(thread_run_query, queries, barriers)

        self.graph.delete()

        # wait for processes to return
        m.wait()

        # get the results
        results = m.get()

        # validate result.
        self.env.assertTrue(
            all([r["result_set"][0][0] == 900 for r in results]))

        # Make sure Graph is empty, e.g. graph was deleted.
        resultset = self.graph.query("MATCH (n) RETURN count(n)").result_set
        self.env.assertEquals(resultset[0][0], 0)

    def test_06_concurrent_write_delete(self):
        """Race a heavy write query against DEL of the graph key; either
        the query finishes (1M nodes created) or fails with a known error.
        """
        # Test setup - validate that graph exists and possible results are None
        self.graph.query("MATCH (n) RETURN n")

        pool = Pool(nodes=1)
        heavy_write_query = """UNWIND(range(0,999999)) as x CREATE(n) RETURN count(n)"""
        writer = pool.apipe(thread_run_query, heavy_write_query, None)
        self.conn.delete(GRAPH_ID)
        writer.wait()
        possible_exceptions = [
            "Encountered different graph value when opened key " + GRAPH_ID,
            "Encountered an empty key when opened key " + GRAPH_ID
        ]
        result = writer.get()
        if isinstance(result, str):
            self.env.assertContains(result, possible_exceptions)
        else:
            self.env.assertEquals(1000000, result["result_set"][0][0])

    def test_07_concurrent_write_rename(self):
        """Race a heavy write query against RENAME of the graph key."""
        # Test setup - validate that graph exists and possible results are None
        self.graph.query("MATCH (n) RETURN n")
        pool = Pool(nodes=1)
        new_graph = GRAPH_ID + "2"
        # Create new empty graph with id GRAPH_ID + "2"
        self.conn.execute_command("GRAPH.QUERY", new_graph,
                                  """MATCH (n) return n""", "--compact")
        heavy_write_query = """UNWIND(range(0,999999)) as x CREATE(n) RETURN count(n)"""
        writer = pool.apipe(thread_run_query, heavy_write_query, None)
        self.conn.rename(GRAPH_ID, new_graph)
        writer.wait()
        # Possible scenarios:
        # 1. Rename is done before query is sent. The name in the graph context is new_graph,
        #    so when upon commit, when trying to open new_graph key, it will encounter an empty key
        #    since new_graph is not a valid key.
        #    Note: As from https://github.com/RedisGraph/RedisGraph/pull/820 this may not be valid
        #    since the rename event handler might actually rename the graph key, before the query execution.
        # 2. Rename is done during query executing, so when commiting and comparing stored graph context name
        #    (GRAPH_ID) to the retrived value graph context name (new_graph), the identifiers are not the same,
        #    since new_graph value is now stored at GRAPH_ID value.
        possible_exceptions = [
            "Encountered different graph value when opened key " + GRAPH_ID,
            "Encountered an empty key when opened key " + new_graph
        ]
        result = writer.get()
        if isinstance(result, str):
            self.env.assertContains(result, possible_exceptions)
        else:
            self.env.assertEquals(1000000, result["result_set"][0][0])

    def test_08_concurrent_write_replace(self):
        """Race a heavy write query against SET replacing the key with a
        non-graph value; expect either success or a known type error.
        """
        # Test setup - validate that graph exists and possible results are None
        self.graph.query("MATCH (n) RETURN n")

        pool = Pool(nodes=1)
        heavy_write_query = """UNWIND(range(0,999999)) as x CREATE(n) RETURN count(n)"""
        writer = pool.apipe(thread_run_query, heavy_write_query, None)
        set_result = self.conn.set(GRAPH_ID, "1")
        writer.wait()
        possible_exceptions = [
            "Encountered a non-graph value type when opened key " + GRAPH_ID,
            "WRONGTYPE Operation against a key holding the wrong kind of value"
        ]
        result = writer.get()
        if isinstance(result, str):
            # If the SET command attempted to execute while the CREATE query was running,
            # an exception should have been issued.
            self.env.assertContains(result, possible_exceptions)
        else:
            # Otherwise, both the CREATE query and the SET command should have succeeded.
            self.env.assertEquals(1000000, result.result_set[0][0])
            self.env.assertEquals(set_result, True)

        # Delete the key
        self.conn.delete(GRAPH_ID)

    def test_09_concurrent_multiple_readers_after_big_write(self):
        """Many readers immediately after a 1000-edge write (issue #890)."""
        # Test issue #890
        self.graph = Graph(GRAPH_ID, self.conn)
        self.graph.query("""UNWIND(range(0,999)) as x CREATE()-[:R]->()""")
        read_query = """MATCH (n)-[r:R]->(m) RETURN count(r) AS res UNION RETURN 0 AS res"""

        queries = [read_query] * CLIENT_COUNT
        results = run_concurrent(queries, thread_run_query)

        for result in results:
            if isinstance(result, str):
                self.env.assertEquals(0, result)
            else:
                self.env.assertEquals(1000, result["result_set"][0][0])
class rgraph(absgraph):
    """RedisGraph backend implementing the ``absgraph`` interface.

    Translates the abstract operations (bulk node loading, edge loading,
    batched property reads, ranged reads) into Cypher queries issued via
    the redisgraph-py client. All nodes use the single label ``node``.
    """

    def __init__(self, server, schema):
        """Connect to ``server`` ("host:port") and initialize the schema."""
        host, port = server.split(':')
        self._client_stub = redis.Redis(host=host, port=int(port))
        self._graph = Graph('test', self._client_stub)
        # Maximum number of entities folded into a single query.
        self._maxquery = 100
        # Older RedisGraph has no exists() predicate; emulate it (see
        # _one_cypher).
        self.cypher_no_exists = True
        super(rgraph, self).__init__(schema)

    def drop_all(self):
        """Delete the whole graph key."""
        self._graph.delete()

    def get_schema(self):
        pass

    def load_schema(self, schema):
        pass

    def set_index(self):
        """Create an index for every schema property flagged with 'index'."""
        for p, d in self.schema.items():
            if 'index' not in d:
                continue
            elif not d['index']:
                continue
            query = 'CREATE INDEX ON :node (%s)' % p
            self.query(query)

    def _is_intinstance(self, p, v):
        # True when predicate `p` is int-typed but `v` arrived as float/str
        # and must be cast back. np.float64 subclasses float, so checking
        # (float, str) is equivalent to the historical
        # (np.float_, float, str) test and survives NumPy >= 2.0, where the
        # np.float_ alias was removed.
        return self._int_type(p) and isinstance(v, (float, str))

    def add_node(self, node):
        self._graph.add_node(node)

    def add_edge(self, edge):
        self._graph.add_edge(edge)

    def add_nodes(self, nodes):
        """Stage a list of property dicts as ':node' nodes."""
        for i in nodes:
            node = Node(label='node', properties=i)
            self.add_node(node)

    def nquads(self, df, predicates, i):
        """Serialize up to self._maxquery rows of `df` starting at row `i`.

        :return: (list of property dicts, next row index)
        """
        lim = min(i + self._maxquery, len(df))
        nquads = []
        while i < lim:
            properties = {}
            for p in predicates:
                try:
                    s = df.iloc[i][p]
                except Exception:
                    # Missing column/value: fall back to the row index.
                    s = i
                s = self.serialize(s)
                properties[p] = s
            nquads.append(properties)
            i += 1
        return nquads, i

    def nquads_edges(self, graph, label, i=0, nodes=None, neighbors=None, j=0):
        """Collect up to self._maxquery (node, neighbor) pairs from `graph`.

        Resumable: returns (edges, node index, pending neighbor list,
        neighbor index, number of edges taken from the budget); pass the
        returned state back in to continue where the previous call stopped.
        """
        if nodes is None:
            nodes = list(graph.nodes())
        edges = []
        budget = self._maxquery
        # Initialize k so the final return is well-defined even when no
        # neighbor was ever enumerated (the original raised NameError when
        # the first node had no neighbors).
        k = 0
        for node in nodes[i:]:
            if neighbors is None:
                neighbors = list(graph.neighbors(node))
            for k, neigh in enumerate(neighbors[j:]):
                if budget == 0:
                    # Budget exhausted: report resume state.
                    return edges, i, neighbors, j + k, self._maxquery
                edges.append((node, neigh))
                budget -= 1
            i += 1
            neighbors = None
            j = 0
        return edges, i, neighbors, j + k, self._maxquery - budget

    def nquads_edges2(self, edges):
        """Build one MATCH ... CREATE query inserting all `edges`, e.g.::

            GRAPH.QUERY test 'MATCH (s0:node {name: "v998"}),
            (d0:node {name: "v10"}) CREATE (s0)-[:edge]->(d0)'
        """
        # Quote identifiers only when they are strings (same rule as
        # neighbors()/batch()).
        if isinstance(edges[0][0], str):
            base = '(s%d:node {%s: "%s"}), (d%d:node {%s: "%s"})'
        else:
            base = '(s%d:node {%s: %s}), (d%d:node {%s: %s})'
        l = []
        m = []
        for i, e in enumerate(edges):
            m.append(base % (i, self.id_predicate, e[0],
                             i, self.id_predicate, e[1]))
            l.append('(s%d)-[:%s]->(d%d)' % (i, self.edge_attribute, i))
        q = 'MATCH ' + ', '.join(m) + ' CREATE ' + ', '.join(l)
        return q

    def parse_neighbors(self, res, ret):
        """Fold a 'RETURN n.<p>, m.<p>' result set into `ret` (id -> [ids])."""
        if len(res.result_set) == 0:
            return
        # First row is the header (e.g. b'n.name'); extract the predicate.
        p = res.result_set[0][0].decode().split('.')[1]  # XXX
        for k, v in res.result_set[1:]:
            k = k.decode()
            v = v.decode()
            if self._is_intinstance(p, k):
                k = int(float(k))
            if self._is_intinstance(p, v):
                v = int(float(v))
            ret.setdefault(k, []).append(v)

    def neighbors(self, identities, pred=None, id_pred=None):
        """Return {identity: [neighbor ids]} for every id in `identities`.

        Batches requests as::

            MATCH (n0: node)-[:edge]->(m)
            WHERE n0.name = "v998" OR n0.name = "v10"
            RETURN n0.name, m.name
        """
        if not id_pred:
            id_pred = self.id_predicate
        if not pred:
            pred = self.edge_attribute
        ret = OrderedDict()
        # For small requests a per-identity query is faster.
        # NOTE(review): the `< 1` threshold only triggers on an empty list;
        # presumably a larger cutoff was intended — confirm with the author.
        if len(identities) < 1:
            for i in identities:
                q = 'MATCH (s: node {%s: "%s"})-[:%s]->(d) RETURN d.%s' % (
                    id_pred, i, pred, id_pred)
                res = self.query(q)
                ret.update({i: [j[0].decode() for j in res.result_set[1:]]})
            return ret
        # Otherwise batch requests, quoting string identifiers only.
        if isinstance(identities[0], str):
            where_base = 'n.%s = "%s"'
        else:
            where_base = 'n.%s = %s'
        step = 8
        l = 0
        h = step
        lim = len(identities)
        while l < lim:
            b = identities[l:h]
            where = [where_base % (id_pred, i) for i in b]
            where = ' OR '.join(where)
            query = ('MATCH (n: node)-[:%s]->(m) WHERE %s RETURN n.%s, m.%s' %
                     (pred, where, id_pred, id_pred))
            res = self.query(query)
            self.parse_neighbors(res, ret)
            l = h
            h = h + step
        return ret

    def _int_type(self, predicate):
        return self.schema[predicate]['type'] == 'int'

    def parse_batch(self, res, ret, predicates):
        """Demultiplex one flat RETURN row into per-predicate lists in `ret`."""
        r = res.result_set[1]
        npreds = len(predicates)
        for i, p in enumerate(predicates):
            # Values for predicate i appear every npreds columns.
            l = [j.decode() for j in r[i::npreds]]
            t = self.deserialize_type(p)
            if t:
                l = self.deserialize(l, t)
            # XXX int-typed predicates may come back as float/str; normalize.
            if self._int_type(p) and isinstance(l[0], (float, str)):
                l = [int(float(i)) for i in l]
            ret[p].extend(l)

    def batch(self, identities, predicates, identities_predicate=None):
        """Fetch `predicates` for each identity, in chunks of 1000 nodes.

        :return: OrderedDict mapping predicate -> list of values, in the
            order of `identities`.
        """
        if identities_predicate is None:
            identities_predicate = self.id_predicate
        # Quote identifiers only when they are strings.
        if isinstance(identities[0], str):
            mtch_base = '(n%d:node {%s: "%s"})'
        else:
            mtch_base = '(n%d:node {%s: %s})'
        step = 1000
        l = 0
        h = step
        lim = len(identities)
        ret = OrderedDict({p: [] for p in predicates})
        while l < lim:
            nodes = identities[l:h]
            mtch = [
                mtch_base % (j, identities_predicate, i)
                for j, i in enumerate(nodes)
            ]
            mtch = ', '.join(mtch)
            rtrn = [
                'n%d.%s' % (j, p) for j in range(len(nodes))
                for p in predicates
            ]
            rtrn = ', '.join(rtrn)
            query = 'MATCH ' + mtch + ' RETURN ' + rtrn
            res = self.query(query)
            # We get something like
            # [[b'n0.numeric', b'n0.name', b'n1.numeric', b'n1.name'], ...
            # res.result_set[1][0] is like b'xazf'
            self.parse_batch(res, ret, predicates)
            l = h
            h = h + step
        return ret

    def missing_values(self, predicate, low, high):
        return []

    def _one_cypher(self, predicate, identity):
        """Build a query fetching `predicate` of one node.

        With an identity, select that node; otherwise pick any node that
        has the predicate set.
        """
        if identity:
            if isinstance(identity, str):
                query = 'MATCH (n:node) WHERE n.%s = "%s" RETURN n.%s' % (
                    self.id_predicate, identity, predicate)
            else:
                # Numeric identities must not be quoted — matches the
                # quoting rule used everywhere else (neighbors/batch);
                # the original quoted both branches identically.
                query = 'MATCH (n:node) WHERE n.%s = %s RETURN n.%s' % (
                    self.id_predicate, identity, predicate)
        else:
            if self.cypher_no_exists:
                whr = 'n.%s != ""'
            else:
                whr = 'exists(n.%s)'
            query = ('MATCH (n:node) WHERE ' + whr +
                     ' RETURN n.%s LIMIT 1') % (predicate, predicate)
        return query

    def parse_one(self, res, predicate):
        return res.result_set[1][0].decode()

    def one(self, predicate, identity=None):
        """Return a single value of `predicate`, optionally for `identity`."""
        query = self._one_cypher(predicate, identity)
        res = self.query(query)
        r = self.parse_one(res, predicate)
        t = self.deserialize_type(predicate)
        if t:
            r = self.deserialize([r], t)[0]
        return r

    def _count_cypher(self, name=None):
        # `name` is accepted for interface parity but does not affect the
        # query.
        if name is None:
            name = self.id_predicate
        return 'MATCH (n:node) RETURN COUNT(n)'

    def parse_count(self, res):
        return int(float(res.result_set[1][0].decode()))

    def count(self, name=None):
        """Return the number of ':node' nodes."""
        query = self._count_cypher(name)
        res = self.query(query)
        return self.parse_count(res)

    def merge(self):
        """MERGE all locally staged nodes and edges in a single query."""
        query = ''
        for _, node in self._graph.nodes.items():
            query += str(node) + ','
        for edge in self._graph.edges:
            query += str(edge) + ','
        # Discard the trailing comma. (The original used `query[-1] is ','`
        # — an identity test on a str — and raised IndexError when nothing
        # was staged.)
        if query.endswith(','):
            query = query[:-1]
        self._graph.merge(query)

    def commit(self):
        self._graph.commit()

    def flush(self):
        self._graph.flush()

    def query(self, query):
        return self._graph.query(query)

    def range_cypher(self, low, high, predicates, id_predicate, expand):
        """Build a ranged (or LIMIT-ed, if unsortable) read query.

        `expand` is accepted for interface parity but unused here.
        """
        unsortable = False
        if id_predicate is None:
            id_predicate = self.id_predicate
        if self.id_predicate_unsortable:
            unsortable = True
        rtrn = ['n.%s' % p for p in predicates]
        rtrn = ', '.join(rtrn)
        if unsortable:
            query = ('MATCH (n: node) RETURN %s ORDER BY n.%s LIMIT %d' %
                     (rtrn, self.sorted_predicate, high - low))
        else:
            pred = self.sorted_predicate
            query = ('MATCH (n: node) WHERE n.%s >= %d AND n.%s < %d '
                     'RETURN %s ORDER BY n.%s' %
                     (pred, low, pred, high, rtrn, pred))
        return query

    def _range_xform(self, ret, predicates):
        """Turn {pred: [v0, v1, ...]} into [{pred: v0}, {pred: v1}, ...]."""
        ret2 = [{} for i in range(len(ret[predicates[0]]))]
        for k, vs in ret.items():
            for j, v in enumerate(vs):
                ret2[j][k] = v
        return ret2

    def _range(self, low, high, predicates, id_predicate=None, expand=False):
        query = self.range_cypher(low, high, predicates, id_predicate, expand)
        ret = OrderedDict()
        res = self.query(query)
        for pi, p in enumerate(predicates):
            tmp = [i[pi].decode() for i in res.result_set[1:]]
            ret[p] = tmp
        # for RETURN n.numeric, n.name, n.identity, ret[1:] is like
        # [[b'numericvalue0', b'namevalue0', b'identityvalue0'], ..]
        # Now ret looks like [{'name': [name values]}, {'numeric': []} ..]
        # To be compatible with _dataframe, we transform this to
        # [{'name': 'namevalue0', 'numeric': 'numericvalue0'}, {}, {} ...]
        ret2 = self._range_xform(ret, predicates)
        return ret2

    def load_df(self, df, predicates, n=0):
        """Bulk-load dataframe rows as nodes, self._maxquery at a time."""
        print('loading nodes')
        while n < len(df):
            nquads, n = self.nquads(df, predicates, n)
            self.add_nodes(nquads)
            self.flush()

    def load_graph(self, g, edge):
        """Bulk-load the edges of `g` (an object exposing nodes() and
        neighbors(), e.g. a networkx graph — TODO confirm with callers)."""
        nodes = list(g.nodes())
        print('loading edges')
        n = 0
        j = 0
        nbrs = None
        num_nodes = len(nodes)
        n_prev = 0
        while n < num_nodes:
            nquads, n, nbrs, j, c = self.nquads_edges(g, edge, n,
                                                      nodes=nodes,
                                                      neighbors=nbrs, j=j)
            query = self.nquads_edges2(nquads)
            self.query(query)
            if n > n_prev + 10000:
                # Progress report every ~10k nodes.
                print('%d / %d' % (n, num_nodes))
                n_prev = n
class RedisGraph: """ Graph abstraction over RedisGraph. A thin wrapper but provides us some options. """ def __init__(self, host='localhost', port=6379, graph='default', password=''): """ Construct a connection to Redis Graph. """ self.r = redis.Redis(host=host, port=port, password=password) self.redis_graph = Graph(graph, self.r) def add_node(self, identifier=None, label=None, properties=None): """ Add a node with the given label and properties. """ logger.debug( f"--adding node id:{identifier} label:{label} prop:{properties}") if identifier and properties: properties['id'] = identifier node = Node(node_id=identifier, alias=identifier, label=label, properties=properties) self.redis_graph.add_node(node) return node def get_edge(self, start, end, predicate=None): """ Get an edge from the graph with the specified start and end identifiers. """ result = None for edge in self.redis_graph.edges: if edge.src_node.id == start and edge.dest_node.id == end: result = edge break return result def add_edge(self, start, predicate, end, properties={}): """ Add an edge with the given predicate and properties between start and end nodes. """ logger.debug( f"--adding edge start:{start} pred:{predicate} end:{end} prop:{properties}" ) if isinstance(start, str) and isinstance(end, str): start = Node(node_id=start, label='thing') end = Node(node_id=end, label='thing') self.redis_graph.add_node(start) self.redis_graph.add_node(end) edge = Edge(start, predicate, end, properties) self.redis_graph.add_edge(edge) return edge def has_node(self, identifier): return identifier in self.redis_graph.nodes def get_node(self, identifier, properties=None): return self.redis_graph.nodes[identifier] def commit(self): """ Commit modifications to the graph. """ self.redis_graph.commit() def query(self, query): """ Query and return result set. """ result = self.redis_graph.query(query) result.pretty_print() return result def delete(self): """ Delete the named graph. """ self.redis_graph.delete()
class GraphUtils:
    """Provides low level functions to interact with Redis Graph"""

    def __init__(self, redis_proxy: RedisProxy, graph_name="apigraph") -> None:
        """Initialize Graph Utils module

        :param redis_proxy: RedisProxy object created from redis_proxy module
        :param graph_name: Graph Key name to be created in Redis
        :return: None
        """
        self.redis_proxy = redis_proxy
        self.redis_connection = redis_proxy.get_connection()
        self.graph_name = graph_name
        self.redis_graph = Graph(graph_name, self.redis_connection)

    def read(self, match: str, ret: str,
             where: Optional[str] = None) -> Union[list, None]:
        """
        Run query to read nodes in Redis and return the result

        :param match: Relationship between queried entities.
        :param ret: Defines which property/ies will be returned.
        :param where: Used to filter results, not mandatory.
        :return: Corresponding Nodes
        """
        query = "MATCH(p{})".format(match)
        if where:
            query += " WHERE(p.{})".format(where)
        query += " RETURN p{}".format(ret)
        query_result = self.redis_graph.query(query)
        # Processing Redis-set response format
        return self.process_result(query_result)

    def update(self, match: str, set: str,
               where: Optional[str] = None) -> list:
        """
        Run query to update nodes in Redis and return the result

        :param match: Relationship between queried entities.
        :param set: The property to be updated.
        :param where: Used to filter results, not mandatory.
        :return: Query results
        """
        query = "MATCH(p{})".format(match)
        if where is not None:
            query += " WHERE(p.{})".format(where)
        query += " SET p.{}".format(set)
        return self.redis_connection.execute_command("GRAPH.QUERY",
                                                     self.graph_name, query)

    def delete(self, where: str, match: str = "") -> list:
        """
        Run query to delete nodes in Redis and return the result

        :param where: Used to filter which nodes are deleted.
        :param match: Relationship between queried entities.
        :return: Query results
        """
        query = "MATCH(p{})".format(match)
        if where is not None:
            query += " WHERE(p.{})".format(where)
        query += " DELETE p"
        return self.redis_connection.execute_command("GRAPH.QUERY",
                                                     self.graph_name, query)

    def create_relation(self, label_source: str, where_source: str,
                        relation_type: str, label_dest: str,
                        where_dest: str) -> list:
        """
        Create a relation(edge) between nodes according to WHERE filters.

        :param label_source: Source node label.
        :param where_source: Where statement to filter source node.
        :param relation_type: The name of the relation type to assign.
        :param label_dest: Label name for the destination node.
        :param where_dest: Where statement to filter destination node
        """
        query = "MATCH(s:{} {{{}}}), ".format(label_source, where_source)
        query += "(d:{} {{{}}})".format(label_dest, where_dest)
        query += " CREATE (s)-[:{}]->(d)".format(relation_type)
        return self.redis_connection.execute_command("GRAPH.QUERY",
                                                     self.graph_name, query)

    def add_node(self, label: str, alias: str, properties: dict) -> Node:
        """
        Add node to the redis graph

        :param label: label for the node.
        :param alias: alias for the node.
        :param properties: properties for the node.
        :return: Created Node
        """
        node = Node(label=label, alias=alias, properties=properties)
        self.redis_graph.add_node(node)
        return node

    def add_edge(self, source_node: Node, predicate: str,
                 dest_node: Node) -> None:
        """Add edge between nodes in redis graph

        :param source_node: source node of the edge.
        :param predicate: relationship between the source and destination node
        :param dest_node: destination node of the edge (a Node; the original
            annotation incorrectly said str).
        :return: None
        """
        edge = Edge(source_node, predicate, dest_node)
        self.redis_graph.add_edge(edge)

    def flush(self) -> None:
        """Commit the changes made to the Graph to Redis and reset/flush the
        Nodes and Edges to be added in the next commit"""
        self.redis_graph.flush()

    def process_result(self, result: list) -> list:
        """
        Partial data processing for results redis-sets

        :param result: QueryResult object returned by redisgraph.
        :return: list of property dicts with id/type/context keys renamed
            to their @-prefixed form.
        """
        response_json_list = []
        if not result.result_set:
            return []
        for return_alias in result.result_set:
            for record in return_alias:
                if record is None:
                    # Skip empty records. (The original returned here,
                    # discarding everything collected so far and yielding an
                    # implicit None despite the `-> list` annotation.)
                    continue
                new_record = record.properties
                if new_record:
                    if 'id' in new_record:
                        new_record['@id'] = new_record.pop('id')
                    # Rename 'type' independently of 'id' — the original
                    # popped it unconditionally inside the 'id' branch and
                    # raised KeyError for records with 'id' but no 'type'.
                    if 'type' in new_record:
                        new_record['@type'] = new_record.pop('type')
                    if 'context' in new_record:
                        new_record['@context'] = new_record.pop('context')
                    response_json_list.append(new_record)
        return response_json_list
def populate_graph(self, graph_name): redis_graph = Graph(graph_name, redis_con) # quick return if graph already exists if redis_con.exists(graph_name): return redis_graph people = ["Roi", "Alon", "Ailon", "Boaz", "Tal", "Omri", "Ori"] visits = [("Roi", "USA"), ("Alon", "Israel"), ("Ailon", "Japan"), ("Boaz", "United Kingdom")] countries = ["Israel", "USA", "Japan", "United Kingdom"] personNodes = {} countryNodes = {} # create nodes for p in people: person = Node(label="person", properties={ "name": p, "height": random.randint(160, 200) }) redis_graph.add_node(person) personNodes[p] = person for c in countries: country = Node(label="country", properties={ "name": c, "population": random.randint(100, 400) }) redis_graph.add_node(country) countryNodes[c] = country # create edges for v in visits: person = v[0] country = v[1] edge = Edge(personNodes[person], 'visit', countryNodes[country], properties={'purpose': 'pleasure'}) redis_graph.add_edge(edge) redis_graph.commit() # delete nodes, to introduce deleted entries within our datablock query = """MATCH (n:person) WHERE n.name = 'Roi' or n.name = 'Ailon' DELETE n""" redis_graph.query(query) query = """MATCH (n:country) WHERE n.name = 'USA' DELETE n""" redis_graph.query(query) # create indices redis_con.execute_command( "GRAPH.QUERY", graph_name, "CREATE INDEX FOR (p:Person) ON (p.name, p.height)") redis_con.execute_command( "GRAPH.QUERY", graph_name, "CREATE INDEX FOR (c:country) ON (c.name, c.population)") actual_result = redis_con.execute_command( "GRAPH.QUERY", graph_name, "CREATE INDEX FOR ()-[r:visit]-() ON (r.purpose)") actual_result = redis_con.execute_command( "GRAPH.QUERY", graph_name, "CALL db.idx.fulltext.createNodeIndex({label: 'person', stopwords: ['A', 'B'], language: 'english'}, 'text')" ) return redis_graph
source_entity_id, destination_entity_id), 'rank', 1) source_node = Node(label='entity', properties={ 'id': source_entity_id, 'rank': 1, 'name': source_canonical_name }) destination_node = Node(label='entity', properties={ 'id': destination_entity_id, 'rank': 1, 'name': destination_canonical_name }) redis_graph.add_node(source_node) redis_graph.add_node(destination_node) edge = Edge(source_node, 'related', destination_node, properties={'article': sentence_key}) redis_graph.add_edge(edge) redis_graph.commit() if num_sents % 100 == 0: log(f"... {num_sents} sentences ") num_sents += 1 log("Flushing graph for sentence %s " % sentence_key) logger.info("Completed")
def test_CRUD_replication(self):
    """End-to-end replication test: CRUD operations and index management
    performed on the primary must be reflected on the replica — identical
    execution plans, query results, node counts and index sets."""
    # create a simple graph
    env = self.env
    source_con = env.getConnection()
    replica_con = env.getSlaveConnection()

    # enable write commands on slave, required as all RedisGraph
    # commands are registered as write commands
    replica_con.config_set("slave-read-only", "no")

    # perform CRUD operations
    # create a simple graph
    graph = Graph(GRAPH_ID, source_con)
    replica = Graph(GRAPH_ID, replica_con)
    s = Node(label='L', properties={'id': 0, 'name': 'abcd'})
    t = Node(label='L', properties={'id': 1, 'name': 'efgh'})
    e = Edge(s, 'R', t)

    graph.add_node(s)
    graph.add_node(t)
    graph.add_edge(e)
    graph.flush()

    # create index
    q = "CREATE INDEX ON :L(id)"
    graph.query(q)

    # create full-text index
    q = "CALL db.idx.fulltext.createNodeIndex('L', 'name')"
    graph.query(q)

    # add fields to existing index
    q = "CALL db.idx.fulltext.createNodeIndex('L', 'title', 'desc')"
    graph.query(q)

    # create full-text index with index config
    q = "CALL db.idx.fulltext.createNodeIndex({label: 'L1', language: 'german', stopwords: ['a', 'b'] }, 'title', 'desc')"
    graph.query(q)

    # update entity
    q = "MATCH (n:L {id:1}) SET n.id = 2"
    graph.query(q)

    # delete entity
    q = "MATCH (n:L {id:0}) DELETE n"
    graph.query(q)

    # give replica some time to catch up
    time.sleep(1)

    # make sure index is available on replica
    q = "MATCH (s:L {id:2}) RETURN s.name"
    plan = graph.execution_plan(q)
    replica_plan = replica.execution_plan(q)
    env.assertIn("Index Scan", plan)
    self.env.assertEquals(replica_plan, plan)

    # issue query on both source and replica
    # make sure results are the same
    result = graph.query(q).result_set
    replica_result = replica.query(q).result_set
    self.env.assertEquals(replica_result, result)

    # make sure node count on both primary and replica is the same
    q = "MATCH (n) RETURN count(n)"
    result = graph.query(q).result_set
    replica_result = replica.query(q).result_set
    self.env.assertEquals(replica_result, result)

    # make sure nodes are in sync
    q = "MATCH (n) RETURN n ORDER BY n"
    result = graph.query(q).result_set
    replica_result = replica.query(q).result_set
    self.env.assertEquals(replica_result, result)

    # make sure both primary and replica have the same set of indexes
    q = "CALL db.indexes()"
    result = graph.query(q).result_set
    replica_result = replica.query(q).result_set
    self.env.assertEquals(replica_result, result)

    # drop fulltext index
    q = "CALL db.idx.fulltext.drop('L')"
    graph.query(q)

    # give replica some time to catch up
    time.sleep(1)

    # make sure both primary and replica have the same set of indexes
    q = "CALL db.indexes()"
    result = graph.query(q).result_set
    replica_result = replica.query(q).result_set
    self.env.assertEquals(replica_result, result)