def test_array_functions(self):
    """Exercise array literals and collect() aggregation on a tiny graph."""
    redis_graph = Graph('social', self.r)
    redis_graph.query("""CREATE (p:person{name:'a',age:32, array:[0,1,2]})""")

    # An array literal should round-trip unchanged through the server.
    result = redis_graph.query("""WITH [0,1,2] as x return x""")
    self.assertEqual([0, 1, 2], result.result_set[0][0])

    # collect() should aggregate the single node into a one-element array.
    expected_node = Node(node_id=0,
                         label='person',
                         properties={'name': 'a',
                                     'age': 32,
                                     'array': [0, 1, 2]})
    result = redis_graph.query("""MATCH(n) return collect(n)""")
    self.assertEqual([expected_node], result.result_set[0][0])

    # All done, remove graph.
    redis_graph.delete()
def __init__(self):
    """Create the test environment and build the shared graph "G"."""
    global graph, redis_con
    self.env = Env()
    redis_con = self.env.getConnection()
    graph = Graph("G", redis_con)
    self.populate_graph()
def test02_private_identifiers(self):
    """Validate that private identifiers are not added to the graph."""
    graphname = "tmpgraph1"

    # Emit the temporary node/relation CSV inputs.
    node_rows = [["_identifier", "nodename"], [0, "a"], [5, "b"], [3, "c"]]
    with open('/tmp/nodes.tmp', mode='w') as csv_file:
        csv.writer(csv_file).writerows(node_rows)

    rel_rows = [["src", "dest"], [0, 3], [5, 3]]
    with open('/tmp/relations.tmp', mode='w') as csv_file:
        csv.writer(csv_file).writerows(rel_rows)

    res = CliRunner().invoke(bulk_insert,
                             ['--nodes', '/tmp/nodes.tmp',
                              '--relations', '/tmp/relations.tmp',
                              graphname],
                             catch_exceptions=False)

    # The script should report 3 node creations and 2 edge creations.
    self.assertEqual(res.exit_code, 0)
    self.assertIn('3 nodes created', res.output)
    self.assertIn('2 relations created', res.output)

    # The field "_identifier" should not be a property in the graph.
    tmp_graph = Graph(graphname, self.redis_con)
    query_result = tmp_graph.query('MATCH (a) RETURN a')
    for propname in query_result.header:
        self.assertNotIn('_identifier', propname)
def test04_repeated_edges(self):
    """Two same-type edges between one node pair must survive an RDB reload."""
    graphname = "repeated_edges"
    g = Graph(graphname, redis_con)

    src = Node(label='p', properties={'name': 'src'})
    dest = Node(label='p', properties={'name': 'dest'})
    edge1 = Edge(src, 'e', dest, properties={'val': 1})
    edge2 = Edge(src, 'e', dest, properties={'val': 2})

    g.add_node(src)
    g.add_node(dest)
    g.add_edge(edge1)
    g.add_edge(edge2)
    g.commit()

    # Verify the new edge
    q = """MATCH (a)-[e]->(b) RETURN e.val, a.name, b.name ORDER BY e.val"""
    expected_result = [
        [edge1.properties['val'], src.properties['name'], dest.properties['name']],
        [edge2.properties['val'], src.properties['name'], dest.properties['name']],
    ]
    self.env.assertEquals(g.query(q).result_set, expected_result)

    # Save RDB & Load from RDB
    redis_con.execute_command("DEBUG", "RELOAD")

    # Verify that the latest edge was properly saved and loaded
    self.env.assertEquals(g.query(q).result_set, expected_result)
def binary_tree_graph1():
    # Build tree-like graph "G1" in the module-global `redis_graph`:
    # root (a:A) connects to four X nodes b1..b4 (KNOWS for b1/b2,
    # FOLLOWS for b3/b4); each b node gets two FRIEND children c*1/c*2,
    # and b1->b2->b3->b4->b1 form a FRIEND ring.
    global redis_graph
    redis_con = _brand_new_redis()
    redis_graph = Graph("G1", redis_con)
    redis_graph.query("CREATE(a: A {name: 'a'}), \
 (b1: X {name: 'b1'}), \
 (b2: X {name: 'b2'}), \
 (b3: X {name: 'b3'}), \
 (b4: X {name: 'b4'}), \
 (c11: X {name: 'c11'}), \
 (c12: X {name: 'c12'}), \
 (c21: X {name: 'c21'}), \
 (c22: X {name: 'c22'}), \
 (c31: X {name: 'c31'}), \
 (c32: X {name: 'c32'}), \
 (c41: X {name: 'c41'}), \
 (c42: X {name: 'c42'}) \
 CREATE(a)-[:KNOWS] -> (b1), \
 (a)-[:KNOWS] -> (b2), \
 (a)-[:FOLLOWS] -> (b3), \
 (a)-[:FOLLOWS] -> (b4) \
 CREATE(b1)-[:FRIEND] -> (c11),\
 (b1)-[:FRIEND] -> (c12), \
 (b2)-[:FRIEND] -> (c21), \
 (b2)-[:FRIEND] -> (c22), \
 (b3)-[:FRIEND] -> (c31), \
 (b3)-[:FRIEND] -> (c32), \
 (b4)-[:FRIEND] -> (c41), \
 (b4)-[:FRIEND] -> (c42) \
 CREATE(b1)-[:FRIEND] -> (b2), \
 (b2)-[:FRIEND] -> (b3), \
 (b3)-[:FRIEND] -> (b4), \
 (b4)-[:FRIEND] -> (b1) \
 ")
def test11_schema_ignore_columns(self):
    """Validate that columns with the type IGNORE are not inserted."""
    graphname = "ignore_graph"
    with open('/tmp/nodes.tmp', mode='w') as csv_file:
        csv.writer(csv_file).writerows([['str_col:STRING', 'ignore_col:IGNORE'],
                                        ['str1', 0],
                                        ['str2', 1]])

    res = CliRunner().invoke(bulk_insert,
                             ['--nodes', '/tmp/nodes.tmp',
                              '--enforce-schema', graphname],
                             catch_exceptions=False)
    self.assertEqual(res.exit_code, 0)
    self.assertIn('2 nodes created', res.output)

    graph = Graph(graphname, self.redis_con)
    query_result = graph.query('MATCH (a) RETURN a ORDER BY a.str_col')

    # Only 'str_col' should be stored; the IGNORE column must be dropped.
    expected_props = [{'str_col': 'str1'}, {'str_col': 'str2'}]
    for row, props in zip(query_result.result_set, expected_props):
        self.assertEqual(row[0].properties, props)
def compare_uncached_to_cached_query_plans(self, query, params=None):
    """Assert that caching a query does not alter its execution plan."""
    global redis_con
    plan_graph = Graph('Cache_Test_plans', redis_con)
    # First call populates the query cache; second call hits it.
    first_plan = plan_graph.execution_plan(query, params)
    second_plan = plan_graph.execution_plan(query, params)
    self.env.assertEqual(first_plan, second_plan)
    plan_graph.delete()
def __init__(self):
    """Initialize the environment and seed the 'update' graph."""
    global graph
    self.env = Env(decodeResponses=True)
    conn = self.env.getConnection()
    graph = Graph('update', conn)
    # create a single node with attribute 'v'
    graph.query("CREATE ({v:1})")
def __init__(self):
    """Connect to Redis and build the graph used by the optimization tests."""
    super(testOptimizationsPlan, self).__init__()
    global graph, redis_con
    redis_con = self.env.getConnection()
    graph = Graph("g", redis_con)
    self.populate_graph()
def _parse_connection_string(self, cs: str) -> Graph:
    """Build a RedisGraph ``Graph`` handle from a redis:// connection string.

    The URL path selects the Redis database index (``/2`` -> db 2,
    missing/empty -> 0); the first ``graph=<name>`` query parameter selects
    the graph name (default ``"sqerzo"``); the port defaults to 6379.

    :param cs: connection string, e.g. ``redis://user:pw@host:6380/1?graph=g``
    :return: a ``Graph`` bound to a new ``redis.Redis`` client
    :raises ValueError: if the path database index is not an integer
    """
    parsed = pr.urlparse(cs)

    # Database index from the path; empty path or bare "/" means db 0.
    path_db = parsed.path[1:] if parsed.path else ""
    db = int(path_db) if path_db else 0

    # Graph name from the first query parameter. partition() tolerates a
    # parameter without "=" (the previous split()/unpack raised ValueError
    # on e.g. "?foo").
    db_graph = "sqerzo"
    if parsed.query:
        first_param = parsed.query.split("&", maxsplit=1)[0]
        p_name, sep, p_value = first_param.partition("=")
        if sep and p_name == "graph":
            db_graph = p_value

    port = parsed.port if parsed.port is not None else 6379

    r = redis.Redis(host=parsed.hostname,
                    port=port,
                    username=parsed.username,
                    password=parsed.password,
                    db=db)
    return Graph(db_graph, r)
def __init__(self):
    """Connect to Redis and populate graph "G" for the function-call tests."""
    super(testFunctionCallsFlow, self).__init__()
    global graph, redis_con
    redis_con = self.env.getConnection()
    graph = Graph("G", redis_con)
    self.populate_graph()
def test09_utf8(self):
    """Validate that UTF-8 strings survive bulk insertion intact."""
    graphname = "tmpgraph5"
    rows = [['id', 'utf8_str_ß'],
            [0, 'Straße'],
            [1, 'auslösen'],
            [2, 'zerstören'],
            [3, 'français'],
            [4, 'américaine'],
            [5, 'épais'],
            [6, '中國的'],
            [7, '英語'],
            [8, '美國人']]

    # Write temporary files
    with open('/tmp/nodes.tmp', mode='w') as csv_file:
        csv.writer(csv_file).writerows(rows)

    res = CliRunner().invoke(bulk_insert,
                             ['--port', port, '--nodes', '/tmp/nodes.tmp',
                              graphname])
    assert res.exit_code == 0
    assert '9 nodes created' in res.output

    graph = Graph(graphname, redis_con)
    query_result = graph.query('MATCH (a) RETURN a ORDER BY a.id')
    # First entry is the column name — presumably the header row emitted by
    # this client version; the rest are the property values in id order.
    expected_strs = ['a.utf8_str_ß', 'Straße', 'auslösen', 'zerstören',
                     'français', 'américaine', 'épais', '中國的', '英語', '美國人']
    for row, expected in zip(query_result.result_set, expected_strs):
        self.assertEqual(repr(row[1]), repr(expected))
def test06_batched_build(self):
    """A graph built with one query per input file must match the single-query build."""
    graphname = "batched_graph"
    csv_path = os.path.dirname(
        os.path.abspath(__file__)) + '/../../demo/bulk_insert/resources/'
    res = CliRunner().invoke(bulk_insert, [
        '--port', port,
        '--nodes', csv_path + 'Person.csv',
        '--nodes', csv_path + 'Country.csv',
        '--relations', csv_path + 'KNOWS.csv',
        '--relations', csv_path + 'VISITED.csv',
        '--max-token-count', 1,
        graphname
    ])
    assert res.exit_code == 0
    # The script should report statistics multiple times
    assert res.output.count('nodes created') > 1

    new_graph = Graph(graphname, redis_con)

    # Newly-created graph should be identical to graph created in single query
    node_q = 'MATCH (p:Person) RETURN p, ID(p) ORDER BY p.name'
    assert redis_graph.query(node_q).result_set == new_graph.query(node_q).result_set

    edge_q = ('MATCH (a)-[e:KNOWS]->(b) RETURN a.name, e, b.name '
              'ORDER BY e.relation, a.name')
    assert redis_graph.query(edge_q).result_set == new_graph.query(edge_q).result_set
def test07_transposed_multi_hop(self):
    """Two-hop traversal meeting a transposed (reversed) two-hop leg."""
    conn = self.env.getConnection()
    g = Graph("tran_multi_hop", conn)

    # (a)-[R]->(b)-[R]->(c)<-[R]-(d)<-[R]-(e)
    nodes = {name: Node(properties={"val": name}) for name in "abcde"}
    for node in nodes.values():
        g.add_node(node)
    for src, dst in (("a", "b"), ("b", "c"), ("e", "d"), ("d", "c")):
        g.add_edge(Edge(nodes[src], "R", nodes[dst]))
    g.flush()

    q = """MATCH (a)-[*2]->(b)<-[*2]-(c) RETURN a.val, b.val, c.val ORDER BY a.val, b.val, c.val"""
    expected_result = [['a', 'c', 'a'], ['a', 'c', 'e'],
                       ['e', 'c', 'a'], ['e', 'c', 'e']]
    self.env.assertEquals(g.query(q).result_set, expected_result)
def test08_nonstandard_separators(self):
    """Validate use of non-comma delimiters in input files."""
    graphname = "tmpgraph6"
    inputs = [['prop_a', 'prop_b', 'prop_c'],
              ['val1', 5, True],
              [10.5, 'a', False]]

    # Write the temporary file using a pipe separator.
    with open('/tmp/nodes.tmp', mode='w') as csv_file:
        csv.writer(csv_file, delimiter='|').writerows(inputs)

    res = CliRunner().invoke(bulk_insert,
                             ['--nodes', '/tmp/nodes.tmp',
                              '--separator', '|', graphname],
                             catch_exceptions=False)
    self.assertEqual(res.exit_code, 0)
    self.assertIn('2 nodes created', res.output)

    graph = Graph(graphname, self.redis_con)
    query_result = graph.query(
        'MATCH (a) RETURN a.prop_a, a.prop_b, a.prop_c ORDER BY a.prop_a, a.prop_b, a.prop_c'
    )
    # The graph should have the correct types for all properties
    self.assertEqual(query_result.result_set,
                     [['val1', 5, True], [10.5, 'a', False]])
def __init__(self):
    """Set up the environment, populate the graph, and create its indices."""
    global redis_graph
    self.env = Env(decodeResponses=True)
    redis_graph = Graph(GRAPH_ID, self.env.getConnection())
    self.populate_graph()
    self.build_indices()
def test09_schema(self):
    """Validate that the enforce-schema argument is respected"""
    graphname = "tmpgraph7"
    with open('/tmp/nodes.tmp', mode='w') as csv_file:
        csv.writer(csv_file).writerows([['str_col:STRING', 'num_col:INT'],
                                        [0, 0],
                                        [1, 1]])

    res = CliRunner().invoke(bulk_insert,
                             ['--nodes', '/tmp/nodes.tmp',
                              '--enforce-schema', graphname],
                             catch_exceptions=False)
    self.assertEqual(res.exit_code, 0)
    self.assertIn('2 nodes created', res.output)

    graph = Graph(graphname, self.redis_con)
    query_result = graph.query(
        'MATCH (a) RETURN a.str_col, a.num_col ORDER BY a.num_col')
    # The schema coerces the first column to strings, the second to ints.
    self.assertEqual(query_result.result_set, [['0', 0], ['1', 1]])
def __init__(self, graph_name='COM'):
    """
    :param graph_name: name of the graph to create
    """
    self._redis_content = RedisContent().get_content
    self._redis_graph = Graph(graph_name, self._redis_content)
    # Running counter, starts at 0; incremented elsewhere in the class.
    self.index = 0
def test12_no_null_values(self):
    """Validate that NULL inputs are not inserted."""
    graphname = "null_graph"
    with open('/tmp/nodes.tmp', mode='w') as csv_file:
        csv.writer(csv_file).writerows([['str_col', 'mixed_col'],
                                        ['str1', True],
                                        ['str2', None]])

    res = CliRunner().invoke(bulk_insert,
                             ['--nodes', '/tmp/nodes.tmp', graphname],
                             catch_exceptions=False)
    self.assertEqual(res.exit_code, 0)
    self.assertIn('2 nodes created', res.output)

    graph = Graph(graphname, self.redis_con)
    query_result = graph.query('MATCH (a) RETURN a ORDER BY a.str_col')

    # The NULL in the second row is dropped, so only the first node
    # carries the 'mixed_col' property.
    node_1 = {'str_col': 'str1', 'mixed_col': True}
    node_2 = {'str_col': 'str2'}
    self.assertEqual(query_result.result_set[0][0].properties, node_1)
    self.assertEqual(query_result.result_set[1][0].properties, node_2)
def __init__(self):
    """Create the test environment and an empty graph handle."""
    global redis_con, redis_graph
    self.env = Env()
    redis_con = self.env.getConnection()
    redis_graph = Graph(GRAPH_ID, redis_con)
def setUpClass(cls):
    """Start a Redis instance and populate the shared social graph."""
    global redis_graph
    cls.r = redis()
    cls.r.start()
    conn = cls.r.client()
    redis_graph = Graph(social_utils.graph_name, conn)
    social_utils.populate_graph(conn, redis_graph)
def test15_array_properties_schema_enforced(self):
    """Validate that array properties are correctly inserted with an enforced schema."""
    graphname = "arr_graph_with_schema"
    with open('/tmp/nodes.tmp', mode='w') as csv_file:
        writer = csv.writer(csv_file, delimiter='|')
        writer.writerows([
            ['str_col:STRING', 'arr_col:ARRAY'],
            ['str1', """[1, 0.2, 'nested_str', False]"""],
            ['str2', """['prop1', ['nested_1', 'nested_2'], 5]"""],
        ])

    res = CliRunner().invoke(bulk_insert,
                             ['--nodes', '/tmp/nodes.tmp',
                              '--separator', '|',
                              '--enforce-schema', graphname],
                             catch_exceptions=False)
    self.assertEqual(res.exit_code, 0)
    self.assertIn('2 nodes created', res.output)

    graph = Graph(graphname, self.redis_con)
    query_result = graph.query('MATCH (a) RETURN a ORDER BY a.str_col')

    # Arrays (including nested ones) must be materialized as real lists.
    node_1 = {'str_col': 'str1', 'arr_col': [1, 0.2, 'nested_str', False]}
    node_2 = {'str_col': 'str2',
              'arr_col': ['prop1', ['nested_1', 'nested_2'], 5]}
    self.assertEqual(query_result.result_set[0][0].properties, node_1)
    self.assertEqual(query_result.result_set[1][0].properties, node_2)
def __init__(self):
    """Set up a decoded-response environment and populate graph "G"."""
    global redis_con, redis_graph
    self.env = Env(decodeResponses=True)
    redis_con = self.env.getConnection()
    redis_graph = Graph("G", redis_con)
    self.populate_graph()
def test07_utf8(self):
    """Verify that UTF-8 strings in both property values and column names
    are inserted and queried back correctly.

    (The previous docstring — "numeric, boolean, and null types" — was
    copy-pasted from a different test and did not describe this one.)
    """
    graphname = "tmpgraph5"
    rows = [['id', 'utf8_str_ß'],
            [0, 'Straße'],
            [1, 'auslösen'],
            [2, 'zerstören'],
            [3, 'français'],
            [4, 'américaine'],
            [5, 'épais'],
            [6, '中國的'],
            [7, '英語'],
            [8, '美國人']]

    # Write temporary files
    with open('/tmp/nodes.tmp', mode='w') as csv_file:
        csv.writer(csv_file).writerows(rows)

    res = CliRunner().invoke(bulk_insert,
                             ['--nodes', '/tmp/nodes.tmp', graphname],
                             catch_exceptions=False)
    assert res.exit_code == 0
    assert '9 nodes created' in res.output

    graph = Graph(graphname, self.redis_con)
    # The non-ASCII property string must be escaped backticks to parse correctly
    query_result = graph.query(
        """MATCH (a) RETURN a.`utf8_str_ß` ORDER BY a.id""")
    expected_strs = [['Straße'], ['auslösen'], ['zerstören'], ['français'],
                     ['américaine'], ['épais'], ['中國的'], ['英語'], ['美國人']]
    for actual, expected in zip(query_result.result_set, expected_strs):
        self.assertEqual(repr(actual), repr(expected))
def __init__(self):
    """Initialize the suite and build the traversal test graph."""
    super(testVariableLengthTraversals, self).__init__()
    global redis_con, redis_graph
    redis_con = self.env.getConnection()
    redis_graph = Graph("G", redis_con)
    self.populate_graph()
def populate_graph(self, graph_name):
    """Create a people/countries graph, delete a few entries, and index it.

    Builds person and country nodes with random attributes, connects them
    with 'visit' edges, deletes some nodes to introduce holes in the
    datablock, and creates indices on both labels.

    :param graph_name: Redis key under which the graph is stored
    :return: the populated Graph (the already-existing global graph when
             the key is present)
    """
    # Declared global so the early return below works: without this,
    # assigning `redis_graph` later in the body made the name local and
    # the quick-return path raised UnboundLocalError.
    global redis_graph

    # quick return if graph already exists
    if redis_con.exists(graph_name):
        return redis_graph

    people = ["Roi", "Alon", "Ailon", "Boaz", "Tal", "Omri", "Ori"]
    visits = [("Roi", "USA"), ("Alon", "Israel"), ("Ailon", "Japan"),
              ("Boaz", "United Kingdom")]
    countries = ["Israel", "USA", "Japan", "United Kingdom"]
    redis_graph = Graph(graph_name, redis_con)
    personNodes = {}
    countryNodes = {}

    # create nodes
    for p in people:
        person = Node(label="person",
                      properties={"name": p,
                                  "height": random.randint(160, 200)})
        redis_graph.add_node(person)
        personNodes[p] = person

    for c in countries:
        country = Node(label="country",
                       properties={"name": c,
                                   "population": random.randint(100, 400)})
        redis_graph.add_node(country)
        countryNodes[c] = country

    # create edges
    for person_name, country_name in visits:
        edge = Edge(personNodes[person_name],
                    'visit',
                    countryNodes[country_name],
                    properties={'purpose': 'pleasure'})
        redis_graph.add_edge(edge)

    redis_graph.commit()

    # delete nodes, to introduce deleted item within our datablock
    query = """MATCH (n:person) WHERE n.name = 'Roi' or n.name = 'Ailon' DELETE n"""
    redis_graph.query(query)

    query = """MATCH (n:country) WHERE n.name = 'USA' DELETE n"""
    redis_graph.query(query)

    # create indices (results were previously bound to an unused local)
    redis_con.execute_command(
        "GRAPH.QUERY", graph_name, "CREATE INDEX ON :person(name, height)")
    redis_con.execute_command(
        "GRAPH.QUERY", graph_name, "CREATE INDEX ON :country(name, population)")

    return redis_graph
def setUpClass(cls):
    """Start a Redis instance and populate the IMDB graph shared by the tests."""
    global redis_graph
    # print(...) works on both Python 2 and 3; the former
    # `print "ImdbFlowTest"` statement was Python-2-only syntax.
    print("ImdbFlowTest")
    cls.r = redis()
    cls.r.start()
    redis_con = cls.r.client()
    redis_graph = Graph(imdb_utils.graph_name, redis_con)
    imdb_utils.populate_graph(redis_con, redis_graph)
def setUpClass(cls):
    """Start Redis, create the regular and dense graphs, and populate both."""
    global redis_graph
    global dense_graph
    global redis_con
    # print(...) works on both Python 2 and 3; the former
    # `print "GraphPersistency"` statement was Python-2-only syntax.
    print("GraphPersistency")
    cls.r = redis()
    cls.r.start()
    redis_con = cls.r.client()
    redis_graph = Graph(GRAPH_NAME, redis_con)
    dense_graph = Graph(DENSE_GRAPH_NAME, redis_con)
    cls.populate_graph()
    cls.populate_dense_graph()
def __init__(self):
    """Open one connection per client and build the shared graph list."""
    super(testConcurrentQueryFlow, self).__init__()
    global graphs
    graphs = [Graph(GRAPH_ID, self.env.getConnection())
              for _ in range(CLIENT_COUNT)]
    self.populate_graph()
def __init__(self):
    """Create the environment and one graph handle per concurrent client."""
    global graphs
    self.env = Env()
    graphs = [Graph(GRAPH_ID, self.env.getConnection())
              for _ in range(CLIENT_COUNT)]
    self.populate_graph()