def create_nodo(self, **kwargs): d = {"created": self.created} for item in kwargs.items(): d.update({item[0]: item[1]}) print "$$$$$$$$$$Diccionario para creacion de nodo$$$$$$$" print d if d["element_type"] == "person": person = graph_db.create(node(d)) person[0].add_labels("User", "Person") m = "Node Created!" print m elif d["element_type"] == "individual": #individual = graph_db.create(node(**d)) individual = graph_db.create( node(element_type=d["element_type"], id=d["id"], chromosome=str(d["chromosome"]), views=d["views"])) individual[0].add_labels("Individuals", "Individual") m = "Node Created!" print m elif d["element_type"] == "Collection": collection = graph_db.create(node(d)) collection[0].add_labels("Collections", "Collection") m = "Node Created!" print m return m
def create_project_graph():
    """Creates a project Graph and stashes it in Neo4j.

    Returns a tuple of (users, projects, relationships), where each item
    is a list of the created data.
    """
    # Create some Users...
    user_nodes = [node(name=t[0], username=t[1]) for t in random_users()]
    users = db.create(*user_nodes)
    for u in users:
        # ...and label them as such.
        u.add_labels("user")
    # Create some Projects.
    project_nodes = [node(name=s) for s in random_projects()]
    projects = db.create(*project_nodes)
    rels = []
    for p in projects:
        # ...and label them as such.
        p.add_labels("project")
        # 1. Give the project a single Owner
        rels.append(rel((p, "OWNED_BY", random.choice(users))))
        # 2. Give the project a random number of contributors.
        # BUG FIX: random.sample raises ValueError when the sample size
        # exceeds the population; clamp it to the number of users.
        n_contributors = min(random.randrange(3, 50), len(users))
        for u in random.sample(users, n_contributors):
            rels.append(rel((u, "CONTRIBUTES_TO", p)))
    # Save the relationships
    rels = db.create(*rels)
    return (users, projects, rels)
def createNode(node_attributes, object_id, objects, elements, graph_db):
    """
    Create in Neo4j the object node and its standalone element nodes
    Returns the object node reference in Neo4j
    """
    # Property name that serves as the unique index key for this object type.
    index_field = objects[object_id][objects_config['index_field']]
    if index_field in node_attributes:
        # Idempotent path: fetch or create through the 'ID' legacy index.
        object_node = graph_db.get_or_create_indexed_node(
            'ID',
            'index_field',
            node_attributes[index_field],
            node_attributes
            #{index_field: node_attributes[index_field]}
        )
        #object_node.set_properties(node_attributes)
    else:
        # No index value available: plain (non-indexed) node creation.
        object_node, = graph_db.create(node(node_attributes))
    object_node.add_labels(objects[object_id][objects_config['label_field']])
    # if this object has standalone elements, create one node per element
    for field_name, value in node_attributes.items():
        if (object_id, field_name) in elements:
            element_attributes = elements[(object_id, field_name)]
            element_attributes[field_name] = value
            element_node, = graph_db.create(node(element_attributes))
            # label the nodes as elements
            element_node.add_labels("Element")
            graph_db.create(rel(object_node, "has element", element_node))
            # link the element node to a ses concept
            linkToSES(element_node, element_attributes, graph_db)
    return object_node
def create_db(usercol, refcol, start, end):
    '''
    try to generate relationships using a different example from the
    fundamentals page
    '''
    graph_db = neo4j.GraphDatabaseService("http://localhost:7474/db/data/")
    # Queue the abstract nodes first, then the relationships, and submit
    # everything in one create() call.
    pending = []
    for idx in range(11, 100):
        pending.append(node(user=usercol[idx]))
        pending.append(node(ref=refcol[idx]))
    for idx in range(11, 100):
        pending.append(rel(start[idx], "RECOMMENDED", end[idx]))
    incubate = graph_db.create(*pending)  # expand the list into create()
    # gives an error Incomplete Read if you try to do the whole thing at
    # once, but it works when done in pieces.
    # not sure if this is really necessary; try with/without format=pretty
    neo4j._add_header('X-Stream', 'true;format=pretty')
def create_nodo(self, **kwargs): d={"created":self.created} for item in kwargs.items(): d.update({item[0]:item[1]}) print "$$$$$$$$$$Diccionario para creacion de nodo$$$$$$$" print d if d["element_type"] == "person": person = graph_db.create(node(d)) person[0].add_labels("User", "Person") m = "Node Created!" print m elif d["element_type"] == "individual": #individual = graph_db.create(node(**d)) individual = graph_db.create(node(element_type=d["element_type"], id=d["id"], chromosome=str(d["chromosome"]), views=d["views"])) individual[0].add_labels("Individuals", "Individual") m = "Node Created!" print m elif d["element_type"] == "Collection": collection = graph_db.create(node(d)) collection[0].add_labels("Collections", "Collection") m = "Node Created!" print m return m
def create(cls, name, *emails):
    """Create a Person node linked from the class root, plus one node
    per email address linked from both the root and the person.

    :returns: a Person wrapping the new node.
    """
    person_node, _ = graph_db.create(
        node(name=name),
        rel(cls._root, "PERSON", 0),
    )
    for address in emails:
        graph_db.create(
            node(email=address),
            rel(cls._root, "EMAIL", 0),
            rel(person_node, "EMAIL", 0),
        )
    return Person(person_node)
def create_operons(self):
    """Parse 'Operons.txt' and create Operon nodes (with name Terms).

    Each data row yields an Operon node, a Term node holding its name,
    and HAS_NAME / PART_OF relationships.
    """
    # use a context manager so the file is closed even on error
    with open(self.directory + 'Operons.txt', 'r') as f:
        data = f.readlines()
    i = 0
    for line in data:
        if line[0] == '#':
            continue
        chunks = line.split('\t')
        # skip incomplete rows.
        # BUG FIX: the original compared chunks[2] == 0, which is never
        # true for a string; compare against '' and '0' instead.
        if chunks[0] == '' or chunks[1] == '' or chunks[2] in ('', '0'):
            continue
        if chunks[3] == '':
            chunks[3] = 'unknown'
        operon, term, term_rel, org_rel = self.connection.create(
            node({'name': chunks[0],
                  'start': int(chunks[1]),
                  'end': int(chunks[2]),
                  'strand': chunks[3],
                  'evidence': chunks[6],
                  'source': 'RegulonDB'}),
            node({'text': chunks[0]}),
            rel(0, 'HAS_NAME', 1),
            rel(0, 'PART_OF', self.ecoli_node))
        operon.add_labels('Operon', 'BioEntity', 'DNA')
        i += 1
    logging.info('%d operons were created!' % i)
def priklad(graph_db):
    """Demo: create (Alice)-[:KNOWS]->(Bob) on a clean database, then
    query the pair back with Cypher and print each row."""
    graph_db.clear()
    # create two nodes and a relationship between them
    alice, bob, ab = graph_db.create(
        node(name="Alice"),
        node(name="Bob"),
        rel(0, "KNOWS", 1)
    )
    # Cypher query plus its parameters
    query = ("START a = node({A}) "
             "MATCH (a)-[:KNOWS]->(b) "
             "RETURN a, b")
    params = {"A": alice.id}

    def print_row(row):
        # row handler: unpack the (a, b) pair and report it
        print("imam ", row)
        a, b = row
        print(a["name"] + " knows " + b["name"])

    x = cypher.execute(graph_db, query, params, row_handler=print_row)
    print(x)
def add_ind_to_col(request, username): global message if request.method == 'POST': if request.user.is_authenticated(): u1 = User.objects.get(username=username) u = User.objects.get(id=u1.id) json_data = json.loads(request.body) col = json_data['userCollection'] ind = json_data['id'] c = Collection.objects.get(id=col) collection_name = c.name #print collection_name itc = Collection_Individual(collection=c, individual_id=ind, added_from=c, from_user=u, date_added=datetime.datetime.now()) itc.save() #Agregar activity stream activity_stream = Activity_stream() usr = request.user.username activity_stream.activity("person", "save", "individual to collection", usr) #Agregar relacion entre individuo y coleccion en la red de grafos collection = GraphCollection() collection_result = collection.get_collection(collection_name) individual = Graph_Individual() individual_result = individual.get_node(ind) nodo1 = node(collection_result[0][0]) nodo2 = node(individual_result[0][0]) relation = Relations() relation.has(nodo1, nodo2) message = "Individual is now added to this collection!" else: message = "No username in evoart!" print "YYYYYYYYYYYYYYY" print col print ind print message data = ({'collection': col, 'individual': ind, 'message': message}) datar = json.dumps(data) return HttpResponse(datar, content_type='application/json')
def create_update_promoters(self):
    """Parse 'All Promoters.txt' and create or update Promoter nodes.

    New promoters get a Term node for their name plus PART_OF links to
    the organism and chromosome; existing ones (matched by TSS) get
    their properties refreshed.
    """
    f = open(self.directory + 'All Promoters.txt', 'r')
    data = f.readlines()
    f.close()
    created, updated = [0]*2
    for line in data:
        # skip comment lines
        if line[0] == '#':
            continue
        regid, name, strand, tss, sigma, seq, evidence = line.split('\t')
        tss = int(tss)
        # skipping incomplete data
        # NOTE(review): tss is already an int here, so '' can never match
        # it in this list -- confirm whether the check was meant pre-cast.
        if '' in [regid, name, strand, tss]:
            continue
        # look for an existing promoter at this TSS on the chromosome
        query = 'MATCH (ch:Chromosome {name: "%s"})<-[:PART_OF]-' \
                '(p:Promoter {tss: %d})-[:PART_OF]->' \
                '(o:Organism {name: "%s"}) ' \
                'RETURN p' % (self.chro_name, tss, self.ecoli_name)
        res = neo4j.CypherQuery(self.connection, query)
        res_nodes = res.execute()
        # creating promoter
        if not res_nodes:
            promoter, term, rel_org, rel_chr, rel_term = self.connection.create(
                node({'name': name, 'start': tss, 'end': tss,
                      'strand': strand, 'tss': tss, 'seq': seq,
                      'evidence': evidence, 'Reg_id': regid,
                      'source': 'RegulonDB'}),
                node({'text': name}),
                rel(0, 'PART_OF', self.ecoli_node),
                rel(0, 'PART_OF', self.chro_node),
                rel(0, 'HAS_NAME', 1))
            promoter.add_labels('Promoter', 'Feature', 'BioEntity', 'DNA')
            term.add_labels('Term')
            created += 1
        else:
            # one promoter with the tss: refresh its properties
            for record in res_nodes.data:
                promoter = record.values[0]
                promoter.update_properties({'seq': seq,
                                            'evidence': evidence,
                                            'Reg_id': regid})
                update_source_property(promoter)
                self.check_create_terms(promoter, name)
                updated += 1
            # duplicates!
            if len(res_nodes.data) > 1:
                logging.warning("There are %d nodes for a promoter with "
                                "tss in the %d position! It was skipped!"
                                % (len(res_nodes.data), tss))
    logging.info("%d promoters were updated!" % updated)
    logging.info("%d promoters were created!" % created)
def test_can_use_return_values_as_references(graph):
    """Batch create() return values can reference earlier batch entries."""
    batch = WriteBatch(graph)
    first = batch.create(node(name="Alice"))
    second = batch.create(node(name="Bob"))
    batch.create(rel(first, "KNOWS", second))
    outcome = batch.submit()
    knows = outcome[2]
    assert isinstance(knows, Relationship)
    assert knows.start_node["name"] == "Alice"
    assert knows.end_node["name"] == "Bob"
def test_can_create_multiple_nodes(self):
    """Nodes can be batch-created from a dict, node(dict) and node(kwargs)."""
    self.batch.create({"name": "Alice"})
    self.batch.create(node({"name": "Bob"}))
    self.batch.create(node(name="Carol"))
    alice, bob, carol = self.batch.submit()
    for person in (alice, bob, carol):
        assert isinstance(person, Node)
    assert alice["name"] == "Alice"
    assert bob["name"] == "Bob"
    assert carol["name"] == "Carol"
def test_unique_constraint():
    """A unique constraint forbids a second labelled node with the same name."""
    graph_db = get_clean_database()
    if graph_db is None:
        return
    town, = graph_db.create(node(name="Taufkirchen"))
    town.add_labels("borough")
    graph_db.schema.add_unique_constraint("borough", "name")
    assert "name" in graph_db.schema.get_unique_constraints("borough")
    duplicate, = graph_db.create(node(name="Taufkirchen"))
    with pytest.raises(ValueError):
        duplicate.add_labels("borough")
def test_unique_constraint():
    """Unique-constraint check using a random label to avoid collisions."""
    graph_db = get_clean_database()
    random_label = uuid4().hex
    town, = graph_db.create(node(name="Taufkirchen"))
    town.add_labels(random_label)
    graph_db.schema.add_unique_constraint(random_label, "name")
    assert "name" in graph_db.schema.get_unique_constraints(random_label)
    duplicate, = graph_db.create(node(name="Taufkirchen"))
    with pytest.raises(ValueError):
        duplicate.add_labels(random_label)
    graph_db.delete(town, duplicate)
def test_create_function(self):
    """node()/rel() casts produce bound objects through a batch create."""
    self.batch.create(node(name="Alice"))
    self.batch.create(node(name="Bob"))
    self.batch.create(rel(0, "KNOWS", 1))
    alice, bob, knows = self.batch.submit()
    assert isinstance(alice, Node)
    assert alice["name"] == "Alice"
    assert isinstance(bob, Node)
    assert bob["name"] == "Bob"
    assert isinstance(knows, Relationship)
    assert knows.start_node == alice
    assert knows.type == "KNOWS"
    assert knows.end_node == bob
    self.recycling = [knows, alice, bob]
def get_or_create_node(graph_db, KEY, VALUE, FULL_NODE=None):
    """Return the node whose property KEY equals VALUE, creating it if absent.

    :param graph_db: py2neo graph service
    :param KEY: property name to match
    :param VALUE: property value to match
    :param FULL_NODE: optional full property dict used when creating
    :returns: the matching/created node, or False when the match is ambiguous
    """
    # BUG FIX: FULL_NODE={} was a shared mutable default; use None sentinel.
    if FULL_NODE is None:
        FULL_NODE = {}
    # NOTE(review): the query is built by string interpolation -- Cypher
    # injection risk if KEY/VALUE are untrusted; consider parameters.
    query = neo4j.CypherQuery(
        graph_db, "MATCH (a) WHERE a.%s = '%s' RETURN a" % (KEY, VALUE))
    results = query.execute()
    if len(results.data) == 1:
        found, = results.data[0].values
        return found
    elif len(results.data) == 0:
        if FULL_NODE:
            created, = graph_db.create(node(FULL_NODE))
        else:
            created, = graph_db.create(node({KEY: VALUE}))
        return created
    else:
        # more than one match is ambiguous
        # (the original trailing `raise Exception` was unreachable)
        return False
def get_or_create_node(graph_db, KEY, VALUE, FULL_NODE=None):
    """Return the node whose property KEY equals VALUE, creating it if absent.

    :param graph_db: py2neo graph service
    :param KEY: property name to match
    :param VALUE: property value to match
    :param FULL_NODE: optional full property dict used when creating
    :returns: the matching/created node, or False when the match is ambiguous
    """
    # BUG FIX: FULL_NODE={} was a shared mutable default; use None sentinel.
    if FULL_NODE is None:
        FULL_NODE = {}
    # NOTE(review): the query is built by string interpolation -- Cypher
    # injection risk if KEY/VALUE are untrusted; consider parameters.
    query = neo4j.CypherQuery(
        graph_db, "MATCH (a) WHERE a.%s = '%s' RETURN a" % (KEY, VALUE))
    results = query.execute()
    if len(results.data) == 1:
        found, = results.data[0].values
        return found
    elif len(results.data) == 0:
        if FULL_NODE:
            created, = graph_db.create(node(FULL_NODE))
        else:
            created, = graph_db.create(node({KEY: VALUE}))
        return created
    else:
        # more than one match is ambiguous
        # (the original trailing `raise Exception` was unreachable)
        return False
def createAnalysis(self, target_uuid=None, target_genid=None, data_link=None, results_uuid=None, description_uuid=None):
    """Create an ANALYSIS node attached to a target; return its new uuid.

    Exactly one of target_uuid / target_genid must be given: the target
    is resolved by genid (BIOENTITY) or by uuid (RAW_DATA).
    """
    if (target_uuid is None) == (target_genid is None):
        raise Exception(
            "Exactly one of the following parameters must be specified: target_uuid, target_genid "
        )
    if target_genid is not None:
        target_node = self._getNodeByGenid(label=BIOENTITY, genid=target_genid)
    else:
        target_node = self._getNodeByUUID(label=RAW_DATA, uuid=target_uuid)
    wbatch = neo4j.WriteBatch(self.gdb)
    params = {'uuid': self._getNewUUID()}
    # only persist the optional attributes that were actually supplied
    for key, value in (('data_link', data_link),
                       ('results_uuid', results_uuid),
                       ('description_uuid', description_uuid)):
        if value is not None:
            params[key] = value
    analysis_node = wbatch.create(node(params))
    wbatch.add_labels(analysis_node, ANALYSIS)
    wbatch.create(rel(target_node, HAS_ANALYSIS, analysis_node))
    wbatch.submit()
    return params['uuid']
def createNodeImageFlickr(self, idf, title, url, datetaken, tags):
    """Create and label an Image node for a Flickr photo.

    :returns: the newly created, bound node.
    """
    properties = {
        "idf": idf,
        "title": title,
        "url": url,
        "datetaken": datetaken,
        "tags": tags,
    }
    image_node, = self.graph_db.create(node(properties))
    image_node.add_labels("Image")
    return image_node
def titan_insert():
    """Insert the Die Hard sample graph in one batch; return elapsed time."""
    start = datetime.now()
    cast = [node(name=n) for n in ("Bruce Willis", "John McClane",
                                   "Alan Rickman", "Hans Gruber",
                                   "Nakatomi Plaza")]
    links = [rel(0, "PLAYS", 1),
             rel(2, "PLAYS", 3),
             rel(1, "VISITS", 4),
             rel(3, "STEALS_FROM", 4),
             rel(1, "KILLS", 3)]
    die_hard = graph_db.create(*(cast + links))
    stop = datetime.now()
    return stop - start
def test_can_cast_node(self):
    """node() applied to a concrete Node returns that bound node."""
    graph_db = neo4j.GraphDatabaseService()
    alice, = graph_db.create({"name": "Alice"})
    result = node(alice)
    assert isinstance(result, neo4j.Node)
    assert not result.is_abstract()
    assert result["name"] == "Alice"
def test_can_cast_node():
    """node() applied to a concrete Node returns that bound node
    (is_abstract is a property in this py2neo version)."""
    graph_db = neo4j.GraphDatabaseService()
    alice, = graph_db.create({"name": "Alice"})
    result = node(alice)
    assert isinstance(result, neo4j.Node)
    assert not result.is_abstract
    assert result["name"] == "Alice"
def restoreWG(wgList):
    """Recreate a labelled Social/WG node for each working-group name."""
    labels = ['Social', 'WG']
    batch = neo4j.WriteBatch(gdb)
    for name in wgList:
        created = batch.create(node(identifier=name))
        batch.add_labels(created, *labels)
    results = batch.submit()
    batch.clear()
def ss(arr, label, parents=None):
    """Create one labelled node per entry of arr; when `parents` is given,
    relate each new node to the matching parent via "realate_to".

    :param arr: iterable of property dicts/abstract nodes
    :param label: label applied to each created node
    :param parents: optional sequence aligned with arr
    """
    # idiom fix: enumerate replaces the hand-maintained index counter
    for i, props in enumerate(arr):
        n, = db().create(node(props))
        n.add_labels(label)
        if parents:
            db().create(rel((n, "realate_to", parents[i])))
def ss(arr, label, parents=None):
    """Create one labelled node per entry of arr; when `parents` is given,
    relate each new node to the matching parent via "realate_to".

    :param arr: iterable of property dicts/abstract nodes
    :param label: label applied to each created node
    :param parents: optional sequence aligned with arr
    """
    # idiom fix: enumerate replaces the hand-maintained index counter
    for i, props in enumerate(arr):
        n, = db().create(node(props))
        n.add_labels(label)
        if parents:
            db().create(rel((n, "realate_to", parents[i])))
def check_create_terms(self, bioentity, name):
    """Attach a Term node carrying `name` to bioentity, unless the entity
    already bears that name itself."""
    if not isinstance(bioentity, gb.neo4j.Node):
        raise TypeError('The node argument must be an object of neo4j.Node class!')
    if bioentity['name'] == name:
        return
    term, rel_pro = self.connection.create(
        node({'text': name}),
        rel(0, 'HAS_NAME', bioentity))
    term.add_labels('Term')
def node(self, id, data):
    """Method which adds node to graph.

    Note: the bare ``node(...)`` call below resolves to the module-level
    py2neo ``node`` factory (method names are not in unqualified scope).
    The ``id`` parameter is accepted but not used by this implementation.

    :param id: identifier of node
    :type id: str
    :param data: name of node
    :type data: str
    """
    abstract = node(name=data)
    self.nodes.append(abstract)
def createNode(nodeValue, sLabel):
    """Create a node carrying name/label *properties* via a WriteBatch.

    NOTE(review): sLabel is stored as a plain property here, not as a
    Neo4j label -- confirm that is intended.

    :returns: 1 on success, 0 when nodeValue is falsy.
    """
    if not nodeValue:
        return 0
    graph_db = neo4j.GraphDatabaseService("http://localhost:7474/db/data/")
    batch = neo4j.WriteBatch(graph_db)
    created = batch.create(node(name=nodeValue, label=sLabel))
    results = batch.submit()
    return 1
def create_update_terminators(self):
    """Parse 'Terminators.txt' and create or update Terminator nodes,
    linking each to its chromosome and transcription unit."""
    f = open(self.directory + 'Terminators.txt', 'r')
    data = f.readlines()
    f.close()
    created, updated, problem = [0]*3
    for line in data:
        # skip comment lines
        if line[0] == '#':
            continue
        # NOTE(review): `type` shadows the builtin; harmless here but
        # worth renaming if this loop ever needs the builtin.
        regid, start, end, strand, seq, tu, type, operon, ref, evidence = line.split('\t')
        start, end = [int(start), int(end)]
        # skipping incomplete data
        if '' in [regid, strand, start, end] or 0 in [start, end]:
            continue
        # look for an existing terminator at this location on the chromosome
        query = 'MATCH (ch:Chromosome {name: "%s"})<-[:PART_OF]-' \
                '(t:Terminator {start: %d, end: %d, strand: "%s"}) ' \
                'RETURN t' % (self.chro_name, start, end, strand)
        res = neo4j.CypherQuery(self.connection, query)
        res_nodes = res.execute()
        # creating terminator
        if not res_nodes:
            terminator, rel_chr = self.connection.create(
                node({'start': start, 'end': end, 'strand': strand,
                      'seq': seq, 'evidence': evidence, 'Reg_id': regid,
                      'source': 'RegulonDB'}),
                rel(0, 'PART_OF', self.chro_node))
            terminator.add_labels('Terminator', 'Feature', 'DNA')
            created += 1
        elif len(res_nodes.data) == 1:
            # exactly one match: refresh its properties in place
            terminator = res_nodes.data[0].values[0]
            terminator.update_properties({'seq': seq,
                                          'evidence': evidence,
                                          'Reg_id': regid})
            update_source_property(terminator)
            updated += 1
        # duplicates!
        else:
            logging.warning("There are %d nodes for a terminator with "
                            "location (%d, %d, %s)! It was skipped!"
                            % (len(res_nodes.data), start, end, strand))
            continue
        # creating relations (:TU)-[:CONTAINS]->(:Terminator)
        rel_tu = self.relation_with_tu(tu, terminator)
        problem = problem + rel_tu
    logging.info('%d terminators were updated!' % updated)
    logging.info('%d terminators were created!' % created)
    if problem > 0:
        logging.warning('There were problems with %d terminators.' % problem)
def test_adding_labels_to_abstract_nodes_raises_unbound_error(graph):
    """add_labels on an abstract (unbound) node must raise BindError."""
    if not graph.supports_node_labels:
        return
    abstract = node(name="Alice")
    try:
        abstract.add_labels("human", "female")
    except BindError:
        assert True
    else:
        assert False
def test_adding_labels_to_abstract_nodes_raises_unbound_error(graph):
    """add_labels on an abstract (unbound) node must raise UnboundError."""
    if not graph.supports_node_labels:
        return
    abstract = node(name="Alice")
    try:
        abstract.add_labels("human", "female")
    except UnboundError:
        assert True
    else:
        assert False
def test_cannot_add_labels_to_abstract_nodes(graph):
    """add_labels on an abstract node must raise TypeError."""
    if not graph.supports_node_labels:
        return
    abstract = node(name="Alice")
    try:
        abstract.add_labels("human", "female")
    except TypeError:
        assert True
    else:
        assert False
def test_cannot_add_labels_to_abstract_nodes():
    """add_labels on an abstract node must raise TypeError."""
    graph_db = neo4j.GraphDatabaseService()
    if not graph_db.supports_node_labels:
        return
    abstract = node(name="Alice")
    try:
        abstract.add_labels("human", "female")
    except TypeError:
        assert True
    else:
        assert False
def setWg(wg):
    """Create a Social/WG node for `wg` and cache its indexed node
    reference in the module-level workingGroup dict."""
    workingGroup['name'] = wg
    labels = ['Social', 'WG']
    batch = neo4j.WriteBatch(gdb)
    created = batch.create(node(identifier=workingGroup['name']))
    batch.add_labels(created, *labels)
    results = batch.submit()
    batch.clear()
    # look the node back up through the auto index and remember it
    workingGroup['nodeid'] = gdb.get_indexed_node('node_auto_index',
                                                  'identifier', wg)
    return
def create_bus_node(bus_data):
    """Create and label a bus node from the given arrival-data dict.

    :returns: the bound node.
    """
    abstract = node(
        tag=bus_data["tag"],
        arrival_time=bus_data["arrival_time"],
        arrival_station=bus_data["arrival_station"],
        arrival_line=bus_data["arrival_line"],
        last_update=current_time_string(),
    )
    created, = graph_db.create(abstract)
    created.add_labels(LABEL_BUS)
    return created
def add_users_batch(users):
    """
    Does a batch insert of user data into the db.

    Each `user` is a sequence whose [0] is the user id and [1] the
    location string.
    """
    batch = neo4j.WriteBatch(DB)
    for user in users:
        # BUG FIX: the dict keys were bare names (user_id, location,
        # is_local), which raises NameError -- they must be string keys
        # (matching the quoted-key style used in add_photos_batch).
        u = {
            'user_id': user[0],
            'location': user[1],
            'is_local': is_user_local(user[1])
        }
        n = batch.create(node(u))
        batch.add_labels(n, 'FlickrUser')
    batch.submit()
def add_users_batch(users):
    """
    Does a batch insert of user data into the db.

    Each `user` is a sequence whose [0] is the user id and [1] the
    location string.
    """
    batch = neo4j.WriteBatch(DB)
    for user in users:
        # BUG FIX: the dict keys were bare names (user_id, location,
        # is_local), which raises NameError -- they must be string keys
        # (matching the quoted-key style used in add_photos_batch).
        u = {
            'user_id': user[0],
            'location': user[1],
            'is_local': is_user_local(user[1])
        }
        n = batch.create(node(u))
        batch.add_labels(n, 'FlickrUser')
    batch.submit()
def _neo_update_user(): global _neo_graph, userid2agent print "neo_update_user ..." all_users = pg_get_all_users() for (u_id, u_dn, u_username) in all_users: query = neo4j.CypherQuery(_neo_graph, "MATCH (ee:Agent) WHERE ee.username = {p_username} RETURN ee;") agent = query.execute_one(p_username = u_username) if agent is None: agent, = _neo_graph.create( \ node(userid = u_id, dn = u_dn, username = u_username)) agent.add_labels("Agent") userid2agent[u_id] = agent
def save_board_to_db(ladders, snakes):
    """
    Creates a single Node, representing a board in database.
    :param ladders: ladder paths for the board
    :param snakes: snake paths for the board
    :return: the internal id of the created board node
    """
    board_abstract = node(snakes=to_hackerrank_paths(snakes),
                          ladders=to_hackerrank_paths(ladders))
    board_node = GRAPH_DB.create(board_abstract)[0]
    board_node.add_labels("board")
    # NOTE(review): relies on the private `_id` attribute of the node
    return board_node._id
def insert(self, personId, friendList):
    """Queue a person node plus FRIEND relations to every listed friend,
    flushing the write batch when it reaches MAXIMUM_BATCH_LENGTH."""
    # get-or-create the person's node through the vk_id index
    id_node = self._batch.get_or_create_in_index(neo4j.Node,
                                                 self._nodes_index,
                                                 "vk_id", personId,
                                                 node(id=personId))
    self._counter += 1
    for friend_id in friendList:
        friend_node = self._batch.get_or_create_in_index(
            neo4j.Node, self._nodes_index, "vk_id", friend_id,
            node(id=friend_id))
        # relationship key is "<min>_<max>" so A-B and B-A dedupe to one rel
        self._batch.get_or_create_in_index(
            neo4j.Relationship, self._rels_index, "rel_id",
            str(min(personId, friend_id)) + "_" + str(max(personId, friend_id)),
            rel(id_node, "FRIEND", friend_node))
        self._counter += 2
    if self._counter >= self.MAXIMUM_BATCH_LENGTH:
        print "NSTDBKeeper: Batch is full, running transaction"
        self._batch.run()
        self._batch.clear()  # Clears all batch requests
        self._counter = 0
def load_batch(rows, graph_db): print "%10d loading %i rows..." % (time(), len(rows)) batch = neo4j.WriteBatch(graph_db) # batch is linked to graph database for row in rows: schedule = row[0] schedule_node = batch.create(node(name=schedule)) batch.add_labels(schedule_node, "Schedule") #batch.get_or_create_indexed_node(SECTION_INDEX, 'name', owner, {'type': 'SCHEDULE', 'name': owner}) #schedule_node, = graph_db.create({'name': owner}) #schedule_node.add_labels("Schedule") batch.run()
def test_can_set_labels_on_node():
    """set_labels replaces the complete label set on a bound node."""
    graph_db = neo4j.GraphDatabaseService()
    if not graph_db.supports_node_labels:
        return
    alice, = graph_db.create(node(name="Alice"))
    alice.add_labels("human", "female")
    current = alice.get_labels()
    assert len(current) == 2
    assert current == set(["human", "female"])
    alice.set_labels("mystery", "badger")
    current = alice.get_labels()
    assert current == set(["mystery", "badger"])
    assert current != set(["human", "female"])
def create_node(
    graph=None,       # type: Optional[py2neo.Graph]
    labels=None,      # type: Optional[Iterable[str]]
    properties=None,  # type: Optional[Mapping[str, Any]]
):
    # type: (...) -> Node
    """Cross-version function to create a node."""
    props = properties or {}
    label_args = labels or []
    # labels are positional, properties keyword, in this py2neo version
    created = py2neo.node(*label_args, **props)
    if graph is not None:
        created = foremost(graph.create(created))
    return created
def get_or_add_node_to_batch(self, node_dict, label_index, batch, stub='', create=True):
    """Given a node dictionary, gets an existing node, or creates a new one
    and returns it.

    Batch node cannot be used for creating relationships in batch
    (use create=True). Assumes node has not been generated based on full
    attributes, it is a stub, so visited is set to False.

    :param dict node_dict: dictionary of properties
    :param str label_index: label for node and index used
    :param BatchObject batch: BatchObject being constructed
    :param str stub: 'True' if being created without full properties
        (i.e., as part of a relationship), otherwise 'False'
    :param bool create: if true the node is created immediately, otherwise
        it is added to the batch; if the node already exists this is ignored
    :rtype Node, BatchObject, or None: a node or BatchObject representing a node
    """
    key = 'permalink'
    value = self.get_permalink(node_dict)
    if value is None:
        return None
    # Attempt to get the node from Neo4j
    anode = self.get_indexed_node(label_index, key, value)
    # If the node was found then update the properties
    if anode:
        node_dict = self.cleanse_properties(node_dict)
        anode.update_properties(node_dict)
    # Node did not already exist, create it or add it to the batch
    if not anode:
        node_properties = self.cleanse_properties(node_dict)
        if create:
            node_properties.update({'visited': 'False', 'stub': stub})
            anode = self.get_or_create_indexed_node(
                label_index, key, value, node_properties)
            anode.add_labels(label_index)
        else:
            # BUG FIX: get_or_create_in_index takes the index *value*
            # before the abstract node; it was missing in the original
            # call (cf. the correct 5-argument usage elsewhere in this
            # codebase).
            anode = batch.get_or_create_in_index(neo4j.Node, label_index,
                                                 key, value,
                                                 node(node_properties))
            batch.add_labels(anode, label_index)
            batch.set_properties(anode, {'visited': 'False', 'stub': stub})
    return anode
def graph_clusters(clusters):
    """Persist clusters as nodes and their pairwise connectivity as
    'LINKS TO' relationships, all in one create() call."""
    graph_db = neo4j.GraphDatabaseService('http://localhost:7474/db/data/')
    nodes = [
        node(title=c.title(), pages=[child.title for child in c.children])
        for c in clusters
    ]
    relations = []
    total = len(clusters)
    for i in range(0, total):
        for j in range(i + 1, total):
            if clusters[i].connects_to(clusters[j]):
                print('%s connects to %s'
                      % (clusters[i].title(), clusters[j].title()))
                relations.append(rel(i, 'LINKS TO', j))
    graph_db.create(*(nodes + relations))
def test_can_remove_labels_from_node(graph):
    """remove_labels strips labels one at a time down to the empty set."""
    if not graph.supports_node_labels:
        return
    alice, = graph.create(node(name="Alice"))
    alice.add_labels("human", "female")
    current = alice.get_labels()
    assert len(current) == 2
    assert current == set(["human", "female"])
    alice.remove_labels("human")
    current = alice.get_labels()
    assert current == set(["female"])
    assert current != set(["human", "female"])
    alice.remove_labels("female")
    assert alice.get_labels() == set()
def createRawData(self, target_genid=None, data_link=None):
    """Create a RAW_DATA node (optionally linked to its bioentity).

    :returns: the uuid assigned to the new node.
    """
    wbatch = neo4j.WriteBatch(self.gdb)
    params = {'uuid': self._getNewUUID()}
    if data_link is not None:
        params['data_link'] = data_link
    raw_data_node = wbatch.create(node(params))
    wbatch.add_labels(raw_data_node, RAW_DATA)
    if target_genid:
        # attach the raw data to its originating bioentity
        target_node = self._getNodeByGenid(label=BIOENTITY, genid=target_genid)
        wbatch.create(rel(target_node, HAS_RAW_DATA, raw_data_node))
    wbatch.submit()
    return params['uuid']
def test_can_remove_labels_from_node():
    """remove_labels strips labels one at a time down to the empty set."""
    graph_db = neo4j.GraphDatabaseService()
    if not graph_db.supports_node_labels:
        return
    alice, = graph_db.create(node(name="Alice"))
    alice.add_labels("human", "female")
    current = alice.get_labels()
    assert len(current) == 2
    assert current == set(["human", "female"])
    alice.remove_labels("human")
    current = alice.get_labels()
    assert current == set(["female"])
    assert current != set(["human", "female"])
    alice.remove_labels("female")
    assert alice.get_labels() == set()
def test_can_set_labels_on_node(graph):
    """set_labels replaces the label set and find() honours the new label."""
    if not graph.supports_node_labels:
        return
    graph.clear()
    alice, = graph.create(node(name="Alice"))
    alice.add_labels("human", "female")
    current = alice.get_labels()
    assert len(current) == 2
    assert current == {"human", "female"}
    alice.set_labels("mystery", "badger")
    current = alice.get_labels()
    assert current == {"mystery", "badger"}
    assert current != {"human", "female"}
    assert list(graph.find("badger")) == [alice]
    assert list(graph.find("badger", "name", "Alice")) == [alice]
def add_photos_batch(photos, idx_name):
    """ Does a batch insert of photo data into the db. """
    batch = neo4j.WriteBatch(DB)
    for i, photo in enumerate(photos):
        db_photo = {
            'lat': photo['latitude'],
            'lon': photo['longitude'],
            'title': photo['title'],
            'time': photo['time'],
            'user': photo['user'],
            'photo_id': photo['photo_id'],
            'origin': 'flickr'
        }
        photo_node = batch.create(node(db_photo))
        batch.add_labels(photo_node, 'FlickrPhoto')
        # NOTE(review): indexed under literal key/value 'k'/'v' -- confirm
        # this is the intended index entry.
        batch.add_to_index(neo4j.Node, idx_name, 'k', 'v', photo_node)
    batch.submit()
def user_node(id):
    """Return (and memoise in the module-level `usr_node`) the Neo4j node
    for user `id`, creating and labelling it if it does not exist.
    Duplicate matches beyond the first are deleted."""
    global usr_node
    if usr_node:
        # already resolved once this process: reuse the cached node
        return usr_node
    else:
        # look up existing User nodes carrying this uid
        l = [
            neo4j.Node(path)
            for path in db().find(
                "User", property_key="uid", property_value=id)
        ]
        if len(l):
            usr_node = l[0]
            # clean up duplicate nodes beyond the first
            if len(l) > 1:
                for k in l[1:]:
                    k.delete()
        else:
            # no match: create and label a fresh node
            usr_node, = db().create(node({"uid": id}))
            usr_node.add_labels("User")
        return usr_node
def load_nodes_from_file(self, filename):
    """Load nodes from a CSV file into Neo4j through one WriteBatch.

    Each data row becomes a node whose properties are named by the
    file's field names; it is indexed by 'name' and labelled by 'type'.
    """
    batch = neo4j.WriteBatch(graph_db)
    num_nodes = 0
    field_names = self.generate_node_indeces(filename)
    with open(filename, mode="r") as infile:
        reader = csv.reader(infile)
        for row in reader:
            # BUG FIX: header rows (row[1] == "name") previously only
            # skipped building the property dict, then crashed on
            # new['type'] below; skip them entirely.
            if row[1] == "name":
                continue
            new = {}
            for i in range(0, len(row)):
                new[field_names[i]] = row[i].decode('utf-8').strip()
            new_node = batch.create(node(new))
            index = graph_db.get_index(neo4j.Node, new['type'])
            batch.add_indexed_node(index, "name", new['name'].strip(), new_node)
            batch.add_labels(new_node, new['type'])
            num_nodes += 1
    info("Loaded: " + str(num_nodes) + " nodes.")
    batch.submit()
def create_node(
    graph=None,       # type: Optional[py2neo.Graph]
    labels=None,      # type: Optional[Iterable[str]]
    properties=None,  # type: Optional[Mapping[str, Any]]
):
    # type: (...) -> Node
    """Cross-version function to create a node."""
    props = properties or {}
    # py2neo v1 cannot label an abstract node, so labelling needs a graph
    if labels and graph is None:
        raise TypeError('Parameter "graph" is required for py2neo v1 with'
                        ' labels')
    created = py2neo.node(props)
    if graph is not None:
        created = foremost(graph.create(created))
        if labels:
            created.add_labels(*labels)
    return created