def insertarData(self, noticia, data):
    """Link the "Noticia" titled ``noticia`` to its publication date.

    Creates a "Fecha" node ("YYYY-M-D") and a "MesAnio" node
    ("<Month> <year>", Spanish month names) when they do not already
    exist, wiring Fecha -publicadaEl-> Noticia and
    MesAnio -contiene-> Fecha.  ``data`` is a date-like object exposing
    ``year``/``month``/``day``.
    """
    matcher = NodeMatcher(self.graph)
    noticiarel = matcher.match("Noticia", title=noticia).first()
    date_str = "{}-{}-{}".format(data.year, data.month, data.day)
    dataInDB = matcher.match("Fecha", data=date_str).first()
    dataRel = Node("Fecha", data=date_str)
    tx = self.graph.begin()
    if dataInDB is None:
        tx.create(dataRel)
        publicadaEl = Relationship(dataRel, "publicadaEl", noticiarel)
        tx.create(publicadaEl)
        mesos = [
            'Enero', 'Febrero', 'Marzo', 'Abril', 'Mayo', 'Junio', 'Julio',
            'Agosto', 'Setiembre', 'Octubre', 'Noviembre', 'Diciembre'
        ]
        mes_str = "{} {}".format(mesos[data.month - 1], data.year)
        # BUG FIX: the month node is created with label "MesAnio", but the
        # lookup previously used label "Anio", so it never matched and a
        # duplicate month node was created on every insert.
        mesInDB = matcher.match("MesAnio", data=mes_str).first()
        mesRel = Node("MesAnio", data=mes_str)
        if mesInDB is None:
            contiene = Relationship(mesRel, "contiene", dataRel)
            tx.create(contiene)
        else:
            contiene = Relationship.type("contiene")
            tx.merge(contiene(mesInDB, dataRel))
    else:
        # Date already known: just (idempotently) link it to the news item.
        publicadaEl = Relationship.type("publicadaEl")
        tx.merge(publicadaEl(dataInDB, noticiarel))
    tx.commit()
def insertarNoticia(self, noticia, retweets, propietari, plataforma):
    """Create a "Noticia" node and link it to its writer and platform.

    Existing "Redactor" and "Plataforma" nodes are reused via merge;
    otherwise fresh ones are created inside the same transaction.
    """
    news_node = Node("Noticia", title=noticia, retweets=retweets)
    tx = self.graph.begin()
    tx.create(news_node)
    matcher = NodeMatcher(self.graph)
    existing_writer = matcher.match("Redactor", redactor=propietari).first()
    fresh_writer = Node("Redactor", redactor=propietari)
    existing_platform = matcher.match("Plataforma", plataforma=plataforma).first()
    fresh_platform = Node("Plataforma", plataforma=plataforma)
    if existing_writer is None:
        tx.create(Relationship(news_node, "redactadaPor", fresh_writer))
    else:
        redactadaPor = Relationship.type("redactadaPor")
        tx.merge(redactadaPor(news_node, existing_writer))
    if existing_platform is None:
        tx.create(Relationship(news_node, "disponibleEn", fresh_platform))
    else:
        disponibleEn = Relationship.type("disponibleEn")
        tx.merge(disponibleEn(news_node, existing_platform))
    tx.commit()
def file1(self):
    """Load rows of ``self.csv_file1`` into the graph.

    Each non-empty row is read as (title, author, institute); the three
    nodes are merged on their name property, then author_of and
    institute_of relationships are merged between them.
    """
    csv_list1 = list(self.csv_file1)
    for i in range(1, len(csv_list1)):  # row 0 is the header
        if len(csv_list1[i]) != 0:
            print(csv_list1[i])
            # Create the title entity node
            Title = Node('title', Title_name=csv_list1[i][0])
            # Merge the node into the database
            self.graph.merge(Title, 'title', 'Title_name')
            # Create the author entity node
            Author = Node('author', Author_name=csv_list1[i][1])
            # Merge the node into the database
            self.graph.merge(Author, 'author', "Author_name")
            # Create the institute entity node
            Institute = Node('institute', Institute_name=csv_list1[i][2])
            # Merge the node into the database
            self.graph.merge(Institute, 'institute', "Institute_name")
            # Relationship type classes
            author_of = Relationship.type("author_of")
            institute_of = Relationship.type("institute_of")
            # Merge the entities and relationships into the graph
            self.graph.merge(author_of(Title, Author), "title", "Title_name")
            self.graph.merge(institute_of(Author, Institute), "title", "Title_name")
def insert_relationship(g, people_data):
    """Create Person->Movie relationships in graph ``g`` from credit rows.

    Each row must provide "nconst" (person id), "tconst" (movie id) and
    "category" (actor / actress / director / producer).  Duplicate
    (person, movie) pairs are inserted only once, and the transaction is
    committed every 1000 rows to bound its size.
    """
    nodes = NodeMatcher(g)
    tx = g.begin()
    RELATIONSHIPS = {
        "actor": Relationship.type("ACT_IN"),
        "actress": Relationship.type("ACT_IN"),
        "director": Relationship.type("DIRECT"),
        "producer": Relationship.type("PRODUCE"),
    }
    visited = set()
    # enumerate() instead of range(len(...)) — same order, clearer access.
    for i, row in enumerate(people_data):
        person_id, movie_id = row["nconst"], row["tconst"]
        if (person_id, movie_id) not in visited:
            print(f"{i} Insert Relation: {person_id},{movie_id}")
            visited.add((person_id, movie_id))
            person = nodes.match("Person", id=person_id).first()
            movie = nodes.match("Movie", id=movie_id).first()
            this_relation = row["category"]
            if person and movie:
                tx.create(RELATIONSHIPS[this_relation](person, movie))
            else:
                # One endpoint is missing from the graph; skip this row.
                print(f"{i} Skip: {person_id},{movie_id}")
        if i % 1000 == 0:
            print("Committing ...")
            tx.commit()
            tx = g.begin()
    tx.commit()
def link_project_nodes(relationships):
    """Attach each Project node to its Platform, Language, Status and License.

    ``relationships`` is an iterable of dict-like rows keyed by 'ID',
    'Platform', 'Language', 'Status' and 'Licenses'; None values are skipped.
    """
    gc = util.GraphConnector()
    graph = gc.connector
    # (row key, relationship type name, target node label)
    link_specs = (
        ('Platform', 'IS_IN_PLATFORM', 'Platform'),
        ('Language', 'WRITTEN_IN', 'Language'),
        ('Status', 'STATUS', 'Status'),
        ('Licenses', 'HAS_LICENSE', 'License'),
    )
    for relationship in relationships:
        project = graph.nodes.match("Project",
                                    project_id=relationship['ID']).first()
        for row_key, rel_name, target_label in link_specs:
            if relationship[row_key] is not None:
                rel_type = Relationship.type(rel_name)
                target = graph.nodes.match(
                    target_label, name=relationship[row_key]).first()
                graph.create(rel_type(project, target))
def new_arp_neighbor_rel(self, Router):
    """Graph the ARP neighbors of ``Router``.

    For every ARP entry seen on a known interface this creates/merges an
    Ip_address node, links it to the interface with SEEN_BY_ARP, and
    classifies the neighbor by MAC OUI prefix into a Server / Firewall
    node (CONFIGURED_IN edge) or, when discovery info is present, into a
    neighbor Router handled by self.arp_relationship().
    """
    arp_neighbor = self.snmp2json[self.NetElement][u'L3Domain'][u'ARP_Neighbors']
    for int_id in arp_neighbor:
        if ('Int_Id:' + int_id) not in self.interfaces:
            continue
        int_bw = self.interfaces['Int_Id:' + int_id]['ifSpeed']
        int_name = self.interfaces['Int_Id:' + int_id]['ifDescr']
        for entry in arp_neighbor[int_id]:
            ip_addr = entry[0]
            mac_addr = entry[1]
            neigh_info = entry[2]
            # BUG FIX: the original tested "'127.0.0.' in entry", an
            # element-equality check against the (ip, mac, info) sequence
            # that never matched; test the IP string itself instead so
            # loopback addresses are actually skipped.
            if '127.0.0.' in ip_addr:
                continue
            ip = Node('Ip_address', status='Active', address=ip_addr)
            ip.__primarylabel__ = "Ip_address"
            ip.__primarykey__ = "address"
            if not self.find_ip_addr(ip_addr):
                graph.create(ip)
            # NOTE(review): CREATE UNIQUE is deprecated in newer Neo4j;
            # kept as-is since the server version is unknown here.
            query = '''
                MATCH (i:Interface)-[:BELONGS_TO]->(r:Router), (ip:Ip_address)
                WHERE (r.hostname = {hostname} and i.int_id = {int_id}) and ip.address = {ip_addr}
                CREATE UNIQUE (i)-[:SEEN_BY_ARP]->(ip)
            '''
            graph.run(query, hostname=self.hostname, int_id=int_id,
                      ip_addr=ip_addr)
            # BUG FIX: the original used ("a" or "b" or "c") in mac_addr,
            # which evaluates to just "a" and therefore only ever tested
            # the first prefix; check every prefix explicitly.
            if any(p in mac_addr for p in ('0:50:56', '0:c:29', '0:25:b3')):
                server = Node('Server', MAC=mac_addr, property='VmWare_VM')
                server.__primarylabel__ = "Server"
                server.__primarykey__ = "MAC"
                CONFIGURED_IN = Relationship.type('CONFIGURED_IN')
                graph.merge(CONFIGURED_IN(ip, server))
            elif '0:9:f:' in mac_addr:
                firewall = Node('Firewall', MAC=mac_addr, property='FortiNet')
                firewall.__primarylabel__ = 'Firewall'
                firewall.__primarykey__ = 'MAC'
                CONFIGURED_IN = Relationship.type('CONFIGURED_IN')
                graph.merge(CONFIGURED_IN(ip, firewall))
                self.arp_relationship(Router, firewall, int_name, int_bw)
            elif any(p in mac_addr for p in ('5c:f3:fc', '0:15:17')):
                if re.search('5c:f3:fc', mac_addr):
                    server = Node('Server', MAC=mac_addr, property='IBM_SERVER')
                else:
                    server = Node('Server', MAC=mac_addr, property='INTEL_SERVER')
                server.__primarylabel__ = "Server"
                server.__primarykey__ = "MAC"
                CONFIGURED_IN = Relationship.type('CONFIGURED_IN')
                graph.merge(CONFIGURED_IN(ip, server))
            elif 'message' not in neigh_info:
                # CDP/LLDP-style neighbor info: graph it as a Router.
                hostname = neigh_info[u'hostname']
                dev_info = neigh_info[u'description']
                print(hostname, self.hostname, self.NetElement)
                arp_router = Node('Router', hostname=hostname, devinfo=dev_info)
                arp_router.__primarylabel__ = "Router"
                arp_router.__primarykey__ = "hostname"
                self.arp_relationship(Router, arp_router, int_name, int_bw,
                                      hostname)
def create_relationships(self, acquisitions, competitors):
    """Merge MergedBy / CompeteWith edges from company dicts to the parent node.

    Does nothing when no parent company node has been created yet.
    """
    if self.parent_company_node is None:
        return
    for rel_name, companies in (('MergedBy', acquisitions),
                                ('CompeteWith', competitors)):
        rel_type = Relationship.type(rel_name)
        for company, value in companies.items():
            node = self.create_company_node(company, value)
            self.graph.merge(rel_type(node, self.parent_company_node),
                             'company', 'title')
def director_details(searchType, id):
    """Render the detail page for a director, lazy-loading their movies.

    On first visit (the person node's "load" flag is falsy) the filmography
    is fetched from TMDb, stored as "movie" nodes with genre and directs
    links, and the person is flagged loaded; later visits read the movies
    straight from the graph.
    """
    movies = []
    matcher = NodeMatcher(graph)
    director = matcher.match("person").where("_.id=" + id).first()
    if not director["load"]:
        movies = api_search_by_director(id)
        for p in movies:
            movie = matcher.match("movie").where("_.id=" + str(p["id"])).first()
            if movie is None:  # was "== None" — compare to None with "is"
                mov = tmdb.Movies(p["id"])
                response = mov.info()
                movie = Node("movie",
                             original_title=response["original_title"],
                             id=p["id"],
                             release_date=response["release_date"],
                             poster_path=p["poster_path"],
                             vote_count=p["vote_count"],
                             vote_average=p["vote_average"],
                             load=False)
                graph.create(movie)
                for g in p["genre_ids"]:
                    genre = matcher.match("genre").where("_.id=" + str(g)).first()
                    belongs_to = Relationship.type("belongs_to")
                    graph.create(belongs_to(movie, genre))
            directs = Relationship.type("directs")
            graph.create(directs(director, movie))
        director["load"] = True
        graph.push(director)
    else:
        # Parameterized query instead of %-interpolation (avoids Cypher
        # injection and quoting bugs).
        query = "MATCH (p:person)-[a:directs]->(m:movie) WHERE p.id=$id RETURN m"
        for a in graph.run(query, id=director["id"]).data():
            movies.append(a["m"])
    return render_template('ActorDetail.html', actor=director,
                           peliculas=movies, len=len(movies),
                           searchType=searchType)
def create_node_relationship(node_a, node_b, leg, graph_object):
    """Merge two Airport nodes and a CONNECTION edge (airline ``leg``).

    ``node_a`` / ``node_b`` are dicts keyed by the source column names
    used below; airports are de-duplicated on their ``name`` property.
    """
    def _airport(data):
        # Build an Airport node keyed on its name for merge de-duplication.
        n = Node("Airport", lat=data["latitude"], lon=data["longitude"],
                 name=data["name"], location=data["location"],
                 state=data["state"], country=data["country"],
                 start_date=data["start-date"], code=data["code"])
        # BUG FIX: __primarylabel__ must be the node LABEL ("Airport"),
        # not the key property name — the original set it to "name", so
        # merge matched against a non-existent :name label and duplicated
        # airports instead of reusing them.
        n.__primarylabel__ = "Airport"
        n.__primarykey__ = "name"
        return n

    a = _airport(node_a)
    b = _airport(node_b)
    connection = Relationship.type("CONNECTION")
    graph_object.merge(connection(a, b, airline=leg))
def api_search_movie(movie_title):
    """Search TMDb for ``movie_title`` and cache the result movies in the graph.

    Poster paths are normalised first (placeholder image when missing,
    full TMDb image URL otherwise); then every result movie not yet in
    the graph is created as a "movie" node linked to its genre nodes.
    Returns the raw TMDb response dict.
    """
    search = tmdb.Search()
    response = search.movie(query=movie_title)
    for s in search.results:
        # NOTE(review): comparing against the string "None" looks odd but
        # may cover TMDb serialising nulls as strings — confirm before
        # simplifying to a plain None check.
        if (s["poster_path"] == "None" or s["poster_path"] is None):
            s["poster_path"] = "https://www.theprintworks.com/wp-content/themes/psBella/assets/img/film-poster-placeholder.png"
        else:
            s["poster_path"] = "https://image.tmdb.org/t/p/w220_and_h330_face" + s["poster_path"]
    matcher = NodeMatcher(graph)
    for p in response["results"]:
        movie = matcher.match("movie").where("_.id=" + str(p["id"])).first()
        if (movie is None):
            # Not cached yet: create the movie node (load=False marks that
            # its full details have not been fetched).
            movie = Node("movie", original_title=p["title"], id=p["id"],
                         release_date=p["release_date"],
                         poster_path=p["poster_path"],
                         vote_count=p["vote_count"],
                         vote_average=p["vote_average"], load=False)
            graph.create(movie)
            # Link the new movie to each of its genre nodes.
            for g in p["genre_ids"]:
                genre = matcher.match("genre").where("_.id=" + str(g)).first()
                belongs_to = Relationship.type("belongs_to")
                graph.create(belongs_to(movie, genre))
    return response
def arp_relationship(self, Router, arp_neighbor, int_name, int_bw, hostname='hostname'):
    """Merge an ARP_NEIGHBORS edge from ``Router`` to ``arp_neighbor``.

    The interface name and bandwidth are stored as relationship
    properties.  ``hostname`` is used only in the error message.
    """
    ARP_NEIGHBORS = Relationship.type("ARP_NEIGHBORS")
    try:
        # (graph.merge returns None; the original bound it to an unused local)
        graph.merge(ARP_NEIGHBORS(Router, arp_neighbor,
                                  int_name=int_name, int_bw=int_bw))
    except IndexError:
        # Best effort: report which host failed and keep going.
        print ("Error in ", hostname)
def create_naics_tree(self):
    """Build a NAICS code hierarchy in the graph from 'NAICS-Codes.txt'.

    Ranged codes like "44-45" are expanded to one Subclass node per code,
    each attached directly to the Root node.  Plain codes are attached to
    the nearest existing numeric-prefix ancestor, or to Root when none
    exists.
    """
    df = pd.read_csv('NAICS-Codes.txt', sep='\t')
    root_node = Node('Root', title='Root')
    subClassOf = Relationship.type("subClassOf")
    for index, row in df.iterrows():
        code = row['Codes']
        if '-' in code:
            # Expand e.g. "44-45" into the individual sector codes.
            start_code, end_code = map(int, code.split('-'))
            for code in range(start_code, end_code + 1):
                child = Node('Subclass', title=row['Titles'], naics_code=code)
                self.graph.merge(subClassOf(child, root_node), 'Subclass', 'naics_code')
            continue
        code = int(row['Codes'])
        child = Node('Subclass', title=row['Titles'], naics_code=code)
        parent = None
        if len(str(code)) > 2:
            # Strip one trailing digit at a time to find the nearest
            # already-inserted ancestor code.
            for i in range(len(str(code))):
                parent = self.matcher.match('Subclass', naics_code=code // (10**(i + 1))).first()
                if parent is not None:
                    break
        if parent is None:
            # NOTE(review): merge keys differ between branches
            # (('Root','title') here vs ('Subclass','naics_code') in the
            # range branch above) — confirm this asymmetry is intended.
            self.graph.merge(subClassOf(child, root_node), 'Root', 'title')
        else:
            self.graph.merge(subClassOf(child, parent), 'Subclass', 'naics_code')
def store_db(self, item):
    """Persist a scraped item: relations to the graph, documents to disk.

    HRPolicyRelationItem rows become Person -[RELATIONSHIP]-> Policy
    edges (one per comma-separated person name); DocumentItem rows are
    written out as "<policyname>.txt" under ``path``.
    """
    if isinstance(item, HRPolicyRelationItem):
        policy = Node("Policy", name=item['policyname'])
        relationshiptext = item['relationship'].upper()  # relationship text
        personname = item['name'].strip()
        if "(view" in personname:
            # Drop the trailing "(view…" suffix (last 6 characters).
            personname = personname[:-6]
        for personname in personname.split(","):  # names are comma-separated
            personname = personname.strip()
            person = Node("Person", name=personname)
            # Turn the text into a relationship type
            relationship = Relationship.type(relationshiptext)
            # Merge nodes together with the relationship
            graph.merge(relationship(person, policy), "Node", "name")
    elif isinstance(item, DocumentItem):
        # BUG FIX: use a context manager so the handle is closed even when
        # the write raises (also avoids shadowing the legacy builtin name
        # "file").
        with open(path + os.sep + item['policyname'] + ".txt", "w") as out:
            out.write(item['document'])
def new_interfaces_node(self, router):
    """Create an Interface node per SNMP interface and link each to ``router``.

    Interfaces are keyed by a UNIQUENESS string (MAC + hostname + ifIndex)
    so physically distinct ports never collide on MAC alone.  Finishes by
    graphing the device's IP addresses via self.new_ip_addr_node().
    """
    self.interfaces = self.snmp2json[self.NetElement][u'Interfaces']
    for i in self.interfaces:
        int_id = i.replace('Int_Id:', '')
        # Pull the attributes we graph out of the per-interface SNMP dict.
        # NOTE(review): if a key is ever missing, the matching local keeps
        # its value from the previous iteration (or is unbound on the
        # first) — confirm the collector always emits all six keys.
        for j in self.interfaces[i]:
            if j == 'ifPhysAddress':
                if_mac_address = self.interfaces[i][j]
                if_uniqueness = if_mac_address + '_' + self.hostname + '_' + int_id
            elif j == 'ifDescr':
                if_name = self.interfaces[i][j]
            elif j == 'ifOperStatus':
                if_op_status = self.interfaces[i][j]
            elif j == 'ifAdminStatus':
                if_adm_status = self.interfaces[i][j]
            elif j == 'ifSpeed':
                if_speed = self.interfaces[i][j]
            elif j == 'ifMtu':
                if_mtu = self.interfaces[i][j]
        interface = Node('Interface', name = if_name, int_id = int_id,
                         MAC = if_mac_address, UNIQUENESS= if_uniqueness,
                         AdmStatus = if_adm_status, OpStatus = if_op_status,
                         Speed = if_speed, MTU = if_mtu)
        interface.__primarylabel__ = "Interface"
        interface.__primarykey__ = "UNIQUENESS"
        BELONGS_TO = Relationship.type('BELONGS_TO')
        rel_router_int = graph.merge(BELONGS_TO(interface,router))
    self.new_ip_addr_node()
def add_triples_to_neo4j_db(triples):
    """Merge (subject, object, predicate) triples into a local Neo4j graph.

    Each triple is (person_name, entity_name, relation_type); Person
    nodes are de-duplicated on their name property by the merge.
    """
    graph = Graph("bolt://localhost:7687", auth=("neo4j", "eragold"))
    for triple in triples:
        subj = Node("Person", name=triple[0])
        obj = Node("Entity", name=triple[1])
        # Renamed from "re", which shadowed the stdlib regex module.
        rel = Relationship.type(triple[2])(subj, obj)
        graph.merge(rel, 'Person', 'name')
def link_dep_nodes(dependency):
    """Create a DEPENDENT_ON edge between two project major versions.

    The major versions are parsed out of the free-text version fields;
    the row is silently skipped when either cannot be parsed or either
    Version node is missing from the graph.
    """
    gc = util.GraphConnector()
    graph = gc.connector
    dependent_version_parsed = re.findall("\\b\\d+\\b",
                                          dependency['Version Number'])
    if len(dependent_version_parsed) != 0:
        dependent_prj_major_version = dependent_version_parsed[0]
    else:
        return
    dependency_version_parsed = re.findall(
        "\\b\\d+\\b", dependency['Dependency Requirements'])
    if len(dependency_version_parsed) != 0:
        dependency_prj_major_version = dependency_version_parsed[0]
    else:
        return
    dependent_on = Relationship.type("DEPENDENT_ON")
    dependent_version = graph.nodes.match(
        "Version",
        id=dependency['Project ID'] + '_' + dependent_prj_major_version).first()
    dependency_version = graph.nodes.match(
        "Version",
        id=dependency['Dependency Project ID'] + '_' +
        dependency_prj_major_version).first()
    if (dependent_version is not None and dependency_version is not None):
        # BUG FIX: the original assigned these values as attributes of the
        # *class* returned by Relationship.type(), so they never became
        # properties of the stored relationship.  Pass them as keyword
        # properties on the instance instead.
        graph.create(dependent_on(
            dependent_version, dependency_version,
            dependency_type=dependency['Dependency Kind'],
            is_optional=dependency['Optional Dependency'],
            dependency_platform=dependency['Dependency Platform'],
            dependency_name=dependency['Dependency Name']))
def graph_member(self, member):
    """Create a graph node for a member in GCP and its iam_binding edges.

    For every binding in ``member.sa_scopes`` the bound Org / project /
    resource node is looked up by (label=key, name=value) and, when
    found, linked to the member node; missing targets are logged and
    skipped.  Exits the process if the graph service is unreachable.
    """
    logging.info("Graphing member " + member.name)
    node = Node("member", type=member.type, name=member.name)
    node.__primarylabel__ = "member"
    node.__primarykey__ = "name"
    for binding in member.sa_scopes:
        # 1. Parse bindings for the service account user permissions
        for key in binding.keys():
            matcher = NodeMatcher(self.graph)
            location_node = matcher.match(key, name=binding[key]).first()
            # 2. Find the Org, project, or resource node at the binding level
            if location_node is None:
                logging.warning("No node found for {0} : {1}".format(
                    key, binding[key]))
                logging.warning("{0} not mapped".format(member.name))
            if location_node:
                # 3. Create relationship with the resource node(s) and the member node
                relationship = Relationship.type("iam_binding")
                try:
                    self.graph.merge(relationship(node, location_node))
                except ConnectionRefusedError as e:
                    logging.error(str(e))
                    logging.error(
                        "Unable to connect to the graph service, exiting.")
                    sys.exit()
def createDB(dictionary, bookName):
    """Build a heading hierarchy for a book in the graph.

    ``dictionary`` maps key tuples (parent_name, name, relation_name,
    node_type) to (content, code) values.  A central "MainTitle" node is
    created for the book, then each entry becomes a typed node linked to
    its parent (looked up by the parent node type implied by its own
    type).
    """
    temp2NodeType = ""
    bookTitleNode = Node("MainTitle", name=bookName)  # Creating central node
    graph.create(bookTitleNode)
    for keys, values in dictionary.items():
        if keys[3] == "":
            continue
        # Creating a node with a given relation type and name
        temp1 = Node(keys[3], name=keys[1])
        temp1['Content'] = values[0]
        temp1['Code'] = values[1]
        # Map this node's type to the node type of the parent half of
        # the relation.
        # NOTE(review): when keys[3] is none of the four types below,
        # temp2NodeType keeps its value from the previous iteration —
        # confirm that fall-through is intended.
        if keys[3] == "MainHeading":
            temp2NodeType = "MainTitle"
        elif keys[3] == "Heading":
            temp2NodeType = "MainHeading"
        elif keys[3] == "SubHeading":
            temp2NodeType = "Heading"
        elif keys[3] == "SubSubHeading":
            temp2NodeType = "SubHeading"
        matcher = NodeMatcher(graph)
        # Finding the other half of the relation in the graph
        temp2 = matcher.match(temp2NodeType, name=str(keys[0])).first()
        temp_var = Relationship.type(keys[2])
        relation = temp_var(temp1, temp2)
        graph.create(relation)  # Creating a relationship between the two
def _merge_relationship(self, relationship_type, from_person, to_person):
    """Merge a typed relationship between two Person nodes and push it."""
    rel_class = Relationship.type(relationship_type.name)
    start_node = Node(Person.label(), **from_person.as_dict())
    end_node = Node(Person.label(), **to_person.as_dict())
    edge = rel_class(start_node, end_node)
    tx = self._graph.begin()
    tx.merge(edge, Person.label(), Person.key())
    tx.push(edge)
    tx.commit()
def dynamic_neighbors(self, router):
    """Graph OSPF and BGP neighbor state for ``router``.

    When OSPF info is present, the router id is pushed onto the router
    node, each area becomes an Ospf_Area node (PART_OF edge) and each
    neighbor an Ip_address node (OSPF_NEIGHBOR edge carrying state).
    When BGP info is present, the local AS is pushed and each peer
    becomes an Ip_address node with a BGP_PEER edge.
    """
    ospf_info = self.snmp2json[self.NetElement][u'L3Domain'][u'OSPF_INFO']
    bgp_info = self.snmp2json[self.NetElement][u'L3Domain'][u'BGP_INFO']
    if ospf_info['Router_Id']:
        # Record the OSPF router id on the router node itself.
        router['Router_Id'] = ospf_info['Router_Id']
        graph.push(router)
        ospf_areas = ospf_info['Areas']
        ospf_neigh = ospf_info['Neighbors']
        for area in ospf_areas:
            area_id = ospf_areas[area]['AreaId']
            area_status = ospf_areas[area]['AreaStatus']
            area_auth_type = ospf_areas[area]['AuthType']
            ospf_area_node = Node('Ospf_Area', id = area_id,
                                  status = area_status,
                                  auth = area_auth_type)
            ospf_area_node.__primarylabel__ = 'Ospf_Area'
            ospf_area_node.__primarykey__ = 'id'
            PART_OF = Relationship.type('PART_OF')
            rel_router_int = graph.merge(PART_OF(router,ospf_area_node))
        for neighbor in ospf_neigh:
            neigh_id = ospf_neigh[neighbor]['NbrRtrId']
            neigh_ip = ospf_neigh[neighbor]['NbrIpAddr']
            neigh_state = ospf_neigh[neighbor]['NbrState']
            # Neighbor is represented by its IP; merge keyed on address.
            ip = Node('Ip_address', status = 'Active', address = neigh_ip)
            ip.__primarylabel__ = "Ip_address"
            ip.__primarykey__ = "address"
            OSPF_NEIGHBOR = Relationship.type('OSPF_NEIGHBOR')
            rel_ospf_router = graph.merge(OSPF_NEIGHBOR(router,ip,
                                                        state = neigh_state,
                                                        neigh_rt_id = neigh_id))
    if bgp_info['LocalAs']:
        # Record the local AS number on the router node.
        router['LocalAs'] = bgp_info['LocalAs']
        graph.push(router)
        bgp_peers = bgp_info['Peers']
        for peer in bgp_peers:
            peer_local_add = bgp_peers[peer]['PeerLocalAddr']
            peer_id = bgp_peers[peer]['PeerIdentifier']
            peer_as = bgp_peers[peer]['PeerRemoteAs']
            peer_address = bgp_peers[peer]['PeerRemoteAddr']
            peer_state = bgp_peers[peer]['PeerState']
            ip = Node('Ip_address', status = 'Active', address = peer_address)
            ip.__primarylabel__ = "Ip_address"
            ip.__primarykey__ = "address"
            BGP_PEER = Relationship.type('BGP_PEER')
            rel_bgp_router = graph.merge(BGP_PEER(router, ip,
                                                  state = peer_state,
                                                  peer_rt_id = peer_id))
def Grapy(np_data, graph):
    """Rebuild the device/vulnerability/patch graph from ``np_data`` rows.

    Clears the whole graph first, then for each (device, vul, patch) row
    creates any missing node and merges the device-vul, vul-patch and
    patch-device edges between them.
    """
    graph.delete_all()
    for row in np_data:
        device, vul, patch = row[0], row[1], row[2]
        # Device node
        if not len(graph.nodes.match("device", name=device)):
            graph.create(Node('device', name=device))
        res_device = graph.nodes.match('device', name=device).first()
        # Vulnerability node
        if not len(graph.nodes.match("vul", name=vul)):
            graph.create(Node('vul', name=vul))
        res_vul = graph.nodes.match('vul', name=vul).first()
        # device -> vulnerability edge
        graph.merge(Relationship.type("device-vul")(res_device, res_vul))
        # Patch node
        if not len(graph.nodes.match("patch", name=patch)):
            graph.create(Node('patch', name=patch))
        res_patch = graph.nodes.match('patch', name=patch).first()
        # vulnerability -> patch and patch -> device edges
        graph.merge(Relationship.type("vul-patch")(res_vul, res_patch))
        graph.merge(Relationship.type("patch-device")(res_patch, res_device))
class Relationships(object):
    """Relationship type classes shared across the module.

    Each attribute is a class produced by Relationship.type(); calling it
    with two nodes builds a relationship of that type.
    """
    HAS_TAG = Relationship.type('HAS_TAG')
    HAS_LINK = Relationship.type('HAS_LINK')
    POSTED_VIA = Relationship.type('POSTED_VIA')
    POSTED = Relationship.type('POSTED')
    MENTIONS = Relationship.type('MENTIONS')
    RETWEETS = Relationship.type('RETWEETS')
def test_build_pytorch_geometric_data():
    """End-to-end check that graph matches convert to PyG data correctly.

    Builds a 4-node cycle of ICE_CREAM nodes with BETTER_THAN /
    WORSE_THAN edges, converts the match result, then verifies that the
    node id stored in feature column 2 maps 1:1 onto tensor rows and that
    edge_index endpoints agree with the original edge ids.
    """
    bt = Relationship.type("BETTER_THAN")
    wt = Relationship.type("WORSE_THAN")
    nodes = [Node("ICE_CREAM", flavour="chocolate", niceness=10, id=0),
             Node("ICE_CREAM", flavour="chocolate", niceness=5, id=1),
             Node("ICE_CREAM", flavour="vanilla", niceness=8, id=2),
             Node("ICE_CREAM", flavour="vanilla", niceness=9, id=3)]
    rels = [bt(nodes[0], nodes[1], id=0),
            wt(nodes[1], nodes[2], id=1),
            bt(nodes[2], nodes[3], id=2),
            wt(nodes[3], nodes[0], id=3)]
    for node in nodes:
        graph._graph.create(node)
    for rel in rels:
        graph._graph.create(rel)
    try:
        matches = graph.run("MATCH (n:ICE_CREAM)-[r]->(m) RETURN n, r, m")
        data = build_pytorch_geometric_data(matches=matches,
                                            target_key='niceness',
                                            node_featurizer=_mock_node_featurizer,
                                            edge_featrizer=_mock_edge_featurizer)
        # Column 2 of the node features holds the original node id; it
        # must appear exactly once and map to a unique tensor row.
        idx_map = {}
        for idt in range(4):
            nz = torch.nonzero(torch.stack(data.x)[:, 2] == idt)
            assert len(nz) == 1
            assert nz.item() not in idx_map.values()
            idx_map[idt] = nz.item()
        # Edge with id 0 runs node 0 -> node 1 in the original graph.
        first_edge = torch.nonzero(torch.stack(data.edge_attr)[:, 2] == 0).item()
        assert data.edge_index[0][first_edge] == idx_map[0]
        assert data.edge_index[1][first_edge] == idx_map[1]
        # Edge with id 3 closes the cycle: node 3 -> node 0.
        last_edge = torch.nonzero(torch.stack(data.edge_attr)[:, 2] == 3).item()
        assert data.edge_index[0][last_edge] == idx_map[3]
        assert data.edge_index[1][last_edge] == idx_map[0]
    finally:
        # Always clean the fixture data out of the shared graph.
        for node in nodes:
            graph._graph.delete(node)
        for rel in rels:
            graph._graph.delete(rel)
def create_link(new_node_data):
    """Merge a relationship between two existing nodes and return it as JSON.

    ``new_node_data`` must carry "source_id", "target_id", "label" and a
    "properties" dict; the ids are resolved to their nodes before the
    relationship is merged.
    """
    s = graph.node(int(new_node_data["source_id"]))
    t = graph.node(int(new_node_data["target_id"]))
    relationship = Relationship(s, new_node_data["label"], t,
                                **new_node_data["properties"])
    graph.merge(relationship)
    # NOTE(review): graph.node() and remote(...)._id are py2neo v3-era
    # APIs — confirm the pinned py2neo version before upgrading.
    response = {
        "label": relationship.type(),
        "id": remote(relationship)._id,
        "source": new_node_data["source_id"],
        "target": new_node_data["target_id"],
        "properties": new_node_data["properties"]
    }
    return json.jsonify({"link": response})
def graph_service_account(self, service_account, parent_node):
    """Create a graph node for a service account and link it to its parent."""
    sa_node = Node("service_account", name=service_account.email,
                   id=service_account.id)
    sa_node.__primarylabel__ = "service_account"
    sa_node.__primarykey__ = "name"
    sa_child_of = Relationship.type("sa_child_of")
    try:
        self.graph.merge(sa_child_of(sa_node, parent_node))
    except ConnectionRefusedError as err:
        # The graph backend is down: report and stop the whole run.
        logging.error(str(err))
        logging.error("Unable to connect to the graph service, exiting.")
        sys.exit()
def get_remote_graph_from_one_node(base_graph: Graph, node: dict) -> Union[Subgraph, None]:
    """Fetch the two-hop subgraph rooted at ``node`` from ``base_graph``.

    Args:
        base_graph: graph to query.
        node: node contained in the new dataset, e.g.
            {'type': '...', 'name': '...'}.

    Returns:
        A rebuilt Subgraph of fresh (unbound) nodes and relationships, a
        single fallback Node when nothing is found, or None is implied by
        the annotation for unmatched paths.
    """
    # Normalise the label: first letter upper-case, rest lower-case.
    type_ = node["type"][0] + node["type"][1:].lower()
    type_valid = [
        "Herb", "Disease", "Gene", "MM_symptom", "Mol", "TCM_symptom"
    ]
    if type_ not in type_valid:
        logger.warning("{} type not valid".format(node))
        # NOTE(review): returning a Node here despite the invalid label —
        # confirm this fallback is intended rather than returning None.
        return Node(type_, s_name=node["name"])
    cypher = (f"MATCH p=(n:{type_})-[]->()-[]->() WHERE n.s_name='"
              f"{node['name']}' RETURN DISTINCT p")
    query_res = base_graph.run(cypher)
    query_subgraph = query_res.to_subgraph()
    if not query_subgraph:
        logger.warning("{} not find graph".format(node))
        # No paths found: return a single node carrying the input props.
        if "s_name" in node:
            return Node(type_, **node)
        else:
            sname = node.pop("name")
            return Node(type_, s_name=sname, **node)
    nods = list(query_subgraph.nodes)
    rels = list(query_subgraph.relationships)
    # Rebuild every matched node as a fresh, unbound Node keyed by the
    # original node identity (str(labels)[1:] drops the leading ':').
    new_nodes = {
        node.identity: Node(str(node.labels)[1:], **node)
        for node in nods
    }
    result = []
    for rel in rels:
        type_ = list(rel.types())[0]
        ty = Relationship.type(type_)
        lab1 = rel.start_node.identity
        lab2 = rel.end_node.identity
        result.append(ty(new_nodes[lab1], new_nodes[lab2]))
    # Union all rebuilt relationships into one Subgraph.
    subgraph = reduce(lambda x, y: x | y, result)
    logger.info("{} generate success".format(node))
    return subgraph
def test_basic(self):
    """Smoke test: create two Person nodes and a Knows edge, query, clean up."""
    peter = Node("Person", name="Peter")
    jack = Node("Person", name="Jack")
    Knows = Relationship.type("Knows")
    self.graph.create(peter | jack)
    self.graph.create(Knows(peter, jack))
    for record in self.graph.run("Match (a) return a"):
        print(record)
    for record in self.graph.run("Match ()-[a]->() return a"):
        print(record)
    self.graph.delete_all()
def file4(csv_file, graph):
    """Merge (entity1, relation, entity2) rows from ``csv_file`` into the graph."""
    rows = list(csv_file)
    for idx in range(1, len(rows)):  # row 0 is the header
        row = rows[idx]
        if len(row) == 0:
            continue
        print(row)
        # First entity node
        entity1 = Node('entity1', entity_1=row[0])
        graph.merge(entity1, 'entity1', "entity_1")
        # Second entity node
        entity2 = Node('entity2', entity_2=row[2])
        graph.merge(entity2, 'entity2', "entity_2")
        # Relationship type named by the middle column
        rel = Relationship.type(row[1])
        # Merge the entities and the relationship into the graph
        graph.merge(rel(entity1, entity2), "entity1", "entity_1")
def file2(csv_file2, graph):
    """Merge (title, keyword) rows from ``csv_file2`` into the graph."""
    rows = list(csv_file2)
    for idx in range(1, len(rows)):  # row 0 is the header
        row = rows[idx]
        if len(row) == 0:
            continue
        print(row)
        # The title node already exists; build a handle to merge against.
        Title = Node('title', Title_name=row[0])
        # Keyword entity node
        Keywords = Node('keywords', Keywords_name=row[1])
        graph.merge(Keywords, 'keywords', "Keywords_name")
        # Relationship type
        keywords_of = Relationship.type("keywords_of")
        # Merge the entities and the relationship into the graph
        graph.merge(keywords_of(Title, Keywords), "title", "Title_name")
def organizations(graph):
    """Load user -> organization rows from organization.csv into the graph.

    Rows are merged in explicit transactions, committed in batches of
    four; the for/else commits whatever remains after the loop finishes.
    NOTE(review): the original source lost its indentation — the final
    commit only reads coherently as a for/else; confirm against history.
    """
    # Transaction Container - where we store operations before committing to graph
    register = graph.begin()
    # Open file and assign to reader
    fileIn = "organization.csv"
    in_file = open(fileIn, 'r')
    reader = csv.reader(in_file, delimiter=',')
    try:
        i = 0
        j = 0
        next(reader)  # skip the header line
        for row in reader:
            if row:
                user = strip(row[0])
                organization = strip(row[1])
                orgType = strip(row[2])
                userNode = Node("User", user_id=user)
                # primary key and label used for merge comparison
                userNode.__primarylabel__ = "User"
                userNode.__primarykey__ = "user_id"
                organizationNode = Node("Organization", name=organization, type=orgType)
                organizationNode.__primarylabel__ = "Organization"
                organizationNode.__primarykey__ = "name"
                WORKS_FOR = Relationship.type("WORKS_FOR")
                # Merge combines if already exists, else creates new
                register.merge(WORKS_FOR(userNode, organizationNode))
                i += 1
                j += 1
                if (i == 4):  # submits a batch every 4 lines read
                    register.commit()
                    print(j, "organization lines processed")
                    i = 0
                    register = graph.begin()
        else:
            register.commit()  # submits remainder of lines read
            print(j, "organization lines processed")
    except Exception as e:
        print(e, row, reader.line_num)
def do_the_job(pkt):
    """Merge a captured packet into the graph as a protocol-typed edge."""
    src, dst = pkt.source, pkt.destination
    typ = pkt.protocol
    info = pkt.info
    length = pkt.length
    logger.debug("Protocol: " + typ + ", Source: " + src + ", Destination: " +
                 dst)
    source_node = get_node(arg=src)
    dest_node = get_node(arg=dst)
    PACKET_TO = Relationship.type(typ)
    _graph_db.merge(PACKET_TO(source_node, dest_node))
def test_type_of_unbound_relationship_is_mutable():
    """An unbound relationship's type can be reassigned in place."""
    rel = Relationship({}, "KNOWS", {})
    rel.type = "LIKES"
    assert rel.type == "LIKES"