def get_graph(self):
    """Fetch this session's graph from the remote API.

    Returns the object produced by ``json_graph.loads`` on success, or
    ``None`` when the response cannot be decoded as a graph.
    """
    res = post('%s/api/GetGraph' % self.base, data={'token': self.token}).json()
    try:
        # Round-trip through a JSON string: .json() already parsed the
        # response, but json_graph.loads expects serialized JSON text.
        graph = json_graph.loads(json.dumps(res))
        return graph
    except Exception:
        # Was a bare `except:`, which would also swallow KeyboardInterrupt
        # and SystemExit; narrow to Exception.
        return None
def read_json(file):
    """Parse a JSON-serialized graph from an open file-like object.

    Parameters
    ----------
    file : readable file-like object containing a JSON-serialized graph

    Returns the object produced by ``json_graph.loads``.
    """
    # NOTE(review): removed commented-out debug prints and a dead
    # alternative `json_graph.node_link_graph(...)` call.
    json_string = file.read()
    return json_graph.loads(json_string)
def load_specific_molecule(self, full_path):
    """Load a single molecule from the JSON file at *full_path*.

    Returns the reconstructed NAOActorMolecule, or an error string when
    *full_path* does not name a .json file.
    """
    if re.match(r'.*\.json', full_path) is None:
        return "Path must contain a json file"
    # Context manager closes the handle even if parsing raises
    # (the original left the file open on error).
    with open(full_path, "r") as json_file:
        # SECURITY: eval() on file contents executes arbitrary code.
        # Kept because the archive format appears to rely on Python
        # literals — consider ast.literal_eval/json.loads; TODO confirm.
        json_dict = eval(json_file.read())
    molecule = NAOActorMolecule(self.memory, self.memory.atoms,
                                self.nao_mem, self.nao_motion,
                                duplication=True)
    atoms = json_dict["atoms"]
    molecular_graph = json_dict["molecular_graph"]
    for json_atom in atoms:
        # BUG FIX: original referenced a bare `memory` name here and
        # below, which is undefined in this method; use self.memory as
        # everywhere else in this class.
        new_atom = load_json_atom(json_atom, self.memory,
                                  nao_memory=self.nao_mem,
                                  nao_motion=self.nao_motion)
        self.memory.add_atom(new_atom, new_atom.get_id())
    molecule.molecular_graph = json_graph.loads(molecular_graph)
    molecule.set_connections()
    self.memory.add_molecule(molecule)
    return molecule
def get_graph(self):
    """Fetch this session's graph from the remote API.

    Returns the object produced by ``json_graph.loads`` on success, or
    ``None`` when the response cannot be decoded as a graph.
    """
    res = post('%s/api/GetGraph' % self.base, data={
        'token': self.token
    }).json()
    try:
        # json_graph.loads expects serialized JSON text, so re-serialize
        # the already-parsed response.
        graph = json_graph.loads(json.dumps(res))
        return graph
    except Exception:
        # Narrowed from a bare `except:` (which would also swallow
        # KeyboardInterrupt/SystemExit).
        return None
def read_json_netfile(infile):
    '''
    Reads in a JSON network
    input: infile name
    output: Networkx graph object
    '''
    from networkx.readwrite import json_graph
    # Use a context manager so the file handle is closed — the original
    # leaked the handle returned by open().
    with open(infile) as fh:
        return json_graph.loads(fh.read())
def main(args): input_fh = open(args.input) while True: line = input_fh.readline().strip() if not line: break g = json_graph.loads(line) analyze(g) input_fh.close() for k,v in sorted(counts.items()): print '%s: %d' % (k,v) print '=======' print_proportions()
def read_json_graph(self, file_name):
    """
    Read a JSON representation of a graph from a file.

    Parameters
    ----------
    file_name : The name of the file of which the JSON representation
        will be read

    Returns the parsed network, or None (implicitly) when an IOError
    occurs — the error is logged as fatal.
    """
    try:
        # Context manager guarantees the handle is closed even when
        # json_graph.loads raises (the original leaked it then).
        with open(file_name, "r") as import_file:
            content = import_file.read()
        return json_graph.loads(content)
    except IOError:
        self.logger.fatal('an IO error exception occurred while reading a JSON representation of a graph')
def load_brain_archive(self):
    """Load every archived molecule JSON file under
    brain_archive/<robot_system>/ into memory, renumbering atom ids so
    atoms from different files do not collide, and register each
    molecule in the brain archive.
    """
    largest_atom_id = 0  # highest renumbered atom id seen across all files
    last_atom_id = 0     # id offset applied to the atoms of the current file
    base = "brain_archive/{0}/".format(config.robot_system)
    if not os.path.exists(base):
        os.makedirs(base)
    files = [o for o in os.listdir(base) if re.match('.*\.json', o)]
    for f in files:
        print f
        json_file = open("{0}{1}".format(base, f), "r")
        # NOTE(review): eval() executes arbitrary code from the file —
        # acceptable only for trusted local archives; confirm the format.
        json_dict = eval(json_file.read())
        json_file.close()
        id_map = {}  # NOTE(review): never used below — presumably legacy
        molecule = NAOActorMolecule(self.memory, self.memory.atoms,
            self.nao_mem, self.nao_motion, duplication=True)
        atoms = json_dict["atoms"]
        molecular_graph = json_dict["molecular_graph"]
        for json_atom in atoms:
            # NOTE(review): bare `memory` here (and in add_molecule below)
            # is not `self.memory`; appears to rely on a module-level
            # global — verify.
            new_atom = load_json_atom(json_atom, memory, nao_memory=self.nao_mem, nao_motion=self.nao_motion)
            # Atom ids look like "a-<number>"; extract the number so it
            # can be offset by last_atom_id for uniqueness across files.
            atom_id_number = int(
                re.match('a-([0-9]+)', new_atom.id).group(1))
            # print atom_id_number
            # print molecular_graph
            new_id = new_atom.create_id(atom_id_number + last_atom_id)
            # Rewrite the serialized graph text so its edges refer to the
            # renumbered atom id.
            molecular_graph = molecular_graph.replace(new_atom.id, new_id)
            # print molecular_graph
            atom_id_number += last_atom_id
            if atom_id_number > largest_atom_id:
                largest_atom_id = atom_id_number
            self.memory.add_atom(new_atom, new_id)
        print molecular_graph
        molecule.molecular_graph = json_graph.loads(molecular_graph)
        print molecule.molecular_graph
        molecule.set_connections()
        memory.add_molecule(molecule)
        memory.add_to_brain_archive(molecule)
        # Offset the next file's atoms past everything loaded so far.
        last_atom_id = largest_atom_id
    for molecule in self.memory.brain_archive:
        print molecule
        for a in molecule.get_atoms_as_list():
            print a.print_atom()
    print "brain_archive:", self.memory.brain_archive
def load_specific_molecule(self, full_path):
    """Load a single molecule from the JSON file at *full_path*.

    Returns the reconstructed NAOActorMolecule, or an error string when
    *full_path* does not name a .json file.
    """
    if re.match(r'.*\.json', full_path) is None:
        return "Path must contain a json file"
    # Context manager closes the handle even if parsing raises
    # (the original left the file open on error).
    with open(full_path, "r") as json_file:
        # SECURITY: eval() on file contents executes arbitrary code.
        # Kept because the archive format appears to rely on Python
        # literals — consider ast.literal_eval/json.loads; TODO confirm.
        json_dict = eval(json_file.read())
    molecule = NAOActorMolecule(self.memory, self.memory.atoms,
                                self.nao_mem, self.nao_motion,
                                duplication=True)
    atoms = json_dict["atoms"]
    molecular_graph = json_dict["molecular_graph"]
    for json_atom in atoms:
        # BUG FIX: original referenced a bare `memory` name here and
        # below, which is undefined in this method; use self.memory as
        # everywhere else in this class.
        new_atom = load_json_atom(json_atom, self.memory,
                                  nao_memory=self.nao_mem,
                                  nao_motion=self.nao_motion)
        self.memory.add_atom(new_atom, new_atom.get_id())
    molecule.molecular_graph = json_graph.loads(molecular_graph)
    molecule.set_connections()
    self.memory.add_molecule(molecule)
    return molecule
def load_brain_archive(self):
    """Load every archived molecule JSON file under
    brain_archive/<robot_system>/ into memory, renumbering atom ids so
    atoms from different files do not collide, and register each
    molecule in the brain archive.
    """
    largest_atom_id = 0  # highest renumbered atom id seen across all files
    last_atom_id = 0     # id offset applied to the atoms of the current file
    base = "brain_archive/{0}/".format(config.robot_system)
    if not os.path.exists(base):
        os.makedirs(base)
    files = [o for o in os.listdir(base) if re.match('.*\.json', o)]
    for f in files:
        print f
        json_file = open("{0}{1}".format(base, f), "r")
        # NOTE(review): eval() executes arbitrary code from the file —
        # acceptable only for trusted local archives; confirm the format.
        json_dict = eval(json_file.read())
        json_file.close()
        id_map = {}  # NOTE(review): never used below — presumably legacy
        molecule = NAOActorMolecule(self.memory, self.memory.atoms,
            self.nao_mem, self.nao_motion, duplication=True)
        atoms = json_dict["atoms"]
        molecular_graph = json_dict["molecular_graph"]
        for json_atom in atoms:
            # NOTE(review): bare `memory` here (and in add_molecule below)
            # is not `self.memory`; appears to rely on a module-level
            # global — verify.
            new_atom = load_json_atom(json_atom, memory, nao_memory=self.nao_mem, nao_motion=self.nao_motion)
            # Atom ids look like "a-<number>"; extract the number so it
            # can be offset by last_atom_id for uniqueness across files.
            atom_id_number = int(re.match('a-([0-9]+)', new_atom.id).group(1))
            # print atom_id_number
            # print molecular_graph
            new_id = new_atom.create_id(atom_id_number + last_atom_id)
            # Rewrite the serialized graph text so its edges refer to the
            # renumbered atom id.
            molecular_graph = molecular_graph.replace(
                new_atom.id, new_id)
            # print molecular_graph
            atom_id_number += last_atom_id
            if atom_id_number > largest_atom_id:
                largest_atom_id = atom_id_number
            self.memory.add_atom(new_atom, new_id)
        print molecular_graph
        molecule.molecular_graph = json_graph.loads(molecular_graph)
        print molecule.molecular_graph
        molecule.set_connections()
        memory.add_molecule(molecule)
        memory.add_to_brain_archive(molecule)
        # Offset the next file's atoms past everything loaded so far.
        last_atom_id = largest_atom_id
    for molecule in self.memory.brain_archive:
        print molecule
        for a in molecule.get_atoms_as_list():
            print a.print_atom()
    print "brain_archive:", self.memory.brain_archive
def load_molecule(json, memory, atoms, nao_memory, nao_motion):
    """Rebuild a NAOActorMolecule from its JSON dict representation.

    Instantiates each atom described in json["atoms"] by its "class"
    field, registers it with memory, restores the molecular graph, and
    returns the wired-up molecule.
    """
    molecule = NAOActorMolecule(memory, atoms, nao_memory, nao_motion,
                                duplication=True)
    for atom_spec in json["atoms"]:
        atom_id = atom_spec["id"]
        atom_class = atom_spec["class"]
        delays = atom_spec["message_delays"]
        rebuilt = None
        if atom_class == 'NaoSensorAtom':
            rebuilt = NaoSensorAtom(memory=memory, messages=None,
                                    message_delays=delays,
                                    sensors=atom_spec["sensors"],
                                    sensory_conditions=atom_spec["sensory_conditions"],
                                    nao_memory=nao_memory, id=atom_id)
        elif atom_class == 'TransformAtom':
            rebuilt = TransformAtom(memory=memory, messages=None,
                                    message_delays=delays,
                                    parameters=None, id=atom_id)
        elif atom_class == 'NaoMotorAtom':
            rebuilt = NaoMotorAtom(memory=memory, messages=None,
                                   message_delays=delays,
                                   parameters=atom_spec["parameters"],
                                   motors=atom_spec["motors"],
                                   nao_motion=nao_motion,
                                   nao_memory=nao_memory, id=atom_id)
        if rebuilt is not None:
            memory.add_atom(rebuilt)
    molecule.molecular_graph = json_graph.loads(json["molecular_graph"])
    molecule.set_connections()
    return molecule
def load_graph():
    """Load and return the networkx graph serialized in './graph.json'."""
    # Context manager closes the file — the original leaked the handle.
    with open('graph.json', 'r') as f:
        return json_graph.loads(f.read())
def get_graph(self, datasource=None, selected_vertex_urns=None):
    """Build or extend self.browser_graph from the given datasource URN
    and return it serialized via json_graph.dumps.

    Parameters
    ----------
    datasource : URL-encoded datasource URN (nmap host graph, or one of
        the badger host-destination analyses).
    selected_vertex_urns : comma-separated, URL-encoded vertex URNs to
        expand (required for the badger analyses).

    Raises Exception("Unrecognized analysis!") for unknown datasources.
    """
    output_format = "JSON"
    if (datasource != None and
            "urn:nmap:NAMESPACE:NETWORKS:NETWORK.HOST" == urllib.unquote(datasource)):
        # nmap branch: replace the browser graph wholesale with the
        # CPTL graph from the nmap DAO.
        json_cptl_nmap_graph = self.nmDAO.getCPTLGraph(output_format)
        G = json_graph.loads(json_cptl_nmap_graph)
        self.browser_graph = G
    else:
        # This is where CPTL-Aware Resources come into play.
        # Map the badger analysis URN to the source/target vertex
        # attribute types used for the graph merge below.
        if (datasource != None and
                "urn:badger:get_hostip_dest_hostnames" == urllib.unquote(datasource) and
                selected_vertex_urns != None):
            source_vertex_attr_type = "urn-cptl-HOST-ipv4"
            target_vertex_attr_type = "urn-cptl-HOST-hostname"
        elif (datasource != None and
                "urn:badger:get_host_dest_ips" == urllib.unquote(datasource) and
                selected_vertex_urns != None):
            source_vertex_attr_type = "urn-cptl-HOST-ipv4"
            target_vertex_attr_type = "urn-cptl-HOST-ipv4"
        elif (datasource != None and
                "urn:badger:get_host_dest_tldcounts" == urllib.unquote(datasource) and
                selected_vertex_urns != None):
            source_vertex_attr_type = "urn-cptl-HOST-ipv4"
            target_vertex_attr_type = "urn-cptl-HOST-tag-tldcount"
        elif (datasource != None and
                "urn:badger:get_host_dest_cccounts" == urllib.unquote(datasource) and
                selected_vertex_urns != None):
            source_vertex_attr_type = "urn-cptl-HOST-ipv4"
            target_vertex_attr_type = "urn-cptl-HOST-tag-cccount"
        else:
            raise Exception("Unrecognized analysis!")

        # Decode the URNs passed in
        datasource = urllib.unquote(datasource)
        decoded_selected_vertex_urns = []
        encoded_selected_vertex_urns = selected_vertex_urns.split(",")
        for encoded_urn in encoded_selected_vertex_urns:
            decoded_urn = urllib.unquote(encoded_urn)
            decoded_selected_vertex_urns.append(decoded_urn)

        # We need to write code to resolve this IP from the selected vertex urn
        for selected_vertex_urn in decoded_selected_vertex_urns:
            #print "SELECTED urn: " + selected_vertex_urn
            source_vertex_id = None
            source_vertex = None
            source_vertex_attr_value = None

            # Find the node that corresponds to the selected_vertex_urn
            for node in self.browser_graph.nodes(data=True):
                node_id = node[0]
                node_dict = node[1]
                if (node_dict['urn_id'] == selected_vertex_urn):
                    source_vertex_id = node_id
                    source_vertex = node;  # need to fix this
                    source_vertex_dict = node[1]
                    source_vertex_attr_value = source_vertex_dict[source_vertex_attr_type]
                    break
            # NOTE(review): if no node matches, source_vertex_attr_value
            # stays None and is passed to the badger query — confirm
            # that is intended.

            # Now get the information
            json_badger_graph = \
                self._badger_get_host_dest_info(source_vertex_attr_value,
                                                source_vertex_attr_type,
                                                target_vertex_attr_type)
            G = self.browser_graph
            #self.write_graph(G, "/tmp/G.json")
            H = json_graph.loads(json_badger_graph)
            #self.write_graph(H, "/tmp/H.json")
            # Merge the badger result into the browser graph, keyed on
            # the source/target attribute types chosen above.
            self.browser_graph = self.combine_graphs(G,
                                                     H,
                                                     source_vertex_attr_type,
                                                     source_vertex_attr_value,
                                                     target_vertex_attr_type);
            #updated_graph = nx.compose(current_graph, graph_extensions)
            #self.write_graph(self.browser_graph, "/tmp/C.json")

    json_updated_graph = json_graph.dumps(self.browser_graph)
    return json_updated_graph
#! /usr/bin/env python
"""Convert a JSON-serialized networkx graph (argv[1]) to GraphML (argv[2]),
replacing any list-valued node 'name' attribute with the placeholder
'fixme' first (GraphML attribute values cannot be lists)."""
import networkx as nx
from networkx.readwrite import json_graph
import sys

# `with` closes the input file — the original leaked the handle.
with open(sys.argv[1]) as fh:
    G = json_graph.loads(fh.read())
for g in G.nodes_iter():
    # isinstance() instead of type(...) == list: idiomatic and
    # covers list subclasses.
    if isinstance(G.node[g]['name'], list):
        G.node[g]['name'] = 'fixme'
nx.write_graphml(G, sys.argv[2])
if T.node[nn]['name'] == 'ClassTypeList': imps = [] for nnn in T.successors(nn): for nnnn in T.successors(nnn): cn = [] if T.node[y]['name'] == 'Identifier': cn.append(T.node[getFirstChild(T, y)]['name']) imps.append(cn) return imps asts = {} for fn in glob.glob('jgraph-ana/*.json'): # print fn s = open(fn).read() G = json_graph.loads(s) asts['.'.join(getFQN(G, 0))] = G G = nx.DiGraph() for t in asts: T = asts[t] ext = getExtends(T, 0) if ext == None: continue imps = getImports(T, 0) for i in imps: n = i[:-1]+ext print '.'.join(n)