def __init__(self, graph_filename=None, graph_type=None):
    """Create or load a graph.

    With no filename, an empty in-memory graph is created (directed when
    graph_type == 'dig').  Otherwise the node-link JSON file under
    ``fac.graph_file_location`` is loaded; if it is missing or corrupt, a
    fresh empty graph is written there first.  Either way the graph is
    round-tripped through node-link form (multigraph=True) and plotted.
    """
    if graph_filename is None:
        self.G = nx.DiGraph() if graph_type == 'dig' else nx.Graph()
    else:
        graph_file = f'{fac.graph_file_location}{graph_filename}'
        try:
            # Only the read/parse belongs in the try: the original bare
            # `except:` also swallowed errors from graph construction
            # and plotting, hiding real bugs.
            with open(graph_file, "r") as read_file:
                data_dict = json.load(read_file)
        except (OSError, json.JSONDecodeError):
            # File absent or unreadable: persist a fresh empty graph.
            print('Creating empty graph')
            self.G = nx.DiGraph() if graph_type == 'dig' else nx.Graph()
            data_dict = nx.node_link_data(self.G)
            with open(graph_file, 'w') as f:
                json.dump(data_dict, f, indent=4)
        # Both paths end up with the same multigraph representation.
        self.G = nx.node_link_graph(data_dict, directed=False,
                                    multigraph=True, attrs=None)
        GraphPlot(self.G)
    self.graph_filename = graph_filename
def read_json_graph(
    path: PathLike = JSON_DATA_PATH,
    additional_data: bool = False,
    additional_data_key: str = JSON_ADDITIONAL_DATA_KEY,
) -> Union[Graph, Tuple[Graph, Graph]]:
    """Read a json link_data_format file with nodes, edges and attributes."""
    with open(Path(path)) as graph_file:
        if not additional_data:
            # Plain load: just the graph itself.
            return node_link_graph(load(graph_file))
        # Custom deserialiser restores non-JSON-native values; the extra
        # payload lives alongside the graph under `additional_data_key`.
        json_graph: JSONDict = load(graph_file, object_hook=json_deserialise)
        extra = json_graph.get(additional_data_key)
        return node_link_graph(json_graph), extra
def __init__(self, data=None, treatment=None, outcome=None, graph=None,
             variables=None, assumptions=None):
    """Initialise a causal model.

    graph may be a path to a .gml, .xml (GraphML) or .json (node-link)
    file; None yields an empty DiGraph.  `variables` are (name, observed)
    pairs added as nodes; `assumptions` are (u, v) pairs added as edges.
    """
    self.data = data
    self.treatment = treatment
    self.outcome = outcome
    self.ide_flag = False
    self.propensity_flag = False
    if graph is None:
        self.graph = nx.DiGraph()
    elif re.match(r".*\.gml", graph):
        self.graph = nx.DiGraph(nx.read_gml(graph))
    elif re.match(r".*\.xml", graph):
        self.graph = nx.DiGraph(nx.read_graphml(graph))
    elif re.match(r".*\.json", graph):
        with open(graph, 'r') as f:
            self.graph = nx.node_link_graph(json.load(f))
    else:
        # Previously an unrecognised extension silently left self.graph
        # unset, causing a confusing AttributeError later; fail fast.
        raise ValueError("Unsupported graph file format: %s" % graph)
    if variables is not None:
        for v in variables:
            self.graph.add_node(v[0], ob=v[1])
    if assumptions is not None:
        for a in assumptions:
            self.graph.add_edge(a[0], a[1])
def save_network(network_data):
    """Compute centrality metrics for a node-link network and persist them.

    network_data is a JSON string with keys 'network_id' and 'network'
    (a one-element list holding node-link data).  Per-node degree,
    closeness, betweenness and eigenvector centralities are stored as a
    JSON blob in the `networks` sqlite table, keyed by network_id.
    """
    network_json = json.loads(network_data)
    network_id = network_json["network_id"]
    g = nx.node_link_graph(network_json["network"][0])
    degree_dict = dict(g.degree())
    closeness_dict = nx.closeness_centrality(g)
    betweenness_dict = nx.betweenness_centrality(g)
    eigenvector_dict = nx.eigenvector_centrality(g)
    node_list = []
    for node in g.nodes:
        temp_node = Node(node)
        temp_node.degree = degree_dict[node]
        temp_node.closeness = round(closeness_dict[node], 4)
        temp_node.betweenness = round(betweenness_dict[node], 4)
        temp_node.eigenvector = round(eigenvector_dict[node], 4)
        node_list.append(temp_node)
    node_list.sort(key=lambda x: x.id)
    network_string = json.dumps([ob.__dict__ for ob in node_list])
    conn = sqlite3.connect("network_storage.db")
    try:
        cur = conn.cursor()
        try:
            cur.execute("INSERT INTO networks(id, network) VALUES (?, ?)",
                        (network_id, network_string))
            conn.commit()
        except sqlite3.IntegrityError:
            print("Non-Unique Network ID used")
    finally:
        # The original leaked the connection; always close it.
        conn.close()
def graph_labeling(input_file, labels, timeout, limit, show_model):
    """Solve a graph-labeling problem from input_file (or the built-in
    example) and print every solution plus solver statistics."""
    if input_file is None:
        source = default_graph_labeling_source()
        print("""\
No input file - using default data:
{example}
""".format(example=source))
        data = json.loads(source)
    else:
        data = json.load(input_file)
    graph = nx.node_link_graph(data)
    solver = GraphLabelingSolver(graph, labels, timeout=timeout, limit=limit)
    if show_model:
        print_model(solver.model)
    # Enumerate solutions, numbering from 1.
    for index, solution in enumerate(solver, start=1):
        print("\n=== solution {} ===".format(index))
        print(solution)
    print_solve_stats(solver.get_stats())
def generate_network(n_sensors: int, n_fog: Optional[int] = None) -> nx.Graph:
    """Build an undirected network of DC, fog and sensor nodes placed
    inside the Mitte geometry, with every fog node cabled to every DC."""
    with open(MITTE_GEOJSON) as stream:
        mitte = shape(geojson.load(stream)["geometry"])

    dc_nodes = _dc_nodes()
    fog_nodes = _fog_nodes(n_fog)
    # Take exactly n_sensors (node, edge-list) pairs from the generator.
    sensor_nodes, sensor_edge_lists = zip(
        *islice(_sensor_nodes(mitte, fog_nodes), n_sensors))

    # Flatten the per-sensor edge lists into one list.
    edges = [edge for edge_list in sensor_edge_lists for edge in edge_list]
    # Fully connect fog to cloud (DC) nodes via cable links.
    edges.extend(
        {"source": fog["id"], "target": cloud["id"], "link": LinkCable()}
        for fog in fog_nodes
        for cloud in dc_nodes
    )

    return nx.node_link_graph({
        "directed": False,
        "multigraph": False,
        "graph": {},
        "nodes": dc_nodes + fog_nodes + list(sensor_nodes),
        "links": edges,
    })
def test_bsg_stage_two():
    """The structure graph of a structure graph's model is isomorphic to
    the stored reference graph KKref."""
    fig = figure()
    fig.line(x=[1, 2, 3], y=[1, 2, 3])
    reference_graph = nx.node_link_graph(json.loads(KKref))
    stage_one = bsg.BokehStructureGraph(fig)
    stage_two = bsg.BokehStructureGraph(stage_one.model)
    assert nx.is_isomorphic(stage_two.graph, reference_graph)
def read_expressions():
    """Yield (graph, id) pairs for every expression record in the input
    JSON stream, with node labels relabelled to integers."""
    for expr_data in _iter_input_json():
        # dict_of_lists is not supported for now; if it were, the
        # 'tensors' field of each record would be used here.
        graph = nx.convert_node_labels_to_integers(
            nx.node_link_graph(expr_data['graph']))
        yield graph, expr_data['_id']
def loadModel(self):
    """Search the PlexSim/Models directory for a module defining the class
    named self.model and return an instance of it, or None if not found."""
    # The model name is not dotted; scan the .pyx model modules instead.
    for fname in os.listdir('PlexSim/Models/'):
        if not fname.endswith('pyx'):
            continue
        module = importlib.import_module(
            f'PlexSim.Models.{fname.split(".")[0]}')
        model_cls = getattr(module, self.model, None)
        if model_cls is None:
            continue
        # Backward compatibility: a live networkx graph is passed through.
        if isinstance(self.graph, (nx.Graph, nx.DiGraph)):
            return model_cls(graph=self.graph)
        # New method: the graph is stored as node-link data.
        print('loading model', self._use_old_format_)
        g = nx.node_link_graph(self.graph)
        if self._use_old_format_:
            g.__version__ = 1.0
        return model_cls(graph=g)
    return None
def download(gid):
    """Fetch graph-<gid>.json from the remote `root` URL and return it as
    a networkx graph."""
    # f = open("graph/graph-{}.json".format(gid), "r")
    # data = f.read()
    # f.close()
    response = requests.get(root + "graph-{}.json".format(gid))
    # Previously an HTTP error page fell through to json.loads and raised
    # a confusing decode error; surface the real failure instead.
    response.raise_for_status()
    return nx.node_link_graph(response.json())
def _load_graphs(self, graph_ngx_jsonl, use_cache):
    """Load one networkx node-link graph per jsonl line, convert each to a
    DGL multigraph carrying per-node concept ids, and group the result in
    chunks of self.num_choice (one chunk per question).

    Results are pickled next to the input file (<path>.pk) and reused when
    use_cache is True.
    """
    save_file = graph_ngx_jsonl + ".pk"
    if use_cache and os.path.exists(save_file):
        print(f'using cached graphs from {save_file}')
        with open(save_file, 'rb') as fin:
            dgs = pickle.load(fin)
        return dgs
    dgs = []
    # Read all lines up front so tqdm can report a total.
    with open(graph_ngx_jsonl, 'r') as fin:
        nxgs = [line for line in fin]
    for nxg_str in tqdm(nxgs, total=len(nxgs), desc='loading graphs'):
        nxg = nx.node_link_graph(json.loads(nxg_str))
        # Legacy DGL construction API (DGLGraph + from_networkx).
        dg = dgl.DGLGraph(multigraph=True)
        dg.from_networkx(nxg)
        # Assumes node ids are contiguous 0..len(dg)-1 and each node
        # carries a 'cid' attribute — TODO confirm against the jsonl writer.
        cids = [nxg.nodes[n_id]['cid'] for n_id in range(len(dg))]
        dg.ndata.update({'cncpt_ids': torch.tensor(cids)})
        dgs.append(dg)
    # Chunk the flat list into groups of num_choice via the
    # zip(*(iter,)*n) idiom; any trailing partial group is dropped.
    dgs = list(map(list, zip(*(iter(dgs),) * self.num_choice)))
    with open(save_file, 'wb') as fout:
        pickle.dump(dgs, fout)
    return dgs
def object_hook(self, obj):
    """JSON object_hook: rebuild objects tagged '_type' == 'nx.Graph' from
    their node-link 'value'; return every other dict unchanged."""
    if '_type' not in obj:
        return obj
    obj_type = obj['_type']  # renamed: `type` shadowed the builtin
    if obj_type == 'nx.Graph':
        return nx.node_link_graph(obj['value'])
    return obj
def lattice_to_latex(tokens, G_lattice):
    """Render a word lattice as a standalone TikZ automaton document.

    Node layout (states) comes from lattice.init_graph(tokens); the edge
    labels come from G_lattice (node-link data whose edges carry a 'word'
    list).  Returns the complete LaTeX source as a string.
    """
    G = lattice.init_graph(tokens)
    # Document preamble and tikzpicture styles.
    text = '''\\documentclass[tikz,border=10pt]{standalone}
\\usetikzlibrary{automata,positioning,arrows.meta}
\\begin{document}
\\begin{tikzpicture}
[
initial/.style={line width=1pt},
accepting by double/.append style={line width=1pt},
semithick,
]\n'''
    # Initial state 0, then up to five first-layer states positioned
    # around it (above right / above / right / below right / below).
    text += '\\node (0) [state, initial] {$0$};\n'
    k_list = sorted(G[0].keys())
    n_sents = len(G[0].keys())
    text += '\\node (%d) [state, above right=of 0] {$%d$};\n' % (k_list[1], k_list[1])
    text += '\\node (%d) [state, right=of 0, above=of %d] {$%d$};\n' % (
        k_list[0], k_list[1], k_list[0])
    if n_sents > 2:
        text += '\\node (%d) [state, right=of 0] {$%d$};\n' % (k_list[2], k_list[2])
    if n_sents > 3:
        text += '\\node (%d) [state, below right=of 0] {$%d$};\n' % (k_list[3], k_list[3])
    if n_sents > 4:
        text += '\\node (%d) [state, right=of 0, below=of %d] {$%d$};\n' % (
            k_list[4], k_list[3], k_list[4])
    # Remaining states: place each to the right of (one of) its predecessors.
    for n in G.nodes():
        if n == -1 or n in k_list or n == 0:
            continue
        text += '\\node (%d) [state, ' % (n)
        for a, b in G.in_edges(n):
            # Only the first in-edge determines the placement.
            text += 'right=of %d] {$%d$};' % (a, n)
            break
        text += '\n'
    # Final state (-1), placed right of all its predecessors.
    n = -1
    text += '\\node (%d) [state, ' % (n)
    for a, b in G.in_edges(n):
        text += 'right=of %d,' % (a)
    text += '] {$%d$};\n' % (n)
    text += '\\path [-{Stealth[]}]\n\n'
    # Switch to the lattice graph for the labelled transitions.
    G = nx.node_link_graph(G_lattice)
    for n in G.nodes():
        text += '(%d) ' % (n)
        for a, b in G.out_edges(n):
            if len(G[a][b]['word']) == 0:
                word = ''
            else:
                # Multiple words on one edge are joined with '/'.
                word = '/'.join(G[a][b]['word'])
            text += 'edge node [above, sloped] {$%s$} (%d)\n' % (word, b)
    text += ''';
\\end{tikzpicture}
\\end{document}
'''
    return text
def load_graph():
    """Load a node-link JSON graph from the module-level `file_name` path.

    Returns the networkx graph.
    """
    # `with` guarantees the handle is closed even if parsing raises;
    # the original fh.open()/fh.close() pair leaked on error.
    with open(file_name, 'r') as fh:
        G = nx.node_link_graph(json.load(fh))
    print('loaded file')
    return G
def deserialize_networkx_graph(data):
    """Transform JSON serialised data into a networkx Graph object"""
    # Restore numpy values first, then rebuild the graph from node-link form.
    return node_link_graph(python_to_numpy_recursive(data))
def graph_format(self, startday, endday, usergroups, readpath, writefile):
    """Aggregate per-day tweet JSON files into self.tweet_graph, attach
    centrality scores to each node, and dump the result to writefile.

    usergroups: iterable of handle groups, recorded into self.handledicts.
    Days startday..endday-1 are read from <readpath>/<day>.json.
    Returns 1 on completion.
    """
    index = 0
    for h in usergroups:
        self.handledicts.append(dict())
        for d in h:
            self.handledicts[index][d] = 1
        index += 1
    print(self.handledicts)
    for x in range(startday, endday):
        print(x)
        # `with` closes each per-day file (the original leaked them all).
        with open(readpath + "/" + str(x) + ".json", "r") as f:
            data = json.load(f)
        for d in data:
            self._add_node(d)
    for key in self.nodesdict:
        self.tweet_graph["nodes"].append(self.nodesdict[key].json_print())
    for key in self.linksdict:
        self.tweet_graph["links"].append(self.linksdict[key])
    # Rank hashtags by score, highest first.
    hasharr = [{"h": key, "score": self.hashtag_count[key]}
               for key in self.hashtag_count]
    hasharr = sorted(hasharr, key=lambda hashobj: hashobj["score"], reverse=True)
    print(hasharr)
    graph_metrics = networkx.node_link_graph(self.tweet_graph, multigraph=False)
    node_metrics = networkx.betweenness_centrality(graph_metrics, weight="value")
    degree_metrics = networkx.degree_centrality(graph_metrics)
    harmonic_metrics = networkx.harmonic_centrality(graph_metrics, distance="value")
    for key in node_metrics:
        for n in self.tweet_graph["nodes"]:
            if n["id"] == key:
                n["graph_score"] = {
                    "handle": n["name"],
                    "betweenness": node_metrics[key],
                    "degree": degree_metrics[key],
                    "harmonic": harmonic_metrics[key]
                }
    self.tweet_graph["terms"] = hasharr
    # `with` flushes and closes the output (the original never closed it).
    with open(writefile, "w") as wf:
        json.dump(self.tweet_graph, wf, indent=4)
    return 1
def try_to_read_json(filename):
    """Load a node-link JSON dataset, naming it after the file's basename
    (without the .json extension).  Returns None (and prints the error)
    if loading fails."""
    try:
        with open(filename) as json_file:
            dataset = nx.node_link_graph(json.load(json_file))
        # The original split("/")[1] took the SECOND path component,
        # breaking for nested ("a/b/c.json") or bare ("c.json") paths;
        # take the last component instead.
        dataset.name = filename.rsplit("/", 1)[-1][:-5]
        return dataset
    except Exception as err:
        print(err)
        print("failed to load dataset " + filename + " in json format")
def __init__(self, graph=None, file_path=None):
    """Initialise the multigraph from an existing graph object, or from a
    node-link JSON file when file_path is given (best-effort: failures
    are logged and the passed-in graph is used instead)."""
    if file_path:
        try:
            with open(file_path, 'r', encoding='utf-8') as fr:
                graph = nx.node_link_graph(json.load(fr))
        except Exception as e:
            logger.error(e)
    nx.MultiDiGraph.__init__(self, graph)
def loads_kwargs(s):
    """Deserialise a kwargs JSON string, rebuilding any embedded
    node-link graphs under 'nx_graph' / 'reversed_nx_graph'."""
    data = json.loads(s)
    for key in ('nx_graph', 'reversed_nx_graph'):
        if key in data:
            data[key] = nx.node_link_graph(data[key])
    return data
def load_from_data(self, data):
    """Build self.graph from node-link dict `data`.

    Returns the graph on success; returns None (after logging) when data
    is not a dict or parsing fails.
    """
    # isinstance instead of `type(data) != dict` so dict subclasses
    # (e.g. OrderedDict) are accepted too.
    if not isinstance(data, dict):
        logger.info('%s object can not be load as a graph, please load a dict' % str(type(data)))
        return
    try:
        self.graph = nx.node_link_graph(data)
    except Exception as e:
        logger.error(e)
    else:
        return self.graph
def json_to_nx(graph):
    """ Convert json graph (node-link-data) to Networkx graph. """
    # Non-default attribute names used by this project's serialised form.
    attr_map = {
        'source': 's',
        'target': 'r',
        'name': 'id',
        'key': 'key',
        'link': 'links',
    }
    return nx.node_link_graph(graph, attrs=attr_map)
def _load_graph(d_fp):
    """Map each name in d_fp to the graph loaded from its JSON path,
    falling back to an empty Graph when the file cannot be read."""
    topo = {}
    for name, fp in d_fp.items():
        graph = nx.Graph()  # fallback for unreadable files
        try:
            with open(fp, 'r', encoding='utf-8') as f:
                graph = nx.node_link_graph(json.load(f))
        except IOError:
            pass
        topo[name] = graph
    return topo
def download_graphml(n_clicks, json_str_sub_network):
    """Dash callback: serialise the current sub-network to GraphML under
    ./downloads and return it via send_file.  No-op until clicked."""
    if n_clicks:
        downloads_dir = os.path.join(os.getcwd(), 'downloads')
        # makedirs(exist_ok=True) avoids the exists()+mkdir() race.
        os.makedirs(downloads_dir, exist_ok=True)
        abs_filename = os.path.join(downloads_dir, 'network.graphml')
        sub_network = nx.node_link_graph(json.loads(json_str_sub_network))
        nx.write_graphml(sub_network, path=abs_filename)
        return send_file(abs_filename)
def file_open(self, filename, replace=True):
    """Load a node-link JSON scene file and deserialise it into the view.

    When replace is True the current scene is cleared first.  Records the
    file's directory and basename and marks the document clean.
    """
    self.current_path = os.path.dirname(filename)
    self.filename = os.path.basename(filename)
    if replace:
        self.view.scene.clear_all()
    # `with` closes the handle; the original json.load(open(...)) leaked it.
    with open(os.path.join(self.current_path, self.filename), 'r') as fh:
        g = nx.node_link_graph(json.load(fh))
    self.deserialize_graph(g, replace=replace)
    self.is_dirty = False
def get_segment(data, ip):
    """Return the segment of the full graph that contains `ip`.

    The payload holds the aggregator address ('aggr') and node-link graph
    data ('data').  Removing the aggregator splits the network into
    per-segment components; the component containing ip, plus the
    aggregator itself, is returned as a subgraph of the full graph.
    """
    payload = json.loads(data)
    aggregator = payload["aggr"]  # aggregator's address
    full_graph = nx.node_link_graph(payload["data"])
    pruned = nx.node_link_graph(payload["data"])
    # Remove the aggregator so the segments become disconnected components.
    pruned.remove_node(aggregator)
    # Find the component holding the requested ip.
    segment_nodes = []
    for component in nx.connected_components(pruned):
        if ip in component:
            segment_nodes = list(component)
            break
    segment_nodes.append(aggregator)
    return full_graph.subgraph(segment_nodes)
def __init__(self, graph=None, file_name=None):
    """Initialise the multigraph from an existing graph object, or from
    ./export/<file_name>.json when file_name is given (best-effort:
    failures are printed and the passed-in graph is used instead)."""
    if file_name:
        json_path = os.path.join(os.getcwd(), 'export', '%s.json' % file_name)
        try:
            with open(json_path, 'r', encoding='utf-8') as fr:
                graph = nx.node_link_graph(json.load(fr))
        except Exception as e:
            print(e)
    nx.MultiDiGraph.__init__(self, graph)
def __init__(self, jsonfile=None):
    """ Constructor creates an empty tree if nothing is passed, loads the
    json definition otherwise. """
    if jsonfile is not None:
        with open(jsonfile, 'r') as file:
            self._graph = nx.node_link_graph(json.load(file))
    else:
        self._graph = nx.DiGraph()
    self._construct_automaton()
def load_graph(file_name):
    """Load ./export/<file_name>.json as a networkx graph.

    On failure the error is printed and None is returned implicitly."""
    file_path = os.path.join(os.getcwd(), 'export', '%s.json' % file_name)
    try:
        with open(file_path, 'r', encoding='utf-8') as fr:
            graph = nx.node_link_graph(json.load(fr))
    except Exception as e:
        print(e)
    else:
        return graph
def setUp(self):
    """Load the expected result, the input fixture, test coordinates and
    the routing graph used by the tests."""
    with open('testing.json', 'r') as f:
        self.result = json.load(f)
    with open('testing_input.json', 'r') as f:
        self.input = json.load(f)
    self.start_coord = (53.2815126, -6.2341631)
    self.end_coord = (53.3881208173444, -6.2659470210)
    self.graph_location = '../assets/graph.json'
    with open(self.graph_location, 'rb') as jfile:
        self.G = nx.node_link_graph(geojson.load(jfile))
def load_graph(filename: str = "graph.json", reset_bad=False) -> nx.DiGraph:
    """Load a node-link JSON graph from `filename`.

    When reset_bad is True, every node's payload 'bad' flag is cleared.
    """
    with open(filename) as f:
        nld = load(f)
    gx = nx.node_link_graph(nld)
    if reset_bad:
        for node in gx.nodes:
            # BUG FIX: the original used
            #   `with gx.nodes[node]["payload"] as attrs:`
            # but a plain dict is not a context manager, so reset_bad=True
            # always raised AttributeError. Mutate the dict directly.
            gx.nodes[node]["payload"]["bad"] = False
    return gx
def read_json(filename, directed=False, multigraph=True, attrs=None):
    """Read a node-link JSON file and return the corresponding graph."""
    with open(filename) as f:
        payload = json.load(f)
    # Keyword arguments make the pass-through explicit.
    return nx.node_link_graph(payload, directed=directed,
                              multigraph=multigraph, attrs=attrs)