def ReadGpickle(file_path):
    r"""Read all of the gpickle files in ``file_path`` and transform them
    into adjacency matrices.

    Parameters
    ----------
    file_path : string
        The directory path of the gpickle files.

    Returns
    -------
    matrices_dict : dict
        Maps each scan number to a list of adjacency matrices, ordered by
        the individuals' SUBID.

    Notes
    -----
    The version of networkx should be 1.9.0 when using this function with
    the graph dataset in https://neurodata.io/mri-cloud .
    """
    matrices_dict = {}
    # Only plain files count; sub-directories are ignored.
    files = [f for f in listdir(file_path) if isfile(join(file_path, f))]
    files = sorted(files)
    # File names look like "<person>_<num>.<ext>": first pass registers
    # every scan number so each one gets its own sub-dict.
    for file in files:
        num = int(file.split('.')[0].split('_')[1])
        matrices_dict[num] = {}
    for file in files:
        person, num = file.split('.')[0].split('_')
        person = int(person)
        num = int(num)
        # BUG FIX: use join() instead of "file_path + file" so a directory
        # path without a trailing separator still resolves correctly
        # (the listing above already uses join()).
        matrices_dict[num][person] = asarray(
            to_numpy_matrix(read_gpickle(join(file_path, file))))
    # Flatten each scan's {person: matrix} mapping into a list ordered by
    # ascending SUBID.
    for num in matrices_dict.keys():
        gl = []
        keys = sorted(matrices_dict[num].keys())
        for key in keys:
            gl.append(matrices_dict[num][key])
        matrices_dict[num] = gl
    return matrices_dict
def prepare_flight_graph(self):
    ''' Creates the graph used to fly the drone '''
    # Reuse a previously pickled graph when a path was configured and the
    # file already exists on disk.
    if self.graph_path is not None and os.path.exists(self.graph_path):
        self.graph = gpickle.read_gpickle(self.graph_path)
    # Sample some random points on the current grid
    self.sampler = Sampler(self.data, SAFETY_DISTANCE, zmin=10,
                           zmax=TARGET_ALTITUDE)
    self.polygons = self.sampler.polygons
    self.heights = self.sampler.heights
    # if we don't have the graph by now - let's create it
    # NOTE(review): assumes self.graph was initialised (e.g. to None)
    # before this call — TODO confirm against __init__.
    if self.graph is None:
        nodes = self.sampler.sample(self.sample_size)
        print("Creating graph...")
        self.graph = create_graph(nodes, self.sampler.polygons,
                                  self.sampler.heights, self.neighbors)
        # if we have specified the path - we want to save it
        if self.graph_path is not None:
            print("Saving graph: ", self.graph_path)
            gpickle.write_gpickle(self.graph, self.graph_path)
def load_graph(self, graph_path):
    """Load a pickled graph from disk, cache it on ``self.G`` and return it.

    If *graph_path* is None, ``self.graph_path`` is used instead.
    """
    graph_path = self.graph_path if graph_path is None else graph_path
    # load processed data from directory graph_path
    logger.info(f'Loading graph from [{graph_path}]')
    self.G = read_gpickle(graph_path)
    return self.G
def read_gpickle(path, auto_table=False):
    """Read a graph with NetworkX's gpickle reader and wrap it as an
    Orange network; optionally attach an item table built from the graph.
    """
    graph = rwgpickle.read_gpickle(path)
    wrapped = _wrap(graph)
    if auto_table:
        wrapped.set_items(graph_to_table(wrapped))
    return wrapped
def get_graph(my_df, saving=True):
    """Return the walkability graph for *my_df*.

    With ``saving=True`` a previously pickled graph is reused when present,
    and a freshly built one is written back to disk for next time.
    """
    graph_filename = 'data/graph.pickle'
    use_cache = saving and os.path.exists(graph_filename)
    if use_cache:
        print('Loading graph from file.')
        return read_gpickle(graph_filename)
    print('Constructing and saving graph. Should take a few minutes.')
    graph = add_walkable(my_df, construct_graph(my_df))
    if saving:
        write_gpickle(graph, graph_filename, pickle.HIGHEST_PROTOCOL)
    return graph
def flights_network(create=False):
    """Return the airports flight graph together with node positions.

    When *create* is true the graph is rebuilt from the airports data and
    pickled to disk; otherwise the previously pickled graph is loaded.
    """
    path = '../data/pickled_graphs/flights.pkl'
    df = read_airports()
    pos = create_pos_for_shp(df)
    if not create:
        G = pickle.read_gpickle(path)
    else:
        G = create_airports_graph(df, pos)
        pickle.write_gpickle(G, path)
    return G, pos
def get_simulation_graph(simulation_name: str) -> TxsGraph:
    """Return the transactions graph for *simulation_name*.

    Building the graph is expensive, so the result is pickled inside the
    simulation's data directory and reused on subsequent calls.
    """
    cache_file = os.path.join(
        get_simulation_datadir(simulation_name), "graph.pickle")
    if not os.path.isfile(cache_file):
        # Cache miss: build from the raw data directory and persist.
        graph = TxsGraph.from_datadir(
            datadir=get_simulation_datadir(simulation_name))
        write_gpickle(graph, cache_file)
        return graph
    return read_gpickle(cache_file)
def load_graph(gen=True): if gen: G = read_gpickle('HTCM.gpickle') else: G = read_gpickle('Google.gpickle') G = convert_node_labels_to_integers(G) print info(G) print "Triangles in gen. graph:", sum(nx.triangles(G).values()) / 3 max_deg1 = 0, max_deg2 = 0 u1 = 0, u2 = 0 for node in G.nodes(): if max_deg < G.degree(node): max_deg = G.degree(node) u = node print "Max degree", max_deg, "at node", u, "(belonging to",\ nx.triangles(G, u), "triangles)" return G
def gminas_network(create=False):
    """Return the gminas graph, the node positions and the gminas dataframe.

    When *create* is true both the graph and the dataframe are rebuilt and
    pickled; otherwise both are loaded from the previously pickled files.
    """
    path = '../data/pickled_graphs/gminas_'
    graph_file = path + 'graph.pkl'
    df_file = path + 'df.pkl'
    df, nbrs = read_files('gminas')
    # pos maps node -> (pt_x, pt_y)
    pos = create_pos(df)
    if not create:
        G = gpickle.read_gpickle(graph_file)
        df = pd.read_pickle(df_file)
    else:
        G = create_gminas_graph(pos, nbrs, df)
        gpickle.write_gpickle(G, graph_file)
        df.to_pickle(df_file)
    return G, pos, df
#features[index] = feat # Load windows using the paper's method ww = winres[fi] nn, _ = ww.shape windows[fi] = [] scores[fi] = [] for i in range(nn): windows[fi].append((ih - ww[i, 1], ww[i, 0], ih - ww[i, 3], ww[i, 2])) scores[fi].append(ww[i, 4]) #display_windows (images[index], windows[index], scores[index]) # Load graph G = read_gpickle(argv[3]) # Find track ids current_track_id = 0 dd = {} lenlist = [] pathname = argv[2] figures = [figure() for i in range(len(windows))] print("Displaying windows...") def reclabel(G, ns, color, tid): if ns == 'S': return 0
#features[index] = feat # Load windows using the paper's method ww = winres[fi] nn,_ = ww.shape windows[fi] = [] scores[fi] = [] for i in range (nn): windows[fi].append ((ih-ww[i,1], ww[i,0], ih-ww[i,3], ww[i,2])) scores[fi].append (ww[i,4]) #display_windows (images[index], windows[index], scores[index]) # Read saved graph G = read_gpickle (argv[3]) # Read track list class Track: def __init__ (self, tid, start): self.track_id = tid self.track_start = start self.window_ids = [] def append_window (self, wid): self.window_ids.append (wid) def get_window_for_frame (self, fr): i = fr - self.track_start if i >= 0 and i < len(self.window_ids): wins, _, _ = winimg[fr]
targetFile = opts.get( "-t", None )
sourceFile = opts.get( "-s", None )
# GET DEPENDS
##################
# Either build the dependency graph from the file list, or load a
# previously pickled one from sourceFile.
# NOTE(review): opts, filelist and kwargs_creategraph come from an
# enclosing scope that is not visible in this chunk.
graph = None
verbose = "-v" in opts
if not sourceFile:
    graph = main( filelist, **kwargs_creategraph )
else:
    if verbose:
        sys.stdout.write("Reading dependencies from: %s\n" % sourceFile)
    graph = gpickle.read_gpickle( sourceFile )
# SAVE ALL DEPENDENCIES ?
#########################
# save to target file
if targetFile:
    if verbose:
        sys.stdout.write("Saving dependencies to %s\n" % targetFile)
    gpickle.write_gpickle( graph, targetFile )
# QUERY MODE
###############
# "-b" sets return_invalid — presumably whether broken dependencies are
# reported too; TODO confirm against the query code below this chunk.
return_invalid = "-b" in opts
import pickle
import subprocess
import argparse
from collections import Counter
import networkx as nx
from networkx.readwrite.gpickle import read_gpickle
from networkx.drawing.nx_pydot import write_dot
from build_func_deps import (output_graph_file as input_graph_file,
                             output_def_file as input_def_file,
                             FunctionDef, FuncType)
from build_func_deps_config import output_folder

# Load existing graph
call_graph = read_gpickle(input_graph_file)
# NOTE(review): the "as input_def_file" target rebinds the imported path
# name to the open file object; it is not reused as a path afterwards in
# this chunk, but worth confirming.
with open(input_def_file, 'rb') as input_def_file:
    func_defs = pickle.load(input_def_file)

# Define and parse the command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('func_to_check', help='The name of function to check',
                    type=str)
parser.add_argument('upstream_cutoffs',
                    help='The cutoff for checking who is calling the func',
                    type=int)
parser.add_argument('downstream_cutoff',
                    help='The cutoff for checking who the func is calling',
                    type=int)
    # Tail of test_node_attributes (its def line lies before this chunk):
    # both node-data mappings must share identical keys and set-equal values.
    assert item1_data.keys() == item2_data.keys()
    for k, v1 in item1_data.items():
        v2 = item2_data[k]
        assert set(v1) == set(v2)

def test_edge_attributes(g_old, g_new):
    # Every edge of g_old must exist in g_new with the same attribute keys
    # and order-insensitive equal values; values that cannot be turned into
    # sets (e.g. None) fall into the TypeError branch, which only accepts
    # None on both sides.
    for src_old, tgt_old, data_old in g_old.edges(data=True):
        data_new = g_new.edges()[src_old, tgt_old]
        assert data_old.keys() == data_new.keys()
        for k, v1 in data_old.items():
            v2 = data_new[k]
            try:
                assert set(v1) == set(v2)
            except TypeError:
                assert v1 is None
                assert v2 is None

# Compare the regenerated gpickle against its ".bck" backup in both
# directions (node and edge attributes, plus node/edge counts).
fname = "Climate_Mind_DiGraph.gpickle"
g_old = read_gpickle(fname + ".bck")
g_new = read_gpickle(fname)
assert len(g_old) == len(g_new)
assert len(g_old.edges()) == len(g_new.edges())
test_node_attributes(g_old.nodes(data=True), g_new.nodes(data=True))
test_node_attributes(g_new.nodes(data=True), g_old.nodes(data=True))
test_edge_attributes(g_old, g_new)
test_edge_attributes(g_new, g_old)