Example #1
    def prepare_flight_graph(self):
        '''
        Creates the graph used to fly the drone
        '''
        if self.graph_path is not None and os.path.exists(self.graph_path):
            self.graph = gpickle.read_gpickle(self.graph_path)

        # Sample some random points on the current grid
        self.sampler = Sampler(self.data,
                               SAFETY_DISTANCE,
                               zmin=10,
                               zmax=TARGET_ALTITUDE)
        self.polygons = self.sampler.polygons
        self.heights = self.sampler.heights

        # if we don't have the graph by now - let's create it
        if self.graph is None:
            nodes = self.sampler.sample(self.sample_size)
            print("Creating graph...")
            self.graph = create_graph(nodes, self.sampler.polygons,
                                      self.sampler.heights, self.neighbors)
            # if we have specified the path - we want to save it
            if self.graph_path is not None:
                print("Saving graph: ", self.graph_path)
                gpickle.write_gpickle(self.graph, self.graph_path)
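The pattern in Example #1 (and in several of the examples below) is: load a cached graph from disk if the pickle exists, otherwise build the graph and cache it with write_gpickle. A minimal, self-contained sketch of that pattern, assuming NetworkX 2.x where nx.read_gpickle/nx.write_gpickle are still available; the helper name, file path, and build function are illustrative only:

import os
import networkx as nx  # NetworkX 2.x: nx.read_gpickle / nx.write_gpickle exist

def load_or_build_graph(path, build_fn):
    # Return the cached graph if the pickle exists, otherwise build and cache it.
    if os.path.exists(path):
        return nx.read_gpickle(path)
    graph = build_fn()
    nx.write_gpickle(graph, path)
    return graph

# Illustrative usage: build a small random graph once, reuse the pickle afterwards.
G = load_or_build_graph('flight_graph.gpickle', lambda: nx.gnm_random_graph(100, 300))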
Example #2
    def MCT_SEARCH(self):
        reward, config = super().MCT_SEARCH()

        write_gpickle(self.tree, os.path.join(self.exec_dir, "tree.pkl"))
        with open(os.path.join(self.exec_dir, "full_log.json"), 'w') as outfile:
            json.dump(self.env.history_score, outfile)

        return reward, config
Example #3
def get_graph(my_df, saving=True):
	graph_filename = 'data/graph.pickle'
	if saving and os.path.exists(graph_filename):
		print('Loading graph from file.')
		return read_gpickle(graph_filename)
	print('Constructing and saving graph. Should take a few minutes.')
	DG = construct_graph(my_df)
	DG = add_walkable(my_df, DG)
	if saving: write_gpickle(DG, graph_filename, pickle.HIGHEST_PROTOCOL)
	return DG
Example #4
def flights_network(create=False):
    path = '../data/pickled_graphs/flights.pkl'
    df = read_airports()
    pos = create_pos_for_shp(df)
    if create:
        G = create_airports_graph(df, pos)
        pickle.write_gpickle(G, path)
    else:
        G = pickle.read_gpickle(path)
    return G, pos
Example #5
def get_simulation_graph(simulation_name: str) -> TxsGraph:
    # building the graph is expensive, so we pickle it for faster construction
    # next time
    simulation_graph_ser_file = os.path.join(
        get_simulation_datadir(simulation_name), "graph.pickle")
    if os.path.isfile(simulation_graph_ser_file):
        return read_gpickle(simulation_graph_ser_file)

    g = TxsGraph.from_datadir(datadir=get_simulation_datadir(simulation_name))
    write_gpickle(g, simulation_graph_ser_file)
    return g
Example #6
def gminas_network(create=False):
    path = '../data/pickled_graphs/gminas_'
    df, nbrs = read_files('gminas')
    pos = create_pos(df)  # {node: (pt_x, pt_y)}
    if create:
        G = create_gminas_graph(pos, nbrs, df)
        gpickle.write_gpickle(G, path + 'graph.pkl')
        df.to_pickle(path + 'df.pkl')
    else:
        G = gpickle.read_gpickle(path + 'graph.pkl')
        df = pd.read_pickle(path + 'df.pkl')
    return G, pos, df
Example #7
    def MCT_SEARCH(self):
        """Monte carlo tree search iteration."""
        self.logger.info(
            "#########################Iteration={0}##################################"
            .format(self.n_iter))
        front = self.TREEPOLICY()
        reward = self.PLAYOUT(front)
        self.BACKUP(front, reward)
        self.n_iter += 1

        self.env.score_model.save_data(self.exec_dir)

        write_gpickle(self.tree, os.path.join(self.exec_dir, "tree.json"))

        with open(os.path.join(self.exec_dir, "full_log.json"),
                  'w') as outfile:
            json.dump(self.env.history_score, outfile)
Example #8
    def get_all_counts(self):
        """Generates and saves graphs and count dicts for each file in the clean_path directory"""
        files = [
            f for f in listdir(self.clean_path)
            if isfile(join(self.clean_path, f))
        ]
        mkdir(self.grapher_path)
        mkdir(join(self.grapher_path, 'graphs'))
        mkdir(join(self.grapher_path, 'counts'))
        for f in files:
            df = pd.read_csv(join(self.clean_path, f))
            g, counts = self.get_counts_over_time(df)
            write_gpickle(g, join(self.grapher_path, 'graphs',
                                  f[:-4] + '.pkl'))
            with open(join(self.grapher_path, 'counts', f[:-4] + '.pkl'),
                      'wb') as count_file:
                pickle.dump(counts, count_file)
Example #9
def write_gpickle(G, path):
    """NetworkX write_gpickle method."""
    rwgpickle.write_gpickle(G, path)
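Example #9 simply forwards to NetworkX's own writer (rwgpickle is presumably an alias for networkx.readwrite.gpickle). Note that read_gpickle/write_gpickle were deprecated in NetworkX 2.6 and removed in 3.0; a hedged sketch of a plain-pickle replacement that works for both 2.x and 3.x graph objects:

import pickle
import networkx as nx

def write_graph(G, path):
    # Plain-pickle stand-in for nx.write_gpickle (removed in NetworkX 3.0).
    with open(path, 'wb') as f:
        pickle.dump(G, f, pickle.HIGHEST_PROTOCOL)

def read_graph(path):
    # Plain-pickle stand-in for nx.read_gpickle.
    with open(path, 'rb') as f:
        return pickle.load(f)

G = nx.path_graph(5)
write_graph(G, 'graph.pkl')
assert read_graph('graph.pkl').number_of_nodes() == 5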
Example #10
                target = G.neighbors(x)[i]
                G.add_edge(source, target)
                repeated_nodes.extend([source, target])

        source += 1
        if source % 1000 == 0:
            print(info(G))
    return G


def Google_graph():
    # Create the Google graph object and return the largest connected subgraph
    G = read_edgelist('web-Google.txt', comments='#')
    G = max(nx.connected_component_subgraphs(G), key=len)
    G.name = "Google_graph"
    return G


if __name__ == '__main__':

    num_nodes = 600000
    p = 0.6
    r = 3
    G = extended_prefential_attachment(num_nodes, p, r)
    print(info(G))
    write_gpickle(G, "HTCM.gpickle")

    G = Google_graph()
    print(info(G))
    write_gpickle(G, "Google.gpickle")
Example #11
def scan_source_files(visitor_cls):
    for source_root in source_roots:
        for folder, dirs, files in os.walk(source_root):
            dirs[:] = [d for d in dirs if d not in exclude_folders]
            for source_file in files:
                if fnmatch.fnmatch(source_file,
                                   '*.py') and ('test' not in source_file):
                    with open(os.path.join(folder, source_file),
                              'r',
                              encoding='utf-8') as source:
                        print('Scanning {}'.format(source.name))
                        ast_tree = ast.parse(source.read())
                        visitor = visitor_cls(source.name)
                        visitor.visit(ast_tree)


output_graph_file = os.path.join(output_folder, 'build_func_deps.graph')
output_def_file = os.path.join(output_folder, 'build_func_deps.def')

if __name__ == '__main__':
    # networkx 2.2 or above is required

    # Phase 1: collect function definitions
    scan_source_files(FunctionDefVisitorPhase1)
    with open(output_def_file, 'wb') as output_file:
        pickle.dump(func_defs, output_file)

    # Phase 2: build and pickle the call graph
    scan_source_files(FunctionDefVisitorPhase2)
    write_gpickle(call_graph, output_graph_file)
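A sketch of how the two artifacts written in Example #11 might be read back for later analysis; the file names match the snippet (here assumed to sit in the working directory), the call graph is assumed to be a directed NetworkX graph, and the degree ranking is purely illustrative:

import pickle
import networkx as nx  # 2.x, so read_gpickle is still available

# Load the pickled function-definition table and the call graph produced above.
with open('build_func_deps.def', 'rb') as f:
    func_defs = pickle.load(f)
call_graph = nx.read_gpickle('build_func_deps.graph')

# Illustrative query: the ten most-called functions by in-degree.
most_called = sorted(call_graph.in_degree(), key=lambda kv: kv[1], reverse=True)[:10]
print(most_called)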
Example #12
File: mdp.py Project: kthulhu/mrv
	if not sourceFile:
		graph = main( filelist, **kwargs_creategraph )
	else:
		if verbose:
			sys.stdout.write("Reading dependencies from: %s\n" % sourceFile)
		graph = gpickle.read_gpickle( sourceFile )

	# SAVE ALL DEPENDENCIES ?
	#########################
	# save to target file
	if targetFile:
		if verbose:
			sys.stdout.write("Saving dependencies to %s\n" % targetFile)
		gpickle.write_gpickle( graph, targetFile )


	# QUERY MODE
	###############
	return_invalid = "-b" in opts
	depth = int( opts.get( "-d", -1 ) )
	as_edge = "-e" in opts
	nice_mode = "-n" in opts
	dotgraph = None
	dotOutputFile = opts.get( "-o", None )
	kwargs_query[ 'invalid_only' ] = return_invalid		# if given, filtering for invalid only is enabled

	if dotOutputFile:
		dotgraph = MayaFileGraph()
Example #13
    def save_graph(self, graph_path=None):
        # save graphs and labels
        if graph_path is None:
            graph_path = self.graph_path
        write_gpickle(self.G, graph_path)
        logger.info(f'Saved graph at [{graph_path}]')