def __init__(self, wallet, config, app=None):
    self.stdscr = curses.initscr()
    curses.noecho()
    curses.cbreak()
    curses.start_color()
    curses.use_default_colors()
    curses.init_pair(1, curses.COLOR_WHITE, curses.COLOR_BLUE)
    curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_CYAN)
    self.stdscr.keypad(1)
    self.stdscr.border(0)
    self.maxy, self.maxx = self.stdscr.getmaxyx()
    curses.curs_set(0)
    self.w = curses.newwin(10, 50, 5, 5)

    self.wallet = wallet
    self.config = config
    set_verbosity(False)
    self.tab = 0
    self.pos = 0
    self.popup_pos = 0

    self.str_recipient = ""
    self.str_description = ""
    self.str_amount = ""
    self.str_fee = ""

    self.wallet.interface.register_callback('updated', self.refresh)
    self.wallet.interface.register_callback('connected', self.refresh)
    self.wallet.interface.register_callback('disconnected', self.refresh)
    self.wallet.interface.register_callback('disconnecting', self.refresh)
    self.tab_names = [_("History"), _("Send"), _("Receive"), _("Contacts"), _("Wall")]
    self.num_tabs = len(self.tab_names)
def main(): """Parse arguments, call convert, write to file.""" # Parse arguments parser = argparse.ArgumentParser() parser.description = "Convert an edge list file to an igraph graph and write it to file in pickle format. The edge list file contains one edge per line as 'from_vertex\tto_vertex'. Lines at the beginning of the file starting with '#' are treated as comments." parser.add_argument("input", help="input file") parser.add_argument("output", help="output file (pickle format)") group = parser.add_mutually_exclusive_group() group.add_argument("-d", "--directed", action="store_true", default=False, help="consider the graph as directed") group.add_argument("-u", "--undirected", action="store_true", default=True, help="consider the graph as undirected (default)") parser.add_argument("-m", "--maxconn", action="store_true", default=False, help="if the graph is not weakly connected, only save the largest connected component") parser.add_argument("-v", "--verbose", action="count", default=0, help="increase verbosity (use multiple times for more verbosity)") parser.add_argument("-z", "--compress", action="store_true", default=False, help="compress the output file") args = parser.parse_args() # Set the desired level of logging util.set_verbosity(args.verbose) # Convert the file G = convert(args.input, args.directed, args.maxconn) # Serialize the graph to file logging.info("Writing graph to file %s", args.output) if args.compress: logging.debug("Compression selected") format = "picklez" else: format = "pickle" G.write(args.output, format)
def main(): """Parse arguments and perform the computation.""" # Parse arguments parser = argparse.ArgumentParser() parser.description = "Compute approximate betweenness centrality of all vertices in a graph using the algorihm by Brandes and Pich, and the time to compute them, and write them to file" parser.add_argument("epsilon", type=util.valid_interval_float, help="accuracy parameter") parser.add_argument("delta", type=util.valid_interval_float, help="confidence parameter") parser.add_argument("graph", help="graph file") parser.add_argument("output", help="output file") parser.add_argument("-m", "--maxconn", action="store_true", default=False, help="if the graph is not weakly connected, only save the largest connected component") parser.add_argument("-p", "--pickle", action="store_true", default=False, help="use pickle reader for input file") parser.add_argument("-s", "--samplesize", type=util.positive_int, default=0, help="use specified sample size. Overrides epsilon, delta, and diameter computation") parser.add_argument("-t", "--timeout", type=util.positive_int, default=3600, help="Timeout computation after specified number of seconds (default 3600 = 1h, 0 = no timeout)") parser.add_argument("-u", "--undirected", action="store_true", default=False, help="consider the graph as undirected ") parser.add_argument("-v", "--verbose", action="count", default=0, help="increase verbosity (use multiple times for more verbosity)") parser.add_argument("-w", "--write", nargs="?", default=False, const="auto", help="write graph (and computed attributes) to file.") args = parser.parse_args() # Set the desired level of logging util.set_verbosity(args.verbose) # Read graph if args.pickle: G = util.read_graph(args.graph) else: G = converter.convert(args.graph, not args.undirected, args.maxconn) # Compute betweenness if args.samplesize: (stats, betw) = betweenness_sample_size(G, args.samplesize, args.write, args.timeout) else: (stats, betw) = betweenness(G, args.epsilon, args.delta, args.write, args.timeout) # If specified, write betweenness as vertex attributes, and time as graph # attribute back to file if args.write: logging.info("Writing betweenness as vertex attributes and stats as graph attribute") if args.write == "auto": filename = os.path.splitext(args.graph)[0] + ("-undir" if args.undirected else "dir") + ".picklez" G.write(filename) else: G.write(args.write) # Write stats and betweenness to output util.write_to_output(stats, betw, args.output)
def main(): """Parse arguments, call the approximation, write it to file.""" # Parse arguments parser = argparse.ArgumentParser() parser.description = "Compute an approximation of the diameter of a graph and the time needed to compute it, and (if specified) write these info as a graph attributes" parser.add_argument("graph", help="graph file") parser.add_argument("-i", "--implementation", choices=["homegrown", "igraph"], default="homegrown", help="use specified implementation of betweenness computation") parser.add_argument("-m", "--maxconn", action="store_true", default=False, help="if the graph is not weakly connected, only save the largest connected component") parser.add_argument("-p", "--pickle", action="store_true", default=False, help="use pickle reader for input file") parser.add_argument("-u", "--undirected", action="store_true", default=False, help="consider the graph as undirected ") parser.add_argument("-v", "--verbose", action="count", default=0, help="increase verbosity (use multiple times for more verbosity)") parser.add_argument("-w", "--write", action="store_true", default=False, help="write the approximation of diameter of the graph as the 'approx_diameter' graph attribute and the time taken to compute it as the 'approx_diam_time' attribute") args = parser.parse_args() # Set the desired level of logging util.set_verbosity(args.verbose) # Seed the random number generator random.seed() # Read graph if args.pickle: G = util.read_graph(args.graph) else: G = converter.convert(args.graph, not args.undirected, args.maxconn) # Read graph from file # Compute the diameter (elapsed_time, diam) = diameter(G, args.implementation) # Print info print("{}, diameter={}, time={}".format(args.graph, diam, elapsed_time)) # If requested, add graph attributes and write graph back to original file if args.write: logging.info("Writing diameter approximation and time to graph") G["approx_diam"] = diam G["approx_diam_time"] = elapsed_time # We use format auto-detection, which should work given that it worked # when we read the file G.write(args.graph)
def main(): """Parse arguments, run experiments, collect results and stats, write to file.""" # Parse arguments parser = argparse.ArgumentParser() parser.description = "TODO" parser.add_argument("epsilon", type=util.valid_interval_float, help="accuracy parameter") parser.add_argument("delta", type=util.valid_interval_float, help="confidence parameter") parser.add_argument("runs", type=util.positive_int, default=20, help="number of runs") parser.add_argument("graph", help="graph file") parser.add_argument("output", help="output file") group = parser.add_mutually_exclusive_group() group.add_argument("-a", "--approximate", action="store_true", default=True, help="use approximate diameter (default)") group.add_argument("-d", "--diameter", type=util.positive_int, default=0, help="value to use for the diameter") group.add_argument("-e", "--exact", action="store_true", default=False, help="use exact diameter") parser.add_argument("-m", "--maxconn", action="store_true", default=False, help="if the graph is not weakly connected, only save the largest connected component") parser.add_argument("-p", "--pickle", action="store_true", default=False, help="use pickle reader for input file") parser.add_argument("-s", "--samplesize", type=util.positive_int, default=0, help="use specified sample size. Overrides epsilon, delta, and diameter computation") parser.add_argument("-t", "--timeout", type=util.positive_int, default=3600, help="Timeout computation after specified number of seconds (default 3600 = 1h, 0 = no timeout)") parser.add_argument("-u", "--undirected", action="store_true", default=False, help="consider the graph as undirected ") parser.add_argument("-v", "--verbose", action="count", default=0, help="increase verbosity (use multiple times for more verbosity)") parser.add_argument("-w", "--weightFile", default="-", help="random weights within the interval 0 to 1, must have as many entries as the number of edges") args = parser.parse_args() # Set the desired level of logging util.set_verbosity(args.verbose) # Read graph if args.pickle: G = util.read_graph(args.graph) else: G = converter.convert(args.graph, not args.undirected, args.maxconn) if args.exact: args.approximate = False # Read the weights weights_list=[] if args.weightFile != "-": with open(args.weightFile,'r') as weight_file: for line in weight_file: weights_list.append(float(line.strip())) # Perform experiment multiple times results = [] for i in range(args.runs): logging.info("Run #%d", i) # Compute betweenness if args.samplesize: results.append(vc_sample.betweenness_sample_size(G, args.samplesize, False, args.timeout)) else: if args.diameter > 0: results.append(vc_sample.betweenness(G, args.epsilon, args.delta, weights_list, args.diameter, False, args.timeout)) else: results.append(vc_sample.betweenness(G, args.epsilon, args.delta, weights_list, args.approximate, False, args.timeout)) # Compute aggregate statistics about the experiments stats = dict() stats["graph"]= os.path.basename(args.graph) stats["vertices"] = G.vcount() stats["edges"] = G.ecount() stats["runs"] = args.runs if args.samplesize: stats["sample_size"] = args.samplesize else: stats["delta"] = args.delta stats["epsilon"] = args.epsilon stats["sample_size"] = results[0][0]["sample_size"] stats_names = ["time", "forward_touched_edges", "backward_touched_edges"] if not args.samplesize: stats_names.append("diameter") stats_names.append("diameter_touched_edges") for stat_name in stats_names: values = sorted([x[0][stat_name] for x in results]) stats[stat_name + "_max"] = 
values[-1] stats[stat_name + "_min"] = values[0] stats[stat_name + "_avg"] = sum(values) / args.runs if args.runs > 1: stats[stat_name + "_stddev"] = math.sqrt(sum([math.pow(value - stats[stat_name + "_avg"], 2) for value in values]) / (args.runs - 1)) else: stats[stat_name + "_stddev"] = 0.0 stats["betw_min"] = [0.0] * G.vcount() stats["betw_max"] = [0.0] * G.vcount() stats["betw_avg"] = [0.0] * G.vcount() for i in range(G.vcount()): betws = sorted([x[1][i] for x in results]) stats["betw_min"][i]= betws[0] stats["betw_max"][i] = betws[-1] stats["betw_avg"][i] = sum(betws) / args.runs csvkeys="graph, runs, epsilon, delta, sample_size" csvkeys_names= ["{0}_avg, {0}_min, {0}_stddev, {0}_max, {0}_min".format(stat_name) for stat_name in stats_names] csvkeys_list = [csvkeys] + csvkeys_names csvkeys = ",".join(csvkeys_list) # print(stats["betw_min"]) print(csvkeys) print(util.dict_to_csv(stats, csvkeys)) # Write stats and results to output file try: with open(args.output, "wb") as output: logging.info("Writing stats and results to %s", args.output) pickle.dump((stats, results), output) output.close() #pkl_file = open("vc_out.picklez", 'rb') #reader = pickle.load(pkl_file) #print(reader[0]["diameter_touched_edges_avg"]) except OSError as E: logging.critical("Cannot write stats and results to %s: %s", args.output, E.strerror) sys.exit(2)
def main(): """Parse arguments and perform the computation.""" # Parse arguments parser = argparse.ArgumentParser() parser.description = "Compute approximate betweenness centrality of all vertices in a graph using the algorihm by Brandes and Pich, and the time to compute them, and write them to file" parser.add_argument("epsilon", type=util.valid_interval_float, help="accuracy parameter") parser.add_argument("delta", type=util.valid_interval_float, help="confidence parameter") parser.add_argument("graph", help="graph file") parser.add_argument("output", help="output file") parser.add_argument( "-m", "--maxconn", action="store_true", default=False, help= "if the graph is not weakly connected, only save the largest connected component" ) parser.add_argument("-p", "--pickle", action="store_true", default=False, help="use pickle reader for input file") parser.add_argument( "-s", "--samplesize", type=util.positive_int, default=0, help= "use specified sample size. Overrides epsilon, delta, and diameter computation" ) parser.add_argument( "-t", "--timeout", type=util.positive_int, default=3600, help= "Timeout computation after specified number of seconds (default 3600 = 1h, 0 = no timeout)" ) parser.add_argument("-u", "--undirected", action="store_true", default=False, help="consider the graph as undirected ") parser.add_argument( "-v", "--verbose", action="count", default=0, help="increase verbosity (use multiple times for more verbosity)") parser.add_argument("-w", "--write", nargs="?", default=False, const="auto", help="write graph (and computed attributes) to file.") args = parser.parse_args() # Set the desired level of logging util.set_verbosity(args.verbose) # Read graph if args.pickle: G = util.read_graph(args.graph) else: G = converter.convert(args.graph, not args.undirected, args.maxconn) # Compute betweenness if args.samplesize: (stats, betw) = betweenness_sample_size(G, args.samplesize, args.write, args.timeout) else: (stats, betw) = betweenness(G, args.epsilon, args.delta, args.write, args.timeout) # If specified, write betweenness as vertex attributes, and time as graph # attribute back to file if args.write: logging.info( "Writing betweenness as vertex attributes and stats as graph attribute" ) if args.write == "auto": filename = os.path.splitext(args.graph)[0] + ( "-undir" if args.undirected else "dir") + ".picklez" G.write(filename) else: G.write(args.write) # Write stats and betweenness to output util.write_to_output(stats, betw, args.output)
def main(): """Parse arguments, run experiments, collect results and stats, write to file.""" # Parse arguments parser = argparse.ArgumentParser() parser.description = "Perform experiment to compute exact betweenness centrality of all vertices in a graph using Brandes' algorithm" parser.add_argument("runs", type=util.positive_int, default=20, help="number of runs") parser.add_argument("graph", help="graph file") parser.add_argument("output", help="output file") parser.add_argument( "-m", "--maxconn", action="store_true", default=False, help= "if the graph is not weakly connected, only save the largest connected component" ) parser.add_argument("-p", "--pickle", action="store_true", default=False, help="use pickle reader for input file") parser.add_argument( "-t", "--timeout", type=util.positive_int, default=3600, help= "Timeout computation after specified number of seconds (default 3600 = 1h, 0 = no timeout)" ) parser.add_argument("-u", "--undirected", action="store_true", default=False, help="consider the graph as undirected ") parser.add_argument( "-v", "--verbose", action="count", default=0, help="increase verbosity (use multiple times for more verbosity)") args = parser.parse_args() # Set the desired level of logging util.set_verbosity(args.verbose) # Read graph if args.pickle: G = util.read_graph(args.graph) else: G = converter.convert(args.graph, not args.undirected, args.maxconn) # Perform experiment multiple times results = [] for i in range(args.runs): logging.info("Run #%d", i) results.append(brandes_exact.betweenness(G, False, args.timeout)) # Compute aggregate statistics about the experiments stats = dict(results[0][0]) stats["graph"] = os.path.basename(args.graph) stats["vertices"] = G.vcount() stats["edges"] = G.ecount() stats["runs"] = args.runs del stats["time"] times = sorted([x[0]["time"] for x in results]) stats["time_max"] = times[-1] stats["time_min"] = times[0] stats["time_avg"] = sum(times) / args.runs if args.runs > 1: stats["time_stddev"] = math.sqrt( sum([math.pow(time - stats["time_avg"], 2) for time in times]) / (args.runs - 1)) else: stats["time_stddev"] = 0.0 csvkeys = "graph, runs, time_avg, time_stddev, time_max, time_min, forward_touched_edges, backward_touched_edges" print(csvkeys) print(util.dict_to_csv(stats, csvkeys)) # Write stats and results to output file try: with open(args.output, "wb") as output: logging.info("Writing stats and results to %s", args.output) pickle.dump((stats, results), output) output.close() except OSError as E: logging.critical("Cannot write stats and results to %s: %s", args.output, E.strerror) sys.exit(2)
def main(argv): """Control flow for reading the config settings and processing sounds. Get the configuration from the file :const:`CFG_FILE` (if it exists) and update the configuration from command-line args (if any). Extract internal resources for the duration of the remaining work; will be automatically cleaned up afterward. If no config, instantiate a default config and exit. Construct a :class:`config.Settings` object, and read the file table. Process the selected sounds, and then print any that were not found. :param argv: command-line arguments :type argv: iterable(str) :returns: 1 if an error prevents an attempt at processing or instantiation of the default config file; 0 otherwise :rtype: int :raises config.BadSetting: if a necessary setting is undefined :raises config.TooManySubstitutions: if a setting-evaluation loop goes on for too many iterations """ # Grab a couple of useful paths. qs_home = app_home() qs_working_dir = os.getcwd() # Read the config file (if it exists). cfg_path = os.path.join(qs_working_dir, CFG_FILE) cfg_table = config.read_cfg(cfg_path) # Apply any command-line args. config.update_cfg(argv, cfg_table) # Set pause_on_exit appropriately, and extract packaged resources for the # duration of the remaining work. path_table = {'qs_home' : add_sep(qs_home), 'qs_working_dir' : add_sep(qs_working_dir)} set_pause_on_exit(cfg_table, path_table) temp_dir = user_temp_dir(cfg_table, path_table) with resources.temp_copies(RES_PATH, temp_dir) as resource_dir: # Print info about this program and internal utilities. print_qs_info(resource_dir) # Do we have any config? If not, create a default config file and exit. if not cfg_table: print("No settings in config file or on command line.") if not create_config_file(cfg_path, resource_dir): return 1 print("Edit the config (if you like) then run quakesounds again.") return 0 # Create the settings-evaluator. path_table['qs_internal'] = add_sep(resource_dir) settings = config.Settings(cfg_table, path_table) # Set verbosity for the remainder of the run, and print module info # if verbose. set_verbosity(settings) print_modules_info() print("") # Get the sound selections and name mappings. targets_path = settings.eval('targets_path') targets_table = config.read_cfg(targets_path, default_sound_name) if not targets_table: if os.path.exists(targets_path): print("Nothing to process in the targets table at path: {0}".format( targets_path)) return 0 else: print("No targets table found at path: {0}".format(targets_path)) return 1 # Do that voodoo that we do. if not processing.go(settings, targets_table): return 1 # Inform of leftovers. if targets_table: print("Not processed:") for t in targets_table: print(" {0}".format(t)) else: print("All selections processed.") print("") # Done! return 0
def main(): """Parse arguments, call the approximation, write it to file.""" # Parse arguments parser = argparse.ArgumentParser() parser.description = "Compute an approximation of the diameter of a graph and the time needed to compute it, and (if specified) write these info as a graph attributes" parser.add_argument("graph", help="graph file") parser.add_argument( "-i", "--implementation", choices=["homegrown", "igraph"], default="homegrown", help="use specified implementation of betweenness computation") parser.add_argument( "-m", "--maxconn", action="store_true", default=False, help= "if the graph is not weakly connected, only save the largest connected component" ) parser.add_argument("-p", "--pickle", action="store_true", default=False, help="use pickle reader for input file") parser.add_argument("-u", "--undirected", action="store_true", default=False, help="consider the graph as undirected ") parser.add_argument( "-v", "--verbose", action="count", default=0, help="increase verbosity (use multiple times for more verbosity)") parser.add_argument( "-w", "--write", action="store_true", default=False, help= "write the approximation of diameter of the graph as the 'approx_diameter' graph attribute and the time taken to compute it as the 'approx_diam_time' attribute" ) args = parser.parse_args() # Set the desired level of logging util.set_verbosity(args.verbose) # Seed the random number generator random.seed() # Read graph if args.pickle: G = util.read_graph(args.graph) else: G = converter.convert(args.graph, not args.undirected, args.maxconn) # Read graph from file # Compute the diameter (elapsed_time, diam) = diameter(G, args.implementation) # Print info print("{}, diameter={}, time={}".format(args.graph, diam, elapsed_time)) # If requested, add graph attributes and write graph back to original file if args.write: logging.info("Writing diameter approximation and time to graph") G["approx_diam"] = diam G["approx_diam_time"] = elapsed_time # We use format auto-detection, which should work given that it worked # when we read the file G.write(args.graph)
def main(): """Parse arguments, run experiments, collect results and stats, write to file.""" # Parse arguments parser = argparse.ArgumentParser() parser.description = "Perform experiment to compute exact betweenness centrality of all vertices in a graph using Brandes' algorithm" parser.add_argument("runs", type=util.positive_int, default=20, help="number of runs") parser.add_argument("graph", help="graph file") parser.add_argument("output", help="output file") parser.add_argument("-m", "--maxconn", action="store_true", default=False, help="if the graph is not weakly connected, only save the largest connected component") parser.add_argument("-p", "--pickle", action="store_true", default=False, help="use pickle reader for input file") parser.add_argument("-t", "--timeout", type=util.positive_int, default=3600, help="Timeout computation after specified number of seconds (default 3600 = 1h, 0 = no timeout)") parser.add_argument("-u", "--undirected", action="store_true", default=False, help="consider the graph as undirected ") parser.add_argument("-v", "--verbose", action="count", default=0, help="increase verbosity (use multiple times for more verbosity)") args = parser.parse_args() # Set the desired level of logging util.set_verbosity(args.verbose) # Read graph if args.pickle: G = util.read_graph(args.graph) else: G = converter.convert(args.graph, not args.undirected, args.maxconn) # Perform experiment multiple times results = [] for i in range(args.runs): logging.info("Run #%d", i) results.append(brandes_exact.betweenness(G, False, args.timeout)) # Compute aggregate statistics about the experiments stats = dict(results[0][0]) stats["graph"]= os.path.basename(args.graph) stats["vertices"] = G.vcount() stats["edges"] = G.ecount() stats["runs"] = args.runs del stats["time"] times = sorted([x[0]["time"] for x in results]) stats["time_max"] = times[-1] stats["time_min"] = times[0] stats["time_avg"] = sum(times) / args.runs if args.runs > 1: stats["time_stddev"] = math.sqrt(sum([math.pow(time - stats["time_avg"], 2) for time in times]) / (args.runs - 1)) else: stats["time_stddev"] = 0.0 csvkeys="graph, runs, time_avg, time_stddev, time_max, time_min, forward_touched_edges, backward_touched_edges" print(csvkeys) print(util.dict_to_csv(stats, csvkeys)) # Write stats and results to output file try: with open(args.output, "wb") as output: logging.info("Writing stats and results to %s", args.output) pickle.dump((stats, results), output) output.close() except OSError as E: logging.critical("Cannot write stats and results to %s: %s", args.output, E.strerror) sys.exit(2)
def main():
    args = parse_args()

    # Set verbose value in util
    set_verbosity(args.verbose)

    # Assume all files are to be extracted when nothing is specified.
    if args.extract == []:
        args.extract.append('*:*.*')

    extraction_rules = [ExtractionRule(e) for e in args.extract]

    merge_images = not args.nomerge
    exec_dev = args.development

    # Set path in utility class
    dbg("setting age2 input directory to " + args.srcdir, 1)
    set_read_dir(args.srcdir)

    # Write mode is disabled by default, unless destdir is set
    if args.destdir != '/dev/null' and not args.listfiles and not args.dumpfilelist:
        dbg("setting write dir to " + args.destdir, 1)
        set_write_dir(args.destdir)
        write_enabled = True
    else:
        write_enabled = False

    drsfiles = {
        "graphics":  DRS("Data/graphics.drs"),
        "interface": DRS("Data/interfac.drs"),
        "sounds0":   DRS("Data/sounds.drs"),
        "sounds1":   DRS("Data/sounds_x1.drs"),
        "gamedata0": DRS("Data/gamedata.drs"),
        "gamedata1": DRS("Data/gamedata_x1.drs"),
        "gamedata2": DRS("Data/gamedata_x1_p1.drs"),
        "terrain":   DRS("Data/terrain.drs")
    }

    palette = ColorTable(drsfiles["interface"].get_file_data('bin', 50500))

    if exec_dev:
        if write_enabled:
            print("no indev function available at the moment.")
            return
        else:
            raise Exception("development mode requires write access")

    if write_enabled:
        file_write(file_get_path('processed/player_color_palette.pal', write=True),
                   palette.gen_player_color_palette())

        import blendomatic
        blend_data = blendomatic.Blendomatic("Data/blendomatic.dat")
        for (modeidx, png, size, metadata) in blend_data.draw_alpha_frames_merged():
            fname = 'alphamask/mode%02d' % (modeidx)
            filename = file_get_path(fname, write=True)
            file_write(filename + ".png", png)
            file_write(filename + ".docx", metadata)
            dbg("blending mode%02d -> saved packed atlas" % (modeidx), 1)

        import gamedata.empiresdat
        datfile = gamedata.empiresdat.Empires2X1P1("Data/empires2_x1_p1.dat")

        filename = file_get_path("processed/terrain_meta.docx", write=True)
        tmeta = "#terrain specification\n"
        tmeta += "#idx=terrain_id, slp_id, sound_id, blend_mode, blend_priority, angle_count, frame_count, terrain_dimensions0, terrain_dimensions1, terrain_replacement_id, name0, name1\n"
        tmeta += "n=%d\n" % len(datfile.data["terrain"]["terrain"])

        i = 0
        blending_modes = set()
        for tk in datfile.data["terrain"]["terrain"]:
            if tk["slp_id"] < 0:
                continue
            blending_modes.add(tk["blend_mode"])
            wanted = ["terrain_id", "slp_id", "sound_id", "blend_mode",
                      "blend_priority", "angle_count", "frame_count",
                      "terrain_dimensions0", "terrain_dimensions1",
                      "terrain_replacement_id", "name0", "name1"]
            line = [tk[w] for w in wanted]
            # As blending modes 0==1 and 7==8, and ice is 5 for sure,
            # we subtract one from the ids and can map -1 to 0,
            # as mode (0-1) == (1-1)
            # TODO: this can't be correct...
            line[3] -= 1
            if line[3] < 0:
                line[3] = 0
            line = map(str, line)
            tmeta += ("%d=" % i) + ",".join(line) + "\n"
            i += 1
        file_write(filename, tmeta)

        filename = file_get_path("processed/blending_meta.docx", write=True)
        bmeta = "#blending mode specification\n"
        bmeta += "#yeah, i know that this content is totally stupid, but that's how the data can be injected later\n"
        bmeta += "#idx=mode_id\n"
        bmeta += "n=%d\n" % len(blending_modes)
        i = 0
        for m in blending_modes:
            bmeta += "%d=%d\n" % (i, m)
            i += 1
        file_write(filename, bmeta)

    if args.extrafiles:
        file_write(file_get_path('info/colortable.pal.png', write=True),
                   palette.gen_image())

    file_list = dict()
    files_extracted = 0

    for drsname, drsfile in drsfiles.items():
        for file_extension, file_id in drsfile.files:
            if not any(er.matches(drsname, file_id, file_extension)
                       for er in extraction_rules):
                continue

            if args.listfiles or args.dumpfilelist:
                fid = int(file_id)
                if fid not in file_list:
                    file_list[fid] = list()
                file_list[fid] += [(drsfile.fname, file_extension)]
                continue

            if write_enabled:
                fbase = file_get_path('raw/' + drsfile.fname + '/' + str(file_id), write=True)
                fname = fbase + '.' + file_extension
                dbg("Extracting to " + fname + "...", 2)
                file_data = drsfile.get_file_data(file_extension, file_id)

            if file_extension == 'slp':
                if write_enabled:
                    s = SLP(file_data)
                    out_file_tmp = drsname + ": " + str(file_id) + "." + file_extension
                    if merge_images:
                        png, (width, height), metadata = s.draw_frames_merged(palette)
                        file_write(fname + ".png", png)
                        file_write(fname + '.docx', metadata)
                        dbg(out_file_tmp + " -> saved packed atlas", 1)
                    else:
                        for idx, (png, metadata) in enumerate(s.draw_frames(palette)):
                            filename = fname + '.' + str(idx)
                            file_write(filename + '.png', png.image)
                            file_write(filename + '.docx', metadata)
                            dbg(out_file_tmp + " -> extracting frame %3d...\r" % (idx), 1, end="")
                        dbg(out_file_tmp + " -> saved single frame(s)", 1)
            elif file_extension == 'wav':
                if write_enabled:
                    file_write(fname, file_data)
                    use_opus = True
                    if use_opus:
                        # opusenc invocation (TODO: ffmpeg?)
                        opus_convert_call = ['opusenc', fname, fbase + '.opus']
                        dbg("converting... : " + fname + " to opus.", 1)
                        oc = subprocess.Popen(opus_convert_call,
                                              stdout=subprocess.PIPE,
                                              stderr=subprocess.PIPE)
                        oc_out, oc_err = oc.communicate()
                        if ifdbg(2):
                            oc_out = oc_out.decode("utf-8")
                            oc_err = oc_err.decode("utf-8")
                            dbg(oc_out + "\n" + oc_err, 2)
                        # Remove the original wave file
                        remove(fname)
            else:
                # This file type is unknown or does not require conversion
                if write_enabled:
                    file_write(fname, file_data)

            files_extracted += 1

    if write_enabled:
        dbg(str(files_extracted) + " files extracted", 0)

    if args.listfiles or args.dumpfilelist:
        # file_list = sorted(file_list)
        if not args.dumpfilelist:
            for idx, f in file_list.items():
                ret = "%d = [ " % idx
                for file_name, file_extension in f:
                    ret += "%s/%d.%s, " % (file_name, idx, file_extension)
                ret += "]"
                print(ret)
        else:
            ret = "#!/usr/bin/python\n\n#auto generated age2tc file list\n\n"
            import pprint
            ret += "avail_files = "
            ret += pprint.pformat(file_list)
            print(ret)
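# ExtractionRule is not shown here. Judging from the default pattern '*:*.*'
# and the matches(drsname, file_id, file_extension) call above, a plausible
# sketch is the following; the class body is an assumption, not the real
# implementation.
class ExtractionRuleSketch:
    def __init__(self, rule):
        drsname, fname = rule.split(':')
        file_id, extension = fname.split('.')
        self.drsname = drsname if drsname != '*' else None
        self.file_id = int(file_id) if file_id != '*' else None
        self.extension = extension if extension != '*' else None

    def matches(self, drsname, file_id, extension):
        return ((self.drsname is None or self.drsname == drsname) and
                (self.file_id is None or self.file_id == int(file_id)) and
                (self.extension is None or self.extension == extension))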
def main(): """Parse arguments, call betweenness(), write to file.""" # Parse arguments parser = argparse.ArgumentParser() parser.description = "Compute approximate betweenness centrality of all vertices in a graph using sampling and VC-dimension, and the time to compute them, and write them to file" parser.add_argument("epsilon", type=util.valid_interval_float, help="accuracy parameter") parser.add_argument("delta", type=util.valid_interval_float, help="confidence parameter") parser.add_argument("graph", help="graph file") parser.add_argument("output", help="output file") group = parser.add_mutually_exclusive_group() group.add_argument("-a", "--approximate", action="store_true", default=True, help="use approximate diameter (default)") group.add_argument("-d", "--diameter", type=util.positive_int, default=0, help="value to use for the diameter") group.add_argument("-e", "--exact", action="store_true", default=False, help="use exact diameter") parser.add_argument( "-m", "--maxconn", action="store_true", default=False, help= "if the graph is not weakly connected, only save the largest connected component" ) parser.add_argument("-p", "--pickle", action="store_true", default=False, help="use pickle reader for input file") parser.add_argument( "-s", "--samplesize", type=util.positive_int, default=0, help= "use specified sample size. Overrides epsilon, delta, and diameter computation" ) parser.add_argument( "-t", "--timeout", type=util.positive_int, default=3600, help= "Timeout computation after specified number of seconds (default 3600 = 1h, 0 = no timeout)" ) parser.add_argument("-u", "--undirected", action="store_true", default=False, help="consider the graph as undirected ") parser.add_argument( "-v", "--verbose", action="count", default=0, help="increase verbosity (use multiple times for more verbosity)") parser.add_argument("-w", "--write", nargs="?", default=False, const="auto", help="write graph (and computed attributes) to file.") parser.add_argument( "-l", "--weightFile", default="-", help= "random weights within the interval 0 to 1, must have as many entries as the number of edges" ) args = parser.parse_args() # Set the desired level of logging util.set_verbosity(args.verbose) # Seed the random number generator random.seed() # Read graph if args.pickle: G = util.read_graph(args.graph) else: G = converter.convert(args.graph, not args.undirected, args.maxconn) if args.exact: args.approximate = False # Read the weights weights_list = [] if args.weightFile != "-": with open(args.weightFile, 'r') as weight_file: for line in weight_file: weights_list.append(float(line.strip())) # Compute betweenness if args.samplesize: (stats, betw) = betweenness_sample_size(G, args.samplesize, args.write) else: if args.diameter > 0: (stats, betw) = betweenness(G, args.epsilon, args.delta, weights_list, args.diameter, args.write) else: (stats, betw) = betweenness(G, args.epsilon, args.delta, weights_list, args.approximate, args.write) # If specified, write betweenness as vertex attributes, and time as graph # attribute back to file if args.write: logging.info( "Writing betweenness as vertex attributes and stats as graph attribute" ) if args.write == "auto": filename = os.path.splitext(args.graph)[0] + ( "-undir" if args.undirected else "dir") + ".picklez" G.write(filename) else: G.write(args.write) # Write stats and betweenness to output util.write_to_output(stats, betw, args.output)
def main(): """Parse arguments, call betweenness(), write to file.""" # Parse arguments parser = argparse.ArgumentParser() parser.description = "Compute approximate betweenness centrality of all vertices in a graph using sampling and VC-dimension, and the time to compute them, and write them to file" parser.add_argument("epsilon", type=util.valid_interval_float, help="accuracy parameter") parser.add_argument("delta", type=util.valid_interval_float, help="confidence parameter") parser.add_argument("graph", help="graph file") parser.add_argument("output", help="output file") group = parser.add_mutually_exclusive_group() group.add_argument("-a", "--approximate", action="store_true", default=True, help="use approximate diameter (default)") group.add_argument("-d", "--diameter", type=util.positive_int, default=0, help="value to use for the diameter") group.add_argument("-e", "--exact", action="store_true", default=False, help="use exact diameter") parser.add_argument("-m", "--maxconn", action="store_true", default=False, help="if the graph is not weakly connected, only save the largest connected component") parser.add_argument("-p", "--pickle", action="store_true", default=False, help="use pickle reader for input file") parser.add_argument("-s", "--samplesize", type=util.positive_int, default=0, help="use specified sample size. Overrides epsilon, delta, and diameter computation") parser.add_argument("-t", "--timeout", type=util.positive_int, default=3600, help="Timeout computation after specified number of seconds (default 3600 = 1h, 0 = no timeout)") parser.add_argument("-u", "--undirected", action="store_true", default=False, help="consider the graph as undirected ") parser.add_argument("-v", "--verbose", action="count", default=0, help="increase verbosity (use multiple times for more verbosity)") parser.add_argument("-w", "--write", nargs="?", default=False, const="auto", help="write graph (and computed attributes) to file.") parser.add_argument("-l", "--weightFile", default="-", help="random weights within the interval 0 to 1, must have as many entries as the number of edges") args = parser.parse_args() # Set the desired level of logging util.set_verbosity(args.verbose) # Seed the random number generator random.seed() # Read graph if args.pickle: G = util.read_graph(args.graph) else: G = converter.convert(args.graph, not args.undirected, args.maxconn) if args.exact: args.approximate = False # Read the weights weights_list=[] if args.weightFile != "-": with open(args.weightFile,'r') as weight_file: for line in weight_file: weights_list.append(float(line.strip())) # Compute betweenness if args.samplesize: (stats, betw) = betweenness_sample_size(G, args.samplesize, args.write) else: if args.diameter > 0: (stats, betw) = betweenness(G, args.epsilon, args.delta, weights_list, args.diameter, args.write) else: (stats, betw) = betweenness(G, args.epsilon, args.delta, weights_list, args.approximate, args.write) # If specified, write betweenness as vertex attributes, and time as graph # attribute back to file if args.write: logging.info("Writing betweenness as vertex attributes and stats as graph attribute") if args.write == "auto": filename = os.path.splitext(args.graph)[0] + ("-undir" if args.undirected else "dir") + ".picklez" G.write(filename) else: G.write(args.write) # Write stats and betweenness to output util.write_to_output(stats, betw, args.output)
""") parser.add_argument('-enable-testing', '--enable-testing', action='store_true', default=False) parser.add_argument('-xct', '--xctest', action='store_true', default=False) parser.add_argument('-sdk', '--sdk') parser.add_argument('-target', '--target-version') parser.add_argument('-o', '--output', default='Frameworks') parser.add_argument('-v', '--verbose', action='store_true', help='verbose logging') args = parser.parse_args() set_verbosity(args.verbose) FRAMEWORKS_DIR = 'Frameworks' OUTPUT = args.output if __name__ == '__main__': if args.sdk is None: log_error('must have sdk') exit(1) try: tuples = list( map(lambda platform: (platform, platform.variants), platforms)) variants_to_platforms = dict(t for sublist in map( lambda tup: map(lambda variant: (variant.name, tup[0]), tup[1]), tuples) for t in sublist) variants_to_variants = dict(t for sublist in map(
            connection, address = s.accept()
        except socket.timeout:
            if not server.clients:
                if time.time() - t > daemon_timeout:
                    print_error("Daemon timeout")
                    break
            else:
                t = time.time()
            continue
        t = time.time()
        client = ClientThread(server, connection)
        client.start()
    server.stop()
    # Sleep so that other threads can terminate cleanly
    time.sleep(0.5)
    print_error("Daemon exiting")


if __name__ == '__main__':
    import simple_config, util
    config = simple_config.SimpleConfig()
    util.set_verbosity(True)
    server = NetworkServer(config)
    server.start()
    try:
        daemon_loop(server)
    except KeyboardInterrupt:
        print("Ctrl C - Stopping daemon")
        server.stop()
        sys.exit(1)
def main(): """Parse arguments, do the comparison, write to output.""" parser = argparse.ArgumentParser() parser.description = "compare estimation of betweenness centralities to exact values" parser.add_argument("epsilon", type=util.valid_interval_float, help="accuracy parameter") parser.add_argument("delta", type=util.valid_interval_float, help="confidence parameter") parser.add_argument("graph", help="graph file") group = parser.add_mutually_exclusive_group() group.add_argument("-a", "--approximate", action="store_true", default=True, help="use approximate diameter when computing approximation of betweenness using VC-Dimension (default)") group.add_argument("-d", "--diameter", type=util.positive_int, default=0, help="value to use for the diameter") group.add_argument("-e", "--exact", action="store_true", default=False, help="use exact diameter when computing approximation of betweenness using VC-Dimension") parser.add_argument("-m", "--maxconn", action="store_true", default=False, help="if the graph is not weakly connected, only save the largest connected component") parser.add_argument("-p", "--pickle", action="store_true", default=False, help="use pickle reader for input file") parser.add_argument("-r", "--resultfiles", nargs=4, help="Use results files rather than recomputing betweenness. Files should be specified as 'exact_res vc_res bp_res gss_res'") parser.add_argument("-s", "--samplesize", type=util.positive_int, default=0, help="use specified sample size. Overrides epsilon, delta, and diameter computation") parser.add_argument("-t", "--timeout", type=util.positive_int, default=3600, help="Timeout computation after specified number of seconds (default 3600 = 1h, 0 = no timeout)") parser.add_argument("-u", "--undirected", action="store_true", default=False, help="consider the graph as undirected ") parser.add_argument("-v", "--verbose", action="count", default=0, help="increase verbosity (use multiple times for more verbosity)") parser.add_argument("-w", "--write", nargs="?", default=False, const="auto", help="write graph (and computed attributes) to file.") args = parser.parse_args() # Set the desired level of logging util.set_verbosity(args.verbose) # Seed the random number generator random.seed() # Read graph if args.pickle: G = util.read_graph(args.graph) else: G = converter.convert(args.graph, not args.undirected, args.maxconn) if args.exact: args.approximate = False if not args.resultfiles: (exact_stats, exact_betw) = brandes_exact.betweenness(G, args.write, args.timeout) if args.samplesize: (vc_stats, vc_betw) = vc_sample.betweenness_sample_size(G, args.samplesize, args.write, args.timeout) (bp_stats, bp_betw) = brandespich_sample.betweenness_sample_size(G, args.samplesize, args.write, args.timeout) (gss_stats, gss_betw) = geisbergerss_sample.betweenness_sample_size(G, args.samplesize, args.write, args.timeout) else: if args.diameter > 0: (vc_stats, vc_betw) = vc_sample.betweenness(G, args.epsilon, args.delta, args.diameter, args.write, args.timeout) else: (vc_stats, vc_betw) = vc_sample.betweenness(G, args.epsilon, args.delta, args.approximate, args.write, args.timeout) (bp_stats, bp_betw) = brandespich_sample.betweenness(G, args.epsilon, args.delta, args.write, args.timeout) (gss_stats, gss_betw) = geisbergerss_sample.betweenness(G, args.epsilon, args.delta, args.write, args.timeout) else: (exact_stats, exact_betw) = util.read_stats_betw(args.result_files[0]) (vc_stats, vc_betw) = util.read_stats_betw(args.result_files[1]) (bp_stats, bp_betw) = 
util.read_stats_betw(args.result_files[2]) (gss_stats, gss_betw) = util.read_stats_betw(args.result_files[3]) #Compute useful graph statistics (mainly diameter) if "diam" not in G.attributes(): diameter.diameter(G) # If specified, write betweenness as vertex attributes, and time and # diameter as graph attributes back to file if args.write: logging.info("Writing betweenness as vertex attributes and stats as graph attribute") if args.write == "auto": filename = os.path.splitext(args.graph)[0] + ("-undir" if args.undirected else "dir") + ".picklez" G.write(filename) else: G.write(args.write) # Compute error statistics # It is not a problem to sort the error by value because we only compute # aggregates. # Normalize #normalizer = math.pow(G.vcount(),2)-G.vcount() #norm_exact_betw = [a/normalizer for a in exact_betw] #norm_vc_betw = [a/normalizer for a in vc_betw] #norm_bp_betw = [a/normalizer for a in bp_betw] #norm_gss_betw = [a/normalizer for a in gss_betw] #VC-STATISTICS logging.info("Computing error statistics") max_err = args.epsilon * G.vcount() * (G.vcount() - 1) / 2 vc_errs = sorted([abs(a - b) for a,b in zip(exact_betw,vc_betw)]) vc_stats["err_avg"] = sum(vc_errs) / G.vcount() vc_stats["err_max"] = vc_errs[-1] vc_stats["err_min"] = list(itertools.filterfalse(lambda x: x == 0, vc_errs))[0] vc_stats["err_stddev"] = math.sqrt(sum([math.pow(err - vc_stats["err_avg"], 2) for err in vc_errs]) / (G.vcount() -1)) vc_stats["euc_dist"] = math.sqrt(sum([math.pow(a - b, 2) for a,b in zip(exact_betw,vc_betw)])) vc_stats["wrong_eps"] = 0; for i in range(G.vcount()): err = abs(exact_betw[i] - vc_betw[i]) #if err > max_err: #vc_stats["wrong_eps"] += 1 #if vc_stats["wrong_eps"] == 1: #print("## VC wrong epsilon ##") #print("{} {} {} {} {} {} {}".format(i, G.vs[i].degree(), #exact_betw[i], vc_betw[i], bp_betw[i], #err, err / (G.vcount() * (G.vcount() -1) / 2))) #BP-STATISTICS bp_errs = sorted([abs(a - b) for a,b in zip(exact_betw,bp_betw)]) bp_stats["err_avg"] = sum(bp_errs) / G.vcount() bp_stats["err_max"] = max(bp_errs) bp_stats["err_min"] = list(itertools.filterfalse(lambda x: x == 0, bp_errs))[0] bp_stats["err_stddev"] = math.sqrt(sum([math.pow(err - bp_stats["err_avg"], 2) for err in bp_errs]) / (G.vcount() -1)) bp_stats["euc_dist"] = math.sqrt(sum([math.pow(a - b, 2) for a,b in zip(exact_betw,bp_betw)])) bp_stats["wrong_eps"] = 0 for i in range(G.vcount()): err = abs(exact_betw[i] - bp_betw[i]) #if err > max_err: #bp_stats["wrong_eps"] += 1 #if bp_stats["wrong_eps"] == 1: #print("## BP wrong epsilon ##") #print("{} {} {} {} {} {} {}".format(i, G.vs[i].degree(), #exact_betw[i], bp_betw[i], vc_betw[i], err, err / (G.vcount() * (G.vcount() -1) / 2))) #GSS-STATISTICS gss_errs = sorted([abs(a - b) for a,b in zip(exact_betw,gss_betw)]) gss_stats["err_avg"] = sum(gss_errs) / G.vcount() gss_stats["err_max"] = max(gss_errs) gss_stats["err_min"] = list(itertools.filterfalse(lambda x: x == 0, gss_errs))[0] gss_stats["err_stddev"] = math.sqrt(sum([math.pow(err - gss_stats["err_avg"], 2) for err in gss_errs]) / (G.vcount() -1)) gss_stats["euc_dist"] = math.sqrt(sum([math.pow(a - b, 2) for a,b in zip(exact_betw,gss_betw)])) gss_stats["wrong_eps"] = 0 for i in range(G.vcount()): err = abs(exact_betw[i] - gss_betw[i]) #if err > max_err: #gss_stats["wrong_eps"] += 1 #if gss_stats["wrong_eps"] == 1: #print("## GSS wrong epsilon ##") #print("{} {} {} {} {} {} {}".format(i, G.vs[i].degree(), #exact_betw[i], gss_betw[i], vc_betw[i], err, err / (G.vcount() * (G.vcount() -1) / 2))) # Print statistics to output as CSV 
logging.info("Printing statistics") print("graph,nodes,edges,diam,directed,epsilon,delta,sample_size") print("{},{},{},{},{},{},{},{}".format(G["filename"], G.vcount(), G.ecount(), G["diam"], G.is_directed(), args.epsilon, args.delta, args.samplesize)) #csvkeys="epsilon, delta, sample_size, time, wrong_eps, err_avg, err_max, err_min, err_stddev, forward_touched_edges, backward_touched_edges, diameter_touched_edges, euc_dist, diameter, diam_type" csvkeys="epsilon,delta,sample_size,time,wrong_eps,err_avg,err_stddev,forward_touched_edges,backward_touched_edges,diameter_touched_edges,euc_dist,diameter,diam_type" print("type,", csvkeys) print("vc,", util.dict_to_csv(vc_stats,csvkeys)) print("bp,", util.dict_to_csv(bp_stats,csvkeys)) print("gss,", util.dict_to_csv(gss_stats,csvkeys)) print("exact,", util.dict_to_csv(exact_stats,csvkeys))
                if time.time() - t > daemon_timeout:
                    print_error("Daemon timeout")
                    break
                else:
                    t = time.time()
            continue
        t = time.time()
        client = ClientThread(server, connection)
        client.start()
    server.stop()
    # Sleep so that other threads can terminate cleanly
    time.sleep(0.5)
    print_error("Daemon exiting")


if __name__ == '__main__':
    import simple_config, util
    _config = {}
    if len(sys.argv) > 1:
        _config['electrum_path'] = sys.argv[1]
    config = simple_config.SimpleConfig(_config)
    util.set_verbosity(True)
    server = NetworkServer(config)
    server.start()
    try:
        daemon_loop(server)
    except KeyboardInterrupt:
        print("Ctrl C - Stopping daemon")
        server.stop()
        sys.exit(1)
    return result


if __name__ == '__main__':
    parser = arg_parser()
    options, args = parser.parse_args()
    # if options.wallet_path is None:
    #     options.electrum_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'electrum_data')

    config_options = eval(str(options))

    # Drop unset options; iterate over a copy since we mutate the dict
    for k, v in list(config_options.items()):
        if v is None:
            config_options.pop(k)

    set_verbosity(config_options.get('verbose'))
    config = SimpleConfig(config_options)

    if len(args) == 0:
        url = None
        cmd = 'history'
    else:
        cmd = args[0]

    if cmd not in known_commands:
        cmd = 'help'

    cmd = known_commands[cmd]

    # Instantiate wallet for command-line