def clear_cache(self):
    """Clears all cached data (images and eveapi cache).

    Errors while deleting are logged (with traceback) instead of being
    raised, so a failed cleanup never crashes the caller.
    """
    try:
        util.clean_dir(IMG_CACHE_PATH)
        util.clean_dir(APICACHE_PATH)
    except OSError:
        # Fix: `except OSError, e` is Python 2-only syntax (a SyntaxError
        # on Python 3).  The bound exception was unused anyway —
        # logging.exception() records the active traceback by itself.
        logging.getLogger('mevemon').exception("Failed to clear cache")
def to_sk(self):
    """Generate all Sketch (.sk) files for the program into self.sk_dir.

    Emits, in order: Object.sk, meta.sk, one cls.sk per class, main.sk,
    the struct Object definition, and array.sk.
    """
    # clean up result directory (recreate it if it does not exist yet)
    if os.path.isdir(self.sk_dir):
        util.clean_dir(self.sk_dir)
    else:
        os.makedirs(self.sk_dir)
    clss = utils.extract_nodes([ClassOrInterfaceDeclaration], self.prg)
    # True if any extracted class is marked as an axiom class
    is_ax_cls = any(map(lambda c: c._axiom, clss))
    # consist builds up some class hierarchies which happens in main.py
    # prg.consist()
    # type.sk
    logging.info('generating Object.sk')
    self.gen_object_sk(is_ax_cls)
    logging.info('generating meta.sk')
    self.gen_meta_sk(is_ax_cls)
    # cls.sk
    logging.info('generating cls.sk')
    cls_sks = []
    for cls in clss:
        cls_sk = self.gen_cls_sk(cls, is_ax_cls)
        if cls_sk:
            cls_sks.append(cls_sk)
    logging.info('generating main.sk')
    self.gen_main_sk(cls_sks)
    logging.info('writing struct Object')
    # NOTE(review): the commented-out calls below used the local
    # is_ax_cls; the live calls read self.is_ax_cls instead.  Presumably
    # the attribute is set elsewhere (e.g. main.py) — confirm the two
    # values agree, otherwise this mix is a latent inconsistency.
    # self.print_obj_struct(is_ax_cls)
    self.print_obj_struct(self.is_ax_cls)
    logging.info('generating array.sk')
    # self.gen_array_sk(is_ax_cls)
    self.gen_array_sk(self.is_ax_cls)
def docutize(output_dir):
    """Build the top-level documentation section in output_dir.

    Resets output_dir, writes the section index, generates the display
    examples, then the python API docs under output_dir/python.
    """
    util.clean_dir(output_dir)

    title = 'documentation'
    section_index = util.Index(
        title=title,
        writeable_title=title,
        nav_order=4,
        layout='main_page',
        has_children=True,
        content="full documentation for webweb's parameters and interfaces",
    )
    section_index.write(output_dir)

    examplify(
        input_dir=DISPLAY_INPUT_DIR,
        data_output_dir=DISPLAY_DATA_OUTPUT_DIR,
        pages_output_dir=DISPLAY_PAGES_OUTPUT_DIR,
        nav_order=1,
        container='display',
        parent_container='documentation',
    )

    pydocutize(os.path.join(output_dir, 'python'), 'documentation', 2)
def to_sk(self):
    """Generate all Sketch (.sk) files for the program into self.sk_dir.

    Emits, in order: Object.sk, meta.sk, one cls.sk per class, main.sk,
    the struct Object definition, and array.sk.
    """
    # clean up result directory (recreate it if it does not exist yet)
    if os.path.isdir(self.sk_dir):
        util.clean_dir(self.sk_dir)
    else:
        os.makedirs(self.sk_dir)
    # consist builds up some class hierarchies which happens in main.py
    # prg.consist()
    # type.sk
    logging.info('generating Object.sk')
    self.gen_object_sk()
    logging.info('generating meta.sk')
    self.gen_meta_sk()
    # cls.sk — one sketch file per class/interface found in the program
    logging.info('generating cls.sk')
    cls_sks = []
    clss = utils.extract_nodes([ClassOrInterfaceDeclaration], self.prg)
    for cls in clss:
        cls_sk = self.gen_cls_sk(cls)
        if cls_sk:
            cls_sks.append(cls_sk)
    logging.info('generating main.sk')
    self.gen_main_sk(cls_sks)
    logging.info('writing struct Object')
    self.print_obj_struct()
    logging.info('generating array.sk')
    self.gen_array_sk()
def clean_build_dir(self, stage_name, build_dir):
    """Report a '<stage> clean' step and delete build_dir.

    Any failure is reported as a step exception and then re-raised so
    the surrounding build halts.
    """
    step_label = '%s clean' % (stage_name,)
    self.report_build_step(step_label)
    self.halt_on_failure()
    try:
        util.clean_dir(build_dir)
    except Exception as exc:
        # surface the failure in the build report before propagating
        self.report_step_exception(exc)
        raise
def clean_build_dir(self, stage_name, build_dir):
    """Announce and run the clean step for one build stage.

    Deletes build_dir; failures are recorded on the step and re-raised.
    """
    self.report_build_step('%s clean' % (stage_name, ))
    self.halt_on_failure()
    try:
        util.clean_dir(build_dir)
    except Exception as err:
        # record the error against this step, then let it propagate
        self.report_step_exception(err)
        raise
def clean_build_dir(self, stage_name, build_dir):
    """Report a '<stage> clean' step and delete build_dir.

    A missing directory (ENOENT) is treated as already clean; any other
    OSError is reported against the step and re-raised so the build
    halts.
    """
    self.report_build_step('%s clean' % (stage_name,))
    self.halt_on_failure()
    try:
        util.clean_dir(build_dir)
    except OSError as e:
        if e.errno != errno.ENOENT:
            # Fix: pass the exception to the reporter so the step log
            # contains the error details — the other clean_build_dir
            # variants in this file call report_step_exception(e);
            # calling it with no argument loses the failure context.
            self.report_step_exception(e)
            raise
def test_clean_dir(self):
    """clean_dir must empty the directory while keeping it on disk."""
    self._setup_files()
    base = self.basedir
    try:
        # sanity check: the fixture created exactly three entries
        self.assertEqual(3, len(os.listdir(base)))
        util.clean_dir(base)
        # the directory itself survives, its contents do not
        self.assertTrue(os.path.exists(base))
        self.assertEqual(0, len(os.listdir(base)))
    finally:
        if os.path.exists(base):
            shutil.rmtree(base)
def main(args):
    """Fetch a pinned libuv (plus gyp) into deps/ and strip what we don't need."""
    # Delete it if already there so we ensure we get the correct version if the
    # version number in this script changes.
    clean_dir("deps")

    print("Cloning libuv...")
    run(["git", "clone", "--quiet", "--depth=1",
         "https://github.com/libuv/libuv.git", LIB_UV_DIR])

    print("Getting tags...")
    run(["git", "fetch", "--quiet", "--depth=1", "--tags"], cwd=LIB_UV_DIR)

    print("Checking out libuv " + LIB_UV_VERSION + "...")
    run(["git", "checkout", "--quiet", LIB_UV_VERSION], cwd=LIB_UV_DIR)

    # TODO: Pin gyp to a known-good commit. Update a previously downloaded gyp
    # if it doesn't match that commit.
    print("Downloading gyp...")
    run(["git", "clone", "--quiet", "--depth=1",
         "https://chromium.googlesource.com/external/gyp.git",
         LIB_UV_DIR + "/build/gyp"])

    # We don't need all of libuv and gyp's various support files.
    print("Deleting unneeded files...")
    for unneeded in (
        "deps/libuv/build/gyp/buildbot",
        "deps/libuv/build/gyp/infra",
        "deps/libuv/build/gyp/samples",
        "deps/libuv/build/gyp/test",
        "deps/libuv/build/gyp/tools",
        "deps/libuv/docs",
        "deps/libuv/img",
        "deps/libuv/samples",
        "deps/libuv/test",
    ):
        remove_dir(unneeded)

    # We are going to commit libuv and GYP in the main Wren repo, so we don't
    # want them to be their own repos.
    for nested_repo in ("deps/libuv/.git", "deps/libuv/build/gyp/.git"):
        remove_dir(nested_repo)

    # Libuv's .gitignore ignores GYP, but we want to commit it.
    replace_in_file("deps/libuv/.gitignore",
                    "/build/gyp",
                    "# /build/gyp (We do want to commit GYP in Wren's repo)")
def main(args):
    """Fetch a pinned libuv (plus gyp) into deps/ and prune support files."""
    # Delete it if already there so we ensure we get the correct version if the
    # version number in this script changes.
    clean_dir("deps")

    print("Cloning libuv...")
    run(["git", "clone", "--quiet", "--depth=1",
         "https://github.com/libuv/libuv.git", LIB_UV_DIR])

    print("Getting tags...")
    run(["git", "fetch", "--quiet", "--depth=1", "--tags"], cwd=LIB_UV_DIR)

    print("Checking out libuv " + LIB_UV_VERSION + "...")
    run(["git", "checkout", "--quiet", LIB_UV_VERSION], cwd=LIB_UV_DIR)

    # TODO: Pin gyp to a known-good commit. Update a previously downloaded gyp
    # if it doesn't match that commit.
    print("Downloading gyp...")
    run(["git", "clone", "--quiet", "--depth=1",
         "https://chromium.googlesource.com/external/gyp.git",
         LIB_UV_DIR + "/build/gyp"])

    # We don't need all of libuv and gyp's various support files.
    print("Deleting unneeded files...")
    for optional_dir in (
        "deps/libuv/build/gyp/buildbot",
        "deps/libuv/build/gyp/infra",
        "deps/libuv/build/gyp/samples",
        "deps/libuv/build/gyp/test",
        "deps/libuv/build/gyp/tools",
        "deps/libuv/docs",
        "deps/libuv/img",
        "deps/libuv/samples",
    ):
        try_remove_dir(optional_dir)

    # We are going to commit libuv and GYP in the main Wren repo, so we don't
    # want them to be their own repos.
    remove_dir("deps/libuv/.git")
    remove_dir("deps/libuv/build/gyp/.git")

    # Libuv's .gitignore ignores GYP, but we want to commit it.
    if os.path.isfile("deps/libuv/.gitignore"):
        with open("deps/libuv/.gitignore", "a") as libuv_ignore:
            libuv_ignore.write("!build/gyp")
def main(argv):
    """Buildbot pipeline: configure, build, and test LLVM's libc.

    Steps: cmake configure (Debug/Release, optional ASan), ninja build,
    check-libc, then — when not building with ASan — loader, integration
    and AOR tests, and finally benchmark-util tests for non-debug builds.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument('--asan', action='store_true', default=False,
                    help='Build with address sanitizer enabled.')
    ap.add_argument('--debug', action='store_true', default=False,
                    help='Build in debug mode.')
    # parse_known_args: ignore extra bot-supplied flags instead of failing
    args, _ = ap.parse_known_args()

    source_dir = os.path.join('..', 'llvm-project')

    with step('cmake', halt_on_fail=True):
        projects = ['llvm', 'libc', 'clang', 'clang-tools-extra']
        cmake_args = ['-GNinja']
        if args.debug:
            cmake_args.append('-DCMAKE_BUILD_TYPE=Debug')
        else:
            cmake_args.append('-DCMAKE_BUILD_TYPE=Release')
        if args.asan:
            cmake_args.append('-DLLVM_USE_SANITIZER=Address')
        cmake_args.append('-DLLVM_ENABLE_PROJECTS={}'.format(
            ';'.join(projects)))
        run_command(['cmake', os.path.join(source_dir, 'llvm')] + cmake_args)

    with step('build llvmlibc', halt_on_fail=True):
        run_command(['ninja', 'llvmlibc'])

    with step('check-libc'):
        run_command(['ninja', 'check-libc'])

    # NOTE(review): loader/integration/AOR steps are skipped for ASan
    # builds — presumably incompatible with sanitizer instrumentation;
    # confirm against the bot configuration.
    if not args.asan:
        with step('Loader Tests'):
            run_command(['ninja', 'libc_loader_tests'])
        with step('Integration Tests'):
            run_command(['ninja', 'libc-integration-test'])
        with step('AOR Tests'):
            aor_dir = os.path.join(source_dir, 'libc', 'AOR_v20.02')
            # Remove the AOR build dir.
            util.clean_dir(os.path.join(aor_dir, 'build'))
            run_command(['make', 'check'], directory=aor_dir)

    if not args.debug:
        with step('Benchmark Utils Tests'):
            run_command(['ninja', 'libc-benchmark-util-tests'])
def copy_webweb_client_to_site():
    """copies the webweb assets to a jekyll-friendly location"""
    for content_type in ['css', 'js']:
        # target is wiped first so stale assets never linger
        target_dir = os.path.join(JEKYLL_ASSETS_DIR, content_type, 'webweb')
        util.clean_dir(target_dir)

        source_dir = os.path.join(WEBWEB_ASSETS_DIR, content_type)
        for file_name in os.listdir(source_dir):
            source_path = os.path.join(source_dir, file_name)
            # copy plain files only; skip any subdirectories
            if not os.path.isfile(source_path):
                continue
            shutil.copyfile(source_path, os.path.join(target_dir, file_name))
def end(self):
    """Finish a run: persist artifacts into ./output and clean temp files."""
    out_dir = 'output'
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    # save the animation only when a visualizer was attached
    if self.viz is not None:
        self.viz.save(os.path.join(out_dir, 'anim.gif'))

    clean_dir(self.tmp_dir)

    # render best candidate in output folder
    render_jobs = render_individuals(self.population[:1],
                                     self.audio_duration, out_dir)
    wait_for_processes_completion(render_jobs)

    # plot fitness and save it in output folder
    plot_fitness(self.fitness_over_time,
                 os.path.join(out_dir, 'fitness_over_time'))
    # also store a plot of the dsp graph
    plot_tree(self.population[0].tree, os.path.join(out_dir, 'graph'))
def trim_separate_align(input_fasta, mode, runsdir, version_dir):
    """Run the preanalysis1 trim/separate/align pipeline inside runsdir.

    Starts from a freshly emptied runsdir and invokes the external
    preanalysis1 script with the configured tool paths.
    """
    util.expect_file_exists(input_fasta)
    # always start from an empty run directory
    util.clean_dir(runsdir)
    util.mkdirp(runsdir)
    cmd = [
        common.preanalysis1,
        input_fasta,
        mode,
        common.scripts_dir,
        common.mafft,
        common.outgroup_spec,
        version_dir,
        str(common.available_cores),
    ]
    # echo the exact command line for reproducibility
    print(" ".join(cmd))
    subprocess.check_call(cmd, cwd=runsdir)
def write_category(category, info, directory, number):
    """Write one documentation category.

    Creates directory/{category}/ containing an index page for the
    category plus one page per entry in info['elements'].  Index and
    element bodies are read from optional 'text' source files.
    """
    category_dir = os.path.join(directory, category)
    util.clean_dir(category_dir)

    # index content comes from an optional source file
    index_content = None
    if info.get('text'):
        with open(info['text'], 'r') as f:
            index_content = f.read()

    util.Page(
        title=category,
        has_children=True,
        parent='documentation',
        content=index_content,
        permalink='/documentation/{category}/'.format(category=category),
        nav_order=number,
    ).write(category_dir)

    # one page per element, ordered as they appear in the mapping
    for position, (element, components) in enumerate(info['elements'].items(),
                                                     start=1):
        page_kwargs = {
            'title': element,
            'parent': category,
            'grand_parent': 'documentation',
            'nav_order': position,
        }
        if components.get('text'):
            with open(components['text'], 'r') as f:
                page_kwargs['content'] = f.read()
        util.Page(**page_kwargs).write(category_dir)
def export(pargenes_run_dir, paths):
    """Export ParGenes results (trees, models, RF distances) to `paths`.

    Copies the best ML tree (with supports when bootstraps were run),
    the duplicate-reattached tree, the best model, all ML trees, and
    bootstrap trees; then computes RF distances between starting and
    final ML trees and writes a small text report.
    """
    print("Pargenes run dir: " + pargenes_run_dir)
    pargenes_output = os.path.join(pargenes_run_dir, "pargenes_output")
    ml_run_dir = os.path.join(pargenes_output, "mlsearch_run", "results", "ali_fasta")
    # export best ml tree (with support values if existing)
    src = ""
    if (common.pargenes_bs_trees > 0):
        src = os.path.join(pargenes_output, "supports_run", "results", "ali_fasta.support.raxml.support")
    else:
        src = os.path.join(ml_run_dir, "ali_fasta.raxml.bestTree")
    shutil.copy(src, paths.raxml_best_tree)
    # export best tree with duplicates reattached
    #if ("thinned" not in paths.dataset):
    reattach_duplicates.reattach_duplicates(
        src, paths.raxml_best_tree_with_duplicate, paths.duplicates_json)
    # export best tree with TBE values
    if (common.pargenes_bs_trees > 0):
        src = os.path.join(pargenes_output, "supports_run", "results", "ali_fasta.support.tbe.raxml.support")
        shutil.copy(src, paths.raxml_best_tree_tbe)
    # export best ml model
    src = os.path.join(ml_run_dir, "ali_fasta.raxml.bestModel")
    shutil.copy(src, paths.raxml_best_model)
    # export all ml trees
    src = os.path.join(ml_run_dir, "sorted_ml_trees.newick")
    shutil.copy(src, paths.raxml_all_ml_trees)
    src = os.path.join(ml_run_dir, "sorted_ml_trees_ll.newick")
    shutil.copy(src, paths.raxml_all_ml_trees_ll)
    # export bootstrap trees
    if (common.pargenes_bs_trees > 0):
        src = os.path.join(pargenes_output, "concatenated_bootstraps", "ali_fasta.bs")
        shutil.copy(src, paths.raxml_bootstrap_trees)
    av_pairwise_rf_distance = rf_distance.get_export_pairwise_rf_distance(
        paths.raxml_all_ml_trees, paths.raxml_all_ml_trees_rf_distances)
    toprint0 = "Average pairwise RF distance between all ML trees: " + str(
        av_pairwise_rf_distance)
    # NOTE(review): prints the bare number here; the labelled toprint0
    # line is printed again further down — possibly a leftover debug print.
    print(av_pairwise_rf_distance)
    # compute RF distance between starting and ML trees for the best run
    print("Computing rf distances between parsimony and ML trees...")
    rf_dir = os.path.join(paths.runs_dir, "rfdistances")
    util.clean_dir(rf_dir)
    util.mkdirp(rf_dir)
    prefix = os.path.join(rf_dir, "ml")
    tree1 = os.path.join(ml_run_dir, "ali_fasta.raxml.bestTree")
    tree2 = os.path.join(ml_run_dir, "ali_fasta.raxml.startTree")
    rf = rf_distance.get_rf_distance(tree1, tree2, prefix)
    toprint1 = "RF distance between the best ML tree and its starting tree: " + str(
        rf)
    all_runs_dir = os.path.join(ml_run_dir, "multiple_runs")
    sum_rf = 0.0
    deno_rf = 0.0
    # average the start-vs-final RF distance over every independent run
    for run in os.listdir(all_runs_dir):
        run_dir = os.path.join(all_runs_dir, run)
        prefix = os.path.join(rf_dir, run)
        tree1 = os.path.join(run_dir, "ali_fasta.raxml.bestTree")
        tree2 = os.path.join(run_dir, "ali_fasta.raxml.startTree")
        rf = rf_distance.get_rf_distance(tree1, tree2, prefix)
        sum_rf += rf
        deno_rf += 1.0
    # NOTE(review): divides by the run count — raises ZeroDivisionError
    # if multiple_runs is empty; presumably ParGenes guarantees >= 1 run.
    av_rf = sum_rf / deno_rf
    toprint2 = "Average (over all the " + str(
        deno_rf) + " runs) RF distance between start and ML trees: " + str(
        av_rf)
    print(toprint0)
    print(toprint1)
    print(toprint2)
    with open(paths.rf_distance_report, "w") as writer:
        writer.write(toprint0 + "\n")
        writer.write(toprint1 + "\n")
        writer.write(toprint2)
def main():
    """Python 2 driver: generate and run an iperf/D-ITG traffic scenario in Mininet.

    Parses CLI options, creates (or reuses) a scenario directory and a
    generated topology, launches Mininet, starts traffic servers/clients,
    repeats the test `--repeat` times, then tears everything down and
    exits with the test tool's status (0 ok, 1 critical, 2 warnings).
    """
    original_dir = os.getcwd()
    parser = argparse.ArgumentParser(
        description="Launches D-ITG test scenario in mininet.",
        epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("-l", "--store_logs", action="store_true", default=False,
                        help="store logs (default: logs are discarded)")
    parser.add_argument("-t", "--topology", help="name of topology to run")
    parser.add_argument(
        "-B", "--bandwidth", default=1,
        help="Bandwidth of links in Mbit/s (default: 1 Mbit/s)")
    parser.add_argument("-lt", "--list-topologies", action="store_true",
                        help="list available topologies")
    parser.add_argument("-ls", "--list-scenarios", action="store_true",
                        help="list available scenarios")
    # parser.add_argument("-s", "--scenario", help="select test scenario - dir name or just scenario name")
    parser.add_argument(
        "-d", "--scenarios-dir",
        help="directory with scenarios (default: current directory)")
    parser.add_argument(
        "-H", "--hosts", default=1,
        help="Number of hosts in network ('per switch' for uniform distribution)")
    parser.add_argument(
        "-dr", "--random-distribution", action="store_true", default=False,
        help="Random hosts distribution in network (default: uniform)")
    parser.add_argument(
        "-stp", "--stp-switch", action="store_true",
        help="Run with STP switches. Disconnects from controller.")
    parser.add_argument(
        "-o", "--logs-dir",
        help="directory for storing logs (default: logs/ in scenario directory). Implies storing logs")
    parser.add_argument(
        "-r", "--repeat", type=int, default=1,
        help="number of test case repeats (-1 for infinite). Warning: Logs will be overridden")
    parser.add_argument("--tool", default='iperf',
                        help="Traffic generation tool: iperf, ditg")
    parser.add_argument("-T", "--traffic-type", help="Type of generated traffic")
    parser.add_argument("--c_min", help="Minimum bitrate of generated traffic")
    parser.add_argument("--c_max", help="Maximum bitrate of generated traffic")
    parser.add_argument("-g", "--clients",
                        help="Number of clients generating traffic")
    parser.add_argument("-f", "--flows", help="Number of flows per client")
    args = parser.parse_args()

    # A topology is mandatory unless the user only wants a listing.
    if not (args.list_scenarios or args.list_topologies) and not (args.topology):
        print "Wrong parameters: Need to set topology. Or just list topologies or scenarios."
        print ""
        parser.print_help()
        exit(1)
    # iperf needs the full set of traffic parameters; ditg is unsupported.
    if args.tool and args.tool == 'iperf':
        if not (args.traffic_type and args.c_min and args.c_max
                and args.clients and args.flows):
            print "Not enough traffic parameters!"
            print ""
            parser.print_help()
            exit(1)
        else:
            util.validate_params(args.traffic_type)
    elif args.tool and args.tool == 'ditg':
        print "ditg full support not implemented yet"
        exit(1)

    if args.list_topologies:
        print "Topologies ([name] s=[no. switches]) h=[no. hosts]:"
        for t in topos_info:
            print "{} s={} h={}".format(t, topos_info[t][0], topos_info[t][1])
        return 0
    scenarios_dir = original_dir
    if args.scenarios_dir:
        scenarios_dir = args.scenarios_dir
    if args.list_scenarios:
        print "Scenarios ([name] ):"
        for s in get_scenarios(scenarios_dir):
            print s
        return 0

    distribution = ''
    if (args.random_distribution):
        distribution = "random"
    else:
        distribution = "uniform"
    # traffic_generation marks a freshly created scenario directory
    # NOTE(review): the flag is set below but its only use further down
    # is commented out — traffic is currently always (re)generated.
    traffic_generation = False
    scenario = create_scenario_name(args.traffic_type, args.c_min, args.c_max,
                                    args.clients, args.flows, args.topology,
                                    distribution, args.hosts)
    print scenario
    all_scenarios = get_scenarios(scenarios_dir)
    scenario_dir = None
    if scenario in all_scenarios:
        scenario_dir = os.path.join(scenarios_dir, scenario)
    else:
        os.mkdir(scenarios_dir + '/' + scenario)
        scenario_dir = os.path.join(scenarios_dir, scenario)
        traffic_generation = True

    # Get topology (generate it from the topology zoo if needed)
    topology = args.topology
    if isGenerated(topology, args.hosts, distribution):
        print "Topology {}-{}-{} exists".format(topology, distribution, args.hosts)
    else:
        if topology in get_zoo_topologies():
            generate_topology(topology, args.hosts, distribution, args.bandwidth)
        else:
            print "Wrong topology name: " + topology
            print "Available generated: "
            print get_generated_topologies()
            print "Available to generate: "
            print get_zoo_topologies()
            exit(1)
    topology = "gen_" + topology + "_" + distribution + "_" + str(args.hosts)

    # Check if scenario can be run on topology
    #topology_hosts = topos_info[topology][1]
    #scenario_hosts = int(scenario_dir.split('_')[-1])
    #if scenario_hosts > topology_hosts:
    #    print "Cannot run scenario {} ({} hosts) on topology {} ({} hosts). Too many hosts in scenario.".format(scenario, scenario_hosts, topology, topology_hosts)
    #    exit(4)
    # print scenario_dir
    # if not os.path.exists(scenario_dir):
    #     print "Not found generated test dir: {}. Please run ./test_generator_itg.py first.".format(scenario_dir)
    #     exit(4)
    os.chdir(scenario_dir)
    log_dir = None
    # Log dir implies storing logs
    if args.logs_dir:
        args.store_logs = True
    if args.store_logs:
        if args.logs_dir:
            if os.path.isabs(args.logs_dir):
                log_dir = args.logs_dir
            else:
                # Logs path relative to CWD
                log_dir = os.path.join(original_dir, args.logs_dir)
        else:
            log_dir = OUTPUT_DIR
        # Create or clean directory for logs
        util.clean_dir(log_dir, suffix=".log")
        print "Storing logs in: {}".format(os.path.join(os.getcwd(), log_dir))
    else:
        print "Not storing logs."
    print "Topology: {} Scenario: {}".format(topology, scenario)
    os.chdir(original_dir)

    # Load the generated topology module by file path (imp is Python 2 era).
    #topo = importlib.import_module("gen_{}".format(topology))
    #Change comments to load a fixed topology
    #f, filename, desc = imp.find_module('gen_bteurope', [os.path.abspath(os.getcwd()) + '/topologies'])
    #topo = imp.load_module('gen_bteurope', f, filename, desc)
    f, filename, desc = imp.find_module(
        "{}".format(topology), [os.path.abspath(os.getcwd()) + '/topologies'])
    topo = imp.load_module("{}".format(topology), f, filename, desc)
    print "Launching Mininet.."
    net = Mininet(topo=topo.GeneratedTopo(),
                  controller=RemoteController,
                  switch=OVSSwitch,
                  host=CPULimitedHost,
                  link=TCLink,
                  autoSetMacs=True)
    # Start network
    print "Starting network.."
    net.start()
    #if(traffic_generation):
    generate_traffic(net.hosts, scenario_dir, args.clients, args.flows,
                     args.traffic_type, args.c_min, args.c_max)
    if args.stp_switch:
        util.turn_legacy_on()
        print "Waiting {} s ...".format(LEGACY_INITIALIZATION_DELAY)
        time.sleep(LEGACY_INITIALIZATION_DELAY)
    print "Waiting {} s for initialization of mininet and controller...".format(
        INITIALIZATION_DELAY)
    time.sleep(INITIALIZATION_DELAY)

    # Preparing TestTool
    #TODO choosing various tools based on config
    if args.tool == 'iperf':
        print "Using iperf"
        test_tool = TestToolIperf(net, log_dir)
    elif args.tool == 'ditg':
        print "Using ditg"
        test_tool = TestToolITG(net, log_dir)
    else:
        print "ERROR Unknown tool: {}".format(args.tool)
        net.stop()
        sys.exit(3)
    os.chdir(scenario_dir)

    # Run servers
    hosts = net.hosts
    print "Starting servers..."
    for host in hosts:
        host_name = host.name
        test_tool.run_server(host_name)

    iterations = args.repeat
    if iterations != 1:
        start_time = time.time()
    i = 0
    # `while i != iterations` lets -1 mean "repeat forever"
    # NOTE(review): if args.repeat is 0 the loop never runs and `retcode`
    # below is unbound (NameError) — confirm 0 is not a supported value.
    while i != iterations:
        if iterations != 1:
            print "Iteration: {} / {}".format(i + 1, iterations)
        iteration_start_time = time.time()
        # Run ITGSends per host config
        threads = []
        for f in os.listdir(os.path.curdir):
            if os.path.isfile(f) and f.endswith(TRAFFIC_FILE_SUFFIX):
                host_name = get_hostname(f)
                test_tool.run_client(host_name, f)
        # CLI(net)  # DEBUG
        print "Waiting for test end.."
        retcode = test_tool.wait_for_end()
        end_time = time.time()
        print "Testing time: {:0.2f} s".format(end_time - iteration_start_time)
        i += 1
    if iterations != 1:
        print "Total testing time: {:0.2f} s".format(end_time - start_time)

    print "Stopping network.."
    net.stop()
    # restore ownership of logs written while running as root
    s = os.stat('.')
    if args.store_logs:
        util.rchmod(log_dir, s.st_uid, s.st_gid)
    os.chdir(original_dir)
    print "Killing all test tool processes."
    test_tool.kill_all()
    if retcode == 2:
        print "END Test finished with WARNINGS"
        sys.exit(2)
    elif retcode == 1:
        print "ERROR CRITICAL server went down during test"
        sys.exit(1)
    else:
        print "END Test finished successfully"
        sys.exit(0)
def to_sk(cmd, smpls, tmpl, sk_dir):
    """Encode the template `tmpl` and samples `smpls` as Sketch files in sk_dir.

    Produces type.sk, one cls.sk per class, one sample_<name>.sk per
    sample, log.sk, and a top-level sample.sk that sets solver pragmas
    and includes all the others.  `cmd` ("android"/"gui"/...) tunes the
    encoding bounds.  Python 2 code (cStringIO, filter returning a list).
    """
    # clean up result directory
    if os.path.isdir(sk_dir):
        util.clean_dir(sk_dir)
    else:
        os.makedirs(sk_dir)

    # reset global variables so that we can run this encoding phase per demo
    reset()

    # update global constants
    # predicate: methods (excluding constructors) that appear in the samples
    def logged(mtd):
        if mtd.is_init:
            return False
        clss = util.flatten_classes([mtd.clazz], "subs")
        return sample.mtd_appears(smpls, clss, mtd.name)
    mtds = filter(logged, methods())
    if mtds:
        # 2 slots reserved (mid marker + receiver) plus the longest param list
        n_params = 2 + max(map(len, map(op.attrgetter("params"), mtds)))
    else:  # no meaningful logs in the sample?
        n_params = 2
    n_evts = sample.max_evts(smpls)
    if cmd == "android":
        n_views = sample.max_views(smpls)
        magic_S = max(3, n_evts + 1, n_views)
    else:
        magic_S = max(5, n_evts + 1)  # at least 5, just in case
    n_ios = sample.max_IOs(smpls)
    global _const
    # NOTE(review): line breaks inside this literal were reconstructed;
    # the emitted Sketch source expects one declaration per line.
    _const = u"""
int P = {}; // length of parameters (0: (>|<)mid, 1: receiver, 2...)
int S = {}; // length of arrays for Java collections
int N = {}; // length of logs
""".format(n_params, magic_S, n_ios)

    # type.sk
    logging.info("building class hierarchy")
    tmpl.consist()
    # merge all classes and interfaces, except for primitive types
    clss, _ = util.partition(lambda c: util.is_class_name(c.name), classes())
    bases = rm_subs(clss)
    gen_type_sk(sk_dir, bases)

    # cls.sk
    cls_sks = []
    for cls in tmpl.classes:
        # skip the collections, which will be encoded at type.sk
        if repr(cls).split('_')[0] in C.collections:
            continue
        cls_sk = gen_cls_sk(sk_dir, smpls, cls)
        if cls_sk:
            cls_sks.append(cls_sk)

    # sample_x.sk — one harness file per sample
    smpl_sks = []
    for smpl in smpls:
        smpl_sk = "sample_" + smpl.name + ".sk"
        smpl_sks.append(smpl_sk)
        sk_path = os.path.join(sk_dir, smpl_sk)
        gen_smpl_sk(sk_path, smpl, tmpl, tmpl.harness(smpl.name))

    # log.sk
    gen_log_sk(sk_dir, tmpl)

    # sample.sk that imports all the other sketch files
    buf = cStringIO.StringIO()
    # deprecated as we use regex generator for class/method roles
    ## --bnd-cbits: the number of bits for integer holes
    #bits = max(5, int(math.ceil(math.log(len(methods()), 2))))
    #buf.write("pragma options \"--bnd-cbits {}\";\n".format(bits))
    # --bnd-unroll-amnt: the unroll amount for loops
    unroll_amnt = max(n_params, magic_S)
    buf.write("pragma options \"--bnd-unroll-amnt {}\";\n".format(unroll_amnt))
    # --bnd-inline-amnt: bounds inlining to n levels of recursion
    inline_amnt = None  # use a default value if not set
    if cmd == "android":
        #inline_amnt = 2  # depth of View hierarchy (at findViewByTraversal)
        inline_amnt = 1  # no recursion for flat Views
    elif cmd == "gui":
        # setting it 1 means there is no recursion
        inline_amnt = 1
    if inline_amnt:
        buf.write("pragma options \"--bnd-inline-amnt {}\";\n".format(inline_amnt))
        buf.write("pragma options \"--bnd-bound-mode CALLSITE\";\n")
    sks = ["log.sk", "type.sk"] + cls_sks + smpl_sks
    for sk in sks:
        buf.write("include \"{}\";\n".format(sk))
    with open(os.path.join(sk_dir, "sample.sk"), 'w') as f:
        f.write(buf.getvalue())
        logging.info("encoding " + f.name)
    buf.close()
def main():
    """Python 2 driver: run a pre-generated D-ITG/iperf scenario in Mininet.

    Parses CLI options, resolves a scenario directory and topology,
    launches Mininet, starts traffic servers/clients, repeats the test
    `--repeat` times, then tears down and exits with the tool's status
    (0 ok, 1 critical, 2 warnings).
    """
    original_dir = os.getcwd()
    parser = argparse.ArgumentParser(
        description="Launches D-ITG test scenario in mininet.",
        epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("-l", "--store_logs", action="store_true", default=False,
                        help="store logs (default: logs are discarded)")
    parser.add_argument("-t", "--topology", help="name of topology to run")
    parser.add_argument("-lt", "--list-topologies", action="store_true",
                        help="list available topologies")
    parser.add_argument("-ls", "--list-scenarios", action="store_true",
                        help="list available scenarios")
    parser.add_argument("-s", "--scenario",
                        help="select test scenario - dir name or just scenario name")
    parser.add_argument("-d", "--scenarios-dir",
                        help="directory with scenarios (default: current directory)")
    parser.add_argument("-c", "--stp-switch", action="store_true",
                        help="Run with STP switches. Disconnects from controller.")
    parser.add_argument("-o", "--logs-dir",
                        help="directory for storing logs (default: logs/ in scenario directory). Implies storing logs")
    parser.add_argument("-r", "--repeat", type=int, default=1,
                        help="number of test case repeats (-1 for infinite). Warning: Logs will be overridden")
    parser.add_argument("--tool", default='iperf',
                        help="Traffic generation tool: iperf, ditg")
    args = parser.parse_args()

    # topology + scenario are mandatory unless the user only wants a listing
    if not (args.list_scenarios or args.list_topologies) and not (args.topology and args.scenario):
        print "Wrong parameters: Need to set topology and scenario. Or just list topologies or scenarios."
        print ""
        parser.print_help()
        exit(1)
    if args.list_topologies:
        print "Topologies ([name] s=[no. switches]) h=[no. hosts]:"
        for t in topos_info:
            print "{} s={} h={}".format(t, topos_info[t][0], topos_info[t][1])
        return 0
    scenarios_dir = original_dir
    if args.scenarios_dir:
        scenarios_dir = args.scenarios_dir
    if args.list_scenarios:
        print "Scenarios ([name] ):"
        for s in get_scenarios(scenarios_dir):
            print s
        return 0

    # Get scenario dir — exact name first, then substring match "_<name>_"
    scenario = args.scenario
    all_scenarios = get_scenarios(scenarios_dir)
    scenario_dir = None
    if scenario in all_scenarios:
        scenario_dir = os.path.join(scenarios_dir, scenario)
    else:
        for s in all_scenarios:
            if "_"+scenario+"_" in s:
                scenario_dir = os.path.join(scenarios_dir, s)
        if not scenario_dir:
            print "Wrong scenario name: "+scenario
            print "Available: "
            print all_scenarios
            exit(1)

    # Get topology (generate from the topology zoo when not yet generated)
    topology = args.topology
    if topology not in get_generated_topologies():
        if topology not in get_zoo_topologies():
            print "Wrong topology name: "+topology
            print "Available generated: "
            print get_generated_topologies()
            print "Available to generate: "
            print get_zoo_topologies()
            exit(1)
        else:
            generate_topology(topology)

    # Check if scenario can be run on topology
    # (scenario dir names encode the host count as the trailing "_<n>")
    topology_hosts = topos_info[topology][1]
    scenario_hosts = int(scenario_dir.split('_')[-1])
    if scenario_hosts > topology_hosts:
        print "Cannot run scenario {} ({} hosts) on topology {} ({} hosts). Too many hosts in scenario.".format(scenario, scenario_hosts, topology, topology_hosts)
        exit(4)
    if not os.path.exists(scenario_dir):
        print "Not found generated test dir: {}. Please run ./test_generator_itg.py first.".format(scenario_dir)
        exit(4)
    os.chdir(scenario_dir)
    log_dir = None
    # Log dir implies storing logs
    if args.logs_dir:
        args.store_logs = True
    if args.store_logs:
        if args.logs_dir:
            if os.path.isabs(args.logs_dir):
                log_dir = args.logs_dir
            else:
                # Logs path relative to CWD
                log_dir = os.path.join(original_dir, args.logs_dir)
        else:
            log_dir = OUTPUT_DIR
        # Create or clean directory for logs
        util.clean_dir(log_dir, suffix=".log")
        print "Storing logs in: {}".format(os.path.join(os.getcwd(), log_dir))
    else:
        print "Not storing logs."
    print "Topology: {} Scenario: {}".format(topology, scenario_dir)

    print "Launching Mininet.."
    net = Mininet(topo=topos[topology](lopts={"bw": 10}),
                  controller=RemoteController,
                  switch=OVSSwitch,
                  link=TCLink,
                  autoSetMacs=True)
    # Start network
    print "Starting network.."
    net.start()
    if args.stp_switch:
        util.turn_legacy_on()
        print "Waiting {} s ...".format(LEGACY_INITIALIZATION_DELAY)
        time.sleep(LEGACY_INITIALIZATION_DELAY)
    print "Waiting {} s for initialization of mininet and controller...".format(INITIALIZATION_DELAY)
    time.sleep(INITIALIZATION_DELAY)

    # Preparing TestTool
    #TODO choosing various tools based on config
    if args.tool == 'iperf':
        test_tool = TestToolIperf(net, log_dir)
    elif args.tool == 'ditg':
        test_tool = TestToolITG(net, log_dir)
    else:
        print "ERROR Unknown tool: {}".format(args.tool)
        net.stop()
        sys.exit(3)

    # Run servers
    hosts = net.hosts
    print "Starting servers..."
    for host in hosts:
        host_name = host.name
        test_tool.run_server(host_name)

    iterations = args.repeat
    if iterations != 1:
        start_time = time.time()
    i = 0
    # `while i != iterations` lets -1 mean "repeat forever"
    # NOTE(review): with args.repeat == 0 the loop never runs and
    # `retcode` below would be unbound — confirm 0 is unsupported.
    while i != iterations:
        if iterations != 1:
            print "Iteration: {} / {}".format(i+1, iterations)
        iteration_start_time = time.time()
        # Run ITGSends per host config
        threads = []
        for f in os.listdir(os.path.curdir):
            if os.path.isfile(f) and f.endswith(ITG_SCRIPT_SUFFIX):
                host_name = get_hostname(f)
                test_tool.run_client(host_name, f)
        # CLI(net)  # DEBUG
        print "Waiting for test end.."
        retcode = test_tool.wait_for_end()
        end_time = time.time()
        print "Testing time: {:0.2f} s".format(end_time-iteration_start_time)
        i += 1
    if iterations != 1:
        print "Total testing time: {:0.2f} s".format(end_time-start_time)

    print "Stopping network.."
    net.stop()
    # restore ownership of logs written while running as root
    s = os.stat('.')
    if args.store_logs:
        util.rchmod(log_dir, s.st_uid, s.st_gid)
    os.chdir(original_dir)
    print "Killing all test tool processes."
    test_tool.kill_all()
    if retcode == 2:
        print "END Test finished with WARNINGS"
        sys.exit(2)
    elif retcode == 1:
        print "ERROR CRITICAL server went down during test"
        sys.exit(1)
    else:
        print "END Test finished successfully"
        sys.exit(0)
def pydocutize(output_dir, parent, nav_order):
    """Generate API-reference pages for selected webweb classes/methods.

    Introspects webweb.Web and webweb.Network via pydoc/inspect and
    writes one page per listed method (plus a section index) into
    output_dir.

    NOTE(review): the nav_order parameter is unused — the index below
    hard-codes nav_order=2; confirm whether the parameter should be
    passed through.
    """
    # which classes and which of their methods get documentation pages
    functions_mapping = {
        'Web': {
            'display_name': 'webweb.Web',
            'functions': [
                '__init__',
                'show',
                'save',
            ],
        },
        'Network': {
            'display_name': 'webweb.Network',
            'functions': [
                '__init__',
                'add_layer',
            ],
        }
    }
    util.clean_dir(output_dir)

    import pydoc
    import inspect

    # load the webweb module from its source path for introspection
    webweb_module = util.get_module('webweb', WEBWEB_CODE)

    container = 'python'
    with open('python_documentation.md', 'r') as f:
        index_content = f.read()

    util.Index(
        title=container,
        writeable_title=container,
        nav_order=2,
        layout='main_page',
        has_children=True,
        parent=parent,
        content=index_content,
    ).write(output_dir)

    # counter orders the generated pages across both classes
    counter = 1
    for object_name in functions_mapping.keys():
        _object = getattr(webweb_module, object_name)
        object_display_name = functions_mapping[object_name]['display_name']
        for function_name in functions_mapping[object_name]['functions']:
            function_object = getattr(_object, function_name)
            function_signature = inspect.signature(function_object)

            # we want to exclude `self`
            parameters_list = []
            for parameter, parameter_string in function_signature.parameters.items():
                if parameter != 'self':
                    parameters_list.append(str(parameter_string))

            signature_string = "(" + ", ".join(parameters_list) + ")"

            function_doc = pydoc.getdoc(function_object)

            qualified_function_name = object_display_name
            writeable_function_name = object_display_name
            if function_name == '__init__':
                # constructors are shown under the class name itself,
                # with the class docstring prepended
                function_doc = pydoc.getdoc(_object) + "\n\n" + function_doc
            else:
                qualified_function_name += '.' + function_name
                writeable_function_name += '_' + function_name

            # NOTE(review): the closing fence below has four backticks
            # ("````") while the opening one has three — looks like a
            # typo, but it is emitted verbatim; verify rendered output.
            content = "```python\n{name}{signature}\n````\n\n{doc}".format(
                name=qualified_function_name,
                signature=signature_string,
                doc=function_doc,
            )

            util.Page(
                title=qualified_function_name,
                writeable_title=writeable_function_name,
                nav_order=counter,
                layout='main_page',
                parent=container,
                grand_parent=parent,
                content=content,
            ).write(output_dir)
            counter += 1
def main():
    """Launch a D-ITG/iperf traffic-test scenario inside a mininet network.

    Flow: parse CLI args -> (optionally just list topologies/scenarios) ->
    resolve or create the scenario directory -> generate the topology module
    if missing -> load it via ``imp`` -> start mininet -> generate traffic
    configs -> start per-host servers -> run the client iterations -> tear
    down and exit with the test tool's status.

    Exit codes: 0 success, 1 critical failure (server died) or bad
    arguments, 2 warnings, 3 unknown tool.

    NOTE: Python 2 code (print statements, ``imp`` module). Relies on
    module-level names: util, topos_info, get_scenarios, EPILOG, OUTPUT_DIR,
    TestToolIperf, TestToolITG, Mininet, etc.
    """
    # Remember where we started; the function chdir()s into the scenario
    # directory and back several times.
    original_dir = os.getcwd()

    # ---- command-line interface -------------------------------------
    parser = argparse.ArgumentParser(description="Launches D-ITG test scenario in mininet.", epilog=EPILOG, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("-l", "--store_logs", action="store_true", default=False, help="store logs (default: logs are discarded)")
    parser.add_argument("-t", "--topology", help="name of topology to run")
    parser.add_argument("-B", "--bandwidth", default=1, help="Bandwidth of links in Mbit/s (default: 1 Mbit/s)")
    parser.add_argument("-lt", "--list-topologies", action="store_true", help="list available topologies")
    parser.add_argument("-ls", "--list-scenarios", action="store_true", help="list available scenarios")
    # parser.add_argument("-s", "--scenario", help="select test scenario - dir name or just scenario name")
    parser.add_argument("-d", "--scenarios-dir", help="directory with scenarios (default: current directory)")
    parser.add_argument("-H", "--hosts", default=1, help="Number of hosts in network ('per switch' for uniform distribution)")
    parser.add_argument("-dr", "--random-distribution", action="store_true", default=False, help="Random hosts distribution in network (default: uniform)")
    parser.add_argument("-stp", "--stp-switch", action="store_true", help="Run with STP switches. Disconnects from controller.")
    parser.add_argument("-o", "--logs-dir", help="directory for storing logs (default: logs/ in scenario directory). Implies storing logs")
    parser.add_argument("-r", "--repeat", type=int, default=1, help="number of test case repeats (-1 for infinite). Warning: Logs will be overridden")
    parser.add_argument("--tool", default='iperf', help="Traffic generation tool: iperf, ditg")
    parser.add_argument("-T", "--traffic-type", help="Type of generated traffic")
    parser.add_argument("--c_min", help="Minimum bitrate of generated traffic")
    parser.add_argument("--c_max", help="Maximum bitrate of generated traffic")
    parser.add_argument("-g", "--clients", help="Number of clients generating traffic")
    parser.add_argument("-f", "--flows", help="Number of flows per client")
    args = parser.parse_args()

    # ---- argument validation ----------------------------------------
    # Topology is mandatory unless the user only asked for a listing.
    if not (args.list_scenarios or args.list_topologies) and not (args.topology):
        print "Wrong parameters: Need to set topology. Or just list topologies or scenarios."
        print ""
        parser.print_help()
        exit(1)
    # iperf needs the full set of traffic parameters; ditg is a stub.
    if args.tool and args.tool == 'iperf':
        if not (args.traffic_type and args.c_min and args.c_max and args.clients and args.flows):
            print "Not enough traffic parameters!"
            print ""
            parser.print_help()
            exit(1)
        else:
            util.validate_params(args.traffic_type)
    elif args.tool and args.tool == 'ditg':
        print "ditg full support not implemented yet"
        exit(1)

    # ---- listing modes (early return) -------------------------------
    if args.list_topologies:
        print "Topologies ([name] s=[no. switches]) h=[no. hosts]:"
        for t in topos_info:
            print "{} s={} h={}".format(t, topos_info[t][0], topos_info[t][1])
        return 0

    scenarios_dir = original_dir
    if args.scenarios_dir:
        scenarios_dir = args.scenarios_dir

    if args.list_scenarios:
        print "Scenarios ([name] ):"
        for s in get_scenarios(scenarios_dir):
            print s
        return 0

    # ---- scenario resolution ----------------------------------------
    distribution = ''
    if(args.random_distribution):
        distribution = "random"
    else:
        distribution = "uniform"

    traffic_generation = False
    scenario = create_scenario_name(args.traffic_type, args.c_min, args.c_max, args.clients, args.flows, args.topology, distribution, args.hosts)
    print scenario

    all_scenarios = get_scenarios(scenarios_dir)
    scenario_dir = None
    if scenario in all_scenarios:
        scenario_dir = os.path.join(scenarios_dir, scenario)
    else:
        # Scenario does not exist yet: create its directory and remember
        # to generate traffic configs for it later.
        os.mkdir(scenarios_dir + '/' + scenario)
        scenario_dir = os.path.join(scenarios_dir, scenario)
        traffic_generation = True

    # Get topology: reuse a generated one, or generate it from the zoo.
    topology = args.topology
    if isGenerated(topology, args.hosts, distribution):
        print "Topology {}-{}-{} exists".format(topology, distribution, args.hosts)
    else:
        if topology in get_zoo_topologies():
            generate_topology(topology, args.hosts, distribution, args.bandwidth)
        else:
            print "Wrong topology name: "+topology
            print "Available generated: "
            print get_generated_topologies()
            print "Available to generate: "
            print get_zoo_topologies()
            exit(1)
    # Generated topology modules follow this naming scheme.
    topology = "gen_" + topology + "_" + distribution + "_" + str(args.hosts)

    # Check if scenario can be run on topology
    #topology_hosts = topos_info[topology][1]
    #scenario_hosts = int(scenario_dir.split('_')[-1])
    #if scenario_hosts > topology_hosts:
    #    print "Cannot run scenario {} ({} hosts) on topology {} ({} hosts). Too many hosts in scenario.".format(scenario, scenario_hosts, topology, topology_hosts)
    #    exit(4)
    # print scenario_dir
    # if not os.path.exists(scenario_dir):
    #     print "Not found generated test dir: {}. Please run ./test_generator_itg.py first.".format(scenario_dir)
    #     exit(4)

    os.chdir(scenario_dir)

    # ---- log directory setup ----------------------------------------
    log_dir = None
    # Log dir implies storing logs
    if args.logs_dir:
        args.store_logs = True
    if args.store_logs:
        if args.logs_dir:
            if os.path.isabs(args.logs_dir):
                log_dir = args.logs_dir
            else:
                # Logs path relative to CWD
                log_dir = os.path.join(original_dir, args.logs_dir)
        else:
            log_dir = OUTPUT_DIR
        # Create or clean directory for logs
        util.clean_dir(log_dir, suffix=".log")
        print "Storing logs in: {}".format(os.path.join(os.getcwd(), log_dir))
    else:
        print "Not storing logs."

    print "Topology: {} Scenario: {}".format(topology, scenario)
    os.chdir(original_dir)

    # ---- topology module loading (legacy `imp` API) -----------------
    #topo = importlib.import_module("gen_{}".format(topology))
    #Change comments to load a fixed topology
    #f, filename, desc = imp.find_module('gen_bteurope', [os.path.abspath(os.getcwd()) + '/topologies'])
    #topo = imp.load_module('gen_bteurope', f, filename, desc)
    f, filename, desc = imp.find_module("{}".format(topology) , [os.path.abspath(os.getcwd()) + '/topologies'])
    topo = imp.load_module("{}".format(topology) , f, filename, desc)

    print "Launching Mininet.."
    net = Mininet(topo=topo.GeneratedTopo(), controller=RemoteController, switch=OVSSwitch, host=CPULimitedHost, link=TCLink, autoSetMacs=True)

    # Start network
    print "Starting network.."
    net.start()

    # NOTE(review): the guard below was commented out, so traffic configs
    # are (re)generated unconditionally — confirm this is intended.
    #if(traffic_generation):
    generate_traffic(net.hosts, scenario_dir, args.clients, args.flows, args.traffic_type, args.c_min, args.c_max)

    # STP switches need extra settling time after disconnecting from the
    # controller.
    if args.stp_switch:
        util.turn_legacy_on()
        print "Waiting {} s ...".format(LEGACY_INITIALIZATION_DELAY)
        time.sleep(LEGACY_INITIALIZATION_DELAY)
    print "Waiting {} s for initialization of mininet and controller...".format(INITIALIZATION_DELAY)
    time.sleep(INITIALIZATION_DELAY)

    # Preparing TestTool
    #TODO choosing various tools based on config
    if args.tool == 'iperf':
        print "Using iperf"
        test_tool = TestToolIperf(net, log_dir)
    elif args.tool == 'ditg':
        print "Using ditg"
        test_tool = TestToolITG(net, log_dir)
    else:
        print "ERROR Unknown tool: {}".format(args.tool)
        net.stop()
        sys.exit(3)

    os.chdir(scenario_dir)

    # Run servers: one receiver per mininet host.
    hosts = net.hosts
    print "Starting servers..."
    for host in hosts:
        host_name = host.name
        test_tool.run_server(host_name)

    # ---- test iterations --------------------------------------------
    iterations = args.repeat
    if iterations != 1:
        start_time = time.time()
    i = 0
    # -1 iterations means "repeat forever" (i never equals iterations).
    while i != iterations:
        if iterations != 1:
            print "Iteration: {} / {}".format(i+1, iterations)
        iteration_start_time = time.time()
        # Run ITGSends per host config: each *.TRAFFIC_FILE_SUFFIX file in
        # the scenario dir drives one client.
        threads = []
        for f in os.listdir(os.path.curdir):
            if os.path.isfile(f) and f.endswith(TRAFFIC_FILE_SUFFIX):
                host_name = get_hostname(f)
                test_tool.run_client(host_name, f)
        # CLI(net) # DEBUG
        print "Waiting for test end.."
        retcode = test_tool.wait_for_end()
        end_time = time.time()
        print "Testing time: {:0.2f} s".format(end_time-iteration_start_time)
        i += 1
    if iterations != 1:
        print "Total testing time: {:0.2f} s".format(end_time-start_time)

    # ---- teardown ---------------------------------------------------
    print "Stopping network.."
    net.stop()
    # Hand log ownership back to the invoking (non-root) user.
    s = os.stat('.')
    if args.store_logs:
        util.rchmod(log_dir, s.st_uid, s.st_gid)
    os.chdir(original_dir)
    print "Killing all test tool processes."
    test_tool.kill_all()

    # Map the tool's result onto the process exit code.
    if retcode == 2:
        print "END Test finished with WARNINGS"
        sys.exit(2)
    elif retcode == 1:
        print "ERROR CRITICAL server went down during test"
        sys.exit(1)
    else:
        print "END Test finished successfully"
        sys.exit(0)
def to_sk(pgr, sk_dir):
    """Encode program *pgr* into Sketch (.sk) files under *sk_dir*.

    Emits type.sk (class hierarchy), one cls.sk per non-collection class,
    log.sk, and a main.sk that sets solver pragmas and includes the rest.

    pgr    -- the parsed program; must expose .consist() and .classes.
    sk_dir -- output directory; cleaned if it exists, created otherwise.

    Side effects: rebinds the module global ``_const`` and calls ``reset()``
    so the encoding phase can be re-run per demo.
    NOTE: Python 2 code (``cStringIO``, module-level ``classes()``/
    ``methods()`` helpers).
    """
    # clean up result directory
    if os.path.isdir(sk_dir):
        util.clean_dir(sk_dir)
    else:
        os.makedirs(sk_dir)

    # reset global variables so that we can run this encoding phase per demo
    reset()

    # update global constants
    # TODO: conservative analysis of possible length of collections
    # TODO: counting .add() calls or something?
    # S bounds the backing-array length for all encoded Java collections.
    magic_S = 7
    global _const
    _const = u"""
int S = {}; // length of arrays for Java collections
""".format(magic_S)

    # type.sk
    logging.info("building class hierarchy")
    pgr.consist()
    # merge all classes and interfaces, except for primitive types
    clss, _ = util.partition(lambda c: util.is_class_name(c.name), classes())
    bases = rm_subs(clss)
    gen_type_sk(sk_dir, bases)

    # cls.sk: one sketch file per class; collect the generated file names
    # so main.sk can include them.
    cls_sks = []
    for cls in pgr.classes:
        # skip the collections, which will be encoded at type.sk
        if repr(cls).split('_')[0] in C.collections:
            continue
        cls_sk = gen_cls_sk(sk_dir, cls)
        if cls_sk:
            cls_sks.append(cls_sk)

    # log.sk
    gen_log_sk(sk_dir, pgr)

    # main.sk that imports all the other sketch files
    buf = cStringIO.StringIO()
    # --bnd-cbits: the number of bits for integer holes
    # (enough bits to index every method, with a floor of 5)
    bits = max(5, int(math.ceil(math.log(len(methods()), 2))))
    buf.write("pragma options \"--bnd-cbits {}\";\n".format(bits))
    # --bnd-unroll-amnt: the unroll amount for loops
    unroll_amnt = None
    # use a default value if not set
    unroll_amnt = magic_S # TODO: other criteria?
    if unroll_amnt:
        buf.write("pragma options \"--bnd-unroll-amnt {}\";\n".format(unroll_amnt))
    # --bnd-inline-amnt: bounds inlining to n levels of recursion
    inline_amnt = None
    # use a default value if not set
    # setting it 1 means there is no recursion
    if inline_amnt:
        buf.write("pragma options \"--bnd-inline-amnt {}\";\n".format(inline_amnt))
    buf.write("pragma options \"--bnd-bound-mode CALLSITE\";\n")

    # Pull in every generated sketch file.
    sks = ["log.sk", "type.sk"] + cls_sks
    for sk in sks:
        buf.write("include \"{}\";\n".format(sk))
    # TODO: make harness (if not exists)
    with open(os.path.join(sk_dir, "main.sk"), 'w') as f:
        f.write(buf.getvalue())
        logging.info("encoding " + f.name)
    buf.close()