def setnewflowtargets(self, streamuuid):
    """Point the RTC workspace at *streamuuid*: add it as a flow target if
    missing, then make it the default and current flow target."""
    shouter.shout("Set new Flowtargets")
    target_args = (self.scmcommand, self.repo, self.workspace, streamuuid)
    if not self.hasflowtarget(streamuuid):
        shell.execute("%s add flowtarget -r %s %s %s" % target_args)
    shell.execute("%s set flowtarget -r %s %s --default --current %s" % target_args)
def configure(args = "", preexec= "", debug='disable', shared='shared', deptrack = '', hosttarget = 'both'):
    """Run a package's ./configure with flags derived from the build environment.

    The string-valued parameters act as tri-state switches: a falsy value
    disables the corresponding flag group entirely, any truthy value lets the
    `environ` settings pick the concrete flags.
    """
    # shared/static: environ.static decides which pair of flags is emitted
    if not shared:
        shared = ''
    else:
        shared = '--disable-shared --enable-static' if environ.static else '--enable-shared --disable-static'
    # debug flags follow environ.debug the same way
    if not debug:
        debug = ''
    else:
        debug = '--enable-debug' if environ.debug else '--disable-debug'
    if deptrack == 'disable':
        deptrack = '--disable-dependency-tracking'
    else:
        deptrack = ''
    # Work out --host/--build for cross-ish builds on macOS, e.g.
    # x86_64-apple-darwin11.2.0 (original comment flagged this as a hack)
    host = ''
    if hosttarget == 'both':
        # build triple is derived from the running machine via uname
        build = sh('echo `uname -m`-apple-darwin`uname -r`', output = False)
        if environ.get('MONADE_HOST') != build.strip():
            host = '--host=%s --build=%s' % (environ.get('MONADE_HOST'), build.strip())
    act = fs.join(environ.src, technicolor.local, 'configure')
    command = '%s %s --with-pic --prefix="%s" %s %s %s %s' % (act, deptrack, environ.prefix, shared, debug, host, args)
    shell.execute(environ.flush(), command, environ.pwd, preexec)
def kill_worker():
    """Find the running worker process and force-kill it with sudo.

    No-op (with an error message) when no worker pid can be found.
    """
    worker_pid = find_worker_pid()
    if worker_pid is None:
        print "Error: failed to find worker pid"
        return
    # SIGKILL: the worker must not get a chance to clean up (fail-test scenario)
    fail_cmd = "sudo kill -9 %s" % (worker_pid)
    execute(fail_cmd, verbose=True)
def simulateCreationAndRenameInGitRepo(self, originalfilename, newfilename):
    """Simulate creating a file, committing/pushing it, then renaming it
    (a pure capitalization change) and re-staging the rename."""
    with open(originalfilename, 'a'):
        pass  # touch: create the file empty
    Initializer.initialcommit()
    Commiter.pushmaster()
    # rename differs only in capitalization
    os.rename(originalfilename, newfilename)
    shell.execute("git add -A")
    Commiter.handle_captitalization_filename_changes()
def __try_run(self, cmds):
    """Run *cmds* from the module root path, but only when the artefacts are
    out of date with respect to the prerequisites.

    With no prerequisites or no artefacts declared, the commands always run.
    The working directory is restored afterwards.
    """
    root_dir = os.getcwd()
    os.chdir(self.common_parameters.root_path)
    evaluated_artefacts = self.common_parameters.artefacts.eval()
    evaluated_prerequisites = self.common_parameters.prerequisites.eval()
    should_run = True
    if evaluated_prerequisites and evaluated_artefacts:
        # both sides declared: skip the run unless some artefact is stale
        should_run = False
        ui.debug("checking prerequisites ({!s}) for making {!s}".format(
            evaluated_prerequisites, evaluated_artefacts))
        for artefact in evaluated_artefacts:
            ui.debug(" " + artefact)
            if fsutils.is_any_newer_than(evaluated_prerequisites, artefact):
                # NOTE(review): the two string fragments below concatenate
                # without a space ("...{!s}needs...") — message typo kept as-is
                ui.debug(("going on because {!s}"
                          "needs to be rebuilt").format(artefact))
                should_run = True
                break
    if should_run:
        # export module variables into the environment before running
        variables.pollute_environment(self.common_parameters.module_name)
        evaluated_cmds = cmds.eval()
        for cmd in evaluated_cmds:
            ui.debug("running {!s}".format(cmd))
            shell.execute(cmd)
    os.chdir(root_dir)
def stop_p2p_persistent_network(control_socket_dir, control_iface, iface):
    """Remove the P2P persistent group *iface* via wpa_cli; best-effort.

    Failures are logged, not raised, so hotspot teardown can proceed.
    """
    try:
        shell.execute(
            '%s -p %s -i %s p2p_group_remove %s' %
            (P2P_CLI_PATH, control_socket_dir, control_iface, iface))
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; behavior for ordinary errors is unchanged
        LOGGER.error('failed to stop p2p persistent network')
def build_object(self, target_name, out_filename, in_filename, include_dirs, compiler_flags):
    """Compile *in_filename* to *out_filename*, but only if some prerequisite
    (fetched includes, the source itself, or any pake module file) is newer
    than the existing object file."""
    ui.debug("building object " + out_filename)
    with ui.ident:
        prerequisites = self.__fetch_includes(target_name, in_filename,
                                              include_dirs, compiler_flags)
        prerequisites.append(in_filename)
        # a change to the build scripts themselves also forces a rebuild
        ui.debug("appending prerequisites from pake modules: {!s}".format(
            fsutils.pake_files))
        prerequisites.extend(fsutils.pake_files)
        ui.debug("prerequisites: {!r}".format(prerequisites))
        if fsutils.is_any_newer_than(prerequisites, out_filename):
            fsutils.mkdir_recursive(os.path.dirname(out_filename))
            cmd = configurations.compiler(
            ) + " " + self.__prepare_compiler_flags(
                include_dirs, compiler_flags
            ) + " -c -o " + out_filename + " " + in_filename
            # verbose mode shows the full command line, otherwise just the source
            if command_line.args.verbose:
                ui.step(configurations.compiler(), cmd)
            else:
                ui.step(configurations.compiler(), in_filename)
            shell.execute(cmd)
def branch(branchname):
    """Switch to *branchname*, creating the branch first if it does not exist."""
    branchexist = shell.execute(
        "git show-ref --verify --quiet refs/heads/" + branchname)
    # Fixed: was `branchexist is 0` — identity comparison on ints only works
    # by accident of CPython small-int interning; use equality for exit codes.
    if branchexist == 0:
        Commiter.checkout(branchname)
    else:
        shell.execute("git checkout -b " + branchname)
def link_application(self, out_filename, in_filenames, link_with, library_dirs):
    """Link *in_filenames* into the executable *out_filename*.

    Skipped (with an "up to date" step) unless some input object or some
    linked library is newer than the existing binary. A link failure is fatal.
    """
    if fsutils.is_any_newer_than(
            in_filenames, out_filename) or self.__are_libs_newer_than_target(
            link_with, out_filename):
        ui.debug("linking application")
        ui.debug("  files: " + str(in_filenames))
        ui.debug("  with libs: " + str(link_with))
        ui.debug("  lib dirs: " + str(library_dirs))
        # -L flags for every library search directory
        parameters = " ".join("-L " + lib_dir for lib_dir in library_dirs)
        ui.bigstep("linking", out_filename)
        try:
            shell.execute(" ".join([
                configurations.compiler(), configurations.linker_flags(),
                "-o", out_filename, " ".join(in_filenames),
                self.__prepare_linker_flags(link_with), parameters
            ]))
        except Exception as e:
            # a broken link step leaves no usable output; abort the build
            ui.fatal("cannot link {}, reason: {!s}".format(
                out_filename, e))
    else:
        ui.bigstep("up to date", out_filename)
def pushbranch(branchname, force=False):
    """Push *branchname* to origin (forced when *force* is set).

    Returns the shell exit status, or None for an empty branch name.
    """
    if not branchname:
        return
    shouter.shout("Push of branch " + branchname)
    prefix = "git push -f origin " if force else "git push origin "
    return shell.execute(prefix + branchname)
def setup_config(self, config):
    """
    Assume `project_name` is a directory in the current folder
    (This is justified by __init__).
    Ensure benchmark/variation<config> exists; if it has to be created,
    populate it from both/ plus the typed or untyped module per config bit.
    """
    benchmark_dir = "%s/benchmark" % self.project_name
    variation_dir = "%s/variation%s" % (benchmark_dir, config)
    both_dir = "%s/both" % self.project_name
    ty_dir = "%s/typed" % self.project_name
    un_dir = "%s/untyped" % self.project_name
    if not os.path.exists(benchmark_dir):
        print("INFO: creating directory '%s'" % benchmark_dir)
        os.mkdir(benchmark_dir)
    if not os.path.exists(variation_dir):
        print("INFO: creating and filling directory '%s'" % variation_dir)
        os.mkdir(variation_dir)
        if os.path.exists(both_dir):
            shell.execute("cp %s/* %s" % (both_dir, variation_dir))
        # each bit of `config` selects typed ("1") or untyped for one module
        for idx, bit in enumerate(config):
            fname = self.module_names[idx].rsplit("/", 1)[-1]
            source_dir = ty_dir if bit == "1" else un_dir
            shell.execute("cp %s/%s %s" % (source_dir, fname, variation_dir))
    return
def build_object(self, target_name, out_filename, in_filename, include_dirs, compiler_flags):
    """Compile *in_filename* to *out_filename* when prerequisites are newer.

    Variant that resolves the source to an absolute path before fetching
    includes and compiling (the relative name is still what gets recorded as
    a prerequisite and shown in non-verbose output).
    """
    abs_source = os.path.join(os.getcwd(), in_filename)
    ui.debug("building object " + out_filename)
    with ui.ident:
        prerequisites = self.__fetch_includes(target_name, abs_source, include_dirs, compiler_flags)
        prerequisites.append(in_filename)
        # rebuild also when the pake build scripts themselves changed
        ui.debug("appending prerequisites from pake modules: {!s}"
                 .format(fsutils.pake_files))
        prerequisites.extend(fsutils.pake_files)
        ui.debug("prerequisites: {!r}".format(prerequisites))
        if fsutils.is_any_newer_than(prerequisites, out_filename):
            fsutils.mkdir_recursive(os.path.dirname(out_filename));  # NOTE(review): stray semicolon kept verbatim
            cmd = configurations.compiler() + " " + self.__prepare_compiler_flags(include_dirs, compiler_flags) + " -c -o " + out_filename + " " + abs_source
            if command_line.args.verbose:
                ui.step(configurations.compiler(), cmd)
            else:
                ui.step(configurations.compiler(), in_filename)
            shell.execute(cmd)
def getchangeentriesbytypeandvalue(self, comparetype, value):
    """Run an 'scm compare' of the workspace against *comparetype*/*value*,
    capture the output in a log file, and return the parsed change entries."""
    logfile = self.config.getlogpath("Compare_" + comparetype + "_" + value + ".txt")
    timestamp_format = "yyyy-MM-dd HH:mm:ss"
    command = "%s --show-alias n --show-uuid y compare ws %s %s %s -r %s -I sw -C @@{name}@@{email}@@ --flow-directions i -D @@\"%s\"@@" \
        % (self.config.scmcommand, self.config.workspace, comparetype, value,
           self.config.repo, timestamp_format)
    shell.execute(command, logfile)
    return ImportHandler.getchangeentriesfromfile(logfile)
def load(self):
    """Force-(re)load the RTC workspace, then restore any .gitignore files the
    forced reload shed away."""
    loadcommand = "%s load -r %s %s --force" % (self.scmcommand, self.repo, self.workspace)
    if self.config.includecomponentroots:
        loadcommand = loadcommand + " --include-root"
    shouter.shout("Start (re)loading current workspace: " + loadcommand)
    shell.execute(loadcommand)
    shouter.shout("Load of workspace finished")
    Commiter.restore_shed_gitignore(Commiter.get_untracked_statuszlines())
def setcomponentstobaseline(self, componentbaselineentries, streamuuid):
    """Set every component of the workspace to its recorded baseline.

    :param componentbaselineentries: entries carrying component/baseline ids
    :param streamuuid: uuid of the stream the workspace flows with
    """
    for entry in componentbaselineentries:
        shouter.shout("Set component '%s' to baseline '%s'" % (entry.componentname,
                                                               entry.baselinename))
        # Fixed: use the configured scm command instead of the hard-coded
        # 'lscm', consistent with the other scm invocations in this class.
        replacecommand = "%s set component -r %s -b %s %s stream %s %s --overwrite-uncommitted" % \
                         (self.scmcommand, self.repo, entry.baseline, self.workspace,
                          streamuuid, entry.component)
        shell.execute(replacecommand)
def setcomponentstobaseline(self, componentbaselineentries, streamuuid):
    """Re-point each component of the workspace at its baseline, overwriting
    uncommitted changes."""
    commandtemplate = "%s set component -r %s -b %s %s stream %s %s --overwrite-uncommitted"
    for componententry in componentbaselineentries:
        shouter.shout("Set component '%s'(%s) to baseline '%s' (%s)"
                      % (componententry.componentname, componententry.component,
                         componententry.baselinename, componententry.baseline))
        shell.execute(commandtemplate % (self.scmcommand, self.repo,
                                         componententry.baseline, self.workspace,
                                         streamuuid, componententry.component))
def createandload(self, stream, componentbaselineentries=None):
    """Create the workspace from *stream*, pin component baselines, and load it.

    :param stream: stream uuid/name to create the workspace from
    :param componentbaselineentries: optional explicit baselines; when omitted
        (or empty, matching the old `[]` default) the initial baselines are
        determined via ImportHandler.
    """
    # Fixed: mutable default argument `[]` replaced by None sentinel;
    # falsy behavior is identical, so existing callers are unaffected.
    shell.execute("%s create workspace -r %s -s %s %s" %
                  (self.scmcommand, self.repo, stream, self.workspace))
    if componentbaselineentries:
        self.setcomponentstobaseline(componentbaselineentries, stream)
    else:
        self.setcomponentstobaseline(
            ImportHandler(self.config).determineinitialbaseline(stream), stream)
    self.load()
def initGitRepo(shell):
    """Initialise a fresh git repository, asking for confirmation before
    wiping an existing .git directory; exits with status 1 on refusal."""
    if os.path.isdir(".git"):
        if not ask():
            print("Link canceled")
            sys.exit(1)
        shell.execute(["rm", "-rf", ".git"])
    shell.execute(["git", "init"])
def get_working_hotspot_iface_using_wext():
    """Return the interface currently in 'Mode:Master' (AP mode) according to
    iwconfig — 'wl0.1' is probed first, then the main wifi interface.
    Returns None when neither is in master mode or on any error."""
    try:
        if 'Mode:Master' in shell.execute('%s %s' % (IWCONFIG_PATH, 'wl0.1')):
            return 'wl0.1'
        if 'Mode:Master' in shell.execute('%s %s' % (IWCONFIG_PATH, WIFI_INTERFACE)):
            return WIFI_INTERFACE
        return None
    except Exception:
        # narrowed from a bare `except:`; still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt
        LOGGER.exception('failed to get working hotspot iface using wext')
        return None
def addandcommit(changeentry):
    """Stage everything and commit *changeentry* under its author, pushing the
    branch every 30 commits to keep memory bounded."""
    Commiter.replaceauthor(changeentry.author, changeentry.email)
    shell.execute("git add -A")
    shell.execute(Commiter.getcommitcommand(changeentry))
    Commiter.commitcounter += 1
    # Fixed: was `is 30` — int identity only works via CPython interning
    # for small values; use equality for the counter check.
    if Commiter.commitcounter == 30:
        shouter.shout("30 Commits happend, push current branch to avoid out of memory")
        Commiter.pushbranch("")
        Commiter.commitcounter = 0
    shouter.shout("Commited change in local git repository")
def start_p2p_persistent_network(iface, control_socket_dir, ssid, password, sets_channel=False):
    """Create and start a persistent P2P (wifi-direct) group via wpa_cli.

    Configures a new network (mode/ssid/WPA-PSK credentials), optionally pins
    the P2P channels to the upstream channel, then brings the group up.
    Returns the network index string allocated by add_network.
    """
    shell.execute('%s -p %s -i %s p2p_set disabled 0' % (P2P_CLI_PATH, control_socket_dir, iface))
    shell.execute(
        '%s -p %s -i %s set driver_param use_p2p_group_interface=1' % (P2P_CLI_PATH, control_socket_dir, iface))
    # add_network prints the new network's index on stdout
    index = shell.execute('%s -p %s -i %s add_network' % (P2P_CLI_PATH, control_socket_dir, iface)).strip()

    def set_network(param):
        # helper: apply one set_network parameter to the freshly added network
        shell.execute('%s -p %s -i %s set_network %s %s' % (P2P_CLI_PATH, control_socket_dir, iface, index, param))

    set_network('mode 3')
    set_network('disabled 2')
    set_network('ssid \'"%s"\'' % ssid)
    set_network('key_mgmt WPA-PSK')
    set_network('proto RSN')
    set_network('pairwise CCMP')
    set_network('psk \'"%s"\'' % password)
    frequency, channel = get_upstream_frequency_and_channel()
    if channel:
        # 0 means "let the driver choose"; only pin when sets_channel is set
        channel = channel if sets_channel else 0
        reg_class = 81 if sets_channel else 0
        reset_p2p_channels(iface, control_socket_dir, channel, reg_class)
        reset_p2p_channels(WIFI_INTERFACE, get_wpa_supplicant_control_socket_dir(), channel, reg_class)
    if frequency:
        # freq wants e.g. '2437' — strip the dot from '2.437'-style values
        shell.execute('%s -p %s -i %s p2p_group_add persistent=%s freq=%s ' % (
            P2P_CLI_PATH, control_socket_dir, iface, index, frequency.replace('.', '')))
    else:
        shell.execute('%s -p %s -i %s p2p_group_add persistent=%s' % (P2P_CLI_PATH, control_socket_dir, iface, index))
    time.sleep(2)  # give wpa_supplicant a moment to bring the group up
    return index
def copy_resources(self, toolchain):
    """rsync every declared resource into the toolchain's build directory,
    running from the module root and restoring the cwd afterwards."""
    previous_dir = os.getcwd()
    os.chdir(self.common_parameters.root_path)
    for res in self.common_parameters.resources.eval():
        ui.step("copy", res)
        shell.execute("rsync --update -r '{resource}' '{build_dir}/'".format(
            resource=res, build_dir=toolchain.build_dir()))
    os.chdir(previous_dir)
def execute_testing_programs(submission_dir):
    """Copy the tester sources plus fixture .txt files into every student
    directory under *submission_dir*, then run the tester in each one."""
    src_dir = os.path.dirname(os.path.realpath(__file__))
    cp_files = ['ArrayListLoop.java', 'DblListnode.java', 'LinkedLoopTester.java', 'Loop.java']
    for root, dirs, files in os.walk(submission_dir, topdown=True):
        for student_dir in dirs:
            target = os.path.join(root, student_dir)
            for name in cp_files:
                shutil.copy(os.path.join(src_dir, name), target)
            execute("cp -f *.txt '%s'\n" % (target), verbose=True)
            execute_tester_in_student_dir(target)
def copy_resources(self, toolchain):
    """Mirror each declared resource into the build directory via rsync;
    cwd is switched to the module root for the duration and then restored."""
    saved_cwd = os.getcwd()
    os.chdir(self.common_parameters.root_path)
    rsync_template = "rsync --update -r '{resource}' '{build_dir}/'"
    for resource in self.common_parameters.resources.eval():
        ui.step("copy", resource)
        shell.execute(rsync_template.format(resource=resource,
                                            build_dir=toolchain.build_dir()))
    os.chdir(saved_cwd)
def getOptions():
    '''Parse command-line options and dispatch to the GUI or the shell.

    (Original docstring was Esperanto: "Analizu datumoj" — "analyze data".)
    Uses Python 2 `dict.has_key`; does not return — hands `opts` to gui.main
    or shell.execute.
    '''
    args = argv[1:]
    parser = argparse.ArgumentParser(description="Parses command.",
                                     argument_default=argparse.SUPPRESS)
    parser.add_argument("project", nargs="?")
    parser.add_argument("-c", "--config", nargs="?", dest='config',
                        help="Optional path to a non-default config file.")
    parser.add_argument("-s", "--shell", dest='shell', action='store_true',
                        help="Keep it in the shell. Do not launch the GUI.")
    parser.add_argument(
        "-m", "--make-project", nargs="?", dest='make', help=
        "Path to a directory of sequential images to load as if it were a StopGo project."
    )
    options = parser.parse_args(args)
    # SUPPRESS above means absent options simply don't appear in `opts`
    opts = vars(options)
    # NOTE(review): the block below is dead code preserved inside a string
    # literal (it even contains a syntax error); kept byte-identical.
    '''
    if not opts.has_key('project'):
        opts['project'] = 'stopgo_project.db'
    if not opts.has_key('config'):
        if os.path.isfile( os.path.join( HOME + '.config/stopgo.conf' ) ):
            opts['config'] = os.path.isfile( os.path.join( HOME + '/.config/stopgo.conf' ) ):
        elif os.path.isfile('/etc/config/stopgo.conf'):
            opts['config'] = '/etc/config/stopgo.conf'
        else:
            print('Failed: No template file found.')
            exit()
    '''
    # -s/--shell suppressed and absent => launch the GUI
    if not opts.has_key('shell'):
        import gui
        gui.main(opts)
        #applo = wx.App()
        #instancer = gui.GUI(parent=None, id=-1, title="stopgo")
        #instancer.Show()
        #applo.MainLoop()
    else:
        import shell
        shell.execute(opts)
def execute_testcases():
    """Run each diff-based testcase, appending a banner and the diff output to
    grade.out; a non-zero diff is an expected (graded) outcome, not an error."""
    testcases = ['java MessageLoopEditor < sampleInput.txt | diff - sampleOutput1.txt']
    for cmd in testcases:
        try:
            execute("echo '\n----> %s' >> grade.out" % cmd)
            execute("%s >> grade.out 2>&1" % cmd, verbose=True)
        except subprocess.CalledProcessError:
            pass  # failing diff: result already captured in grade.out
    return
def restart_thrift(custom_conf=""):
    """Kill any running hive thriftserver, wait until nothing listens on port
    10000, then start thrift with *custom_conf*."""
    while True:
        kill_thrift = "ps -ef |grep hive.thriftserver|grep -v grep | awk '{print $2}' | xargs kill"
        # exit 123 comes from xargs when there was nothing to kill — ignore it
        execute(kill_thrift, ignored_returns=[123], verbose=True)
        try:
            # NOTE(review): `res` is unused; the call is only a liveness probe
            res = execute("ss -tlnp |grep 10000")
        except subprocess.CalledProcessError as e:
            if e.returncode == 1:
                # grep exit 1: no listener on 10000 — server is down
                # have not get up
                break
        else:
            # port still open: give the server a second and retry the kill
            time.sleep(1)
    return start_thrift(custom_conf)
def setup_networking(hotspot_interface):
    """Assign the hotspot IP to *hotspot_interface*, (re)start dnsmasq for
    DHCP/DNS on the 10.24.1.0/24 range, and log the upstream wifi status."""
    control_socket_dir = get_wpa_supplicant_control_socket_dir()
    setup_network_interface_ip(hotspot_interface, '10.24.1.1', '255.255.255.0')
    try:
        shell.execute('%s dnsmasq' % KILLALL_PATH)
    except Exception:
        # narrowed from a bare `except:`; killall legitimately fails when no
        # dnsmasq is running, so this stays best-effort
        LOGGER.exception('failed to killall dnsmasq')
    shell.execute('%s -i %s --dhcp-authoritative --no-negcache --user=root --no-resolv --no-hosts '
                  '--server=8.8.8.8 --dhcp-range=10.24.1.2,10.24.1.254,12h '
                  '--dhcp-leasefile=/data/data/fq.router/dnsmasq.leases '
                  '--pid-file=/data/data/fq.router/dnsmasq.pid' % (DNSMASQ_PATH, hotspot_interface))
    log_upstream_wifi_status('after setup networking', control_socket_dir)
def createandload(self, stream, componentbaselineentries=None, create=True):
    """Optionally create the workspace from *stream*, set component baselines
    (explicit ones, or those derived from the stream), and load it.

    :param componentbaselineentries: optional baselines; None/empty falls back
        to the stream's component baseline entries.
    :param create: skip workspace creation when False.
    """
    # Fixed: mutable default argument `[]` replaced by None sentinel;
    # truthiness below is unchanged, so callers passing [] behave the same.
    if create:
        shell.execute("lscm create workspace -r %s -s %s %s" %
                      (self.config.repo, stream, self.workspace))
    if componentbaselineentries:
        self.setcomponentstobaseline(componentbaselineentries, stream)
    else:
        self.setcomponentstobaseline(
            ImportHandler(self.config).getcomponentbaselineentriesfromstream(stream),
            stream)
    self.load()
def bootstrap(args = "", preexec= ""):
    """Locate the package's bootstrap script (bootstrap, bootstrap.sh, or a
    parent-directory bootstrap) and run it with the standard prefix/toolset."""
    candidates = (
        fs.join(environ.src, technicolor.local, 'bootstrap'),
        fs.join(environ.src, technicolor.local, 'bootstrap.sh'),
        fs.join(environ.src, technicolor.local, '..', 'bootstrap'),
    )
    for candidate in candidates:
        if fs.exists(candidate):
            command = candidate
            break
    else:
        console.fail('Calling bootstrap but there is nothing like that around!')
    # --libdir="%s/lib" environ.prefix,
    command = '%s --prefix="%s" --with-toolset=$MY_BOOST_TOOLSET %s' % (command, environ.prefix, args)
    shell.execute(environ.flush(), command, environ.pwd, preexec)
def reset_p2p_channels(iface, control_socket_dir, channel, reg_class):
    """Set the P2P operating/listen channel and regulatory class on *iface*
    and persist the wpa_supplicant config; best-effort (errors are logged)."""
    try:
        shell.execute('%s -p %s -i %s set p2p_oper_channel %s' % (P2P_CLI_PATH, control_socket_dir, iface, channel))
        shell.execute('%s -p %s -i %s set p2p_oper_reg_class %s' % (P2P_CLI_PATH, control_socket_dir, iface, reg_class))
        shell.execute('%s -p %s -i %s set p2p_listen_channel %s' % (P2P_CLI_PATH, control_socket_dir, iface, channel))
        shell.execute('%s -p %s -i %s set p2p_listen_reg_class %s' % (P2P_CLI_PATH, control_socket_dir, iface, reg_class))
        shell.execute('%s -p %s -i %s save_config' % (P2P_CLI_PATH, control_socket_dir, iface))
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate
        LOGGER.exception('failed to reset p2p channels')
def run_config(self, config, entry_point="main.rkt"):
    """
    Sample the configuration `config` for num_iters iterations via the Racket
    run script, then return the list of observed runtimes.
    """
    self.setup_config(config)
    print("Running config '%s' for %s iterations"% (config, self.num_iters))
    racket_argv = [
        "racket", self.run_script,
        "-i", str(self.num_iters),   # -i : Number of iterations
        "-o", self.tmp_output,       # -o : Location to save output
        "-x", config,                # -x : Exact configuration to run
        "-e", entry_point,           # -e : Main file to execute
        self.project_name,
    ]
    shell.execute(" ".join(racket_argv))
    return self.parse_rkt_results()
def execute_tester_in_student_dir(student_dir):
    """Compile and run the tester inside *student_dir*, logging everything to
    a fresh grade.out; always chdirs back to the grader's directory."""
    src_dir = os.path.dirname(os.path.realpath(__file__))
    compile_cmd = 'javac *.java >> grade.out 2>&1'
    run_cmd = 'java LinkedLoopTester >> grade.out 2>&1'
    # chdir to student's personal folder
    print 'change dir to %s' % student_dir
    os.chdir(student_dir)
    # start from a clean grade log
    if os.path.exists('grade.out'):
        os.remove('grade.out')
    try:
        # compile
        execute(compile_cmd, verbose=True)
        # run the testing
        execute("echo '----> General Test' >> grade.out")
        execute(run_cmd, verbose=True)
    except TimeoutError:
        print "TimeoutError: check student\'s program"
        execute("echo 'TimeoutError: timeout when general testing' >> grade.out 2>&1")
    except subprocess.CalledProcessError as detail:
        # compile/run failure: details already captured in grade.out
        print "CalledProcessError: %s" % detail
    try:
        # run the testcases
        execute_testcases()
    finally:
        # always return to the grader directory, even if a testcase blows up
        os.chdir(src_dir)
def createattributes():
    """
    Create and commit a .gitattributes file, but only when attribute lines are
    configured and no .gitattributes exists yet.
    """
    config = configuration.get()
    if len(config.gitattributes) <= 0:
        return
    attributes_file = ".gitattributes"
    if os.path.exists(attributes_file):
        return
    with open(attributes_file, "w") as attributes:
        for entry in config.gitattributes:
            attributes.write(entry + '\n')
    shell.execute("git add " + attributes_file)
    shell.execute("git commit -m %s -q" % shell.quote("Add .gitattributes"))
def createattributes():
    """
    Write a .gitattributes file using the platform line separator and commit
    it — only when configured and not already present.
    """
    config = configuration.get()
    if len(config.gitattributes) > 0:
        attributes_path = ".gitattributes"
        if not os.path.exists(attributes_path):
            with open(attributes_path, "w") as fileobj:
                fileobj.writelines(entry + os.linesep for entry in config.gitattributes)
            shell.execute("git add " + attributes_path)
            shell.execute("git commit -m %s -q" % shell.quote("Add .gitattributes"))
def send_packet(ovs_path, br, seq_no, src_mac, src_ip, dst_mac, dst_ip, out_ofp_port, payload):
    """Build a test packet (payload tagged with *seq_no* and a random cookie)
    and inject it on bridge *br* via 'ovs-ofctl packet-out', resubmitting to
    table 4."""
    print "PKT (" + out_ofp_port + "): {" + src_mac + "," + src_ip + "} -> {" + dst_mac + "," + dst_ip + "}"
    # per-process deterministic cookie source
    random.seed(os.getpid())
    pkt_cookie = random.randint(1, 65535)
    ip_payload = payload + "-" + str(seq_no)
    pktstr = packet_create(pkt_cookie, src_mac, src_ip, dst_mac, dst_ip, ip_payload)
    # str.encode('hex') is Python 2 only
    pkthex = pktstr.encode('hex')
    cmd = [
        ovs_path + "/ovs-ofctl", "packet-out", br, out_ofp_port,
        "resubmit(,4)", pkthex
    ]
    shell.execute(cmd)
def of_rktd(self, rktdfile):
    """
    (-> Path-String Path-String)
    Convert a raw .rktd benchmark-results file into a human-readable,
    Python-parseable .tab file via the 'sexp-to-tab.rkt' Racket script and
    return the new filename. Raises ValueError when the script is missing.
    """
    print("Parsing a .tab file from the raw source '%s'" % rktdfile)
    converter = shell.find_file("sexp-to-tab.rkt")
    if not converter:
        raise ValueError("Could not access 'sexp_to_tab' script. Cannot parse '%s'." % rktdfile)
    shell.execute("racket %s %s" % (converter, rktdfile))
    # swap the .rktd suffix for .tab
    return "%s.tab" % rktdfile.rsplit(".", 1)[0]
def execute_testcases():
    """Run every InteractiveDBTester diff testcase, appending a banner and the
    diff output to grade.out; failing diffs are expected and ignored."""
    testcases = (
        'java InteractiveDBTester input1.txt < case1.txt | diff - expected_case1.txt',
        'java InteractiveDBTester input1.txt < d1.txt | diff - expected_d1.txt',
        'java InteractiveDBTester input1.txt < r1.txt | diff - expected_r1.txt',
        'java InteractiveDBTester input1.txt < sf1.txt | diff - expected_sf1.txt',
        'java InteractiveDBTester input2.txt < i2.txt | diff - expected_i2.txt',
        'java InteractiveDBTester input3.txt < user3.txt | diff - expected_user3.txt',
    )
    for test_cmd in testcases:
        try:
            execute("echo '\n----> %s' >> grade.out" % test_cmd)
            execute("%s >> grade.out 2>&1" % test_cmd, verbose=True)
        except subprocess.CalledProcessError:
            pass  # non-zero diff — result is already recorded in grade.out
    return
def get_disk_net_read_write():
    """Sample cumulative disk (vda1) and network (eth0) read/write counters
    from /proc and return them as a dict of ints."""
    disk_fields = re.split(r"\s+", execute("grep vda1 /proc/diskstats").strip())
    net_fields = re.split(r"\s+", execute("grep eth0 /proc/net/dev").strip())
    return {
        "disk_read": int(disk_fields[5]),
        "disk_write": int(disk_fields[9]),
        "net_read": int(net_fields[1]),
        "net_write": int(net_fields[9]),
    }
def run_spark_query_with_fail_tests(query_name):
    """Run TPC-DS query *query_name* once to measure its baseline duration,
    then re-run it under several failure scenarios (cache sync or worker kill
    at ~25% / ~75% of the baseline time) and return the per-case results."""
    # measure the elapsed time for the query at first
    cmd = "(time /home/ubuntu/software/spark-1.5.0-bin-hadoop2.6/bin/beeline -u jdbc:hive2://group-2-vm1:10000/tpcds_text_db_1_50 -n ubuntu -f $HOME/workload/hive-tpcds-tpch-workload/sample-queries-tpcds/query%s.sql) 2> $HOME/big-data-system/assignment2/output/tpcds_query%s_spark_failtest_no.out" % (query_name, query_name)
    #custom_conf = "--conf spark.sql.shuffle.partitions=10 --conf spark.storage.memoryFraction=0.02"
    custom_conf=""
    # clean state before the baseline run
    rm_eventlog_dir()
    rm_local_dirs()
    restart_spark()
    restart_thrift(custom_conf)
    sync_caches()
    start_time = time.time()
    execute(cmd, verbose=True)
    elapsed_time = time.time() - start_time
    # fault-injection points: roughly 25% and 75% of the baseline duration
    fail_timing25 = elapsed_time / 4.0 + 5
    fail_timing75 = elapsed_time / 4.0 * 3.0 - 5
    print "The task took %f seconds. The 25 failing timing would be %f; and the 75 failing timing would be %f" % (elapsed_time, fail_timing25, fail_timing75)
    # start the failing
    # "_orig" = no injected failure; "1-*" = cache sync; "2-*" = worker kill
    fail_cases = ["_orig", "1-25%", "1-75%", "2-25%", "2-75%"]
    res = []
    for case in fail_cases:
        cmd = "(time /home/ubuntu/software/spark-1.5.0-bin-hadoop2.6/bin/beeline -u jdbc:hive2://group-2-vm1:10000/tpcds_text_db_1_50 -n ubuntu -f $HOME/workload/hive-tpcds-tpch-workload/sample-queries-tpcds/query%s.sql) 2> $HOME/big-data-system/assignment2/output/tpcds_query%s_spark_failtest%s.out" % (query_name, query_name, case)
        custom_conf = "--conf spark.sql.shuffle.partitions=10 --conf spark.storage.memoryFraction=0.02"
        # fresh cluster state for each failure case
        rm_eventlog_dir()
        rm_local_dirs()
        restart_spark()
        restart_thrift(custom_conf)
        sync_caches()
        if "1-" in case:
            if "25%" in case:
                print "Set a timer for failing-task-1 at 25%"
                t = Timer(fail_timing25, sync_caches)
                t.start()
            elif "75%" in case:
                print "Set a timer for failing-task-1 at 75%"
                t = Timer(fail_timing75, sync_caches)
                t.start()
        elif "2-" in case:
            if "25%" in case:
                print "Set a timer for failing-task-2 at 25%"
                t = Timer(fail_timing25, kill_worker)
                t.start()
            elif "75%" in case:
                print "Set a timer for failing-task-2 at 75%"
                t = Timer(fail_timing75, kill_worker)
                t.start()
        res.append(execute(cmd, verbose=True))
    return res
def discard(*changeentries):
    """Discard the given change sets from the RTC workspace; on success,
    mark every entry as unaccepted."""
    config = configuration.get()
    idstodiscard = Changes._collectids(changeentries)
    exitcode = shell.execute(config.scmcommand + " discard -w " + config.workspace
                             + " -r " + config.repo + " -o" + idstodiscard)
    # Fixed: was `exitcode is 0` — int identity relies on CPython interning;
    # exit codes must be compared with ==.
    if exitcode == 0:
        for changeEntry in changeentries:
            changeEntry.setUnaccepted()
def accept(config, logpath, *changeentries):
    """Accept the given change sets into the workspace, appending the command
    output to *logpath*; the full command is kept on Changes for diagnostics."""
    for entry in changeentries:
        shouter.shout("Accepting: " + entry.tostring())
    revisions = Changes._collectids(changeentries)
    Changes.latest_accept_command = (config.scmcommand + " accept -v -o -r "
                                     + config.repo + " -t " + config.workspace
                                     + " --changes" + revisions)
    return shell.execute(Changes.latest_accept_command, logpath, "a")
def __parse_dump_ports(self, match_pattern, field):
    """Scan 'ovs-ofctl dump-ports' output for this instance's port block and
    return token *field* from the first line containing *match_pattern*,
    or None when nothing matches."""
    cmd = [self.ofctl_path, "dump-ports", self.br]
    dump_ports = shell.execute(cmd).splitlines()
    out = None
    process_this_block = False
    ofp_port = self.get_ofp_port()
    # NOTE(review): ofp_port is concatenated below, so it appears to be a
    # string — `ofp_port < 10` is then a str/int comparison (Python 2 only).
    # Both branches also build the identical string; this looks like leftover
    # width-padding logic. Kept verbatim — confirm intent before changing.
    if (ofp_port < 10):
        portstr = "port " + ofp_port + ":"
    else:
        portstr = "port " + ofp_port + ":"
    for line in dump_ports:
        line_tok = line.split()
        if (line_tok == None) or (line_tok == []):
            continue
        # a "port N:" header starts a new per-port block
        if (line.find("port") >= 0):
            this_portnum = line_tok[1].replace(":", "")
            # NOTE(review): same duplicated-branch pattern as above
            if (this_portnum < 10):
                this_portstr = "port " + line_tok[1]
            else:
                this_portstr = "port " + line_tok[1]
            if (portstr == this_portstr):
                process_this_block = True
            else:
                process_this_block = False
        if (process_this_block == False):
            continue
        if (line.find(match_pattern) < 0):
            continue
        # first matching line inside our port's block wins
        out = line_tok[field]
        break
    return out
def get_pid(procname):
    """Return the pid (as a string) from the first 'ps ax' line mentioning
    *procname*, or None when no such process is listed."""
    for ps_line in shell.execute(["ps", "ax"]).splitlines():
        if procname in ps_line:
            return ps_line.split()[0]
def restore_shed_gitignore(statuszlines):
    """
    If a force reload of the RTC workspace sheds .gitignore files away, we
    need to restore them. Such files show up as deletions in the git status z
    output; each one is checked out again, provided its sibling .jazzignore
    still exists.

    :param statuszlines: the git status z output lines
    """
    gitignore = ".gitignore"
    suffixlen = len(gitignore)
    for deleted in Commiter.splitoutputofgitstatusz(statuszlines, " D "):
        if deleted.endswith(gitignore):
            # only restore .gitignore if sibling .jazzignore still exists
            jazzignore = deleted[:-suffixlen] + ".jazzignore"
            if os.path.exists(jazzignore):
                shell.execute("git checkout -- %s" % deleted)
def checkbranchname(branchname):
    """Return True when *branchname* is a valid git branch name
    (per 'git check-ref-format')."""
    exitcode = shell.execute(
        "git check-ref-format --normalize refs/heads/" + branchname)
    # Fixed: was `exitcode is 0` (int identity via interning); also collapsed
    # the if/else into a direct boolean expression.
    return exitcode == 0
def has_diff():
    """
    Check if there would be committed any changes: tracked modifications
    (git diff) or untracked, non-ignored files (sed q1 forces exit 1 when
    ls-files produces output).
    """
    # Fixed: was `... is 1` — exit statuses must be compared with ==,
    # not identity.
    return shell.execute(
        "git diff --quiet && git ls-files --other --exclude-standard | sed q1 >/dev/null"
    ) == 1
def eval(self, symbol_table):
    """Evaluate a makefile assignment: compute the RHS according to the
    assignment operator's flavor and bind it to the LHS in *symbol_table*.

    Always returns None (assignments expand to nothing).
    Raises VersionError for '!=' on make < 4, Unimplemented for unknown ops.
    """
    self.sanity()
    lhs = self.token_list[0].eval(symbol_table)
    op = self.token_list[1]
    logger.debug("assignment lhs=%s op=%s", lhs, op)
    # handle different styles of assignment
    if op == ":=" or op == "::=":
        # simply expanded: evaluate the RHS right now
        rhs = self.token_list[2].eval(symbol_table)
    elif op == "=":
        # recursively expanded
        # store the expression in the symbol table without evaluating
        rhs = self.token_list[2]
    elif op == "!=":
        # != seems to be a > 3.81 feature so add a version check here
        if Version.major < 4:
            raise VersionError("!= not in this version of make")
        # execute RHS as shell and assign its output
        s = self.token_list[2].eval(symbol_table)
        rhs = shell.execute(s)
    else:
        # TODO: remaining operators (+=, ?=, ...) not handled yet
        raise Unimplemented("op=%s"%op)
    logger.debug("assignment rhs=%s", rhs)
    symbol_table.add(lhs, rhs)
    return None
def get_bridge(self):
    """Return the first bridge name reported by 'ovs-appctl ofproto/list',
    addressing the running ovs-vswitchd via its pid-named control socket."""
    vswitchd_pid = shell.get_pid("ovs-vswitchd")
    control_target = "%s/ovs-vswitchd.%s.ctl" % (self.ovs_db_path, vswitchd_pid)
    bridges = shell.execute(
        [self.appctl_path, "-t", control_target, "ofproto/list"]).splitlines()
    return bridges[0]
def createignore():
    """Write an initial .gitignore (jazz metadata plus any configured ignore
    directories) and commit it — only when no .gitignore exists yet."""
    ignorefile = ".gitignore"
    if os.path.exists(ignorefile):
        return
    with open(ignorefile, "w") as ignore:
        for entry in (".jazz5", ".metadata", ".jazzShed"):
            ignore.write(entry + '\n')
        config = configuration.get()
        if len(config.ignoredirectories) > 0:
            ignore.write('\n' + "# directories" + '\n')
            for directory in config.ignoredirectories:
                ignore.write('/' + directory + '\n')
            ignore.write('\n')
    shell.execute("git add " + ignorefile)
    shell.execute("git commit -m %s -q" % shell.quote("Add .gitignore"))
def __get_ip_iface_mac(iface):
    """Return the MAC address of *iface* parsed from 'ip addr show' output
    (the token after 'link/ether'); None when no such line exists."""
    for line in shell.execute(["ip", "addr", "show", iface]).splitlines():
        if "link/ether " in line:
            return line.split()[1]
def __get_ip_iface_ip(iface):
    """Return the IPv4 address of *iface* (prefix length stripped) parsed
    from 'ip addr show'; None when the interface has no 'inet' line."""
    for line in shell.execute(["ip", "addr", "show", iface]).splitlines():
        if "inet " not in line:
            continue
        ip_with_prefix = line.split()[1]
        return ip_with_prefix.split("/")[0]