def goto_city(driver):
    msg.info("Going to city tab...")
    try:
        driver.execute_script("switchMenu(1)")
        msg.info("done")
    except Exception:
        msg.warning("failed")
def fillDAG_sanity(jobsub, events, out):
    # check if job is done already
    if isDoneSanity(out):
        msg.warning("Standard mctest sanity checks log files found in " + out + " ... " +
                    msg.BOLD + "skipping standard:fillDAG_sanity\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding mctest sanity checks jobs\n")
    # in parallel mode
    jobsub.add("<parallel>")
    # common options
    options = " --add-event-printout-in-error-log --event-record-print-level 2 --max-num-of-errors-shown 10 " + \
              " --check-energy-momentum-conservation " + \
              " --check-charge-conservation " + \
              " --check-for-pseudoparticles-in-final-state " + \
              " --check-for-off-mass-shell-particles-in-final-state " + \
              " --check-for-num-of-final-state-nucleons-inconsistent-with-target " + \
              " --check-vertex-distribution " + \
              " --check-decayer-consistency"
    # loop over keys and generate gvld_sample_scan command
    for key in nuPDG.iterkeys():
        inputFile = "gntp." + key + ".ghep.root"
        output = "gntp." + key + ".ghep.root.sanity.log"
        cmd = "gvld_sample_scan -f input/" + inputFile + " -o " + output + options
        logFile = "gvld_sample_scan." + key + ".log"
        jobsub.addJob(events + "/" + inputFile, out, logFile, cmd)
    # done
    jobsub.add("</parallel>")
def fillDAGMerge(jobsub, tag, out):
    # check if job is done already
    if isDoneMerge(tag, out):
        msg.warning("Nucleus merged splines found in " + out + " ... " +
                    msg.BOLD + "skipping nua:fillDAGMerge\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding nucleus splines (merge) jobs\n")
    # in serial mode
    jobsub.add("<serial>")
    # common options
    xmlFile = "gxspl-vA-" + tag + ".xml"
    # merge splines job
    cmd = "gspladd -d input -o " + xmlFile
    inputs = out + "/*.xml"
    logFile = "gspladd.log"
    jobsub.addJob(inputs, out, logFile, cmd)
    # convert to root job
    rootFile = "xsec-vA-" + tag + ".root"
    cmd = "gspl2root -p " + nuPDG + " -t " + ",".join(targets) + \
          " -o " + rootFile + " -f input/" + xmlFile
    inputs = out + "/" + xmlFile
    logFile = "gspl2root.log"
    jobsub.addJob(inputs, out, logFile, cmd)
    # done
    jobsub.add("</serial>")
def match(input1, input2, output, key):
    """Check if matching dataset is the same for inputs. Copy to output.

    input1 -- input file
    input2 -- input file
    output -- output file
    key -- dataset to match
    """
    if key not in input1 or key not in input2:
        msg.error("Both files must contain %s" % key)
        sys.exit(1)
    if len(input1[key].shape) != 1 or len(input2[key].shape) != 1:
        msg.error("Matching key should have (N,) shape.")
        sys.exit(1)
    if not np.array_equal(input1[key], input2[key]):
        msg.error("%s in input files are not the same." % key)
        sys.exit(1)
    msg.info("Copying %s" % key)
    input1.copy(key, output)
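# A minimal usage sketch for match(), assuming input1/input2/output are open
# h5py file objects (whose copy(key, dest) method the function relies on);
# the file and dataset names below are hypothetical.
import h5py

with h5py.File("part1.h5", "r") as f1, \
     h5py.File("part2.h5", "r") as f2, \
     h5py.File("merged.h5", "w") as fout:
    match(f1, f2, fout, "eventID")  # exits via sys.exit(1) on any mismatch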
def fillDAG_data(jobsub, tag, date, xsec_a_path, outEvents, outRep, outRepSng):
    # check if job is done already
    if isDoneData(tag, date, outRep, outRepSng):
        msg.warning("xsec validation plots found in " + outRep + " ... " +
                    msg.BOLD + "skipping xsecval:fillDAG_data\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding xsec validation (data) jobs\n")
    # in parallel mode
    jobsub.add("<parallel>")
    # one job for all comparisons without errors
    inFile = "file_list-" + tag + "-" + date + ".xml"
    outFile = "genie_" + tag + "-" + date + "-world_nu_xsec_data_comp-all-withref"
    cmd = "gvld_nu_xsec -g input/" + inFile + " -o " + outFile
    # add the command to dag
    inputs = outRep + "/" + inFile + " " + xsec_a_path + "/xsec-vA-" + tag + ".root " + \
             outEvents + "/*.ghep.root"
    logFile = "gvld_nu_xsec_all.log"
    jobsub.addJob(inputs, outRep, logFile, cmd)
    # job per comparison with error
    for comp in comparisons:
        outFile = "genie_" + tag + "-" + date + "-world_nu_xsec_data_comp-" + comp
        cmd = "gvld_nu_xsec -e -g input/" + inFile + " -o " + outFile + " -c " + comp
        logFile = "gvld_nu_xsec_" + comp + ".log"
        jobsub.addJob(inputs, outRepSng, logFile, cmd)
    # done
    jobsub.add("</parallel>")
def generate_ise_makefile(self):
    p.info("Generating makefile for local synthesis.")
    ise_path = self.__figure_out_ise_path()
    self.make_writer.generate_ise_makefile(top_mod=self.modules_pool.get_top_module(),
                                           ise_path=ise_path)
def fillDAG_GST(jobsub, out, tunes):
    # check if job is done already
    if isDoneGST(out, tunes):
        msg.warning("hadronization test gst files found in " + out + " ... " +
                    msg.BOLD + "skipping hadronization:fillDAG_GST\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding hadronization test (gst) jobs\n")
    # in parallel mode
    jobsub.add("<parallel>")
    # loop over keys and generate gntpc command
    for key in nuPDG.iterkeys():
        inputFile = "gntp." + key + ".ghep.root"
        logFile = "gntpc" + key + ".log"
        cmd = "gntpc -f gst -i input/" + inputFile
        jobsub.addJob(out + "/" + inputFile, out, logFile, cmd, None)
        # same for tunes if specified
        if not (tunes is None):
            for tn in range(len(tunes)):
                cmdTune = "gntpc -f gst -i input/" + tunes[tn] + "-" + inputFile
                jobsub.addJob(out + "/" + tunes[tn] + "/" + tunes[tn] + "-" + inputFile,
                              out + "/" + tunes[tn],
                              tunes[tn] + "-" + logFile,
                              cmdTune, None)
    # done
    jobsub.add("</parallel>")
def generate_remote_synthesis_makefile(self):
    if self.connection.ssh_user is None or self.connection.ssh_server is None:
        p.warning("Connection data is not given. "
                  "Accessing environment variables in the makefile")
    p.info("Generating makefile for remote synthesis.")
    top_mod = self.modules_pool.get_top_module()
    if not os.path.exists(top_mod.fetchto):
        p.warning("There are no modules fetched. "
                  "Are you sure it's correct?")
    ise_path = self.__figure_out_ise_path()
    tcl = self.__search_tcl_file()
    if tcl is None:
        self.__generate_tcl()
        tcl = "run.tcl"
    files = self.modules_pool.build_very_global_file_list()
    sff = SourceFileFactory()
    files.add(sff.new(tcl))
    files.add(sff.new(top_mod.syn_project))
    self.make_writer.generate_remote_synthesis_makefile(files=files,
                                                        name=top_mod.syn_name,
                                                        cwd=os.getcwd(),
                                                        user=self.connection.ssh_user,
                                                        server=self.connection.ssh_server,
                                                        ise_path=ise_path)
def createCmpConfig(tag, date, reportdir):
    msg.info("\tCreate configuration XML for xsec test\n")
    # start GLOBAL CMP CONFIG
    for key in comparisons.iterkeys():
        gcfg = reportdir + "/" + key + "-" + tag + "-" + date + ".xml"
        try:
            os.remove(gcfg)
        except OSError:
            pass
        gxml = open(gcfg, 'w')
        print >>gxml, '<?xml version="1.0" encoding="ISO-8859-1"?>'
        print >>gxml, '<config>'
        print >>gxml, '\t<experiment name="INuXSecWorld">'
        print >>gxml, '\t\t<paths_relative_to_geniecmp_topdir> false </paths_relative_to_geniecmp_topdir>'
        print >>gxml, '\t\t\t<comparison>'
        for i in range(len(comparisons[key]['datafiles'])):
            print >>gxml, '\t\t\t\t<spec>'
            print >>gxml, '\t\t\t\t\t<path2data> data/measurements/vA/intg_xsec/' + comparisons[key]['datafiles'][i] + ' </path2data>'
            print >>gxml, '\t\t\t\t\t<dataclass> ' + comparisons[key]['dataclass'] + ' </dataclass>'
            print >>gxml, '\t\t\t\t\t<predictionclass> ' + comparisons[key]['mcpredictions'][i] + ' </predictionclass>'
            print >>gxml, '\t\t\t\t</spec>'
        gsimfile = "/file_list-" + tag + "-" + date + ".xml"
        print >>gxml, '\t\t\t\t<genie> input' + gsimfile + ' </genie>'
        print >>gxml, '\t\t\t</comparison>'
        # now finish up and close global config
        print >>gxml, '\t</experiment>'
        print >>gxml, '</config>'
        gxml.close()
def __init__(self, config: Config):
    self.config = config
    self.visited = set()
    self.to_be_visited = set()
    self.state_file_path = os.path.join(config.directory, _STATE_FILENAME)
    if os.path.exists(self.state_file_path):
        # there is a state file
        with open(self.state_file_path, "rb") as state_file:
            self.visited, self.to_be_visited = pickle.load(state_file)
        msg.info(
            f"Read state (visited: {len(self.visited)}, to be visited: {len(self.to_be_visited)})"
        )
    else:
        msg.info("Initializing...")
        initial_set = set(["/"])
        for category in config.categories:
            initial_set.update(category.seed)
        # TODO: Add links from the sitemap
        # for sitemap_url in url.filter_valid_links(sitemap_urls, categories, base_url):
        #     to_be_visited.add(sitemap_url)
        for link in initial_set:
            page_content, page_links = url.content_and_links(config.base_url + link)
            if page_content is None:
                msg.warning(f"Unable to reach {link} (no internet connection?)")
                continue
            self.add_links(page_links)
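# The constructor above reads crawl state back with pickle.load; a matching
# writer is implied but not shown. A minimal sketch (the method name
# save_state is hypothetical), assuming the same (visited, to_be_visited)
# tuple layout:
def save_state(self):
    # persist progress in the exact tuple layout the constructor unpickles
    with open(self.state_file_path, "wb") as state_file:
        pickle.dump((self.visited, self.to_be_visited), state_file)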
def fillDAG_GHEP(jobsub, tag, xsec_a_path, out, main_tune):
    # check if job is done already
    if isDoneGHEP(out):
        msg.warning("Standard mctest ghep files found in " + out + " ... " +
                    msg.BOLD + "skipping standard:fillDAG_GHEP\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding standard mctest (ghep) jobs\n")
    # in parallel mode
    jobsub.add("<parallel>")
    # common options
    inputFile = "gxspl-vA-" + tag + ".xml"
    if not (main_tune is None):
        inputFile = main_tune + "-gxspl-vA-" + tag + ".xml"
    options = " --seed " + mcseed + " --cross-sections input/" + inputFile
    if not (main_tune is None):
        options = options + " --tune " + main_tune
    # loop over keys and generate gevgen command
    for key in nuPDG.iterkeys():
        cmd = "gevgen -n " + nEvents[key] + " -e " + energy[key] + " -p " + nuPDG[key] + \
              " -t " + targetPDG[key] + " -r " + key + \
              " --event-generator-list " + generatorList[key] + options
        logFile = "gevgen_" + key + ".log"
        jobsub.addJob(xsec_a_path + "/" + inputFile, out, logFile, cmd, None)
    # done
    jobsub.add("</parallel>")
def getBuild(tag, date, path):
    # get build with defined tag and date and save in path
    buildName = "genie_" + tag + "_buildmaster_" + date
    # check if build already exists
    if os.path.isdir(path + "/" + buildName):
        msg.warning(path + "/" + buildName + " already exists ... " +
                    msg.BOLD + "skipping jenkins:getBuild\n", 1)
        return buildName
    # no build
    tarball = buildName + ".tgz"
    # check if build is available
    if tarball not in getBuildList():
        msg.error("There is no artifact for " + msg.BOLD + tarball + "\n")
        print "Available artifacts:\n"
        for artifact in getBuildList():
            print "\t" + artifact + "\n"
        sys.exit(1)
    # download build
    msg.info("Downloading " + msg.BOLD + tarball)
    urllib.urlretrieve(url + "/artifact/genie_builds/" + tarball, path + "/" + tarball)
    # extract the build
    msg.info("Extracting to " + msg.BOLD + path + "/" + buildName + "\n")
    tarfile.open(path + "/" + tarball, 'r').extractall(path + "/" + buildName)
    # return buildName
    return buildName
def general_goods(driver):
    try:
        msg.info("Going to general goods tab...")
        return driver.find_element_by_css_selector("#submenu1 > a:nth-child(6)")
    except Exception:
        msg.warning("couldn't find general goods tab")
        return False
def city_gate(driver):
    try:
        msg.info("Going to city gate tab...")
        return driver.find_element_by_css_selector("#submenu1 > a:nth-child(17)")
    except Exception:
        msg.warning("couldn't find city gate tab")
        return False
def country_zone(driver, zone_number):
    try:
        msg.info(f"Going to country zone {zone_number}...")
        return driver.find_element_by_css_selector(f"#submenu2 > a:nth-child({zone_number})")
    except Exception:
        msg.warning(f"couldn't find country zone {zone_number}")
        return False
def mercenary(driver):
    try:
        msg.info("Going to mercenary tab...")
        return driver.find_element_by_css_selector("#submenu1 > a:nth-child(8)")
    except Exception:
        msg.warning("couldn't find mercenary tab")
        return False
def maleficia(driver):
    try:
        msg.info("Going to maleficia tab...")
        return driver.find_element_by_css_selector("#submenu1 > a:nth-child(9)")
    except Exception:
        msg.warning("couldn't find maleficia tab")
        return False
def magnus_hermeticus(driver):
    try:
        msg.info("Going to magnus hermeticus tab...")
        return driver.find_element_by_css_selector("#submenu1 > a:nth-child(14)")
    except Exception:
        msg.warning("couldn't find magnus hermeticus tab")
        return False
def alchemist(driver):
    try:
        msg.info("Going to alchemist tab...")
        return driver.find_element_by_css_selector("#submenu1 > a:nth-child(7)")
    except Exception:
        msg.warning("couldn't find alchemist tab")
        return False
def event_zone(driver):
    try:
        msg.info("Going to event zone...")
        return driver.find_element_by_css_selector("#submenu2 > a:last-child")
    except Exception:
        msg.warning("couldn't find event zone")
        return False
def armour_smith(driver):
    try:
        msg.info("Going to armour smith tab...")
        return driver.find_element_by_css_selector("#submenu1 > a:nth-child(5)")
    except Exception:
        msg.warning("couldn't find armour smith tab")
        return False
def auction_house(driver):
    try:
        msg.info("Going to auction house tab...")
        return driver.find_element_by_css_selector("#submenu1 > a:nth-child(15)")
    except Exception:
        msg.warning("couldn't find auction house tab")
        return False
def highscore(driver):
    try:
        msg.info("Going to highscore tab...")
        return driver.find_element_by_css_selector("#mainmenu > a:nth-child(4)")
    except Exception:
        msg.warning("couldn't find highscore tab")
        return False
def overview(driver):
    try:
        msg.info("Going to overview tab...")
        return driver.find_element_by_css_selector("#mainmenu > a:nth-child(1)")
    except Exception:
        msg.warning("couldn't find overview tab")
        return False
def recruiting(driver):
    try:
        msg.info("Going to recruiting tab...")
        return driver.find_element_by_css_selector("#mainmenu > a:nth-child(5)")
    except Exception:
        msg.warning("couldn't find recruiting tab")
        return False
def premium(driver):
    try:
        msg.info("Going to premium tab...")
        return driver.find_element_by_css_selector("#mainmenu > a:nth-child(6)")
    except Exception:
        msg.warning("couldn't find premium tab")
        return False
def work(driver):
    try:
        msg.info("Going to work tab...")
        return driver.find_element_by_css_selector("#submenu1 > a:nth-child(1)")
    except Exception:
        msg.warning("couldn't find work tab")
        return False
def subtab(driver, subtab_number):
    try:
        msg.info(f"Going to subtab {subtab_number}...")
        return driver.find_element_by_css_selector(
            f"ul#mainnav > li > table > tbody > tr > td:nth-child({subtab_number}) > a"
        )
    except Exception:
        msg.warning(f"Couldn't find subtab {subtab_number}")
        return False
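# A short usage sketch for the navigation helpers above, assuming a Selenium 3
# driver (matching the find_element_by_css_selector calls they make). The URL
# is hypothetical; each helper returns a WebElement, or False when the tab
# cannot be found.
from selenium import webdriver

driver = webdriver.Firefox()
driver.get("https://example-game.invalid")  # hypothetical page with the game menus

tab = general_goods(driver)
if tab:
    tab.click()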
def regreInputOK(cmp_app, regretags, regredir, nreqfiles, xsec_id, xsec_subpath):
    if not (regretags is None):
        # need to fetch date stamp for the regression from the leading path
        # assume that regredir is always /leading/path/to/TIMESTAMP/Index
        # NOTE: redirect output of split(...) to a separate array;
        #       otherwise len(...) will be the length of regredir, not the length of the array after splitting
        regredir_tmp = regredir.split("/")
        rdate = regredir_tmp[len(regredir_tmp) - 2]  # i.e. one before the last
        regre_xsec_exists = True
        regre_events_exist = True
        for rt in range(len(regretags)):
            rversion, rtune = regretags[rt].split("/")
            if not xsec_id is None and not xsec_subpath is None:
                # NOTE: this will NOT work for /pnfs on a Jenkins build node because dCache is NOT mounted there
                # --> if rtune + "-xsec-" + xsec_id + "-" + rversion + ".root" not in os.listdir(regredir + "/" + regretags[rt] + xsec_subpath):
                # instead we have to use (python interface to) IFDH tools
                xsec_found = IFDH.findMatchingFiles(regredir + "/" + regretags[rt] + xsec_subpath,
                                                    rtune + "-xsec-" + xsec_id + "-" + rversion + ".root")
                if (len(xsec_found) <= 0):
                    msg.info("\t\tinput XSec for regression does NOT exist: " + regredir + "/" + regretags[rt] +
                             xsec_subpath + rtune + "-xsec-" + xsec_id + "-" + rversion + ".root ")
                    regre_xsec_exists = False
            # NOTE: this will NOT work for /pnfs on a Jenkins build node because dCache is NOT mounted there
            # --> regfiles = regredir + "/" + regretags[rt] + "/events/" + cmp_app + "/*.ghep.root"
            # --> retcode, nevfiles = commands.getstatusoutput("ls -alF " + regfiles + " | wc -l")
            # --> if ( int(nevfiles) != nreqfiles ):
            # instead we have to use (python interface to) IFDH tools
            evfiles_found = IFDH.findMatchingFiles(regredir + "/" + regretags[rt] + "/events/" + cmp_app + "/",
                                                   "*.ghep.root")
            if (len(evfiles_found) != nreqfiles):
                msg.info("\t\tTune " + rtune + " : incorrect number of event samples for regression: " +
                         str(len(evfiles_found)) + "; it should be: " + str(nreqfiles))
                regre_events_exist = False
        return (regre_xsec_exists and regre_events_exist)
    return False
def clean_modules(self):
    p.info("Removing fetched modules..")
    remove_list = [m for m in self.modules_pool
                   if m.source in ["svn", "git"] and m.isfetched]
    remove_list.reverse()  # we will remove modules in backward order
    if len(remove_list):
        for m in remove_list:
            p.rawprint("\t" + m.url + " [from: " + m.path + "]")
            m.remove_dir_from_disk()
    else:
        p.info("There are no modules to be removed")
def submit(self):
    self.dag.close()
    msg.info("Done with dag file. Ready to submit.\n")
    # check if run is not empty
    if os.stat(self.dagFile).st_size == 0:
        msg.warning("Dag file: " + self.dagFile + " is empty. " +
                    msg.RED + msg.BOLD + "NO JOBS TO RUN!!!\n")
        exit(0)
    # submit dag
    msg.info("Submitting: " + self.dagFile + "\n")
    subprocess.Popen(self.setup + self.subdag, shell=True, executable="/bin/bash")
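# The fillDAG helpers in this suite all drive a small Jobsub interface: add()
# writes a <parallel>/<serial> marker and addJob() records one job. A
# hypothetical, minimal stand-in for illustration only (the real class lives
# in jobsub.py; the job-line format below is an assumption, not the actual one):
class JobsubSketch(object):

    def __init__(self, dagFile):
        self.dagFile = dagFile
        self.dag = open(dagFile, 'w')

    def add(self, tag):
        # section markers such as "<parallel>" ... "</parallel>"
        self.dag.write(tag + "\n")

    def addJob(self, inputs, out, logFile, cmd, regre=None):
        # one line per job: files to stage in, output dir, log file, command
        self.dag.write("JOB in=[%s] out=[%s] log=[%s] cmd=[%s]\n" % (inputs, out, logFile, cmd))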
def fillDAG_cmp(jobsub, tag, date, xsec_a_path, eventdir, reportdir, main_tune, tunes, regretags, regredir):
    # check if job is done already
    if resultsExist(tag, date, reportdir):
        msg.warning("MINERvA comparisons plots found in " + reportdir + " ... " +
                    msg.BOLD + "skipping minerva:fillDAG_cmp\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding MINERvA comparisons (plots) jobs\n")
    # in serial mode
    jobsub.add("<serial>")
    config = "global-minerva-cfg-" + tag + "_" + date + ".xml"
    plotfile = "genie_" + tag + "-minerva.pdf"
    tablechi2 = "genie_" + tag + "-minerva-summary-chi2.txt"
    tableks = "genie_" + tag + "-minerva-summary-KS.txt"
    cmd = "gvld_general_comparison --no-root-output --global-config input/" + config + " -o " + plotfile
    cmd = cmd + " --summary-chi2-table " + tablechi2
    cmd = cmd + " --summary-KS-table " + tableks
    # add the command to dag
    # --> old format --> inputs = reportdir + "/*.xml " + eventdir + "/*.ghep.root "
    inputs = reportdir + "/*.xml " + xsec_a_path + "/xsec-vA-" + tag + ".root " + eventdir + "/*.ghep.root "
    if not (main_tune is None):
        inputs = reportdir + "/*.xml " + xsec_a_path + "/" + main_tune + "-xsec-vA-" + tag + ".root " + \
                 eventdir + "/*.ghep.root "
    if not (tunes is None):
        for tn in range(len(tunes)):
            # --> old format --> inputs = " " + inputs + eventdir + "/" + tunes[tn] + "/*.ghep.root "
            inputs = " " + inputs + xsec_a_path + "/" + tunes[tn] + "/" + tunes[tn] + "-xsec-vA-" + tag + ".root " \
                     + eventdir + "/" + tunes[tn] + "/*.ghep.root "
    logfile = "gvld_general_comparison.log"
    regre = None
    if not (regretags is None):
        # --> now we need XSec --> regreOK = commonFunctions.regreInputOK( "minerva", regretags, regredir, len(data_struct), None, None )
        regreOK = commonFunctions.regreInputOK("minerva", regretags, regredir, len(data_struct), "vA", "/xsec/nuA")
        if regreOK:
            regre = ""
            for rt in range(len(regretags)):
                # NOTE: no need to fetch rtune because we don't get xsec, otherwise it's part of regretags
                # regre = regre + regredir + "/" + regretags[rt] + "/events/minerva/*.ghep.root "
                # NOTE (11/12/19): now we DO NEED to fetch xsec...
                rversion, rtune = regretags[rt].split("/")
                regre = regre + regredir + "/" + regretags[rt] + "/xsec/nuA/" + rtune + "-xsec-vA-" + rversion + ".root "
                regre = regre + regredir + "/" + regretags[rt] + "/events/minerva/*.ghep.root "
        else:
            msg.info("\t\tNO input for regression will be copied over \n")
            regre = None
    jobsub.addJob(inputs, reportdir, logfile, cmd, regre)
    # done
    jobsub.add("</serial>")
def generate_quartus_project(self):
    p.info("Generating/updating Quartus project.")
    if not self.modules_pool.is_everything_fetched():
        p.error("A module remains unfetched. "
                "Fetching must be done prior to makefile generation")
        p.rawprint(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
        quit()
    if os.path.exists(self.top_module.syn_project + ".qsf"):
        self.__update_existing_quartus_project()
    else:
        self.__create_new_quartus_project()
def copy(source, output, keys):
    """Copy selected datasets.

    Keyword arguments:
    source -- input file
    output -- output file
    keys -- datasets to be copied
    """
    for k in keys:
        msg.info("Copying %s" % k)
        source.copy(k, output)
def generate_ise_project(self):
    p.info("Generating/updating ISE project")
    if self.__is_xilinx_screwed():
        p.error("Xilinx environment variable is unset or is wrong.\n"
                "Cannot generate ise project")
        quit()
    if not self.modules_pool.is_everything_fetched():
        p.echo("A module remains unfetched. Fetching must be done prior to makefile generation")
        p.echo(str([str(m) for m in self.modules_pool if not m.isfetched]))
        quit()
    ise = self.__check_ise_version()
    if os.path.exists(self.top_module.syn_project):
        self.__update_existing_ise_project(ise=ise)
    else:
        self.__create_new_ise_project(ise=ise)
def generate_isim_makefile(self):
    # p.info("Generating makefile for simulation.")
    p.info("Generating ISE Simulation (ISim) makefile for simulation.")
    solver = DependencySolver()
    pool = self.modules_pool
    if not pool.is_everything_fetched():
        p.echo("A module remains unfetched. "
               "Fetching must be done prior to makefile generation. Try issuing \"hdlmake2 --fetch\"")
        p.echo(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
        quit()
    top_module = pool.get_top_module()
    flist = pool.build_global_file_list()
    flist_sorted = solver.solve(flist)
    self.make_writer.generate_isim_makefile(flist_sorted, top_module)
def fillDAG_GST(jobsub, out):
    # check if job is done already
    if isDoneGST(out):
        msg.warning("xsec validation gst files found in " + out + " ... " +
                    msg.BOLD + "skipping xsecval:fillDAG_GST\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding xsec validation (gst) jobs\n")
    # in parallel mode
    jobsub.add("<parallel>")
    # loop over keys and generate gntpc command
    for key in nuPDG.iterkeys():
        inputFile = "gntp." + key + ".ghep.root"
        logFile = "gntpc" + key + ".log"
        cmd = "gntpc -f gst -i input/" + inputFile
        jobsub.addJob(out + "/" + inputFile, out, logFile, cmd)
    # done
    jobsub.add("</parallel>")
def copy(source, output, keys):
    """Copy requested datasets.

    Keyword arguments:
    source -- input file
    output -- output file
    keys -- keys to be copied
    """
    for k in keys:
        if k not in source:
            msg.warning("%s requested, but not found." % k)
            continue
        msg.info("Copying %s" % k)
        source.copy(k, output)
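# A minimal usage sketch for copy(), assuming source and output are open h5py
# file objects (whose "in" test and copy(key, dest) method the function uses);
# the file and dataset names below are hypothetical.
import h5py

with h5py.File("all.h5", "r") as src, h5py.File("subset.h5", "w") as dst:
    copy(src, dst, ["eventID", "weights"])  # a missing key only triggers a warning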
def fillDAG_data(jobsub, tag, date, xsec_n_path, outEvents, outRep):
    # check if job is done already
    if isDoneData(tag, date, outRep):
        msg.warning("hadronization test plots found in " + outRep + " ... " +
                    msg.BOLD + "skipping hadronization:fillDAG_data\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding hadronization test (plots) jobs\n")
    # in serial mode
    jobsub.add("<serial>")
    inFile = "file_list-" + tag + "-" + date + ".xml"
    outFile = "genie_" + tag + "-hadronization_test.ps"
    cmd = "gvld_hadronz_test -g input/" + inFile + " -o " + outFile
    # add the command to dag
    inputs = outRep + "/" + inFile + " " + xsec_n_path + "/xsec-vN-" + tag + ".root " + \
             outEvents + "/*.ghep.root"
    logFile = "gvld_hadronz_test.log"
    jobsub.addJob(inputs, outRep, logFile, cmd)
    # done
    jobsub.add("</serial>")
def run(self):
    p.info("Running automatic flow")
    tm = self.top_module
    if not self.modules_pool.is_everything_fetched():
        self.fetch(unfetched_only=True)
    if tm.action == "simulation":
        # Defaults to isim simulator tool
        if global_mod.sim_tool == "isim":
            self.generate_isim_makefile()
        elif global_mod.sim_tool == "vsim":
            self.generate_vsim_makefile()
        else:
            raise RuntimeError("Unrecognized or not specified simulation tool: " + str(global_mod.sim_tool))
        quit()
        # Force declaration of sim_tool variable in Manifest
        # if tm.sim_tool == None:
        #     p.error("sim_tool variable must be defined in the manifest")
        #     quit()
        # Make distinction between isim and vsim simulators
        # if tm.sim_tool == "vsim":
        #     self.generate_vsim_makefile()
        # elif tm.sim_tool == "isim":
        #     self.generate_isim_makefile()
        # else:
        #     raise RuntimeError("Unrecognized sim tool: " + tm.sim_tool)
    elif tm.action == "synthesis":
        if tm.syn_project is None:
            p.error("syn_project variable must be defined in the manifest")
            quit()
        if tm.target.lower() == "xilinx":
            self.generate_ise_project()
            self.generate_ise_makefile()
            self.generate_remote_synthesis_makefile()
        elif tm.target.lower() == "altera":
            self.generate_quartus_project()
            # self.generate_quartus_makefile()
            # self.generate_quartus_remote_synthesis_makefile()
        else:
            raise RuntimeError("Unrecognized target: " + tm.target)
    else:
        p.print_action_help() and quit()
def fillDAGEv(jobsub, tag, xsec_a_path, out):
    # check if job is done already
    if isDoneEv(out):
        msg.warning("Repeatability test events found in " + out + " ... " +
                    msg.BOLD + "skipping reptest:fillDAGEv\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding repeatability test (gevgen) jobs\n")
    # in parallel mode
    jobsub.add("<parallel>")
    # common options
    inputFile = "gxspl-vA-" + tag + ".xml"
    options = " -p 14 -t 1000260560 -e 0.1,50 -f 1/x --seed 123456 --cross-sections input/" + inputFile
    # loop over runs and generate gevgen command
    for run in runs:
        cmd = "gevgen " + options + " -r " + run
        logFile = "gevgen_" + run + ".log"
        jobsub.addJob(xsec_a_path + "/" + inputFile, out, logFile, cmd)
    # done
    jobsub.add("</parallel>")
def fillDAGPart(jobsub, tag, out):
    # check if job is done already
    if isDonePart(out):
        msg.warning("Nucleon splines found in " + out + " ... " +
                    msg.BOLD + "skipping nun:fillDAGPart\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding nucleon splines (part) jobs\n")
    # in parallel mode
    jobsub.add("<parallel>")
    # common options
    inputs = "none"
    # loop over keys and generate proper command
    for key in nuPDG.iterkeys():
        cmd = "gmkspl -p " + nuPDG[key] + " -t " + targetPDG[key] + " -n " + nKnots + " -e " + maxEnergy + \
              " -o " + outXML[key] + " --event-generator-list " + generatorList[key]
        logFile = "gmkspl." + outXML[key] + ".log"
        jobsub.addJob(inputs, out, logFile, cmd)
    # done
    jobsub.add("</parallel>")
def fillDAG_GHEP(jobsub, tag, xsec_a_path, out):
    # check if job is done already
    if isDoneGHEP(out):
        msg.warning("xsec validation ghep files found in " + out + " ... " +
                    msg.BOLD + "skipping xsecval:fillDAG_GHEP\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding xsec validation (ghep) jobs\n")
    # in parallel mode
    jobsub.add("<parallel>")
    # common configuration
    inputFile = "gxspl-vA-" + tag + ".xml"
    options = " -n " + nEvents + " -e " + energy + " -f " + flux + " --seed " + mcseed + \
              " --cross-sections input/" + inputFile + " --event-generator-list " + generatorList
    # loop over keys and generate gevgen command
    for key in nuPDG.iterkeys():
        cmd = "gevgen " + options + " -p " + nuPDG[key] + " -t " + targetPDG[key] + " -r " + key
        logFile = "gevgen_" + key + ".log"
        jobsub.addJob(xsec_a_path + "/" + inputFile, out, logFile, cmd)
    # done
    jobsub.add("</parallel>")
def fillDAGPart(jobsub, tag, xsec_n_path, out):
    # check if job is done already
    if isDonePart(tag, out):
        msg.warning("Nucleus splines found in " + out + " ... " +
                    msg.BOLD + "skipping nua:fillDAGPart\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding nucleus splines (part) jobs\n")
    # in parallel mode
    jobsub.add("<parallel>")
    # common options
    inputFile = "gxspl-vN-" + tag + ".xml"
    inputs = xsec_n_path + "/*.xml"
    options = " --input-cross-sections input/" + inputFile
    # loop over targets and generate proper command
    for t in targets:
        outputFile = "gxspl_" + t + ".xml"
        cmd = "gmkspl -p " + nuPDG + " -t " + t + " -n " + nKnots + " -e " + maxEnergy + options + \
              " --output-cross-sections " + outputFile
        logFile = "gxspl_" + t + ".xml.log"
        jobsub.addJob(inputs, out, logFile, cmd)
    # done
    jobsub.add("</parallel>")
def fillDAGTest(jobsub, events, out):
    # check if job is done already
    if isDoneTest(out):
        msg.warning("Repeatability test logs found in " + out + " ... " +
                    msg.BOLD + "skipping reptest:fillDAGTest\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding repeatability test (gvld) jobs\n")
    # in parallel mode
    jobsub.add("<parallel>")
    # common options
    options = " --add-event-printout-in-error-log --max-num-of-errors-shown 10 "
    input1 = "gntp." + runs[0] + ".ghep.root"
    # loop over runs and generate proper command
    for run in runs[1:]:
        input2 = "gntp." + run + ".ghep.root"
        output = "reptest_runs" + runs[0] + "vs" + run + ".log"
        logFile = "gvld_repeatability_test_" + runs[0] + "vs" + run + ".log"
        cmd = "gvld_repeatability_test --first-sample input/" + input1 + \
              " --second-sample input/" + input2 + options + " -o " + output
        jobsub.addJob(events + "/*.ghep.root", out, logFile, cmd)
    # done
    jobsub.add("</parallel>")
def get_data(filename, match, keys):
    """Load file, check if it contains match, update datasets based on command line options.

    Return data dictionary.

    Keyword arguments:
    filename -- input hdf5 file
    match -- common key used to order data
    keys -- user-chosen datasets to save
    """
    data = hdf5.load(filename)
    print "\nThe following datasets were found in %s:\n" % filename
    msg.list_dataset(data)
    check.key_exists(match, data, filename)
    if keys:
        msg.info("Using only: " + keys)
        update_data(data, [k.strip() for k in keys.split(',')], match)
    return data
def fillDAGMerge(jobsub, tag, out):
    # check if job is done already
    if isDoneMerge(tag, out):
        msg.warning("Nucleon merged splines found in " + out + " ... " +
                    msg.BOLD + "skipping nun:fillDAGMerge\n", 1)
        return
    # not done, add jobs to dag
    msg.info("\tAdding nucleon splines (merge) jobs\n")
    # in serial mode
    jobsub.add("<serial>")
    # common options
    xmlFile = "gxspl-vN-" + tag + ".xml"
    # merge splines job
    cmd = "gspladd -d input -o " + xmlFile
    inputs = out + "/*.xml"
    logFile = "gspladd.log"
    jobsub.addJob(inputs, out, logFile, cmd)
    # convert to root job
    rootFile = "xsec-vN-" + tag + ".root"
    cmd = "gspl2root -p 12,-12,14,-14,16,-16 -t 1000010010,1000000010 -o " + rootFile + " -f input/" + xmlFile
    inputs = out + "/" + xmlFile
    logFile = "gspl2root.log"
    jobsub.addJob(inputs, out, logFile, cmd)
    # done
    jobsub.add("</serial>")
def fetch_single_module(self, module):
    import global_mod
    new_modules = []
    p.vprint("Fetching module: " + str(module))
    if module.source == "local":
        p.vprint("ModPath: " + module.path)
    else:
        p.printhr()
        p.info("Fetching module: " + str(module) +
               " [parent: " + str(module.parent) + "]")
        if module.source == "svn":
            p.info("[svn] Fetching to " + module.fetchto)
            self.__fetch_from_svn(module)
        if module.source == "git":
            p.info("[git] Fetching to " + module.fetchto)
            self.__fetch_from_git(module)
    module.parse_manifest()
    new_modules.extend(module.local)
    new_modules.extend(module.svn)
    new_modules.extend(module.git)
    return new_modules
if __name__ == '__main__':
    msg.box("HDF5 MANIPULATOR: COMBINE")
    args = parser()
    data1 = get_data(args.input1, args.match, args.keys1)
    data2 = get_data(args.input2, args.match, args.keys2)
    check.different_keys(data1, data2, args.match)
    data = merge_data(data1, data2, args.match, args.print_warnings, args.show_progress)
    print "\nThe following datasets will be saved in %s:\n" % args.output
    msg.list_dataset(data)
    hdf5.save(args.output, data)
    msg.info("Done")
from jobsub import Jobsub
import parser, jenkins, msg, nun, nua, standard, reptest, xsecval, hadronization
import os, datetime

def initMessage(args):
    print msg.BLUE
    print '*' * 29
    print '*', ' ' * 25, '*'
    print "*\tGENIE Validation", ' ' * 2, '*'
    print '*', ' ' * 25, '*'
    print '*' * 29
    print msg.GREEN
    print "Configuration:\n"
    print "\tGENIE version:\t", args.tag
    print "\tBuild on:\t", args.build_date
    print "\tLocated at:\t", args.builds
    print msg.END

if __name__ == "__main__":
    # parse command line arguments
    args = parser.getArgs(require_output_path=False, require_run_path=False, usage=__doc__)
    # find most recent build if date was not defined
    if args.build_date is None:
        args.build_date = jenkins.findLast(args.tag)
    # print configuration summary
    initMessage(args)
    # get build
    msg.info("Getting GENIE from jenkins...\n")
    args.buildName = jenkins.getBuild(args.tag, args.build_date, args.builds)
# -*- coding: utf-8 -*-
# Description: MySQL netdata python.d module
# Author: Pawel Krupa (paulfantom)

from base import SimpleService
import msg

# import 3rd party library to handle MySQL communication
try:
    import MySQLdb  # https://github.com/PyMySQL/mysqlclient-python
    msg.info("using MySQLdb")
except ImportError:
    try:
        import pymysql as MySQLdb  # https://github.com/PyMySQL/PyMySQL
        msg.info("using pymysql")
    except ImportError:
        msg.error("MySQLdb or PyMySQL module is needed to use mysql.chart.py plugin")
        raise ImportError

# default module values (can be overridden per job in `config`)
# update_every = 3
priority = 90000
retries = 60

# default configuration (overridden by python.d.plugin)
# config = {
#     'local': {
def parse(self):
    options = {}
    ret = {}
    if self.config_file is not None:
        with open(self.config_file, "r") as config_file:
            content = ''.join(config_file.readlines())
    else:
        content = ''
    content = self.arbitrary_code + '\n' + content
    # now the trick:
    # I take the arbitrary code and parse it.
    # The values are not important, but thanks to it I can check
    # if a variable came from the arbitrary code.
    # This is important because in the manifests only a certain group
    # of variables is allowed. In arbitrary code all of them can be used.
    arbitrary_options = {}
    import sys
    try:
        with stdoutIO() as s:
            exec(self.arbitrary_code, arbitrary_options)
        printed = s.getvalue()
        if printed:
            print(printed)
    except SyntaxError as e:
        p.error("Invalid syntax in the arbitrary code:\n" + str(e))
        quit()
    except:
        p.error("Unexpected error while parsing arbitrary code:")
        p.rawprint(str(sys.exc_info()[0]) + ':' + str(sys.exc_info()[1]))
        quit()
    try:
        with stdoutIO() as s:
            exec(content, options)
        printed = s.getvalue()
        if len(printed) > 0:
            p.info("The manifest inside " + self.config_file + " tried to print something:")
            for line in printed.split('\n'):
                p.rawprint("> " + line)
        # print("out:", s.getvalue())
    except SyntaxError as e:
        p.error("Invalid syntax in the manifest file " + self.config_file + ":\n" + str(e))
        quit()
    except:
        p.error("Encountered unexpected error while parsing " + self.config_file)
        p.rawprint(str(sys.exc_info()[0]) + ':' + str(sys.exc_info()[1]))
        quit()
    for opt_name, val in list(options.items()):  # check delivered options
        if opt_name.startswith('__'):
            continue
        if opt_name not in self.__names():
            if opt_name in arbitrary_options:
                continue
            else:
                # if opt_name.startswith("global_"):
                #     continue
                raise NameError("Unrecognized option: " + opt_name)
        opt = self[opt_name]
        if type(val) not in opt.types:
            raise RuntimeError("Given option: " + str(type(val)) + " doesn't match specified types:" + str(opt.types))
        ret[opt_name] = val
        # print("Opt_name ", opt_name)
        if type(val) == type(dict()):
            try:
                for key in val:
                    if key not in self[opt_name].allowed_keys:
                        raise RuntimeError("Encountered unallowed key: " + key + " for option '" + opt_name + "'")
            except AttributeError:  # no allowed_keys member - don't perform any check
                pass
    for opt in self.options:  # set values for not listed items with defaults
        try:
            if opt.name not in ret:
                ret[opt.name] = opt.default
        except AttributeError:  # no default value in the option
            pass
    return ret
    paths['hadrep'] = path + "/reports/hadronization_test"
    # create all directories
    for p in paths.values():
        if not os.path.exists(p):
            os.makedirs(p)
    # return paths dictionary
    return paths

if __name__ == "__main__":
    # parse command line arguments
    args = parser.getArgs()
    # find most recent build if date was not defined
    if args.build_date is None:
        args.build_date = jenkins.findLast(args.tag)
    # print configuration summary
    initMessage(args)
    # get build
    msg.info("Getting GENIE from jenkins...\n")
    args.buildName = jenkins.getBuild(args.tag, args.build_date, args.builds)
    # prepare folder structure for output
    args.paths = preparePaths(args.output + "/" + args.tag + "/" + args.build_date)
    # initialize jobsub
    jobsub = Jobsub(args)
    # fill dag files with jobs
    msg.info("Adding jobs to dag file: " + jobsub.dagFile + "\n")
    # nucleon cross sections
    nun.fillDAG(jobsub, args.tag, args.paths)
    # nucleus cross sections
    nua.fillDAG(jobsub, args.tag, args.paths)
    # standard mctest sanity
    standard.fillDAG(jobsub, args.tag, args.paths)
    # repeatability test
    reptest.fillDAG(jobsub, args.tag, args.paths)
def info(self, *params):
    """Show information message on stderr."""
    msg.info(self.chart_name, *params)
def main():
    usage = "usage: %prog [options]\n"
    usage += "type %prog --help to get help message"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("--manifest-help", action="store_true", dest="manifest_help",
                      help="print manifest file variables description")
    parser.add_option("--make-vsim", dest="make_vsim", action="store_true", default=None,
                      help="generate a ModelSim simulation Makefile")
    parser.add_option("--make-isim", dest="make_isim", action="store_true", default=None,
                      help="generate an ISE Simulation (ISim) simulation Makefile")
    parser.add_option("--make-fetch", dest="make_fetch", action="store_true", default=None,
                      help="generate a makefile for modules' fetching")
    parser.add_option("--make-ise", dest="make_ise", action="store_true", default=None,
                      help="generate a makefile for local ISE synthesis")
    parser.add_option("--make-remote", dest="make_remote", action="store_true", default=None,
                      help="generate a makefile for remote synthesis")
    parser.add_option("-f", "--fetch", action="store_true", dest="fetch", default=None,
                      help="fetch and/or update remote modules listed in Manifest")
    parser.add_option("--clean", action="store_true", dest="clean", default=None,
                      help="remove all modules fetched for this one")
    parser.add_option("--list", action="store_true", dest="list", default=None,
                      help="List all modules together with their files")
    parser.add_option("--list-files", action="store_true", dest="list_files", default=None,
                      help="List all files in the form of a space-separated string")
    parser.add_option("--merge-cores=name", default=None, dest="merge_cores",
                      help="Merges entire synthesizable content of a project into a pair of VHDL/Verilog files")
    parser.add_option("--ise-proj", action="store_true", dest="ise_proj", default=None,
                      help="create/update an ise project including list of project files")
    parser.add_option("--quartus-proj", action="store_true", dest="quartus_proj", default=None,
                      help="create/update a quartus project including list of project files")
    parser.add_option("-l", "--synthesize-locally", dest="local", default=None,
                      action="store_true", help="perform a local synthesis")
    parser.add_option("-r", "--synthesize-remotelly", dest="remote", default=None,
                      action="store_true", help="perform a remote synthesis")
    parser.add_option("--synth-server", dest="synth_server", default=None,
                      help="use given SERVER for remote synthesis", metavar="SERVER")
    parser.add_option("--synth-user", dest="synth_user", default=None,
                      help="use given USER for remote synthesis", metavar="USER")
    parser.add_option("--force-ise", dest="force_ise", default=None, type=float,
                      help="Force given ISE version to be used in synthesis, use 0 for current version",
                      metavar="ISE")
    parser.add_option("--py", dest="arbitrary_code", default="",
                      help="add arbitrary code to all manifests' evaluation")
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
                      default=False, help="verbose mode")
    parser.add_option("--version", dest="print_version", action="store_true",
                      default=False, help="print version id of this Hdlmake build")
    (options, _) = parser.parse_args()
    global_mod.options = options

    # HANDLE PROJECT INDEPENDENT OPTIONS
    if options.manifest_help == True:
        from manifest_parser import ManifestParser
        ManifestParser().help()
        quit()
    if options.print_version == True:
        p.print_version()
        quit()
    # Check later if a simulation tool should have been specified
    if options.make_isim == True:
        global_mod.sim_tool = "isim"
    elif options.make_vsim == True:
        global_mod.sim_tool = "vsim"
    p.info("Simulation tool: " + str(global_mod.sim_tool))

    p.vprint("LoadTopManifest")
    pool = ModulePool()
    pool.new_module(parent=None, url=os.getcwd(), source="local", fetchto=".")
    if pool.get_top_module().manifest is None:
        p.rawprint("No manifest found. At least an empty one is needed")
        p.rawprint("To see some help, type hdlmake --help")
        quit()
    global_mod.top_module = pool.get_top_module()
    global_mod.global_target = global_mod.top_module.target
    ssh = Connection(ssh_user=options.synth_user, ssh_server=options.synth_server)

    from hdlmake_kernel import HdlmakeKernel
    kernel = HdlmakeKernel(modules_pool=pool, connection=ssh, options=options)

    options_kernel_mapping = {
        "fetch": "fetch",
        "make_vsim": "generate_vsim_makefile",
        "make_isim": "generate_isim_makefile",
        "ise_proj": "generate_ise_project",
        "quartus_proj": "generate_quartus_project",
        "local": "run_local_synthesis",
        "remote": "run_remote_synthesis",
        "make_fetch": "generate_fetch_makefile",
        "make_ise": "generate_ise_makefile",
        "make_remote": "generate_remote_synthesis_makefile",
        "list": "list_modules",
        "clean": "clean_modules",
        "merge_cores": "merge_cores"
    }
    sth_chosen = False
    import traceback
    for option, function in options_kernel_mapping.items():
        try:
            is_set = getattr(options, option)
            if is_set:
                sth_chosen = True
                getattr(kernel, function)()
        except Exception, unknown_error:
            p.echo("Oooops! We've got an error. Here is the appropriate info:\n")
            p.print_version()
            print(unknown_error)
            traceback.print_exc()