def check(config):
    """Ensure a TrackerTree.root file exists and return its absolute path.

    If ``.TrackerTree.root`` is not present in ``config.jobDataPath``, it is
    produced by running ``cmsRun`` on the ``trackerTree_cfg.py`` config found
    under ``config.mpspath``, optionally forwarding the ``globalTag`` and
    ``firstRun`` settings.

    Arguments:
    - `config`: mpsvalidate configuration object; must provide
      ``jobDataPath``, ``mpspath``, ``globalTag`` and ``firstRun``.

    Returns the absolute path of the (possibly newly created) file.
    """
    logger = logging.getLogger("mpsvalidate")
    logger.info("Check if TrackerTree.root file exists")
    outputpath = os.path.join(config.jobDataPath, ".TrackerTree.root")

    # create the file only if it does not exist yet
    if not os.path.isfile(outputpath):
        logger.info("TrackerTree.root file does not exist. It will be created now.")
        configpath = os.path.join(config.mpspath, "test", "trackerTree_cfg.py")
        logger.info("Path to the config file: {0}".format(configpath))

        cmd = ["cmsRun", configpath, "outputFile="+outputpath]
        # PEP 8: compare to None with 'is not', not '!='
        if config.globalTag is not None:
            cmd.append("globalTag="+config.globalTag)
        if config.firstRun is not None:
            cmd.append("firstRun="+config.firstRun)
        mps_tools.run_checked(cmd, suppress_stderr=True)

    return os.path.abspath(outputpath)
def check(config):
    """Ensure a TrackerTree.root file exists and return its absolute path.

    If ``.TrackerTree.root`` is not present in ``config.jobDataPath``, it is
    produced by running ``cmsRun`` on the ``trackerTree_cfg.py`` config found
    under ``config.mpspath``, optionally forwarding the ``globalTag`` and
    ``firstRun`` settings.

    Arguments:
    - `config`: mpsvalidate configuration object; must provide
      ``jobDataPath``, ``mpspath``, ``globalTag`` and ``firstRun``.

    Returns the absolute path of the (possibly newly created) file.
    """
    logger = logging.getLogger("mpsvalidate")
    logger.info("Check if TrackerTree.root file exists")
    outputpath = os.path.join(config.jobDataPath, ".TrackerTree.root")

    # create the file only if it does not exist yet
    if not os.path.isfile(outputpath):
        logger.info(
            "TrackerTree.root file does not exist. It will be created now.")
        configpath = os.path.join(config.mpspath, "test", "trackerTree_cfg.py")
        logger.info("Path to the config file: {0}".format(configpath))

        cmd = ["cmsRun", configpath, "outputFile=" + outputpath]
        # PEP 8: compare to None with 'is not', not '!='
        if config.globalTag is not None:
            cmd.append("globalTag=" + config.globalTag)
        if config.firstRun is not None:
            cmd.append("firstRun=" + config.firstRun)
        mps_tools.run_checked(cmd, suppress_stderr=True)

    return os.path.abspath(outputpath)
if fire_htcondor: job_submit_file = write_HTCondor_submit_file_pede( Path, scriptPath, mergeCfg, lib) # make a backup copy of the cfg backupCfgPath = os.path.join(Path, mergeCfg + ".bak") cfgPath = os.path.join(Path, mergeCfg) if not os.path.isfile(backupCfgPath): os.system('cp -p ' + cfgPath + ' ' + backupCfgPath) # retrieve weights configuration with open(os.path.join(Path, ".weights.pkl"), "rb") as f: weight_conf = cPickle.load(f) # blank weights mps_tools.run_checked(["mps_weight.pl", "-c"]) # apply weights for name, weight in weight_conf: print(" ".join(["mps_weight.pl", "-N", name, weight])) mps_tools.run_checked( ["mps_weight.pl", "-N", name, weight]) # rewrite the mergeCfg using only 'OK' jobs (uses first mille-job as baseconfig) inCfgPath = theJobData + '/' + lib.JOBDIR[0] + '/the.py' command = 'mps_merge.py -w -c ' + inCfgPath + ' ' + Path + '/' + mergeCfg + ' ' + Path + ' ' + str( lib.nJobs) os.system(command) # rewrite theScript.sh using inly 'OK' jobs command = 'mps_scriptm.pl -c ' + lib.mergeScript + ' ' + scriptPath + ' ' + Path + ' ' + mergeCfg + ' ' + str(
# Build the mps_splice.py call that creates this job's cfg from the template,
# the per-job split list and the job-number string (theIsn).
# NOTE(review): this fragment relies on loop variables (j, jobdir, theIsn)
# defined earlier in the enclosing loop — not visible in this chunk.
cmd = [
    "mps_splice.py", args.config_template,
    "jobData/{}/theSplit".format(jobdir),
    "jobData/{}/the.py".format(jobdir), theIsn
]
if args.max_events is not None:
    # split the requested events evenly into chunks; each job skips the
    # chunks of the jobs before it (j is 1-based here)
    chunk_size = int(args.max_events / args.n_jobs)
    event_options = ["--skip-events", str(chunk_size * (j - 1))]
    max_events = (
        args.max_events - (args.n_jobs - 1) * chunk_size
        if j == args.n_jobs  # last job gets the remaining events
        else chunk_size)
    event_options.extend(["--max-events", str(max_events)])
    cmd.extend(event_options)
print(" ".join(cmd))
mps_tools.run_checked(cmd)

# create the run script
print(
    "mps_script.pl {} jobData/{}/theScript.sh {}/{} the.py jobData/{}/theSplit {} {} {}"
    .format(args.batch_script, jobdir, theJobData, jobdir, jobdir, theIsn,
            args.mss_dir, lib.mssDirPool))
mps_tools.run_checked([
    "mps_script.pl", args.batch_script,
    "jobData/{}/theScript.sh".format(jobdir),
    os.path.join(theJobData, jobdir), "the.py",
    "jobData/{}/theSplit".format(jobdir), theIsn,
    args.mss_dir, lib.mssDirPool
])

# create the merge job entry. This is always done. Whether it is used depends on the "merge" option.
if fire_htcondor: job_submit_file = write_HTCondor_submit_file(Path, scriptPath, mergeCfg, lib) # make a backup copy of the cfg backupCfgPath = os.path.join(Path, mergeCfg+".bak") cfgPath = os.path.join(Path, mergeCfg) if not os.path.isfile(backupCfgPath): os.system('cp -p '+cfgPath+' '+backupCfgPath) # retrieve weights configuration with open(os.path.join(Path, ".weights.pkl"), "rb") as f: weight_conf = cPickle.load(f) # blank weights mps_tools.run_checked(["mps_weight.pl", "-c"]) # apply weights for name,weight in weight_conf: print(" ".join(["mps_weight.pl", "-N", name, weight])) mps_tools.run_checked(["mps_weight.pl", "-N", name, weight]) # rewrite the mergeCfg using only 'OK' jobs (uses first mille-job as baseconfig) inCfgPath = theJobData+'/'+lib.JOBDIR[0]+'/the.py' command ='mps_merge.py -w -c '+inCfgPath+' '+Path+'/'+mergeCfg+' '+Path+' '+str(lib.nJobs) os.system(command) # rewrite theScript.sh using inly 'OK' jobs command = 'mps_scriptm.pl -c '+lib.mergeScript+' '+scriptPath+' '+Path+' '+mergeCfg+' '+str(lib.nJobs)+' '+lib.mssDir+' '+lib.mssDirPool os.system(command)
theIsn = "{0:03d}".format(i) # create the cfg file cmd = ["mps_splice.py", args.config_template, "jobData/{}/theSplit".format(jobdir), "jobData/{}/the.py".format(jobdir), theIsn] if args.max_events is not None: chunk_size = int(args.max_events/args.n_jobs) event_options = ["--skip-events", str(chunk_size*(j-1))] max_events = (args.max_events - (args.n_jobs-1)*chunk_size if j == args.n_jobs # last job gets the remaining events else chunk_size) event_options.extend(["--max-events", str(max_events)]) cmd.extend(event_options) print " ".join(cmd) mps_tools.run_checked(cmd) # create the run script print "mps_script.pl {} jobData/{}/theScript.sh {}/{} the.py jobData/{}/theSplit {} {} {}".format(args.batch_script, jobdir, theJobData, jobdir, jobdir, theIsn, args.mss_dir, lib.mssDirPool) mps_tools.run_checked(["mps_script.pl", args.batch_script, "jobData/{}/theScript.sh".format(jobdir), os.path.join(theJobData, jobdir), "the.py", "jobData/{}/theSplit".format(jobdir), theIsn, args.mss_dir, lib.mssDirPool]) # create the merge job entry. This is always done. Whether it is used depends on the "merge" option. jobdir = "jobm"; lib.JOBDIR.append(jobdir) lib.JOBID.append("") lib.JOBSTATUS.append("SETUP")