def main():
    """Entry point: read input files and process count from argv, then run the analysis.

    argv[1] is a comma-separated list of input files (grid specific);
    argv[2] (optional) is the number of worker processes.
    """
    # Try to handle <vector<vector float> > structures in ROOT
    ROOT.gROOT.ProcessLine('.L Loader.C+')
    # input (grid specific list: file1,file2,file3,etc....)
    inputFiles = sys.argv[1].split(',')
    if len(sys.argv) > 2:
        # Parse once instead of calling int() three times on the same argument.
        requested = int(sys.argv[2])
        if requested > 0:
            jobs = requested
        else:
            print("[WARNING] Illegal number of processes (must be larger than 1 and less than the number of input files) specified, using single threaded analysis.")
            jobs = 1
    else:
        print("[INFORMATION] No number of processes specified using available on system, using all available CPUs.")
        # 0 tells the JobHandler to use all available CPUs.
        jobs = 0
    printBanner({"jobs": jobs, "ninputfiles": len(inputFiles)})
    runner = JobHandler(output=OutputFileName, DisplayResult=InteractivePlots, SaveResult=SaveToFile)
    runner.addJob(analysis)
    for filename in inputFiles:
        runner.addInputFile(filename)
    runner.executeJobs(jobs)
    print(90 * "=")
def localStart(self, job):
    """Run *job* locally over the input files configured on this instance.

    Reads self.filestring (comma-separated input file list) and
    self.processors (requested process count, <= 0 means "use all CPUs").
    """
    # Try to handle <vector<vector float> > structures in ROOT
    ROOT.gROOT.ProcessLine(".L GluinoAnalysis/Loader.C+")
    # input (grid specific list: file1,file2,file3,etc....)
    inputFiles = self.filestring.split(",")
    if self.processors > 0:
        jobs = self.processors
    else:
        print("[INFORMATION] No number of processes specified using available on system, using all available CPUs.")
        # Sentinel: resolved to the real CPU count once the handler exists.
        jobs = -1
    # Initialize the Job handler
    runner = JobHandler(
        output=self.OutputFileName,
        DisplayResult=self.InteractivePlots,
        SaveResult=self.SaveToFile,
        joboptions=self.joboptions,
    )
    if jobs == -1:
        # Print a better number than zero in the banner below.
        jobs = runner.getCPUs()
    self.printBanner({"jobs": jobs, "ninputfiles": len(inputFiles)})
    # Add the analysis job to the queue (which is redundant at the moment... [remove?])
    runner.addJob(job)
    for filename in inputFiles:
        runner.addInputFile(filename)
    # runner.prepareSubmission()
    # Execute!
    runner.executeJobs(jobs)
    print(90 * "=")
def main(self, job, config):
    """Run *job* on a worker node and return its result.

    *config* is a dict expected to carry "joboptions", "input" (list of
    input file names) and "maxcores" — TODO confirm full schema with caller.
    """
    # Try to handle <vector<vector float> > structures in ROOT
    ROOT.gROOT.ProcessLine('.L workdir/GluinoAnalysis/Loader.C+')
    # Initialize the Job handler — no output file, plots or saving on the worker side.
    runner = JobHandler(output="", DisplayResult=False, SaveResult=False, joboptions=config["joboptions"])
    # Add the analysis job to the queue (which is redundant at the moment... [remove?])
    runner.addJob(job)
    for filename in config["input"]:
        runner.addInputFile(filename)
    print("[INFO] Ready to run GluinoAnalysisWorker")
    # Execute!
    out = runner.executeJobs(config["maxcores"])
    print("[INFO] Job done, sending output to client...")
    return out
from job import Job  # NOTE(review): imported but unused here — kept in case other code relies on it
from jobhandler import JobHandler
import logging
from logging import config as logconfig

import yaml

# Configure the logging framework from a YAML dict-config file.
with open('log.yaml', 'r') as f:
    # safe_load accepts a stream directly; no need to read the whole file first.
    config = yaml.safe_load(f)
logconfig.dictConfig(config)

# Build the job handler from its YAML description and run all jobs.
jh = JobHandler.fromYaml("jobs2.yaml")
jh.run()