def WriteFillWithJetContainer(part, file, rank, status):
    """Emit C++ filling a jet container for *part*.

    Pseudo-id 21 selects every jet, 5 selects b-tagged jets and 1 selects
    non-b jets. Nothing is written for PT-ranked particles or when the
    container code was already emitted.
    """
    # PT-ranked particles are filled by the dedicated rank filter instead.
    if part.PTrank != 0:
        return
    key = 'P_' + part.name + rank + status
    # Container already emitted earlier in the generation pass?
    if InstanceName.Find(key):
        return
    container = InstanceName.Get(key)
    push = '.push_back(&(event.rec()->jets()[i]));\n'
    # Any jet: unconditional fill, and no other branch applies.
    if part.particle.Find(21):
        file.write(' ' + container + push)
        return
    # b-tagged jets only.
    if part.particle.Find(5):
        file.write(' if (event.rec()->jets()[i].btag()) ' + container + push)
    # Non-b jets only.
    if part.particle.Find(1):
        file.write(' if (!event.rec()->jets()[i].btag()) ' + container + push)
def WriteFillWithMuonContainer(part, file, rank, status):
    """Emit C++ filling a muon container for *part*.

    Internal pseudo-codes: 13 / -13 select negative / positive muons,
    130 / -130 additionally require PHYSICS->IsIsolatedMuon. Writes nothing
    when the container code was already emitted.
    """
    # Skipping if already defined
    if InstanceName.Find('P_' + part.name + rank + status):
        return
    # Getting container name
    container = InstanceName.Get('P_' + part.name + rank + status)
    # Put negative muon (PDG 13)
    if part.particle.Find(13):
        file.write(' if (event.rec()->muons()[i].charge()<0) '+\
            container+'.push_back(&(event.rec()->muons()[i]));\n')
    # Put positive muon (PDG -13)
    if part.particle.Find(-13):
        file.write(' if (event.rec()->muons()[i].charge()>0) '+\
            container+'.push_back(&(event.rec()->muons()[i]));\n')
    # Put isolated negative muon
    # NOTE(review): 130/-130 look like internal codes for isolated muons
    # (grounded in the isolation test below), not the PDG K0_L meaning.
    if part.particle.Find(130):
        file.write(' if ( (event.rec()->muons()[i].charge()<0) &&'+\
            ' PHYSICS->IsIsolatedMuon(event.rec()->muons()[i],event.rec()) ) '+\
            container+'.push_back(&(event.rec()->muons()[i]));\n')
    # Put isolated positive muon
    if part.particle.Find(-130):
        file.write(' if ( (event.rec()->muons()[i].charge()>0) &&'+\
            ' PHYSICS->IsIsolatedMuon(event.rec()->muons()[i],event.rec()) ) '+\
            container+'.push_back(&(event.rec()->muons()[i]));\n')
def WriteParticle(file, part, rank, status, regions, level):
    """Declare the C++ particle container for *part* in the given regions.

    Parton/hadron level gets an MCParticleFormat vector (plus the matching
    identifier function when no PT rank is requested); reconstructed level
    gets a RecParticleFormat vector.
    """
    tag = 'P_' + part.name + rank + status + '_REG_' + '_'.join(regions)
    # Skip containers that were already declared.
    if InstanceName.Find(tag):
        return
    newname = InstanceName.Get(tag)
    if level not in [MA5RunningType.PARTON, MA5RunningType.HADRON]:
        # Reconstructed-level container.
        file.write(" std::vector<const RecParticleFormat*> " +\
            newname + ";\n")
        return
    # Parton/hadron-level container.
    file.write(" std::vector<const MCParticleFormat*> " +\
        newname + ";\n")
    # Emit the identifier function unless a PT rank filter applies.
    if part.PTrank == 0:
        WriteParticle2(file, part, rank, status)
def WriteParticle2(file, part, rank, status):
    """Emit the C++ identifier function ``isP_<name>`` for *part*.

    The generated function rejects null pointers, checks the particle status
    (final/initial/inter state), matches the PDG id against the particle's
    id list, and — when a mother constraint is present — checks the direct
    mother ('<') or walks the full ancestor chain ('<<').
    Recurses first so the mother's identifier function is emitted before it
    is referenced. Writes nothing when already emitted.
    """
    # Skipping if already defined
    if InstanceName.Find(part.name + rank + status):
        return
    # Getting new name
    newname = InstanceName.Get(part.name + rank + status)
    # Do mother before: its isP_ function must exist before we call it below.
    if part.mumType != "":
        WriteParticle2(file, part.mumPart, rank, 'allstate')
    # Identifier function header
    file.write(' bool isP_'+newname+\
        '(const MCParticleFormat* part) const {\n')
    # Null pointer guard
    file.write(' if ( part==0 ) return false;\n')
    # Status requirement (no check emitted for 'allstate')
    if status == "finalstate":
        file.write(
            " if ( !PHYSICS->Id->IsFinalState(part) ) return false;\n")
    elif status == "initialstate":
        file.write(
            " if ( !PHYSICS->Id->IsInitialState(part) ) return false;\n")
    elif status == "interstate":
        file.write(
            " if ( !PHYSICS->Id->IsInterState(part) ) return false;\n")
    # PDG id check: reject unless the id matches one of the listed codes.
    file.write(' if ( ')
    variables = []
    for item in part.particle.ids:
        variables.append('(part->pdgid()!=' + str(item) + ')')
    file.write('&&'.join(variables))
    file.write(' ) return false;\n')
    # Mother constraint
    if part.mumType != "":
        mumname = InstanceName.Get(part.mumPart.name + rank + 'allstate')
        if part.mumType == "<":
            # Direct mother must match.
            file.write(' if ( !isP_' + mumname +\
                '(part->mother1()) ) return false;\n')
        elif part.mumType == "<<":
            # Any ancestor along the mother1 chain must match.
            file.write(' const MCParticleFormat* cand = part;\n')
            file.write(' bool success=false;\n')
            file.write(' while(cand->mother1()!=0)\n')
            file.write(' {\n')
            file.write(' if ( isP_' + mumname +\
                '(cand->mother1()) ) {success=true;break;}\n')
            file.write(' cand = cand->mother1();\n')
            file.write(' }\n')
            file.write(' if (!success) return false;\n')
    # PT rank
    # return
    file.write(' return true; }\n')
def WriteAvoidRedundancies(file, iabs, ihisto, combi1, combi2, main, iterator1, iterator2):
    """Emit a C++ guard that skips combinations pairing an object with itself.

    Fix: the locals ``obs``, ``allmode1`` and ``allmode2`` were computed and
    never used; they have been removed. Only the single-particle/single-
    particle case emits code, exactly as before.
    """
    # Shortcut to the selection entry providing rank/status codes.
    histo = main.selection[iabs]
    # Container names for both legs of the combination.
    containers1 = [InstanceName.Get('P_' + item.name + histo.rank + histo.statuscode)
                   for item in combi1]
    containers2 = [InstanceName.Get('P_' + item.name + histo.rank + histo.statuscode)
                   for item in combi2]
    # Case of one particle/multiparticle on each side: same pointer -> skip.
    if len(combi1) == 1 and len(combi2) == 1:
        file.write(' if ( '+containers1[0]+'['+iterator1+'[0]] == '+\
            containers2[0]+'['+iterator2+'[0]] ) continue;\n')
def WriteJobSum2N(file, iabs, combi1, combi2, main, tagName, tagIndex, condition, iterator1, iterator2):
    """Emit C++ testing a deltaR condition between two particle combinations.

    Single/single combinations get a direct ``dr`` call; larger combinations
    first build summed (or subtracted) four-momenta q1 and q2.
    Fix: the q1 loop emitted ``container[+it[k]]`` — a stray unary '+'
    (typo, inconsistent with the q2 loop); it now emits ``container[it[k]]``.
    """
    cut = main.selection[iabs]
    obs = condition.observable
    # Container names for both combinations.
    containers1 = [InstanceName.Get('P_' + item.name + cut.rank + cut.statuscode)
                   for item in combi1]
    containers2 = [InstanceName.Get('P_' + item.name + cut.rank + cut.statuscode)
                   for item in combi2]
    # Case of one particle/multiparticle: direct dr() between the two objects.
    if len(combi1) == 1 and len(combi2) == 1:
        file.write(' if (')
        file.write(containers1[0]+'['+iterator1+'[0]]->' +\
            'dr('+containers2[0]+'['+iterator2+'[0]])' +\
            OperatorType.convert2cpp(condition.operator) +\
            str(condition.threshold) +\
            ') {'+tagName+'['+str(tagIndex)+']=true; break;}\n')
        return
    # Operation: sum or diff of momenta.
    if obs.combination in [CombinationType.SUMSCALAR,
                           CombinationType.SUMVECTOR,
                           CombinationType.DEFAULT]:
        oper_string = '+'
    else:
        oper_string = '-'
    # Vector sum/diff of the two combinations, then the dr() condition.
    if obs.combination in [CombinationType.DEFAULT,
                           CombinationType.SUMVECTOR,
                           CombinationType.DIFFVECTOR]:
        # First combination -> q1
        file.write(' ParticleBaseFormat q1;\n')
        for ind in range(0, len(combi1)):
            file.write(' q1'+oper_string+'='+\
                containers1[ind]+'['+iterator1+'['+str(ind)+']]->'+\
                'momentum();\n')
        # Second combination -> q2
        file.write(' ParticleBaseFormat q2;\n')
        for ind in range(0, len(combi2)):
            file.write(' q2'+oper_string+'='+\
                containers2[ind]+'['+iterator2+'['+str(ind)+']]->'+\
                'momentum();\n')
        # Result: tag and break when the condition holds.
        file.write(' if (q1.dr(q2)'+\
            OperatorType.convert2cpp(condition.operator) +\
            str(condition.threshold) +\
            ') {'+tagName+'['+str(tagIndex)+']=true; break;}\n')
def WriteCleanContainer(part, file, rank, status):
    """Emit C++ clearing the container of *part* at the start of an event.

    Fix: the result of the second ``InstanceName.Get`` was bound to an
    unused local named ``id`` (shadowing the builtin). The binding is gone;
    the call itself is kept because ``InstanceName.Get`` may register the
    identifier name used by other writers — TODO confirm and drop if inert.
    """
    # Skipping if already defined
    if InstanceName.Find('P_' + part.name + rank + status):
        return
    # Getting container name
    container = InstanceName.Get('P_' + part.name + rank + status)
    # Keep the identifier-name registration side effect (see docstring).
    InstanceName.Get(part.name + rank + status)
    file.write(' ' + container + '.clear();\n')
def WriteJobRank(part, file, rank, status):
    """Emit C++ applying PHYSICS->rankFilter for a PT-ranked particle."""
    # Rank-less particles need no filtering.
    if part.PTrank == 0:
        return
    # Skip when this rank filter was already emitted.
    if InstanceName.Find("PTRANK_" + part.name + rank + status):
        return
    container = InstanceName.Get('P_' + part.name + rank + status)
    for text in (
        ' // Sorting particle collection according to ' + rank + '\n',
        ' // for getting ' + str(part.PTrank) + 'th particle\n',
        ' PHYSICS->rankFilter(' + container + ',' + str(part.PTrank) + ',' + rank + ');\n\n',
    ):
        file.write(text)
def WriteFillContainer(part, file, rank, status):
    """Emit C++ pushing MC particles that pass isP_<name> into the container."""
    key = 'P_' + part.name + rank + status
    # Container already emitted?
    if InstanceName.Find(key):
        return
    container = InstanceName.Get(key)
    # Name of the identifier function produced by WriteParticle2.
    checker = 'isP_' + InstanceName.Get(part.name + rank + status)
    file.write(' if (' + checker + '((&(event.mc()->particles()[i])))) '
               + container + '.push_back(&(event.mc()->particles()[i]));\n')
def Open(self):
    """Open one ROOT file per dataset.

    Returns False (closing any files already opened) when the input
    directory is missing or any file fails to open; True otherwise.
    """
    # Checking input dir
    if not os.path.isdir(self.input_path):
        logging.error("no directory denoted by '" + self.input_path + "' found.")
        return False
    # One reader per dataset, named after the dataset's instance name.
    for dataset in self.main.datasets:
        name = InstanceName.Get(dataset.name)
        path = os.path.normpath(self.input_path + "/root/" + name + ".root")
        self.files.append(RootFileReader(path))
    # Open each reader; on failure, close the ones opened so far.
    for ind, reader in enumerate(self.files):
        if not reader.Open():
            for opened in self.files[:ind]:
                opened.Close()
            return False
    return True
def WriteExecute(file, main, part_list):
    """Emit the C++ ``user::Execute`` function body.

    Sets up the event weight, resets the instance-name registry, then
    delegates container filling and selection code to WriteContainer and
    WriteSelection.
    """
    # Function header
    file.write('bool user::Execute(SampleFormat& sample, '
               'const EventFormat& event)\n{\n')
    # Event weight handling
    file.write(' MAfloat32 __event_weight__ = 1.0;\n')
    file.write(' if (weighted_events_ && event.mc()!=0) '
               '__event_weight__ = event.mc()->weight();\n\n')
    file.write(' if (sample.mc()!=0) sample.mc()->addWeightedEvents(__event_weight__);\n')
    # Fresh name registry for this generation pass.
    InstanceName.Clear()
    # Containers, then the selection steps.
    WriteContainer(file, main, part_list)
    WriteSelection(file, main, part_list)
    # Footer
    file.write(' return true;\n')
    file.write('}\n\n')
def WriteJobSameCombi(file, iabs, icut, combination, redundancies, main, iterator='ind'):
    """Emit C++ deduplicating particle combinations already processed.

    Builds a std::set of the combination's pointers and skips it when an
    identical set is found in ``combis``. No-op for single particles or
    when redundancy checking is disabled.
    """
    if len(combination) == 1 or not redundancies:
        return
    cut = main.selection[iabs]
    containers = [InstanceName.Get('P_' + item.name + cut.rank + cut.statuscode)
                  for item in combination]
    file.write('\n // Checking if consistent combination\n')
    # Pointer type depends on the running level.
    if main.mode in [MA5RunningType.PARTON, MA5RunningType.HADRON]:
        ptr = 'MCParticleFormat'
    else:
        ptr = 'RecParticleFormat'
    file.write(' std::set<const ' + ptr + '*> mycombi;\n')
    file.write(' for (MAuint32 i=0;i<' + str(len(combination)) + ';i++)\n')
    file.write(' {\n')
    for cont in containers:
        file.write(' mycombi.insert(' + cont + '[' + iterator + '[i]]);\n')
    file.write(' }\n')
    file.write(' MAbool matched=false;\n')
    file.write(' for (MAuint32 i=0;i<combis.size();i++)\n')
    file.write(' if (combis[i]==mycombi) {matched=true; break;}\n')
    file.write(' if (matched) continue;\n')
    file.write(' else combis.push_back(mycombi);\n\n')
def CheckFile(self, dataset):
    """Return True when the dataset's MadAnalysis5job.saf file exists.

    Fix: the error message used to report '<safdir>/<name>.saf' while the
    code actually tests '<safdir>/<name>/MadAnalysis5job.saf'; it now
    reports the path that was checked.
    """
    name = InstanceName.Get(dataset.name)
    path = self.safdir + "/" + name + "/MadAnalysis5job.saf"
    if os.path.isfile(path):
        return True
    logging.error("File called '" + path + "' is not found.")
    return False
def Open(self):
    """Open the per-dataset ROOT files, create the output folder and
    build one CutFlow per dataset.

    Returns False (closing files opened so far) on any failure.
    """
    # Checking input dir
    if not os.path.isdir(self.input_path):
        logging.error("no directory denoted by '" + self.input_path + "' found.")
        return False
    # One reader per dataset.
    for dataset in self.main.datasets:
        name = InstanceName.Get(dataset.name)
        self.files.append(RootFileReader(
            os.path.normpath(self.input_path + "/root/" + name + ".root")))
    # Open each reader; clean up on failure.
    for ind, reader in enumerate(self.files):
        if not reader.Open():
            for opened in self.files[:ind]:
                opened.Close()
            return False
    # Creating production directory
    if not FolderWriter.CreateDirectory(self.output_path, True):
        return False
    # One cut flow per dataset.
    for dataset in self.main.datasets:
        self.cutflow.append(CutFlow(dataset,
                                    self.main.selection,
                                    self.main.lumi, self.main))
    return True
def GetPlotNames(self, mode, output_path):
    """Return, per dataset, the merging-plot base paths for DJR plots
    whose 'DJR<i>_total' histogram exists in ``self.detail``."""
    allnames = []
    for idx, dataset in enumerate(self.main.datasets):
        datasetname = InstanceName.Get(dataset.name)
        names = []
        # Probe DJR1..DJR100; only existing '_total' histograms get a name.
        for i in range(1, 101):
            target = "DJR" + str(i) + "_total"
            if any(h.name == target for h in self.detail[idx]):
                names.append(output_path + "/merging_" + datasetname + "_" + str(i))
        allnames.append(names)
    return allnames
def RunJob(self, dataset):
    """Launch the compiled MadAnalysis5job executable on *dataset*.

    Returns the result of ShellCommand.Execute /
    ShellCommand.ExecuteWithMA5Logging.
    """
    name = InstanceName.Get(dataset.name)
    # Ensure the per-dataset output folder exists.
    output_dir = self.path + "/Output/" + name
    if not os.path.isdir(output_dir):
        os.mkdir(output_dir)
    # The job is launched from the Build folder.
    folder = self.path + '/Build/'
    commands = ['./MadAnalysis5job']
    # Weighted events
    if not dataset.weighted_events:
        commands.append('--no_event_weight')
    # Release tag
    commands.append('--ma5_version="'
                    + self.main.archi_info.ma5_version + ';'
                    + self.main.archi_info.ma5_date + '"')
    # Input sample list
    commands.append('../Input/' + name + '.list')
    # Run, optionally redirecting SampleAnalyzer output into the MA5 logger.
    if self.main.redirectSAlogger:
        return ShellCommand.ExecuteWithMA5Logging(commands, folder)
    return ShellCommand.Execute(commands, folder)
def WriteDatasetList(self, dataset):
    """Write the dataset's sample paths, one per line, into Input/<name>.list."""
    name = InstanceName.Get(dataset.name)
    with open(self.path + "/Input/" + name + ".list", "w") as handle:
        for item in dataset:
            handle.write(item)
            handle.write("\n")
def Open(self):
    """Prepare the job directory.

    Fresh runs clear the name registry and (re)create the folder;
    resubmissions only validate the existing job structure.
    """
    if self.resubmit:
        recast = (self.main.recasting.status == "on")
        return self.CheckJobStructure(recast)
    InstanceName.Clear()
    return FolderWriter.CreateDirectory(self.path, question=True)
def WriteFillWithPhotonContainer(part, file, rank, status):
    """Emit C++ filling a photon container for *part* (PDG 22)."""
    # PT-ranked particles are handled by the rank filter, not here.
    if part.PTrank != 0:
        return
    key = 'P_' + part.name + rank + status
    # Container already emitted?
    if InstanceName.Find(key):
        return
    container = InstanceName.Get(key)
    # Photons
    if part.particle.Find(22):
        file.write(' ' + container + '.push_back(&(event.rec()->photons()[i]));\n')
def WriteFillWithMHTContainerMC(part, file, rank, status):
    """Emit C++ filling an MC missing-HT container (internal code 99)."""
    # No fill for PT-ranked particles.
    if part.PTrank != 0:
        return
    key = 'P_' + part.name + rank + status
    # Container already emitted?
    if InstanceName.Find(key):
        return
    container = InstanceName.Get(key)
    # MHT pseudo-particle
    if part.particle.Find(99):
        file.write(' ' + container + '.push_back(&(event.mc()->MHT()));\n')
def CheckRootFile(self, dataset):
    """Return True when the dataset's ROOT file exists; log an error otherwise."""
    name = InstanceName.Get(dataset.name)
    path = self.rootdir + "/" + name + ".root"
    if os.path.isfile(path):
        return True
    logging.error("File called '" + path + "' is not found.")
    return False
def CheckFile(self, dataset):
    """Return True when <safdir>/<name>/<name>.saf exists; log otherwise."""
    name = InstanceName.Get(dataset.name)
    saf_path = self.safdir + "/" + name + "/" + name + ".saf"
    if os.path.isfile(saf_path):
        return True
    logging.getLogger('MA5').error("File called '" + self.safdir + "/" + name +
                                   '/' + name + ".saf' is not found.")
    return False
def WriteFillWithTauContainer(part, file, rank, status):
    """Emit C++ filling a tau container (15: negative, -15: positive)."""
    key = 'P_' + part.name + rank + status
    # Container already emitted?
    if InstanceName.Find(key):
        return
    container = InstanceName.Get(key)
    push = container + '.push_back(&(event.rec()->taus()[i]));\n'
    # Negative tau (PDG 15)
    if part.particle.Find(15):
        file.write(' if (event.rec()->taus()[i].charge()<0) ' + push)
    # Positive tau (PDG -15)
    if part.particle.Find(-15):
        file.write(' if (event.rec()->taus()[i].charge()>0) ' + push)
def ExtractDatasetInfo(self, dataset):
    """Read cross sections, errors and event counts from the dataset's
    ROOT file and store the last entries on *dataset*.

    Fixes: integer comparisons used ``is`` / ``is not`` (identity, which is
    implementation-dependent for ints and a SyntaxWarning since Python 3.8);
    replaced with ``==`` / ``!=``. The three pairwise size checks collapsed
    into one chained equality (the third clause was redundant).
    """
    from ROOT import TFile
    name = InstanceName.Get(dataset.name)
    filename = self.rootdir + "/" + name + ".root"
    rootfile = TFile(filename)
    if rootfile.IsZombie():
        logging.error("file called '" + self.rootdir + "/" + name +
                      ".root is not found")
        return
    # Getting data from ROOT file
    xsections = rootfile.Get("general/xsections")
    if not bool(xsections):
        ErrorMsg_BranchNotFound('general/xsections', filename)
        return
    xerrors = rootfile.Get("general/xerrors")
    if not bool(xerrors):
        ErrorMsg_BranchNotFound('general/xerrors', filename)
        return
    nevents = rootfile.Get("general/nevents")
    if not bool(nevents):
        ErrorMsg_BranchNotFound('general/nevents', filename)
        return
    # Empty-branch checks
    if xsections.GetNoElements() == 0:
        ErrorMsg_BranchEmpty("branch 'general/xsections' is empty")
        return
    if xerrors.GetNoElements() == 0:
        ErrorMsg_BranchEmpty("branch 'general/xerrors' is empty")
        return
    if nevents.GetNoElements() == 0:
        ErrorMsg_BranchEmpty("branch 'general/nevents' is empty")
        return
    # All three branches must have the same length.
    if not (xsections.GetNoElements() == xerrors.GetNoElements()
            == nevents.GetNoElements()):
        logging.error("the 'general' branches have different size "
                      "in the file '" + filename + "'")
        return
    # One entry per data file plus the combined entry.
    if xsections.GetNoElements() != (len(dataset) + 1):
        logging.error("number of data files do not correspond in the file '"
                      + filename + "'")
        return
    # Extracting data: the last entry is the combined measurement.
    dataset.measured_xsection = xsections[xsections.GetNoElements() - 1]
    dataset.measured_xerror = xerrors[xerrors.GetNoElements() - 1]
    dataset.measured_n = int(nevents[nevents.GetNoElements() - 1])
def WriteDatasetList(self, dataset):
    """Write the dataset's sample paths into lists/<name>.list,
    creating the lists folder if needed."""
    lists_dir = self.path + "/lists"
    if not os.path.isdir(lists_dir):
        os.mkdir(lists_dir)
    name = InstanceName.Get(dataset.name)
    with open(lists_dir + "/" + name + ".list", "w") as handle:
        for item in dataset:
            handle.write(item)
            handle.write("\n")
def WriteJobRank(part, file, rank, status, regions):
    """Emit C++ applying SORTER->rankFilter so the region container holds
    only the PT-ranked particle, taken from the PT-ordered source container."""
    # Rank-less particles need no filtering.
    if part.PTrank == 0:
        return
    # Skip when this rank filter was already emitted.
    if InstanceName.Find("PTRANK_" + part.name + rank + status):
        return
    suffix = '_REG_' + '_'.join(regions)
    container = InstanceName.Get('P_' + part.name + rank + status + suffix)
    # Source container: same particle without the PT rank, PT-ordered.
    refpart = copy.copy(part)
    refpart.PTrank = 0
    newcontainer = InstanceName.Get('P_' + refpart.name + 'PTordering' + status + suffix)
    file.write(' // Sorting particle collection according to ' + rank + '\n')
    file.write(' // for getting ' + str(part.PTrank) + 'th particle\n')
    file.write(' ' + container + '=SORTER->rankFilter(' + newcontainer + ','
               + str(part.PTrank) + ',' + rank + ');\n\n')
def RunJob(self, dataset):
    """Run SampleAnalyzer on *dataset* from the root working folder.

    Fix: the ``os.system`` exit status was bound to an unused local and
    silently discarded; a failure is now logged. The function still always
    returns True to preserve the original contract for callers.
    """
    if not os.path.isdir(self.path + "/root"):
        os.mkdir(self.path + "/root")
    name = InstanceName.Get(dataset.name)
    status = os.system('cd '
                       + self.path + '/root;'
                       + ' ../SampleAnalyzer/'
                       + 'SampleAnalyzer --analysis=MadAnalysis5job ../lists/'
                       + name + '.list')
    # os.system returns the shell exit status; non-zero means failure.
    if status != 0:
        logging.error("SampleAnalyzer failed for dataset '" + dataset.name + "'")
    return True
def WriteFillContainer(part, file, rank, status, regions):
    """Emit C++ pushing MC particles passing isP_<name> into the
    region-specific container."""
    # PT-ranked particles are filled by the rank filter instead.
    if part.PTrank != 0:
        return
    key = 'P_' + part.name + rank + status + '_REG_' + '_'.join(regions)
    # Container already emitted?
    if InstanceName.Find(key):
        return
    container = InstanceName.Get(key)
    # Identifier function name emitted by WriteParticle2.
    checker = 'isP_' + InstanceName.Get(part.name + rank + status)
    file.write(' if (' + checker + '((&(event.mc()->particles()[i])))) '
               + container + '.push_back(&(event.mc()->particles()[i]));\n')
def DrawDatasetPlots(self, histos, dataset, histo_path, modes, output_paths, rootfiles):
    """Draw the DJR merging plots for one dataset.

    For each DJR index i (stops at the first missing 'DJR<i>_total'),
    gathers the global plot plus the per-njet plots 'DJR<i>_<j>jet'
    (stops at the first missing j), appends the .C macro base path to
    *rootfiles*, and renders one output file per requested report mode.
    """
    # Loop over DJR
    for i in range(0, 100):
        DJRplots = []
        # Looking for global plot
        name = "DJR" + str(i + 1) + "_total"
        test = False
        for h in range(len(histos)):
            if histos[h].name == name:
                DJRplots.append(histos[h])
                test = True
                break
        # Global plot not found ? -> no higher DJR exists; stop entirely.
        if not test:
            break
        # Loop over njets
        for j in range(0, 100):
            # Looking for njet plot
            name = "DJR" + str(i + 1) + "_" + str(j) + "jet"
            test = False
            for h in range(len(histos)):
                if histos[h].name == name:
                    DJRplots.append(histos[h])
                    test = True
                    break
            # njet plot not found ? -> stop collecting jets for this DJR.
            if not test:
                break
        # Save the canvas in the report format
        datasetname = InstanceName.Get(dataset.name)
        index = i + 1
        filenameC = histo_path + "/merging_" +\
            datasetname + "_" + str(index)
        rootfiles.append(filenameC)
        filenameC += '.C'
        # One output file per report mode/path pair.
        output_files = []
        for iout in range(0, len(output_paths)):
            output_files.append(output_paths[iout] +\
                "/merging_" +\
                datasetname + "_" + str(index) + "." +\
                ReportFormatType.convert2filetype(modes[iout]))
        # Drawing
        self.DrawROOT(DJRplots, dataset, filenameC, output_files, index)
def WriteParticle(file, part, rank, status, level):
    """Declare the C++ particle container for *part*.

    Parton/hadron level declares an MCParticleFormat vector and also emits
    the matching identifier function; reconstructed level declares a
    RecParticleFormat vector only.
    """
    key = 'P_' + part.name + rank + status
    # Skip containers that were already declared.
    if InstanceName.Find(key):
        return
    newname = InstanceName.Get(key)
    if level not in [MA5RunningType.PARTON, MA5RunningType.HADRON]:
        # Reconstructed-level container.
        file.write(" std::vector<const RecParticleFormat*> " +\
            newname + ";\n")
        return
    # Parton/hadron-level container plus its identifier function.
    file.write(" std::vector<const MCParticleFormat*> " +\
        newname + ";\n")
    WriteParticle2(file, part, rank, status)