def __apply_configuration__(self):
    """Apply the Tesla configuration.

    Sets global compression and application properties, selects the
    online/offline raw-event handling, configures output, monitors and
    (optionally) kills a pre-existing Turbo tree, then appends the Tesla
    sequence to the application TopAlg.
    """
    #GaudiKernel.ProcessJobOptions.PrintOff()
    from GaudiKernel.Configurable import ConfigurableGeneric as RFileCnv
    RFileCnv('RFileCnv').GlobalCompression = "LZMA:6"
    ############## Set other properties ###########
    self._safeSet(LHCbApp(), ['EvtMax', 'SkipEvents', 'Simulation',
                              'DataType', 'CondDBtag', 'DDDBtag'])
    ApplicationMgr().AppName = "Tesla, utilising DaVinci"
    #
    # BUGFIX: was `is "Online"`, an identity comparison with a string
    # literal that only worked through CPython string interning; compare
    # by value instead.
    if self.getProp('Mode') == "Online":
        self.setProp('WriteFSR', True)
        self._configureForOnline()
    else:
        DecodeRawEvent().DataOnDemand = True
        RecombineRawEvent()
        # was `== True`; plain truthiness is equivalent and idiomatic
        if self.getProp('Simulation'):
            self._unpackMC()
    #
    self._configureOutput()
    #
    from Configurables import EventSelector
    EventSelector().PrintFreq = 1000
    # Add monitors if they are there
    if len(self.getProp('Monitors')) > 0:
        self._configureHistos()
    if self.getProp('KillInputTurbo'):
        # Remove any pre-existing Turbo tree before (re)running Tesla
        enk = EventNodeKiller('KillTurbo')
        enk.Nodes = ["Turbo"]
        ApplicationMgr().TopAlg.insert(0, enk.getFullName())
    ApplicationMgr().TopAlg += [self.teslaSeq]
def stagelocally(): print "Warning: staging files locally" # first create the target directory, if it doesn't exist yet import re, os, getpass username = getpass.getuser() targetdir = '/tmp/%s/stagedfiles/' % username if os.path.isdir('/pool/spool/'): targetdir = '/pool/spool/%s/stagedfiles/' % username if not os.path.isdir(targetdir): os.makedirs(targetdir) # remove any old files in that directory os.system("find %s -atime +7 -exec rm '{}' \;" % targetdir) from Configurables import EventSelector eventinput = EventSelector().Input neweventinput = [] for i in eventinput: # need some pattern matching ... x = re.match("DATAFILE='([^']*)'", i) if not x: trouble #for j in x.groups() : print j fullpath = x.group(1) #print 'fullfilename:', fullpath #extract the filename itself, without the path filename = fullpath[fullpath.rfind("/") + 1:] targetfilename = targetdir + filename issuccess = True if not os.path.exists(targetfilename): #if filename contains castor, use rfcp if fullpath.find('castor') >= 0: castorfile = fullpath[fullpath.find("/"):] print "copying file %s from castor\n" % castorfile os.system('rfcp %s %s' % (castorfile, targetfilename)) elif fullpath.find('LFN:') >= 0: lfnfile = fullpath.replace("LFN", "lfn") print "copying file %s from grid\n" % lfnfile os.system("lcg-cp % s %s " % (lfnfile, targetfilename)) else: print 'cannot cache file locally: %s\n' % fullpath issuccess = False # now fix the input file if issuccess: i = i.replace(fullpath, targetfilename) neweventinput.append(i) EventSelector().Input = neweventinput
# ../options/Moore.py
#
# Job options configuring Moore for the September 2012 physics
# threshold settings, reading the default 2012 raw test file.
import Gaudi.Configuration
from Moore.Configuration import Moore
# if you want to generate a configuration, uncomment the following lines:
#Moore().generateConfig = True
#Moore().configLabel = 'Default'
#Moore().ThresholdSettings = 'Commissioning_PassThrough'
#Moore().configLabel = 'ODINRandom acc=0, TELL1Error acc=1'
Moore().ThresholdSettings = 'Physics_September2012'
Moore().EvtMax = 1000

from Configurables import EventSelector
EventSelector().PrintFreq = 100
Moore().ForceSingleL0Configuration = False

from PRConfig.TestFileDB import test_file_db
# Renamed from `input` to avoid shadowing the builtin.
input_data = test_file_db['2012_raw_default']
#input_data.filenames = [ '/data/bfys/graven/0x46/'+f.split('/')[-1] for f in input_data.filenames ]# what? Gerhard, don't add your own private file locations into a release thanks!
input_data.run(configurable=Moore())
Moore().inputFiles = input_data.filenames

Moore().Split = ''

##only for experts!
from Configurables import MooreExpert
#turn of TISTOS, "Gerhard's Sledgehammer"
MooreExpert().Hlt2Independent = True
################################################################################ seq.Members += [tpl] DaVinci().appendToMainSequence([seq]) ################################################################################ # DaVinci # uDST config at the top DaVinci().Lumi = True DaVinci().DDDBtag = 'dddb-20130111' DaVinci().CondDBtag = 'cond-20130114' DaVinci().Simulation = simulation DaVinci().EvtMax = -1 DaVinci().SkipEvents = 0 DaVinci().DataType = '2012' DaVinci().PrintFreq = 1000 DaVinci().TupleFile = 'bdt_d_bbdt.root' ################################################################################ ################################################################################ # Run on some eos data located at CERN ################################################################################ from Configurables import EventSelector DaVinci().EvtMax = 10000 eos = 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/LHCb/Collision12/BHADRON.MDST' EventSelector().Input = [ 'PFN:{}/00020198/0000/00020198_00007143_1.bhadron.mdst'.format(eos), 'PFN:{}/00020198/0000/00020198_00007352_1.bhadron.mdst'.format(eos), 'PFN:{}/00020738/0000/00020738_00003969_1.bhadron.mdst'.format(eos), 'PFN:{}/00020456/0000/00020456_00001811_1.bhadron.mdst'.format(eos) ] ################################################################################
from Gaudi.Configuration import importOptions ## loop over command line arguments for a in sys.argv[1:] : fname = str(a) fname = os.path.expandvars ( fname ) fname = os.path.expandvars ( fname ) fname = os.path.expandvars ( fname ) fname = os.path.expanduser ( fname ) if not os.path.exists ( fname ) : continue if not os.path.isfile ( fname ) : continue ## import options from configuration files try : importOptions( fname ) except : pass ## from Configurables import EventSelector evtsel = EventSelector() flist = evtsel.Input print '## number of Input files %s' % len(flist) print flist # ============================================================================= # The END # =============================================================================
def main():
    """Configure Moore from command-line options, instantiate the
    application manager, and print the names of all configured Hlt1 and
    Hlt2 lines."""
    # Setup the option parser
    usage = "usage: %prog [options] inputfile <inputfile>"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("-d", "--datatype", action="store", dest="DataType",
                      default="2015", help="DataType to run on.")
    parser.add_option("-n", "--evtmax", type="int", action="store",
                      dest="EvtMax", default=10000,
                      help="Number of events to run")
    parser.add_option("--dddbtag", action="store", dest="DDDBtag",
                      default='MC09-20090602', help="DDDBTag to use")
    parser.add_option("--conddbtag", action="store", dest="CondDBtag",
                      default='sim-20090402-vc-md100',
                      help="CondDBtag to use")
    parser.add_option("--settings", action="store", dest="ThresholdSettings",
                      default='Physics_25ns_September2015',
                      help="ThresholdSettings to use")
    parser.add_option("-s", "--simulation", action="store_true",
                      dest="Simulation", default=False,
                      help="Run on simulated data")
    parser.add_option("--dbsnapshot", action="store_true",
                      dest="UseDBSnapshot", default=False,
                      help="Use a DB snapshot")
    parser.add_option("-v", "--verbose", action="store_true", dest="Verbose",
                      default=False, help="Verbose output")
    parser.add_option("--rch", action="store_true", dest="RunChangeHandler",
                      default=False, help="Use the RunChangeHandler")

    # Parse the arguments
    (options, args) = parser.parse_args()

    # Make sure there is data to run on
    # NOTE(review): no actual check is performed here despite the comment.

    # Put the options into the Moore configurable
    Moore().ThresholdSettings = options.ThresholdSettings
    #Moore().OutputLevel="VERBOSE"
    Moore().EvtMax = options.EvtMax
    #Moore().UseDBSnapshot = options.UseDBSnapshot # DEPRECATED
    from Configurables import CondDB
    CondDB().UseDBSnapshot = options.UseDBSnapshot
    #
    Moore().DDDBtag = options.DDDBtag
    Moore().CondDBtag = options.CondDBtag
    Moore().Simulation = options.Simulation
    Moore().DataType = options.DataType
    Moore().inputFiles = args
    EventSelector().PrintFreq = 100

    # Instanciate the AppMgr to get the Hlt lines from their Sequences
    appMgr = AppMgr()
    hlt1Seq = Sequence("Hlt1")
    hlt1Lines = set()
    for m in hlt1Seq.Members:
        hlt1Lines.add(m.name())
    hlt2Seq = Sequence("Hlt2")
    hlt2Lines = set()
    for m in hlt2Seq.Members:
        hlt2Lines.add(m.name())
    # The AppMgr is no longer needed
    appMgr.exit()
    print "HLT1LINES"
    for line in hlt1Lines:
        print line
    print "HLT2LINES"
    for line in hlt2Lines:
        print line
, "TupleToolPid" ]  # tail of a ToolList assignment begun before this chunk
decaytuple2.Decay = "[pi+]cc"
#######################################################################
#
# Configure the application
#
# remove the welcome message
from Configurables import LoKiSvc
LoKiSvc().Welcome = False

from Configurables import EventSelector
EventSelector().PrintFreq = 500

from Configurables import DaVinci
DaVinci().TupleFile = "SmrdPions.root"
DaVinci().EvtMax = 10
DaVinci().HistogramFile = "DVHistos.root"
DaVinci().DataType = '2010'
DaVinci().Simulation = True
# MC Options
DaVinci().DDDBtag = 'head-20101206'
DaVinci().CondDBtag = "sim-20101210-vc-md100"
# Enable specialized branch caching: root.CacheBranches = [ '_Event.*', '_Event_R.*', '_Event_Rec.*', '_Event_Rec_R.*', '_Event_Rec_Header.*' '_Event_Rec_Header_R.*' ] root.VetoBranches = [ # '_Event_pRec_*', # '_Event_Semileptonic_*' ] EventPersistencySvc().CnvServices.append(root) EventSelector().Input = [ "DATA='PFN:"+input_file+"' SVC='Gaudi::RootEvtSelector'" ] EventSelector().PrintFreq = 1000 MessageSvc().OutputLevel = 3 def update(): statistic = {} test = open('/proc/self/io') io = test.readlines() test.close() for l in io: temp = l.split(':') statistic[temp[0]]=int(temp[1]) statistic['time']=time.time() return statistic def printDelta(s0,s1):
def main():
    """Run Moore over MDF input, count per-line and per-stream HLT
    decisions, optionally write a decision TTree, and print rate
    summary tables with a pass/fail verdict against hard-coded maximum
    Hlt1/Hlt2 rates."""
    parser = optparse.OptionParser(usage="usage: %prog [options]")
    parser.add_option("--debug", action="store_true", dest="debug",
                      default=False, help="Debug?")
    parser.add_option("-d", "--datatype", action="store", dest="DataType",
                      default="2015", help="DataType to run on.")
    parser.add_option("-n", "--evtmax", type="int", action="store",
                      dest="EvtMax", default=10000,
                      help="Number of events to run")
    parser.add_option("--dddbtag", action="store", dest="DDDBtag",
                      default='dddb-20150526', help="DDDBTag to use")
    parser.add_option("--conddbtag", action="store", dest="CondDBtag",
                      default='cond-20150617', help="CondDBtag to use")
    parser.add_option("--settings", action="store", dest="ThresholdSettings",
                      default='Physics_25ns_September2015',
                      help="ThresholdSettings to use")
    parser.add_option("--TCK", action="store", dest="TCK", default='',
                      help="HLT TCK. If unspecified,then run from settings")
    parser.add_option("--simulation", action="store_true", dest="Simulation",
                      default=False, help="Run on simulated data")
    parser.add_option("--input_rate", action="store", dest="input_rate",
                      default=1.e6, help="Input rate from L0 in Hz")
    parser.add_option("--tuplefile", action="store", dest="tuplefile",
                      default="", help="Output root file")
    parser.add_option("--inputdata", action="store", dest="inputdata",
                      default="Physics1600TestNode", help="Name of inputdata")
    # Parse the arguments
    (options, args) = parser.parse_args()

    #### configured from the arguments
    # NOTE(review): when --input_rate is given on the command line this is
    # a *string* (no type= on the option); only the float default is safe
    # in the `1.e-3 * input_rate` arithmetic below — TODO confirm.
    input_rate = options.input_rate
    Moore().EvtMax = options.EvtMax
    Moore().DDDBtag = options.DDDBtag
    Moore().CondDBtag = options.CondDBtag
    Moore().Simulation = options.Simulation
    Moore().DataType = options.DataType
    if options.TCK != "":
        Moore().UseTCK = True
        Moore().InitialTCK = options.TCK
    else:
        Moore().ThresholdSettings = options.ThresholdSettings
        Moore().UseTCK = False

    #### hard coded here
    Moore().ForceSingleL0Configuration = False
    Moore().OutputLevel = 6
    Moore().RemoveInputHltRawBanks = True
    # NOTE(review): overrides the --simulation option set just above.
    Moore().Simulation = False
    Moore().Split = ""
    Moore().CheckOdin = False
    from Configurables import CondDB
    CondDB().IgnoreHeartBeat = True
    CondDB().EnableRunChangeHandler = True
    EventSelector().PrintFreq = 100

    #### configure raw data
    importOptions('$STDOPTS/DecodeRawEvent.py')
    EventPersistencySvc().CnvServices.append('LHCb::RawDataCnvSvc')
    from Configurables import DataOnDemandSvc
    DataOnDemandSvc().AlgMap['Hlt/DecReports'] = "HltDecReportsDecoder"
    from GaudiConf import IOHelper
    if options.inputdata == "Physics1600":
        IOHelper("MDF").inputFiles([
            "mdf:root://eoslhcb.cern.ch//eos/lhcb/wg/HLT/BWdivData/Run164440_L0Filtered_0x00A2_Mika/2015NB_25ns_0x00A2_0.mdf"
        ])
    elif options.inputdata == "Physics1600TestNode":
        IOHelper("MDF").inputFiles([
            "/localdisk/bw_division/run164440_L0Filtered_0x00A2_Mika/2015NB_25ns_0x00A2_0.mdf",
            "/localdisk/bw_division/run164440_L0Filtered_0x00A2_Mika/2015NB_25ns_0x00A2_1.mdf",
            "/localdisk/bw_division/run164440_L0Filtered_0x00A2_Mika/2015NB_25ns_0x00A2_2.mdf",
            "/localdisk/bw_division/run164440_L0Filtered_0x00A2_Mika/2015NB_25ns_0x00A2_3.mdf",
            "/localdisk/bw_division/run164440_L0Filtered_0x00A2_Mika/2015NB_25ns_0x00A2_4.mdf"
        ])

    ### getting ready for the event loop
    gaudi = AppMgr(outputlevel=4)
    gaudi.ExtSvc += ['ToolSvc']
    gaudi.ExtSvc.append('DataOnDemandSvc')
    gaudi.initialize()

    ### get the list of active line names
    Lines = {}
    for level in ["Hlt1", "Hlt2"]:
        Lines[level] = set()
        for m in Sequence(level).Members:
            Lines[level].add(m.name())

    ### remove certain lines from the accounting
    remove = set(
        ["Hlt1MBNoBias", "Hlt2Transparent", "Hlt1Global", "Hlt2Global"])
    Lines["Hlt1"] -= remove
    Lines["Hlt2"] -= remove

    ### this is what I would prefer to do...
    #appendPostConfigAction( setLines(Lines["Hlt1"],Lines["Hlt2"]))

    ### option to create a tuple with all of the decisions
    if options.tuplefile != "":
        from ROOT import (TTree, TFile)
        from array import array
        TF = TFile(options.tuplefile, "RECREATE")
        DecMaps = {}  ## dicto of branches for the TTrees
        DecTrees = {}  ## dicto of TTrees
        for level in ["Hlt1", "Hlt2"]:
            DecTrees[level] = TTree('Tuple%s' % level, 'Tuple%s' % level)
            DecMaps[level] = {}
            for l in Lines[level]:
                # one int branch per line, filled 0/1 per event
                DecMaps[level][l] = array('i', [0])
                DecTrees[level].Branch(l, DecMaps[level][l],
                                       '%sDecision/I' % l)

    ### this will be dictionary of lines and their counters for the rates
    line_stats = {}
    for line in Lines["Hlt1"].union(Lines["Hlt2"]).union(
            set(["Hlt1Global", "Hlt2Global"])):
        line_stats[line] = {"passed_incl": 0, "passed_excl": 0}

    ### counters for various regex
    stream_stats = {
        "Turbo": {
            "filter": "Hlt2.(?!.*?TurboCalib).*Turbo"
        },
        "Turcal": {
            "filter": "Hlt2.*TurboCalib"
        },
        "Full": {
            "filter": "Hlt2.(?!.*?Turbo).(?!.*?TurboCalib)"
        }
    }
    for k, v in stream_stats.iteritems():
        v["pass_this_event"] = False
        v["passed"] = 0

    print '*' * 100
    print Lines
    print line_stats
    print stream_stats
    print '*' * 100

    i = 0
    processed = 0
    #### start of the event loop
    while i < Moore().EvtMax:
        i += 1
        # run the sequences on this event
        gaudi.run(1)
        processed += 1
        # no Hlt1 reports => ran off the end of the input
        if not gaudi.evtsvc()['Hlt1/DecReports']:
            break
        ### reset the stream counters
        for s in stream_stats.keys():
            stream_stats[s]["pass_this_event"] = False
        ### this is a nasty hack since I can't seem to switch
        ### off some of the Hlt1 lines with a post config action
        ### so I have to do my own book-keeping to see if one
        ### of the good hlt1 lines fired.
        ### I only consider events in Hlt2 if they pass my subset
        ### of good hlt1 lines.
        PassMyGlobal = {"Hlt1": False, "Hlt2": False}
        # loop over levels
        for level in ["Hlt1", "Hlt2"]:
            if level == "Hlt2" and not PassMyGlobal["Hlt1"]:
                continue
            # do the dec reports exist?
            # for Hlt2 this check does the job of telling us whether the event passed Hlt1
            if gaudi.evtsvc()['%s/DecReports' % level]:
                # get the dec reports
                reps = gaudi.evtsvc()['%s/DecReports' % level]
                # get the list of lines from the reports
                #lines = reps.decReports().keys()
                # counter for how many hlt lines fired in this event
                nPassed = 0
                # loop over all lines
                for line in Lines[level]:
                    # protection. why is this needed though?
                    if not line + "Decision" in reps.decReports().keys():
                        print '%s not in %s' % (line,
                                                reps.decReports().keys())
                        continue
                    # just check this once
                    LINE_FIRED = reps.decReport(line + "Decision").decision()
                    # my global counter
                    if LINE_FIRED:
                        PassMyGlobal[level] = True
                    # does this event fire any lines that match my "streams"?
                    if LINE_FIRED and level == "Hlt2" and not line == "Hlt2Global":  # and not line in remove:
                        for s in stream_stats.keys():
                            if re.match(stream_stats[s]["filter"], line,
                                        flags=0):
                                stream_stats[s]["pass_this_event"] = True
                    # set the variable to be stored in the tuple
                    if options.tuplefile != "":
                        if LINE_FIRED:
                            DecMaps[level][line][0] = 1
                        else:
                            DecMaps[level][line][0] = 0
                    # if this is the first fired event then
                    # need to initialise the dictionary entry
                    #if not line in line_stats.keys():
                    #    line_stats[line] = {"passed_incl":0,
                    #                        "passed_excl":0}
                    # increment the counter for this line
                    if LINE_FIRED:
                        line_stats[line]["passed_incl"] += 1
                        if not "Global" in line:
                            nPassed += 1  ### for the exclusives
                # my own global counter
                if PassMyGlobal[level]:
                    line_stats["%sGlobal" % level]["passed_incl"] += 1
                # now go back and count the number of exclusive fires of this line
                # just need to ignore HltXGlobal
                for line in Lines[level]:
                    if not line + "Decision" in reps.decReports().keys():
                        continue  # protection
                    if reps.decReport(line + "Decision").decision() and nPassed == 1:
                        if not "Global" in line:
                            line_stats[line]["passed_excl"] += 1
                # fill the tree
                if options.tuplefile != "":
                    DecTrees[level].Fill()
                # stream accounting
                for s in stream_stats.keys():
                    if stream_stats[s]["pass_this_event"] == True:
                        stream_stats[s]["passed"] += 1

    # write the root file
    if options.tuplefile != "":
        TF.Write()

    # I was hoping that this would dump all of the gaudi related
    # printout before I print the summary that I'm interested in.
    # Apparently this doesn't work, but it's only really a cosmetic thing.
    #gaudi.finalize()
    sys.stdout.flush()

    #############################################
    ###### print the summary tables #############
    #############################################
    for k, v in line_stats.iteritems():
        v["processed"] = processed
    for k, v in stream_stats.iteritems():
        v["processed"] = processed
    GlobalRates = {}
    print '-' * 100
    print 'HLT rates summary starts here'
    print '-' * 100
    #### print the global rates
    print 'removed lines: %s' % remove
    print 'processed: %s' % processed
    print '%s Hlt1Lines' % (len(Lines['Hlt1']))
    print '%s Hlt2Lines' % (len(Lines['Hlt2']))
    for level in ['Hlt1', 'Hlt2']:
        rate = getrate(1.e-3 * input_rate,
                       line_stats["%sGlobal" % level]["passed_incl"],
                       line_stats["%sGlobal" % level]["processed"])
        print '%sGlobal rate = (%s+-%s)kHz' % (level, rate[0], rate[1])
    ### print the stream rates
    for k, v in stream_stats.iteritems():
        print '%s rate = %s kHz' % (k, v["passed"] / float(v["processed"]))
    #### order by inclusive rate
    for level in ['Hlt1', 'Hlt2']:
        iLine = -1  ## counter for line
        table_row("**", "*Line*", "*Incl.*", "*Excl.*")
        OrderedStats = {}
        for key, value in line_stats.iteritems():
            if level in key:
                OrderedStats[key] = value["passed_incl"]
        for line_name, rate in sorted(OrderedStats.iteritems(),
                                      key=lambda (v, k): (k, v),
                                      reverse=True):
            iLine += 1
            rate_incl = getrate(1.e-3 * input_rate,
                                line_stats[line_name]["passed_incl"],
                                line_stats[line_name]["processed"])
            rate_excl = getrate(1.e-3 * input_rate,
                                line_stats[line_name]["passed_excl"],
                                line_stats[line_name]["processed"])
            if "Global" in line_name:
                GlobalRates[level] = rate_incl
            else:
                table_row(iLine, line_name.replace("Decision", ""),
                          "%.2f+-%.2f" % (rate_incl[0], rate_incl[1]),
                          "%.2f+-%.2f" % (rate_excl[0], rate_excl[1]))
    ## do we pass the test??
    MaxRates = {"Hlt1": 400., "Hlt2": 40.}
    for level in ['Hlt1', 'Hlt2']:
        if GlobalRates[level][0] > MaxRates[level]:
            print '%s rate = %s > max = %s' % (level, GlobalRates[level][0],
                                               MaxRates[level])
        elif GlobalRates[level][0] == 0:
            print '%s rate is zero' % level
        else:
            print '%s rates OK' % level
    print '-' * 100
    print 'HLT rates summary ends here'
    print '-' * 100
    sys.stdout.flush()
# Build an MC-truth decay tuple for the decay configured earlier in this
# file (`decay`, `decay_heads`, `datafiles`, `year` are defined above).
mctuple = MCDecayTreeTuple("MCDecayTreeTuple")
mctuple.Decay = decay
mctuple.ToolList = [
    "MCTupleToolHierarchy",
    "LoKi::Hybrid::MCTupleTool/LoKi_Photos"
]
# Add a 'number of photons' branch
#mctuple.addTupleTool("MCTupleToolReconstructed").Associate = False
#mctuple.MCTupleToolReconstructed.FillPID = False
mctuple.addTupleTool("MCTupleToolKinematic").Verbose = True
mctuple.addTupleTool("LoKi::Hybrid::TupleTool/LoKi_Photos").Variables = {
    "nPhotos": "MCNINTREE(('gamma' == MCABSID))"
}
mctuple.addTupleTool("MCTupleToolP2VV").Calculator = 'MCBs2JpsiPhiAngleCalculator'

# Print the decay tree for any particle in decay_heads
printMC = PrintMCTree()
printMC.ParticleNames = decay_heads

# Name of the .xgen file produced by Gauss
EventSelector().Input = [
    "DATAFILE='{0}' TYP='POOL_ROOTTREE' Opt='READ'".format(datafile)
    for datafile in datafiles
]

# Configure DaVinci
DaVinci().PrintFreq = 10000
DaVinci().TupleFile = "/Disk/speyside8/lhcb/gcowan1/generation/Bs2JpsiPhi/DVntuple.root"
DaVinci().Simulation = True
DaVinci().Lumi = False
DaVinci().DataType = str(year)
# NOTE(review): printMC is built above but not added to the sequence here.
#DaVinci().UserAlgorithms = [printMC, mctuple]
DaVinci().UserAlgorithms = [mctuple]
def main():
    """Configure Moore from command-line options (DB tags, snapshot,
    Oracle/LFC site, file stager, extra Hlt1/Hlt2 lines, verbose
    classes, decision-report tuple), then run the requested number of
    events. Returns 0 on success, 2 on any failure."""
    # Setup the option parser
    usage = "usage: %prog [options] inputfile <inputfile>"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("-d", "--datatype", action="store", dest="DataType",
                      default="2009", help="DataType to run on.")
    # NOTE(review): the default 1e4 is a float although type="int".
    parser.add_option("-n", "--evtmax", type="int", action="store",
                      dest="EvtMax", default=1e4,
                      help="Number of events to run")
    parser.add_option("--dddbtag", action="store", dest="DDDBtag",
                      default='MC09-20090602', help="DDDBTag to use")
    parser.add_option("--conddbtag", action="store", dest="CondDBtag",
                      default='sim-20090402-vc-md100',
                      help="CondDBtag to use")
    parser.add_option("--settings", action="store", dest="ThresholdSettings",
                      default='Physics_10000Vis_1000L0_40Hlt1_Apr09',
                      help="ThresholdSettings to use")
    parser.add_option("-s", "--simulation", action="store_true",
                      dest="Simulation", default=False,
                      help="Run on simulated data")
    parser.add_option("--dbsnapshot", action="store_true",
                      dest="UseDBSnapshot", default=False,
                      help="Use a DB snapshot")
    parser.add_option("--snd", action="store", dest="SnapshotDirectory",
                      default='/user/graven/MOORE/conditions', type="string",
                      help="DB Snapshot directory")
    parser.add_option("--oracle", action="store_true", dest="UseOracle",
                      default=False, help="Use Oracle")
    parser.add_option("-v", "--verbose", action="store_true", dest="Verbose",
                      default=False, help="Verbose output")
    parser.add_option("--acceptslow", action="store_true",
                      dest="AcceptIfSlow", default=False,
                      help="Accept slow events")
    parser.add_option("--hlt1lines", action="store", dest="Hlt1Lines",
                      default="",
                      help="Colon seperated list of additional hlt1 lines")
    parser.add_option("--hlt2lines", action="store", dest="Hlt2Lines",
                      default="",
                      help="Colon seperated list of additional hlt2 lines")
    parser.add_option("--rch", action="store_true", dest="RunChangeHandler",
                      default=False, help="Use the RunChangeHandler")
    parser.add_option("--l0", action="store_true", dest="L0",
                      default=False, help="Rerun L0")
    parser.add_option("--site", action="store", type="string", dest="Site",
                      default="", help="Site at which we run")
    parser.add_option("--tempdir", action="store", type="string",
                      dest="Tempdir", default="/tmpdir",
                      help="Tempdir for the filestager")
    parser.add_option("--tuplefile", action="store", type="string",
                      dest="TupleFile", default="tuples.root",
                      help="NTuple filename")
    parser.add_option("-f", "--filestager", action="store_true",
                      dest="FileStager", default=False,
                      help="Use the filestager")
    parser.add_option(
        "-c", "--verbose_classes", action="store", type="string",
        dest="VerboseClasses", default="",
        help="Colon seperated list of classes to be made verbose.")

    # Parse the command line arguments
    (options, args) = parser.parse_args()

    # Put the options into the Moore configurable
    Moore().ThresholdSettings = options.ThresholdSettings
    Moore().Verbose = options.Verbose
    Moore().EvtMax = options.EvtMax
    Moore().UseDBSnapshot = options.UseDBSnapshot
    Moore().DBSnapshotDirectory = options.SnapshotDirectory
    Moore().DDDBtag = options.DDDBtag
    Moore().CondDBtag = options.CondDBtag
    Moore().Simulation = options.Simulation
    Moore().DataType = options.DataType
    Moore().EnableAcceptIfSlow = options.AcceptIfSlow
    Moore().outputFile = ""
    Moore().ForceSingleL0Configuration = False
    Moore().RequireRoutingBits = [0x0, 0x4, 0x0]
    Moore().L0 = options.L0
    Moore().ReplaceL0BanksWithEmulated = options.L0
    if options.UseOracle:
        CondDB().UseOracle = True
        # determine the site for the LFC lookup: DIRAC env wins,
        # then the --site option, else 'UNKNOWN'
        site = 'UNKNOWN'
        try:
            site = os.environ['DIRACSITE']
        except KeyError:
            if len(options.Site):
                site = options.Site
        config = ConfigLFC(site)
        appendPostConfigAction(config.setLFCSite)

    # Inputdata is now handled through separate option files, this is for
    # testing/convenience
    if len(args):
        Moore().inputFiles = args
    try:
        # pick the persistency service from the first input descriptor
        descriptor = EventSelector().Input[0]
        if descriptor.find(".raw") != -1:
            from Configurables import LHCb__RawDataCnvSvc as RawDataCnvSvc
            EventPersistencySvc().CnvServices.append(
                RawDataCnvSvc('RawDataCnvSvc'))
        elif descriptor.find(".dst") != -1:
            importOptions('$GAUDIPOOLDBROOT/options/GaudiPoolDbRoot.opts')
    except IndexError:
        # no input configured at all; leave persistency untouched
        pass

    # print every event when verbose classes are requested
    freq = 0
    if (len(options.VerboseClasses)):
        freq = 1
    else:
        freq = 100
    EventSelector().PrintFreq = freq

    # RunChangeHandler
    if options.RunChangeHandler:
        Moore().EnableRunChangeHandler = True
        from Configurables import MagneticFieldSvc
        MagneticFieldSvc().UseSetCurrent = True

    # XMLSummary
    from Configurables import LHCbApp
    LHCbApp().XMLSummary = 'summary.xml'

    # Use the filestager?
    if options.FileStager:
        from FileStager.Configuration import configureFileStager
        configureFileStager()

    # Put the comma separated lists of lines into lists
    hlt1Lines = []
    for line in options.Hlt1Lines.split(";"):
        if (len(line.strip())):
            hlt1Lines.append(line)
    hlt2Lines = []
    for line in options.Hlt2Lines.split(";"):
        if (len(line.strip())):
            hlt2Lines.append(line)

    # parse the specification of the classes to set to verbose
    verboseClasses = []
    for cl in options.VerboseClasses.split(";"):
        cl = cl.strip()
        if (len(cl)):
            verboseClasses.append(cl.replace("::", "__"))

    # Instantiate the class to apply the required configuration
    config = Config(hlt1Lines, hlt2Lines)
    appendPostConfigAction(config.postConfigAction)

    # Set the OutputLevel for requested classed
    if len(verboseClasses):
        configOL = ConfigOutputLevel(verboseClasses, 1)
        appendPostConfigAction(configOL.setOutputLevel)

    # Add the TupleHltDecReports alg to the sequence
    if options.TupleFile:
        # NOTE(review): tupleAlg is constructed but never referenced again;
        # the post-config action below is what adds the algorithm.
        tupleAlg = TupleHltDecReports("TupleHltDecReports")
        addTupleAlg = ConfigTupleAlg(filename=options.TupleFile)
        appendPostConfigAction(addTupleAlg.addTupleAlg)

    # Instantiate the AppMgr
    appMgr = AppMgr()

    # Make sure that we have Hlt lines to run
    if not len(Sequence("Hlt1").Members) or not len(Sequence("Hlt2").Members):
        print "error, no lines to run\n"
        return 2

    # Run the required number of events
    sc = appMgr.run(Moore().EvtMax)
    if sc.isFailure():
        return 2

    # Done
    sc = appMgr.exit()
    if sc.isFailure():
        return 2
    else:
        return 0