def __set_init_time__(self, utcDatetime):
    """
    Configure the initialization time, depending on the value of self.InitialTime.
    Default ('Safe'): use the earlier of the proposed time and the current time
    'Now'           : use the later of the current time and the proposed time
    datetime        : use this datetime
    """
    # default situation
    if self.getProp("InitialTime").lower() == "safe" or not len(self.getProp("InitialTime")):
        # default situation
        utcDatetime = min(datetime.utcnow(), utcDatetime)
    elif self.getProp("InitialTime").lower() == "now":
        # Moore!
        utcDatetime = max(datetime.utcnow(), utcDatetime)
    else:
        raise TypeError("DDDBConf.InitialTime: cannot convert "
                        + self.getProp("InitialTime")
                        + " to a datetime, please use 'Now' or 'Safe'")

    from Configurables import EventClockSvc
    ecs = EventClockSvc()
    # do not overwrite already set values
    if not ecs.isPropertySet("InitialTime"):
        dt = utcDatetime - datetime(1970, 1, 1, 0)
        ns = (dt.days * 24 * 60 * 60 + dt.seconds) * 1000000000
        ecs.InitialTime = ns
    else:
        t = datetime(1970, 1, 1, 0) + timedelta(seconds=ecs.InitialTime / 1000000000)
        log.warning("EventClockSvc().InitialTime already set to %s UTC (requested %s UTC)",
                    t.isoformat(), utcDatetime.isoformat())
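# Illustrative helper (not part of the original code): the same UTC-datetime to
# nanoseconds-since-epoch conversion used above, factored out for clarity.
from datetime import datetime

def to_ns_since_epoch(utcDatetime):
    """Convert a naive UTC datetime to ns since the Unix epoch (sketch only)."""
    dt = utcDatetime - datetime(1970, 1, 1)
    return (dt.days * 24 * 60 * 60 + dt.seconds) * 1000000000

# e.g. EventClockSvc().InitialTime = to_ns_since_epoch(datetime(2015, 6, 7))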
def patchEscher(true_online_version, alignment_module, n=-1):
    import GaudiConf.DstConf
    import Escher.Configuration
    from Configurables import MagneticFieldSvc
    from Configurables import TAlignment
    from TAlignment.VertexSelections import configuredPVSelection
    Online = importOnline()

    from Configurables import EventClockSvc
    initialTime = long(time.time() * 1e9)
    clkSvc = EventClockSvc()
    clkSvc.InitialTime = initialTime

    from Gaudi.Configuration import appendPostConfigAction
    appendPostConfigAction(fakeEventTime(initialTime))

    TAlignment().RunList = Online.DeferredRuns if hasattr(Online, "DeferredRuns") else []
    sys.stdout.flush()
    sys.stderr.flush()

    escher = EscherCommon(true_online_version, alignment_module)
    hostname = HostName()
    escher.InputType = "MDF"
    escher.PrintFreq = 10000
    # escher.EvtMax = 300
    # asddir = Online.ASDDir + "/" if hasattr(Online, "ASDDir") else "/group/online/alignment/EscherOut/"
    asddir = Online.ASDDir + "/" if hasattr(Online, "ASDDir") else "/calib/align/EscherOut/"
    if n == -1:
        suffix = "_Escher.out"
    else:
        suffix = ("_%02d_Escher.out" % n)
    TAlignment().OutputDataFile = asddir + hostname + suffix
    TAlignment().UpdateInFinalize = False
    return escher
def __call__(self):
    from Configurables import EventClockSvc, FakeEventTime
    clkSvc = EventClockSvc()
    clkSvc.EventTimeDecoder = "FakeEventTime"
    clkSvc.addTool(FakeEventTime, "FakeEventTime")
    clkSvc.FakeEventTime.StartTime = self.__initialTime + 1

    from Configurables import GaudiSequencer, createODIN
    initSeq = GaudiSequencer("InitEscherSeq")
    initSeq.Members.insert(0, createODIN())
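# The __call__ above belongs to a small callable used as a post-config action
# (patchEscher does appendPostConfigAction(fakeEventTime(initialTime))). A plausible
# outline of the rest of that class, shown here only as an assumption:
class fakeEventTime(object):
    def __init__(self, initialTime):
        self.__initialTime = initialTime  # ns since epoch, captured at configuration time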
def configureEventTime():
    """
    Configure EventClockSvc to get the event time from RecHeader first
    and then from ODIN in case of failure.
    Returns EventClockSvc().
    Author: Marco Clemencic.
    """
    # turn off the setting done by DecodeRawEvent first
    DecodeRawEvent().EvtClockBank = ""
    ecs = EventClockSvc()
    ecs.addTool(TimeDecoderList, "EventTimeDecoder")
    tdl = ecs.EventTimeDecoder
    tdl.addTool(RecEventTime)
    tdl.addTool(OdinTimeDecoder)
    tdl.Decoders = [tdl.RecEventTime, tdl.OdinTimeDecoder]
    return ecs
def _configureInput(self):
    """
    Tune initialisation
    """
    # Input data type
    inputType = self.getProp("InputType").upper()

    # Get the event time (for CondDB) from ODIN
    from Configurables import EventClockSvc
    EventClockSvc().EventTimeDecoder = "OdinTimeDecoder"

    # if the property is set explicitly, use it!
    if self.isPropertySet('EnableUnpack'):
        unPack = self.getProp('EnableUnpack')
        DstConf(EnableUnpack=unPack)
        PhysConf(EnableUnpack=unPack)
    elif inputType != "MDF":
        defaultUnpacking = ["Reconstruction", "Stripping"]
        DstConf(EnableUnpack=defaultUnpacking)
        PhysConf(EnableUnpack=defaultUnpacking)

    if inputType != "MDST":
        if self.getProp("Simulation"):
            DstConf().setProp("SimType", "Full")

    return inputType
def doIt():
    """
    Specific post-config action for (x)GEN files
    """
    extension = "xgen"
    ext = extension.upper()

    from Configurables import DataOnDemandSvc
    dod = DataOnDemandSvc()
    from copy import deepcopy
    algs = deepcopy(dod.AlgMap)
    bad = set()
    for key in algs:
        if 0 <= key.find('Rec'):
            bad.add(key)
        elif 0 <= key.find('Raw'):
            bad.add(key)
        elif 0 <= key.find('DAQ'):
            bad.add(key)
        elif 0 <= key.find('Trigger'):
            bad.add(key)
        elif 0 <= key.find('Phys'):
            bad.add(key)
        elif 0 <= key.find('Prev/'):
            bad.add(key)
        elif 0 <= key.find('Next/'):
            bad.add(key)
        elif 0 <= key.find('/MC/') and 'GEN' == ext:
            bad.add(key)

    for b in bad:
        del algs[b]

    dod.AlgMap = algs

    from Configurables import EventClockSvc, CondDB
    EventClockSvc(EventTimeDecoder="FakeEventTime")
    CondDB(IgnoreHeartBeat=True)
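# Such functions are typically registered as post-config actions, e.g.
# (usage illustration, not shown in the snippet itself):
from Gaudi.Configuration import appendPostConfigAction
appendPostConfigAction(doIt)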
def configureInput(self, inputType):
    """
    Tune initialisation according to the input type
    """
    # By default, Brunel only needs to open one input file at a time
    # Only set to zero if not previously set to something else.
    if not IODataManager().isPropertySet("AgeLimit"):
        IODataManager().AgeLimit = 0

    if self._isReprocessing(inputType):
        # Kill knowledge of any previous Brunel processing
        from Configurables import (TESCheck, EventNodeKiller)
        InitReprocSeq = GaudiSequencer("InitReprocSeq")
        if (self.getProp("WithMC") and inputType in ["XDST", "DST"]):
            # Load linkers, to kill them (avoid appending to them later)
            InitReprocSeq.Members.append("TESCheck")
            TESCheck().Inputs = ["Link/Rec/Track/Best"]
        killer = EventNodeKiller()
        killer.Nodes += ["Raw", "Link/Rec"]
        if self.getProp("SkipTracking"):
            killer.Nodes += ["pRec/Rich", "pRec/Muon", "pRec/Calo", "pRec/Track/Muon", "pRec/ProtoP"]
        else:
            killer.Nodes += ["pRec", "Rec"]
        InitReprocSeq.Members.append(killer)
        ### see configureOutput to see how the remainder of the juggler is configured

    # Get the event time (for CondDB) from ODIN
    from Configurables import EventClockSvc
    EventClockSvc().EventTimeDecoder = "OdinTimeDecoder"
def _gen_postconfig_():
    """
    Specific post-config action for (x)GEN files
    """
    logger.info('Start post-config action for (x)gen-files')
    from Configurables import DataOnDemandSvc
    dod = DataOnDemandSvc()
    from copy import deepcopy
    algs = deepcopy(dod.AlgMap)
    bad = set()
    # 'ext' is used below but not defined in this snippet; mirroring the doIt()
    # variant above is assumed here.
    extension = "xgen"
    ext = extension.upper()
    for key in algs:
        if 0 <= key.find('Rec'):
            bad.add(key)
        elif 0 <= key.find('Raw'):
            bad.add(key)
        elif 0 <= key.find('DAQ'):
            bad.add(key)
        elif 0 <= key.find('Trigger'):
            bad.add(key)
        elif 0 <= key.find('Phys'):
            bad.add(key)
        elif 0 <= key.find('Prev/'):
            bad.add(key)
        elif 0 <= key.find('Next/'):
            bad.add(key)
        elif 0 <= key.find('/MC/') and 'GEN' == ext:
            bad.add(key)

    for b in bad:
        logger.debug('Remove key from DataOnDemand actions %s' % b)
        del algs[b]
    logger.info('Remove %d keys from DataOnDemand actions' % len(bad))
    dod.AlgMap = algs

    from Configurables import EventClockSvc, CondDB
    EventClockSvc(EventTimeDecoder="FakeEventTime")
    logger.info('Use fake event time decoder for (x)gen-files')
    CondDB(IgnoreHeartBeat=True)
    logger.info('Ignore heart-beat for (x)gen-files')
def configure():
    from Gaudi.Configuration import (ApplicationMgr, MessageSvc, ERROR)
    from Configurables import DDDBConf, CondDB, CondDBAccessSvc, EventClockSvc, FakeEventTime

    dddbConf = DDDBConf()
    cdb = CondDB()
    cdb.PartitionConnectionString["DQFLAGS"] = "sqlite_file:../data/DQFLAGS.db/DQFLAGS"
    cdb.Tags["DQFLAGS"] = ""

    ecs = EventClockSvc(InitialTime=toTimeStamp(datetime(2012, 1, 1, 12)))
    ecs.addTool(FakeEventTime, "EventTimeDecoder")
    ecs.EventTimeDecoder.StartTime = ecs.InitialTime
    ecs.EventTimeDecoder.TimeStep = toTimeStamp(timedelta(days=1))

    ApplicationMgr(TopAlg=["LoadDDDB"], EvtSel="NONE")
    MessageSvc(OutputLevel=ERROR)
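# The configure() above (and extractAlignmentParameters below) rely on a
# toTimeStamp() helper that is not shown. A minimal sketch, assuming naive UTC
# datetimes and ns-since-epoch timestamps:
from datetime import datetime, timedelta

def toTimeStamp(dt):
    """Convert a datetime or timedelta to an integer number of nanoseconds (sketch)."""
    if isinstance(dt, timedelta):
        return int(dt.total_seconds() * 1000000000)
    return int((dt - datetime(1970, 1, 1)).total_seconds() * 1000000000)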
def patchBrunel(true_online_version):
    """
    Instantiate the options to run Brunel with raw data

    @author M.Frank
    """
    import Brunel.Configuration
    import OnlineEnv

    brunel = Brunel.Configuration.Brunel()
    brunel.OnlineMode = True
    try:
        brunel.DDDBtag = OnlineEnv.DDDBTag
    except:
        print "DDDBTag not found, use default"
    try:
        brunel.CondDBtag = OnlineEnv.CondDBTag
    except:
        print "CondDBTag not found, use default"

    conddb = CondDB()
    conddb.IgnoreHeartBeat = True
    #
    # Adjust to pick up the proper online conditions
    #
    import Online as RunChange_All
    conddb.setProp('RunChangeHandlerConditions', RunChange_All.ConditionMap)
    conddb.setProp('EnableRunChangeHandler', True)

    #
    # Brunel output configuration
    #
    brunel.WriteFSR = False  # This crashes Jaap's stuff
    brunel.DataType = "2013"
    brunel.OutputType = ''

    EventLoopMgr().OutputLevel = MSG_DEBUG  # ERROR
    EventLoopMgr().Warnings = False

    from Configurables import EventClockSvc
    EventClockSvc().InitialTime = 1322701200000000000

    brunel.UseDBSnapshot = True  # try it
    # brunel.PartitionName = "FEST"

    # Hack by Chris
    print "# Warning using CKThetaQuartzRefractCorrections = [ 0, -0.001, 0 ]"
    from Configurables import RichRecSysConf
    rConf = RichRecSysConf("RichOfflineRec")
    rConf.richTools().photonReco().CKThetaQuartzRefractCorrections = [0, -0.001, 0]

    brunel.OutputLevel = MSG_WARNING
    brunel.PrintFreq = -1
    HistogramPersistencySvc().OutputFile = ""
    HistogramPersistencySvc().OutputLevel = MSG_ERROR
    # print brunel
    return brunel
def defineDB(self):
    # Delegate handling of properties to DDDBConf
    self.setOtherProps(DDDBConf(), ["Simulation", "DataType"])

    # Set CondDB tags if given, otherwise use the defaults defined in DDDBConf
    from Configurables import CondDB
    if hasattr(self, "DDDBtag"):
        CondDB().Tags["DDDB"] = self.getProp("DDDBtag")
    if hasattr(self, "CondDBtag"):
        CondDB().Tags["LHCBCOND"] = self.getProp("CondDBtag")
        CondDB().Tags["SIMCOND"] = self.getProp("CondDBtag")
    if hasattr(self, "DQFLAGStag"):
        CondDB().Tags["DQFLAGS"] = self.getProp("DQFLAGStag")

    # Set up a time decoder for real data (Simulation uses FakeEventTime)
    if not self.getProp("Simulation"):
        from Configurables import EventClockSvc
        ecs = EventClockSvc()
        # do not overwrite already set values
        if not ecs.isPropertySet("EventTimeDecoder"):
            ecs.EventTimeDecoder = "OdinTimeDecoder"
def configMonitor():
    import os
    from Gaudi.Configuration import EventPersistencySvc, HistogramPersistencySvc
    from Configurables import (LHCbApp, LHCb__RawDataCnvSvc, GaudiSequencer,
                               UpdateAndReset, createODIN, ApplicationMgr)

    app = LHCbApp()
    app.DataType = '2015'
    app.EvtMax = -1

    EventPersistencySvc().CnvServices.append(LHCb__RawDataCnvSvc('RawDataCnvSvc'))
    HistogramPersistencySvc().OutputFile = ''
    HistogramPersistencySvc().Warnings = False
    UpdateAndReset().saveHistograms = 1
    # UpdateAndReset().saverCycle = 3600

    from Configurables import EventClockSvc
    EventClockSvc().EventTimeDecoder = 'OdinTimeDecoder'

    appMgr = ApplicationMgr()

    # Decoder
    from Configurables import HCRawBankDecoder
    decoder = HCRawBankDecoder()
    decoder.Monitoring = True

    # Monitor
    from Configurables import HCDigitMonitor
    monitor = HCDigitMonitor()
    monitor.CrateB = 0
    monitor.CrateF = 1
    monitor.ChannelsB0 = [47, 46, 45, 44]
    monitor.ChannelsB1 = [23, 22, 21, 20]
    monitor.ChannelsB2 = [11, 10, 9, 8]
    monitor.ChannelsF1 = [23, 22, 21, 46]
    monitor.ChannelsF2 = [11, 10, 9, 8]

    # Top level sequence
    topSeq = GaudiSequencer("TopSequence")
    topSeq.Members = [createODIN(), decoder, monitor]
    appMgr.TopAlg = [topSeq]

    return app, monitor
def SetEvtClock(bank, db=None):
    """
    Add a specific decoder to the EventClockSvc, replacing disparate code
    """
    if db is None:
        from DAQSys.Decoders import DecoderDB
        db = DecoderDB
    from DAQSys.DecoderClass import decodersForBank
    odinconfs = decodersForBank(db, bank)
    if len(odinconfs):
        # force it to use the same public tool as the rest of the ODIN decoding
        publicTool = odinconfs[0].PublicTools[0]
        from Configurables import EventClockSvc
        EventClockSvc(EventTimeDecoder=publicTool.replace("ToolSvc.", "") + ":PUBLIC")
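# Usage illustration (assumption): pick up the standard ODIN decoder from the
# default DecoderDB so the EventClockSvc shares its public tool.
SetEvtClock("ODIN")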
def _setup_rch(self):
    # Setup DB snapshot and RCH
    from Configurables import EventClockSvc
    EventClockSvc().EventTimeDecoder = 'OdinTimeDecoder'

    tag = {
        "DDDB": self._config['DDDBtag'],
        "LHCBCOND": self._config['CondDBtag'],
        "ONLINE": 'fake'
    }
    baseloc = '/group/online/hlt/conditions'

    from Configurables import CondDB
    conddb = CondDB()
    # hack to allow us to change connection strings...
    conddb.Online = True
    # Set alternative connection strings and tags
    # (see Det/DetCond's configurable...)
    dbPartitions = ["DDDB", "LHCBCOND", "ONLINE"]
    for part in dbPartitions:
        if tag[part] == 'default':
            raise KeyError('must specify an explicit %s tag' % part)
        conddb.PartitionConnectionString[part] = "sqlite_file:%(dir)s/%(part)s_%(tag)s.db/%(part)s" % {
            "dir": baseloc,
            "part": part,
            "tag": tag[part]
        }
        conddb.Tags[part] = tag[part]

    # Set the location of the Online conditions
    from Configurables import MagneticFieldSvc
    MagneticFieldSvc().UseSetCurrent = True
    online_xml = '%s/LHCb/2015/%%d/online.xml' % baseloc

    from Configurables import RunChangeHandlerSvc
    rch = RunChangeHandlerSvc()
    rch.Conditions = {
        "Conditions/Online/LHCb/Magnet/Set": online_xml,
        "Conditions/Online/Velo/MotionSystem": online_xml,
        "Conditions/Online/LHCb/Lumi/LumiSettings": online_xml,
        "Conditions/Online/LHCb/RunParameters": online_xml,
        "Conditions/Online/Rich1/R1HltGasParameters": online_xml,
        "Conditions/Online/Rich2/R2HltGasParameters": online_xml
    }
    ApplicationMgr().ExtSvc.append(rch)
LHCbApp().XMLSummary = 'summary.xml'

#-- set explicit CondDB tag
LHCbApp().CondDBtag = 'cond-20141002'

#--- determine application to run
from Configurables import LumiAlgsConf, DumpFSR
from LumiAlgs.LumiIntegratorConf import LumiIntegratorConf

#-- cannot test this in REC due to dependence on PropertyConfigSvc (TCK)
## LumiIntegratorConf().UseOnline = False
LumiIntegratorConf().UseOnline = True

# clock service for CondDB
from Configurables import EventClockSvc
EventClockSvc().EventTimeDecoder = "OdinTimeDecoder"

# standard sequence from configurable
LumiAlgsConf().LumiSequencer = GaudiSequencer("LumiSeq", ShortCircuit=False)
LumiAlgsConf().InputType = 'DST'
LumiAlgsConf().OutputLevel = INFO

# standard sequence from configurable
LumiIntegratorConf().LumiSequencer = GaudiSequencer("LumiIntSeq", ShortCircuit=False)

#-- main
ApplicationMgr(
    TopAlg=[
        GaudiSequencer("LumiSeq"),
        GaudiSequencer("LumiIntSeq"),
list_conditions = sum(ConditionMap.values(), [])

# Configure Brunel
from Configurables import LHCbApp
app = LHCbApp()
app.DataType = '2015'
app.Simulation = False
app.EvtMax = 1

import HLT2Params
app.DDDBtag = HLT2Params.DDDBTag
app.CondDBtag = HLT2Params.CondDBTag

from Configurables import EventClockSvc, FakeEventTime, EventDataSvc
ecs = EventClockSvc()
ecs.InitialTime = arguments.start[0]
ecs.addTool(FakeEventTime, "EventTimeDecoder")
ecs.EventTimeDecoder.TimeStep = 10
ecs.EventTimeDecoder.StartTime = arguments.start[0]

from Configurables import DumpConditions
DumpConditions().RunStartTime = arguments.start[0]
DumpConditions().RunNumber = arguments.run[0]
DumpConditions().OutFile = arguments.output[0]
DumpConditions().Conditions = list_conditions

from Configurables import CondDB
cdb = CondDB()
if arguments.online:
    import CondMap
dets = []
for i in range(len(ks)):
    k = ks[i]
    excl = False
    for j in range(len(excludeFiles)):
        if k.find(excludeFiles[j]) >= 0:
            excl = True
            break
    if excl:
        continue
    detxml = k[k.rfind('/') + 1:]
    det = detxml[:detxml.rfind('.xml')]
    det = det.lower()
    dets.append(det)

print "Moving to Offline DB the following detector Data ", dets

ecs = EventClockSvc()
ecs.InitialTime = RunOption.RunStartTime * 1000000000
ecs.addTool(FakeEventTime, "EventTimeDecoder")
ecs.EventTimeDecoder.StartTime = ecs.InitialTime
ecs.EventTimeDecoder.TimeStep = 10

# xmlCnvSvc = XmlCnvSvc(AllowGenericConversion = True)
DDDBConf()
# detDataSvc = DetectorDataSvc()
# DetectorPersistencySvc( CnvServices = [ xmlCnvSvc ] )

cdb = CondDB()
cdb.RunChangeHandlerConditions = CondMap.ConditionMap
cdb.EnableRunChangeHandler = True
cdb.EnableRunStampCheck = False
cdb.UseDBSnapshot = True
cdb.Tags = {
    "DDDB": RunOption.DDDBTag,
Time2 = time.mktime(time.strptime(Date2, "%Y-%m-%d %H:%M:%S"))
TimeStep = Time2 - StartTime
EndTime = time.mktime(time.localtime())

print "Start Time", int(StartTime) * 1000000000
print "End Time", int(EndTime) * 1000000000
print "Time steps", int(TimeStep) * 1000000000

nSteps = (EndTime - StartTime) / TimeStep
print "This time range requires", int(nSteps), "steps"

# have to convert time from s to ns since the epoch
ApplicationMgr().EvtMax = int(nSteps)
ApplicationMgr().OutputLevel = DEBUG

EventClockSvc().EventTimeDecoder = 'FakeEventTime'
EventClockSvc().InitialTime = int(StartTime) * 1000000000
from Configurables import FakeEventTime
EventClockSvc().addTool(FakeEventTime)
# required to add the step, otherwise the 2nd event has the same time
# in the EventClockSvc as the initial time
EventClockSvc().FakeEventTime.StartTime = int(StartTime) * 1000000000 + int(TimeStep) * 1000000000
EventClockSvc().FakeEventTime.TimeStep = int(TimeStep) * 1000000000
EventClockSvc().OutputLevel = 2

from Configurables import STPerformanceMonitor, ST__STActiveFraction
ttFraction = ST__STActiveFraction("TTActiveFraction")
ttFraction.DetType = "TT"
ttFraction.StartTime = int(StartTime) * 1000000000
ttFraction.TimeStep = int(TimeStep) * 1000000000
ttFraction.Steps = int(nSteps)
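# Sketch of the event-time progression implied by the configuration above
# (assumption about FakeEventTime: event n is stamped StartTime + n * TimeStep,
# hence StartTime is offset by one TimeStep from InitialTime):
def fake_event_time_ns(n, startTime_ns, timeStep_ns):
    return startTime_ns + n * timeStep_ns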
from Configurables import LHCbApp

#LHCbApp().DDDBtag = "head-20120413"
#LHCbApp().CondDBtag = "cond-20120730"
#LHCbApp().Simulation = simulation
#if simulation :
#    LHCbApp().DDDBtag = "dddb-20120831"
#    LHCbApp().CondDBtag = "sim-20121025-vc-md100"
#else :
LHCbApp().DDDBtag = "default"
LHCbApp().CondDBtag = "default"

import time
from Configurables import EventClockSvc
EventClockSvc().InitialTime = int((time.time() - 3600) * 1e9)

# databases from Wouter (the monolayer alignment will be in the
# default database)
# path_monolayer = "/afs/cern.ch/user/w/wouter/public/AlignDB/"
# ddbs = []
# ddbs.append(path_monolayer + "OTMonoGeometry.db/DDDB")
# ddbs.append(path_monolayer + "OTMonoCatalog.db/LHCBCOND")
# ddbs.append(path_monolayer + "OTMonoAlign20141225.db/LHCBCOND")
# ddbs.append(path_monolayer + "OTGeometryT0.db/DDDB")
# ddbs.append(path_monolayer + "OTCondT0.db/LHCBCOND")
# counter = 1
# for db in ddbs:
#     from Configurables import ( CondDB, CondDBAccessSvc )
# gaudirun.py Brunel-Cosmics.py 2008-Cosmic-Data.py
#
from Gaudi.Configuration import *
from Configurables import Escher, LHCbApp, TrackSys, EventClockSvc

#-- File catalogs. First one is read-write
FileCatalog().Catalogs = ["xmlcatalog_file:MyCatalog.xml"]

#-- Use latest database tags for real data
LHCbApp().DDDBtag = "default"
LHCbApp().CondDBtag = "default"
LHCbApp().DDDBtag = "HEAD"
LHCbApp().CondDBtag = "HEAD"

#-- Set a reasonable time for the first event
EventClockSvc().InitialTime = 1260350949785664000

from Configurables import (CondDB, CondDBAccessSvc)
otCalib = CondDBAccessSvc('OTCalib')
#otCalib.ConnectionString = 'sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/ConditionsOTCalibration.db/LHCBCOND'
#otCalib.ConnectionString = 'sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/CalibrationOTQuarterT0s_071209.db/LHCBCOND'
otCalib.ConnectionString = 'sqlite_file:/afs/cern.ch/user/a/akozlins/public/OT/LHCBCOND/Collision09_OT_ModuleT0s_220110_sigma4ns.db/LHCBCOND'
CondDB().addLayer(otCalib)

# Latest cosmic run, with CALO, OT and (!!) RICH2 (35569 events)
Escher().DatasetName = 'collisions'
Escher().InputType = 'MDF'
#EventSelector().Input.append("DATA='castor:/castor/cern.ch/grid/lhcb/data/2009/RAW/FULL/LHCb/BEAM1/62558/062558_0000000001.raw' SVC='LHCb::MDFSelector'")
#import os
#runnr = os.environ['COSMICSRUNNR']
def extractAlignmentParameters(
        elementsWithTESAndCondDBNodes,
        since,
        until,
        valueExtractor=lambda detElm: getGlobalPositionFromGeometryInfo(detElm.geometry()),
        DDDBtag="default",
        CondDBtag="default",
        alignDBs=[]):
    """
    The method talking to the detector svc

    Extract from all DetectorElements down from each element in
    elementsWithTESAndCondDBNodes ( format { elm : ( detTES, [ condDBNode ] ) } ),
    alignment parameters using valueExtractor, for all IOVs between since and
    until (datetimes), using the CondDBNodes.
    The default database is configured with database tags DDDBtag and CondDBtag,
    and all alignDBs [ (connectString, tag) ] are added as layers to the CondDB.

    Returns a dict { element : [ ( (iovBegin, iovEnd), alignmentTree ) ] }
    """
    # Static configuration of the application manager
    from Configurables import LHCbApp, ApplicationMgr
    from LHCbKernel.Configuration import FATAL, ERROR, WARNING, INFO, DEBUG, VERBOSE

    ApplicationMgr().AppName = "AlignmentCollector"
    ApplicationMgr().OutputLevel = ERROR

    LHCbApp().DDDBtag = DDDBtag
    LHCbApp().CondDBtag = CondDBtag

    # >>> This part stolen from Det/DetCond/tests/scripts/getIOVs.py
    ApplicationMgr().TopAlg = ["LoadDDDB"]

    from Configurables import EventClockSvc, FakeEventTime
    ecs = EventClockSvc(
        InitialTime=toTimeStamp(datetime(2010, 1, 1, 12, tzinfo=pytz.utc)))
    ecs.addTool(FakeEventTime, "EventTimeDecoder")
    ecs.EventTimeDecoder.StartTime = ecs.InitialTime
    ecs.EventTimeDecoder.TimeStep = toTimeStamp(timedelta(days=1))
    # <<< (plus "lhcbcond" below)

    layers = ["LHCBCOND"]
    if len(alignDBs) > 0:
        from Configurables import CondDB, CondDBAccessSvc
        for i, (connectString, tag) in enumerate(alignDBs):
            layerName = "AlignCond%i" % i
            alignCond = CondDBAccessSvc(layerName)
            alignCond.ConnectionString = connectString
            alignCond.DefaultTAG = tag
            CondDB().addLayer(alignCond)
            layers.append(layerName)

    # run a gaudi application
    from GaudiPython import AppMgr, gbl
    gaudi = AppMgr()
    gaudi.createSvc("UpdateManagerSvc")
    updateManagerSvc = gaudi.service("UpdateManagerSvc", interface="IUpdateManagerSvc")
    gaudi.initialize()

    conddbReaders = list(
        gaudi.service(name, gbl.ICondDBReader) for name in reversed(layers))

    detDataSvc = updateManagerSvc.detDataSvc()

    alignmentTrees = dict(
        (detName, []) for detName in elementsWithTESAndCondDBNodes.iterkeys())

    for detName, (detPath, condNodes) in elementsWithTESAndCondDBNodes.iteritems():
        ### get the IOVs for all elements, and combine them
        timeLine = [("gap", (since, until))]
        for layerReader in conddbReaders:
            timeLineUpdated = list(timeLine)
            alreadyInserted = 0
            for i, (typ, (gapBegin, gapEnd)) in enumerate(timeLine):
                if typ == "gap":
                    iovs = combinedIOVs([
                        list((max(toDateTime(iov.since.ns()), gapBegin),
                              min(toDateTime(iov.until.ns()), gapEnd))
                             for iov in layerReader.getIOVs(
                                 node,
                                 gbl.ICondDBReader.IOV(
                                     gbl.Gaudi.Time(toTimeStamp(gapBegin)),
                                     gbl.Gaudi.Time(toTimeStamp(gapEnd))), 0))
                        for node in TrackingAlignmentCondDBNodes[detName]
                    ])
                    if len(iovs) != 0:
                        updatedTimeSlice = list(
                            ("iov", (begin, end)) for begin, end in iovs)
                        if updatedTimeSlice[0][1][0] > gapBegin:
                            updatedTimeSlice.insert(
                                0, ("gap", (gapBegin, updatedTimeSlice[0][1][0])))
                        if updatedTimeSlice[-1][1][1] < gapEnd:
                            updatedTimeSlice.append(
                                ("gap", (updatedTimeSlice[-1][1][1], gapEnd)))
                        timeLineUpdated[i + alreadyInserted:i + alreadyInserted + 1] = updatedTimeSlice
                        alreadyInserted += len(updatedTimeSlice) - 1
            logging.debug("timeline after adding %s : %s" % (layerReader, timeLine))
            timeLine = timeLineUpdated
        iovs = list(timespan for typ, timespan in timeLine if typ == "iov")

        ### For every IOV, extract the parameters
        for begin, end in iovs:
            detDataSvc.setEventTime(
                gbl.Gaudi.Time(toTimeStamp(begin + (end - begin) / 2)))
            updateManagerSvc.newEvent()
            motionSystem = None if detName != "Velo" else gaudi.detSvc().getObject(
                "/dd/Conditions/Online/Velo/MotionSystem")
            logging.info("Extracting parameters for %s between %s and %s" %
                         (detName, begin, end))
            detTree = getAlignableTreeFromDetectorElement(
                gaudi.detSvc().getObject(detPath),
                nodeValue=valueExtractor,
                parentName=detPath,
                motionSystem=motionSystem)
            detTree.name = detName
            alignmentTrees[detName].append(((begin, end), detTree))

    gaudi.finalize()
    gaudi.exit()

    return alignmentTrees
def setup():
    initialTime = long(time.time() * 1e9)

    OTGaudiSeq = GaudiSequencer("OTt0OnlineClbrSeq")
    OTt0OnlineClbrAlg = OTt0OnlineClbr("OTt0OnlineClbrAlg")
    #OTt0OnlineClbrAlg.InputFiles = [ "/hist/Savesets/2013/LHCb/Brunel/01/20/Brunel-135576-20130120T161302-EOR.root" ]

    # OT T0 calibration algorithm
    OTt0OnlineClbrAlg.InputTasks = ["Brunel"]
    OTt0OnlineClbrAlg.Partition = partition
    OTt0OnlineClbrAlg.ReadInitialT0FromDB = False
    OTt0OnlineClbrAlg.SaveFits = False
    OTt0OnlineClbrAlg.RunOnline = True
    OTt0OnlineClbrAlg.CheckDataT0 = True
    OTt0OnlineClbrAlg.PublishedName = "OT/Calib"
    OTt0OnlineClbrAlg.XMLFilePath = "/group/online/alignment/OT/Calib"
    OTt0OnlineClbrAlg.OutputLevel = MSG_INFO
    OTt0OnlineClbrAlg.UseClockPhase = (partition != "FEST")
    OTt0OnlineClbrAlg.InitialTime = initialTime
    OTt0OnlineClbrAlg.Threshold = 0.1
    OTt0OnlineClbrAlg.MaxDifference = 2
    # Keep analysis task going.
    OTt0OnlineClbrAlg.StopAlgSequence = False

    OTGaudiSeq.Members += [OTt0OnlineClbrAlg]
    OTGaudiSeq.IgnoreFilterPassed = True

    ## Configure saving of histograms
    from Configurables import UpdateAndReset
    ur = UpdateAndReset()
    ur.saveHistograms = 1
    ApplicationMgr().TopAlg.insert(0, ur)

    from Configurables import MonitorSvc
    MonitorSvc().disableDimPropServer = 1
    MonitorSvc().disableDimCmdServer = 1
    #import OnlineEnv
    MonitorSvc().ExpandCounterServices = 0
    MonitorSvc().ExpandNameInfix = "<part>_x_<program>/"
    #MonitorSvc().PartitionName = OnlineEnv.PartitionName
    MonitorSvc().PartitionName = partition
    MonitorSvc().ProgramName = "OTOnlineCalib_0"

    # setup the histograms and the monitoring service
    #ApplicationMgr().ExtSvc.append( 'MonitorSvc' )
    from Configurables import RootHistCnv__PersSvc
    RootHistCnv__PersSvc().OutputEnabled = False

    ApplicationMgr().TopAlg += [OTGaudiSeq]
    ApplicationMgr().EvtSel = "NONE"
    ApplicationMgr().ExtSvc += ["LHCb::PublishSvc", "MonitorSvc", "IncidentSvc"]
    ApplicationMgr().Runable = "LHCb::OnlineRunable/Runable"

    from Configurables import CondDB
    conddb = CondDB()
    conddb.Tags["ONLINE"] = 'fake'
    conddb.IgnoreHeartBeat = True
    conddb.UseDBSnapshot = True
    conddb.DBSnapshotDirectory = "/group/online/hlt/conditions"

    from Configurables import EventClockSvc, FakeEventTime, EventDataSvc
    ecs = EventClockSvc()
    ecs.InitialTime = initialTime
    ecs.addTool(FakeEventTime, "EventTimeDecoder")
    ecs.EventTimeDecoder.StartTime = initialTime
    ecs.EventTimeDecoder.TimeStep = 10
    EventDataSvc().ForceLeaves = True

    # Configure DB tags and per-run conditions to be the same as what the HLT1
    # reconstruction farm uses. This is done by directly importing the python
    # file, to ensure the script can also start when LHCb is running passthrough.
    conddb.EnableRunChangeHandler = True
    conddb.RunChangeHandlerConditions = {
        'LHCb/2015/%d/ot.xml': ["Conditions/Calibration/OT/CalibrationGlobal"]
    }

    from Configurables import LHCbApp
    import ConditionsMap
    LHCbApp().CondDBtag = ConditionsMap.CondDBTag
    LHCbApp().DDDBtag = ConditionsMap.DDDBTag
    # LHCbApp().CondDBtag = 'cond-20150409-2'
    # LHCbApp().DDDBtag = 'dddb-20150119-3'
    LHCbApp().DataType = '2015'
Escher().Simulation = False
Escher().SpecialData += ["earlyData"]
Escher().InputType = "MDF"
#Escher().InputType = "DST"

#-- Use latest 2009 database tags for real data
#LHCbApp().DDDBtag = "head-20090330"
#LHCbApp().CondDBtag = "head-20090402"
LHCbApp().DDDBtag = "default"
LHCbApp().CondDBtag = "default"
LHCbApp().DDDBtag = "HEAD"
LHCbApp().CondDBtag = "HEAD"

from Configurables import EventClockSvc
EventClockSvc().InitialTime = 1270079584012864000

from Configurables import (CondDB, CondDBAccessSvc)
cdb = CondDB()
#cdb.PartitionConnectionString["ONLINE"] = "sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/ONLINE-201004.db/ONLINE"
#cdb.Tags["ONLINE"] = ""

# maybe it works if we read it as a layer?
myOnline = CondDBAccessSvc('MyOnline')
myOnline.ConnectionString = 'sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/ONLINE-201004.db/ONLINE'
CondDB().addLayer(myOnline)

otCalib = CondDBAccessSvc('OTCalib')
#otCalib.ConnectionString = 'sqlite_file:/afs/cern.ch/user/a/akozlins/public/OT/LHCBCOND/ModuleT0s_2.7ns_3.35ns_180310.db/LHCBCOND'
otCalib.ConnectionString = 'sqlite_file:/afs/cern.ch/user/a/akozlins/public/OT/LHCBCOND/ModuleT0s_69648_140410.db/LHCBCOND'
CondDB().addLayer(otCalib)
LHCbApp().DDDBtag = "default"
LHCbApp().CondDBtag = "default"
LHCbApp().DDDBtag = "HEAD"
LHCbApp().CondDBtag = "HEAD"

from Configurables import (CondDB, CondDBAccessSvc)
otCalib = CondDBAccessSvc('OTCalib')
otCalib.ConnectionString = 'sqlite_file:/afs/cern.ch/user/a/akozlins/public/OT/LHCBCOND/Collision09_OT_ModuleT0s_220110_sigma4ns.db/LHCBCOND'
CondDB().addLayer(otCalib)

# Latest cosmic run, with CALO, OT and (!!) RICH2 (35569 events)
Escher().DatasetName = 'collisions'
Escher().InputType = 'DST'

from Configurables import EventClockSvc
EventClockSvc().InitialTime = 1260495107691392000

prefix = 'PFN:castor:/castor/cern.ch/user/t/truf/data_2009/'
data = [
    '00005727_00000001_00000011_2_nolumi.dst',
    '00005727_00000012_00000026_2_nolumi.dst',
    '00005727_00000027_00000039_2_nolumi.dst',
    '00005727_00000040_00000052_2_nolumi.dst',
    '00005727_00000053_00000057_2_nolumi.dst',
    '00005730_00000001_00000002_2_nolumi.dst',
    '00005731_00000001_00000056_2_nolumi.dst'
]

# copy the files to /pool if it exists
import os
if os.path.isdir('/pool/spool/'):
from Gaudi.Configuration import *
import os

# Use Oliver's XML DDDB describing the FT detector
#DDDBConf().DbRoot = "/afs/cern.ch/user/o/ogruenbe/public/FT_upgrade/myDDDB-LHCb-Feb2012/lhcb.xml"
#-- the above is a moving target, therefore use the following snapshot:
DDDBConf().DbRoot = "/afs/cern.ch/user/p/phopchev/public/FT/DDDBSlice_FT_v3/lhcb.xml"
CondDB().Tags['DDDB'] = 'HEAD'

lhcbApp = LHCbApp()
lhcbApp.Simulation = True
'''
lhcbApp.DataType = '2011'
lhcbApp.DDDBtag = "head-20110914"
lhcbApp.CondDBtag = "sim-20111020-vc-md100"
#dataDir = os.environ['PANORAMIXDATA']
#EventSelector().Input = ["DATAFILE='PFN:"+dataDir+"/2011_Bs2DsmuX.dst' TYP='POOL_ROOTTREE'"]
'''
lhcbApp.DataType = "2011"
lhcbApp.DDDBtag = "MC11-20111102"
lhcbApp.CondDBtag = "sim-20111111-vc-md100"
EventSelector().Input = [
    "DATAFILE='/castor/cern.ch/user/o/ogruenbe/Bs_mumu_v3.sim' TYP='POOL_ROOTTREE'"
]

### Set a fake event time to avoid useless ERROR messages from the EventClockSvc
### (no DAQ/RawEvent as it is MC)
from Configurables import EventClockSvc
EventClockSvc().EventTimeDecoder = 'FakeEventTime'
#LHCbApp().CondDBtag = "HEAD"
LHCbApp().DDDBtag = 'head-20110823'
LHCbApp().CondDBtag = 'head-20110901'

from Configurables import (CondDB, CondDBAccessSvc)
cdb = CondDB()
#cdb.PartitionConnectionString["ONLINE"] = "sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/ONLINE-201103.db/ONLINE"
#cdb.Tags["ONLINE"] = "fake"

# maybe it works if we read it as a layer?
myOnline = CondDBAccessSvc('MyOnline')
myOnline.ConnectionString = 'sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/ONLINE-2011.db/ONLINE'
CondDB().addLayer(myOnline)

#importOptions("$APPCONFIGOPTS/DisableLFC.py")
cdb.UseOracle = False
cdb.DisableLFC = True

import os
runnr = os.environ['RUNNR']
filenames = ['/pool/spool/wouter/dimuons_%s.dst' % runnr]
for f in filenames:
    fullname = "DATAFILE='" + f + "' TYP='POOL_ROOTTREE' OPT='READ'"
    EventSelector().Input.append(fullname)
print "EventSelector.Input:", EventSelector().Input

from Configurables import EventClockSvc
#EventClockSvc().InitialTime = 1314000149027776000
EventClockSvc().InitialTime = int(os.environ['INITIALTIME'])
from DAQSys.Decoders import DecoderDB
for i, v in DecoderDB.iteritems():
    v.Properties["OutputLevel"] = VERBOSE
    if "Hlt" in i and "ReportsDecoder" in i:
        v.Active = False
    #v.Inputs = {"InputRawEventLocation": "DAQ/RawEvent"}
    for b in ["UT", "FT", "FTCluster", "VP", "VL"]:
        if b in v.Banks:
            v.Active = False

DecoderDB["MuonRec"].Active = False
DecoderDB["createODIN"].Active = False
#DecoderDB["OdinTimeDecoder/ToolSvc.OdinTimeDecoder"].Active = True

from Configurables import EventClockSvc, OdinTimeDecoder, ODINDecodeTool
ecs = EventClockSvc()
ecs.addTool(OdinTimeDecoder, 'EventTimeDecoder')
ecs.EventTimeDecoder.addTool(ODINDecodeTool)
ecs.EventTimeDecoder.ODINDecodeTool.RawEventLocations = ['Crazy/RawEvent']

DecodeRawEvent().OverrideInputs = 999

from Configurables import GaudiSequencer
#DecodeRawEvent().Sequencer = GaudiSequencer("SPAM")
for i, v in DecoderDB.iteritems():
    if v.Active:
        GaudiSequencer("SPAM").Members.append(v.setup())

from Configurables import StoreExplorerAlg
StoreExplorerAlg().Load = True
def rawDataToNtuple(options):
    # print options
    required_options = [
        "runNumber", "start", "end", "outputdir", "nEvtsPerStep", "totsteps"
    ]

    for check_opts in required_options:
        if not options.has_key(check_opts):
            print "Please specify minimal options!"
            print "Option \'" + check_opts + "\' is missing!"
            sys.exit()

    start = options["start"]
    end = options["end"]
    runNumber = options["runNumber"]
    outputdir = options["outputdir"]
    totsteps = options["totsteps"]
    nEvtsPerStep = options["nEvtsPerStep"]

    from Configurables import DDDBConf, CondDB, CondDBAccessSvc, NTupleSvc, EventClockSvc, Brunel, LHCbApp

    # if options.has_key("IgnoreHeartBeat"):
    #     CondDB().IgnoreHeartBeat = options["IgnoreHeartBeat"]

    if options.has_key("addCondDBLayer"):
        altag = "HEAD"
        if options.has_key("addCondDBLayer_tag"):
            altag = options["addCondDBLayer_tag"]
        CondDB().addLayer(
            CondDBAccessSvc("myCond",
                            ConnectionString="sqlite_file:" + options["addCondDBLayer"] + "/LHCBCOND",
                            DefaultTAG=altag))

    # Needed so as to not get DB errors - should be fixed properly at some point
    CondDB().IgnoreHeartBeat = True
    CondDB().EnableRunStampCheck = False

    # customDBs = glob.glob('/group/rich/ActiveDBSlices/*.db')
    # for db in customDBs:
    #     CondDB().addLayer( CondDBAccessSvc(os.path.basename(db), ConnectionString="sqlite_file:"+db+"/LHCBCOND", DefaultTAG="HEAD") )

    # importOptions('$STDOPTS/DecodeRawEvent.py')
    #importOptions("$STDOPTS/RootHist.opts")
    #importOptions("$STDOPTS/RawDataIO.opts")
    #DEBUG by DisplayingHitMaps=False

    from Configurables import MDMRich1Algorithm
    mdmAlg = MDMRich1Algorithm("Rich1MDCS")
    mdmAlg.NumberOfEventsPerStep = nEvtsPerStep
    mdmAlg.StoreHistos = False
    mdmAlg.DEBUG = False

    if options.has_key("StoreHistos"):
        mdmAlg.StoreHistos = options["StoreHistos"]
    if options.has_key("DEBUG"):
        mdmAlg.DEBUG = options["DEBUG"]

    print "start step: " + str(start)
    print "stop step: " + str(end)
    print "processing " + str(nEvtsPerStep * (end - start)) + " events"

    tuplestring = "NTuple_Run%i_Steps%04d-%04d.root" % (runNumber, start, end)
    if options.has_key("TupleName"):
        tuplestring = options["TupleName"]

    histoname = "Histos_Run%i_Steps%04d-%04d.root" % (runNumber, start, end)
    if options.has_key("HistoName"):
        histoname = options["HistoName"]

    if outputdir != "":
        tuplestring = "%s/%s" % (outputdir, tuplestring)
        histoname = "%s/%s" % (outputdir, histoname)

    tuplename = "RICHTUPLE1 DATAFILE=\'%s\' TYP=\'ROOT\' OPT=\'NEW\'" % (tuplestring)

    # Currently put in manually. Edit here to use the correct DDDB and CondDB tags
    LHCbApp().DDDBtag = "dddb-20150724"
    LHCbApp().CondDBtag = "cond-20160123"
    if options.has_key("DDDBtag"):
        LHCbApp().DDDBtag = options["DDDBtag"]
    if options.has_key("CondDBtag"):
        LHCbApp().CondDBtag = options["CondDBtag"]

    #customDBs = glob.glob('/group/rich/ActiveDBSlices/*.db')
    #for db in customDBs:
    #    CondDB().addLayer( CondDBAccessSvc(os.path.basename(db), ConnectionString="sqlite_file:"+db+"/LHCBCOND", DefaultTAG="HEAD") )

    ApplicationMgr().TopAlg += [mdmAlg]
    ApplicationMgr().ExtSvc += ['DataOnDemandSvc']
    ApplicationMgr().EvtMax = end * nEvtsPerStep

    # Timing information for the application
    from Configurables import AuditorSvc, SequencerTimerTool
    ApplicationMgr().ExtSvc += ['AuditorSvc']
    ApplicationMgr().AuditAlgorithms = True
    AuditorSvc().Auditors += ['TimingAuditor']
    SequencerTimerTool().OutputLevel = 4

    LHCbApp().TimeStamp = True

    HistogramPersistencySvc().OutputFile = histoname
    NTupleSvc().Output = [tuplename]

    EventSelector().PrintFreq = 10
    EventSelector().PrintFreq = nEvtsPerStep
    EventSelector().FirstEvent = start * nEvtsPerStep
    print "First event: " + str(start * nEvtsPerStep)
    print "Last event: " + str(end * nEvtsPerStep)

    # get data; look in the local cluster first, then on CASTOR
    isLocal = True
    if options.has_key("isLocal"):
        isLocal = options["isLocal"]

    DATA_and_Year = getData(runNumber, start, end, totsteps, isLocal)
    DATA = DATA_and_Year["DATA"]
    if not len(DATA) > 0:
        print "Data not found in local, switching to CASTOR"
        DATA_and_Year = getData(runNumber, start, end, totsteps, not isLocal)
        DATA = DATA_and_Year["DATA"]
    if not len(DATA) > 0:
        print "DATA not found anywhere!"
        sys.exit()

    LHCbApp().DataType = str(DATA_and_Year["year"])
    EventSelector().Input = DATA
    EventClockSvc().EventTimeDecoder = "OdinTimeDecoder"

    appMgr = GaudiPython.AppMgr()
    appMgr.HistogramPersistency = "ROOT"
    #appMgr.OutputLevel = DEBUG
    evtSvc = appMgr.evtSvc()
    esel = appMgr.evtsel()
    esel.PrintFreq = nEvtsPerStep

    appMgr.initialize()
    appMgr.run(nEvtsPerStep * (end - start))
    appMgr.stop()
    appMgr.finalize()
def setupOnline():
    """
    Setup the online environment: buffer managers, event serialisation, etc.

    @author M.Frank
    """
    from Configurables import LHCb__FILEEvtSelector as es
    from Configurables import LHCb__AlignDrv as Adrv
    from Configurables import EventClockSvc

    Online = importOnline()

    app = Gaudi.ApplicationMgr()
    app.AppName = ''
    app.HistogramPersistency = 'ROOT'
    app.SvcOptMapping.append('LHCb::FILEEvtSelector/EventSelector')
    app.SvcOptMapping.append('LHCb::FmcMessageSvc/MessageSvc')
    #app.EvtMax = 10000

    Online.rawPersistencySvc()

    evtloop = Configs.EventLoopMgr('LHCb::OnlineRunable/EmptyEventLoop')
    evtloop.Warnings = False
    evtloop.EvtSel = "NONE"
    app.EventLoop = evtloop
    app.HistogramPersistency = "NONE"
    # runable = Configs.Runable
    # runable.MEPManager = ""
    app.AuditAlgorithms = False

    Configs.MonitorSvc().OutputLevel = MSG_ERROR
    Configs.MonitorSvc().UniqueServiceNames = 1
    Configs.RootHistCnv__PersSvc("RootHistSvc").OutputLevel = MSG_ERROR
    app.OutputLevel = MSG_INFO

    def __propAtt(att, fr, to, d=None):
        if hasattr(fr, att):
            setattr(to, att, getattr(fr, att))
        elif d:
            setattr(to, att, d)

    from Configurables import AlignOnlineIterator as Aiter
    ad = Adrv("AlignDrv")
    ad.PartitionName = Online.PartitionName
    ad.FitterClass = "AlignOnlineIterator"
    ad.FitterName = "AlIterator"
    __propAtt('RefFileName', Online, ad)

    ## The Alignment driver is the runable
    app.Runable = ad.getType() + "/" + ad.getName()

    ad.addTool(Aiter, ad.FitterName)
    ai = ad.AlIterator
    ai.PartitionName = Online.PartitionName
    ai.ASDFilePattern = "_Escher.out"
    ai.OutputLevel = 3
    ai.MaxIteration = MAX_NITER
    ai.ServiceInfix = ""
    ai.ReferenceRunNr = Online.DeferredRuns[0] if hasattr(Online, "DeferredRuns") else -1

    runType = os.environ.get('RUN_TYPE', 'Unknown')
    runType = runType.split('|')[-1].strip() if '|' in runType else runType
    if runType == 'Tracker':
        sds = ['TT', 'IT', 'OT']
        ai.RunType = runType
    elif runType in ('Velo', 'Muon'):
        sds = [runType]
        ai.RunType = runType
    else:
        print 'WARNING: RUN_TYPE is not one of Velo, Tracker or Muon. Will assume all subdetectors'
        sds = ['Velo', 'TT', 'IT', 'OT', 'Muon']
    ai.SubDetectors = sds

    # for attr, default in [('ASDDir', "/group/online/alignment/EscherOut/"),
    #                       ('OnlineXmlDir', "/group/online/alignment"),
    #                       ('AlignXmlDir', "/group/online/AligWork")]:
    for attr, default in [('ASDDir', "/calib/align/EscherOut/"),
                          ('OnlineXmlDir', "/group/online/alignment"),
                          ('AlignXmlDir', "/group/online/AligWork")]:
        __propAtt(attr, Online, ai, default)

    initialTime = long(time.time() * 1e9)
    clkSvc = EventClockSvc()
    clkSvc.InitialTime = initialTime
    from Configurables import FakeEventTime
    clkSvc.EventTimeDecoder = "FakeEventTime"
    clkSvc.addTool(FakeEventTime, "FakeEventTime")
    clkSvc.FakeEventTime.StartTime = initialTime
# values in ns (so multiply values from the above link by 1e9)
from Configurables import EventClockSvc
#EventClockSvc( InitialTime = 1274313600000000000 )  # 20th April 2010
#EventClockSvc( InitialTime = 1287968400000000000 )  # 25th Oct 2010 (1am)
#EventClockSvc( InitialTime = 1306879200000000000 )  # 1st June 2011
#EventClockSvc( InitialTime = 1317460149000000000 )  # 1st Oct 2011
#EventClockSvc( InitialTime = 1319155200000000000 )  # 21st Oct 2011
#EventClockSvc( InitialTime = 1341100800000000000 )  # 1st July 2012
#EventClockSvc( InitialTime = 1350259200000000000 )  # 15th Oct 2012
#EventClockSvc( InitialTime = 1351123200000000000 )  # 25th Oct 2012
#EventClockSvc( InitialTime = 1351645200000000000 )  # 31st Oct 2012
#EventClockSvc( InitialTime = 1352764800000000000 )  # 13th Nov 2012
#EventClockSvc( InitialTime = 1354233600000000000 )  # 30th Nov 2012
#EventClockSvc( InitialTime = 1355533200000000000 )  # 15th Dec 2012
#EventClockSvc( InitialTime = 1359072000000000000 )  # 25th Jan 2013
EventClockSvc(InitialTime=1433635200000000000)  # 7th June 2015

# Timestamps in messages
LHCbApp().TimeStamp = True

# No output files
Brunel().OutputType = "None"

# Only tracking and RICH reco
# Run 1
#Brunel().RecoSequence = ["Decoding","VELO","TT","IT","OT","Tr","Vertex","RICH"]
# Run 2
Brunel().RecoSequence = [
    "Decoding", "VELO", "TT", "IT", "OT", "TrHLT1", "Vertex", "TrHLT2", "RICH"
]
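# Small sketch (not part of the original options) of how such nanosecond
# timestamps can be computed from a calendar date, assuming UTC:
from datetime import datetime
import calendar
ns = calendar.timegm(datetime(2015, 6, 7).utctimetuple()) * 1000000000
# -> 1433635200000000000, matching the "7th June 2015" value above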