Example #1
    def _setRawEventLocations(self):
        """
        Copied from Moore, check the raw event locations are set correctly
        """
        if (not DecodeRawEvent().isPropertySet("OverrideInputs")
            ) or DecodeRawEvent().getProp("OverrideInputs") is None:
            #default version which comes out of the Pit,
            #currently just DAQ/RawEvent
            DecodeRawEvent().OverrideInputs = "Pit"
Example #2
    def __apply_configuration__(self):
        DecodeRawEvent()

        # just to make sure we don't forget
        if self.getProp("SimplifiedGeom"):
            TrackSys().ExpertTracking += ['simplifiedGeometry']
        TrackSys().ExpertTracking += ['kalmanSmoother']

        # Set up the database. Normally done from LHCbApp
        self.defineDB()

        # Set up transient store, if not yet done. This is normally done from LHCbApp
        EventDataSvc(ForceLeaves=True, RootCLID=1, EnableFaultHandler=True)

        # this is normally done from RecoTracking.py
        if TrackSys().fieldOff():
            from Configurables import MagneticFieldSvc
            MagneticFieldSvc().UseConstantField = True
            MagneticFieldSvc().UseConditions = False
            MagneticFieldSvc().ScaleFactor = 0

        # this is normally done from Brunel
        importOptions("$GAUDIPOOLDBROOT/options/GaudiPoolDbRoot.opts")

        #from Configurables import ApplicationMgr, HistogramPersistencySvc
        #ApplicationMgr().HistogramPersistency = 'ROOT'
        #HistogramPersistencySvc().OutputFile = 'alignhistos.root'

        self.sequencers()
Example #3
    def __apply_configuration__(self):
        #GaudiKernel.ProcessJobOptions.PrintOff()
        
        from GaudiKernel.Configurable import ConfigurableGeneric as RFileCnv
        RFileCnv('RFileCnv').GlobalCompression = "LZMA:6"

        ############## Set other properties ###########
        self._safeSet( LHCbApp(), ['EvtMax','SkipEvents','Simulation', 'DataType' , 'CondDBtag','DDDBtag'] )
        ApplicationMgr().AppName="Tesla, utilising DaVinci"
        #
        if self.getProp('Mode') == "Online":
            self.setProp('WriteFSR',True)
            self._configureForOnline()
        else:
            DecodeRawEvent().DataOnDemand=True
            RecombineRawEvent()
            if self.getProp('Simulation'):
                self._unpackMC()
        #
        self._configureOutput()
        #
        from Configurables import EventSelector
        EventSelector().PrintFreq = 1000

        # Add monitors if they are there
        if len(self.getProp('Monitors'))>0:
            self._configureHistos()

        if self.getProp('KillInputTurbo'):
            enk = EventNodeKiller('KillTurbo')
            enk.Nodes = [ "Turbo" ]
            ApplicationMgr().TopAlg.insert( 0,  enk.getFullName() ) 

        ApplicationMgr().TopAlg+=[self.teslaSeq]
Example #4
    def decodingSeq(self, outputLevel=INFO):
        if not allConfigurables.get("DecodingSeq"):
            if outputLevel == VERBOSE:
                print "VERBOSE: Decoding Sequencer not defined! Defining!"
            decodingSequencer = GaudiSequencer("DecodingSeq")
            decodingSequencer.MeasureTime = True

            from DAQSys.Decoders import DecoderDB
            from DAQSys.DecoderClass import decodersForBank
            DecodeRawEvent().DataOnDemand = False
            decs = []
            decs += decodersForBank(DecoderDB, "ODIN")
            decs += decodersForBank(DecoderDB, "Velo")
            decs += decodersForBank(DecoderDB, "IT")
            decs += decodersForBank(DecoderDB, "TT")
            for d in decs:
                d.Properties["OutputLevel"] = outputLevel

            decodingSequencer.Members = [d.setup() for d in decs]

            ## ST Decoding
            from Configurables import (STOfflinePosition)

            itClusterPosition = STOfflinePosition("ITClusterPosition",
                                                  OutputLevel=outputLevel)
            itClusterPosition.ErrorVec = [0.22, 0.11, 0.24, 0.20]

            ## Muons (not yet)

            return decodingSequencer
        else:
            if outputLevel == VERBOSE:
                print "VERBOSE: Decoding Sequencer already defined!"
            return allConfigurables.get("DecodingSeq")
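
A minimal usage sketch (not part of the scraped example above): scheduling the sequencer returned by decodingSeq, following the ApplicationMgr().TopAlg pattern used in the later examples. The wiring is an assumption; only the "DecodingSeq" name comes from the snippet.

# Illustrative only: run the decoding sequence built above as a top-level algorithm
from Gaudi.Configuration import *
from Configurables import GaudiSequencer

decoding = GaudiSequencer("DecodingSeq")  # same instance as configured above
ApplicationMgr().TopAlg += [decoding]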
Example #5
    def _setRawEventLocations(self):
        """
        Check that I can set DecodeRawEvent.py options correctly.
        """
        #if not set, I will override the inputs with the "Pit locations"
        if (not DecodeRawEvent().isPropertySet("OverrideInputs")
            ) or DecodeRawEvent().getProp("OverrideInputs") is None:
            #default version which comes out of the Pit,
            #currently just DAQ/RawEvent
            DecodeRawEvent().OverrideInputs = "Pit"
            return
        from RawEventCompat.Configuration import _checkv
        from Configurables import RawEventFormatConf
        RawEventFormatConf().loadIfRequired()
        #if set explicitly to the pit locations, all is good already
        if _checkv(
                DecodeRawEvent().getProp("OverrideInputs")) == _checkv("Pit"):
            return
        #else it's a screw-up
        if self.getProp("RunOnline"):
            raise ValueError(
                "When running in Online Mode, you'd better not try and reset the RawEventLocations, this is baaaad"
            )
        if self.getProp("UseTCK"):
            raise ValueError(
                "You are trying to reset RawEvent inputs/outputs when running from a TCK, this won't work because they are fixed inside the TCK anyway! (they're part of the Hlt sequence), try adding RecombineRawEvent() or RawEventJuggler() instead, or some other such trick."
            )
        if self.getProp("generateConfig"):
            raise ValueError(
                "When generating a TCK, you'd better not be trying to overwrite the RawEvent input/output locations, this would be a disaster!, try adding RecombineRawEvent() or RawEventJuggler() instead or some other such trick."
            )
        # if input is MDF, RAW, DIGI, XDIGI, then raise an error,
        # these locations are always "DAQ/RawEvent"
        files = self.getProp('inputFiles')
        files = files + EventSelector().Input

        ext = files[0].split('.')[-1].strip().split('?')[0].strip().upper()
        if ext in ["MDF", "RAW", "DIGI", "XDIGI"]:
            raise ValueError(
                "When running from a DIGI, XDIGI or RAW file, the only raw event location is DAQ/RawEvent, but you're resetting it into"
                + DecodeRawEvent().getProp("OverrideInputs"))
Example #6
def configureEventTime():
    """
    Configure EventClockSvc to get event time from RecHeader first
    and then from ODIN in case of failure.
    Returns EventClockSvc()
    Author: Marco Clemencic.
    """
    #turn off setting that's done in DecodeRawEvent first
    DecodeRawEvent().EvtClockBank = ""
    ecs = EventClockSvc()
    ecs.addTool(TimeDecoderList, "EventTimeDecoder")
    tdl = ecs.EventTimeDecoder
    tdl.addTool(RecEventTime)
    tdl.addTool(OdinTimeDecoder)
    tdl.Decoders = [tdl.RecEventTime, tdl.OdinTimeDecoder]
    return ecs
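
A hedged usage sketch for the helper above: configure the event-time decoding chain and make sure the clock service is known to the ApplicationMgr. The explicit ExtSvc registration is an assumption and may be redundant when EventClockSvc is created elsewhere in the job; it mirrors the DataOnDemandSvc registration in Example #7.

# Illustrative usage of configureEventTime(); the ExtSvc registration is assumed
from Gaudi.Configuration import *

ecs = configureEventTime()
if ecs not in ApplicationMgr().ExtSvc:
    ApplicationMgr().ExtSvc.append(ecs)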
Example #7
    def __apply_configuration__(self):

        #print "WAAAAAAAAAAAAAHHHHHHHHHHHHHHHH"

        ############## Set other properties ###########
        self._safeSet(LHCbApp(), [
            'EvtMax', 'SkipEvents', 'Simulation', 'DataType', 'CondDBtag',
            'DDDBtag'
        ])

        ApplicationMgr().AppName = "L0App within Moore"

        ############## The raw event ##################

        from Configurables import DataOnDemandSvc
        dod = DataOnDemandSvc()
        if dod not in ApplicationMgr().ExtSvc:
            ApplicationMgr().ExtSvc.append(dod)

        DecodeRawEvent().DataOnDemand = True

        importOptions("$L0TCK/L0DUConfig.opts")

        self._setRawEventLocations()

        ############## The L0 Part  ###################

        from DAQSys.Decoders import DecoderDB
        l0du = DecoderDB["L0DUFromRawTool"]
        l0du.Properties["StatusOnTES"] = False

        #configure L0 Sequence
        l0seq = GaudiSequencer("L0")
        ApplicationMgr().TopAlg += [l0seq]
        L0TCK = '0x%s' % self.getProp('TCK')[-4:]

        L0Conf().setProp("TCK", L0TCK)
        L0Conf().setProp("L0Sequencer", l0seq)
        self.setOtherProps(L0Conf(), ["DataType"])

        if (self.getProp("ReplaceL0Banks")):
            L0Conf().setProp("ReplaceL0BanksWithEmulated", True)
        else:
            L0Conf().setProp("SimulateL0", True)

        #done, that was quite easy, now for the output files
        self._configureOutput()
Example #8
    def configureInput(self, inputType):
        """
        Tune initialisation according to input type
        """

        # POOL Persistency, now in LHCbApp
        #importOptions("$GAUDIPOOLDBROOT/options/GaudiPoolDbRoot.opts")

        # By default, Escher only needs to open one input file at a time
        # Only set to zero if not previously set to something else.
        if not IODataManager().isPropertySet("AgeLimit"):
            IODataManager().AgeLimit = 0

        if inputType in ["XDST", "DST", "RDST", "ETC"]:
            # Kill knowledge of any previous Brunel processing
            from Configurables import (TESCheck, EventNodeKiller)
            InitReprocSeq = GaudiSequencer("InitReprocSeq")
            if (self.getProp("WithMC") and inputType in ["XDST", "DST"]):
                # Load linkers, to kill them (avoid appending to them later)
                InitReprocSeq.Members.append("TESCheck")
                TESCheck().Inputs = ["Link/Rec/Track/Best"]
            InitReprocSeq.Members.append("EventNodeKiller")
            EventNodeKiller().Nodes = ["pRec", "Rec", "Raw", "Link/Rec"]

        if inputType == "ETC":
            raise DeprecationWarning("ETC are no longer supported by LHCb software")
            from Configurables import TagCollectionSvc
            ApplicationMgr().ExtSvc += [TagCollectionSvc("EvtTupleSvc")]
            # Read ETC selection results into TES for writing to DST
            IODataManager().AgeLimit += 1

        #if inputType in [ "MDF", "RDST", "ETC" ]:
        #    # In case raw data resides in MDF file
        #    EventPersistencySvc().CnvServices.append("LHCb::RawDataCnvSvc")
        DecodeRawEvent()

        if self.getProp("UseFileStager"):
            import os, getpass
            from FileStager.Configuration import configureFileStager
            from Configurables import FileStagerSvc
            configureFileStager(keep=True)
            targetdir = '/tmp/' + getpass.getuser() + '/stagedfiles'
            if os.path.isdir('/pool/spool/'):
                targetdir = '/pool/spool/' + getpass.getuser() + '/stagedfiles'
            if not os.path.isdir(targetdir):
                os.makedirs(targetdir)
            FileStagerSvc().Tempdir = targetdir
Example #9
def execute():
    RecombineRawEvent()
    DecodeRawEvent().DataOnDemand = True

    from Configurables import ConfigTarFileAccessSvc
    ConfigTarFileAccessSvc().File = "config.tar"

    #Tesla().OutputLevel = DEBUG
    Tesla().TriggerLine = "Hlt2DiMuonJPsi"
    Tesla().ReportVersion = 2
    #Tesla().EvtMax = -1

    #from GaudiConf.IOHelper import IOHelper
    #ioh = IOHelper()
    #ioh.setupServices()
    #ioh.inputFiles(["EarlyEvents-Extended-L0-Turbo.xdst"])
    Tesla().outputFile = "EarlyEvents-Extended-L0-Turbo.xdst"
Example #10
from Configurables import Tesla
from Gaudi.Configuration import *

from Configurables import RecombineRawEvent, DecodeRawEvent
RecombineRawEvent()
DecodeRawEvent().DataOnDemand = True

from Configurables import ConfigTarFileAccessSvc
ConfigTarFileAccessSvc().File = '/afs/cern.ch/work/s/sbenson/public/forTeslaExtendedReps/config.tar'

Tesla().TriggerLine = "Hlt2DiMuonJPsi"
Tesla().ReportVersion = 2
Tesla().EvtMax = -1

from GaudiConf.IOHelper import IOHelper
ioh = IOHelper()
ioh.setupServices()
ioh.inputFiles(["/tmp/ikomarov/With_new_HLT.dst"])
Tesla().outputFile = "/tmp/ikomarov/Turbo.dst"
Example #11
from Gaudi.Configuration import *
from Configurables import DecodeRawEvent, LHCbApp
from Configurables import GaudiSequencer
from Configurables import EventClockSvc

LHCbApp()
#EventClockSvc(EventTimeDecoder = "OdinTimeDecoder")

mySeq = GaudiSequencer("Decoding")
mySeq.OutputLevel = VERBOSE
DecodeRawEvent().Sequencer = mySeq
ApplicationMgr().TopAlg = [mySeq]

#deactivate Upgrade banks

from DAQSys.Decoders import DecoderDB
for k in DecoderDB:
    if DecoderDB[k].Active and (('FT' in k) or ('UT' in k) or ('VL' in k)
                                or ('VP' in k)):
        DecoderDB[k].deactivate()

#configure L0TCKs
importOptions('$L0TCK/L0DUConfig.opts')
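
A purely illustrative check, modelled on the loop in Example #12, that could be appended to see which decoders survive the deactivation above; it is not part of the scraped options file.

# Diagnostic sketch (assumed addition): list the decoders still active
# after the Upgrade-bank deactivation above.
for k in DecoderDB:
    if DecoderDB[k].Active:
        print k, "->", DecoderDB[k].Banks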
Example #12
from Gaudi.Configuration import *
from Configurables import DecodeRawEvent

from DAQSys.Decoders import DecoderDB

for i,v in DecoderDB.iteritems():
    v.Properties["OutputLevel"]=VERBOSE
    if "Hlt" in i and "ReportsDecoder" in i:
        v.Active=False
        #v.Inputs={"InputRawEventLocation":"DAQ/RawEvent"}
    for b in ["UT","FT","FTCluster","VP","VL"]:
        if b in v.Banks:
            v.Active=False

DecoderDB["MuonRec"].Active=False

DecodeRawEvent().OverrideInputs=999
from Configurables import GaudiSequencer
#DecodeRawEvent().Sequencer=GaudiSequencer("SPAM")

for i,v in DecoderDB.iteritems():
    if v.Active:
       GaudiSequencer("SPAM").Members.append(v.setup())

from Configurables import StoreExplorerAlg

StoreExplorerAlg().Load=True

ApplicationMgr().TopAlg=[GaudiSequencer("SPAM"),StoreExplorerAlg()]
Example #13
    def dataOnDemand(self):
        """
        dataOnDemand service
        """
        dataOnDemand = DataOnDemandSvc()

        dataOnDemand.NodeMap['/Event/Rec'] = 'DataObject'
        dataOnDemand.NodeMap['/Event/Rec/Muon'] = 'DataObject'
        dataOnDemand.NodeMap['/Event/Rec/Rich'] = 'DataObject'
        dataOnDemand.NodeMap['/Event/Phys'] = 'DataObject'
        dataOnDemand.NodeMap['/Event/Relations/Phys'] = 'DataObject'

        # raw event
        DecodeRawEvent().DataOnDemand = True
        #require L0 just in case I need to do L0 decoding
        importOptions("$L0TCK/L0DUConfig.opts")

        # ANN PID recalibration
        if self.getProp("AllowPIDRecalib"):

            from Gaudi.Configuration import appendPostConfigAction

            datatype = self.getProp("DataType")

            # Default settings for calibration is off
            recoRegex = "v43r2(.*)"

            # If Run 1 (Reco14) or 2015 (Reco15) data, perform the
            # recalibration on the fly
            if datatype in ('2009', '2010', '2011', '2012', '2013', '2015'):

                def _ANNPIDReCalib_():

                    from Configurables import (DstConf, DataOnDemandSvc,
                                               ChargedProtoANNPIDConf,
                                               ChargedProtoParticleMapper,
                                               ApplicationVersionFilter)

                    # Sequence to fill
                    annPIDSeq = GaudiSequencer("ANNPIDSeq")

                    # Only rerun on Reco14 (Run1) and Reco15 (2015) samples
                    recoRegex = "(v43r2|v47r9|v48r2)(.*)"
                    annPIDSeq.Members += [
                        ApplicationVersionFilter(
                            name="ANNPIDRecoVersionFilter",
                            HeaderLocation="Rec/Header",
                            VersionRegex=recoRegex)
                    ]

                    # ANN PID Configurable
                    annPIDConf = ChargedProtoANNPIDConf("ReDoANNPID")

                    # Configure Configurable for recalibration of the DST charged protos
                    annPIDConf.DataType = self.getProp("DataType")
                    annPIDConf.RecoSequencer = annPIDSeq
                    annPIDConf.applyConf()

                    # Update the DoD sequence to run this at the end
                    chargedLoc = "/Event/Rec/ProtoP/Charged"
                    if chargedLoc in DataOnDemandSvc().AlgMap.keys():
                        chargedSeq = DataOnDemandSvc().AlgMap[chargedLoc]
                        chargedSeq.Members += [annPIDSeq]

                    # Now for uDSTs. Update the DoD mappers to run a custom one
                    # for charged Protos, and includes the recalibration
                    cppmapper = ChargedProtoParticleMapper(
                        "UnpackChargedPPsMapper")
                    # Clone the settings from the DST configurable
                    cppmapper.ANNPIDTune = annPIDConf.tune(annPIDConf.DataType)
                    cppmapper.TrackTypes = annPIDConf.TrackTypes
                    cppmapper.PIDTypes = annPIDConf.PIDTypes
                    # Again, only rerun the ANNPID on Reco14 data
                    cppmapper.VersionRegex = recoRegex
                    # Update the DoD mapper lists
                    DataOnDemandSvc().NodeMappingTools = [
                        cppmapper
                    ] + DataOnDemandSvc().NodeMappingTools
                    DataOnDemandSvc().AlgMappingTools = [
                        cppmapper
                    ] + DataOnDemandSvc().AlgMappingTools

                # Append post config action
                appendPostConfigAction(_ANNPIDReCalib_)
Example #14
    def _configureForOnline(self):
        #
        DecodeRawEvent().DataOnDemand=False
        writer=InputCopyStream( self.writerName )
        DstConf().setProp("SplitRawEventOutput", self.getProp("RawFormatVersion"))
        
        # Use RawEventJuggler to create the Turbo stream raw event format
        tck = "0x409f0045" # DUMMY
        TurboBanksSeq=GaudiSequencer("TurboBanksSeq")
        RawEventJuggler().TCK=tck
        RawEventJuggler().Input="Moore"
        RawEventJuggler().Output=self.getProp("RawFormatVersion")
        RawEventJuggler().Sequencer=TurboBanksSeq
        RawEventJuggler().WriterOptItemList=writer
        RawEventJuggler().KillExtraNodes=True
        RawEventJuggler().KillExtraBanks=True
        RawEventJuggler().KillExtraDirectories = True
        self.teslaSeq.Members += [TurboBanksSeq]
        
        # Begin Lumi configuration
        lumiSeq = GaudiSequencer("LumiSeq")
        #
        # Add ODIN decoder to LumiSeq ***
        from DAQSys.Decoders import DecoderDB
        CreateODIN=DecoderDB["createODIN"].setup()
        #********************************
        #
        # Main algorithm config
        lumiCounters = GaudiSequencer("LumiCounters")
        lumiCounters.Members+=[CreateODIN]
        lumiSeq.Members += [ lumiCounters ]
        LumiAlgsConf().LumiSequencer = lumiCounters
        LumiAlgsConf().OutputLevel = self.getProp('OutputLevel')
        LumiAlgsConf().InputType = "MDF"
        #
        # Filter out Lumi only triggers from further processing, but still write to output
        # Trigger masks changed in 2016, see LHCBPS-1486
        physFilterRequireMask = []
        lumiFilterRequireMask = []
        if self.getProp( "DataType" ) in ["2012","2015"]: # 2012 needed for nightlies tests.
            physFilterRequireMask = [ 0x0, 0x4, 0x0 ]
            lumiFilterRequireMask = [ 0x0, 0x2, 0x0 ]
        else:
            physFilterRequireMask = [ 0x0, 0x0, 0x80000000 ]
            lumiFilterRequireMask = [ 0x0, 0x0, 0x40000000 ]
        from Configurables import HltRoutingBitsFilter
        physFilter = HltRoutingBitsFilter( "PhysFilter", RequireMask = physFilterRequireMask )
        lumiFilter = HltRoutingBitsFilter( "LumiFilter", RequireMask = lumiFilterRequireMask )

        lumiSeq.Members += [ lumiFilter, physFilter ]
        lumiSeq.ModeOR = True
        #
        from Configurables import RecordStream
        FSRWriter = RecordStream( "FSROutputStreamDstWriter")
        FSRWriter.OutputLevel = INFO
        #
        # Sequence to be executed if physics sequence not called (nano events)
        notPhysSeq = GaudiSequencer("NotPhysicsSeq")
        notPhysSeq.ModeOR = True
        notPhysSeq.Members = [ physFilter ]
        writer.AcceptAlgs += ["LumiSeq","NotPhysicsSeq"]
        self.teslaSeq.Members += [lumiSeq, notPhysSeq]
Example #15
    def configureOutput(self, dstType, withMC, handleLumi):
        """
        Set up output stream
        """

        # Merge genFSRs
        if self.getProp("WriteFSR"):
            if self.getProp("MergeGenFSR"):
                GaudiSequencer("OutputDSTSeq").Members += ["GenFSRMerge"]
                                                                                                                                              
        if dstType in [ "XDST", "DST", "LDST", "RDST" ]:
            writerName = "DstWriter"
            packType  = self.getProp( "PackType" )

            # event output
            dstWriter = OutputStream( writerName )
            dstWriter.AcceptAlgs += ["Reco"] # Write only if Rec phase completed
            if handleLumi and self.getProp( "WriteLumi" ):
                dstWriter.AcceptAlgs += ["LumiSeq"] # Write also if Lumi sequence completed
            # set verbosity
            if self.getProp( "ProductionMode" ):
                if not dstWriter.isPropertySet( "OutputLevel" ):
                    dstWriter.OutputLevel = INFO
                if self.getProp("WriteFSR"):
                    FSRWriter = RecordStream( "FSROutputStreamDstWriter")
                    if not FSRWriter.isPropertySet( "OutputLevel" ):
                        FSRWriter.OutputLevel = INFO
            # Suppress spurious error when reading POOL files without run records
            if self.getProp("WriteFSR"):
                if self.getProp( "InputType" ).upper() not in [ "MDF" ]:
                    from Configurables import FileRecordDataSvc
                    FileRecordDataSvc().OutputLevel = FATAL

            if dstType == "XDST":
                # Allow multiple files open at once (SIM,DST,DIGI etc.)
                IODataManager().AgeLimit += 1

            if dstType in ["DST","XDST","LDST"] and packType not in ["MDF"]:
                jseq=GaudiSequencer("RawEventSplitSeq")
                #################################
                # Split the Raw Event for the DST
                # Use the RawEventJuggler.
                # Not delegated to DSTConf.
                # Some information must be shared with DSTConf
                #################################
                juggler=RawEventJuggler()
                juggler.Sequencer=jseq
                dstseq=GaudiSequencer("OutputDSTSeq")
                dstseq.Members.append(jseq)
                # Set the output version if not already overwritten
                if juggler.isPropertySet("Output") and juggler.getProp("Output") is not None:
                    #it's already set
                    pass
                else:
                    juggler.setProp("Output",self.getProp("SplitRawEventOutput"))
                #set the input version, could come from several places
                if self.isPropertySet("SplitRawEventInput") and self.getProp("SplitRawEventInput") is not None:
                    #if set, take it from Brunel()
                    juggler.setProp("Input",self.getProp("SplitRawEventInput"))
                    #otherwise use the setting of the juggler if it is set
                elif juggler.isPropertySet("Input") and juggler.getProp("Input") is not None:
                    pass
                    #else find it from DecodeRawEvent
                elif DecodeRawEvent().isPropertySet("OverrideInputs") and DecodeRawEvent().getProp("OverrideInputs") is not None:
                    juggler.setProp("Input",DecodeRawEvent().getProp("OverrideInputs"))
                    #else if I'm input with a DST, assume it is a Stripping20 type
                elif self._isReprocessing(self.getProp("InputType")):
                    juggler.setProp("Input",2.0)
                else:
                    #or set the default to whatever comes out of Moore by default
                    juggler.setProp("Input","Moore")
                
                #share information from the Juggler with DSTConf
                #always write out to where the Juggler asked!    
                DstConf().setProp("SplitRawEventOutput", juggler.getProp("Output"))
                #or else the default in the juggler is used, should be 0.0
                #TODO, handle the turned off Calo, shouldn't actually be a problem...
                from RawEventCompat.Configuration import _checkv
                from Configurables import RawEventFormatConf
                RawEventFormatConf().loadIfRequired()
                if juggler.getProp("Input")!=juggler.getProp("Output"):
                    if (juggler.getProp("Input") is None or juggler.getProp("Output")) is None or (_checkv(juggler.getProp("Input")))!=(_checkv(juggler.getProp("Output"))):
                        juggler.KillExtraBanks=True
                        juggler.KillExtraNodes=True
                        #really kill /Event/DAQ to prevent it re-appearing!
                        juggler.KillExtraDirectories=True
            
            from Configurables import TrackToDST

            # Filter Best Track States to be written
            trackFilter = TrackToDST("FilterBestTrackStates")
            from Configurables import ProcessPhase
            ProcessPhase("Output").DetectorList += [ "DST" ]
            GaudiSequencer("OutputDSTSeq").Members += [ trackFilter ]
            
            ### For Run 2, filter the fitted Velo tracks for PV
            if( self.getProp("DataType") in self.Run2DataTypes ):
                fittedVeloTracksFilter = TrackToDST("FilterFittedVeloTrackStates", veloStates = ["ClosestToBeam"])
                fittedVeloTracksFilter.TracksInContainer = "Rec/Track/FittedHLT1VeloTracks"
                GaudiSequencer("OutputDSTSeq").Members += [ fittedVeloTracksFilter ]
           
            if "Muon" in self.getProp("Detectors"):
                # Filter Muon Track States            
                muonTrackFilter = TrackToDST("FilterMuonTrackStates")
                muonTrackFilter.TracksInContainer = "/Event/Rec/Track/Muon"
                GaudiSequencer("OutputDSTSeq").Members += [ muonTrackFilter ]


            if packType != "NONE":
                # Add the sequence to pack the DST containers
                packSeq = GaudiSequencer("PackDST")
                DstConf().PackSequencer = packSeq
                DstConf().AlwaysCreate  = True
                GaudiSequencer("OutputDSTSeq").Members += [ packSeq ]
                # Run the packers also on Lumi only events to write empty containers
                if handleLumi and self.getProp( "WriteLumi" ):
                    notPhysSeq = GaudiSequencer("NotPhysicsSeq")
                    notPhysSeq.Members += [ packSeq ]

            # Define the file content
            DstConf().Writer     = writerName
            DstConf().DstType    = dstType
            DstConf().PackType   = packType
            #In case we didn't juggle the raw event,
            #We should write out the same as the input type!
            if not DstConf().isPropertySet("SplitRawEventOutput"):
                if self.isPropertySet("SplitRawEventInput") and self.getProp("SplitRawEventInput") is not None:
                    DstConf().setProp("SplitRawEventOutput",self.getProp("SplitRawEventInput"))
                elif DecodeRawEvent().isPropertySet("OverrideInputs") and DecodeRawEvent().getProp("OverrideInputs") is not None:
                    DstConf().setProp("SplitRawEventOutput",DecodeRawEvent().getProp("SplitRawEventInput"))
            
            if withMC:
                DstConf().SimType = "Full"
            elif self.getProp("DigiType").capitalize() == "Minimal":
                from Configurables import PackMCVertex
                GaudiSequencer("OutputDSTSeq").Members += [PackMCVertex()]
                DstConf().SimType = "Minimal"
            DstConf().OutputName = self.outputName()
            self.setOtherProps(DstConf(),["DataType","WriteFSR"])
Example #16
            break
    return n


# Configure all the unpacking, algorithms, tags and input files
appConf = ApplicationMgr()
appConf.ExtSvc += ['ToolSvc', 'DataOnDemandSvc', LoKiSvc()]

dv = DaVinci()
dv.DataType = "2012"

# disable for older versions of DV
# generally it seems in older versions of DV
# this whole script 'breaks' at places
# raising exceptions and yet works ...
dre = DecodeRawEvent()
dre.DataOnDemand = True

lhcbApp = LHCbApp()
lhcbApp.Simulation = True
CondDB().Upgrade = False
# don't really need tags for looking around
# LHCbApp().DDDBtag = ...
# LHCbApp().CondDBtag  = ...

# Pass the file to open as the last command line argument
inputFiles = [sys.argv[-1]]
IOHelper('ROOT').inputFiles(inputFiles)

# Configure two instances of the TriggerTisTos tool to pick things up from the
# split Hlt1 and Hlt2 locations.
Example #17
from DAQSys.Decoders import DecoderDB as ddb
from DAQSys.DecoderClass import decodersForBank
from Configurables import GaudiSequencer, DecodeRawEvent, DataOnDemandSvc

mySeq = GaudiSequencer("DecodeTest")
mySeq.Members += [d.setup() for d in decodersForBank(ddb, "Velo")]

newDec = ddb["OTTimeCreator"].clone("OTTimeCreator/Ot2")
newDec.Properties["OutputLevel"] = 42
newDec.overrideInputs("Other/RawEvent")
newDec.overrideOutputs("Other/OTTimes")

DecodeRawEvent().OverrideInputs = "Strip20"
DecodeRawEvent().DataOnDemand = True

DecodeRawEvent().__apply_configuration__()

if "Other/OTTimes" not in DataOnDemandSvc().AlgMap:
    raise KeyError("Other/OTTimes not correctly added to DataOnDemand")

print "Pass"
Example #18
    def defineOptions(self):

        # Kept for Dirac backward compatibility
        if self.getProp( "NoWarnings" ) :
            log.warning("Brunel().NoWarnings=True property is obsolete and maintained for Dirac compatibility. Please use Brunel().ProductionMode=True instead")
            self.setProp( "ProductionMode", True )

        # Special settings for production
        if self.getProp( "ProductionMode" ) :
            if not self.isPropertySet( "OutputLevel" ) :
                self.setProp("OutputLevel", ERROR)
            if not LHCbApp().isPropertySet( "TimeStamp" ) :
                LHCbApp().setProp( "TimeStamp", True )
            if not self.isPropertySet( "PrintFreq" ) :
                self.setProp("PrintFreq", 1000)

        # HC does not exist in Run 1, so use appropriate split output
        if self.getProp( "DataType" ) in self.Run1DataTypes :
            if not self.isPropertySet( "SplitRawEventOutput" ) :
                self.setProp( "SplitRawEventOutput", 4.0 )

        # Online mode
        if self.getProp( "OnlineMode" ) :
            if not self.isPropertySet("Histograms") :
                self.setProp("Histograms","Online")
            if not CondDB().isPropertySet("Online") :
                CondDB().setProp("Online", True)

        inputType = self.getProp( "InputType" ).upper()
        if inputType not in self.KnownInputTypes:
            raise TypeError( "Invalid inputType '%s'"%inputType )

        outputType = self.getProp( "OutputType" ).upper()

        histOpt = self.getProp("Histograms")
        if histOpt not in self.KnownHistograms:
            raise RuntimeError("Unknown Histograms option '%s'"%histOpt)

        withMC = self.getProp("WithMC")
        if withMC:
            if inputType in [ "MDF" ]:
                log.warning( "WithMC = True, but InputType = '%s'. Forcing WithMC = False"%inputType )
                withMC = False # Force it, MDF never contains MC truth
            if outputType in [ "RDST" ]:
                log.warning( "WithMC = True, but OutputType = '%s'. Forcing WithMC = False"%inputType )
                withMC = False # Force it, RDST never contains MC truth

        if self.getProp("WriteFSR") and self.getProp("PackType").upper() in ["MDF"]:
            if hasattr( self, "WriteFSR" ): log.warning("Don't know how to write FSR to MDF output file")
            self.setProp("WriteFSR", False)

        if self.getProp( "MergeGenFSR") and not self.getProp( "Simulation" ):
            if hasattr( self, "MergeGenFSR" ): log.warning("Cannot MergeGenFSR on real data")
            self.setProp( "MergeGenFSR", False )
                
        # Do not look for Hlt errors in data without HltDecReports bank
        if self.getProp( "DataType" ) in [ "2008", "2009" ]:
            self.setProp( "VetoHltErrorEvents", False )
        # For simulation, change the default but allow to override
        if self.getProp( "Simulation" ) and not hasattr( self, "VetoHltErrorEvents" ):
            self.setProp( "VetoHltErrorEvents", False )


        # Flag to handle or not LumiEvents
        handleLumi = inputType in ["MDF"] and not withMC and not self.getProp('UseDBSnapshot')

        # Top level configuration for skipping tracking
        if self.getProp("SkipTracking"):
            if inputType in ["MDF", "DIGI" ]:
                raise RuntimeError( "Cannot skip tracking if tracks not present on the input file" )
            if withMC:
                raise RuntimeError( "SkipTracking not (yet) supported for simulation input" )
            if( self.getProp("DataType") is "Upgrade"):
                raise RuntimeError( "SkipTracking not (yet) supported for Upgrade configurations" )
            ApplicationMgr().ExtSvc += [ "DataOnDemandSvc" ] # to decode the tracks from the DST
            DstConf().setProp("EnableUnpack", ["Tracking"] )

        # veto Hlt Error Events
        vetoHltErrorEvents = self.getProp("VetoHltErrorEvents")

        self.configureSequences( withMC, handleLumi, vetoHltErrorEvents )

        self.configureInit( inputType )

        self.configureInput( inputType )

        self.configureOutput( outputType, withMC, handleLumi )

        if withMC:
            # Create associators for checking and for DST
            from Configurables import ProcessPhase
            ProcessPhase("MCLinks").DetectorList += self.getProp("MCLinksSequence")
            # Unpack Sim data
            GaudiSequencer("MCLinksUnpackSeq").Members += [ "UnpackMCParticle"]

            # particle gun uses MCVertex to fake a reconstructed one
            # unpacking again would lead to crash
            if "pGun" not in self.getProp("SpecialData"):
                GaudiSequencer("MCLinksUnpackSeq").Members += [ "UnpackMCVertex" ]
            else:
                # Cannot run trigger on pGun events...
                L0Conf().EnsureKnownTCK=False
            
            GaudiSequencer("MCLinksTrSeq").Members += [ "TrackAssociator" ]
            GaudiSequencer("MCLinksCaloSeq").Members += [ "CaloDigit2MCLinks2Table", "CaloClusterMCTruth", "CaloHypoMCTruth" ]

            # activate all configured checking (uses MC truth)
            self.configureCheck( histOpt == "Expert" )

            # data on demand needed to pack RichDigitSummary for DST, when reading unpacked DIGI
            # Also needed to unpack MCHit containers when expert checking enabled
            ApplicationMgr().ExtSvc += [ "DataOnDemandSvc" ]

        # ROOT persistency for histograms
        ApplicationMgr().HistogramPersistency = "ROOT"
        from Configurables import RootHistCnv__PersSvc
        RootHistCnv__PersSvc('RootHistCnv').ForceAlphaIds = True

        if histOpt == "None" or histOpt == "":
            # HistogramPersistency still needed to read in CaloPID DLLs.
            # so do not set ApplicationMgr().HistogramPersistency = "NONE"
            return

        # Pass expert checking option to RecSys and RecMoni
        if histOpt == "Expert":
            DstConf().EnablePackingChecks = True

        # Use a default histogram file name if not already set
        if not HistogramPersistencySvc().isPropertySet( "OutputFile" ):
            histosName   = self.getProp("DatasetName")
            if histosName == "": histosName = "Brunel"
            if self.getProp( "RecL0Only" ): histosName += '-L0Yes'
            if (self.evtMax() > 0): histosName += '-' + str(self.evtMax()) + 'ev'
            if histOpt == "Expert": histosName += '-expert'
            histosName += '-histos.root'
            HistogramPersistencySvc().OutputFile = histosName
            
        #reconfigure decoders to point to default location, if required!
        if DecodeRawEvent().isPropertySet("OverrideInputs") and DecodeRawEvent().getProp("OverrideInputs") is not None:
            #do nothing, it's been configured by someone else!
            pass
        elif self.isPropertySet("SplitRawEventInput") and self.getProp("SplitRawEventInput") is not None:
            #print "WAAAAH Overriding RawEvent Locations"
            DecodeRawEvent().setProp("OverrideInputs",self.getProp("SplitRawEventInput"))
        elif inputType in [ "MDF", "DIGI", "XDIGI" ]:
            #set to the default of what comes out of Moore
            DecodeRawEvent().setProp("OverrideInputs","Moore")
        #remember that the default is a long list of locations,
        #starting with places which only exist _after_ brunel has run!

        # Following needed to build RecSummary, even if tracking is skipped.
        if self.getProp("SkipTracking"):
            from TrackSys import RecoTracking
            RecoTracking.DecodeTracking(["FastVelo"])
Example #19
#from Configurables import GaudiSequencer, FTNtupleMaker, NTupleSvc
#NTupleSvc().Output = ["FILE1 DATAFILE='mytupleFile.root' TYP='ROOT' OPT='NEW'"]
#GaudiSequencer("MoniFTSeq").Members += [FTNtupleMaker()]
### end

s = SimConf()
SimConf().Detectors = ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon']
SimConf().EnableUnpack = True
SimConf().EnablePack = False

d = DigiConf()
DigiConf().Detectors = ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon']
DigiConf().EnableUnpack = True
DigiConf().EnablePack = False

dre = DecodeRawEvent()
dre.DataOnDemand = True

lhcbApp = LHCbApp()
lhcbApp.Simulation = True

IOHelper('ROOT').inputFiles([cfg.file])

# Configuration done, run time!
appMgr = GP.AppMgr()
evt = appMgr.evtsvc()
det = appMgr.detsvc()
hist = appMgr.histSvc()

hist.dump()
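
A minimal sketch of stepping through a few events with the GaudiPython AppMgr configured above; the loop and the TES location are illustrative and not part of the scraped script.

# Assumed event loop: process a few events and peek at an example location
# in the transient event store.
for _ in range(5):
    appMgr.run(1)
    raw = evt['/Event/DAQ/RawEvent']  # example location, may be absent
    if raw:
        print raw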
Example #20
from Configurables import DaVinci
from StrippingConf.Configuration import StrippingConf

#
#Raw event juggler to split Other/RawEvent into Velo/RawEvent and Tracker/RawEvent
#
from Configurables import RawEventJuggler

juggler = RawEventJuggler(DataOnDemand=True, Input=0.3, Output=4.2)

#
#Fix for TrackEff lines
#
from Configurables import DecodeRawEvent

DecodeRawEvent().setProp("OverrideInputs", 4.2)

# Specify the name of your configuration
my_wg = 'B2CC'  #FOR LIAISONS

# NOTE: this will work only if you inserted correctly the
# default_config dictionary in the code where your LineBuilder
# is defined.
from StrippingSelections import buildersConf

confs = buildersConf()
from StrippingSelections.Utils import lineBuilder, buildStreams

streams = buildStreams(confs, WGs=my_wg)

leptonicMicroDSTname = 'Leptonic'
Example #21
#DDDBConf(InitialTime = 'now')
privatedb_path = "/afs/cern.ch/user/h/hschindl/public/DDDB_HC/"
DDDBConf(DbRoot=privatedb_path + "lhcb.xml")
from Configurables import LHCbApp
app = LHCbApp()
app.DataType = "2015"
app.CondDBtag = "cond-20150828"

EventSelector().PrintFreq = 100000
EventSelector().Input = listOfFiles

# Set up the sequence of algorithms to be run.
mainSeq = GaudiSequencer("MainSeq")

from Configurables import DecodeRawEvent
DecodeRawEvent()

#from Configurables import createODIN
#mainSeq.Members += [createODIN()]

from Configurables import HltRoutingBitsFilter
physFilter = HltRoutingBitsFilter("PhysFilter")
physFilter.RequireMask = [0x0, 0x4, 0x0]
mainSeq.Members += [physFilter]

from Configurables import HCRawBankDecoder
decoder = HCRawBankDecoder()
from Configurables import GaudiSequencer
mainSeq.Members += [decoder]
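
The snippet above fills mainSeq but, as scraped, never schedules it. A hedged guess at the likely final step, following the ApplicationMgr().TopAlg pattern of the earlier examples and assuming the usual "from Gaudi.Configuration import *" at the top of the options file:

# Assumed final step: run the main sequence as the top-level algorithm
ApplicationMgr().TopAlg = [mainSeq]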