def checkForMissingData():

    from Configurables import GaudiSequencer, TESCheck

    from Configurables import DataOnDemandSvc
    dod = DataOnDemandSvc()
    dod.Dump = True

    trigEvTES = "Trigger/RawEvent"
    from Configurables import RawEventSelectiveCopy
    trigRawBankCopy = RawEventSelectiveCopy('CopyTriggerRawEvent')
    trigRawBankCopy.RawBanksToCopy = [
        'ODIN', 'HltSelReports', 'HltDecReports', 'HltRoutingBits',
        'HltVertexReports', 'L0Calo', 'L0CaloFull', 'L0DU', 'L0Muon',
        'L0MuonProcCand', 'L0PU'
    ]
    trigRawBankCopy.OutputRawEventLocation = trigEvTES

    dod.AlgMap[trigEvTES] = trigRawBankCopy

    recSumTES = "Rec/Summary"
    from Configurables import RecSummaryAlg
    summary = RecSummaryAlg("CreateRecSummary")

    dod.AlgMap[recSumTES] = summary
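The function above only fills the DataOnDemandSvc maps; in a typical job it is applied once the rest of the configuration exists. A minimal sketch, assuming the standard Gaudi post-config hook used elsewhere on this page:

from Gaudi.Configuration import appendPostConfigAction
# register the action so the AlgMap entries are added after the main configuration
appendPostConfigAction(checkForMissingData)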
Example 2
    def _gen_postconfig_():
        """
        specific post-config action for (x)GEN-files 
        """
        logger.info('Start post-config action for (x)gen-files')
        from Configurables import DataOnDemandSvc
        dod = DataOnDemandSvc()
        from copy import deepcopy
        algs = deepcopy(dod.AlgMap)
        bad = set()
        for key in algs:
            if 0 <= key.find('Rec'): bad.add(key)
            elif 0 <= key.find('Raw'): bad.add(key)
            elif 0 <= key.find('DAQ'): bad.add(key)
            elif 0 <= key.find('Trigger'): bad.add(key)
            elif 0 <= key.find('Phys'): bad.add(key)
            elif 0 <= key.find('Prev/'): bad.add(key)
            elif 0 <= key.find('Next/'): bad.add(key)
            elif 0 <= key.find('/MC/') and 'GEN' == ext: bad.add(key)

        for b in bad:
            logger.debug('Remove key from DataOnDemand actions %s' % b)
            del algs[b]

        logger.info('Remove %d keys from DataOnDemand actions ' % len(bad))
        dod.AlgMap = algs

        from Configurables import EventClockSvc, CondDB
        EventClockSvc(EventTimeDecoder="FakeEventTime")
        logger.info('Use fake event time decoder for (x)gen-files')
        CondDB(IgnoreHeartBeat=True)
        logger.info('Ignore Heart-beat for (x)gen-files')
Example 3
 def unpackMC(self):
     """
     Do MC unpacking
     """
     DataOnDemandSvc().NodeMap['/Event/MC']   = 'DataObject'
     DataOnDemandSvc().AlgMap["MC/Particles"] = "UnpackMCParticle"
     DataOnDemandSvc().AlgMap["MC/Vertices"]  = "UnpackMCVertex"
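The string values given to AlgMap name the algorithm the service instantiates the first time the location is requested. An equivalent sketch using explicit configurables instead of strings, assuming UnpackMCParticle and UnpackMCVertex are importable from Configurables like the other algorithms on this page:

from Configurables import DataOnDemandSvc, UnpackMCParticle, UnpackMCVertex
dod = DataOnDemandSvc()
dod.NodeMap['/Event/MC']   = 'DataObject'                         # create the /Event/MC node on demand
dod.AlgMap['MC/Particles'] = UnpackMCParticle('UnpackMCParticle')
dod.AlgMap['MC/Vertices']  = UnpackMCVertex('UnpackMCVertex')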
Example 4
def configureL0FromRawBank(trunk):
    """
    Build L0 Muon, Calo candidates and L0DUReport from raw banks on-demand. Puts data in trunk + <standard locations>
    """

    from Configurables import L0Conf
    L0Conf().FullL0MuonDecoding = True
    L0Conf().EnableL0DecodingOnDemand = True

    from Configurables import (L0MuonCandidatesFromRaw,
                               L0CaloCandidatesFromRaw, L0DUFromRawAlg)

    locationRoot = fixTrunk(trunk)
    name = trunkName(trunk)

    DataOnDemandSvc().AlgMap[locationRoot +
                             "Trig/L0/MuonBCSU"] = L0MuonCandidatesFromRaw(
                                 name="L0MuonFromRaw" + name,
                                 RootInTES=locationRoot)
    DataOnDemandSvc().AlgMap[locationRoot +
                             "Trig/L0/FullCalo"] = L0CaloCandidatesFromRaw(
                                 name="L0CaloFromRaw" + name,
                                 RootInTES=locationRoot)

    DataOnDemandSvc().AlgMap[locationRoot +
                             "Trig/L0/L0DUReport"] = L0DUFromRawAlg(
                                 name="L0DUFromRaw" + name,
                                 RootInTES=locationRoot)
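fixTrunk and trunkName are helpers defined outside this snippet. A rough, purely illustrative sketch of what they are assumed to do here (normalise the TES prefix and derive a name suffix); the real implementations may differ:

def fixTrunk(trunk):
    # illustrative only: ensure the TES prefix ends with a single '/'
    return trunk if trunk.endswith('/') else trunk + '/'

def trunkName(trunk):
    # illustrative only: turn e.g. 'Prev1/' into a suffix usable in algorithm names
    return trunk.strip('/').replace('/', '')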
Example 5
    def configureMC(self):
        """
        Define DaVinciAssociators. Do MC unpacking.
        """
        from Configurables import CaloAssociatorsConf
        
        CaloAssociatorsConf ( EnableMCOnDemand = True, OutputLevel = self.getProp("OutputLevel") )

        from CaloKernel.ConfUtils import getAlgo
        from Configurables        import NeutralPP2MC

        ## offline neutral protoparticles 
        alg1 = getAlgo (
            NeutralPP2MC                    , ## type 
            'NeutralPP2MC'                  , ## base-name 
            'Offline'                       , ## context 
            'Relations/Rec/ProtoP/Neutrals' , ## base-location
            True                            ) ## on-demand 

        ## hlt neutral protoparticles 
        alg2 = getAlgo (
            NeutralPP2MC                    , ## type 
            'NeutralPP2MC'                  , ## base-name 
            'Hlt'                           , ## context 
            'Relations/Rec/ProtoP/Neutrals' , ## base-location 
            True                            ) ## on-demand
        
        from Configurables import DataOnDemandSvc
        DataOnDemandSvc().AlgMap['/Event/Relations/Rec/ProtoP/Charged' ]  = 'ChargedPP2MC' 
        DataOnDemandSvc().AlgMap['/Event/Relations/Rec/ProtoP/Upstream' ] = 'ChargedPP2MC' 

        self.unpackMC()
Example 6
def totalSilence ( lst = [ 'RootCnvSvc'               ,
                           'IOManagerSvc'             ,
                           'RootHistSvc'              ,
                           'LHCb::RawDataCnvSvc'      ,
                           'HcalDet.Quality'          ,
                           'EcalDet.Quality'          ,
                           'MagneticFieldSvc'         ,
                           'PropertyConfigSvc'        ,
                           'ToolSvc.L0DUConfig'       ,
                           'ToolSvc.L0CondDBProvider' , 
                           'L0MuonFromRaw'            ,
                           'IntegrateBeamCrossing'    ] , dod = True ) :
    
    from Configurables import MessageSvc, DataOnDemandSvc, ToolSvc 
    from Configurables import Gaudi__RootCnvSvc    as RootCnvSvc 
    from Configurables import Gaudi__IODataManager as IODataManager
    from Configurables import LHCb__RawDataCnvSvc  as RawDataCnvSvc 
    
    msg = MessageSvc()
    msg.OutputLevel = 5
    
    ToolSvc           (                                  OutputLevel = 5 )
    RootCnvSvc        ( "RootCnvSvc"                   , OutputLevel = 6 )
    RawDataCnvSvc     (                                  OutputLevel = 6 )
    
    IODataManager     ( 'IODataManager'                , OutputLevel = 6 ,
                        AgeLimit = 1 , UseGFAL = False )
    
    if dod :
        DataOnDemandSvc   ( Dump = True  )
    else :
        DataOnDemandSvc   ( Dump = False , OutputLevel = 6 )
        msg.setError += [ 'DataOnDemandSvc' ] 
        
    msg.setError += lst
Example 7
def doIt():
    """
    specific post-config action for (x)GEN-files 
    """
    extension = "xgen"
    ext = extension.upper()

    from Configurables import DataOnDemandSvc
    dod  = DataOnDemandSvc ()
    from copy import deepcopy 
    algs = deepcopy ( dod.AlgMap ) 
    bad  = set() 
    for key in algs :
        if     0 <= key.find ( 'Rec'     )                  : bad.add ( key )
        elif   0 <= key.find ( 'Raw'     )                  : bad.add ( key )
        elif   0 <= key.find ( 'DAQ'     )                  : bad.add ( key )
        elif   0 <= key.find ( 'Trigger' )                  : bad.add ( key )
        elif   0 <= key.find ( 'Phys'    )                  : bad.add ( key )
        elif   0 <= key.find ( 'Prev/'   )                  : bad.add ( key )
        elif   0 <= key.find ( 'Next/'   )                  : bad.add ( key )
        elif   0 <= key.find ( '/MC/'    ) and 'GEN' == ext : bad.add ( key )
    for b in bad :
        del algs[b]

    dod.AlgMap = algs

    from Configurables import EventClockSvc, CondDB 
    EventClockSvc ( EventTimeDecoder = "FakeEventTime" )
    CondDB  ( IgnoreHeartBeat = True )
Example 8
def _dod(myCombiner):
    "Add an arbitrary alg to the DOD svc"
    from Configurables import DataOnDemandSvc
    ApplicationMgr().ExtSvc = ["DataOnDemandSvc"] + [
        svc for svc in ApplicationMgr().ExtSvc if svc != "DataOnDemandSvc"
    ]
    DataOnDemandSvc().AlgMap["DAQ/RawEvent"] = myCombiner
    DataOnDemandSvc().NodeMap["DAQ"] = "DataObject"
Example 9
 def _decReports(self):
     """
     Make L0 reports look just like HLT reports
     """
     from Configurables import L0SelReportsMaker, L0DecReportsMaker
     DataOnDemandSvc().AlgMap["HltLikeL0/DecReports"] = L0DecReportsMaker(
         OutputLevel=4)
     DataOnDemandSvc().AlgMap["HltLikeL0/SelReports"] = L0SelReportsMaker(
         OutputLevel=4)
     from Configurables import L0Conf
     L0Conf().FullL0MuonDecoding = True
     L0Conf().EnableL0DecodingOnDemand = True
Example 10
    def MakeVeloParticles(self, name, particle, protoParticlesMaker):

        particleMaker = NoPIDsParticleMaker("For%sParticleMaker%s" %
                                            (self.name, name),
                                            Particle=particle)
        particleMaker.Input = self.VeloProtoOutputLocation

        DataOnDemandSvc().AlgMap.update({
            "/Event/Phys/" + particleMaker.name() + '/Particles':
            particleMaker.getFullName(),
            "/Event/Phys/" + particleMaker.name() + '/Vertices':
            particleMaker.getFullName()
        })

        AllVeloParticles = Selection("For%sSelAllVeloParts%s" %
                                     (self.name, name),
                                     Algorithm=particleMaker,
                                     RequiredSelections=[protoParticlesMaker],
                                     InputDataSetter=None)

        ### filter on the IP of the velo tracks
        return Selection(
            "For%sSelVeloParts%s" % (self.name, name),
            Algorithm=FilterDesktop(
                name + "For%sFilterVeloTrackIP%s" % (self.name, name),
                Code="(MIPDV(PRIMARY) > %(VeloMINIP)s)" % self.__confdict__),
            RequiredSelections=[AllVeloParticles])
Example 11
 def _configureDataOnDemand(self) :
     # replace any plain-string entry with the DataOnDemandSvc configurable
     if 'DataOnDemandSvc' in ApplicationMgr().ExtSvc :
         ApplicationMgr().ExtSvc.remove('DataOnDemandSvc')
     from Configurables import DataOnDemandSvc
     dod = DataOnDemandSvc()
     if dod not in ApplicationMgr().ExtSvc :
         ApplicationMgr().ExtSvc.append( dod )
Example 12
    def _makeUnpacker(self, type, name, slot, tesLoc):

        unp = type(name + slot)
        if slot != '': unp.RootInTES = slot
        if self.isPropertySet("DataUnpackingSeq"):
            self.getProp("DataUnpackingSeq").Members += [unp]
        else:
            output = self.dodLocation(slot, 'MC', tesLoc)
            DataOnDemandSvc().AlgMap[output] = unp
Example 13
def configureL0ReportDecoding(trunk):
    """
    Create L0DecReports and L0SelReports on-demand. Places the reports in trunk + HltLikeL0/DecReports and trunk + HltLikeL0/SelReports respectively.
    """

    from Configurables import L0DecReportsMaker, L0SelReportsMaker

    locationRoot = fixTrunk(trunk)
    name = trunkName(trunk)

    DataOnDemandSvc().AlgMap[locationRoot +
                             "HltLikeL0/DecReports"] = L0DecReportsMaker(
                                 name="L0DecReportsMaker" + name,
                                 RootInTES=locationRoot)
    DataOnDemandSvc().AlgMap[locationRoot +
                             "HltLikeL0/SelReports"] = L0SelReportsMaker(
                                 name="L0SelReportsMaker" + name,
                                 RootInTES=locationRoot)
Example 14
def makeAllParticles(name, trackcont, particle):
    particleMaker = NoPIDsParticleMaker(name, Particle=particle)
    particleMaker.Input = "Rec/ProtoP/" + trackcont + "ProtoPMaker"
    DataOnDemandSvc().AlgMap.update({
        "/Event/Phys/" + particleMaker.name() + '/Particles':
        particleMaker.getFullName(),
        "/Event/Phys/" + particleMaker.name() + '/Vertices':
        particleMaker.getFullName()
    })
Example 15
def updateDoD(alg, hat='Phys/'):
    """
    Update Data-On-Demand service
    """
    _parts = {hat + alg.name() + '/Particles': alg}
    _particles.update(_parts)

    dod = DataOnDemandSvc()
    dod.AlgMap.update({hat + alg.name() + '/Particles': alg.getFullName()})
    return _parts
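A hypothetical call site for updateDoD, reusing the NoPIDsParticleMaker seen in the other examples; the instance name and particle type are illustrative only:

maker = NoPIDsParticleMaker('MyNoPIDsPions', Particle='pion')
parts = updateDoD(maker)   # maps 'Phys/MyNoPIDsPions/Particles' to the maker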
Example 16
def onDemand(location, alg, context=''):
    """
    Configure Data-On-Demand service

    >>> alg = ...       # get the algorithm
    >>> tes = ...       # get TES-location
    >>> context = ...   # get the context 
    >>>  onDemand ( tes , alg , context )
    
    """
    if not location: return

    _loc = location
    ## Context-dependent algorithm name and TES path
    _alg = alg.getFullName()
    _cont = context
    if ('' != context and 'OFFLINE' != _cont.upper()):
        _pref = context + "/"
        _loc = _loc.replace('Rec/', _pref, 1)
        _suf = 'For' + context.replace('/', '_')
        if _alg.find(_suf) == -1:
            _alg += _suf

    dod = DataOnDemandSvc()

    _locs = [_loc]
    if 0 != _loc.find('/Event/'): _locs.append('/Event/' + _loc)

    ## location
    for l in _locs:
        if dod.AlgMap.has_key(l):
            prev = dod.AlgMap[l]
            if prev != _alg:
                log.warning(
                    'Data-On-Demand: replace action for location "%s" from "%s" to "%s" '
                    % (l, prev, _alg))
        elif __caloOnDemand.has_key(l):
            prev = __caloOnDemand[l]
            if prev != _alg:
                log.warning(
                    'Calo-On-Demand: replace action for location "%s" from "%s" to "%s" '
                    % (l, prev, _alg))

    log.debug(
        'Data-On-Demand: define the action for location "%s" to be "%s"' %
        (_loc, _alg))
    dod.AlgMap.update({_loc: _alg})
    log.debug(
        'Calo-On-Demand: define the action for location "%s" to be "%s"' %
        (_loc, _alg))
    __caloOnDemand.update({_loc: _alg})
Example 17
def removeUnpacking():
    from Configurables import DataOnDemandSvc, CaloProcessor
    DataOnDemandSvc().AlgMap.pop("/Event/Rec/ProtoP/Neutrals", None)
    DataOnDemandSvc().AlgMap.pop("/Event/Rec/ProtoP/Charged", None)
    DataOnDemandSvc().AlgMap.pop("/Event/Rec/Calo/Electrons", None)
    DataOnDemandSvc().AlgMap.pop("/Event/Rec/Calo/Photons", None)
    DataOnDemandSvc().AlgMap.pop("/Event/Rec/Calo/MergedPi0s", None)
    DataOnDemandSvc().AlgMap.pop("/Event/Rec/Calo/SplitPhotons", None)
Example 18
def makeMyMuons(name, protoParticlesMaker):
   """
     Make Particles out of the muon ProtoParticles
   """
   particleMaker =  NoPIDsParticleMaker(name+"ParticleMaker" , Particle = "Muon")
   particleMaker.Input = "Rec/ProtoP/"+name+"ProtoPMaker/ProtoParticles"
   #particleMaker.OutputLevel = 0

   DataOnDemandSvc().AlgMap.update( {
           "/Event/Phys/" + particleMaker.name() + '/Particles' : particleMaker.getFullName(),
           "/Event/Phys/" + particleMaker.name() + '/Vertices'  : particleMaker.getFullName() 
   } )


   return Selection(name+"SelVeloMuonParts", Algorithm = particleMaker, RequiredSelections = [protoParticlesMaker], InputDataSetter=None)
Example 19
    def _configureDataOnDemand(self):
        if not self.getProp("EnableDataOnDemand"):
            if 'DataOnDemandSvc' in ApplicationMgr().ExtSvc:
                ApplicationMgr().ExtSvc.pop('DataOnDemandSvc')
            from Gaudi.Configuration import appendPostConfigAction

            def disableFaultHandler():
                from Configurables import EventDataSvc
                EventDataSvc().EnableFaultHandler = False

            appendPostConfigAction(disableFaultHandler)
        else:
            from Configurables import DataOnDemandSvc
            dod = DataOnDemandSvc()
            if dod not in ApplicationMgr().ExtSvc:
                ApplicationMgr().ExtSvc.append(dod)
Example 20
def makeMyParticles(name, trackcont, particle):
    # set up particle maker
    #particleMaker =  NoPIDsParticleMaker(name , Particle = particle)
    particleMaker = CombinedParticleMaker(name, Particle=particle)
    #       particleMaker.OutputLevel = 0
    particleMaker.Input = "Rec/ProtoP/" + trackcont + "ProtoPMaker"
    selector = trackSelector(particleMaker)
    # protoparticle filter:
    fltr = protoFilter(particleMaker, ProtoParticleCALOFilter, 'Pion')
    fltr.Selection = [""]
    DataOnDemandSvc().AlgMap.update({
        "/Event/Phys/" + particleMaker.name() + '/Particles':
        particleMaker.getFullName(),
        "/Event/Phys/" + particleMaker.name() + '/Vertices':
        particleMaker.getFullName()
    })
Example 21
    def __apply_configuration__(self):
        """
        Apply configuration for Analysis
        """
        log.info("Applying Analysis configuration")
        log.info( self )
        GaudiKernel.ProcessJobOptions.PrintOff()
        
        if ( self.getProp("Simulation") ):
            self.configureMC()

        # Setup DataOnDemand, and make sure ToolSvc is done beforehand
        ApplicationMgr().ExtSvc += [ ToolSvc(), DataOnDemandSvc() ]
                    
        self.tagging()
        self.standardParticles()
Example 22
    def __apply_configuration__(self):

        #print "WAAAAAAAAAAAAAHHHHHHHHHHHHHHHH"

        ############## Set other properties ###########
        self._safeSet(LHCbApp(), [
            'EvtMax', 'SkipEvents', 'Simulation', 'DataType', 'CondDBtag',
            'DDDBtag'
        ])

        ApplicationMgr().AppName = "L0App within Moore"

        ############## The raw event ##################

        from Configurables import DataOnDemandSvc
        dod = DataOnDemandSvc()
        if dod not in ApplicationMgr().ExtSvc:
            ApplicationMgr().ExtSvc.append(dod)

        DecodeRawEvent().DataOnDemand = True

        importOptions("$L0TCK/L0DUConfig.opts")

        self._setRawEventLocations()

        ############## The L0 Part  ###################

        from DAQSys.Decoders import DecoderDB
        l0du = DecoderDB["L0DUFromRawTool"]
        l0du.Properties["StatusOnTES"] = False

        #configure L0 Sequence
        l0seq = GaudiSequencer("L0")
        ApplicationMgr().TopAlg += [l0seq]
        L0TCK = '0x%s' % self.getProp('TCK')[-4:]

        L0Conf().setProp("TCK", L0TCK)
        L0Conf().setProp("L0Sequencer", l0seq)
        self.setOtherProps(L0Conf(), ["DataType"])

        if (self.getProp("ReplaceL0Banks")):
            L0Conf().setProp("ReplaceL0BanksWithEmulated", True)
        else:
            L0Conf().setProp("SimulateL0", True)

        #done, that was quite easy, now for the output files
        self._configureOutput()
Example 23
    def __apply_configuration__(self):
        """
        DaVinci configuration
        """
        log.info("Applying DaVinci configuration")
        log.info(self)

        self._checkOptions()

        self._setAncestorDepth()

        ApplicationMgr().TopAlg = [self.sequence()]
        self._configureSubPackages()
        importOptions(
            "$STDOPTS/PreloadUnits.opts")  # to get units in .opts files
        inputType = self._configureInput()
        self.sequence().Members = [self._dvInit()]
        self.sequence().Members += [self._filteredEventSeq()]

        # _lumi handles event count, lumi integration and merging independently
        self.sequence().Members += self._lumi()

        # Dec reports
        self._decReports()

        # Printouts ...
        verbosePrint = self.getProp("VerboseMessages")
        from Configurables import LoKiSvc
        LoKiSvc().Welcome = verbosePrint
        from Configurables import DataOnDemandSvc
        DataOnDemandSvc().Dump = verbosePrint
        if not verbosePrint:
            msgSvc = getConfigurable("MessageSvc")
            msgSvc.setWarning += ['RFileCnv']

        self._defineMonitors()
        self._defineEvents()
        self._defineInput()
        self._rootFiles()

        # Add main sequence to TopAlg
        self._mainSequence()
        # monitoring
        self._moniSequence()

        self._root_in_Tes()
Example 24
                def _ANNPIDReCalib_():

                    from Configurables import (DstConf, DataOnDemandSvc,
                                               ChargedProtoANNPIDConf,
                                               ChargedProtoParticleMapper,
                                               ApplicationVersionFilter)

                    # Sequence to fill
                    annPIDSeq = GaudiSequencer("ANNPIDSeq")

                    # Only rerun on Reco14 (Run1) and Reco15 (2015) samples
                    recoRegex = "(v43r2|v47r9|v48r2)(.*)"
                    annPIDSeq.Members += [
                        ApplicationVersionFilter(
                            name="ANNPIDRecoVersionFilter",
                            HeaderLocation="Rec/Header",
                            VersionRegex=recoRegex)
                    ]

                    # ANN PID Configurable
                    annPIDConf = ChargedProtoANNPIDConf("ReDoANNPID")

                    # Configure Configurable for recalibration of the DST charged protos
                    annPIDConf.DataType = self.getProp("DataType")
                    annPIDConf.RecoSequencer = annPIDSeq
                    annPIDConf.applyConf()

                    # Update the DoD sequence to run this at the end
                    chargedLoc = "/Event/Rec/ProtoP/Charged"
                    if chargedLoc in DataOnDemandSvc().AlgMap.keys():
                        chargedSeq = DataOnDemandSvc().AlgMap[chargedLoc]
                        chargedSeq.Members += [annPIDSeq]

                    # Now for uDSTs: update the DoD mappers to run a custom one
                    # for charged protos, which includes the recalibration
                    cppmapper = ChargedProtoParticleMapper(
                        "UnpackChargedPPsMapper")
                    # Clone the settings from the DST configurable
                    cppmapper.ANNPIDTune = annPIDConf.tune(annPIDConf.DataType)
                    cppmapper.TrackTypes = annPIDConf.TrackTypes
                    cppmapper.PIDTypes = annPIDConf.PIDTypes
                    # Again, only rerun the ANNPID on Reco14 data
                    cppmapper.VersionRegex = recoRegex
                    # Update the DoD mapper lists
                    DataOnDemandSvc().NodeMappingTools = [
                        cppmapper
                    ] + DataOnDemandSvc().NodeMappingTools
                    DataOnDemandSvc().AlgMappingTools = [
                        cppmapper
                    ] + DataOnDemandSvc().AlgMappingTools
Example 25
def makeMyKaons(name, trackcont, particle):
    # set up particle maker
    #particleMaker =  NoPIDsParticleMaker(name , Particle = particle)
    particleMaker = CombinedParticleMaker(name, Particle=particle)
    #particleMaker =  BestPIDParticleMaker(name , Particle = particle)
    #particleMaker.addTool(ProtoParticleCALOFilter(Selection = ["RequiresDet='RICH' CombDLL(k-pi)>'-5.0'"],name="muon"))
    #particleMaker.Particles = [ "kaon" ]
    particleMaker.Input = "Rec/ProtoP/" + trackcont + "ProtoPMaker"
    #particleMaker.OutputLevel = 0
    selector = trackSelector(particleMaker)
    # protoparticle filter:
    fltr = protoFilter(particleMaker, ProtoParticleCALOFilter, 'Kaon')
    fltr.Selection = ["RequiresDet='RICH' CombDLL(k-pi)>'-5.0'"]
    #fltr = protoFilter ( particleMaker , ProtoParticleCALOFilter, 'Kaon' )
    #fltr.Selection = [ "RequiresDet='RICH' CombDLL(k-pi)>'-5.0'" ]
    DataOnDemandSvc().AlgMap.update({
        "/Event/Phys/" + particleMaker.name() + '/Particles':
        particleMaker.getFullName(),
        "/Event/Phys/" + particleMaker.name() + '/Vertices':
        particleMaker.getFullName()
    })
Example 26
def configureHltReportDecoding(trunk):
    """
    Create HltDecReports and HltSelReports from DAQ/RawEvent banks.
    Fetches DAQ/RawEvent from trunk + DAQ/RawEvent and places the reports in
    trunk + Hlt/DecReports and trunk + Hlt/SelReports respectively.
    The action runs on demand via the DataOnDemandSvc.
    """

    locationRoot = fixTrunk(trunk)

    name = trunkName(trunk)

    rawEventLoc = locationRoot + "DAQ/RawEvent"
    #decReportLoc = locationRoot + "Hlt/DecReports"
    #selReportLoc = locationRoot + "Hlt/SelReports"

    ApplicationMgr().ExtSvc += [DataOnDemandSvc()]
    from DAQSys.Decoders import DecoderDB
    from DAQSys.DecoderClass import decodersForBank
    for bank in ["Sel", "Dec", "Vertex", "Track"]:
        for d in decodersForBank(DecoderDB, "Hlt" + bank + "Reports"):
            d.overrideInputs(rawEventLoc)
            d.overrideOutputs([locationRoot + loc for loc in d.listOutputs()])
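A hypothetical call, consistent with the PrevTrig raw-event location used later on this page; it registers on-demand decoding of the HLT reports under that prefix:

configureHltReportDecoding('PrevTrig')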
Example 27
def selMuonPParts(name, trackingSeq):
   """
       Make ProtoParticles out of VeloMuon tracks
   """
   unpacker = UnpackTrack(name+"UnpackTrack")
   unpacker.InputName="pRec/VeloMuon/Tracks"
   unpacker.OutputName="Rec/VeloMuon/Tracks"

   veloprotos = ChargedProtoParticleMaker(name+"ProtoPMaker")
   veloprotos.Inputs = ["Rec/VeloMuon/Tracks"]
   veloprotos.Output = "Rec/ProtoP/"+name+"ProtoPMaker/ProtoParticles"
   veloprotos.addTool( DelegatingTrackSelector, name="TrackSelector" )
   tracktypes = [ "Long" ]
   #veloprotos.OutputLevel =0
   #if (trackcont == "Best") :
   #	tracktypes = [ "Long" ]
   veloprotos.TrackSelector.TrackTypes = tracktypes
   selector = veloprotos.TrackSelector
   for tsname in tracktypes:
       selector.addTool(TrackSelector, name=tsname)
       ts = getattr(selector, tsname)
       # Set Cuts
       ts.TrackTypes = [tsname]

   DataOnDemandSvc().AlgMap.update( {
       "/Event/Rec/VeloMuon/Tracks" : unpacker.getFullName(),
   } )

   veloprotoseq = GaudiSequencer(name+"ProtoPSeq")
   veloprotoseq.Members += [ veloprotos ]

   return GSWrapper(name="WrappedVeloMuonProtoPSeqFor" + name,
                    sequencer=veloprotoseq,
                    output='Rec/ProtoP/' + name +'ProtoPMaker/ProtoParticles',
                    requiredSelections = [ trackingSeq])
Example 28
 def _dataOnDemand(self, rootintes):
     """Configure the DataOnDemand service for L0."""
     from Configurables import DataOnDemandSvc
     log.info("L0 on demand activated (rootInTES : %s)" % (rootintes))
     DataOnDemandSvc().AlgMap[
         rootintes +
         "Trig/L0/MuonCtrl"] = "L0MuonCandidatesFromRaw/" + L0MuonFromRawAlgName + rootintes
     if self.getProp("FullL0MuonDecoding"):
         DataOnDemandSvc().AlgMap[
             rootintes +
             "Trig/L0/MuonData"] = "L0MuonCandidatesFromRaw/" + L0MuonFromRawAlgName + rootintes
         DataOnDemandSvc().AlgMap[
             rootintes +
             "Trig/L0/MuonBCSU"] = "L0MuonCandidatesFromRaw/" + L0MuonFromRawAlgName + rootintes
     DataOnDemandSvc().AlgMap[
         rootintes +
         "Trig/L0/Calo"] = "L0CaloCandidatesFromRaw/" + L0CaloFromRawAlgName + rootintes
     DataOnDemandSvc().AlgMap[
         rootintes +
         "Trig/L0/FullCalo"] = "L0CaloCandidatesFromRaw/" + L0CaloFromRawAlgName + rootintes
     DataOnDemandSvc().AlgMap[
         rootintes +
         "Trig/L0/L0DUReport"] = "L0DUFromRawAlg/" + L0DUFromRawAlgName + rootintes
Example 29
def ConfigureMoore():
    config = Swimming()
    from Swimming.HltTransforms import getTransform
    thisTransform = getTransform(
        config.getProp('TransformName'),
        config.getProp('Hlt1Triggers') + config.getProp('Hlt2Triggers'))

    from Configurables import (HltConfigSvc, EventNodeKiller,
                               HltMoveVerticesForSwimming, Moore)

    #Global configuration
    mykiller = EventNodeKiller("killHlt")
    mykiller.Nodes = config.getProp('HltNodesToKill')
    deathstar = GaudiSequencer("killHltSeq")
    deathstar.Members = [mykiller]
    from Swimming import MooreSetup
    #

    dddb = config.getProp('DDDBtag')
    conddb = config.getProp('CondDBtag')
    tck = config.getProp('TCK')
    run = config.getProp('RunNumber')
    if not dddb and not conddb and not tck and run:
        import shelve
        tag_db = os.path.expandvars(config.getProp('TagDatabase'))
        if not os.path.exists(tag_db):
            raise OSError, "Tag database file %s does not exist" % config.getProp(
                'TagDatabase')
        tag_db = shelve.open(tag_db, 'r')
        info = tag_db['info']
        tags = info[run]
        Moore().DDDBtag = tags['DDDBtag']
        Moore().CondDBtag = tags['CondDBtag']
        Moore().InitialTCK = tags['TCK']
        Swimming().TCK = tags['TCK']
    elif dddb and conddb and tck and not run:
        Moore().DDDBtag = dddb
        Moore().CondDBtag = conddb
        Moore().InitialTCK = tck
    else:
        raise TypeError, 'You must specify either the CondDB tag, DDDB tag and TCK ' + \
              '(and not the run number), or only the run number.'

    Moore().Simulation = config.getProp('Simulation')
    Moore().DataType = config.getProp('DataType')
    Moore().outputFile = config.getProp('OutputFile')
    Moore().WriteFSR = config.getProp('WriteFSR')
    Moore().Persistency = config.getProp('Persistency')
    Moore().WriterRequires = []
    # Add extra locations to writer
    from Configurables import InputCopyStream
    writer = InputCopyStream('Writer')
    writer.ItemList = [config.getProp('SwimmingPrefix') + '/Reports#1']
    writer.OptItemList = list(
        set([
            l + '/P2TPRelations#1'
            for l in config.getProp('OffCands').values()
        ]))

    #
    # Define the TCK transformation
    #
    HltConfigSvc().ApplyTransformation = thisTransform
    from pprint import pprint
    pprint(HltConfigSvc().ApplyTransformation)
    #
    # Define the swimming algorithm
    #
    myswimmer = HltMoveVerticesForSwimming("HltMovePVs4Swimming")
    myswimmer.SwimmingDistance = 0.0
    loc = config.getProp(
        'SwimmingPrefix')  # TODO check differences with trunk more carefully
    myswimmer.Bcontainer = loc
    myswimmer.InputSelection = config.getProp('OnlinePV')
    myswimmer.OutputSelection = config.getProp('OutPVSel')
    myswimmer.OutputLevel = 4

    # Configure an extra TisTos Tool and some decoder algos to debug TisTos issues
    prefix = config.getProp('SwimmingPrefix')
    from Configurables import HltDecReportsDecoder, HltSelReportsDecoder
    decoders = [(HltDecReportsDecoder, [('OutputHltDecReportsLocation',
                                         'Hlt/DecReports')]),
                (HltSelReportsDecoder,
                 [('OutputHltSelReportsLocation', 'Hlt/SelReports'),
                  ('HltDecReportsLocation', 'Hlt/DecReports')])]
    from Configurables import TriggerTisTos
    ToolSvc().addTool(TriggerTisTos, 'SwimmingDebugTisTos')
    ToolSvc().SwimmingDebugTisTos.TOSFracMuon = 0.0
    ToolSvc().SwimmingDebugTisTos.TOSFracTT = 0.0
    for conf, d in decoders:
        configurable = conf('Swimming' + d[0][1].split('/')[-1])
        print configurable
        try:
            configurable.InputRawEventLocation = 'PrevTrig/RawEvent'
        except:
            configurable.RawEventLocations = [
                'PrevTrig/RawEvent'
            ] + configurable.RawEventLocations
        output = None
        for prop, loc in d:
            if not output: output = prefix + '/' + loc
            setattr(configurable, prop, prefix + '/' + loc)
        DataOnDemandSvc().AlgMap[output] = configurable
        prop = d[0][0][6:]
        print prop, output
        setattr(ToolSvc().SwimmingDebugTisTos, prop, output)

    class Deathstar(object):
        def __init__(self, seq):
            self._seq = seq

        def insert(self):
            ApplicationMgr().TopAlg.insert(0, self._seq)

    d = Deathstar(deathstar)
    appendPostConfigAction(d.insert)
Example 30
    def __apply_configuration__(self):
        #check the DB is self-consistent!
        from DAQSys.DecoderClass import validate
        validate(self.__db__())
        #only override input locations (if I was asked to)
        self.overrideIfRequired(setup=True)
        #if I was asked to override the locations, I must also propagate this to the configurables, not just the db
        #so I must setup the configurables. If someone else wants different properties,
        #they should have edited the database, not edited the configurables directly, or they should
        #add a postConfigAction to do what they want to change

        #if ODIN is active, then configure the EventTimeDecoder
        from DAQSys.DecoderClass import decodersForBank
        if self.getProp("EvtClockBank") is not None and len(
                self.getProp("EvtClockBank")):
            SetEvtClock(self.getProp("EvtClockBank"), self.__db__())

        if not self.isPropertySet("Sequencer") and not self.getProp(
                "DataOnDemand"):
            #then I'm not doing anything else
            return
        if self.isPropertySet("Sequencer") and self.getProp("DataOnDemand"):
            raise ValueError(
                "You asked me to do the DoD service *and* a sequencer, but it only make sense to do one of these"
            )
        #if DoD, check that no active algs want to write to the same location...
        for k, v in self.__db__().items():
            if not v.Active:
                continue
            thedecoder = v.setup()

            #either add to a sequence, respecting dependencies
            if self.isPropertySet("Sequencer"):
                if self.getProp("Sequencer").Members is None:
                    self.getProp("Sequencer").Members = []
                if thedecoder in self.getProp("Sequencer").Members:
                    continue

                #add any requirements first!
                for alg in v.listRequired():
                    depdecoder = self.__db__()[alg].setup()
                    if depdecoder not in self.getProp("Sequencer").Members:
                        self.getProp("Sequencer").Members.append(depdecoder)

                self.getProp("Sequencer").Members.append(thedecoder)
            #or DoD
            if self.getProp("DataOnDemand"):
                if DataOnDemandSvc().AlgMap is None or type(
                        DataOnDemandSvc().AlgMap) is not dict:
                    DataOnDemandSvc().AlgMap = {}
                locs = v.listOutputs()
                for loc in locs:
                    if loc in DataOnDemandSvc().AlgMap:
                        testname = DataOnDemandSvc().AlgMap[loc]
                        if type(testname) is not str:
                            testname = testname.getFullName()
                        #handle default names!
                        if testname == v.FullName or (
                                testname.split("/")[0]
                                == testname.split("/")[-1]
                                and v.FullName.split("/")[0]
                                == v.FullName.split("/")[-1]
                                and testname.split("/")[0]
                                == v.FullName.split("/")[0]):
                            print "# WARNING: something else configured a decoder already, " + loc + " " + testname
                        else:
                            raise AttributeError(
                                "At least two different active algs want to write to the same location. Check your DecoderDB! "
                                + loc + ": " + testname + " & " + v.FullName)
                    DataOnDemandSvc().AlgMap[loc] = thedecoder
Example 31
        "pion_cuts  = in_range ( 300 * MeV , PT , 120 * GeV ) & ( CLONEDIST > 5000 ) & ( TRCHI2DOF < 5 ) ",
        "gamma_cuts = in_range ( 300 * MeV , PT ,  10 * GeV )  ",
        "pions      = SOURCE ( '/Event/Phys/StdAllNoPIDsPions/Particles'  ,  pion_cuts ) ",
        "gammas     = SOURCE ( '/Event/Phys/StdLooseAllPhotons/Particles' , gamma_cuts ) ",
    ],
    Variables={
        "px_c": " pions  >> sum ( PX ) ",
        "py_c": " pions  >> sum ( PY ) ",
        "px_g": " gammas >> sum ( PX ) ",
        "py_g": " gammas >> sum ( PY ) ",
        "n_c": " pions  >> SIZE       ",
        "g_c": " gammas >> SIZE       ",
    }
)
from Configurables import DataOnDemandSvc
dod = DataOnDemandSvc()
dod.AlgMap['/Event/Counters/CharmEW'] = cnt


def myMicroDSTStreamConf(pack=True):
    conf = stripMicroDSTStreamConf(pack)
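    # the '#1' suffix below is the usual Gaudi item-list depth specifier (1 = just the object at that location)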
    conf.extraItems += ['/Event/Counters#1']
    conf.extraItems += ['/Event/Counters/CharmEW#1']
    return conf

# Configuration of SelDSTWriter
SelDSTWriterConf = {'default':    myMicroDSTStreamConf(pack=False)}
SelDSTWriterElements = {'default': stripMicroDSTElements(pack=False)}

udstWriter = SelDSTWriter(
    "MyDSTWriter",
Example 32
def configure(datafiles,
              catalogs=[],
              castor=False,
              params={}):
    """
    Job configuration 
    """

    ## needed for job configuration
    from Configurables import DaVinci

    the_year = "2011"

    from BenderTools.Parser import hasInFile

    if params:
        the_year = params['Year']
        logger.info('Year is set from params to be %s ' % the_year)
    else:
        if hasInFile(datafiles, 'Collision11'):
            the_year = '2011'
        elif hasInFile(datafiles, 'Collision12'):
            the_year = '2012'
        elif hasInFile(datafiles, 'Collision13'):
            the_year = '2013'
        elif hasInFile(datafiles, 'Stripping17'):
            the_year = '2011'
        elif hasInFile(datafiles, 'Stripping13'):
            the_year = '2011'
        elif hasInFile(datafiles, 'Stripping15'):
            the_year = '2011'
        elif hasInFile(datafiles, 'Stripping19'):
            the_year = '2012'
        elif hasInFile(datafiles, 'Stripping20r1'):
            the_year = '2011'
        elif hasInFile(datafiles, 'Stripping20r1p1'):
            the_year = '2011'
        elif hasInFile(datafiles, 'Stripping20r0p1'):
            the_year = '2012'
        elif hasInFile(datafiles, 'MC11'):
            the_year = '2011'
        logger.info('Year is set from files  to be %s ' % the_year)

    #
    # check
    #
    if '2011' == the_year and hasInFile(datafiles, 'Collision12'):
        raise AttributeError, 'Invalid Year %s ' % the_year
    if '2012' == the_year and hasInFile(datafiles, 'Collision11'):
        raise AttributeError, 'Invalid Year %s ' % the_year

    logger.info('Use the Year = %s ' % the_year)

    W_Location = '/Event/AllStreams/Phys/WMuLine/Particles'
    from PhysSelPython.Wrappers import AutomaticData
    W_Strip = AutomaticData(Location=W_Location)

    EW_preambulo = [
        "pion_cuts  = in_range ( 300 * MeV , PT , 10 * GeV ) & ( CLONEDIST > 5000 ) & ( TRCHI2DOF < 5 ) & ( TRGHOSTPROB < 0.5 ) & ( PERR2/P2 < 0.05**2 ) ",
        "ptCone_    =  SUMCONE (   0.25 , PT , '/Event/Phys/StdAllLoosePions/Particles'               )",
        "ptCone_2   =  SUMCONE (   0.25 , PT , '/Event/Phys/StdAllLoosePions/Particles'   , pion_cuts )",
        "etCone_    =  SUMCONE (   0.25 , PT , '/Event/Phys/StdLooseAllPhotons/Particles'             )",
        "ptCone     =    SINFO (  55001 , ptCone_  , True ) ",
        "ptCone2    =    SINFO (  55003 , ptCone_2 , True ) ",
        "etCone     =    SINFO (  55002 , etCone_  , True ) ",
    ]

    # ========================================================================
    # good W
    # ========================================================================
    from GaudiConfUtils.ConfigurableGenerators import FilterDesktop
    gW = FilterDesktop(
        Preambulo=EW_preambulo,
        Code="""
        in_range ( 15 * GeV , PT , 100 * GeV ) &
        ( -1e+10 * GeV < ptCone  ) &
        ( -1e+10 * GeV < ptCone2 ) &
        ( -1e+10 * GeV < etCone  ) 
        """
    )
    from PhysSelPython.Wrappers import Selection
    W_Data = Selection(
        'W',
        Algorithm=gW,
        RequiredSelections=[W_Strip]
    )

    from PhysSelPython.Wrappers import SelectionSequence
    seq = SelectionSequence("Wseq", TopSelection=W_Data)

    # counters
    from Configurables import LoKi__CounterAlg as CounterAlg
    cnt = CounterAlg(
        'CharmEWCounters',
        Location="Counters/CharmEW",
        Preambulo=[
            "from LoKiPhys.decorators import *",
            "from LoKiCore.functions  import *",
            "pion_cuts  = in_range ( 300 * MeV , PT , 120 * GeV ) & ( CLONEDIST > 5000 ) & ( TRCHI2DOF < 5 ) ",
            "gamma_cuts = in_range ( 300 * MeV , PT ,  10 * GeV )  ",
            "pions      = SOURCE ( '/Event/Phys/StdAllNoPIDsPions/Particles'  ,  pion_cuts ) ",
            "gammas     = SOURCE ( '/Event/Phys/StdLooseAllPhotons/Particles' , gamma_cuts ) ",
        ],
        Variables={
            "px_c": " pions  >> sum ( PX ) ",
            "py_c": " pions  >> sum ( PY ) ",
            "px_g": " gammas >> sum ( PX ) ",
            "py_g": " gammas >> sum ( PY ) ",
            "n_c": " pions  >> SIZE       ",
            "g_c": " gammas >> SIZE       ",
        }
    )
    from Configurables import DataOnDemandSvc
    dod = DataOnDemandSvc()
    dod.AlgMap['/Event/Counters/CharmEW'] = cnt

    # ========================================================================
    # prefilters for a drastic speed-up in reading the input data
    # ========================================================================
    from PhysConf.Filters import LoKi_Filters
    fltrs = LoKi_Filters(
        STRIP_Code=" HLT_PASS_RE ( 'Stripping.*WMuLine.*Decision' ) "
    )

    davinci = DaVinci(
        EventPreFilters=fltrs.filters('Filters'),  # PREFILTERS
        DataType=the_year,
        InputType='DST',
        Simulation=True,
        PrintFreq=10000,
        EvtMax=-1,
        #
        HistogramFile='MCW_Histos.root',
        TupleFile='MCW.root',
        #
    )

    # connect to DaVinci
    from Configurables import GaudiSequencer
    davinci.UserAlgorithms = [
        GaudiSequencer('MySeq', Members=[seq.sequence(), 'MCW'])
    ]

    #
    # take care about DB-tags:
    #
    # try to get the tags from Rec/Header
    from BenderTools.GetDBtags import getDBTags
    tags = getDBTags(
        datafiles[0],
        castor
    )
    logger.info('Extract tags from DATA : %s' % tags)
    if tags.has_key('DDDB') and tags['DDDB']:
        davinci.DDDBtag = tags['DDDB']
        logger.info('Set DDDB    %s ' % davinci.DDDBtag)
    if tags.has_key('CONDDB') and tags['CONDDB']:
        davinci.CondDBtag = tags['CONDDB']
        logger.info('Set CONDDB  %s ' % davinci.CondDBtag)
    if tags.has_key('SIMCOND') and tags['SIMCOND']:
        davinci.CondDBtag = tags['SIMCOND']
        logger.info('Set SIMCOND %s ' % davinci.CondDBtag)

    #
    # remove excessive printout
    #
    from Configurables import MessageSvc
    msg = MessageSvc()
    msg.setError += ['HcalDet.Quality',
                     'EcalDet.Quality',
                     'MagneticFieldSvc',
                     'PropertyConfigSvc',
                     'ToolSvc.L0DUConfig',
                     'ToolSvc.L0CondDBProvider',
                     'L0MuonFromRaw',
                     'IntegrateBeamCrossing']

    #
    # come back to Bender
    #
    setData(datafiles, catalogs, castor)

    #
    # start Gaudi
    #
    gaudi = appMgr()

    #
    # more silence
    #
    _a = gaudi.tool('ToolSvc.L0DUConfig')
    _a.OutputLevel = 4

    alg = MCW(
        'MCW',
        Inputs=[seq.outputLocation()],
        PP2MCs=['Relations/Rec/ProtoP/Charged']
    )

    return SUCCESS
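The function above is a Bender-style job configuration; a minimal, purely illustrative steering block (the input file is a placeholder and run() is assumed to be Bender's usual event-loop helper):

if '__main__' == __name__:
    inputdata = ['/some/local/file.dst']   # placeholder, not a real dataset
    configure(inputdata, castor=False)
    run(1000)                              # assumed Bender event loop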