Example #1
def getOnlineDBReader(ym_tuple, granularity='YEARLY', connStrFunc=None):
    cnstr = ''
    ymstr = ''
    if granularity == 'YEARLY':
        ymstr = "%04d" % ym_tuple[0]
        cnstr = connStrFunc((ym_tuple[0], 13))
    else:
        ymstr = "%04d%02d" % ym_tuple
        cnstr = connStrFunc(ym_tuple)

    ptnm = "ONLINE_" + ymstr
    accSvc = CondDBAccessSvc(ptnm, ConnectionString=cnstr)
    dblayers = [accSvc]
    LoadCALIBDB = os.environ.get('LoadCALIBDB')
    if ym_tuple[0] < 2015 or LoadCALIBDB != "OFFLINE":
        # For data types before 2015, no CALIBOFF layer is needed
        return accSvc
    dbpath = os.environ["SQLITEDBPATH"]
    layer = 'CALIBOFF'
    if exists(join(dbpath, layer + '.db')):
        # Put the discovered layer on top
        cfg = getConfigurable(layer, CondDBAccessSvc)
        try:
            cfg.ConnectionString
        except AttributeError:  # Set up connection for the 1st time
            cfg = CondDBAccessSvc("CALIBOFF",
                                  ConnectionString=cnstr.replace(
                                      'ONLINE-%s.db/ONLINE' % ymstr,
                                      "%s.db/%s" % (layer, layer)),
                                  CacheHighLevel=200)
        dblayers.insert(0, cfg)

    if (len(dblayers) == 1): return accSvc  # In case no CALIBOFF.db is found
    return CondDBLayeringSvc("ONLINELAYER_" + ymstr, Layers=dblayers)
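
# A minimal usage sketch (assumption, not from the original source): the caller passes
# a connection-string builder; for YEARLY granularity the function asks it for month 13,
# which the builder is expected to map onto the whole-year ONLINE snapshot.
def _exampleOnlineConnStr(ym_tuple):
    # hypothetical sqlite layout under $SQLITEDBPATH
    if ym_tuple[1] == 13:
        return "sqlite_file:$SQLITEDBPATH/ONLINE-%04d.db/ONLINE" % ym_tuple[0]
    return "sqlite_file:$SQLITEDBPATH/ONLINE-%04d%02d.db/ONLINE" % ym_tuple
# e.g. reader = getOnlineDBReader((2016, 3), 'MONTHLY', connStrFunc=_exampleOnlineConnStr)
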
def add_data(job_name, job_id):
    IOHelper('ROOT').inputFiles(glob(join('output/scenarios', job_name, 'hists', str(job_id), 'Brunel.xdst')))

    CondDB().Upgrade = True
    if job_name == 'Original_DB':
        lhcbApp.DDDBtag = "dddb-20160304"
        lhcbApp.CondDBtag = "sim-20150716-vc-md100"
    else:
        CondDB().addLayer(dbFile=join(os.getcwd(), 'output/DDDB.db'), dbName="DDDB")
        CondDB().addLayer(dbFile=join(os.getcwd(), 'output/SIMCOND.db'), dbName="SIMCOND")
        alignment_conditions = CondDBAccessSvc("AlignmentConditions")
        alignment_conditions.ConnectionString = "sqlite_file:{}/output/scenarios/{}/Alignment_SIMCOND.db/SIMCOND".format(os.getcwd(), job_name)
        CondDB().addLayer(alignment_conditions)
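
# Usage sketch (assumption, for illustration only): the 'Original_DB' job runs with the
# released tags above, while any other job name ('scaled' here is hypothetical) layers
# the per-scenario DDDB/SIMCOND/alignment databases on top.
#   add_data('Original_DB', 0)
#   add_data('scaled', 0)
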
def configure(version):
    from Gaudi.Configuration import (importOptions, ApplicationMgr, MessageSvc)
    from Configurables import DDDBConf, CondDB, CondDBAccessSvc
    dddbConf = DDDBConf()
    cdb = CondDB()

    DBs = []
    for i in range(3):
        data = {"name": "TESTDB%d" % i}
        DBs.append(
            CondDBAccessSvc(
                data["name"],
                ConnectionString="sqlite_file:../data/%(name)s.db/%(name)s" %
                data))

    cdb.PartitionConnectionString["DDDB"] = DBs[0].ConnectionString
    cdb.Tags["DDDB"] = ""
    if version == 1:
        cdb.addAlternative(DBs[1], '/AutoMap/FolderSet2')
    elif version == 2:
        cdb.addAlternative(DBs[2], '/AutoMap/FolderSet3')
    elif version == 3:
        cdb.addAlternative(DBs[1], '/AutoMap/FolderSet2/ObjectA')
    elif version == 4:
        cdb.addLayer(DBs[1])
        cdb.addLayer(DBs[2])
    elif version != 0:
        raise RuntimeError("Invalid version number")

    ApplicationMgr(TopAlg=["LoadDDDB"], EvtSel="NONE")
def configure(use_case="simple"):
    # Common configuration
    import Gaudi.Configuration
    from Configurables import (ApplicationMgr, MessageSvc, DDDBConf, CondDB,
                               CondDBAccessSvc, UpdateManagerSvc)
    DDDBConf()  # detector description
    localDb = CondDBAccessSvc(
        "VeloAlignCondTestDB",
        ConnectionString="sqlite_file:../data/VeloAlignCondTest.db/DDDB",
        DefaultTAG="simple")
    CondDB().addLayer(localDb)  # use local DB

    ApplicationMgr(TopAlg=[], EvtSel="NONE")
    #MessageSvc(OutputLevel = 1)

    if use_case == "simple":
        # nothing to do more
        return
    elif use_case == "override_motion_system":
        # Example of how to move the Velo
        UpdateManagerSvc().ConditionsOverride.append(
            "Conditions/Online/Velo/MotionSystem := double ResolPosY = 100")
    elif use_case == "override_alignment":
        UpdateManagerSvc().ConditionsOverride.append(
            "Conditions/Alignment/Velo/VeloLeft := double_v dPosXYZ = 0 100 0")
    elif use_case == "override_alignment2":
        UpdateManagerSvc().ConditionsOverride.append(
            "Conditions/Alignment/Velo/VeloLeft := double_v YOffsetCoeffs = 20 -5"
        )
        UpdateManagerSvc().ConditionsOverride.append(
            "Conditions/Alignment/Velo/VeloRight := double_v YOffsetCoeffs = 0 1"
        )
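    # The override strings above follow a "<condition path> := <type> <parameter> = <values>"
    # pattern; a further hypothetical example, for illustration only:
    #   UpdateManagerSvc().ConditionsOverride.append(
    #       "Conditions/Alignment/Velo/VeloRight := double_v dPosXYZ = 0 -100 0")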
Example #5
def startGaudiInspect(aligndb=""):
    global appMgr

    LHCbApp().DDDBtag = ""
    LHCbApp().CondDBtag = ""

    if aligndb:
        from Configurables import (CondDB, CondDBAccessSvc)
        counter = 1
        for db in aligndb:
            alignCond = CondDBAccessSvc('AlignCond' + str(counter))
            alignCond.ConnectionString = 'sqlite_file:' + db + '/LHCBCOND'
            CondDB().addLayer(alignCond)
            counter += 1

    appConf = ApplicationMgr(OutputLevel=INFO, AppName='myBrunel')
    appMgr = GaudiPython.AppMgr()
Example #6
def getAnyDBReader(layer='CALIBOFF', svc=CondDBAccessSvc):
    CacheHighLevel = 200
    if layer == 'DDDB': CacheHighLevel = 1700
    # Put the discovered layer on top
    cfg = getConfigurable(layer, svc)
    if svc is not CondDBAccessSvc: return cfg
    try:
        cfg.ConnectionString
    except AttributeError:  # Set up connection for the 1st time
        connstr = "sqlite_file:$SQLITEDBPATH/%s.db/%s" % (layer, layer)
        if layer == 'DQFLAGS':
            cfg = CondDBAccessSvc(layer,
                                  ConnectionString=connstr,
                                  CacheLowLevel=5,
                                  CacheHighLevel=10)
        else:
            cfg = CondDBAccessSvc(layer,
                                  ConnectionString=connstr,
                                  CacheHighLevel=CacheHighLevel)
    return cfg
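
# Illustrative calls (a sketch, not from the original source): the partition name doubles
# as the sqlite file name and database name under $SQLITEDBPATH.
#   dddb = getAnyDBReader('DDDB')     # gets the large cache, CacheHighLevel=1700
#   dq   = getAnyDBReader('DQFLAGS')  # gets the small cache, CacheLowLevel=5 / CacheHighLevel=10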
Example #7
def initialise():

    if not globals()['initialised']:

        import os

        # Check results dir
        if not os.path.exists("results"): os.mkdir("results")

        from ROOT import gROOT
        # No info messages
        gROOT.ProcessLine("gErrorIgnoreLevel = kWarning;")
        # Batch mode (no TCanvas)
        gROOT.SetBatch(True)

        import GaudiPython

        # Initialise a few things
        from Configurables import DDDBConf, CondDB, LHCbApp, CondDBAccessSvc
        cDB = CondDB()

        #DDDBConf(DataType = "2009")
        #LHCbApp().DDDBtag   = "head-20110303"
        #LHCbApp().CondDBtag = "head-20110524"

        #DDDBConf(DataType = "2010")
        #LHCbApp().DDDBtag   = "head-20110303"
        #LHCbApp().CondDBtag = "head-20110524"

        #DDDBConf(DataType = "2011")
        #LHCbApp().DDDBtag   = "head-20110722"
        #LHCbApp().CondDBtag = "head-20110722"
        #LHCbApp().CondDBtag = "HEAD"

        DDDBConf(DataType="2012")
        LHCbApp().DDDBtag = "head-20120413"
        LHCbApp().CondDBtag = "cond-20120730"
        CondDB().addLayer(
            CondDBAccessSvc(
                "2012Aerogel",
                ConnectionString="sqlite_file:2012Aerogel.db/LHCBCOND",
                DefaultTAG="HEAD"))

        # Set message level to info and above only
        msgSvc().setOutputLevel(3)

        # Finally, initialize GaudiPython
        GaudiPython.AppMgr().initialize()

        # Initialise various DeRich objects
        loadRichDet()

        # flag as done
        globals()['initialised'] = True
Example #8
def startInspect():
  global appMgr
  
  LHCbApp().DDDBtag   = "head-20100518"
  LHCbApp().CondDBtag = "head-20100518"

  try:
    if alignDB:
      from Configurables import ( CondDB, CondDBAccessSvc )
      counter = 1
      for db in alignDB:
        alignCond = CondDBAccessSvc( 'AlignCond' + str(counter) )
        alignCond.ConnectionString = 'sqlite_file:' + db + '/LHCBCOND'
        CondDB().addLayer( alignCond )
        counter += 1
  except NameError:  # alignDB not defined: run without extra alignment layers
    pass

  appConf = ApplicationMgr( OutputLevel = INFO, AppName = 'myBrunel' )
  appMgr = GaudiPython.AppMgr()
  print "/dd/Structure/LHCb/BeforeMagnetRegion/TT"
    def test_010_addCondDBLayer_1(self):
        """Add one layer from CondDBAccessSvc instance"""
        # Add the layer
        layer = CondDBAccessSvc("layer")
        self.CondDB.addLayer(layer)

        applyConfigurableUsers()

        reader = allConfigurables["CondDBCnvSvc"].CondDBReader
        # check if we have a layering svc
        self.assertEquals(reader.__class__.__name__, "CondDBLayeringSvc")
        # check for the new layer...
        self.assertEqual(reader.Layers[0], layer)
        # ... plus the original one
        self.assertEqualsConfig(reader.Layers[1], orig_reader)
    def test_020_addCondDBAlternative_1(self):
        """Add one alternative from CondDBAccessSvc instance"""
        # Add the alternative
        alternative = CondDBAccessSvc("alternative")
        self.CondDB.addAlternative(alternative, "/Test")

        applyConfigurableUsers()
        
        reader = allConfigurables["CondDBCnvSvc"].CondDBReader
        # the reader should not have changed
        self.assertEqualsConfig(reader, orig_reader)
        # correct size?
        self.assertEquals(len(reader.Alternatives), len(orig_dict) + 1)
        # check the previous alternatives
        for k in orig_dict:
            self.assertEqualsConfig(reader.Alternatives[k], orig_dict[k])
        # plus the new one
        self.assertEqualsConfig(reader.Alternatives["/Test"], alternative)
Example #11
    def _checkOverrideArgs(self, accessSvc, connStr, dbFile, dbName):
        """
        Check if the accessSvc is a valid CondDBReader or build one using the
        other arguments.
        """
        kwargs = {
            "accessSvc": accessSvc,
            "connStr": connStr,
            "dbFile": dbFile,
            "dbName": dbName
        }
        if accessSvc is None:
            if not connStr:
                if dbFile:
                    if not dbName:
                        dbName = os.path.basename(dbFile)
                        m = re.match(r'([A-Z][A-Z0-9_]{0,7})(_\w+)?.db', dbName)
                        if m:
                            dbName = m.group(1)
                        else:
                            raise ValueError('invalid arguments %r' % kwargs)
                    connStr = "sqlite_file:%s/%s" % (dbFile, dbName)
                else:
                    raise ValueError('invalid arguments %r' % kwargs)
                name = dbName
            else:
                name = connStr.rsplit('/')[-1]
                if not re.match(r'[A-Z][A-Z0-9_]{0,7}', name):
                    name = 'CondDBAccessSvc'
            # make a unique name for the configurable
            name = "automatic_" + name
            name_format = name + '_%d'
            i = 0
            while name in allConfigurables:
                i += 1
                name = name_format % i
            accessSvc = CondDBAccessSvc(name, ConnectionString=connStr)
        elif type(accessSvc) not in __CondDBReaders__:  # Check for supported types
            raise TypeError("'%s' not supported as CondDBReader" %
                            accessSvc.__class__.__name__)
        return accessSvc
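
    # Sketch of the fallback naming above (illustration only; paths and names are assumptions):
    #   dbFile='/some/path/MYCOND_2016.db', dbName=None
    #     -> dbName='MYCOND', connStr='sqlite_file:/some/path/MYCOND_2016.db/MYCOND'
    #   connStr='sqlite_file:/some/path/DDDB.db/DDDB'
    #     -> configurable named 'automatic_DDDB' (suffixed '_1', '_2', ... if already taken)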
    def test_020_addCondDBAlternative_2(self):
        """Replace one alternative from CondDBAccessSvc instance"""
        path = orig_dict.keys()[0]
        
        # Add the alternative
        alternative = CondDBAccessSvc("alternative")
        self.CondDB.addAlternative(alternative, path)

        applyConfigurableUsers()
        
        reader = allConfigurables["CondDBCnvSvc"].CondDBReader
        # the reader should not have changed
        self.assertEqualsConfig(reader, orig_reader)
        # correct size?
        self.assertEquals(len(reader.Alternatives), len(orig_dict))
        # check the previous alternatives
        for k in orig_dict:
            if k != path:
                self.assertEqualsConfig(reader.Alternatives[k], orig_dict[k])
            else:
                self.assertEqualsConfig(reader.Alternatives[k], alternative)
Example #13
from TAlignment.SurveyConstraints import SurveyConstraints  # *
from GaudiConf import IOHelper
from Configurables import CaloDigitConf, CaloProcessor, GlobalRecoConf

print os.listdir(os.getcwd())

importOptions('$STDOPTS/PreloadUnits.opts')

# Load the velo conditions
CondDB().addLayer(
    dbFile="/pc2014-data3/cburr/hybrid-distortions/try_aligning/DDDB.db",
    dbName="DDDB")
CondDB().addLayer(
    dbFile="/pc2014-data3/cburr/hybrid-distortions/try_aligning/SIMCOND.db",
    dbName="SIMCOND")
alignment_conditions = CondDBAccessSvc("AlignmentConditions")
alignment_conditions.ConnectionString = "sqlite_file:/pc2014-data3/cburr/hybrid-distortions/try_aligning/Alignment_SIMCOND.db/SIMCOND"
CondDB().addLayer(alignment_conditions)

LHCbApp().Simulation = True
LHCbApp().DataType = 'Upgrade'
CondDB().Upgrade = True

detectors = [
    'VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet',
    'Tr'
]
LHCbApp().Detectors = detectors

CondDB().LoadCALIBDB = 'HLT1'
from Configurables import Escher, LHCbApp, TrackSys, EventClockSvc

#-- File catalogs. First one is read-write
FileCatalog().Catalogs = ["xmlcatalog_file:MyCatalog.xml"]

#-- Use latest database tags for real data
LHCbApp().DDDBtag = "default"
LHCbApp().CondDBtag = "default"
LHCbApp().DDDBtag = "HEAD"
LHCbApp().CondDBtag = "HEAD"

#-- Set a reasonable time for the first event
EventClockSvc().InitialTime = 1260350949785664000

from Configurables import (CondDB, CondDBAccessSvc)
otCalib = CondDBAccessSvc('OTCalib')
#otCalib.ConnectionString = 'sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/ConditionsOTCalibration.db/LHCBCOND'
#otCalib.ConnectionString = 'sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/CalibrationOTQuarterT0s_071209.db/LHCBCOND'
otCalib.ConnectionString = 'sqlite_file:/afs/cern.ch/user/a/akozlins/public/OT/LHCBCOND/Collision09_OT_ModuleT0s_220110_sigma4ns.db/LHCBCOND'
CondDB().addLayer(otCalib)

# Latest cosmic run, with CALO, OT and (!!) RICH2 (35569 events)
Escher().DatasetName = 'collisions'
Escher().InputType = 'MDF'

#EventSelector().Input.append("DATA='castor:/castor/cern.ch/grid/lhcb/data/2009/RAW/FULL/LHCb/BEAM1/62558/062558_0000000001.raw'  SVC='LHCb::MDFSelector'")
#import os
#runnr = os.environ['COSMICSRUNNR']

runnrs = ['63596']
Example #15
def extractAlignmentParameters(
        elementsWithTESAndCondDBNodes,
        since,
        until,
        valueExtractor=lambda detElm: getGlobalPositionFromGeometryInfo(
            detElm.geometry()),
        DDDBtag="default",
        CondDBtag="default",
        alignDBs=[]):
    """
    The method talking to the detector svc

    Extract alignment parameters, using valueExtractor, from all DetectorElements below
    each element in elementsWithTESAndCondDBNodes ( format { elm : ( detTES, [ condDBNode ] ) } ),
    for all IOVs between since and until (datetimes), using the condDBNodes.
    The default database is configured with database tags DDDBtag and CondDBtag,
    and all alignDBs [ (connectString, tag) ] are added as layers to the CondDB.

    Returns a dict { element : [ ( (iovBegin, iovEnd), alignmentTree ) ] }
    """
    # Static configuration of the application manager
    from Configurables import LHCbApp, ApplicationMgr
    from LHCbKernel.Configuration import FATAL, ERROR, WARNING, INFO, DEBUG, VERBOSE

    ApplicationMgr().AppName = "AlignmentCollector"
    ApplicationMgr().OutputLevel = ERROR

    LHCbApp().DDDBtag = DDDBtag
    LHCbApp().CondDBtag = CondDBtag

    # >>> This part stolen from Det/DetCond/tests/scripts/getIOVs.py
    ApplicationMgr().TopAlg = ["LoadDDDB"]

    from Configurables import EventClockSvc, FakeEventTime

    ecs = EventClockSvc(
        InitialTime=toTimeStamp(datetime(2010, 1, 1, 12, tzinfo=pytz.utc)))
    ecs.addTool(FakeEventTime, "EventTimeDecoder")
    ecs.EventTimeDecoder.StartTime = ecs.InitialTime
    ecs.EventTimeDecoder.TimeStep = toTimeStamp(timedelta(days=1))
    # <<< + "lhcbcond" below

    layers = ["LHCBCOND"]

    if len(alignDBs) > 0:
        from Configurables import CondDB, CondDBAccessSvc
        for i, (connectString, tag) in enumerate(alignDBs):
            layerName = "AlignCond%i" % i
            alignCond = CondDBAccessSvc(layerName)
            alignCond.ConnectionString = connectString
            alignCond.DefaultTAG = tag
            CondDB().addLayer(alignCond)
            layers.append(layerName)

    # run a gaudi application
    from GaudiPython import AppMgr, gbl
    gaudi = AppMgr()
    gaudi.createSvc("UpdateManagerSvc")
    updateManagerSvc = gaudi.service("UpdateManagerSvc",
                                     interface="IUpdateManagerSvc")
    gaudi.initialize()

    conddbReaders = list(
        gaudi.service(name, gbl.ICondDBReader) for name in reversed(layers))

    detDataSvc = updateManagerSvc.detDataSvc()

    alignmentTrees = dict(
        (detName, []) for detName in elementsWithTESAndCondDBNodes.iterkeys())

    for detName, (detPath,
                  condNodes) in elementsWithTESAndCondDBNodes.iteritems():
        ### get the IOVs for all elements, and combine them
        timeLine = [("gap", (since, until))]
        for layerReader in conddbReaders:
            timeLineUpdated = list(timeLine)
            alreadyInserted = 0
            for i, (typ, (gapBegin, gapEnd)) in enumerate(timeLine):
                if typ == "gap":
                    iovs = combinedIOVs([
                        list((max(toDateTime(iov.since.ns()), gapBegin),
                              min(toDateTime(iov.until.ns()), gapEnd))
                             for iov in layerReader.getIOVs(
                                 node,
                                 gbl.ICondDBReader.IOV(
                                     gbl.Gaudi.Time(toTimeStamp(gapBegin)),
                                     gbl.Gaudi.Time(toTimeStamp(gapEnd))), 0))
                        for node in TrackingAlignmentCondDBNodes[detName]
                    ])
                    if len(iovs) != 0:
                        updatedTimeSlice = list(
                            ("iov", (begin, end)) for begin, end in iovs)
                        if updatedTimeSlice[0][1][0] > gapBegin:
                            updatedTimeSlice.insert(
                                0,
                                ("gap", (gapBegin, updatedTimeSlice[0][1][0])))
                        if updatedTimeSlice[-1][1][1] < gapEnd:
                            updatedTimeSlice.append(
                                ("gap", (updatedTimeSlice[-1][1][1], gapEnd)))
                        timeLineUpdated[i + alreadyInserted:i +
                                        alreadyInserted + 1] = updatedTimeSlice
                        alreadyInserted += len(updatedTimeSlice) - 1
            logging.debug("timeline after adding %s : %s" %
                          (layerReader, timeLine))
            timeLine = timeLineUpdated
        iovs = list(timespan for typ, timespan in timeLine if typ == "iov")

        ### For every IOV, extract the parameters
        for begin, end in iovs:
            detDataSvc.setEventTime(
                gbl.Gaudi.Time(toTimeStamp(begin + (end - begin) / 2)))
            updateManagerSvc.newEvent()
            motionSystem = None if detName != "Velo" else gaudi.detSvc(
            ).getObject("/dd/Conditions/Online/Velo/MotionSystem")
            logging.info("Extracting parameters for %s between %s and %s" %
                         (detName, begin, end))
            detTree = getAlignableTreeFromDetectorElement(
                gaudi.detSvc().getObject(detPath),
                nodeValue=valueExtractor,
                parentName=detPath,
                motionSystem=motionSystem)
            detTree.name = detName

            alignmentTrees[detName].append(((begin, end), detTree))

    gaudi.finalize()

    gaudi.exit()

    return alignmentTrees
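
# A minimal call sketch (assumed inputs, for illustration only):
#   elements = {"Velo": ("/dd/Structure/LHCb/BeforeMagnetRegion/Velo",
#                        ["/Conditions/Alignment/Velo/VeloLeft"])}
#   trees = extractAlignmentParameters(
#       elements,
#       since=datetime(2015, 1, 1, tzinfo=pytz.utc),
#       until=datetime(2016, 1, 1, tzinfo=pytz.utc),
#       alignDBs=[("sqlite_file:MyAlign.db/LHCBCOND", "HEAD")])
#   # trees == { "Velo": [ ((iovBegin, iovEnd), alignmentTree), ... ] }
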
## @file
#  Small script forcing a time-out in the access to the conditions database.
from Gaudi.Configuration import *
from Configurables import CondDB, CondDBAccessSvc, DDDBConf

DDDBConf()

DDDB = CondDBAccessSvc("DDDB")
DDDB.ConnectionTimeOut = 5

#MessageSvc(OutputLevel = ERROR)

import GaudiPython
app = GaudiPython.AppMgr()
app.initialize()
app.start()

import time
app.detSvc()["/dd"]  # access the DB
print "TEST ===> start"
time.sleep(6)  # wait enough
print "TEST ===> end"
Example #17
from Configurables import Brunel
from Configurables import CondDB
from Configurables import CondDBAccessSvc
from Configurables import L0Conf
from Gaudi.Configuration import GaudiSequencer


Brunel().InputType = "DIGI"
Brunel().WithMC = True
CondDB().Upgrade = True
Brunel().Detectors = ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr']
Brunel().DataType = "Upgrade"
Brunel().OutputType = 'XDST'

# As we haven't run Moore
L0Conf().EnsureKnownTCK = False

GaudiSequencer("CheckPatSeq").Members = [
    "PrChecker",
    "TrackIPResolutionChecker",
    "VPClusterMonitor"
]

CondDB().addLayer(dbFile="check_positions/DDDB.db", dbName="DDDB")
CondDB().addLayer(dbFile="check_positions/SIMCOND.db", dbName="SIMCOND")
alignment_conditions = CondDBAccessSvc("AlignmentConditions")
alignment_conditions.ConnectionString = "sqlite_file:check_positions/Alignment_SIMCOND.db/SIMCOND"
CondDB().addLayer(alignment_conditions)
Example #18
#importOptions("$ESCHERROOT/options/DC06-Data.py")

#from Configurables import Escher
#escher = Escher()

# make sure that the algorithms know how many iterations are coming
from Configurables import TAlignment
TAlignment().NumIterations = opts.numiter
TAlignment().UpdateInFinalize = False

# set the database layer
if opts.aligndb:
    counter = 1
    for db in opts.aligndb:
        from Configurables import (CondDB, CondDBAccessSvc)
        alignCond = CondDBAccessSvc('AlignCond' + str(counter))
        if opts.lhcbcondtag:
            alignCond.ConnectionString = 'sqlite_file:' + db + '/LHCBCOND'
        else:
            alignCond.ConnectionString = 'sqlite_file:' + db + '/CALIBOFF'
        CondDB().addLayer(alignCond)
        counter += 1

if opts.dddb:
    counter = 1
    for db in opts.dddb:
        from Configurables import (CondDB, CondDBAccessSvc)
        alignCond = CondDBAccessSvc('AlignDDDB' + str(counter))
        alignCond.ConnectionString = 'sqlite_file:' + db + '/DDDB'
        CondDB().addLayer(alignCond)
        counter += 1
Example #19
from Configurables import Brunel
from Configurables import CondDB
from Configurables import CondDBAccessSvc
from Configurables import L0Conf
from Gaudi.Configuration import GaudiSequencer

Brunel().InputType = "DIGI"
Brunel().WithMC = True
CondDB().Upgrade = True
Brunel().Detectors = [
    'VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet',
    'Tr'
]
Brunel().DataType = "Upgrade"
Brunel().OutputType = 'XDST'

# As we haven't run Moore
L0Conf().EnsureKnownTCK = False

GaudiSequencer("CheckPatSeq").Members = [
    "PrChecker", "TrackIPResolutionChecker", "VPClusterMonitor"
]

CondDB().addLayer(dbFile="output/DDDB.db", dbName="DDDB")
CondDB().addLayer(dbFile="output/SIMCOND.db", dbName="SIMCOND")
alignment_conditions = CondDBAccessSvc("AlignmentConditions")
alignment_conditions.ConnectionString = "sqlite_file:output/scenarios/{{ scenario }}/Alignment_SIMCOND.db/SIMCOND"
CondDB().addLayer(alignment_conditions)
Example #20
    "SIMCOND": theApp.getProp('CondDBtag'),
    "ONLINE": 'fake'
}


def overrideONLINEWithSnapshot():
    ONLINE_2015 = CondDBAccessSvc("ONLINE_2015")
    ONLINE_2015.ConnectionString = "sqlite_file:ONLINE-2015.db/ONLINE"
    conddb.addLayer(ONLINE_2015)


appendPostConfigAction(overrideONLINEWithSnapshot)

conddb.addLayer(
    CondDBAccessSvc(
        "t0_db",
        ConnectionString="sqlite_file:OTIS_t0_2015_07_21_154882.db/LHCBCOND"))

#import AllHlt1
#conddb.RunChangeHandlerConditions = AllHlt1.ConditionMap

# Further selection
theApp.VetoHltErrorEvents = False
from Configurables import L0Conf

L0Conf().EnsureKnownTCK = False

from Configurables import TrajOTProjector

Projector = TrajOTProjector("OTProjector", UseDrift=True)
Example #21
def rawDataToNtuple(options):
    #	print options
    required_options = [
        "runNumber", "start", "end", "outputdir", "nEvtsPerStep", "totsteps"
    ]

    for check_opts in required_options:
        if not options.has_key(check_opts):
            print "Please specify minimal options!"
            print "Option \'" + check_opts + "\' is missing!"
            sys.exit()

    start = options["start"]
    end = options["end"]
    runNumber = options["runNumber"]
    outputdir = options["outputdir"]
    totsteps = options["totsteps"]
    nEvtsPerStep = options["nEvtsPerStep"]

    from Configurables import DDDBConf, CondDB, CondDBAccessSvc, NTupleSvc, EventClockSvc, Brunel, LHCbApp
    #	if options.has_key("IgnoreHeartBeat"):
    #		CondDB().IgnoreHeartBeat = options["IgnoreHeartBeat"]

    if options.has_key("addCondDBLayer"):
        altag = "HEAD"
        if options.has_key("addCondDBLayer_tag"):
            altag = options["addCondDBLayer_tag"]
        CondDB().addLayer(
            CondDBAccessSvc("myCond",
                            ConnectionString="sqlite_file:" +
                            options["addCondDBLayer"] + "/LHCBCOND",
                            DefaultTAG=altag))

    # Need this line so as to not get db errors- should be fixed properly at some point
    CondDB().IgnoreHeartBeat = True
    CondDB().EnableRunStampCheck = False

    #	customDBs = glob.glob('/group/rich/ActiveDBSlices/*.db')
    #	for db in customDBs:
    #		CondDB().addLayer( CondDBAccessSvc(os.path.basename(db), ConnectionString="sqlite_file:"+db+"/LHCBCOND", DefaultTAG="HEAD") )

    #	importOptions('$STDOPTS/DecodeRawEvent.py')
    #importOptions("$STDOPTS/RootHist.opts")
    #importOptions("$STDOPTS/RawDataIO.opts")
    #DEBUG by DisplayingHitMaps=False
    from Configurables import MDMRich1Algorithm
    mdmAlg = MDMRich1Algorithm("Rich1MDCS")
    mdmAlg.NumberOfEventsPerStep = nEvtsPerStep
    mdmAlg.StoreHistos = False
    mdmAlg.DEBUG = False

    if options.has_key("StoreHistos"):
        mdmAlg.StoreHistos = options["StoreHistos"]

    if options.has_key("DEBUG"):
        mdmAlg.DEBUG = options["DEBUG"]

    print "start step: " + str(start)
    print "stop step: " + str(end)
    print "processing " + str(nEvtsPerStep * (end - start)) + " events"

    tuplestring = "NTuple_Run%i_Steps%04d-%04d.root" % (runNumber, start, end)

    if options.has_key("TupleName"):
        tuplestring = options["TupleName"]

    histoname = "Histos_Run%i_Steps%04d-%04d.root" % (runNumber, start, end)

    if options.has_key("HistoName"):
        histoname = options["HistoName"]

    if outputdir != "":
        tuplestring = "%s/%s" % (outputdir, tuplestring)
        histoname = "%s/%s" % (outputdir, histoname)

    tuplename = "RICHTUPLE1 DATAFILE=\'%s\' TYP=\'ROOT\' OPT=\'NEW\'" % (
        tuplestring)

    # Currently put in manually. Edit here to use correct db and conddb tags
    LHCbApp().DDDBtag = "dddb-20150724"
    LHCbApp().CondDBtag = "cond-20160123"

    if options.has_key("DDDBtag"):
        LHCbApp().DDDBtag = options["DDDBtag"]
    if options.has_key("CondDBtag"):
        LHCbApp().CondDBtag = options["CondDBtag"]

    #customDBs = glob.glob('/group/rich/ActiveDBSlices/*.db')
    #for db in customDBs:
    #	CondDB().addLayer( CondDBAccessSvc(os.path.basename(db), ConnectionString="sqlite_file:"+db+"/LHCBCOND", DefaultTAG="HEAD") )

    ApplicationMgr().TopAlg += [mdmAlg]
    ApplicationMgr().ExtSvc += ['DataOnDemandSvc']
    ApplicationMgr().EvtMax = end * nEvtsPerStep

    # Timing information for application
    from Configurables import AuditorSvc, SequencerTimerTool
    ApplicationMgr().ExtSvc += ['AuditorSvc']
    ApplicationMgr().AuditAlgorithms = True
    AuditorSvc().Auditors += ['TimingAuditor']
    SequencerTimerTool().OutputLevel = 4

    LHCbApp().TimeStamp = True

    HistogramPersistencySvc().OutputFile = histoname
    NTupleSvc().Output = [tuplename]
    EventSelector().PrintFreq = 10
    EventSelector().PrintFreq = nEvtsPerStep
    EventSelector().FirstEvent = start * nEvtsPerStep
    print "First event: " + str(start * nEvtsPerStep)
    print "Last event: " + str(end * nEvtsPerStep)

    #get data, will look in local cluster first, then on castor
    isLocal = True
    if options.has_key("isLocal"):
        isLocal = options["isLocal"]
    DATA_and_Year = (getData(runNumber, start, end, totsteps, isLocal))

    DATA = DATA_and_Year["DATA"]
    if not len(DATA) > 0:
        print "Data not found in local, switching to CASTOR"
        DATA_and_Year = getData(runNumber, start, end, totsteps, not isLocal)
        DATA = DATA_and_Year["DATA"]
    if not len(DATA) > 0:
        print "DATA not found anywhere!"
        sys.exit()
    LHCbApp().DataType = str(DATA_and_Year["year"])

    EventSelector().Input = DATA
    EventClockSvc().EventTimeDecoder = "OdinTimeDecoder"

    appMgr = GaudiPython.AppMgr()
    appMgr.HistogramPersistency = "ROOT"
    #appMgr.OutputLevel=DEBUG
    evtSvc = appMgr.evtSvc()

    esel = appMgr.evtsel()
    esel.PrintFreq = nEvtsPerStep
    appMgr.initialize()

    appMgr.run(nEvtsPerStep * (end - start))
    appMgr.stop()
    appMgr.finalize()
Example #22
#LHCbApp().CondDBtag = "head-20090402"

LHCbApp().DDDBtag = "default"
LHCbApp().CondDBtag = "default"
LHCbApp().DDDBtag = "HEAD"
LHCbApp().CondDBtag = "HEAD"

from Configurables import EventClockSvc
EventClockSvc().InitialTime = 1270079584012864000
from Configurables import (CondDB, CondDBAccessSvc)
cdb = CondDB()
#cdb.PartitionConnectionString["ONLINE"] = "sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/ONLINE-201004.db/ONLINE"
#cdb.Tags["ONLINE"] = ""

# maybe it works if we read it as a layer?
myOnline = CondDBAccessSvc('MyOnline')
myOnline.ConnectionString = 'sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/ONLINE-201004.db/ONLINE'
CondDB().addLayer(myOnline)

otCalib = CondDBAccessSvc('OTCalib')
#otCalib.ConnectionString = 'sqlite_file:/afs/cern.ch/user/a/akozlins/public/OT/LHCBCOND/ModuleT0s_2.7ns_3.35ns_180310.db/LHCBCOND'
otCalib.ConnectionString = 'sqlite_file:/afs/cern.ch/user/a/akozlins/public/OT/LHCBCOND/ModuleT0s_69648_140410.db/LHCBCOND'
CondDB().addLayer(otCalib)

ttPitchFix = CondDBAccessSvc('TTPitchFix')
ttPitchFix.ConnectionString = 'sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/TTPitchFix.db/DDDB'
CondDB().addLayer(ttPitchFix)

import os
path = '/castor/cern.ch/grid/lhcb/data/2010/RAW/FULL/LHCb/COLLISION10/'
runnrs = ['69355']
#LHCbApp().CondDBtag = "head-20090402"

LHCbApp().DDDBtag = "default"
LHCbApp().CondDBtag = "default"
#LHCbApp().DDDBtag   = "HEAD"
#LHCbApp().CondDBtag = "HEAD"
LHCbApp().DDDBtag = 'head-20110823'
LHCbApp().CondDBtag = 'head-20110901'

from Configurables import (CondDB, CondDBAccessSvc)
cdb = CondDB()
#cdb.PartitionConnectionString["ONLINE"] = "sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/ONLINE-201103.db/ONLINE"
#cdb.Tags["ONLINE"] = "fake"

# maybe it works if we read it as a layer?
myOnline = CondDBAccessSvc('MyOnline')
myOnline.ConnectionString = 'sqlite_file:/afs/cern.ch/user/w/wouter/public/AlignDB/ONLINE-2011.db/ONLINE'
CondDB().addLayer(myOnline)
#importOptions("$APPCONFIGOPTS/DisableLFC.py")
cdb.UseOracle = False
cdb.DisableLFC = True

import os
runnr = os.environ['RUNNR']

filenames = ['/pool/spool/wouter/dimuons_%s.dst' % runnr]
for f in filenames:
    fullname = "DATAFILE='" + f + "' TYP='POOL_ROOTTREE' OPT='READ'"
    EventSelector().Input.append(fullname)

print "EventSelector.Input:", EventSelector().Input
Example #24
# Add sqlite database to CondDB and turn on MoEDAL geometry

############################################################################
## Add geometry data
## Two options:
## Overwrite entire DDDB with file contents
#cdb = CondDB()
#cdb.PartitionConnectionString["DDDB"] = "sqlite_file:$HOME/LHCb_software/mkingtest.db/DDDB"
#cdb.Tags["DDDB"] = "DC06"

## Add file contents as layer (should overwrite existing entries in same location)
CondDB().addLayer(
    CondDBAccessSvc(
        "MoEDAL_DDDB",
        ConnectionString=
        "sqlite_file:GEOMETRY_DB_FILE_LOCATION/GEOMETRY_DB_FILENAME/DDDB",
        DefaultTAG="HEAD"))

############################################################################
## Switch on geometry for MoEDAL detectors
geo = GiGaInputStream('Geo')

geo.StreamItems += [
    "/dd/Structure/LHCb/BeforeMagnetRegion/MoEDAL/VacTankCoverPipes"
]
geo.StreamItems += [
    "/dd/Structure/LHCb/BeforeMagnetRegion/MoEDAL/VacTankCoverHead"
]

## When using tags 3.0.0 or above, please activate the VacTankTopFlanges
Example #25
TrackSys().ExcludedLayers = ["T3X2"]
#TrackSys().TrackPatRecAlgorithms = TrackSys().DefaultPatRecAlgorithmsRun2
#VetraRecoConf().Sequence = [ "Decoding" ] + RecSysConf().DefaultTrackingSubdets # = ["Decoding", "VELO","TT","IT","OT","Tr","Vertex"]
VetraRecoConf().Sequence = [
    "Decoding", "VELO", "TT", "IT", "OT", "TrHLT1", "Vertex", "TrHLT2"
]
VetraRecoConf().TrackPatRecAlgorithms = TrackSys().DefaultPatRecAlgorithmsRun2

print '----------------------------------------------------------------------------------------------------\n\n\n\n\n'
print TrackSys().DefaultPatRecAlgorithms
print RecSysConf().DefaultTrackingSubdets
print '----------------------------------------------------------------------------------------------------\n\n\n\n\n'

from Configurables import CondDB, CondDBAccessSvc, CondDBTimeSwitchSvc
connection = "sqlite_file:$STSQLDDDBROOT/db/STCOND.db/COND"
CondDB().addLayer(CondDBAccessSvc("COND", ConnectionString=connection))
CondDB().IgnoreHeartBeat = True
CondDB().EnableRunStampCheck = False

#importOptions('$HOME/cmtuser/Vetra_v15r0/ST/STVetraAnalysis/options/TTEmulator.py')
#importOptions('$STVETRAANALYSISROOT/options/TTEmulator.py')
importOptions('$STTELL1ALGORITHMSROOT/options/ITEmulator.py')

from Configurables import STNZSResolution, STADCTrackMonitor

#ODINChecker = STODINCheck()
#ODINChecker.OutputLevel = generalOutputLevel
#ODINChecker.ODINData = "DAQ/ODIN";

#GaudiSequencer('ODINPreChecker').Members = [ODINChecker]
Example #26
def overrideONLINEWithSnapshot():
    ONLINE_2015 = CondDBAccessSvc("ONLINE_2015")
    ONLINE_2015.ConnectionString = "sqlite_file:ONLINE-2015.db/ONLINE"
    conddb.addLayer(ONLINE_2015)
Example #27
#set COND-tag
condtag = "/SIMCOND"
#if opts.simtag else "/CALIBOFF"
if opts.simtag:
    condtag = "/SIMCOND"
elif opts.lhcbcondtag:
    condtag = "/CALIBOFF"
else:
    condtag = "/LHCBCOND"

# set the database layer
if opts.aligndb:
    counter = 1
    for db in opts.aligndb:
        from Configurables import (CondDB, CondDBAccessSvc)
        alignCond = CondDBAccessSvc('AlignCond' + str(counter))
        alignCond.ConnectionString = 'sqlite_file:' + db + condtag
        CondDB().addLayer(alignCond)
        counter += 1
    print 'added databases: ', opts.aligndb

if opts.dddb:
    counter = 1
    for db in opts.dddb:
        from Configurables import (CondDB, CondDBAccessSvc)
        alignCond = CondDBAccessSvc('AlignDDDB' + str(counter))
        alignCond.ConnectionString = 'sqlite_file:' + db + '/DDDB'
        CondDB().addLayer(alignCond)
        counter += 1
    print 'added databases: ', opts.dddb
Example #28
    "sim-20130722-vc-md100",
    "Others": ["VP_Compact_MicroChannel+UT", "FT_MonoLayer"],
    "DBpath":
    "/afs/cern.ch/user/t/thead/public/velo_sim/myDDDB-LHCb-Upgrade-VP-Aug2013.heinrich",
}

LHCbApp().Simulation = True
CondDB().Upgrade = True
LHCbApp().DDDBtag = opts['DDDB']
LHCbApp().CondDBtag = opts['CondDB']
CondDB().AllLocalTagsByDataType = opts['Others']

ApplicationMgr().ExtSvc += ["ToolSvc", "DataOnDemandSvc"]

myAccessSvc = CondDBAccessSvc("mySuperHyperAccessSvc",
                              ConnectionString="sqlite_file:" +
                              opts['DBpath'] + ".db/DDDB",
                              CacheHighLevel=2000)
CondDB().addLayer(accessSvc=myAccessSvc)

DecodeRawEvent().DataOnDemand = True

# configuration ends here, now starts the execution bit


def event_summaries(fname, N, tests, results):
    """Execute all `tests` on `N` events in `fname`

    For each event a list of the return values of each
    test is sent to `results`.
    """
    app_mgr = GaudiPython.AppMgr()
TAlignment().WriteCondSubDetList = ['Muon', 'OT']
TAlignment().CondFilePrefix = nameCond + sample + '_Xml/'
TAlignment().OutputLevel = outputlevel  #
TAlignment().UseLocalFrame = uselocalframe
elementtool = GetElementsToBeAligned("GetElementsToBeAligned")
elementtool.UseLocalFrame = False

# To load CondDB Layers -------------

from Configurables import (CondDBAccessSvc, CondDB)
# Muon
#MUONAlignmentCondition = CondDBAccessSvc("MuonAlignmentCondition")
#MUONAlignmentCondition.ConnectionString = 'sqlite_file:/afs/cern.ch/user/s/svecchi/public/LHCBCOND-LocalCollisionNNet2.db/LHCBCOND'

# OT
OTAlignmentCondition = CondDBAccessSvc("OTAlignmentCondition")
#OTAlignmentCondition.ConnectionString = 'sqlite_file:/afs/cern.ch/user/s/svecchi/public/ScenarioSurveyXYZ.db/LHCBCOND'
#OTAlignmentCondition.ConnectionString = 'sqlite_file:/afs/cern.ch/user/s/svecchi/public/ScenarioSurveyAll.db/LHCBCOND'
#OTAlignmentCondition.ConnectionString = 'sqlite_file:/afs/cern.ch/user/s/svecchi/public/OTCosmicsCFrameAlignmentTxTyTz2009.db/LHCBCOND'
#OTAlignmentCondition.ConnectionString = 'sqlite_file:/afs/cern.ch/user/s/svecchi/public/OTCosmicsModulesTxCFramesTxTyTzRz2009.db/LHCBCOND'
#OTAlignmentCondition.ConnectionString = 'sqlite_file:/afs/cern.ch/user/s/svecchi/public/VeloOTTxTyFieldOff.db/LHCBCOND'
#OTAlignmentCondition.ConnectionString = 'sqlite_file:/afs/cern.ch/user/s/svecchi/public/VeloOTTxTyModulesTxFieldOff.db/LHCBCOND'
OTAlignmentCondition.ConnectionString = 'sqlite_file:/afs/cern.ch/user/s/svecchi/public/VeloOTTxTyModulesTxITTxTyRzTTTxFieldOff_20100115.db/LHCBCOND'
if sample == 'Mcarlo':
    print sample
else:
    #CondDB().addLayer( MUONAlignmentCondition )
    CondDB().addLayer(OTAlignmentCondition)

# ================================================================================================
# add all 'private' reconstruction to this sequence which is automatically added to the alignment sequence.
Example #30
    def __apply_configuration__(self):
        """
        Converts the high-level information passed as properties into low-level configuration.
        """
        # special case for online
        if self.getProp('UseDBSnapshot'): self._configureDBSnapshot()

        # In the Online/Upgrade/Simulation environment, LoadCALIBDB should be defaulted to HLT1
        if self.getProp("Online") or self.getProp('Upgrade') or self.getProp(
                'Simulation'):
            self._properties["LoadCALIBDB"].setDefault("HLT1")
        # Set up environment variables for loading CALIBOFF layers, must be before loading any tags
        LoadCALIBDB = self.getProp('LoadCALIBDB')
        loadcaliboptions = ["HLT1", "OFFLINE"]
        if LoadCALIBDB not in loadcaliboptions:
            raise ValueError(
                "'%s' is not a valid LoadCALIBDB value. Allowed: %s" %
                (LoadCALIBDB, loadcaliboptions))
        if LoadCALIBDB == "OFFLINE" and not exists(
                join(os.environ["SQLITEDBPATH"], "CALIBOFF.db")):
            LoadCALIBDB = "HLT1"  # When CALIBOFF.db is not there, reset the option
        os.environ['LoadCALIBDB'] = LoadCALIBDB

        # Set the usage of the latest global/local tags
        old_latest_Tags_prop = self.getProp(
            "UseLatestTags")  # it is deprecated
        latest_GTags_prop = self.getProp("LatestGlobalTagByDataTypes")
        if not latest_GTags_prop:  # if property not set
            latest_GTags_prop = self.getProp("LatestGlobalTagByDataType")
        latest_LTags_prop = self.getProp("LatestLocalTagsByDataType")
        all_LTags_prop = self.getProp("AllLocalTagsByDataType")

        if old_latest_Tags_prop:
            if latest_GTags_prop or latest_LTags_prop:
                log.warning(
                    "The property 'UseLatestTags' is deprecated:"
                    "'LatestGlobalTagByDataType(s)' and 'LatestLocalTagsByDataType'"
                    " will be used instead.")
            else:
                latest_GTags_prop = old_latest_Tags_prop[0]
                if type(old_latest_Tags_prop[-1]) != bool or \
                   (type(old_latest_Tags_prop[-1]) == bool and not old_latest_Tags_prop[1]):
                    latest_LTags_prop = old_latest_Tags_prop[0]

        if latest_GTags_prop:
            datatype = latest_GTags_prop
            if self.getProp("Tags"):
                self.Tags = {}
            self._useLatestTags(datatype, OnlyGlobalTags=True)
            log.warning(
                "Default global tags will be overridden with the latest ones"
                " available for '%s' data type: %s" %
                (datatype, self.getProp("Tags")))

        if latest_LTags_prop:
            datatypes = latest_LTags_prop
            #if self.getProp("LocalTags"):
            #    self.LocalTags = {}
            self._useLatestTags(datatypes, OnlyLocalTags=True)
            log.warning(
                "Latest unbound local tags on top of the latest global tags"
                " of %s data type(s) are added: %s" %
                (datatypes, self.getProp("LocalTags")))

        if all_LTags_prop:
            datatypes = all_LTags_prop
            self._useAllLocalTags(datatypes)
            log.warning("ALL local tags of %s data type(s) are added: %s" %
                        (datatypes, self.getProp("LocalTags")))

        # Import SQLDDDB specific info
        if self.getProp("UseOracle"):
            CondDBAccessSvc("ONLINE", ConnectionString="CondDBOnline/ONLINE")
            if self.getProp("DisableLFC"):
                COOLConfSvc(UseLFCReplicaSvc=False)
        elif self.getProp('UseDBSnapshot'):
            CondDBAccessSvc("ONLINE")
        else:
            configureOnlineSnapshots()
#            importOptions("$SQLDDDBROOT/options/SQLDDDB.py")

#########################################################################
# Access to ConditionsDB
##########################################################################
        conns = self.getProp("PartitionConnectionString")
        tags = self.getProp("Tags")
        # DB partitions
        partition = {}
        parttypes = [("DDDB", CondDBAccessSvc), ("LHCBCOND", CondDBAccessSvc),
                     ("ONLINE", CondDBTimeSwitchSvc),
                     ("SIMCOND", CondDBAccessSvc),
                     ("DQFLAGS", CondDBAccessSvc)]
        if LoadCALIBDB == "OFFLINE":
            # CALIBOFF not needed for the upgrade
            parttypes += [("CALIBOFF", CondDBAccessSvc)]

        for (p, t) in parttypes:
            partition[p] = getAnyDBReader(p, t)
            # Override connection strings:
            if p in conns:
                if type(partition[p]) is CondDBAccessSvc:
                    partition[p].ConnectionString = conns[p]
                    del conns[p]

            # Override connection strings for Upgrade case
            if self.getProp('Simulation') and self.getProp('Upgrade') and type(
                    partition[p]) is CondDBAccessSvc:
                partition[p].ConnectionString = os.path.join(
                    'sqlite_file:$SQLITEUPGRADEDBPATH', p + '.db', p)
            # Override tags
            if p in tags and p != "ONLINE":
                partition[p].DefaultTAG = tags[p]
                del tags[p]
            # Set the query granularity
            if p != "CALIBOFF":
                self.propagateProperty("QueryGranularity", partition[p])
            if type(partition[p]) is CondDBTimeSwitchSvc:  # also online
                for r in partition[p].Readers:
                    config = allConfigurables[eval(
                        r.split(':')[0]).split("/")[1]]
                    if isinstance(config, CondDBAccessSvc):
                        self.propagateProperty("QueryGranularity", config)
                    # Pass along the configuration for the layered DBs
                    elif isinstance(config, CondDBLayeringSvc):
                        for ly in config.Layers:
                            if isinstance(ly, CondDBAccessSvc):
                                self.propagateProperty("QueryGranularity", ly)

        if conns:
            log.warning(
                "Cannot override the connection strings of the partitions %r",
                conns.keys())
        if tags and tags.keys() != ['ONLINE']:
            log.warning("Cannot set the tag for partitions %r", tags.keys())

        # In the Online environment, IgnoreHeartBeat should be defaulted to True
        if self.getProp("Online"):
            self._properties["IgnoreHeartBeat"].setDefault(True)
        if not self.getProp("IgnoreHeartBeat"):
            if isinstance(partition["ONLINE"], CondDBAccessSvc):
                self.propagateProperty("HeartBeatCondition",
                                       partition["ONLINE"])
            elif isinstance(partition["ONLINE"], CondDBTimeSwitchSvc):
                # Add the heart beat conditions to the latest snapshot only since the
                # others are limited but valid by construction.
                if partition["ONLINE"].Readers:
                    latest = partition["ONLINE"].Readers[-1]
                    config = allConfigurables[eval(
                        latest.split(':')[0]).split("/")[1]]
                    if isinstance(config, CondDBAccessSvc):
                        self.propagateProperty("HeartBeatCondition", config)
                    # Pass along the configuration for the layered DBs
                    elif isinstance(config, CondDBLayeringSvc):
                        for ly in config.Layers:
                            #Only apply HeartBeatCondition for ONLINE
                            if isinstance(ly, CondDBAccessSvc) and ly.getName(
                            ).startswith("ONLINE_"):
                                self.propagateProperty("HeartBeatCondition",
                                                       ly)

        if not self.getProp("Simulation"):
            # Standard configurations
            #  - Reconstruction / analysis
            disp = CondDBDispatcherSvc("MainCondDBReader",
                                       MainAccessSvc=partition["DDDB"],
                                       Alternatives={
                                           "/Conditions":
                                           partition["LHCBCOND"],
                                           "/Conditions/Online":
                                           partition["ONLINE"],
                                           "/Conditions/DQ":
                                           partition["DQFLAGS"]
                                       })
        else:
            #  - Simulation
            disp = CondDBDispatcherSvc(
                "SimulationCondDBReader",
                MainAccessSvc=partition["DDDB"],
                Alternatives={"/Conditions": partition["SIMCOND"]})
        CondDBCnvSvc(CondDBReader=disp)

        if not (self.getProp("Online") or self.getProp("Simulation")):
            self._properties["EnableRunStampCheck"].setDefault(True)
        if self.getProp("EnableRunStampCheck"):
            from Configurables import RunStampCheck
            rsc = RunStampCheck()
            self.propagateProperty("RunStampCondition", rsc)
            ApplicationMgr().ExtSvc.append(rsc)

        # Load the CALIBOFF layer above everything if it exists
#        if len([x for x in parttypes if x[0] == 'CALIBOFF']):
#            self._addLayer(getAnyDBReader('CALIBOFF'))

        localTags = self.getProp("LocalTags")
        not_applied = []
        for p in localTags:
            if p in partition:
                taglist = list(localTags[p])
                # stack them in reverse order so the first tag ends up on top of the others
                taglist.reverse()
                i = 0  # counter
                if p == "CALIBOFF":
                    if LoadCALIBDB != "OFFLINE":
                        raise ValueError(
                            "invalid argument LoadCALIBDB set at '%s' instead of 'OFFLINE' for accessing local tags for CALIBOFF.db"
                            % LoadCALIBDB)
                    pcolayers = []
                    for t in taglist:
                        pcolayers.append(partition[p].clone("CALIBOFF_%d" % i,
                                                            DefaultTAG=t))
                        i += 1
                        for r in partition["ONLINE"].Readers:
                            config = allConfigurables[eval(
                                r.split(':')[0]).split("/")[1]]
                            if isinstance(config, CondDBLayeringSvc):
                                config.Layers = pcolayers + config.Layers
                elif type(partition[p]) is not CondDBTimeSwitchSvc:
                    for t in taglist:
                        self._addLayer(partition[p].clone("%s_%d" % (p, i),
                                                          DefaultTAG=t))
                        i += 1
                else:
                    not_applied.append(p)
            else:
                not_applied.append(p)
        if not_applied:
            log.warning("Cannot set the local tags for partitions %r",
                        not_applied)

        # Modify partitions to use local copies of the DBs
        # before adding user layers and alternatives, which should be already local.
        # This is a no-operation if the property is not set
        self.__make_sqlite_local_copy__(CondDBCnvSvc())

        # Add layers and alternatives
        call = {
            self.LAYER: self._addLayer,
            self.ALTERNATIVE: self._addAlternative
        }
        for override in self.getProp("Overrides"):
            apply(call[override[0]], override[1:])

        # Add the logger
        filename = self.getProp("LogFile")
        if filename:
            cnvSvc = allConfigurables["CondDBCnvSvc"]
            cnvSvc.CondDBReader = CondDBLogger(
                LoggedReader=cnvSvc.CondDBReader, LogFile=filename)

        # Suppress pointless warning from COOL_2_5_0
        msgSvc = getConfigurable("MessageSvc")
        msgSvc.setError.append("RelationalDatabase")

        # Set up Virtual File System service, can be used by ParticlePropertySvc
        from Gaudi.Configuration import VFSSvc
        from Configurables import CondDBEntityResolver
        VFSSvc().FileAccessTools.append(CondDBEntityResolver())
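
        # Illustrative summary of the reader hierarchy built above (not part of the
        # original source): CondDBCnvSvc.CondDBReader ends up as a CondDBDispatcherSvc
        # ("MainCondDBReader" for data, "SimulationCondDBReader" for simulation) with
        # DDDB as MainAccessSvc and /Conditions, /Conditions/Online, /Conditions/DQ
        # (or just /Conditions -> SIMCOND) as alternatives; user layers/alternatives
        # from the "Overrides" property are stacked on top, and CondDBLogger wraps the
        # whole reader when a LogFile is configured.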