Example #1
def getOnlineDBReader(ym_tuple, granularity='YEARLY', connStrFunc=None):
    cnstr = ''
    ymstr = ''
    if granularity == 'YEARLY':
        ymstr = "%04d" % ym_tuple[0]
        cnstr = connStrFunc((ym_tuple[0], 13))
    else:
        ymstr = "%04d%02d" % ym_tuple
        cnstr = connStrFunc(ym_tuple)

    ptnm = "ONLINE_" + ymstr
    accSvc = CondDBAccessSvc(ptnm, ConnectionString=cnstr)
    dblayers = [accSvc]
    LoadCALIBDB = os.environ.get('LoadCALIBDB')
    if ym_tuple[0] < 2015 or LoadCALIBDB != "OFFLINE":
        # For data types before 2015, no CALIBOFF layer is needed
        return accSvc
    dbpath = os.environ["SQLITEDBPATH"]
    layer = 'CALIBOFF'
    if exists(join(dbpath, layer + '.db')):
        # Put the discovered layer on top
        cfg = getConfigurable(layer, CondDBAccessSvc)
        try:
            cfg.ConnectionString
        except AttributeError:  # Set up connection for the 1st time
            cfg = CondDBAccessSvc("CALIBOFF",
                                  ConnectionString=cnstr.replace(
                                      'ONLINE-%s.db/ONLINE' % ymstr,
                                      "%s.db/%s" % (layer, layer)),
                                  CacheHighLevel=200)
        dblayers.insert(0, cfg)

    if (len(dblayers) == 1): return accSvc  # In case no CALIBOFF.db is found
    return CondDBLayeringSvc("ONLINELAYER_" + ymstr, Layers=dblayers)
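A minimal sketch of the connection-string function that getOnlineDBReader expects via connStrFunc. The exact format below is an assumption for illustration; only the use of month 13 for the whole-year snapshot and the ONLINE-<ymstr>.db/ONLINE shape implied by the replace() call above are taken from the example.

def conn_str(ym_tuple):
    # Hypothetical builder; the real connStrFunc is supplied by the caller.
    year, month = ym_tuple
    # Month 13 is what the YEARLY branch above passes to request a whole-year snapshot.
    ymstr = "%04d" % year if month == 13 else "%04d%02d" % (year, month)
    return "sqlite_file:$SQLITEDBPATH/ONLINE-%s.db/ONLINE" % ymstr

print(conn_str((2016, 13)))  # sqlite_file:$SQLITEDBPATH/ONLINE-2016.db/ONLINE
print(conn_str((2016, 4)))   # sqlite_file:$SQLITEDBPATH/ONLINE-201604.db/ONLINE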
Example #2
    def _action():
        """
        Reset all DaVinci sequences
        """
        from Gaudi.Configuration import allConfigurables
        from Gaudi.Configuration import getConfigurable
        for seq in ('DaVinciInitSeq', 'DaVinciMainSequence', 'DaVinciSequence',
                    'MonitoringSequence', 'FilteredEventSeq'):

            if seq not in allConfigurables: continue
            cSeq = getConfigurable(seq)
            if cSeq and hasattr(cSeq, 'Members'):
                logger.info('Reset the sequence %s' % cSeq.name())
                cSeq.Members = []

        ## reset the list of top-level algorithms
        from Configurables import ApplicationMgr
        a = ApplicationMgr()
        a.TopAlg = []
        a.OutputLevel = options.OutputLevel

        from Configurables import MessageSvc
        m = MessageSvc(OutputLevel=options.OutputLevel)

        from GaudiConf import IOHelper
        ioh = IOHelper()
        ioh.setupServices()
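A self-contained sketch of the reset pattern above, with a plain dict standing in for Gaudi's allConfigurables registry; the _Seq class and its contents are illustrative only.

class _Seq(object):
    # Stand-in for a Gaudi sequencer configurable (illustrative only).
    def __init__(self, name):
        self._name = name
        self.Members = ['SomeAlg', 'AnotherAlg']
    def name(self):
        return self._name

allConfigurables = {'DaVinciInitSeq': _Seq('DaVinciInitSeq')}

for seq in ('DaVinciInitSeq', 'DaVinciMainSequence'):
    if seq not in allConfigurables:   # only touch sequences configured in this job
        continue
    cSeq = allConfigurables[seq]
    if cSeq and hasattr(cSeq, 'Members'):
        cSeq.Members = []             # drop every scheduled algorithm

print(allConfigurables['DaVinciInitSeq'].Members)  # []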
Example #3
    def _silent_action_ () :
        from Gaudi.Configuration import allConfigurables
        from Gaudi.Configuration import getConfigurable
        keys = allConfigurables.keys()
        s1 = set( keys )
        s2 = set( lst )
        s  = s1.intersection( s2 )
        for i in s :
            c = getConfigurable( i )
            if c and hasattr ( c , 'OutputLevel' ) :
                c.OutputLevel = 4
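The same silencing logic as a standalone sketch; the _Cfg class and the contents of lst are assumptions, while OutputLevel = 4 (WARNING in Gaudi's numbering) comes from the example above.

class _Cfg(object):
    # Dummy configurable with an OutputLevel property (illustrative only).
    OutputLevel = 3

allConfigurables = {'MessageSvc': _Cfg(), 'DaVinciInitSeq': _Cfg()}
lst = ['MessageSvc', 'NotConfiguredSvc']   # only configured names are acted upon

for name in set(allConfigurables).intersection(lst):
    c = allConfigurables[name]
    if c and hasattr(c, 'OutputLevel'):
        c.OutputLevel = 4                  # keep warnings and errors only

print(allConfigurables['MessageSvc'].OutputLevel)      # 4
print(allConfigurables['DaVinciInitSeq'].OutputLevel)  # 3 (not in lst, untouched)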
Example #4
def getAnyDBReader(layer='CALIBOFF', svc=CondDBAccessSvc):
    CacheHighLevel = 200
    if layer == 'DDDB': CacheHighLevel = 1700
    # Put the discovered layer on top
    cfg = getConfigurable(layer, svc)
    if svc is not CondDBAccessSvc: return cfg
    try:
        cfg.ConnectionString
    except AttributeError:  # Set up connection for the 1st time
        connstr = "sqlite_file:$SQLITEDBPATH/%s.db/%s" % (layer, layer)
        if layer == 'DQFLAGS':
            cfg = CondDBAccessSvc(layer,
                                  ConnectionString=connstr,
                                  CacheLowLevel=5,
                                  CacheHighLevel=10)
        else:
            cfg = CondDBAccessSvc(layer,
                                  ConnectionString=connstr,
                                  CacheHighLevel=CacheHighLevel)
    return cfg
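A sketch of the configure-once idiom used above (probe an attribute and fall back on AttributeError); the _Svc class and _registry are stand-ins for the real configurable machinery, not part of the framework.

class _Svc(object):
    # Minimal stand-in for a configurable: keyword properties become attributes.
    def __init__(self, name, **props):
        self._name = name
        self.__dict__.update(props)

_registry = {}

def get_configurable(name):
    # First call returns a bare placeholder, like getConfigurable for an unconfigured service.
    return _registry.setdefault(name, _Svc(name))

layer = 'CALIBOFF'
cfg = get_configurable(layer)
try:
    cfg.ConnectionString                  # already configured?
except AttributeError:                    # no: set up the connection for the first time
    _registry[layer] = _Svc(layer,
                            ConnectionString="sqlite_file:$SQLITEDBPATH/%s.db/%s" % (layer, layer),
                            CacheHighLevel=200)

print(_registry[layer].ConnectionString)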
Example #5
def killDAQ(nodes=['/Event/DAQ', '/Event/pRec'], logger=None):
    """
    Configure node killing agents for uDST processing
    """
    #
    if not logger: logger = log
    #
    from Configurables import EventNodeKiller
    killer = EventNodeKiller("KillDAQ")

    for node in nodes:
        if node in killer.Nodes: continue
        killer.Nodes.append(node)

    try:
        from Gaudi.Configuration import getConfigurable
        conf = getConfigurable('DaVinciEventInitSeq')
        conf.Members.insert(0, killer)
        logger.info("Add           killer agent for: %s" % killer.Nodes)
    except Exception:
        logger.error("Failed to add killer agent for: %s" % killer.Nodes)
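A plain-list sketch of what killDAQ arranges: deduplicate the node list and insert the killer at the front of the initialisation sequence so it runs before the other members; the member names are illustrative.

killer_nodes = []
for node in ['/Event/DAQ', '/Event/pRec', '/Event/DAQ']:   # duplicate on purpose
    if node in killer_nodes:
        continue
    killer_nodes.append(node)
print(killer_nodes)            # ['/Event/DAQ', '/Event/pRec']

members = ['DaVinciInit', 'SomeOtherAlg']   # pretend DaVinciEventInitSeq.Members
members.insert(0, 'KillDAQ')                # index 0: the killer runs first
print(members)                 # ['KillDAQ', 'DaVinciInit', 'SomeOtherAlg']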
Example #6
    def __apply_configuration__(self):
        """
        Converts the high-level information passed as properties into low-level configuration.
        """
        # special case for online
        if self.getProp('UseDBSnapshot'): self._configureDBSnapshot()

        # In the Online/Upgrade/Simulation environment, LoadCALIBDB should be defaulted to HLT1
        if self.getProp("Online") or self.getProp('Upgrade') or self.getProp(
                'Simulation'):
            self._properties["LoadCALIBDB"].setDefault("HLT1")
        # Set up environment variables for loading CALIBOFF layers, must be before loading any tags
        LoadCALIBDB = self.getProp('LoadCALIBDB')
        loadcaliboptions = ["HLT1", "OFFLINE"]
        if LoadCALIBDB not in loadcaliboptions:
            raise ValueError(
                "'%s' is not a valid LoadCALIBDB value. Allowed: %s" %
                (LoadCALIBDB, loadcaliboptions))
        if LoadCALIBDB == "OFFLINE" and not exists(
                join(os.environ["SQLITEDBPATH"], "CALIBOFF.db")):
            LoadCALIBDB = "HLT1"  # When CALIBOFF.db is not there, reset the option
        os.environ['LoadCALIBDB'] = LoadCALIBDB

        # Set the usage of the latest global/local tags
        old_latest_Tags_prop = self.getProp(
            "UseLatestTags")  # it is deprecated
        latest_GTags_prop = self.getProp("LatestGlobalTagByDataTypes")
        if not latest_GTags_prop:  # if property not set
            latest_GTags_prop = self.getProp("LatestGlobalTagByDataType")
        latest_LTags_prop = self.getProp("LatestLocalTagsByDataType")
        all_LTags_prop = self.getProp("AllLocalTagsByDataType")

        if old_latest_Tags_prop:
            if latest_GTags_prop or latest_LTags_prop:
                log.warning(
                    "The property 'UseLatestTags' is deprecated:"
                    "'LatestGlobalTagByDataType(s)' and 'LatestLocalTagsByDataType'"
                    " will be used instead.")
            else:
                latest_GTags_prop = old_latest_Tags_prop[0]
                if type(old_latest_Tags_prop[-1]) != bool or \
                   (type(old_latest_Tags_prop[-1]) == bool and not old_latest_Tags_prop[1]):
                    latest_LTags_prop = old_latest_Tags_prop[0]

        if latest_GTags_prop:
            datatype = latest_GTags_prop
            if self.getProp("Tags"):
                self.Tags = {}
            self._useLatestTags(datatype, OnlyGlobalTags=True)
            log.warning(
                "Default global tags will be overridden with the latest ones"
                " available for '%s' data type: %s" %
                (datatype, self.getProp("Tags")))

        if latest_LTags_prop:
            datatypes = latest_LTags_prop
            #if self.getProp("LocalTags"):
            #    self.LocalTags = {}
            self._useLatestTags(datatypes, OnlyLocalTags=True)
            log.warning(
                "Latest unbound local tags on top of the latest global tags"
                " of %s data type(s) are added: %s" %
                (datatypes, self.getProp("LocalTags")))

        if all_LTags_prop:
            datatypes = all_LTags_prop
            self._useAllLocalTags(datatypes)
            log.warning("ALL local tags of %s data type(s) are added: %s" %
                        (datatypes, self.getProp("LocalTags")))

        # Import SQLDDDB specific info
        if self.getProp("UseOracle"):
            CondDBAccessSvc("ONLINE", ConnectionString="CondDBOnline/ONLINE")
            if self.getProp("DisableLFC"):
                COOLConfSvc(UseLFCReplicaSvc=False)
        elif self.getProp('UseDBSnapshot'):
            CondDBAccessSvc("ONLINE")
        else:
            configureOnlineSnapshots()
#            importOptions("$SQLDDDBROOT/options/SQLDDDB.py")

        #########################################################################
        # Access to ConditionsDB
        #########################################################################
        conns = self.getProp("PartitionConnectionString")
        tags = self.getProp("Tags")
        # DB partitions
        partition = {}
        parttypes = [("DDDB", CondDBAccessSvc), ("LHCBCOND", CondDBAccessSvc),
                     ("ONLINE", CondDBTimeSwitchSvc),
                     ("SIMCOND", CondDBAccessSvc),
                     ("DQFLAGS", CondDBAccessSvc)]
        if LoadCALIBDB == "OFFLINE":
            # CALIBOFF not needed for the upgrade
            parttypes += [("CALIBOFF", CondDBAccessSvc)]

        for (p, t) in parttypes:
            partition[p] = getAnyDBReader(p, t)
            # Override connection strings:
            if p in conns:
                if type(partition[p]) is CondDBAccessSvc:
                    partition[p].ConnectionString = conns[p]
                    del conns[p]

            # Override connection strings for Upgrade case
            if self.getProp('Simulation') and self.getProp('Upgrade') and type(
                    partition[p]) is CondDBAccessSvc:
                partition[p].ConnectionString = os.path.join(
                    'sqlite_file:$SQLITEUPGRADEDBPATH', p + '.db', p)
            # Override tags
            if p in tags and p != "ONLINE":
                partition[p].DefaultTAG = tags[p]
                del tags[p]
            # Set the query granularity
            if p != "CALIBOFF":
                self.propagateProperty("QueryGranularity", partition[p])
            if type(partition[p]) is CondDBTimeSwitchSvc:  # also online
                for r in partition[p].Readers:
                    config = allConfigurables[eval(
                        r.split(':')[0]).split("/")[1]]
                    if isinstance(config, CondDBAccessSvc):
                        self.propagateProperty("QueryGranularity", config)
                    # Pass along the configuration for the layered DBs
                    elif isinstance(config, CondDBLayeringSvc):
                        for ly in config.Layers:
                            if isinstance(ly, CondDBAccessSvc):
                                self.propagateProperty("QueryGranularity", ly)

        if conns:
            log.warning(
                "Cannot override the connection strings of the partitions %r",
                conns.keys())
        if tags and list(tags.keys()) != ['ONLINE']:
            log.warning("Cannot set the tag for partitions %r", tags.keys())

        # In the Online environment, IgnoreHeartBeat should be defaulted to True
        if self.getProp("Online"):
            self._properties["IgnoreHeartBeat"].setDefault(True)
        if not self.getProp("IgnoreHeartBeat"):
            if isinstance(partition["ONLINE"], CondDBAccessSvc):
                self.propagateProperty("HeartBeatCondition",
                                       partition["ONLINE"])
            elif isinstance(partition["ONLINE"], CondDBTimeSwitchSvc):
                # Add the heart beat conditions to the latest snapshot only since the
                # others are limited but valid by construction.
                if partition["ONLINE"].Readers:
                    latest = partition["ONLINE"].Readers[-1]
                    config = allConfigurables[eval(
                        latest.split(':')[0]).split("/")[1]]
                    if isinstance(config, CondDBAccessSvc):
                        self.propagateProperty("HeartBeatCondition", config)
                    # Pass along the configuration for the layered DBs
                    elif isinstance(config, CondDBLayeringSvc):
                        for ly in config.Layers:
                            #Only apply HeartBeatCondition for ONLINE
                            if isinstance(ly, CondDBAccessSvc) and \
                               ly.getName().startswith("ONLINE_"):
                                self.propagateProperty("HeartBeatCondition",
                                                       ly)

        if not self.getProp("Simulation"):
            # Standard configurations
            #  - Reconstruction / analysis
            disp = CondDBDispatcherSvc("MainCondDBReader",
                                       MainAccessSvc=partition["DDDB"],
                                       Alternatives={
                                           "/Conditions":
                                           partition["LHCBCOND"],
                                           "/Conditions/Online":
                                           partition["ONLINE"],
                                           "/Conditions/DQ":
                                           partition["DQFLAGS"]
                                       })
        else:
            #  - Simulation
            disp = CondDBDispatcherSvc(
                "SimulationCondDBReader",
                MainAccessSvc=partition["DDDB"],
                Alternatives={"/Conditions": partition["SIMCOND"]})
        CondDBCnvSvc(CondDBReader=disp)

        if not (self.getProp("Online") or self.getProp("Simulation")):
            self._properties["EnableRunStampCheck"].setDefault(True)
        if self.getProp("EnableRunStampCheck"):
            from Configurables import RunStampCheck
            rsc = RunStampCheck()
            self.propagateProperty("RunStampCondition", rsc)
            ApplicationMgr().ExtSvc.append(rsc)

        # Load the CALIBOFF layer above everything if it exists
#        if len([x for x in parttypes if x[0] == 'CALIBOFF']):
#            self._addLayer(getAnyDBReader('CALIBOFF'))

        localTags = self.getProp("LocalTags")
        not_applied = []
        for p in localTags:
            if p in partition:
                taglist = list(localTags[p])
                # Stack them in reverse order so that the first one ends up on top of the others
                taglist.reverse()
                i = 0  # counter
                if p == "CALIBOFF":
                    if LoadCALIBDB != "OFFLINE":
                        raise ValueError(
                            "invalid argument LoadCALIBDB set at '%s' instead of 'OFFLINE' for accessing local tags for CALIBOFF.db"
                            % LoadCALIBDB)
                    pcolayers = []
                    for t in taglist:
                        pcolayers.append(partition[p].clone("CALIBOFF_%d" % i,
                                                            DefaultTAG=t))
                        i += 1
                        for r in partition["ONLINE"].Readers:
                            config = allConfigurables[eval(
                                r.split(':')[0]).split("/")[1]]
                            if isinstance(config, CondDBLayeringSvc):
                                config.Layers = pcolayers + config.Layers
                elif type(partition[p]) is not CondDBTimeSwitchSvc:
                    for t in taglist:
                        self._addLayer(partition[p].clone("%s_%d" % (p, i),
                                                          DefaultTAG=t))
                        i += 1
                else:
                    not_applied.append(p)
            else:
                not_applied.append(p)
        if not_applied:
            log.warning("Cannot set the local tags for partitions %r",
                        not_applied)

        # Modify partitions to use local copies of the DBs
        # before adding user layers and alternatives, which should be already local.
        # This is a no-operation if the property is not set
        self.__make_sqlite_local_copy__(CondDBCnvSvc())

        # Add layers and alternatives
        call = {
            self.LAYER: self._addLayer,
            self.ALTERNATIVE: self._addAlternative
        }
        for override in self.getProp("Overrides"):
            call[override[0]](*override[1:])

        # Add the logger
        filename = self.getProp("LogFile")
        if filename:
            cnvSvc = allConfigurables["CondDBCnvSvc"]
            cnvSvc.CondDBReader = CondDBLogger(
                LoggedReader=cnvSvc.CondDBReader, LogFile=filename)

        # Suppress pointless warning from COOL_2_5_0
        msgSvc = getConfigurable("MessageSvc")
        msgSvc.setError.append("RelationalDatabase")

        # Set up Virtual File System service, can be used by ParticlePropertySvc
        from Gaudi.Configuration import VFSSvc
        from Configurables import CondDBEntityResolver
        VFSSvc().FileAccessTools.append(CondDBEntityResolver())
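A generic Python aside on why this configurable compares LoadCALIBDB with == rather than is: strings built at run time are usually distinct objects from the interned literals, so identity checks are unreliable (and raise a SyntaxWarning on Python 3.8+). Nothing below is LHCb-specific.

loadcalibdb = ''.join(['OFF', 'LINE'])   # built at run time, so not the interned literal

print(loadcalibdb == 'OFFLINE')   # True: value comparison, what the configuration needs
print(loadcalibdb is 'OFFLINE')   # typically False (and a SyntaxWarning on Python 3.8+):
                                  # 'is' tests object identity, not text equality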
Example #7
    def __make_sqlite_local_copy__(self,
                                   accsvc,
                                   local_dir=None,
                                   force_copy=None):
        if isinstance(accsvc, str):
            # convert the string in an actual configurable instance
            # This is both for backward compatibility and CondDBTimeSwitchSvc
            if "/" in accsvc:
                tp, name = accsvc.split("/", 1)
            else:
                tp = name = accsvc
            accsvc = getConfigurable(name, tp)
        if local_dir is None:
            local_dir = self.getProp("SQLiteLocalCopiesDir")
        if force_copy is None:
            force_copy = self.getProp("OverwriteSQLiteLocalCopy")
        # If the directory for the local copies is not specified, we do nothing
        if not local_dir:
            return accsvc
        # Check if we are using Oracle or SQLite
#        if self.getProp("UseOracle"):
#            log.warning("Conflicting properties in CondDB Configurable: "
#                        "ignoring SQLiteLocalCopiesDir because UseOracle is set to True")
#            return accsvc
        # Modify partitions to use local copies of the DBs
        newaccsvc = accsvc  # fallback return value (no change)
        if isinstance(accsvc, CondDBAccessSvc):
            # replace the reader with another
            m = re.match(r"^sqlite_file:(.*)/([_0-9A-Z]{1,8})$",
                         accsvc.getProp("ConnectionString"))
            if not m:  # not SQLite connection string
                return accsvc
            newaccsvc = CondDBSQLiteCopyAccSvc(accsvc.name() + "_local")
            newaccsvc.OriginalFile = m.group(1)
            newaccsvc.DestinationFile = os.path.join(
                local_dir, os.path.basename(m.group(1)))
            newaccsvc.DBName = m.group(2)
            newaccsvc.ForceCopy = force_copy
            newaccsvc.IgnoreCopyError = not force_copy  # ignore copy errors if we do not overwrite (needed for local tags)
            if hasattr(accsvc, "CacheHighLevel"):
                newaccsvc.CacheHighLevel = accsvc.CacheHighLevel
        elif isinstance(accsvc, CondDBDispatcherSvc):
            # use the same dispatcher replacing its content
            mainAccSvc = accsvc.getProp("MainAccessSvc")
            accsvc.MainAccessSvc = self.__make_sqlite_local_copy__(
                mainAccSvc, local_dir)
            alternatives = accsvc.getProp("Alternatives")
            for alt in alternatives.keys():
                accsvc.Alternatives[alt] = \
                   self.__make_sqlite_local_copy__(alternatives[alt], local_dir)
        elif isinstance(accsvc, CondDBLayeringSvc):
            # use the same layering service replacing its content
            new_layers = []
            for layer in accsvc.getProp("Layers"):
                new_layers.append(
                    self.__make_sqlite_local_copy__(layer, local_dir))
            accsvc.Layers = new_layers
        elif isinstance(accsvc, CondDBTimeSwitchSvc):
            # use the same time switcher replacing its content,
            # but we need to parse its options (in format "'%s':(%d,%d)")
            readers_list = accsvc.getProp("Readers")
            new_readers = []
            for line in readers_list:
                # Parse the line for the reader (it looks like "'name':(0,1)")
                r, iov = map(eval, line.rsplit(":"))
                new_reader = self.__make_sqlite_local_copy__(r, local_dir)
                new_readers.append("'%s':(%d,%d)" %
                                   (new_reader.getFullName(), iov[0], iov[1]))
            accsvc.Readers = new_readers
        elif isinstance(accsvc, CondDBLogger):
            # use the same logger replacing its content
            logged = accsvc.getProp("LoggedReader")
            accsvc.LoggedReader = self.__make_sqlite_local_copy__(
                logged, local_dir)
        elif isinstance(accsvc, CondDBCnvSvc):
            # use the same conversion service replacing its content
            reader = accsvc.getProp("CondDBReader")
            accsvc.CondDBReader = self.__make_sqlite_local_copy__(
                reader, local_dir)
        return newaccsvc
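A self-contained check of the SQLite connection-string pattern used in __make_sqlite_local_copy__; the example string is an assumption that follows the sqlite_file:<file>/<DBNAME> convention seen elsewhere in these examples.

import re

conn = "sqlite_file:$SQLITEDBPATH/LHCBCOND.db/LHCBCOND"
m = re.match(r"^sqlite_file:(.*)/([_0-9A-Z]{1,8})$", conn)
if m:
    print(m.group(1))   # $SQLITEDBPATH/LHCBCOND.db -> file to copy into the local directory
    print(m.group(2))   # LHCBCOND                  -> database name inside that file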