def testConfigRoot(self):
    """Check that ``setConfigRoot`` injects the new root into every
    root-dependent key of a non-defaulted config.
    """
    fullConfig = DatastoreConfig(self.configFile)
    partialConfig = DatastoreConfig(self.configFile, mergeDefaults=False)
    repoRoot = "/random/location"
    self.datastoreType.setConfigRoot(repoRoot, partialConfig, fullConfig)
    # Some datastore types declare no root-dependent keys; nothing to check.
    if not self.rootKeys:
        return
    for key in self.rootKeys:
        self.assertIn(repoRoot, partialConfig[key])
def __init__(self, config: Union[Config, str],
             bridgeManager: DatastoreRegistryBridgeManager,
             butlerRoot: Union[str, None] = None):
    """Construct the chained datastore and all of its children.

    Parameters
    ----------
    config : `Config` or `str`
        Configuration (or path to one) describing this datastore; its
        ``datastores`` entry lists the child datastore configurations.
    bridgeManager : `DatastoreRegistryBridgeManager`
        Registry bridge manager shared with every child datastore.
    butlerRoot : `str`, optional
        Repository root, forwarded unchanged to each child constructor.
        Annotated ``Union[str, None]`` because implicit ``Optional``
        (``str = None``) is deprecated by PEP 484.

    Raises
    ------
    DatastoreValidationError
        If ``datastore_constraints`` is present but its length does not
        match the number of child datastores.
    """
    super().__init__(config, bridgeManager)

    # Scan for child datastores and instantiate them with the same registry
    self.datastores = []
    for c in self.config["datastores"]:
        c = DatastoreConfig(c)
        datastoreType = doImport(c["cls"])
        datastore = datastoreType(c, bridgeManager, butlerRoot=butlerRoot)
        log.debug("Creating child datastore %s", datastore.name)
        self.datastores.append(datastore)

    # Name ourself based on our children
    if self.datastores:
        # We must set the names explicitly
        self._names = [d.name for d in self.datastores]
        childNames = ",".join(self.names)
    else:
        # Timestamp makes each empty chain's name unique.
        childNames = "(empty@{})".format(time.time())
        self._names = [childNames]
    self.name = "{}[{}]".format(type(self).__qualname__, childNames)

    # We are ephemeral only if every child datastore declares itself
    # ephemeral; all() is vacuously True for an empty chain, matching
    # the original flag-loop behaviour.
    self.isEphemeral = all(d.isEphemeral for d in self.datastores)

    # per-datastore override constraints
    if "datastore_constraints" in self.config:
        overrides = self.config["datastore_constraints"]
        # One override entry is required per child datastore.
        if len(overrides) != len(self.datastores):
            raise DatastoreValidationError(
                f"Number of registered datastores ({len(self.datastores)})"
                " differs from number of constraints overrides"
                f" {len(overrides)}")
        self.datastoreConstraints = [
            Constraints(c.get("constraints"), universe=bridgeManager.universe)
            for c in overrides
        ]
    else:
        # No overrides: a None placeholder per child datastore.
        self.datastoreConstraints = (None, ) * len(self.datastores)

    log.debug("Created %s (%s)", self.name,
              ("ephemeral" if self.isEphemeral else "permanent"))
def setUpClass(cls):
    """One-time fixture setup: storage classes, datastore class, universe."""
    # Storage Classes are fixed for all datastores in these tests.
    cls.storageClassFactory = StorageClassFactory()
    cls.storageClassFactory.addFromConfig(
        os.path.join(TESTDIR, "config/basic/storageClasses.yaml"))

    # The datastore class is taken from the configuration file itself
    # rather than assumed, so the tests follow whatever "cls" names.
    cls.datastoreType = doImport(DatastoreConfig(cls.configFile)["cls"])
    cls.universe = DimensionUniverse.fromConfig()
def setUpClass(cls):
    """One-time fixture setup; skipped entirely when afw is unavailable."""
    if lsst.afw is None:
        raise unittest.SkipTest("afw not available.")

    # Base classes need to know where the test directory is.
    cls.testDir = TESTDIR

    # Storage Classes are fixed for all datastores in these tests.
    cls.storageClassFactory = StorageClassFactory()
    cls.storageClassFactory.addFromConfig(
        os.path.join(TESTDIR, "config/basic/storageClasses.yaml"))

    # Obtain the datastore class from the configuration file itself
    # instead of hard-coding a constructor name here.
    cls.datastoreType = doImport(DatastoreConfig(cls.configFile)["cls"])
def setConfigRoot(cls, root: str, config: Config, full: Config,
                  overwrite: bool = True) -> None:
    """Propagate filesystem-dependent settings for a new repository root
    to every child datastore configuration.

    Parameters
    ----------
    root : `str`
        Filesystem path to the root of the data repository.
    config : `Config`
        A `Config` to update. Only the subset understood by
        this component will be updated. Will not expand
        defaults.
    full : `Config`
        A complete config with all defaults expanded that can be
        converted to a `DatastoreConfig`. Read-only and will not be
        modified by this method.
        Repository-specific options that should not be obtained
        from defaults when Butler instances are constructed
        should be copied from ``full`` to ``config``.
    overwrite : `bool`, optional
        If `False`, do not modify a value in ``config`` if the value
        already exists.  Default is always to overwrite with the provided
        ``root``.

    Notes
    -----
    If a keyword is explicitly defined in the supplied ``config`` it
    will not be overridden by this method if ``overwrite`` is `False`.
    This allows explicit values set in external configs to be retained.
    """
    # Narrow both configs down to the datastore subset.  Defaults are
    # deliberately not merged: ``config`` must stay sparse and ``full``
    # already has them expanded.
    datastoreConfig = DatastoreConfig(config, mergeDefaults=False)
    fullDatastoreConfig = DatastoreConfig(full, mergeDefaults=False)

    # Hand each child its own slice of the config, rooted under a
    # per-child subdirectory of the new repository root.
    containerKey = cls.containerKey
    children = zip(datastoreConfig[containerKey],
                   fullDatastoreConfig[containerKey])
    for idx, (child, fullChild) in enumerate(children):
        childConfig = DatastoreConfig(child, mergeDefaults=False)
        fullChildConfig = DatastoreConfig(fullChild, mergeDefaults=False)
        datastoreClass = doImport(fullChildConfig["cls"])
        childRoot = f"{root}/{datastoreClass.__qualname__}_{idx}"
        datastoreClass.setConfigRoot(childRoot, childConfig, fullChildConfig,
                                     overwrite=overwrite)
        # Reattach the updated child config to the parent container.
        datastoreConfig[containerKey, idx] = childConfig

    # Reattach the modified datastore config: under the datastore key if
    # one exists, otherwise at the top of the config hierarchy.
    if DatastoreConfig.component in config:
        config[DatastoreConfig.component] = datastoreConfig
    else:
        config.update(datastoreConfig)