Example #1
    @classmethod
    def determineCalibClass(cls, metadata, message):
        """Attempt to find calibration class in metadata.

        Parameters
        ----------
        metadata : `dict` or `lsst.daf.base.PropertyList`
            Metadata possibly containing a calibration class entry.
        message : `str`
            Message to include in any errors.

        Returns
        -------
        calibClass : `object`
            The class to use to read the file contents.  Should be an
            `lsst.ip.isr.IsrCalib` subclass.

        Raises
        ------
        ValueError
            Raised if the resulting calibClass is the base
            `lsst.ip.isr.IsrCalib` class (which does not implement the
            content methods).
        """
        calibClassName = metadata.get("CALIBCLS")
        calibClass = (doImport(calibClassName)
                      if calibClassName is not None else cls)
        if calibClass is IsrCalib:
            raise ValueError(
                f"Cannot use base class to read calibration data: {message}")
        return calibClass
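
A minimal sketch of the lookup this method performs; the metadata dict and the calibration class name below are illustrative assumptions, not taken from a real file:

    from lsst.utils import doImport

    # Hypothetical metadata entry naming an IsrCalib subclass.
    metadata = {"CALIBCLS": "lsst.ip.isr.Linearizer"}
    calibClassName = metadata.get("CALIBCLS")
    calibClass = doImport(calibClassName) if calibClassName is not None else None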
Example #2
    def makeStorage(
        self,
        db: Database,
        *,
        context: Optional[StaticTablesContext] = None,
    ) -> GovernorDimensionRecordStorage:
        """Make storage record.

        Constructs the `DimensionRecordStorage` instance that should
        be used to back this element in a registry.

        Parameters
        ----------
        db : `Database`
            Interface to the underlying database engine and namespace.
        context : `StaticTablesContext`, optional
            If provided, an object to use to create any new tables.  If not
            provided, ``db.ensureTableExists`` should be used instead.

        Returns
        -------
        storage : `GovernorDimensionRecordStorage`
            Storage object that should back this element in a registry.
        """
        from ...registry.interfaces import GovernorDimensionRecordStorage
        cls = doImport(self._storage["cls"])
        assert issubclass(cls, GovernorDimensionRecordStorage)
        return cls.initialize(db, self, context=context, config=self._storage)
Example #3
@functools.lru_cache
def _importPlugin(pluginName):
    """Import a plugin that contains Click commands.

    Parameters
    ----------
    pluginName : `str`
        An importable module whose ``__all__`` attribute contains the
        commands that can be called.

    Returns
    -------
    plugin : `module` or `None`
        The imported module, or `None` if the module could not be imported.

    Notes
    -----
    A cache is used in order to prevent repeated reports of failure
    to import a module that can be triggered by ``butler --help``.
    """
    try:
        return doImport(pluginName)
    except Exception as err:
        log.warning("Could not import plugin from %s, skipping.", pluginName)
        log.debug("Plugin import exception: %s\nTraceback:\n%s", err,
                  "".join(traceback.format_tb(err.__traceback__)))
        return None
Example #4
    @classmethod
    def readFitsWithOptions(cls, filename, options):
        """Build an instance of this class with options.

        Parameters
        ----------
        filename : `str`
            Name of the file to read.
        options : `PropertyList`
            Collection of metadata parameters.
        """
        # Only the base class dispatches on the metadata; subclasses must
        # implement their own reader (this is no longer an abstract method).
        if cls is not StampsBase:
            raise NotImplementedError(
                f"Please implement specific FITS reader for class {cls}")

        # Load metadata to get class
        metadata = afwFits.readMetadata(filename, hdu=0)
        type_name = metadata.get("STAMPCLS")
        if type_name is None:
            raise RuntimeError(
                f"No class name in file {filename}. Unable to instantiate correct"
                " stamps subclass. Is this an old version format Stamps file?")

        # Import class and override `cls`
        stamp_type = doImport(type_name)
        cls = stamp_type

        return cls.readFitsWithOptions(filename, options)
Example #5
    def addTask(self, task: Union[PipelineTask, str], label: str):
        """Add a new task to the pipeline, or replace a task that is already
        associated with the supplied label.

        Parameters
        ----------
        task : `PipelineTask` or `str`
            Either a class derived from `PipelineTask` or a string
            corresponding to a fully qualified `PipelineTask` name.
        label : `str`
            A label that is used to identify the `PipelineTask` being added.
        """
        if isinstance(task, str):
            taskName = task
        elif issubclass(task, PipelineTask):
            taskName = f"{task.__module__}.{task.__qualname__}"
        else:
            raise ValueError(
                "task must be either a child class of PipelineTask or a string containing"
                " a fully qualified name to one")
        if not label:
            # In some cases (e.g. a command-line-generated pipeline) tasks
            # can be defined without a label, which is not acceptable; use
            # the task's _DefaultName in that case.
            if isinstance(task, str):
                task = doImport(task)
            label = task._DefaultName
        self._pipelineIR.tasks[label] = pipelineIR.TaskIR(label, taskName)
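
A usage sketch of the two accepted forms; the pipeline description and the task path are assumptions for illustration:

    from lsst.pipe.base import Pipeline

    pipeline = Pipeline("example")  # hypothetical description
    # Fully qualified name form (hypothetical task path):
    pipeline.addTask("lsst.pipe.tasks.calibrate.CalibrateTask", "calibrate")
    # Or, given the class object itself:
    # pipeline.addTask(CalibrateTask, "calibrate")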
Example #6
def getClassOf(typeOrName: Union[Type, str]) -> Type:
    """Given the type name or a type, return the python type.

    If a type name is given, an attempt will be made to import the type.

    Parameters
    ----------
    typeOrName : `str` or Python class
        A string describing the Python class to load or a Python type.

    Returns
    -------
    type_ : `type`
        Directly returns the Python type if a type was provided, else
        tries to import the given string and returns the resulting type.

    Notes
    -----
    This is a thin wrapper around `~lsst.utils.doImport`.
    """
    if isinstance(typeOrName, str):
        cls = doImport(typeOrName)
    else:
        cls = typeOrName
    return cls
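
A usage sketch, assuming ``getClassOf`` is in scope as defined above: a dotted name and the type itself resolve to the same class (the stdlib name is just an example):

    from collections import OrderedDict

    assert getClassOf("collections.OrderedDict") is OrderedDict
    assert getClassOf(OrderedDict) is OrderedDict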
Example #7
    def _importGraphFixup(self, args):
        """Import/instantiate graph fixup object.

        Parameters
        ----------
        args : `argparse.Namespace`
            Parsed command line.

        Returns
        -------
        fixup : `ExecutionGraphFixup` or `None`
            Instance of the fixup class, or `None` if no fixup was requested
            on the command line.

        Raises
        ------
        ValueError
            Raised if import fails, method call raises exception, or returned
            instance has unexpected type.
        """
        if args.graph_fixup:
            try:
                factory = doImport(args.graph_fixup)
            except Exception as exc:
                raise ValueError("Failed to import graph fixup class/method") from exc
            try:
                fixup = factory()
            except Exception as exc:
                raise ValueError("Failed to make instance of graph fixup") from exc
            if not isinstance(fixup, ExecutionGraphFixup):
                raise ValueError("Graph fixup is not an instance of ExecutionGraphFixup class")
            return fixup
Example #8
    def makeStorage(
        self,
        db: Database, *,
        context: Optional[StaticTablesContext] = None,
        governors: NamedKeyMapping[GovernorDimension, GovernorDimensionRecordStorage],
    ) -> DatabaseDimensionRecordStorage:
        """Construct the `DimensionRecordStorage` instance that should
        be used to back this element in a registry.

        Parameters
        ----------
        db : `Database`
            Interface to the underlying database engine and namespace.
        context : `StaticTablesContext`, optional
            If provided, an object to use to create any new tables.  If not
            provided, ``db.ensureTableExists`` should be used instead.
        governors : `NamedKeyMapping`
            Mapping from `GovernorDimension` to the record storage backend for
            that dimension, containing all governor dimensions.

        Returns
        -------
        storage : `DatabaseDimensionRecordStorage`
            Storage object that should back this element in a registry.
        """
        from ...registry.interfaces import DatabaseDimensionRecordStorage
        cls = doImport(self._storage["cls"])
        assert issubclass(cls, DatabaseDimensionRecordStorage)
        return cls.initialize(db, self, context=context, config=self._storage, governors=governors)
Example #9
    def standardize(self, mapper, item, dataId):
        """Default standardization function for calibration datasets.

        If the item is of a type that should be standardized, the base class
        ``standardizeExposure`` method is called, otherwise the item is returned
        unmodified.

        Parameters
        ----------
        mapper : `lsst.obs.base.CameraMapper`
            Mapper object to pass through.
        item : object
            Will be standardized if of type `lsst.afw.image.Exposure`,
            `lsst.afw.image.DecoratedImage`, `lsst.afw.image.Image`,
            or `lsst.afw.image.MaskedImage`.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `lsst.afw.image.Exposure` or item
            The standardized object.
        """
        if issubclass(doImport(self.python),
                      (Exposure, MaskedImage, Image, DecoratedImage)):
            return mapper._standardizeExposure(self,
                                               item,
                                               dataId,
                                               filter=self.setFilter)
        return item
Example #10
    @property
    def assemblerClass(self):
        """Class to use to (dis)assemble an object from components."""
        if self._assembler is not None:
            return self._assembler
        if self._assemblerClassName is None:
            return None
        self._assembler = doImport(self._assemblerClassName)
        return self._assembler
Example #11
    @property
    def delegateClass(self) -> Optional[Type]:
        """Class to use to delegate type-specific actions."""
        if self._delegate is not None:
            return self._delegate
        if self._delegateClassName is None:
            return None
        self._delegate = doImport(self._delegateClassName)
        return self._delegate
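
Examples #10 and #11 share the same lazy-import-and-cache pattern; here is a generic, self-contained sketch of it (the class and attribute names are illustrative, not from the codebase):

    from lsst.utils import doImport

    class LazyClassHolder:
        """Illustrative holder that imports a named class on first access."""

        def __init__(self, className=None):
            self._cls = None
            self._className = className

        @property
        def cls(self):
            if self._cls is not None:
                return self._cls          # cached from an earlier access
            if self._className is None:
                return None               # nothing configured
            self._cls = doImport(self._className)
            return self._cls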
Example #12
    def toExpandedPipeline(self) -> Generator[TaskDef, None, None]:
        """Returns a generator of TaskDefs which can be used to create quantum
        graphs.

        Returns
        -------
        generator : generator of `TaskDef`
            The generator returned will be the sorted iterator of tasks which
            are to be used in constructing a quantum graph.

        Raises
        ------
        NotImplementedError
            Raised if a dataId is supplied in a config block. This restriction
            is in place for future use.
        """
        taskDefs = []
        for label, taskIR in self._pipelineIR.tasks.items():
            taskClass = doImport(taskIR.klass)
            taskName = taskClass.__qualname__
            config = taskClass.ConfigClass()
            overrides = ConfigOverrides()
            if self._pipelineIR.instrument is not None:
                overrides.addInstrumentOverride(self._pipelineIR.instrument, taskClass._DefaultName)
            if taskIR.config is not None:
                for configIR in taskIR.config:
                    if configIR.dataId is not None:
                        raise NotImplementedError("Specializing a config on a partial data id is not yet "
                                                  "supported in Pipeline definition")
                    # only apply override if it applies to everything
                    if configIR.dataId is None:
                        if configIR.file:
                            for configFile in configIR.file:
                                overrides.addFileOverride(configFile)
                        if configIR.python is not None:
                            overrides.addPythonOverride(configIR.python)
                        for key, value in configIR.rest.items():
                            overrides.addValueOverride(key, value)
            overrides.applyTo(config)
            # This may need to be revisited
            config.validate()
            taskDefs.append(TaskDef(taskName=taskName, config=config, taskClass=taskClass, label=label))

        # Let's evaluate the contracts.
        if self._pipelineIR.contracts is not None:
            label_to_config = {x.label: x.config for x in taskDefs}
            for contract in self._pipelineIR.contracts:
                # Execute this on its own line so it can raise a good error
                # message if there were problems with the eval.
                success = eval(contract.contract, None, label_to_config)
                if not success:
                    extra_info = f": {contract.msg}" if contract.msg is not None else ""
                    raise pipelineIR.ContractError(f"Contract(s) '{contract.contract}' were not "
                                                   f"satisfied{extra_info}")

        yield from pipeTools.orderPipeline(taskDefs)
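
The contract check above is a plain ``eval`` of the contract expression against a mapping of task labels to configs; sketched in isolation (the label and expression are hypothetical):

    from types import SimpleNamespace

    label_to_config = {"isr": SimpleNamespace(doWrite=True)}
    success = eval("isr.doWrite == True", None, label_to_config)
    assert success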
Example #13
    @staticmethod
    def fromConfig(config, registry, butlerRoot=None):
        """Create datastore from type specified in config file.

        Parameters
        ----------
        config : `Config`
            Configuration instance.
        registry : `Registry`
            Registry to be used by the Datastore for internal data.
        butlerRoot : `str`, optional
            Butler root directory.
        """
        cls = doImport(config["datastore", "cls"])
        return cls(config=config, registry=registry, butlerRoot=butlerRoot)
Example #14
    @property
    def pytype(self) -> Type:
        """Python type associated with this `StorageClass`."""
        if self._pytype is not None:
            return self._pytype

        if hasattr(builtins, self._pytypeName):
            pytype = getattr(builtins, self._pytypeName)
        else:
            pytype = doImport(self._pytypeName)
        self._pytype = pytype
        return self._pytype
Example #15
    def __init__(self,
                 config: Union[Config, str],
                 bridgeManager: DatastoreRegistryBridgeManager,
                 butlerRoot: Optional[str] = None):
        super().__init__(config, bridgeManager)

        # Scan for child datastores and instantiate them with the same registry
        self.datastores = []
        for c in self.config["datastores"]:
            c = DatastoreConfig(c)
            datastoreType = doImport(c["cls"])
            datastore = datastoreType(c, bridgeManager, butlerRoot=butlerRoot)
            log.debug("Creating child datastore %s", datastore.name)
            self.datastores.append(datastore)

        # Name ourself based on our children
        if self.datastores:
            # We must set the names explicitly
            self._names = [d.name for d in self.datastores]
            childNames = ",".join(self.names)
        else:
            childNames = "(empty@{})".format(time.time())
            self._names = [childNames]
        self.name = "{}[{}]".format(type(self).__qualname__, childNames)

        # We declare we are ephemeral if all our child datastores declare
        # they are ephemeral
        isEphemeral = True
        for d in self.datastores:
            if not d.isEphemeral:
                isEphemeral = False
                break
        self.isEphemeral = isEphemeral

        # per-datastore override constraints
        if "datastore_constraints" in self.config:
            overrides = self.config["datastore_constraints"]

            if len(overrides) != len(self.datastores):
                raise DatastoreValidationError(
                    f"Number of registered datastores ({len(self.datastores)})"
                    " differs from number of constraints overrides"
                    f" {len(overrides)}")

            self.datastoreConstraints = [
                Constraints(c.get("constraints"),
                            universe=bridgeManager.universe) for c in overrides
            ]

        else:
            self.datastoreConstraints = (None, ) * len(self.datastores)

        log.debug("Created %s (%s)", self.name,
                  ("ephemeral" if self.isEphemeral else "permanent"))
Example #16
    def __init__(self, config: Optional[RawIngestConfig] = None, *, butler: Butler, **kwds: Any):
        super().__init__(config, **kwds)
        self.butler = butler
        self.universe = self.butler.registry.dimensions
        self.instrument = doImport(self.config.instrument)()
        # For now, we get a nominal Camera from the Instrument.
        # In the future, we may want to load one from a Butler calibration
        # collection that's appropriate for the observation timestamp of
        # the exposure.
        self.camera = self.instrument.getCamera()
        self.datasetType = self.getDatasetType()
Example #17
    @classmethod
    def setUpClass(cls):
        # Storage Classes are fixed for all datastores in these tests
        scConfigFile = os.path.join(TESTDIR, "config/basic/storageClasses.yaml")
        cls.storageClassFactory = StorageClassFactory()
        cls.storageClassFactory.addFromConfig(scConfigFile)

        # Read the Datastore config so we can get the class
        # information (since we should not assume the constructor
        # name here, but rely on the configuration file itself)
        datastoreConfig = DatastoreConfig(cls.configFile)
        cls.datastoreType = doImport(datastoreConfig["cls"])
        cls.universe = DimensionUniverse.fromConfig()
Example #18
    def getDatabaseClass(self) -> Type[Database]:
        """Returns the `Database` class targeted by configuration values.

        The appropriate class is determined by parsing the `db` key to extract
        the dialect, and then looking that up under the `engines` key of the
        registry config.
        """
        dialect = self.getDialect()
        if dialect not in self["engines"]:
            raise ValueError(f"Connection string dialect has no known aliases. Received: {dialect}")
        databaseClass = self["engines", dialect]
        return doImport(databaseClass)
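
A sketch of the lookup, with an illustrative config layout (the dialect and the class path are assumptions, not a real configuration):

    from lsst.utils import doImport

    # Stand-in for self["engines", dialect]:
    engines = {"sqlite": "lsst.daf.butler.registry.databases.sqlite.SqliteDatabase"}
    dialect = "sqlite"  # parsed from the "db" connection string
    databaseClass = doImport(engines[dialect])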
Example #19
def ingestRaws(repo,
               locations,
               regex,
               output_run,
               config=None,
               config_file=None,
               transfer="auto",
               processes=1,
               ingest_task="lsst.obs.base.RawIngestTask"):
    """Ingests raw frames into the butler registry

    Parameters
    ----------
    repo : `str`
        URI to the repository.
    locations : `list` [`str`]
        Files to ingest and directories to search for files that match
        ``regex`` to ingest.
    regex : `str`
        Regex string used to find files in directories listed in locations.
    output_run : `str`
        The name of the run collection where datasets should be put.
    config : `dict` [`str`, `str`] or `None`
        Key-value pairs to apply as overrides to the ingest config.
    config_file : `str` or `None`
        Path to a config file that contains overrides to the ingest config.
    transfer : `str` or `None`
        The external data transfer type; by default "auto".
    processes : `int`
        Number of processes to use for ingest.
    ingest_task : `str`
        The fully qualified class name of the ingest task to use; by default
        ``lsst.obs.base.RawIngestTask``.

    Raises
    ------
    Exception
        Raised if operations on configuration object fail.
    """
    butler = Butler(repo, writeable=True)
    TaskClass = doImport(ingest_task)
    ingestConfig = TaskClass.ConfigClass()
    ingestConfig.transfer = transfer
    configOverrides = ConfigOverrides()
    if config_file is not None:
        configOverrides.addFileOverride(config_file)
    if config is not None:
        for name, value in config.items():
            configOverrides.addValueOverride(name, value)
    configOverrides.applyTo(ingestConfig)
    ingester = TaskClass(config=ingestConfig, butler=butler)
    files = findFileResources(locations, regex)
    ingester.run(files, run=output_run, processes=processes)
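
An illustrative invocation; the repository path, locations, and run name are hypothetical:

    ingestRaws("/path/to/repo",
               ["raw_data/"],
               regex=r"\.fits$",
               output_run="raw/all",
               transfer="symlink",
               processes=4)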
Example #20
    def makeRegistry(self) -> Registry:
        prefix = f"test_{secrets.token_hex(8).lower()}_"
        self._prefixes.append(prefix)
        config = self.makeRegistryConfig()
        # Can't use Registry.fromConfig for these tests because we don't want
        # to reconnect to the server every single time.  But we at least use
        # OracleDatabase.fromConnection rather than the constructor so
        # we can try to pass a prefix through via "+" in a namespace.
        database = OracleDatabase.fromConnection(connection=self._connection,
                                                 origin=0,
                                                 namespace=f"+{prefix}")
        attributes = doImport(config["managers", "attributes"])
        opaque = doImport(config["managers", "opaque"])
        dimensions = doImport(config["managers", "dimensions"])
        collections = doImport(config["managers", "collections"])
        datasets = doImport(config["managers", "datasets"])
        datastoreBridges = doImport(config["managers", "datastores"])
        return Registry(database=database,
                        attributes=attributes,
                        opaque=opaque,
                        dimensions=dimensions,
                        collections=collections,
                        datasets=datasets,
                        datastoreBridges=datastoreBridges,
                        universe=DimensionUniverse(config),
                        create=True)
Example #21
    @property
    def pixelization(self):
        """Object that interprets skypix Dimension values
        (`lsst.sphgeom.Pixelization`).

        `None` for limited registries.
        """
        if self.limited:
            return None
        if self._pixelization is None:
            pixelizationCls = doImport(self.config["skypix", "cls"])
            self._pixelization = pixelizationCls(level=self.config["skypix",
                                                                   "level"])
        return self._pixelization
Example #22
    def makeGraph(self, pipeline, collections, run, userQuery):
        """Create execution graph for a pipeline.

        Parameters
        ----------
        pipeline : `Pipeline`
            Pipeline definition, task names/classes and their configs.
        collections
            Expressions representing the collections to search for input
            datasets.  May be any of the types accepted by
            `lsst.daf.butler.CollectionSearch.fromExpression`.
        run : `str`, optional
            Name of the `~lsst.daf.butler.CollectionType.RUN` collection for
            output datasets, if it already exists.
        userQuery : `str`
            String which defines a user-provided selection for the registry;
            should be empty or `None` if there are no restrictions on data
            selection.

        Returns
        -------
        graph : `QuantumGraph`
            The generated quantum graph.

        Raises
        ------
        UserExpressionError
            Raised when user expression cannot be parsed.
        OutputExistsError
            Raised when output datasets already exist.
        Exception
            Other exception types may be raised by underlying registry
            classes.
        """
        scaffolding = _PipelineScaffolding(pipeline, registry=self.registry)

        instrument = pipeline.getInstrument()
        if isinstance(instrument, str):
            instrument = doImport(instrument)
        if instrument is not None:
            dataId = DataCoordinate.standardize(
                instrument=instrument.getName(),
                universe=self.registry.dimensions)
        else:
            dataId = DataCoordinate.makeEmpty(self.registry.dimensions)
        with scaffolding.connectDataIds(self.registry, collections, userQuery,
                                        dataId) as commonDataIds:
            scaffolding.resolveDatasetRefs(self.registry,
                                           collections,
                                           run,
                                           commonDataIds,
                                           skipExisting=self.skipExisting)
        return scaffolding.makeQuantumGraph()
Example #23
    def addInstrumentOverride(self, instrument: str, task_name: str):
        """Apply any overrides that an instrument has for a task.

        Parameters
        ----------
        instrument : `str`
            A string containing the fully qualified name of an instrument from
            which configs should be loaded and applied.
        task_name : `str`
            The _DefaultName of a task associated with a config, used to look
            up overrides from the instrument.
        """
        instrument_lib = doImport(instrument)()
        self._overrides.append((OverrideTypes.Instrument, (instrument_lib, task_name)))
Example #24
    def visit(self, builder: DimensionConstructionBuilder) -> None:
        # Docstring inherited from DimensionConstructionVisitor.
        PixelizationClass = doImport(self._pixelizationClassName)
        maxLevel = self._maxLevel if self._maxLevel is not None else PixelizationClass.MAX_LEVEL
        system = SkyPixSystem(
            self.name,
            maxLevel=maxLevel,
            PixelizationClass=PixelizationClass,
        )
        builder.topology[TopologicalSpace.SPATIAL].add(system)
        for level in range(maxLevel + 1):
            dimension = system[level]
            builder.dimensions.add(dimension)
            builder.elements.add(dimension)
Example #25
    def __call__(self, fixed: DataCoordinate) -> DimensionPacker:
        """Construct a `DimensionPacker` instance for the given fixed data ID.

        Parameters
        ----------
        fixed : `DataCoordinate`
            Data ID that provides values for the "fixed" dimensions of the
            packer.  Must be expanded with all metadata known to the
            `Registry`.  ``fixed.hasRecords()`` must return `True`.
        """
        assert fixed.graph.issuperset(self.fixed)
        if self._cls is None:
            self._cls = doImport(self._clsName)
        return self._cls(fixed, self.dimensions)
Example #26
    @property
    def pytype(self):
        """Python type associated with this `StorageClass`."""
        if self._pytype is not None:
            return self._pytype
        # Handle the case where we were given a Python type, not a string
        if not isinstance(self._pytypeName, str):
            pytype = self._pytypeName
            self._pytypeName = self._pytypeName.__name__
        elif hasattr(builtins, self._pytypeName):
            pytype = getattr(builtins, self._pytypeName)
        else:
            pytype = doImport(self._pytypeName)
        self._pytype = pytype
        return self._pytype
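
The builtins-versus-dotted-name branch, sketched in isolation (the names looped over are just examples):

    import builtins

    from lsst.utils import doImport

    for name in ("dict", "lsst.utils.tests.TestCase"):
        if hasattr(builtins, name):
            pytype = getattr(builtins, name)   # builtin: no import needed
        else:
            pytype = doImport(name)            # dotted path: import it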
Example #27
    @staticmethod
    def fromConfig(config: Config, registry: Registry, butlerRoot: Optional[str] = None) -> 'Datastore':
        """Create datastore from type specified in config file.

        Parameters
        ----------
        config : `Config`
            Configuration instance.
        registry : `Registry`
            Registry to be used by the Datastore for internal data.
        butlerRoot : `str`, optional
            Butler root directory.
        """
        cls = doImport(config["datastore", "cls"])
        return cls(config=config, registry=registry, butlerRoot=butlerRoot)
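
The same dispatch pattern generalizes; a minimal sketch of a config-driven factory (the helper name and the flat config layout are assumptions for illustration):

    from lsst.utils import doImport

    def makeFromConfig(config, **kwargs):
        """Illustrative: construct whichever class the config names."""
        cls = doImport(config["cls"])
        return cls(config=config, **kwargs)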
Example #28
def getFullTypeName(cls: Any) -> str:
    """Return full type name of the supplied entity.

    Parameters
    ----------
    cls : `type` or `object`
        Entity from which to obtain the full name. Can be an instance
        or a `type`.

    Returns
    -------
    name : `str`
        Full name of type.

    Notes
    -----
    Builtins are returned without the ``builtins`` specifier included.  This
    allows `str` to be returned as "str" rather than "builtins.str". Any
    parts of the path that start with a leading underscore are removed
    on the assumption that they are an implementation detail and the
    entity will be hoisted into the parent namespace.
    """
    # If we have an instance we need to convert to a type
    if not hasattr(cls, "__qualname__"):
        cls = type(cls)
    if hasattr(builtins, cls.__qualname__):
        # Special case builtins such as str and dict
        return cls.__qualname__

    real_name = cls.__module__ + "." + cls.__qualname__

    # Remove components with leading underscores
    cleaned_name = ".".join(c for c in real_name.split(".")
                            if not c.startswith("_"))

    # Consistency check
    if real_name != cleaned_name:
        try:
            test = doImport(cleaned_name)
        except Exception:
            # Could not import anything so return the real name
            return real_name

        # The thing we imported should match the class we started with
        # despite the clean up. If it does not we return the real name
        if test is not cls:
            return real_name

    return cleaned_name
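
A usage sketch, assuming ``getFullTypeName`` is in scope as defined above; the consistency check relies on the cleaned name importing back to the same object:

    assert getFullTypeName(dict) == "dict"   # builtins prefix is dropped
    assert getFullTypeName({}) == "dict"     # instances are converted to their type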
Example #29
def getInstanceOf(typeOrName):
    """Given the type name or a type, instantiate an object of that type.

    If a type name is given, an attempt will be made to import the type.

    Parameters
    ----------
    typeOrName : `str` or Python class
        A string describing the Python class to load or a Python type.
    """
    if isinstance(typeOrName, str):
        cls = doImport(typeOrName)
    else:
        cls = typeOrName
    return cls()
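
A usage sketch, assuming ``getInstanceOf`` is in scope as defined above (the stdlib type is just an example); note the type is instantiated with no arguments:

    from collections import OrderedDict

    d = getInstanceOf("collections.OrderedDict")   # fresh OrderedDict instance
    also_d = getInstanceOf(OrderedDict)            # same, given the type directly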
Example #30
    @staticmethod
    def fromConfig(config, types, key, value, lengths=None, registry=None):
        """Create a `DatabaseDict` subclass instance from `config`.

        If ``config`` contains a ``cls`` key, this will be assumed to
        be the fully qualified name of a `DatabaseDict` subclass to construct.
        If not, ``registry.makeDatabaseDict`` will be called instead, and
        ``config`` must contain a ``table`` key with the name of the table
        to use.

        Parameters
        ----------
        config : `Config`
            Configuration used to identify and construct a subclass.
        types : `dict`
            A dictionary mapping `str` field names to Python type objects,
            containing all fields to be held in the database.
        key : `str`
            The name of the field to be used as the dictionary key.  Must not
            be present in ``value._fields``.
        value : `type`
            The type used for the dictionary's values, typically a
            `namedtuple`. Must have a ``_fields`` class attribute that is a
            tuple of field names (i.e., as defined by
            `~collections.namedtuple`); these field names must also appear
            in the ``types`` arg, and a ``_make`` attribute to construct it
            from a sequence of values (again, as defined by
            `~collections.namedtuple`).
        lengths : `dict`, optional
            Specific lengths of string fields.  Defaults will be used if not
            specified.
        registry : `Registry`, optional
            A registry instance from which a `DatabaseDict` subclass can be
            obtained.  Ignored if ``config["cls"]`` exists; may be None if
            it does.

        Returns
        -------
        dictionary : `DatabaseDict` (subclass)
            A new `DatabaseDict` subclass instance.
        """
        if "cls" in config:
            cls = doImport(config["cls"])
            return cls(config=config, types=types, key=key, value=value, lengths=lengths)
        else:
            table = config["table"]
            if registry is None:
                raise ValueError("Either config['cls'] or registry must be provided.")
            return registry.makeDatabaseDict(table, types=types, key=key, value=value, lengths=lengths)
Example #31
    def getPupilFactory(self, visitInfo, pupilSize, npix, **kwargs):
        """Construct a PupilFactory.

        Parameters
        ----------
        visitInfo : `~lsst.afw.image.VisitInfo`
            VisitInfo object for a particular exposure.
        pupilSize : `float`
            Size in meters of constructed Pupil array. Note that this may be
            larger than the actual diameter of the illuminated pupil to
            accommodate zero-padding.
        npix : `int`
            Constructed Pupils will be npix x npix.
        **kwargs : `dict`
            Other keyword arguments forwarded to the PupilFactoryClass
            constructor.
        """
        cls = doImport(self.getPupilFactoryName())
        return cls(visitInfo, pupilSize, npix, **kwargs)
Example #32
    @property
    def telescopeDiameter(self):
        cls = doImport(self.getPupilFactoryName())
        return cls.telescopeDiameter
Example #33
    def testDoImport(self):
        c = doImport("lsst.utils.tests.TestCase")
        self.assertEqual(c, lsst.utils.tests.TestCase)

        c = doImport("lsst.utils.doImport")
        self.assertEqual(type(c), type(doImport))
        self.assertTrue(inspect.isfunction(c))

        c = doImport("lsst.utils")
        self.assertTrue(inspect.ismodule(c))

        with self.assertRaises(ImportError):
            doImport("lsst.utils.tests.TestCase.xyprint")

        with self.assertRaises(ImportError):
            doImport("lsst.utils.nothere")

        with self.assertRaises(ModuleNotFoundError):
            doImport("missing module")

        with self.assertRaises(ModuleNotFoundError):
            doImport("lsstdummy.import.fail")

        with self.assertRaises(ImportError):
            doImport("lsst.import.fail")

        with self.assertRaises(ImportError):
            doImport("lsst.utils.x")

        with self.assertRaises(TypeError):
            doImport([])
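
The behavior exercised by this test, condensed into a short usage sketch (all names are taken from the test itself):

    from lsst.utils import doImport

    mod = doImport("lsst.utils")                   # a module
    cls = doImport("lsst.utils.tests.TestCase")    # a class
    func = doImport("lsst.utils.doImport")         # a function
    # Anything that cannot be resolved raises ImportError (or a subclass).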