def testConstructor(self):
    """Test DatasetTypeDescriptor direct construction.

    Builds descriptors with both values of ``scalar`` and checks that
    the DatasetType produced by ``makeDatasetType`` reflects the name,
    dimensions, and storage class, and that the ``scalar`` flag
    round-trips on the descriptor itself.
    """
    name = "testDataset"
    dimensionNames = frozenset(["label"])
    storageClassName = "Catalog"
    universe = DimensionUniverse.fromConfig()
    # The scalar flag must not affect the generated DatasetType,
    # only the descriptor's own attribute, so both cases share
    # identical DatasetType assertions.
    for scalar in (False, True):
        descriptor = pipeBase.DatasetTypeDescriptor(
            name=name,
            dimensionNames=dimensionNames,
            storageClassName=storageClassName,
            scalar=scalar,
            manualLoad=False)
        datasetType = descriptor.makeDatasetType(universe)
        self.assertEqual(datasetType.name, name)
        self.assertEqual(datasetType.dimensions.names, dimensionNames)
        self.assertEqual(datasetType.storageClass.name, storageClassName)
        self.assertEqual(descriptor.scalar, scalar)
Example #2
0
    def testRegistryWithStorageClass(self):
        """Test that the registry can be given a StorageClass object.

        Formatters registered against a StorageClass instance must be
        retrievable via the instance, via a DatasetType using that
        storage class, and as a bare class; re-registering the same
        storage class name must raise KeyError.
        """
        formatterTypeName = "lsst.daf.butler.formatters.yamlFormatter.YamlFormatter"
        storageClassName = "TestClass"
        storageClass = StorageClass(storageClassName, dict, None)

        universe = DimensionUniverse.fromConfig()
        datasetType = DatasetType("calexp", universe.extract([]), storageClass)

        # Register using the StorageClass instance itself.
        self.factory.registerFormatter(storageClass, formatterTypeName)

        # Lookup via the StorageClass yields a bound formatter.
        scFormatter = self.factory.getFormatter(storageClass, self.fileDescriptor)
        self.assertIsFormatter(scFormatter)
        self.assertEqual(scFormatter.fileDescriptor, self.fileDescriptor)

        # Lookup via the DatasetType resolves to the same formatter.
        dtFormatter = self.factory.getFormatter(datasetType, self.fileDescriptor)
        self.assertIsFormatter(dtFormatter)
        self.assertEqual(scFormatter.name(), dtFormatter.name())

        # The formatter class itself can be retrieved directly.
        self.assertIsFormatter(self.factory.getFormatterClass(datasetType))

        # This might defer the import, pytest may have already loaded it.
        from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter
        self.assertEqual(type(scFormatter), YamlFormatter)

        # Attempting to overwrite the registration (here keyed by the
        # storage class name) with a different formatter must fail.
        with self.assertRaises(KeyError):
            self.factory.registerFormatter(storageClassName,
                                           "lsst.daf.butler.formatters.jsonFormatter.JsonFormatter")
Example #3
0
 def setUp(self):
     """Create the dimension universe, given dimensions, and parameters."""
     self.universe = DimensionUniverse.fromConfig()
     self.given = DimensionSet(universe=self.universe, elements=["skymap"])
     self.parameters = {"skymap": "unimportant",
                        "tractMax": 5,
                        "patchNxMax": 3,
                        "patchNyMax": 3}
Example #4
0
    def testDatasetConfig(self):
        """Test for a config with datasets.

        Checks getInputDatasetTypes, getOutputDatasetTypes,
        getInitInputDatasetTypes, and getInitOutputDatasetTypes against
        the corresponding entries of ConfigWithDatasets.  The identical
        four-assert check was repeated five times in the original; it
        is factored into a local helper.
        """
        config = ConfigWithDatasets()
        universe = DimensionUniverse.fromConfig()

        def checkDescriptor(descriptor, dsConfig, scalar, dimensions=None):
            # Assert the descriptor's generated DatasetType matches its
            # config entry; dimensions=None means "expect no dimensions"
            # (the init datasets have none).
            datasetType = descriptor.makeDatasetType(universe)
            self.assertEqual(datasetType.name, dsConfig.name)
            if dimensions is None:
                self.assertEqual(len(datasetType.dimensions), 0)
            else:
                self.assertCountEqual(datasetType.dimensions.names, dimensions)
            self.assertEqual(datasetType.storageClass.name,
                             dsConfig.storageClass)
            self.assertEqual(descriptor.scalar, scalar)

        descriptors = pipeBase.PipelineTask.getInputDatasetTypes(config)
        self.assertCountEqual(descriptors.keys(), ["input1", "input2"])
        checkDescriptor(descriptors["input1"], config.input1, scalar=False,
                        dimensions=config.input1.dimensions)
        checkDescriptor(descriptors["input2"], config.input2, scalar=True,
                        dimensions=config.input2.dimensions)

        descriptors = pipeBase.PipelineTask.getOutputDatasetTypes(config)
        self.assertCountEqual(descriptors.keys(), ["output"])
        checkDescriptor(descriptors["output"], config.output, scalar=False,
                        dimensions=config.output.dimensions)

        descriptors = pipeBase.PipelineTask.getInitInputDatasetTypes(config)
        self.assertCountEqual(descriptors.keys(), ["initInput"])
        checkDescriptor(descriptors["initInput"], config.initInput,
                        scalar=True)

        descriptors = pipeBase.PipelineTask.getInitOutputDatasetTypes(config)
        self.assertCountEqual(descriptors.keys(), ["initOutput"])
        checkDescriptor(descriptors["initOutput"], config.initOutput,
                        scalar=True)
    @classmethod
    def setUpClass(cls):
        """Set up class-level fixtures shared by all datastore tests.

        unittest invokes ``setUpClass`` on the class itself, so the
        ``@classmethod`` decorator is required; it was missing here.
        """
        # Storage Classes are fixed for all datastores in these tests
        scConfigFile = os.path.join(TESTDIR, "config/basic/storageClasses.yaml")
        cls.storageClassFactory = StorageClassFactory()
        cls.storageClassFactory.addFromConfig(scConfigFile)

        # Read the Datastore config so we can get the class
        # information (since we should not assume the constructor
        # name here, but rely on the configuration file itself)
        datastoreConfig = DatastoreConfig(cls.configFile)
        cls.datastoreType = doImport(datastoreConfig["cls"])
        cls.universe = DimensionUniverse.fromConfig()
    def testFromConfig(self):
        """Test DatasetTypeDescriptor.fromConfig().

        Both the input and output entries of AddConfig must yield
        non-scalar descriptors whose DatasetTypes carry the configured
        names.
        """
        universe = DimensionUniverse.fromConfig()
        config = AddConfig()
        for dsConfig, expectedName in ((config.input, "add_input"),
                                       (config.output, "add_output")):
            descriptor = pipeBase.DatasetTypeDescriptor.fromConfig(dsConfig)
            self.assertIsInstance(descriptor, pipeBase.DatasetTypeDescriptor)
            datasetType = descriptor.makeDatasetType(universe)
            self.assertEqual(datasetType.name, expectedName)
            self.assertFalse(descriptor.scalar)
    def testRegistryConfig(self):
        """Exercise formatter selection driven by a datastore config file.

        Lookup should fall back from (dataset type + dataId) to
        dimensions, then to storage class, when no more specific entry
        matches.
        """
        configPath = os.path.join(TESTDIR, "config", "basic",
                                  "posixDatastore.yaml")
        cfg = Config(configPath)
        universe = DimensionUniverse.fromConfig()
        self.factory.registerFormatters(cfg["datastore", "formatters"],
                                        universe=universe)

        storageClass = StorageClass("DummySC", dict, None)
        fullDims = universe.extract(
            ("visit", "physical_filter", "instrument"))

        # Instrument in the dataId matches the config entry.
        ref = self.makeDatasetRef("pvi", fullDims, storageClass, {
            "instrument": "DummyHSC",
            "physical_filter": "v"
        })
        fmt = self.factory.getFormatter(ref)
        self.assertIsInstance(fmt, Formatter)
        self.assertIn("JsonFormatter", fmt.name())

        # Instrument does not match the config entry.
        ref = self.makeDatasetRef("pvi", fullDims, storageClass, {
            "instrument": "DummyNotHSC",
            "physical_filter": "v"
        })
        fmt = self.factory.getFormatter(ref)
        self.assertIsInstance(fmt, Formatter)
        self.assertIn("PickleFormatter", fmt.name())

        # Unknown dataset type name: falls back to a Dimensions lookup.
        ref = self.makeDatasetRef("pvix", fullDims, storageClass, {
            "instrument": "DummyHSC",
            "physical_filter": "v"
        })
        fmt = self.factory.getFormatter(ref)
        self.assertIsInstance(fmt, Formatter)
        self.assertIn("PickleFormatter", fmt.name())

        # Without "visit" in the dimensions: falls back to StorageClass.
        noVisitDims = universe.extract(("physical_filter", "instrument"))
        ref = self.makeDatasetRef("pvix", noVisitDims, storageClass, {
            "instrument": "DummyHSC",
            "physical_filter": "v"
        })
        fmt = self.factory.getFormatter(ref)
        self.assertIsInstance(fmt, Formatter)
        self.assertIn("YamlFormatter", fmt.name())
    def testMap(self):
        """Verify CompositesMap disassembly decisions.

        Covers: invalid input type, non-composite storage classes,
        composite storage classes, explicit per-DatasetType overrides,
        names with no explicit entry, and a storage class configured
        for disassembly.
        """
        universe = DimensionUniverse.fromConfig()
        compMap = CompositesMap(self.configFile, universe=universe)

        # Plain strings are rejected outright.
        with self.assertRaises(ValueError):
            compMap.shouldBeDisassembled("fred")

        # A non-composite storage class can never be disassembled.
        sc = StorageClass("StructuredDataJson")
        d = DatasetType("dummyTrue", universe.extract([]), sc)
        self.assertFalse(sc.isComposite())
        self.assertFalse(d.isComposite())
        self.assertFalse(compMap.shouldBeDisassembled(d),
                         f"Test with DatasetType: {d}")
        self.assertFalse(compMap.shouldBeDisassembled(sc),
                         f"Test with StorageClass: {sc}")

        # Same names, but now with a genuinely composite storage class:
        # the DatasetType disassembles, the bare StorageClass does not.
        component = StorageClass("Dummy")
        sc = StorageClass("StructuredDataJson", components={"dummy": component})
        d = DatasetType("dummyTrue", universe.extract([]), sc)
        self.assertTrue(sc.isComposite())
        self.assertTrue(d.isComposite())
        self.assertTrue(compMap.shouldBeDisassembled(d),
                        f"Test with DatasetType: {d}")
        self.assertFalse(compMap.shouldBeDisassembled(sc),
                         f"Test with StorageClass: {sc}")

        # An explicit False entry for the DatasetType name wins.
        d = DatasetType("dummyFalse", universe.extract([]), sc)
        self.assertFalse(compMap.shouldBeDisassembled(d),
                         f"Test with DatasetType: {d}")

        # A DatasetType with no explicit entry stays assembled.
        d = DatasetType("dummyFred", universe.extract([]), sc)
        self.assertFalse(compMap.shouldBeDisassembled(d),
                         f"Test with DatasetType: {d}")

        # A storage class configured for disassembly takes effect.
        sc = StorageClass("StructuredComposite", components={"dummy": component})
        d = DatasetType("dummyFred", universe.extract([]), sc)
        self.assertTrue(compMap.shouldBeDisassembled(d),
                        f"Test with DatasetType: {d}")
    def _makeQuanta(self, config):
        """Return a list of 100 Quanta, one per simulated visit.

        Each quantum carries one predicted input and one output
        DatasetRef, built from the config's input and output dataset
        descriptors respectively.
        """
        universe = DimensionUniverse.fromConfig()
        run = Run(collection=1, environment=None, pipeline=None)

        inputType = pipeBase.DatasetTypeDescriptor.fromConfig(
            config.input).makeDatasetType(universe)
        outputType = pipeBase.DatasetTypeDescriptor.fromConfig(
            config.output).makeDatasetType(universe)

        quanta = []
        for visit in range(100):
            quantum = Quantum(run=run, task=None)
            quantum.addPredictedInput(self._makeDSRefVisit(inputType, visit))
            quantum.addOutput(self._makeDSRefVisit(outputType, visit))
            quanta.append(quantum)
        return quanta
Example #10
0
    def testAddInputsOutputs(self):
        """Test of addPredictedInput() method.

        Also exercises _markInputUsed() and addOutput(), checking that
        refs accumulate under the dataset type name.
        """
        quantum = Quantum(task="some.task.object", run=None)

        # Nothing registered yet.
        self.assertEqual(quantum.predictedInputs, dict())

        universe = DimensionUniverse.fromConfig()
        instrument = "DummyCam"
        datasetTypeName = "test_ds"
        storageClass = StorageClass("testref_StructuredData")
        datasetType = DatasetType(datasetTypeName,
                                  universe.extract(("instrument", "visit")),
                                  storageClass)

        def makeRef(visit):
            # Build a DatasetRef for the given visit number.
            return DatasetRef(datasetType,
                              dict(instrument=instrument, visit=visit))

        # Predicted inputs accumulate under the dataset type name.
        quantum.addPredictedInput(makeRef(42))
        self.assertIn(datasetTypeName, quantum.predictedInputs)
        self.assertEqual(len(quantum.predictedInputs[datasetTypeName]), 1)
        secondRef = makeRef(43)
        quantum.addPredictedInput(secondRef)
        self.assertEqual(len(quantum.predictedInputs[datasetTypeName]), 2)

        # Marking the last ref as actually used records it in actualInputs.
        self.assertEqual(quantum.actualInputs, dict())
        quantum._markInputUsed(secondRef)
        self.assertIn(datasetTypeName, quantum.actualInputs)
        self.assertEqual(len(quantum.actualInputs[datasetTypeName]), 1)

        # Outputs accumulate the same way.
        self.assertEqual(quantum.outputs, dict())
        quantum.addOutput(makeRef(42))
        self.assertIn(datasetTypeName, quantum.outputs)
        self.assertEqual(len(quantum.outputs[datasetTypeName]), 1)
        quantum.addOutput(makeRef(43))
        self.assertEqual(len(quantum.outputs[datasetTypeName]), 2)
Example #11
0
    def setUp(self):
        """Create DatasetRefs to test against the constraints model."""
        self.id = 0

        self.universe = DimensionUniverse.fromConfig()
        storageClass = StorageClass("DummySC", dict, None)

        # A calexp keyed by visit/physical_filter/instrument.
        visitDims = self.universe.extract(
            ("visit", "physical_filter", "instrument"))
        self.calexpA = self.makeDatasetRef("calexp", visitDims, storageClass, {
            "instrument": "A",
            "physical_filter": "u"
        })

        # Two pvi refs using calibration_label instead of physical_filter.
        calibDims = self.universe.extract(
            ("visit", "calibration_label", "instrument"))
        self.pviA = self.makeDatasetRef("pvi", calibDims, storageClass, {
            "instrument": "A",
            "visit": 1
        })
        self.pviB = self.makeDatasetRef("pvi", calibDims, storageClass, {
            "instrument": "B",
            "visit": 2
        })
Example #12
0
 def setUp(self):
     """Prepare the universe, given dimensions, and skymap parameters."""
     self.universe = DimensionUniverse.fromConfig()
     self.given = DimensionSet(universe=self.universe, elements=["skymap"])
     self.parameters = {"skymap": "unimportant", "tractMax": 5,
                        "patchNxMax": 3, "patchNyMax": 3}
 def __init__(self):
     """Initialize an empty dataset store and a minimal registry stub."""
     self.datasets = {}
     dims = DimensionUniverse.fromConfig()
     self.registry = SimpleNamespace(dimensions=dims)
Example #14
0
 def __init__(self):
     """Initialize the dimension universe, entry map, and id counter."""
     self.dimensions = DimensionUniverse.fromConfig()
     self._entries = {}
     self._counter = 0
Example #15
0
 def setUp(self):
     # Shared fixture: build the dimension universe from the default config.
     # NOTE(review): this fragment may be truncated by the chunk boundary —
     # confirm against the full file before relying on it.
     self.universe = DimensionUniverse.fromConfig()