def makeDatasetType(
    self,
    universe: DimensionUniverse,
    parentStorageClass: Optional[Union[StorageClass, str]] = None,
) -> DatasetType:
    """Construct a true `DatasetType` instance with normalized dimensions.

    Parameters
    ----------
    universe : `lsst.daf.butler.DimensionUniverse`
        Set of all known dimensions to be used to normalize the dimension
        names specified in config.
    parentStorageClass : `lsst.daf.butler.StorageClass` or `str`, optional
        Parent storage class for component datasets; `None` otherwise.

    Returns
    -------
    datasetType : `DatasetType`
        The `DatasetType` defined by this connection.
    """
    # Normalize the configured dimension names against the universe before
    # handing them to the DatasetType constructor.
    normalizedDimensions = universe.extract(self.dimensions)
    return DatasetType(
        self.name,
        normalizedDimensions,
        self.storageClass,
        isCalibration=self.isCalibration,
        parentStorageClass=parentStorageClass,
    )
def makeDatasetType(self, universe: DimensionUniverse) -> DatasetType:
    """Construct a true `DatasetType` instance with normalized dimensions.

    Parameters
    ----------
    universe : `lsst.daf.butler.DimensionUniverse`
        Set of all known dimensions to be used to normalize the dimension
        names specified in config.

    Returns
    -------
    datasetType : `DatasetType`
        The `DatasetType` defined by this connection.
    """
    # `universe.extract` converts the raw dimension names from config into
    # a normalized dimension graph accepted by DatasetType.
    return DatasetType(self.name, universe.extract(self.dimensions), self.storageClass)
def testAddInputsOutputs(self):
    """Test of addPredictedInput() method.

    Exercises the incremental mutation API of `Quantum`: predicted inputs,
    actual (used) inputs, and outputs, checking counts after each addition.
    """
    quantum = Quantum(taskName="some.task.object", run=None)

    # start with empty
    self.assertEqual(quantum.predictedInputs, dict())
    universe = DimensionUniverse()
    instrument = "DummyCam"
    datasetTypeName = "test_ds"
    storageClass = StorageClass("testref_StructuredData")
    datasetType = DatasetType(datasetTypeName,
                              universe.extract(("instrument", "visit")),
                              storageClass)

    # add one ref
    ref = DatasetRef(datasetType, dict(instrument=instrument, visit=42))
    quantum.addPredictedInput(ref)
    self.assertIn(datasetTypeName, quantum.predictedInputs)
    self.assertEqual(len(quantum.predictedInputs[datasetTypeName]), 1)
    # add second ref
    ref = DatasetRef(datasetType, dict(instrument=instrument, visit=43))
    quantum.addPredictedInput(ref)
    self.assertEqual(len(quantum.predictedInputs[datasetTypeName]), 2)

    # mark last ref as actually used; only that one ref should appear in
    # actualInputs even though two were predicted
    self.assertEqual(quantum.actualInputs, dict())
    quantum._markInputUsed(ref)
    self.assertIn(datasetTypeName, quantum.actualInputs)
    self.assertEqual(len(quantum.actualInputs[datasetTypeName]), 1)

    # add couple of outputs too
    self.assertEqual(quantum.outputs, dict())
    ref = DatasetRef(datasetType, dict(instrument=instrument, visit=42))
    quantum.addOutput(ref)
    self.assertIn(datasetTypeName, quantum.outputs)
    self.assertEqual(len(quantum.outputs[datasetTypeName]), 1)

    ref = DatasetRef(datasetType, dict(instrument=instrument, visit=43))
    quantum.addOutput(ref)
    self.assertEqual(len(quantum.outputs[datasetTypeName]), 2)
def testConstructor(self):
    """Test of constructor."""
    # Quantum specific arguments; a real PipelineTask cannot be used here
    # due to an inverted package dependency, so only the task name is given.
    taskName = "some.task.object"

    # A bare Quantum starts out with empty containers and no data ID.
    quantum = Quantum(taskName=taskName)
    self.assertEqual(quantum.taskName, taskName)
    self.assertEqual(quantum.initInputs, {})
    self.assertEqual(quantum.inputs, NamedKeyDict())
    self.assertEqual(quantum.outputs, {})
    self.assertIsNone(quantum.dataId)

    universe = DimensionUniverse()
    storageClass = StorageClass("testref_StructuredData")
    datasetType = DatasetType("test_ds",
                              universe.extract(("instrument", "visit")),
                              storageClass)

    def makeRefPair():
        # Two refs that differ only in visit number.
        return [DatasetRef(datasetType, dict(instrument="DummyCam", visit=visit))
                for visit in (42, 43)]

    # Constructing with inputs/outputs should preserve both ref lists.
    quantum = Quantum(taskName=taskName,
                      inputs={datasetType: makeRefPair()},
                      outputs={datasetType: makeRefPair()})
    self.assertEqual(len(quantum.inputs[datasetType]), 2)
    self.assertEqual(len(quantum.outputs[datasetType]), 2)
class ConstraintsTestCase(unittest.TestCase, DatasetTestHelper):
    """Tests for the accept/reject logic of `Constraints` configuration."""

    def setUp(self):
        # Dataset ID counter used by DatasetTestHelper.makeDatasetRef.
        self.id = 0

        # Create DatasetRefs to test against constraints model
        self.universe = DimensionUniverse()
        dimensions = self.universe.extract(
            ("visit", "physical_filter", "instrument"))
        sc = StorageClass("DummySC", dict, None)
        self.calexpA = self.makeDatasetRef("calexp", dimensions, sc,
                                           {"instrument": "A",
                                            "physical_filter": "u"},
                                           conform=False)

        dimensions = self.universe.extract(("visit", "detector", "instrument"))
        self.pviA = self.makeDatasetRef("pvi", dimensions, sc,
                                        {"instrument": "A", "visit": 1},
                                        conform=False)
        self.pviB = self.makeDatasetRef("pvi", dimensions, sc,
                                        {"instrument": "B", "visit": 2},
                                        conform=False)

    def testSimpleAccept(self):
        """Accept lists matching by dataset type name, storage class name,
        dimension expression, and per-instrument override."""
        config = ConstraintsConfig({"accept": ["calexp", "ExposureF"]})
        constraints = Constraints(config, universe=self.universe)

        self.assertTrue(constraints.isAcceptable(self.calexpA))
        self.assertFalse(constraints.isAcceptable(self.pviA))

        # Dimension accept
        config = ConstraintsConfig(
            {"accept": ["visit+physical_filter+instrument", "ExposureF"]})
        constraints = Constraints(config, universe=self.universe)

        self.assertTrue(constraints.isAcceptable(self.calexpA))
        self.assertFalse(constraints.isAcceptable(self.pviA))

        config = ConstraintsConfig(
            {"accept": ["visit+detector+instrument", "ExposureF"]})
        constraints = Constraints(config, universe=self.universe)

        self.assertFalse(constraints.isAcceptable(self.calexpA))
        self.assertTrue(constraints.isAcceptable(self.pviA))
        # NOTE(review): duplicate assertion kept verbatim from original.
        self.assertTrue(constraints.isAcceptable(self.pviA))

        # Only accept instrument A pvi
        config = ConstraintsConfig({"accept": [{"instrument<A>": ["pvi"]}]})
        constraints = Constraints(config, universe=self.universe)

        self.assertFalse(constraints.isAcceptable(self.calexpA))
        self.assertTrue(constraints.isAcceptable(self.pviA))
        self.assertFalse(constraints.isAcceptable(self.pviB))

        # Accept PVI for instrument B but not instrument A
        config = ConstraintsConfig(
            {"accept": ["calexp", {"instrument<B>": ["pvi"]}]})
        constraints = Constraints(config, universe=self.universe)

        self.assertTrue(constraints.isAcceptable(self.calexpA))
        self.assertFalse(constraints.isAcceptable(self.pviA))
        self.assertTrue(constraints.isAcceptable(self.pviB))

    def testSimpleReject(self):
        """Reject list matching by dataset type name / storage class name."""
        config = ConstraintsConfig({"reject": ["calexp", "ExposureF"]})
        constraints = Constraints(config, universe=self.universe)

        self.assertFalse(constraints.isAcceptable(self.calexpA))
        self.assertTrue(constraints.isAcceptable(self.pviA))

    def testAcceptReject(self):
        """Interaction between simultaneous accept and reject lists."""
        # Reject everything except calexp
        config = ConstraintsConfig({"accept": ["calexp"], "reject": ["all"]})
        constraints = Constraints(config, universe=self.universe)

        self.assertTrue(constraints.isAcceptable(self.calexpA))
        self.assertFalse(constraints.isAcceptable(self.pviA))

        # Accept everything except calexp
        config = ConstraintsConfig({"reject": ["calexp"], "accept": ["all"]})
        constraints = Constraints(config, universe=self.universe)

        self.assertFalse(constraints.isAcceptable(self.calexpA))
        self.assertTrue(constraints.isAcceptable(self.pviA))

        # Reject pvi but explicitly accept pvi for instrument A
        # Reject all instrument A but accept everything else
        # The reject here is superfluous
        config = ConstraintsConfig({
            "accept": [{
                "instrument<A>": ["pvi"]
            }],
            "reject": ["pvi"]
        })
        constraints = Constraints(config, universe=self.universe)

        self.assertFalse(constraints.isAcceptable(self.calexpA))
        self.assertTrue(constraints.isAcceptable(self.pviA))
        self.assertFalse(constraints.isAcceptable(self.pviB))

        # Accept everything except pvi from other than instrument A
        config = ConstraintsConfig({
            "accept": ["all", {
                "instrument<A>": ["pvi"]
            }],
            "reject": ["pvi"]
        })
        constraints = Constraints(config, universe=self.universe)

        self.assertTrue(constraints.isAcceptable(self.calexpA))
        self.assertTrue(constraints.isAcceptable(self.pviA))
        self.assertFalse(constraints.isAcceptable(self.pviB))

    def testWildcardReject(self):
        """Wildcard "all" in reject lists, globally and per-instrument."""
        # Reject everything
        config = ConstraintsConfig({"reject": ["all"]})
        constraints = Constraints(config, universe=self.universe)

        self.assertFalse(constraints.isAcceptable(self.calexpA))
        self.assertFalse(constraints.isAcceptable(self.pviA))

        # Reject all instrument A but accept everything else
        config = ConstraintsConfig({"reject": [{"instrument<A>": ["all"]}]})
        constraints = Constraints(config, universe=self.universe)

        self.assertFalse(constraints.isAcceptable(self.calexpA))
        self.assertFalse(constraints.isAcceptable(self.pviA))
        self.assertTrue(constraints.isAcceptable(self.pviB))

    def testWildcardAccept(self):
        """Wildcard / default-accept behavior, including empty and None
        configurations."""
        # Accept everything
        config = ConstraintsConfig({})
        constraints = Constraints(config, universe=self.universe)

        self.assertTrue(constraints.isAcceptable(self.calexpA))
        self.assertTrue(constraints.isAcceptable(self.pviA))

        # Accept everything
        constraints = Constraints(None, universe=self.universe)

        self.assertTrue(constraints.isAcceptable(self.calexpA))
        self.assertTrue(constraints.isAcceptable(self.pviA))

        # Accept everything explicitly
        config = ConstraintsConfig({"accept": ["all"]})
        constraints = Constraints(config, universe=self.universe)

        self.assertTrue(constraints.isAcceptable(self.calexpA))
        self.assertTrue(constraints.isAcceptable(self.pviA))

        # Accept all instrument A but reject everything else
        config = ConstraintsConfig({"accept": [{"instrument<A>": ["all"]}]})
        constraints = Constraints(config, universe=self.universe)

        self.assertTrue(constraints.isAcceptable(self.calexpA))
        self.assertTrue(constraints.isAcceptable(self.pviA))
        self.assertFalse(constraints.isAcceptable(self.pviB))

    def testEdgeCases(self):
        """Contradictory configuration must raise."""
        # Accept everything and reject everything
        config = ConstraintsConfig({"accept": ["all"], "reject": ["all"]})
        with self.assertRaises(ValidationError):
            Constraints(config, universe=self.universe)
def testRegistryConfig(self):
    """Test formatter lookup precedence from a datastore config file.

    Checks that the formatter factory resolves a DatasetRef by, in order:
    instrument-specific dataset type entry, dataset type fallback,
    dimension-based entry, and finally storage-class-based entry.
    """
    configFile = os.path.join(TESTDIR, "config", "basic",
                              "posixDatastore.yaml")
    config = Config(configFile)
    universe = DimensionUniverse()
    self.factory.registerFormatters(config["datastore", "formatters"],
                                    universe=universe)

    # Create a DatasetRef with and without instrument matching the
    # one in the config file.
    dimensions = universe.extract(
        ("visit", "physical_filter", "instrument"))
    sc = StorageClass("DummySC", dict, None)
    refPviHsc = self.makeDatasetRef("pvi", dimensions, sc,
                                    {"instrument": "DummyHSC",
                                     "physical_filter": "v"},
                                    conform=False)
    refPviHscFmt = self.factory.getFormatterClass(refPviHsc)
    self.assertIsFormatter(refPviHscFmt)
    self.assertIn("JsonFormatter", refPviHscFmt.name())

    refPviNotHsc = self.makeDatasetRef("pvi", dimensions, sc,
                                       {"instrument": "DummyNotHSC",
                                        "physical_filter": "v"},
                                       conform=False)
    refPviNotHscFmt = self.factory.getFormatterClass(refPviNotHsc)
    self.assertIsFormatter(refPviNotHscFmt)
    self.assertIn("PickleFormatter", refPviNotHscFmt.name())

    # Create a DatasetRef that should fall back to using Dimensions
    refPvixHsc = self.makeDatasetRef("pvix", dimensions, sc,
                                     {"instrument": "DummyHSC",
                                      "physical_filter": "v"},
                                     conform=False)
    refPvixNotHscFmt = self.factory.getFormatterClass(refPvixHsc)
    self.assertIsFormatter(refPvixNotHscFmt)
    self.assertIn("PickleFormatter", refPvixNotHscFmt.name())

    # Create a DatasetRef that should fall back to using StorageClass
    dimensionsNoV = DimensionGraph(universe, names=("physical_filter",
                                                    "instrument"))
    refPvixNotHscDims = self.makeDatasetRef("pvix", dimensionsNoV, sc,
                                            {"instrument": "DummyHSC",
                                             "physical_filter": "v"},
                                            conform=False)
    refPvixNotHscDims_fmt = self.factory.getFormatterClass(
        refPvixNotHscDims)
    self.assertIsFormatter(refPvixNotHscDims_fmt)
    self.assertIn("YamlFormatter", refPvixNotHscDims_fmt.name())
class DatasetTypeTestCase(unittest.TestCase):
    """Test for DatasetType.
    """

    def setUp(self):
        # Single shared dimension universe for all tests in this case.
        self.universe = DimensionUniverse()

    def testConstructor(self):
        """Test construction preserves values.

        Note that construction doesn't check for valid storageClass.
        This can only be verified for a particular schema.
        """
        datasetTypeName = "test"
        storageClass = StorageClass("test_StructuredData")
        dimensions = self.universe.extract(("visit", "instrument"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

        # Component dataset types require a parent storage class, and
        # non-components must not have one.
        with self.assertRaises(ValueError,
                               msg="Construct component without parent storage class"):
            DatasetType(DatasetType.nameWithComponent(datasetTypeName, "comp"),
                        dimensions, storageClass)
        with self.assertRaises(ValueError,
                               msg="Construct non-component with parent storage class"):
            DatasetType(datasetTypeName,
                        dimensions, storageClass, parentStorageClass="NotAllowed")

    def testConstructor2(self):
        """Test construction from StorageClass name.
        """
        datasetTypeName = "test"
        storageClass = StorageClass("test_constructor2")
        # Registration is needed so the name can be resolved later.
        StorageClassFactory().registerStorageClass(storageClass)
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, "test_constructor2")
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

    def testNameValidation(self):
        """Test that dataset type names only contain certain characters
        in certain positions.
        """
        dimensions = self.universe.extract(("instrument", "visit"))
        goodNames = ("a", "A", "z1", "Z1", "a_1B", "A_1b")
        badNames = ("1", "_", "a%b", "B+Z", "T[0]")

        # Construct storage class with all the good names included as
        # components so that we can test internal consistency
        storageClass = StorageClass("test_StructuredData",
                                    components={n: StorageClass("component") for n in goodNames})

        for name in goodNames:
            composite = DatasetType(name, dimensions, storageClass)
            self.assertEqual(composite.name, name)
            for suffix in goodNames:
                full = DatasetType.nameWithComponent(name, suffix)
                component = composite.makeComponentDatasetType(suffix)
                self.assertEqual(component.name, full)
                # Components inherit the composite's storage class as parent.
                self.assertEqual(component.parentStorageClass.name,
                                 "test_StructuredData")
            for suffix in badNames:
                full = DatasetType.nameWithComponent(name, suffix)
                with self.subTest(full=full):
                    with self.assertRaises(ValueError):
                        DatasetType(full, dimensions, storageClass)
        for name in badNames:
            with self.subTest(name=name):
                with self.assertRaises(ValueError):
                    DatasetType(name, dimensions, storageClass)

    def testEquality(self):
        """Equality must hold for matching name/dimensions/storage class,
        whether the storage class is given as an instance or a name."""
        storageA = StorageClass("test_a")
        storageB = StorageClass("test_b")
        parent = StorageClass("test")
        dimensionsA = self.universe.extract(["instrument"])
        dimensionsB = self.universe.extract(["skymap"])
        self.assertEqual(DatasetType("a", dimensionsA, storageA,),
                         DatasetType("a", dimensionsA, storageA,))
        self.assertEqual(DatasetType("a", dimensionsA, "test_a",),
                         DatasetType("a", dimensionsA, storageA,))
        self.assertEqual(DatasetType("a", dimensionsA, storageA,),
                         DatasetType("a", dimensionsA, "test_a",))
        self.assertEqual(DatasetType("a", dimensionsA, "test_a",),
                         DatasetType("a", dimensionsA, "test_a",))
        self.assertEqual(DatasetType("a.b", dimensionsA, "test_b",
                                     parentStorageClass=parent),
                         DatasetType("a.b", dimensionsA, "test_b",
                                     parentStorageClass=parent))
        self.assertEqual(DatasetType("a.b", dimensionsA, "test_b",
                                     parentStorageClass="parent"),
                         DatasetType("a.b", dimensionsA, "test_b",
                                     parentStorageClass="parent"))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("b", dimensionsA, storageA,))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("b", dimensionsA, "test_a",))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("a", dimensionsA, storageB,))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("a", dimensionsA, "test_b",))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("a", dimensionsB, storageA,))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("a", dimensionsB, "test_a",))
        self.assertNotEqual(DatasetType("a.b", dimensionsA, "test_b",
                                        parentStorageClass=storageA),
                            DatasetType("a.b", dimensionsA, "test_b",
                                        parentStorageClass=storageB))
        self.assertNotEqual(DatasetType("a.b", dimensionsA, "test_b",
                                        parentStorageClass="storageA"),
                            DatasetType("a.b", dimensionsA, "test_b",
                                        parentStorageClass="storageB"))

    def testSorting(self):
        """Can we sort a DatasetType"""
        storage = StorageClass("test_a")
        dimensions = self.universe.extract(["instrument"])

        d_a = DatasetType("a", dimensions, storage)
        d_f = DatasetType("f", dimensions, storage)
        d_p = DatasetType("p", dimensions, storage)

        sort = sorted([d_p, d_f, d_a])
        self.assertEqual(sort, [d_a, d_f, d_p])

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", d_p, "c", d_f, d_a, "d"])

    def testParentPlaceholder(self):
        """Test that a parent placeholder can be replaced."""
        storageComp = StorageClass("component")
        storageParent = StorageClass("Parent")
        dimensions = self.universe.extract(["instrument"])
        component = DatasetType("a.b", dimensions, storageComp,
                                parentStorageClass=DatasetType.PlaceholderParentStorageClass)
        self.assertIsNotNone(component.parentStorageClass)
        # Finalizing with a bare string is rejected; a StorageClass
        # instance is required.
        with self.assertRaises(ValueError):
            component.finalizeParentStorageClass("parent")
        component.finalizeParentStorageClass(storageParent)
        self.assertEqual(component.parentStorageClass,
                         storageParent)

        component = DatasetType("a.b", dimensions, storageComp,
                                parentStorageClass=storageParent)
        with self.assertRaises(ValueError):
            # Can not replace unless a placeholder
            component.finalizeParentStorageClass(storageComp)

        datasetType = DatasetType("a", dimensions, storageParent)
        with self.assertRaises(ValueError):
            # Can not add parent if not component
            datasetType.finalizeParentStorageClass(storageComp)

    def testHashability(self):
        """Test `DatasetType.__hash__`.

        This test is performed by checking that `DatasetType` entries can
        be inserted into a `set` and that unique values of its
        (`name`, `storageClass`, `dimensions`) parameters result in separate
        entries (and equal ones don't).

        This does not check for uniformity of hashing or the actual values
        of the hash function.
        """
        types = []
        unique = 0
        storageC = StorageClass("test_c")
        storageD = StorageClass("test_d")
        # Cross product of name x storage class x dimensions; each combination
        # plus its copy should collapse to one set entry.
        for name in ["a", "b"]:
            for storageClass in [storageC, storageD]:
                for dimensions in [("instrument", ), ("skymap", )]:
                    datasetType = DatasetType(name,
                                              self.universe.extract(dimensions),
                                              storageClass)
                    datasetTypeCopy = DatasetType(name,
                                                  self.universe.extract(dimensions),
                                                  storageClass)
                    types.extend((datasetType, datasetTypeCopy))
                    unique += 1  # datasetType should always equal its copy
        self.assertEqual(len(set(types)), unique)  # all other combinations are unique

        # also check that hashes of instances constructed with StorageClass
        # name matches hashes of instances constructed with instances
        dimensions = self.universe.extract(["instrument"])
        self.assertEqual(hash(DatasetType("a", dimensions, storageC)),
                         hash(DatasetType("a", dimensions, "test_c")))
        self.assertEqual(hash(DatasetType("a", dimensions, "test_c")),
                         hash(DatasetType("a", dimensions, "test_c")))
        self.assertNotEqual(hash(DatasetType("a", dimensions, storageC)),
                            hash(DatasetType("a", dimensions, "test_d")))
        self.assertNotEqual(hash(DatasetType("a", dimensions, storageD)),
                            hash(DatasetType("a", dimensions, "test_c")))
        self.assertNotEqual(hash(DatasetType("a", dimensions, "test_c")),
                            hash(DatasetType("a", dimensions, "test_d")))

    def testDeepCopy(self):
        """Test that we can copy a dataset type."""
        storageClass = StorageClass("test_copy")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)

        # Now with calibration flag set
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass,
                                  isCalibration=True)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)
        self.assertTrue(dcopy.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("copy_component")
        componentDatasetType = DatasetType(DatasetType.nameWithComponent(datasetTypeName, "comp"),
                                           dimensions,
                                           componentStorageClass,
                                           parentStorageClass=storageClass)
        dcopy = copy.deepcopy(componentDatasetType)
        self.assertEqual(dcopy, componentDatasetType)

    def testPickle(self):
        """Test pickle support.
        """
        storageClass = StorageClass("test_pickle")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        # Un-pickling requires that storage class is registered with factory.
        StorageClassFactory().registerStorageClass(storageClass)
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(datasetType.name, datasetTypeOut.name)
        self.assertEqual(datasetType.dimensions.names,
                         datasetTypeOut.dimensions.names)
        self.assertEqual(datasetType.storageClass, datasetTypeOut.storageClass)
        self.assertIsNone(datasetTypeOut.parentStorageClass)
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertFalse(datasetTypeOut.isCalibration())

        datasetType = DatasetType(datasetTypeName, dimensions, storageClass,
                                  isCalibration=True)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertTrue(datasetTypeOut.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("pickle_component")
        StorageClassFactory().registerStorageClass(componentStorageClass)
        componentDatasetType = DatasetType(DatasetType.nameWithComponent(datasetTypeName, "comp"),
                                           dimensions,
                                           componentStorageClass,
                                           parentStorageClass=storageClass)
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(componentDatasetType.name, datasetTypeOut.name)
        self.assertEqual(componentDatasetType.dimensions.names,
                         datasetTypeOut.dimensions.names)
        self.assertEqual(componentDatasetType.storageClass,
                         datasetTypeOut.storageClass)
        self.assertEqual(componentDatasetType.parentStorageClass,
                         datasetTypeOut.parentStorageClass)
        self.assertEqual(datasetTypeOut.parentStorageClass.name,
                         storageClass.name)
        self.assertEqual(datasetTypeOut, componentDatasetType)

        # Now with a string and not a real storage class to test that
        # pickling doesn't force the StorageClass to be resolved
        componentDatasetType = DatasetType(DatasetType.nameWithComponent(datasetTypeName, "comp"),
                                           dimensions,
                                           "StrangeComponent",
                                           parentStorageClass="UnknownParent")
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName,
                         componentDatasetType._parentStorageClassName)

        # Now with a storage class that is created by the factory
        factoryStorageClassClass = StorageClassFactory.makeNewStorageClass("ParentClass")
        factoryComponentStorageClassClass = StorageClassFactory.makeNewStorageClass("ComponentClass")
        componentDatasetType = DatasetType(DatasetType.nameWithComponent(datasetTypeName, "comp"),
                                           dimensions,
                                           factoryComponentStorageClassClass(),
                                           parentStorageClass=factoryStorageClassClass())
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName,
                         componentDatasetType._parentStorageClassName)

    def test_composites(self):
        """Test components within composite DatasetTypes."""
        storageClassA = StorageClass("compA")
        storageClassB = StorageClass("compB")

        storageClass = StorageClass("test_composite",
                                    components={"compA": storageClassA,
                                                "compB": storageClassB})
        self.assertTrue(storageClass.isComposite())
        self.assertFalse(storageClassA.isComposite())
        self.assertFalse(storageClassB.isComposite())

        dimensions = self.universe.extract(("instrument", "visit"))

        datasetTypeComposite = DatasetType("composite", dimensions,
                                           storageClass)
        datasetTypeComponentA = datasetTypeComposite.makeComponentDatasetType("compA")
        datasetTypeComponentB = datasetTypeComposite.makeComponentDatasetType("compB")

        self.assertTrue(datasetTypeComposite.isComposite())
        self.assertFalse(datasetTypeComponentA.isComposite())
        self.assertTrue(datasetTypeComponentB.isComponent())
        self.assertFalse(datasetTypeComposite.isComponent())

        self.assertEqual(datasetTypeComposite.name, "composite")
        self.assertEqual(datasetTypeComponentA.name, "composite.compA")
        self.assertEqual(datasetTypeComponentB.component(), "compB")
        self.assertEqual(datasetTypeComposite.nameAndComponent(),
                         ("composite", None))
        self.assertEqual(datasetTypeComponentA.nameAndComponent(),
                         ("composite", "compA"))

        self.assertEqual(datasetTypeComponentA.parentStorageClass,
                         storageClass)
        self.assertEqual(datasetTypeComponentB.parentStorageClass,
                         storageClass)
        self.assertIsNone(datasetTypeComposite.parentStorageClass)
class DatasetRefTestCase(unittest.TestCase):
    """Test for DatasetRef.
    """

    def setUp(self):
        self.universe = DimensionUniverse()
        datasetTypeName = "test"
        self.componentStorageClass1 = StorageClass("Component1")
        self.componentStorageClass2 = StorageClass("Component2")
        # Composite parent storage class with two named components.
        self.parentStorageClass = StorageClass("Parent",
                                               components={"a": self.componentStorageClass1,
                                                           "b": self.componentStorageClass2})
        dimensions = self.universe.extract(("instrument", "visit"))
        self.dataId = dict(instrument="DummyCam", visit=42)
        self.datasetType = DatasetType(datasetTypeName, dimensions,
                                       self.parentStorageClass)

    def testConstructor(self):
        """Test that construction preserves and validates values.
        """
        # Construct an unresolved ref.
        ref = DatasetRef(self.datasetType, self.dataId)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(ref.dataId, DataCoordinate.standardize(self.dataId,
                                                                universe=self.universe),
                         msg=ref.dataId)
        self.assertIsInstance(ref.dataId, DataCoordinate)
        # Constructing an unresolved ref with run and/or components should
        # fail.
        run = "somerun"
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, run=run)
        # Passing a data ID that is missing dimensions should fail.
        with self.assertRaises(KeyError):
            DatasetRef(self.datasetType, {"instrument": "DummyCam"})
        # Constructing a resolved ref should preserve run as well as everything
        # else.
        ref = DatasetRef(self.datasetType, self.dataId, id=1, run=run)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(ref.dataId, DataCoordinate.standardize(self.dataId,
                                                                universe=self.universe),
                         msg=ref.dataId)
        self.assertIsInstance(ref.dataId, DataCoordinate)
        self.assertEqual(ref.id, 1)
        self.assertEqual(ref.run, run)

    def testSorting(self):
        """Can we sort a DatasetRef"""
        ref1 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=1))
        ref2 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=10))
        ref3 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=22))

        # Enable detailed diff report
        self.maxDiff = None

        # This will sort them on visit number
        sort = sorted([ref3, ref1, ref2])
        self.assertEqual(sort, [ref1, ref2, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now include a run
        ref1 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=43),
                          run="b", id=2)
        self.assertEqual(ref1.run, "b")
        ref4 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=10),
                          run="b", id=2)
        ref2 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=4),
                          run="a", id=1)
        ref3 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=104),
                          run="c", id=3)

        # This will sort them on run before visit
        sort = sorted([ref3, ref1, ref2, ref4])
        self.assertEqual(sort, [ref2, ref4, ref1, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", ref1, "c"])

    def testResolving(self):
        """Round-trip resolved -> unresolved -> resolved preserves values."""
        ref = DatasetRef(self.datasetType, self.dataId, id=1, run="somerun")
        unresolvedRef = ref.unresolved()
        self.assertIsNotNone(ref.id)
        self.assertIsNone(unresolvedRef.id)
        self.assertIsNone(unresolvedRef.run)
        self.assertNotEqual(ref, unresolvedRef)
        self.assertEqual(ref.unresolved(), unresolvedRef)
        self.assertEqual(ref.datasetType, unresolvedRef.datasetType)
        self.assertEqual(ref.dataId, unresolvedRef.dataId)
        reresolvedRef = unresolvedRef.resolved(id=1, run="somerun")
        self.assertEqual(ref, reresolvedRef)
        self.assertEqual(reresolvedRef.unresolved(), unresolvedRef)
        self.assertIsNotNone(reresolvedRef.run)

    def testPickle(self):
        """A resolved ref must survive a pickle round trip unchanged."""
        ref = DatasetRef(self.datasetType, self.dataId, id=1, run="somerun")
        s = pickle.dumps(ref)
        self.assertEqual(pickle.loads(s), ref)
class DatasetTypeTestCase(unittest.TestCase):
    """Test for DatasetType.
    """

    def setUp(self):
        # Single shared dimension universe for all tests in this case.
        self.universe = DimensionUniverse()

    def testConstructor(self):
        """Test construction preserves values.

        Note that construction doesn't check for valid storageClass.
        This can only be verified for a particular schema.
        """
        datasetTypeName = "test"
        storageClass = StorageClass("test_StructuredData")
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

    def testConstructor2(self):
        """Test construction from StorageClass name.
        """
        datasetTypeName = "test"
        storageClass = StorageClass("test_constructor2")
        # Registration is needed so the name can be resolved later.
        StorageClassFactory().registerStorageClass(storageClass)
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, "test_constructor2")
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

    def testNameValidation(self):
        """Test that dataset type names only contain certain characters
        in certain positions.
        """
        dimensions = self.universe.extract(("instrument", "visit"))
        storageClass = StorageClass("test_StructuredData")
        goodNames = ("a", "A", "z1", "Z1", "a_1B", "A_1b")
        badNames = ("1", "_", "a%b", "B+Z", "T[0]")

        for name in goodNames:
            self.assertEqual(DatasetType(name, dimensions, storageClass).name,
                             name)
            # Good component suffixes are accepted on good parent names;
            # bad suffixes must be rejected.
            for suffix in goodNames:
                full = f"{name}.{suffix}"
                self.assertEqual(DatasetType(full, dimensions, storageClass).name,
                                 full)
            for suffix in badNames:
                full = f"{name}.{suffix}"
                with self.subTest(full=full):
                    with self.assertRaises(ValueError):
                        DatasetType(full, dimensions, storageClass)
        for name in badNames:
            with self.subTest(name=name):
                with self.assertRaises(ValueError):
                    DatasetType(name, dimensions, storageClass)

    def testEquality(self):
        """Equality must hold for matching name/dimensions/storage class,
        whether the storage class is given as an instance or a name."""
        storageA = StorageClass("test_a")
        storageB = StorageClass("test_b")
        dimensionsA = self.universe.extract(["instrument"])
        dimensionsB = self.universe.extract(["skymap"])
        self.assertEqual(DatasetType("a", dimensionsA, storageA,),
                         DatasetType("a", dimensionsA, storageA,))
        self.assertEqual(DatasetType("a", dimensionsA, "test_a",),
                         DatasetType("a", dimensionsA, storageA,))
        self.assertEqual(DatasetType("a", dimensionsA, storageA,),
                         DatasetType("a", dimensionsA, "test_a",))
        self.assertEqual(DatasetType("a", dimensionsA, "test_a",),
                         DatasetType("a", dimensionsA, "test_a",))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("b", dimensionsA, storageA,))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("b", dimensionsA, "test_a",))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("a", dimensionsA, storageB,))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("a", dimensionsA, "test_b",))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("a", dimensionsB, storageA,))
        self.assertNotEqual(DatasetType("a", dimensionsA, storageA,),
                            DatasetType("a", dimensionsB, "test_a",))

    def testHashability(self):
        """Test `DatasetType.__hash__`.

        This test is performed by checking that `DatasetType` entries can
        be inserted into a `set` and that unique values of its
        (`name`, `storageClass`, `dimensions`) parameters result in separate
        entries (and equal ones don't).

        This does not check for uniformity of hashing or the actual values
        of the hash function.
        """
        types = []
        unique = 0
        storageC = StorageClass("test_c")
        storageD = StorageClass("test_d")
        # Cross product of name x storage class x dimensions; each combination
        # plus its copy should collapse to one set entry.
        for name in ["a", "b"]:
            for storageClass in [storageC, storageD]:
                for dimensions in [("instrument", ), ("skymap", )]:
                    datasetType = DatasetType(name,
                                              self.universe.extract(dimensions),
                                              storageClass)
                    datasetTypeCopy = DatasetType(name,
                                                  self.universe.extract(dimensions),
                                                  storageClass)
                    types.extend((datasetType, datasetTypeCopy))
                    unique += 1  # datasetType should always equal its copy
        self.assertEqual(len(set(types)), unique)  # all other combinations are unique

        # also check that hashes of instances constructed with StorageClass
        # name matches hashes of instances constructed with instances
        dimensions = self.universe.extract(["instrument"])
        self.assertEqual(hash(DatasetType("a", dimensions, storageC)),
                         hash(DatasetType("a", dimensions, "test_c")))
        self.assertEqual(hash(DatasetType("a", dimensions, "test_c")),
                         hash(DatasetType("a", dimensions, "test_c")))
        self.assertNotEqual(hash(DatasetType("a", dimensions, storageC)),
                            hash(DatasetType("a", dimensions, "test_d")))
        self.assertNotEqual(hash(DatasetType("a", dimensions, storageD)),
                            hash(DatasetType("a", dimensions, "test_c")))
        self.assertNotEqual(hash(DatasetType("a", dimensions, "test_c")),
                            hash(DatasetType("a", dimensions, "test_d")))

    def testPickle(self):
        """Test pickle support.
        """
        storageClass = StorageClass("test_pickle")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        # Un-pickling requires that storage class is registered with factory.
        StorageClassFactory().registerStorageClass(storageClass)
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(datasetType.name, datasetTypeOut.name)
        self.assertEqual(datasetType.dimensions.names,
                         datasetTypeOut.dimensions.names)
        self.assertEqual(datasetType.storageClass, datasetTypeOut.storageClass)

    def test_composites(self):
        """Test components within composite DatasetTypes."""
        storageClassA = StorageClass("compA")
        storageClassB = StorageClass("compB")

        storageClass = StorageClass("test_composite",
                                    components={"compA": storageClassA,
                                                "compB": storageClassB})
        self.assertTrue(storageClass.isComposite())
        self.assertFalse(storageClassA.isComposite())
        self.assertFalse(storageClassB.isComposite())

        dimensions = self.universe.extract(("instrument", "visit"))

        datasetTypeComposite = DatasetType("composite", dimensions,
                                           storageClass)
        # NOTE(review): this older variant builds component DatasetTypes
        # directly by name rather than via makeComponentDatasetType.
        datasetTypeComponentA = DatasetType("composite.compA", dimensions,
                                            storageClassA)
        datasetTypeComponentB = DatasetType("composite.compB", dimensions,
                                            storageClassB)

        self.assertTrue(datasetTypeComposite.isComposite())
        self.assertFalse(datasetTypeComponentA.isComposite())
        self.assertTrue(datasetTypeComponentB.isComponent())
        self.assertFalse(datasetTypeComposite.isComponent())

        self.assertEqual(datasetTypeComposite.name, "composite")
        self.assertEqual(datasetTypeComponentA.name, "composite.compA")
        self.assertEqual(datasetTypeComponentB.component(), "compB")
        self.assertEqual(datasetTypeComposite.nameAndComponent(),
                         ("composite", None))
        self.assertEqual(datasetTypeComponentA.nameAndComponent(),
                         ("composite", "compA"))
class DatasetRefTestCase(unittest.TestCase):
    """Test for DatasetRef.
    """

    def setUp(self):
        """Create a composite DatasetType and a matching data ID fixture."""
        self.universe = DimensionUniverse()
        datasetTypeName = "test"
        self.componentStorageClass1 = StorageClass("Component1")
        self.componentStorageClass2 = StorageClass("Component2")
        # Parent storage class with components "a" and "b", used to test
        # component-related validation in DatasetRef construction.
        self.parentStorageClass = StorageClass("Parent",
                                               components={"a": self.componentStorageClass1,
                                                           "b": self.componentStorageClass2})
        dimensions = self.universe.extract(("instrument", "visit"))
        self.dataId = dict(instrument="DummyCam", visit=42)
        self.datasetType = DatasetType(datasetTypeName, dimensions, self.parentStorageClass)

    def testConstructor(self):
        """Test that construction preserves and validates values.
        """
        # Construct an unresolved ref.
        ref = DatasetRef(self.datasetType, self.dataId)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(ref.dataId, self.dataId, msg=ref.dataId)
        self.assertIsInstance(ref.dataId, DataCoordinate)
        self.assertIsNone(ref.components)
        # Constructing an unresolved ref with run and/or components should
        # fail.
        run = "somerun"
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, run=run)
        # A resolved component ref (has both id and run) for component "a".
        components = {
            "a": DatasetRef(self.datasetType.makeComponentDatasetType("a"), self.dataId,
                            id=2, run=run)
        }
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, components=components)
        # Passing a data ID that is missing dimensions should fail.
        with self.assertRaises(KeyError):
            DatasetRef(self.datasetType, {"instrument": "DummyCam"})
        # Constructing a resolved ref should preserve run and components,
        # as well as everything else.
        ref = DatasetRef(self.datasetType, self.dataId, id=1, run=run, components=components)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(ref.dataId, self.dataId, msg=ref.dataId)
        self.assertIsInstance(ref.dataId, DataCoordinate)
        self.assertEqual(ref.id, 1)
        self.assertEqual(ref.run, run)
        self.assertEqual(ref.components, components)
        # Constructing a resolved ref with bad component storage classes
        # should fail.
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, id=1, run=run,
                       components={"b": components["a"]})
        # Constructing a resolved ref with unresolved components should fail.
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, id=1, run=run,
                       components={"a": components["a"].unresolved()})
        # Constructing a resolved ref with bad component names should fail.
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, id=1, run=run,
                       components={"c": components["a"]})

    def testResolving(self):
        """Test round-tripping between resolved and unresolved refs."""
        ref = DatasetRef(self.datasetType, self.dataId, id=1, run="somerun")
        unresolvedRef = ref.unresolved()
        # unresolved() drops id, run, and components but keeps type/dataId.
        self.assertIsNotNone(ref.id)
        self.assertIsNone(unresolvedRef.id)
        self.assertIsNone(unresolvedRef.run)
        self.assertIsNone(unresolvedRef.components)
        self.assertNotEqual(ref, unresolvedRef)
        self.assertEqual(ref.unresolved(), unresolvedRef)
        self.assertEqual(ref.datasetType, unresolvedRef.datasetType)
        self.assertEqual(ref.dataId, unresolvedRef.dataId)
        # Re-resolving with the same id/run reproduces the original ref.
        reresolvedRef = unresolvedRef.resolved(id=1, run="somerun")
        self.assertEqual(ref, reresolvedRef)
        self.assertEqual(reresolvedRef.unresolved(), unresolvedRef)
        self.assertIsNotNone(reresolvedRef.run)
        self.assertIsNotNone(reresolvedRef.components)

    def testPickle(self):
        """Test that a resolved DatasetRef round-trips through pickle."""
        ref = DatasetRef(self.datasetType, self.dataId, id=1, run="somerun")
        s = pickle.dumps(ref)
        self.assertEqual(pickle.loads(s), ref)
def testRegistryConfig(self): configFile = os.path.join(TESTDIR, "config", "basic", "posixDatastore.yaml") config = Config(configFile) universe = DimensionUniverse() self.factory.registerFormatters(config["datastore", "formatters"], universe=universe) # Create a DatasetRef with and without instrument matching the # one in the config file. dimensions = universe.extract(("visit", "physical_filter", "instrument")) sc = StorageClass("DummySC", dict, None) refPviHsc = self.makeDatasetRef("pvi", dimensions, sc, {"instrument": "DummyHSC", "physical_filter": "v"}, conform=False) refPviHscFmt = self.factory.getFormatterClass(refPviHsc) self.assertIsFormatter(refPviHscFmt) self.assertIn("JsonFormatter", refPviHscFmt.name()) refPviNotHsc = self.makeDatasetRef("pvi", dimensions, sc, {"instrument": "DummyNotHSC", "physical_filter": "v"}, conform=False) refPviNotHscFmt = self.factory.getFormatterClass(refPviNotHsc) self.assertIsFormatter(refPviNotHscFmt) self.assertIn("PickleFormatter", refPviNotHscFmt.name()) # Create a DatasetRef that should fall back to using Dimensions refPvixHsc = self.makeDatasetRef("pvix", dimensions, sc, {"instrument": "DummyHSC", "physical_filter": "v"}, conform=False) refPvixNotHscFmt = self.factory.getFormatterClass(refPvixHsc) self.assertIsFormatter(refPvixNotHscFmt) self.assertIn("PickleFormatter", refPvixNotHscFmt.name()) # Create a DatasetRef that should fall back to using StorageClass dimensionsNoV = DimensionGraph(universe, names=("physical_filter", "instrument")) refPvixNotHscDims = self.makeDatasetRef("pvix", dimensionsNoV, sc, {"instrument": "DummyHSC", "physical_filter": "v"}, conform=False) refPvixNotHscDims_fmt = self.factory.getFormatterClass(refPvixNotHscDims) self.assertIsFormatter(refPvixNotHscDims_fmt) self.assertIn("YamlFormatter", refPvixNotHscDims_fmt.name()) # Check that parameters are stored refParam = self.makeDatasetRef("paramtest", dimensions, sc, {"instrument": "DummyNotHSC", "physical_filter": "v"}, conform=False) lookup, 
refParam_fmt, kwargs = self.factory.getFormatterClassWithMatch(refParam) self.assertIn("writeParameters", kwargs) expected = {"max": 5, "min": 2, "comment": "Additional commentary", "recipe": "recipe1"} self.assertEqual(kwargs["writeParameters"], expected) self.assertIn("FormatterTest", refParam_fmt.name()) f = self.factory.getFormatter(refParam, self.fileDescriptor) self.assertEqual(f.writeParameters, expected) f = self.factory.getFormatter(refParam, self.fileDescriptor, writeParameters={"min": 22, "extra": 50}) self.assertEqual(f.writeParameters, {"max": 5, "min": 22, "comment": "Additional commentary", "extra": 50, "recipe": "recipe1"}) self.assertIn("recipe1", f.writeRecipes) self.assertEqual(f.writeParameters["recipe"], "recipe1") with self.assertRaises(ValueError): # "new" is not allowed as a write parameter self.factory.getFormatter(refParam, self.fileDescriptor, writeParameters={"new": 1}) with self.assertRaises(RuntimeError): # "mode" is a required recipe parameter self.factory.getFormatter(refParam, self.fileDescriptor, writeRecipes={"recipe3": {"notmode": 1}})
class FormatterFactoryTestCase(unittest.TestCase, DatasetTestHelper):
    """Tests of the formatter factory infrastructure.
    """

    def setUp(self):
        """Create a fresh FormatterFactory plus dummy lookup fixtures."""
        self.id = 0
        self.factory = FormatterFactory()
        self.universe = DimensionUniverse()
        self.dataId = DataCoordinate.makeEmpty(self.universe)

        # Dummy FileDescriptor for testing getFormatter
        self.fileDescriptor = FileDescriptor(Location("/a/b/c", "d"),
                                             StorageClass("DummyStorageClass", dict, None))

    def assertIsFormatter(self, formatter):
        """Check that the supplied parameter is either a Formatter instance
        or Formatter class."""
        if inspect.isclass(formatter):
            self.assertTrue(issubclass(formatter, Formatter), f"Is {formatter} a Formatter")
        else:
            self.assertIsInstance(formatter, Formatter)

    def testFormatter(self):
        """Check basic parameter exceptions"""
        f = DoNothingFormatter(self.fileDescriptor, self.dataId)
        self.assertEqual(f.writeRecipes, {})
        self.assertEqual(f.writeParameters, {})
        self.assertIn("DoNothingFormatter", repr(f))

        # A file descriptor is required.
        with self.assertRaises(TypeError):
            DoNothingFormatter()

        # DoNothingFormatter accepts no write parameters.
        with self.assertRaises(ValueError):
            DoNothingFormatter(self.fileDescriptor, self.dataId,
                               writeParameters={"param1": 0})

        # ...and no write recipes.
        with self.assertRaises(RuntimeError):
            DoNothingFormatter(self.fileDescriptor, self.dataId,
                               writeRecipes={"label": "value"})

        with self.assertRaises(NotImplementedError):
            f.makeUpdatedLocation(Location("a", "b"))

        with self.assertRaises(NotImplementedError):
            f.write("str")

    def testExtensionValidation(self):
        """Test extension validation"""
        # Each tuple is (file name, accepted by the single-extension
        # formatter, accepted by the multiple-extensions formatter).
        for file, single_ok, multi_ok in (
                ("e.fits", True, True),
                ("e.fit", False, True),
                ("e.fits.fz", False, True),
                ("e.txt", False, False),
                ("e.1.4.fits", True, True),
                ("e.3.fit", False, True),
                ("e.1.4.fits.gz", False, True),
        ):
            loc = Location("/a/b/c", file)
            for formatter, passes in ((SingleExtensionFormatter, single_ok),
                                      (MultipleExtensionsFormatter, multi_ok)):
                if passes:
                    formatter.validateExtension(loc)
                else:
                    with self.assertRaises(ValueError):
                        formatter.validateExtension(loc)

    def testRegistry(self):
        """Check that formatters can be stored in the registry.
        """
        formatterTypeName = "lsst.daf.butler.tests.deferredFormatter.DeferredFormatter"
        storageClassName = "Image"
        self.factory.registerFormatter(storageClassName, formatterTypeName)
        f = self.factory.getFormatter(storageClassName, self.fileDescriptor, self.dataId)
        self.assertIsFormatter(f)
        self.assertEqual(f.name(), formatterTypeName)
        self.assertIn(formatterTypeName, str(f))
        self.assertIn(self.fileDescriptor.location.path, str(f))

        fcls = self.factory.getFormatterClass(storageClassName)
        self.assertIsFormatter(fcls)
        # Defer the import so that we ensure that the infrastructure loaded
        # it on demand previously
        from lsst.daf.butler.tests.deferredFormatter import DeferredFormatter
        self.assertEqual(type(f), DeferredFormatter)

        with self.assertRaises(TypeError):
            # Requires a constructor parameter
            self.factory.getFormatter(storageClassName)

        with self.assertRaises(KeyError):
            # Nothing has been registered under this name.
            self.factory.getFormatter("Missing", self.fileDescriptor)

        # Check that a bad formatter path fails
        storageClassName = "BadImage"
        self.factory.registerFormatter(storageClassName,
                                       "lsst.daf.butler.tests.deferredFormatter.Unknown")
        with self.assertRaises(ImportError):
            self.factory.getFormatter(storageClassName, self.fileDescriptor, self.dataId)

    def testRegistryWithStorageClass(self):
        """Test that the registry can be given a StorageClass object.
        """
        formatterTypeName = "lsst.daf.butler.formatters.yaml.YamlFormatter"
        storageClassName = "TestClass"
        sc = StorageClass(storageClassName, dict, None)

        datasetType = DatasetType("calexp", self.universe.empty, sc)

        # Store using an instance
        self.factory.registerFormatter(sc, formatterTypeName)

        # Retrieve using the class
        f = self.factory.getFormatter(sc, self.fileDescriptor, self.dataId)
        self.assertIsFormatter(f)
        self.assertEqual(f.fileDescriptor, self.fileDescriptor)

        # Retrieve using the DatasetType
        f2 = self.factory.getFormatter(datasetType, self.fileDescriptor, self.dataId)
        self.assertIsFormatter(f2)
        self.assertEqual(f.name(), f2.name())

        # Class directly
        f2cls = self.factory.getFormatterClass(datasetType)
        self.assertIsFormatter(f2cls)

        # This might defer the import, pytest may have already loaded it
        from lsst.daf.butler.formatters.yaml import YamlFormatter
        self.assertEqual(type(f), YamlFormatter)

        with self.assertRaises(KeyError):
            # Attempt to overwrite using a different value
            self.factory.registerFormatter(storageClassName,
                                           "lsst.daf.butler.formatters.json.JsonFormatter")

    def testRegistryConfig(self):
        """Test registering formatters from a datastore config file and
        looking them up by DatasetRef, with fallback to dimensions and then
        to storage class."""
        configFile = os.path.join(TESTDIR, "config", "basic", "posixDatastore.yaml")
        config = Config(configFile)
        self.factory.registerFormatters(config["datastore", "formatters"],
                                        universe=self.universe)

        # Create a DatasetRef with and without instrument matching the
        # one in the config file.
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        sc = StorageClass("DummySC", dict, None)
        refPviHsc = self.makeDatasetRef("pvi", dimensions, sc,
                                        {"instrument": "DummyHSC", "physical_filter": "v"},
                                        conform=False)
        refPviHscFmt = self.factory.getFormatterClass(refPviHsc)
        self.assertIsFormatter(refPviHscFmt)
        self.assertIn("JsonFormatter", refPviHscFmt.name())

        refPviNotHsc = self.makeDatasetRef("pvi", dimensions, sc,
                                           {"instrument": "DummyNotHSC", "physical_filter": "v"},
                                           conform=False)
        refPviNotHscFmt = self.factory.getFormatterClass(refPviNotHsc)
        self.assertIsFormatter(refPviNotHscFmt)
        self.assertIn("PickleFormatter", refPviNotHscFmt.name())

        # Create a DatasetRef that should fall back to using Dimensions
        refPvixHsc = self.makeDatasetRef("pvix", dimensions, sc,
                                         {"instrument": "DummyHSC", "physical_filter": "v"},
                                         conform=False)
        refPvixNotHscFmt = self.factory.getFormatterClass(refPvixHsc)
        self.assertIsFormatter(refPvixNotHscFmt)
        self.assertIn("PickleFormatter", refPvixNotHscFmt.name())

        # Create a DatasetRef that should fall back to using StorageClass
        dimensionsNoV = DimensionGraph(self.universe, names=("physical_filter", "instrument"))
        refPvixNotHscDims = self.makeDatasetRef("pvix", dimensionsNoV, sc,
                                                {"instrument": "DummyHSC", "physical_filter": "v"},
                                                conform=False)
        refPvixNotHscDims_fmt = self.factory.getFormatterClass(refPvixNotHscDims)
        self.assertIsFormatter(refPvixNotHscDims_fmt)
        self.assertIn("YamlFormatter", refPvixNotHscDims_fmt.name())

        # Check that parameters are stored
        refParam = self.makeDatasetRef("paramtest", dimensions, sc,
                                       {"instrument": "DummyNotHSC", "physical_filter": "v"},
                                       conform=False)
        lookup, refParam_fmt, kwargs = self.factory.getFormatterClassWithMatch(refParam)
        self.assertIn("writeParameters", kwargs)
        expected = {"max": 5, "min": 2, "comment": "Additional commentary", "recipe": "recipe1"}
        self.assertEqual(kwargs["writeParameters"], expected)
        self.assertIn("FormatterTest", refParam_fmt.name())

        f = self.factory.getFormatter(refParam, self.fileDescriptor, self.dataId)
        self.assertEqual(f.writeParameters, expected)

        # Explicit parameters merge with (and override) the configured ones.
        f = self.factory.getFormatter(refParam, self.fileDescriptor, self.dataId,
                                      writeParameters={"min": 22, "extra": 50})
        self.assertEqual(f.writeParameters, {"max": 5, "min": 22, "comment": "Additional commentary",
                                             "extra": 50, "recipe": "recipe1"})
        self.assertIn("recipe1", f.writeRecipes)
        self.assertEqual(f.writeParameters["recipe"], "recipe1")

        with self.assertRaises(ValueError):
            # "new" is not allowed as a write parameter
            self.factory.getFormatter(refParam, self.fileDescriptor, self.dataId,
                                      writeParameters={"new": 1})

        with self.assertRaises(RuntimeError):
            # "mode" is a required recipe parameter
            self.factory.getFormatter(refParam, self.fileDescriptor, self.dataId,
                                      writeRecipes={"recipe3": {"notmode": 1}})