Code example #1
File: test_testRepo.py Project: bsipocz/daf_butler
    def testRegisterMetricsExampleChained(self):
        """Regression test for registerMetricsExample having no effect
        on ChainedDatastore.
        """
        temp = makeTestTempDir(TESTDIR)
        try:
            config = lsst.daf.butler.Config()
            config["datastore", "cls"] = (
                "lsst.daf.butler.datastores.chainedDatastore.ChainedDatastore")
            config["datastore", "datastores"] = [
                {"cls": "lsst.daf.butler.datastores.fileDatastore.FileDatastore"},
            ]

            repo = lsst.daf.butler.Butler.makeRepo(temp, config=config)
            butler = lsst.daf.butler.Butler(repo, run="chainedExample")
            registerMetricsExample(butler)
            addDatasetType(butler, "DummyType", {},
                           "StructuredDataNoComponents")

            data = MetricsExample(summary={})
            # Should not raise
            butler.put(data, "DummyType")
        finally:
            shutil.rmtree(temp, ignore_errors=True)
Code example #2
    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        cls.CAMERA_ID = "NotACam"
        cls.VISIT_ID = 42
        cls.CHIP_ID = 5

        # makeTestRepo called in setUpClass because it's *very* slow
        cls.root = tempfile.mkdtemp()
        cls.repo = butlerTests.makeTestRepo(cls.root, {
            "instrument": [cls.CAMERA_ID],
            "visit": [cls.VISIT_ID],
            "detector": [cls.CHIP_ID],
        })

        # self.task not visible at class level
        task = cls.makeTask()
        connections = task.config.ConnectionsClass(config=task.config)

        butlerTests.addDatasetType(
            cls.repo,
            connections.measurement.name,
            connections.measurement.dimensions,
            connections.measurement.storageClass)
        butlerTests.addDatasetType(
            cls.repo,
            connections.dbInfo.name,
            connections.dbInfo.dimensions,
            connections.dbInfo.storageClass)
Code example #3
    @classmethod
    def setUpClass(cls):
        cls.root = testUtils.makeTestTempDir(
            os.path.abspath(os.path.dirname(__file__)))
        cls.addClassCleanup(testUtils.removeTestTempDir, cls.root)

        # Can't use in-memory datastore because JobReporter creates a
        # new Butler from scratch.
        cls.repo = dafButler.Butler(dafButler.Butler.makeRepo(cls.root),
                                    writeable=True)

        # White-box testing: must use real metrics, and provide datasets of
        # type metricvalue_*_*.
        butlerTests.addDataIdValue(cls.repo, "instrument", "NotACam")
        butlerTests.addDataIdValue(cls.repo, "detector", 101)
        # physical_filter needed for well-behaved visits
        butlerTests.addDataIdValue(cls.repo, "physical_filter",
                                   "k2021", band="k")
        butlerTests.addDataIdValue(cls.repo, "visit", 42)

        # Dependency on verify_metrics, but not on the code for computing
        # these metrics.
        butlerTests.addDatasetType(
            cls.repo,
            "metricvalue_pipe_tasks_CharacterizeImageTime",
            {"instrument", "visit", "detector"},
            "MetricValue")
Code example #4
    @classmethod
    def setUpClass(cls):
        """Create a new butler once only."""

        cls.storageClassFactory = StorageClassFactory()

        cls.root = tempfile.mkdtemp(dir=TESTDIR)

        data_ids = {
            "instrument": [INSTRUMENT_NAME],
            "detector": [0, 1, 2, 3, 4, 5],
            "exposure": [11, 22],
        }

        configURI = ButlerURI("resource://spherex/configs",
                              forceDirectory=True)
        butlerConfig = Config(configURI.join("butler.yaml"))
        # in-memory db is being phased out
        # butlerConfig["registry", "db"] = 'sqlite:///:memory:'
        cls.creatorButler = makeTestRepo(
            cls.root,
            data_ids,
            config=butlerConfig,
            dimensionConfig=configURI.join("dimensions.yaml"))
        for formatter in FORMATTERS:
            datasetTypeName, storageClassName = (formatter["dataset_type"],
                                                 formatter["storage_class"])
            storageClass = cls.storageClassFactory.getStorageClass(
                storageClassName)
            addDatasetType(cls.creatorButler, datasetTypeName, set(data_ids),
                           storageClass)
Code example #5
    def testRunQuantum(self):
        """Test the run quantum method with a gen3 butler.
        """
        root = tempfile.mkdtemp()
        dimensions = {"instrument": ["notACam"],
                      "skymap": ["skyMap"],
                      "tract": [0, 42],
                      }
        testRepo = butlerTests.makeTestRepo(root, dimensions)
        fakesTask = CreateRandomApFakesTask()
        connections = fakesTask.config.ConnectionsClass(
            config=fakesTask.config)
        butlerTests.addDatasetType(
            testRepo,
            connections.skyMap.name,
            connections.skyMap.dimensions,
            connections.skyMap.storageClass)
        butlerTests.addDatasetType(
            testRepo,
            connections.fakeCat.name,
            connections.fakeCat.dimensions,
            connections.fakeCat.storageClass)

        dataId = {"skymap": "skyMap", "tract": 0}
        butler = butlerTests.makeTestCollection(testRepo)
        butler.put(self.simpleMap, "skyMap", {"skymap": "skyMap"})

        quantum = testUtils.makeQuantum(
            fakesTask, butler, dataId,
            {key: dataId for key in {"skyMap", "fakeCat"}})
        run = testUtils.runTestQuantum(fakesTask, butler, quantum, True)
        # Actual input dataset omitted for simplicity
        run.assert_called_once_with(tractId=dataId["tract"], skyMap=self.simpleMap)
        shutil.rmtree(root, ignore_errors=True)
Code example #6
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Repository should be re-created for each test case, but
        # this has a prohibitive run-time cost at present
        cls.root = tempfile.mkdtemp()

        cls.repo = butlerTests.makeTestRepo(cls.root)
        butlerTests.addDataIdValue(cls.repo, "instrument", "notACam")
        butlerTests.addDataIdValue(cls.repo, "visit", 101)
        butlerTests.addDataIdValue(cls.repo, "visit", 102)
        butlerTests.addDataIdValue(cls.repo, "skymap", "sky")
        butlerTests.addDataIdValue(cls.repo, "tract", 42)
        butlerTests.addDataIdValue(cls.repo, "patch", 0)
        butlerTests.addDataIdValue(cls.repo, "patch", 1)
        butlerTests.registerMetricsExample(cls.repo)

        for typeName in {"VisitA", "VisitB", "VisitOutA", "VisitOutB"}:
            butlerTests.addDatasetType(cls.repo, typeName,
                                       {"instrument", "visit"},
                                       "StructuredData")
        for typeName in {"PatchA", "PatchOut"}:
            butlerTests.addDatasetType(cls.repo, typeName,
                                       {"skymap", "tract", "patch"},
                                       "StructuredData")
        butlerTests.addDatasetType(cls.repo, "PatchB", {"skymap", "tract"},
                                   "StructuredData")
        for typeName in {"PixA", "PixOut"}:
            butlerTests.addDatasetType(cls.repo, typeName, {"htm7"},
                                       "StructuredData")
        butlerTests.addDatasetType(cls.repo, "VisitInitIn", set(),
                                   "StructuredData")
Code example #7
File: test_testRepo.py Project: lsst/daf_butler
    def testAddDatasetType(self):
        # 1 for StructuredDataNoComponents, 4 for StructuredData
        self.assertEqual(len(list(self.butler.registry.queryDatasetTypes(components=True))), 5)

        # Testing the DatasetType objects is not practical, because all tests
        # need a DimensionUniverse. So just check that we have the dataset
        # types we expect.
        self.butler.registry.getDatasetType("DataType1")
        self.butler.registry.getDatasetType("DataType2")

        with self.assertRaises(ValueError):
            addDatasetType(self.butler, "DataType3", {"4thDimension"}, "NumpyArray")
        with self.assertRaises(ValueError):
            addDatasetType(self.butler, "DataType3", {"instrument"}, "UnstorableType")
Code example #8
    def testGlobDatasetType(self):
        """Test specifying dataset type."""
        # Create and register an additional DatasetType

        self.testRepo.butler.registry.insertDimensionData("visit",
                                                          {"instrument": "DummyCamComp", "id": 425,
                                                           "name": "fourtwentyfive", "physical_filter": "d-r",
                                                           "visit_system": 1})

        datasetType = addDatasetType(self.testRepo.butler,
                                     "alt_test_metric_comp",
                                     ("instrument", "visit"),
                                     "StructuredCompositeReadComp")

        self.testRepo.addDataset(dataId={"instrument": "DummyCamComp", "visit": 425}, datasetType=datasetType)

        # verify the new dataset type increases the number of tables found:
        tables = self._queryDatasets(repo=self.root)

        expectedTables = (
            AstropyTable(array((
                ("test_metric_comp", "ingest/run", "1", "R", "DummyCamComp", "d-r", "1", "423"),
                ("test_metric_comp", "ingest/run", "2", "R", "DummyCamComp", "d-r", "1", "424"))),
                names=("type", "run", "id", "band", "instrument", "physical_filter", "visit_system", "visit")
            ),
            AstropyTable(array((
                # one-element tuple keeps the single-row array two-dimensional
                ("alt_test_metric_comp", "ingest/run", "3", "R", "DummyCamComp", "d-r", "1", "425"),)),
                names=("type", "run", "id", "band", "instrument", "physical_filter", "visit_system", "visit")
            )
        )

        self.assertAstropyTablesEqual(tables, expectedTables)
Code example #9
    @classmethod
    def setUpClass(cls):
        """Create a new butler once only."""

        cls.storageClassFactory = StorageClassFactory()

        cls.root = tempfile.mkdtemp(dir=TESTDIR)

        dataIds = {
            "instrument": ["DummyCam"],
            "physical_filter": ["d-r"],
            "visit": [42, 43, 44],
        }

        # Ensure that we test in a directory that will include some
        # metacharacters
        subdir = "sub?#dir"
        butlerRoot = os.path.join(cls.root, subdir)

        cls.creatorButler = makeTestRepo(butlerRoot,
                                         dataIds,
                                         config=Config.fromYaml(BUTLER_CONFIG))

        # Create dataset types used by the tests
        for datasetTypeName, storageClassName in (
            ("calexp", "ExposureF"),
            ("unknown", "ExposureCompositeF"),
            ("testCatalog", "SourceCatalog"),
            ("lossless", "ExposureF"),
            ("uncompressed", "ExposureF"),
            ("lossy", "ExposureF"),
        ):
            storageClass = cls.storageClassFactory.getStorageClass(
                storageClassName)
            addDatasetType(cls.creatorButler, datasetTypeName, set(dataIds),
                           storageClass)

        # And some dataset types that have no dimensions for easy testing
        for datasetTypeName, storageClassName in (
            ("ps", "PropertySet"),
            ("pl", "PropertyList"),
            ("pkg", "Packages"),
            ("config", "Config"),
        ):
            storageClass = cls.storageClassFactory.getStorageClass(
                storageClassName)
            addDatasetType(cls.creatorButler, datasetTypeName, {},
                           storageClass)
Code example #10
File: test_testRepo.py Project: bgounon/daf_butler
    @classmethod
    def setUpClass(cls):
        # Repository should be re-created for each test case, but
        # this has a prohibitive run-time cost at present
        cls.root = tempfile.mkdtemp(dir=TESTDIR)

        dataIds = {
            "instrument": ["notACam", "dummyCam"],
            "physical_filter": ["k2020", "l2019"],
            "visit": [101, 102],
            "detector": [5]
        }
        cls.creatorButler = makeTestRepo(cls.root, dataIds)

        registerMetricsExample(cls.creatorButler)
        addDatasetType(cls.creatorButler, "DataType1", {"instrument"},
                       "StructuredDataNoComponents")
        addDatasetType(cls.creatorButler, "DataType2", {"instrument", "visit"},
                       "StructuredData")
Code example #11
File: test_testRepo.py Project: lsst/daf_butler
    @classmethod
    def setUpClass(cls):
        # Repository should be re-created for each test case, but
        # this has a prohibitive run-time cost at present
        cls.root = makeTestTempDir(TESTDIR)

        cls.creatorButler = makeTestRepo(cls.root)
        addDataIdValue(cls.creatorButler, "instrument", "notACam")
        addDataIdValue(cls.creatorButler, "instrument", "dummyCam")
        addDataIdValue(cls.creatorButler, "physical_filter", "k2020", band="k", instrument="notACam")
        addDataIdValue(cls.creatorButler, "physical_filter", "l2019", instrument="dummyCam")
        addDataIdValue(cls.creatorButler, "visit", 101, instrument="notACam", physical_filter="k2020")
        addDataIdValue(cls.creatorButler, "visit", 102, instrument="notACam", physical_filter="k2020")
        addDataIdValue(cls.creatorButler, "detector", 5)
        # Leave skymap/patch/tract undefined so that tests can assume
        # they're missing.

        registerMetricsExample(cls.creatorButler)
        addDatasetType(cls.creatorButler, "DataType1", {"instrument"}, "StructuredDataNoComponents")
        addDatasetType(cls.creatorButler, "DataType2", {"instrument", "visit"}, "StructuredData")
Code example #12
File: test_ingest.py Project: gcmshadow/obs_base
    @classmethod
    def setUpClass(cls):
        """Create a new butler once only."""
        cls.root = tempfile.mkdtemp(dir=TESTDIR)

        dataIds = {
            "instrument": ["DummyCam"],
            "physical_filter": ["d-r"],
            "exposure": [42, 43, 44],
        }

        cls.creatorButler = butlerTests.makeTestRepo(cls.root, dataIds)

        # Create dataset types used by the tests
        cls.storageClassFactory = dafButler.StorageClassFactory()
        for datasetTypeName, storageClassName in (("raw", "ExposureF"), ):
            storageClass = cls.storageClassFactory.getStorageClass(
                storageClassName)
            butlerTests.addDatasetType(cls.creatorButler, datasetTypeName,
                                       {"instrument", "exposure"},
                                       storageClass)
Code example #13
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Repository should be re-created for each test case, but
        # this has a prohibitive run-time cost at present
        cls.root = tempfile.mkdtemp()

        dataIds = {
            "instrument": ["notACam"],
            "physical_filter": ["k2020"],  # needed for expandUniqueId(visit)
            "visit": [101, 102],
            "skymap": ["sky"],
            "tract": [42],
            "patch": [0, 1],
        }
        cls.repo = butlerTests.makeTestRepo(cls.root, dataIds)
        butlerTests.registerMetricsExample(cls.repo)

        for typeName in {"VisitA", "VisitB", "VisitOutA", "VisitOutB"}:
            butlerTests.addDatasetType(cls.repo, typeName, {"instrument", "visit"}, "StructuredData")
        for typeName in {"PatchA", "PatchOut"}:
            butlerTests.addDatasetType(cls.repo, typeName, {"skymap", "tract", "patch"}, "StructuredData")
        butlerTests.addDatasetType(cls.repo, "PatchB", {"skymap", "tract"}, "StructuredData")
        for typeName in {"PixA", "PixOut"}:
            butlerTests.addDatasetType(cls.repo, typeName, {"htm7"}, "StructuredData")
Code example #14
    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        cls.root = tempfile.mkdtemp()
        cls.repo = cls._makeTestRepo(cls.root)

        butlerTests.addDatasetType(cls.repo, "icExp",
                                   {"instrument", "visit", "detector"},
                                   "ExposureF")
        butlerTests.addDatasetType(cls.repo, "icExpBackground",
                                   {"instrument", "visit", "detector"},
                                   "Background")
        butlerTests.addDatasetType(cls.repo, "icSrc",
                                   {"instrument", "visit", "detector"},
                                   "SourceCatalog")
        butlerTests.addDatasetType(cls.repo, "cal_ref_cat", {"htm7"},
                                   "SimpleCatalog")
        butlerTests.addDatasetType(cls.repo, "calexp",
                                   {"instrument", "visit", "detector"},
                                   "ExposureF")
        butlerTests.addDatasetType(cls.repo, "src",
                                   {"instrument", "visit", "detector"},
                                   "SourceCatalog")
        butlerTests.addDatasetType(cls.repo, "calexpBackground",
                                   {"instrument", "visit", "detector"},
                                   "Background")
        butlerTests.addDatasetType(cls.repo, "srcMatch",
                                   {"instrument", "visit", "detector"},
                                   "Catalog")
        butlerTests.addDatasetType(cls.repo, "srcMatchFull",
                                   {"instrument", "visit", "detector"},
                                   "Catalog")
Code example #15
def add_metricvalues(butler, plus):
    """Add Measurements as MetricValue datasets to a pre-configured butler,
    adding ``plus`` to the values that are stored (to allow different repos to
    have different Measurement values).
    """
    dimensions = {"instrument", "visit", "detector"}
    storageClass = "MetricValue"
    dataIds = [{
        "instrument": "TestCam",
        "visit": 12345,
        "detector": 12
    }, {
        "instrument": "TestCam",
        "visit": 54321,
        "detector": 25
    }, {
        "instrument": "TestCam",
        "visit": 54321,
        "detector": 12
    }]
    addDatasetType(butler, "metricvalue_verify_testing", dimensions,
                   storageClass)
    value = Measurement("verify.testing",
                        (12 + plus) * u.dimensionless_unscaled)
    butler.put(value, "metricvalue_verify_testing", dataIds[0], run=collection)
    value = Measurement("verify.testing",
                        (42 + plus) * u.dimensionless_unscaled)
    butler.put(value, "metricvalue_verify_testing", dataIds[1], run=collection)
    value = Measurement("verify.testing",
                        (5 + plus) * u.dimensionless_unscaled)
    butler.put(value, "metricvalue_verify_testing", dataIds[2], run=collection)

    addDatasetType(butler, "metricvalue_verify_other", dimensions,
                   storageClass)
    value = Measurement("verify.other", (7 + plus) * u.ct)
    butler.put(value, "metricvalue_verify_other", dataIds[0], run=collection)
    value = Measurement("verify.other", (8 + plus) * u.ct)
    butler.put(value, "metricvalue_verify_other", dataIds[1], run=collection)

    addDatasetType(butler, "metricvalue_verify_another", dimensions,
                   storageClass)
    value = Measurement("verify.another", (3 + plus) * u.mas)
    butler.put(value, "metricvalue_verify_another", dataIds[0], run=collection)

    addDatasetType(butler, "metricvalue_verify_testingTime", dimensions,
                   storageClass)
    value = Measurement("verify.testingTime", (18 + plus) * u.second)
    butler.put(value,
               "metricvalue_verify_testingTime",
               dataIds[0],
               run=collection)
    value = Measurement("verify.testingTime", (19 + plus) * u.second)
    butler.put(value,
               "metricvalue_verify_testingTime",
               dataIds[1],
               run=collection)

    addDatasetType(butler, "metricvalue_verify_anotherTime", dimensions,
                   storageClass)
    value = Measurement("verify.anotherTime", (100 + plus) * u.ms)
    butler.put(value,
               "metricvalue_verify_anotherTime",
               dataIds[0],
               run=collection)
    value = Measurement("verify.anotherTime", (200 + plus) * u.ms)
    butler.put(value,
               "metricvalue_verify_anotherTime",
               dataIds[1],
               run=collection)

    addDatasetType(butler, "metricvalue_verify_testingMemory", dimensions,
                   storageClass)
    value = Measurement("verify.testingMemory", (100 + plus) * u.Mbyte)
    butler.put(value,
               "metricvalue_verify_testingMemory",
               dataIds[0],
               run=collection)
    value = Measurement("verify.testingMemory", (200 + plus) * u.Mbyte)
    butler.put(value,
               "metricvalue_verify_testingMemory",
               dataIds[1],
               run=collection)

    addDatasetType(butler, "metricvalue_verify_anotherTaskMemory", dimensions,
                   storageClass)
    value = Measurement("verify.anotherTaskMemory", (5 + plus) * u.Gbyte)
    butler.put(value,
               "metricvalue_verify_anotherTaskMemory",
               dataIds[0],
               run=collection)
    value = Measurement("verify.anotherTaskMemory", (6 + plus) * u.Gbyte)
    butler.put(value,
               "metricvalue_verify_anotherTaskMemory",
               dataIds[1],
               run=collection)
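A minimal usage sketch (not part of the original test module; the roots, data ID values, and setup steps here are illustrative assumptions): the helper expects a repository whose data IDs cover the TestCam visits and detectors it writes to, and a registered run named by ``collection``. Calling it on two independent repositories with different offsets gives each repo its own Measurement values, as the docstring describes:

    import lsst.daf.butler.tests as butlerTests

    data_ids = {"instrument": ["TestCam"], "visit": [12345, 54321], "detector": [12, 25]}
    for root, offset in ((root_a, 0), (root_b, 100)):  # root_a/root_b: temporary directories (assumed)
        repo = butlerTests.makeTestRepo(root, data_ids)
        repo.registry.registerRun(collection)  # make the helper's target run exist
        add_metricvalues(repo, offset)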
Code example #16
    def testRunQuantum(self):
        """Test the run quantum method with a gen3 butler.
        """
        root = tempfile.mkdtemp()
        dimensions = {
            "instrument": ["notACam"],
            "skymap": ["deepCoadd_skyMap"],
            "tract": [0, 42],
            "visit": [1234, 4321],
            "detector": [25, 26]
        }
        testRepo = butlerTests.makeTestRepo(root, dimensions)
        matchTask = MatchApFakesTask()
        connections = matchTask.config.ConnectionsClass(
            config=matchTask.config)

        dataId = {
            "instrument": "notACam",
            "skymap": "deepCoadd_skyMap",
            "tract": 0,
            "visit": 1234,
            "detector": 25
        }
        butlerTests.addDatasetType(testRepo, connections.fakeCat.name,
                                   connections.fakeCat.dimensions,
                                   connections.fakeCat.storageClass)
        butlerTests.addDatasetType(testRepo, connections.diffIm.name,
                                   connections.diffIm.dimensions,
                                   connections.diffIm.storageClass)
        butlerTests.addDatasetType(
            testRepo, connections.associatedDiaSources.name,
            connections.associatedDiaSources.dimensions,
            connections.associatedDiaSources.storageClass)
        butlerTests.addDatasetType(testRepo,
                                   connections.matchedDiaSources.name,
                                   connections.matchedDiaSources.dimensions,
                                   connections.matchedDiaSources.storageClass)
        butler = butlerTests.makeTestCollection(testRepo)

        butler.put(self.fakeCat, connections.fakeCat.name, {
            "tract": dataId["tract"],
            "skymap": dataId["skymap"]
        })
        butler.put(
            self.exposure, connections.diffIm.name, {
                "instrument": dataId["instrument"],
                "visit": dataId["visit"],
                "detector": dataId["detector"]
            })
        butler.put(
            self.sourceCat, connections.associatedDiaSources.name, {
                "instrument": dataId["instrument"],
                "visit": dataId["visit"],
                "detector": dataId["detector"]
            })

        quantum = testUtils.makeQuantum(
            matchTask, butler, dataId, {
                key: dataId
                for key in {
                    "fakeCat", "diffIm", "associatedDiaSources",
                    "matchedDiaSources"
                }
            })
        run = testUtils.runTestQuantum(matchTask, butler, quantum)
        # Actual input dataset omitted for simplicity
        run.assert_called_once()
        shutil.rmtree(root, ignore_errors=True)
Code example #17
File: test_testPipeline.py Project: lsst/ap_verify
    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        repoDir = tempfile.mkdtemp()
        cls.addClassCleanup(shutil.rmtree, repoDir, ignore_errors=True)
        cls.repo = butlerTests.makeTestRepo(repoDir)

        INSTRUMENT = "notACam"
        VISIT = 42
        CCD = 101
        HTM = 42
        SKYMAP = "TreasureMap"
        TRACT = 28
        PATCH = 4
        BAND = 'k'
        PHYSICAL = 'k2022'
        SUB_FILTER = 9
        # Mock instrument by hand, because some tasks care about parameters
        instrumentRecord = cls.repo.registry.dimensions["instrument"].RecordClass(
            name=INSTRUMENT, visit_max=256, exposure_max=256, detector_max=128)
        cls.repo.registry.syncDimensionData("instrument", instrumentRecord)
        butlerTests.addDataIdValue(cls.repo,
                                   "physical_filter",
                                   PHYSICAL,
                                   band=BAND)
        butlerTests.addDataIdValue(cls.repo, "subfilter", SUB_FILTER)
        butlerTests.addDataIdValue(cls.repo, "exposure", VISIT)
        butlerTests.addDataIdValue(cls.repo, "visit", VISIT)
        butlerTests.addDataIdValue(cls.repo, "detector", CCD)
        butlerTests.addDataIdValue(cls.repo, "skymap", SKYMAP)
        butlerTests.addDataIdValue(cls.repo, "tract", TRACT)
        butlerTests.addDataIdValue(cls.repo, "patch", PATCH)

        cls.exposureId = cls.repo.registry.expandDataId({
            "instrument": INSTRUMENT,
            "exposure": VISIT,
            "detector": CCD
        })
        cls.visitId = cls.repo.registry.expandDataId({
            "instrument": INSTRUMENT,
            "visit": VISIT,
            "detector": CCD
        })
        cls.visitOnlyId = cls.repo.registry.expandDataId({
            "instrument": INSTRUMENT,
            "visit": VISIT
        })
        cls.skymapId = cls.repo.registry.expandDataId({"skymap": SKYMAP})
        cls.skymapVisitId = cls.repo.registry.expandDataId({
            "instrument": INSTRUMENT,
            "visit": VISIT,
            "detector": CCD,
            "skymap": SKYMAP
        })
        cls.patchId = cls.repo.registry.expandDataId({
            "skymap": SKYMAP,
            "tract": TRACT,
            "patch": PATCH,
            "band": BAND
        })
        cls.subfilterId = cls.repo.registry.expandDataId({
            "skymap": SKYMAP,
            "tract": TRACT,
            "patch": PATCH,
            "band": BAND,
            "subfilter": SUB_FILTER,
        })
        cls.htmId = cls.repo.registry.expandDataId({"htm7": HTM})

        butlerTests.addDatasetType(cls.repo, "postISRCCD",
                                   cls.exposureId.keys(), "Exposure")
        butlerTests.addDatasetType(cls.repo, "icExp", cls.visitId.keys(),
                                   "ExposureF")
        butlerTests.addDatasetType(cls.repo, "icSrc", cls.visitId.keys(),
                                   "SourceCatalog")
        butlerTests.addDatasetType(cls.repo, "icExpBackground",
                                   cls.visitId.keys(), "Background")
        butlerTests.addDatasetType(cls.repo, "gaia_dr2_20200414",
                                   cls.htmId.keys(), "SimpleCatalog")
        butlerTests.addDatasetType(cls.repo, "ps1_pv3_3pi_20170110",
                                   cls.htmId.keys(), "SimpleCatalog")
        butlerTests.addDatasetType(cls.repo, "calexp", cls.visitId.keys(),
                                   "ExposureF")
        butlerTests.addDatasetType(cls.repo, "src", cls.visitId.keys(),
                                   "SourceCatalog")
        butlerTests.addDatasetType(cls.repo, "calexpBackground",
                                   cls.visitId.keys(), "Background")
        butlerTests.addDatasetType(cls.repo, "srcMatch", cls.visitId.keys(),
                                   "Catalog")
        butlerTests.addDatasetType(cls.repo, "srcMatchFull",
                                   cls.visitId.keys(), "Catalog")
        butlerTests.addDatasetType(
            cls.repo, lsst.skymap.BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
            cls.skymapId.keys(), "SkyMap")
        butlerTests.addDatasetType(cls.repo, "goodSeeingCoadd",
                                   cls.patchId.keys(), "ExposureF")
        butlerTests.addDatasetType(cls.repo, "deepDiff_differenceExp",
                                   cls.visitId.keys(), "ExposureF")
        butlerTests.addDatasetType(cls.repo, "deepDiff_differenceTempExp",
                                   cls.visitId.keys(), "ExposureF")
        butlerTests.addDatasetType(cls.repo, "deepDiff_templateExp",
                                   cls.visitId.keys(), "ExposureF")
        butlerTests.addDatasetType(cls.repo, "goodSeeingDiff_templateExp",
                                   cls.visitId.keys(), "ExposureF")
        butlerTests.addDatasetType(cls.repo, "deepDiff_matchedExp",
                                   cls.visitId.keys(), "ExposureF")
        butlerTests.addDatasetType(cls.repo, "deepDiff_diaSrc",
                                   cls.visitId.keys(), "SourceCatalog")
        butlerTests.addDatasetType(cls.repo, "deepDiff_diaSrcTable",
                                   cls.visitId.keys(), "DataFrame")
        butlerTests.addDatasetType(cls.repo, "visitSsObjects",
                                   cls.visitOnlyId.keys(), "DataFrame")
        butlerTests.addDatasetType(cls.repo, "apdb_marker", cls.visitId.keys(),
                                   "Config")
        butlerTests.addDatasetType(cls.repo, "deepDiff_associDiaSrc",
                                   cls.visitId.keys(), "DataFrame")