def testButlerKwargs(self):
    """Check that extra keyword arguments are forwarded to Butler.makeRepo.

    ``outfile`` has the most obvious effects of any ``Butler.makeRepo``
    keyword, so it is used as the probe: the repository config must be
    written to the requested path.
    """
    # TemporaryDirectory guarantees cleanup even if an assertion fails,
    # replacing the original try/finally + shutil.rmtree.
    with tempfile.TemporaryDirectory(dir=TESTDIR) as temp:
        path = os.path.join(temp, 'oddConfig.py')
        makeTestRepo(self.root, {}, outfile=path, createRegistry=False)
        self.assertTrue(os.path.isfile(path))
def setUpClass(cls):
    """Create one shared repository for the whole class.

    ``makeTestRepo`` is called here rather than in ``setUp`` because it
    is *very* slow.
    """
    super().setUpClass()
    cls.CAMERA_ID = "NotACam"
    cls.VISIT_ID = 42
    cls.CHIP_ID = 5

    cls.root = tempfile.mkdtemp()
    dimensionValues = {
        "instrument": [cls.CAMERA_ID],
        "visit": [cls.VISIT_ID],
        "detector": [cls.CHIP_ID],
    }
    cls.repo = butlerTests.makeTestRepo(cls.root, dimensionValues)

    # self.task is not visible at class level, so build a throwaway task
    # just to read its connections.
    stubTask = cls.makeTask()
    taskConnections = stubTask.config.ConnectionsClass(config=stubTask.config)
    for connection in (taskConnections.measurement, taskConnections.dbInfo):
        butlerTests.addDatasetType(
            cls.repo,
            connection.name,
            connection.dimensions,
            connection.storageClass,
        )
def makeSimpleButler(root: str, run: str = "test", inMemory: bool = True) -> Butler:
    """Create new data butler instance.

    Parameters
    ----------
    root : `str`
        Path or URI to the root location of the new repository.
    run : `str`, optional
        Run collection name.
    inMemory : `bool`, optional
        If true make in-memory repository.

    Returns
    -------
    butler : `~lsst.daf.butler.Butler`
        Data butler instance.
    """
    repoRoot = ResourcePath(root, forceDirectory=True)
    if not repoRoot.isLocal:
        raise ValueError(f"Only works with local root not {repoRoot}")

    butlerConfig = Config()
    if not inMemory:
        # A file-backed registry/datastore instead of the in-memory default.
        butlerConfig["registry", "db"] = (
            f"sqlite:///{repoRoot.ospath}/gen3.sqlite"
        )
        butlerConfig["datastore", "cls"] = (
            "lsst.daf.butler.datastores.fileDatastore.FileDatastore"
        )

    testRepo = butlerTests.makeTestRepo(str(repoRoot), {}, config=butlerConfig)
    return Butler(butler=testRepo, run=run)
def testRunQuantum(self):
    """Test the run quantum method with a gen3 butler."""
    root = tempfile.mkdtemp()
    try:
        dimensions = {"instrument": ["notACam"],
                      "skymap": ["skyMap"],
                      "tract": [0, 42],
                      }
        testRepo = butlerTests.makeTestRepo(root, dimensions)
        fakesTask = CreateRandomApFakesTask()
        connections = fakesTask.config.ConnectionsClass(
            config=fakesTask.config)
        butlerTests.addDatasetType(
            testRepo,
            connections.skyMap.name,
            connections.skyMap.dimensions,
            connections.skyMap.storageClass)
        butlerTests.addDatasetType(
            testRepo,
            connections.fakeCat.name,
            connections.fakeCat.dimensions,
            connections.fakeCat.storageClass)

        dataId = {"skymap": "skyMap", "tract": 0}
        butler = butlerTests.makeTestCollection(testRepo)
        butler.put(self.simpleMap, "skyMap", {"skymap": "skyMap"})

        quantum = testUtils.makeQuantum(
            fakesTask, butler, dataId,
            {key: dataId for key in {"skyMap", "fakeCat"}})
        run = testUtils.runTestQuantum(fakesTask, butler, quantum, True)
        # Actual input dataset omitted for simplicity
        run.assert_called_once_with(tractId=dataId["tract"],
                                    skyMap=self.simpleMap)
    finally:
        # Clean up even when an assertion fails; the original only
        # removed the directory on success, leaking it otherwise.
        shutil.rmtree(root, ignore_errors=True)
def setUpClass(cls):
    """Create a new butler once only."""
    cls.storageClassFactory = StorageClassFactory()
    cls.root = tempfile.mkdtemp(dir=TESTDIR)
    dimensionValues = {
        "instrument": [INSTRUMENT_NAME],
        "detector": [0, 1, 2, 3, 4, 5],
        "exposure": [11, 22],
    }
    configDir = ButlerURI("resource://spherex/configs", forceDirectory=True)
    repoConfig = Config(configDir.join("butler.yaml"))
    # in-memory db is being phased out
    # butlerConfig["registry", "db"] = 'sqlite:///:memory:'
    cls.creatorButler = makeTestRepo(
        cls.root,
        dimensionValues,
        config=repoConfig,
        dimensionConfig=configDir.join("dimensions.yaml"))

    # Register one dataset type per configured formatter.
    for formatterSpec in FORMATTERS:
        storageClass = cls.storageClassFactory.getStorageClass(
            formatterSpec["storage_class"])
        addDatasetType(cls.creatorButler,
                       formatterSpec["dataset_type"],
                       set(dimensionValues),
                       storageClass)
def setUpClass(cls):
    """Create a shared repository with all data ID values and dataset types."""
    super().setUpClass()
    # Repository should be re-created for each test case, but
    # this has a prohibitive run-time cost at present
    cls.root = tempfile.mkdtemp()
    cls.repo = butlerTests.makeTestRepo(cls.root)

    # Table-driven registration of the dimension values used by tests.
    for dimension, value in [("instrument", "notACam"),
                             ("visit", 101),
                             ("visit", 102),
                             ("skymap", "sky"),
                             ("tract", 42),
                             ("patch", 0),
                             ("patch", 1)]:
        butlerTests.addDataIdValue(cls.repo, dimension, value)
    butlerTests.registerMetricsExample(cls.repo)

    for typeName in {"VisitA", "VisitB", "VisitOutA", "VisitOutB"}:
        butlerTests.addDatasetType(cls.repo, typeName,
                                   {"instrument", "visit"},
                                   "StructuredData")
    for typeName in {"PatchA", "PatchOut"}:
        butlerTests.addDatasetType(cls.repo, typeName,
                                   {"skymap", "tract", "patch"},
                                   "StructuredData")
    butlerTests.addDatasetType(cls.repo, "PatchB", {"skymap", "tract"},
                               "StructuredData")
    for typeName in {"PixA", "PixOut"}:
        butlerTests.addDatasetType(cls.repo, typeName, {"htm7"},
                                   "StructuredData")
    butlerTests.addDatasetType(cls.repo, "VisitInitIn", set(),
                               "StructuredData")
def setUpClass(cls):
    """Create a new butler once only."""
    cls.storageClassFactory = StorageClassFactory()
    cls.root = tempfile.mkdtemp(dir=TESTDIR)
    dataIds = {
        "instrument": ["DummyCam"],
        "physical_filter": ["d-r"],
        "visit": [42, 43, 44],
    }

    # Ensure that we test in a directory that will include some
    # metacharacters
    subdir = "sub?#dir"
    butlerRoot = os.path.join(cls.root, subdir)

    cls.creatorButler = makeTestRepo(butlerRoot, dataIds,
                                     config=Config.fromYaml(BUTLER_CONFIG))

    # Create dataset types used by the tests
    for datasetTypeName, storageClassName in (
            ("calexp", "ExposureF"),
            ("unknown", "ExposureCompositeF"),
            ("testCatalog", "SourceCatalog"),
            ("lossless", "ExposureF"),
            ("uncompressed", "ExposureF"),
            ("lossy", "ExposureF"),
    ):
        storageClass = cls.storageClassFactory.getStorageClass(
            storageClassName)
        addDatasetType(cls.creatorButler, datasetTypeName, set(dataIds),
                       storageClass)

    # And some dataset types that have no dimensions for easy testing
    for datasetTypeName, storageClassName in (
            ("ps", "PropertySet"),
            ("pl", "PropertyList"),
            ("pkg", "Packages"),
            ("config", "Config"),
    ):
        storageClass = cls.storageClassFactory.getStorageClass(
            storageClassName)
        # Pass an empty *set* of dimensions; the original passed ``{}``,
        # which is an empty dict (works only because iterating an empty
        # dict also yields nothing).
        addDatasetType(cls.creatorButler, datasetTypeName, set(),
                       storageClass)
def setUpClass(cls):
    """Build the shared test repository and its dataset types."""
    # Repository should be re-created for each test case, but
    # this has a prohibitive run-time cost at present
    cls.root = tempfile.mkdtemp(dir=TESTDIR)
    dimensionValues = {
        "instrument": ["notACam", "dummyCam"],
        "physical_filter": ["k2020", "l2019"],
        "visit": [101, 102],
        "detector": [5],
    }
    cls.creatorButler = makeTestRepo(cls.root, dimensionValues)
    registerMetricsExample(cls.creatorButler)

    # Register the two dataset types exercised by the tests.
    for typeName, dimensions, storageClass in [
            ("DataType1", {"instrument"}, "StructuredDataNoComponents"),
            ("DataType2", {"instrument", "visit"}, "StructuredData"),
    ]:
        addDatasetType(cls.creatorButler, typeName, dimensions, storageClass)
def setUpClass(cls):
    """Create the shared repository, defining data ID values explicitly."""
    # Repository should be re-created for each test case, but
    # this has a prohibitive run-time cost at present
    cls.root = makeTestTempDir(TESTDIR)
    cls.creatorButler = makeTestRepo(cls.root)
    repo = cls.creatorButler

    addDataIdValue(repo, "instrument", "notACam")
    addDataIdValue(repo, "instrument", "dummyCam")
    addDataIdValue(repo, "physical_filter", "k2020",
                   band="k", instrument="notACam")
    addDataIdValue(repo, "physical_filter", "l2019", instrument="dummyCam")
    for visitId in (101, 102):
        addDataIdValue(repo, "visit", visitId,
                       instrument="notACam", physical_filter="k2020")
    addDataIdValue(repo, "detector", 5)
    # Leave skymap/patch/tract undefined so that tests can assume
    # they're missing.

    registerMetricsExample(repo)
    addDatasetType(repo, "DataType1", {"instrument"},
                   "StructuredDataNoComponents")
    addDatasetType(repo, "DataType2", {"instrument", "visit"},
                   "StructuredData")
def setUpClass(cls):
    """Create a new butler once only."""
    cls.root = tempfile.mkdtemp(dir=TESTDIR)
    dimensionValues = {
        "instrument": ["DummyCam"],
        "physical_filter": ["d-r"],
        "exposure": [42, 43, 44],
    }
    cls.creatorButler = butlerTests.makeTestRepo(cls.root, dimensionValues)

    # Create dataset types used by the tests.  Only "raw" is needed; the
    # original iterated over a one-element table, unrolled here.
    cls.storageClassFactory = dafButler.StorageClassFactory()
    rawStorageClass = cls.storageClassFactory.getStorageClass("ExposureF")
    butlerTests.addDatasetType(cls.creatorButler, "raw",
                               {"instrument", "exposure"},
                               rawStorageClass)
def setUpClass(cls):
    """Make one repository shared by all tests (creation is slow)."""
    super().setUpClass()
    # Repository should be re-created for each test case, but
    # this has a prohibitive run-time cost at present
    cls.root = tempfile.mkdtemp()
    dimensionValues = {
        "instrument": ["notACam"],
        "physical_filter": ["k2020"],  # needed for expandUniqueId(visit)
        "visit": [101, 102],
        "skymap": ["sky"],
        "tract": [42],
        "patch": [0, 1],
    }
    cls.repo = butlerTests.makeTestRepo(cls.root, dimensionValues)
    butlerTests.registerMetricsExample(cls.repo)

    # Map each dataset type to its dimensions; all use StructuredData.
    datasetDimensions = {
        "VisitA": {"instrument", "visit"},
        "VisitB": {"instrument", "visit"},
        "VisitOutA": {"instrument", "visit"},
        "VisitOutB": {"instrument", "visit"},
        "PatchA": {"skymap", "tract", "patch"},
        "PatchOut": {"skymap", "tract", "patch"},
        "PatchB": {"skymap", "tract"},
        "PixA": {"htm7"},
        "PixOut": {"htm7"},
    }
    for typeName, dimensions in datasetDimensions.items():
        butlerTests.addDatasetType(cls.repo, typeName, dimensions,
                                   "StructuredData")
def setUpClass(cls):
    """Create a shared test repository, dimension records, pre-expanded
    data IDs, and all dataset types used by the tests.

    Everything is built once per class; the directory is removed by
    ``addClassCleanup``.
    """
    super().setUpClass()
    repoDir = tempfile.mkdtemp()
    cls.addClassCleanup(shutil.rmtree, repoDir, ignore_errors=True)
    cls.repo = butlerTests.makeTestRepo(repoDir)

    # Arbitrary values used to populate the test dimension records.
    INSTRUMENT = "notACam"
    VISIT = 42
    CCD = 101
    HTM = 42
    SKYMAP = "TreasureMap"
    TRACT = 28
    PATCH = 4
    BAND = 'k'
    PHYSICAL = 'k2022'
    SUB_FILTER = 9

    # Mock instrument by hand, because some tasks care about parameters
    instrumentRecord = cls.repo.registry.dimensions[
        "instrument"].RecordClass(name=INSTRUMENT, visit_max=256,
                                  exposure_max=256, detector_max=128)
    cls.repo.registry.syncDimensionData("instrument", instrumentRecord)

    # Register one value for every other dimension the tests touch.
    butlerTests.addDataIdValue(cls.repo, "physical_filter", PHYSICAL,
                               band=BAND)
    butlerTests.addDataIdValue(cls.repo, "subfilter", SUB_FILTER)
    butlerTests.addDataIdValue(cls.repo, "exposure", VISIT)
    butlerTests.addDataIdValue(cls.repo, "visit", VISIT)
    butlerTests.addDataIdValue(cls.repo, "detector", CCD)
    butlerTests.addDataIdValue(cls.repo, "skymap", SKYMAP)
    butlerTests.addDataIdValue(cls.repo, "tract", TRACT)
    butlerTests.addDataIdValue(cls.repo, "patch", PATCH)

    # Pre-expanded data IDs reused by the individual tests.
    cls.exposureId = cls.repo.registry.expandDataId({
        "instrument": INSTRUMENT,
        "exposure": VISIT,
        "detector": CCD
    })
    cls.visitId = cls.repo.registry.expandDataId({
        "instrument": INSTRUMENT,
        "visit": VISIT,
        "detector": CCD
    })
    cls.visitOnlyId = cls.repo.registry.expandDataId({
        "instrument": INSTRUMENT,
        "visit": VISIT
    })
    cls.skymapId = cls.repo.registry.expandDataId({"skymap": SKYMAP})
    cls.skymapVisitId = cls.repo.registry.expandDataId({
        "instrument": INSTRUMENT,
        "visit": VISIT,
        "detector": CCD,
        "skymap": SKYMAP
    })
    cls.patchId = cls.repo.registry.expandDataId({
        "skymap": SKYMAP,
        "tract": TRACT,
        "patch": PATCH,
        "band": BAND
    })
    cls.subfilterId = cls.repo.registry.expandDataId({
        "skymap": SKYMAP,
        "tract": TRACT,
        "patch": PATCH,
        "band": BAND,
        "subfilter": SUB_FILTER
    })
    cls.htmId = cls.repo.registry.expandDataId({"htm7": HTM})

    # Dataset types consumed/produced by the tasks under test, keyed by
    # the data-ID dimensions they use.
    butlerTests.addDatasetType(cls.repo, "postISRCCD",
                               cls.exposureId.keys(), "Exposure")
    butlerTests.addDatasetType(cls.repo, "icExp", cls.visitId.keys(),
                               "ExposureF")
    butlerTests.addDatasetType(cls.repo, "icSrc", cls.visitId.keys(),
                               "SourceCatalog")
    butlerTests.addDatasetType(cls.repo, "icExpBackground",
                               cls.visitId.keys(), "Background")
    butlerTests.addDatasetType(cls.repo, "gaia_dr2_20200414",
                               cls.htmId.keys(), "SimpleCatalog")
    butlerTests.addDatasetType(cls.repo, "ps1_pv3_3pi_20170110",
                               cls.htmId.keys(), "SimpleCatalog")
    butlerTests.addDatasetType(cls.repo, "calexp", cls.visitId.keys(),
                               "ExposureF")
    butlerTests.addDatasetType(cls.repo, "src", cls.visitId.keys(),
                               "SourceCatalog")
    butlerTests.addDatasetType(cls.repo, "calexpBackground",
                               cls.visitId.keys(), "Background")
    butlerTests.addDatasetType(cls.repo, "srcMatch", cls.visitId.keys(),
                               "Catalog")
    butlerTests.addDatasetType(cls.repo, "srcMatchFull",
                               cls.visitId.keys(), "Catalog")
    butlerTests.addDatasetType(
        cls.repo, lsst.skymap.BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        cls.skymapId.keys(), "SkyMap")
    butlerTests.addDatasetType(cls.repo, "goodSeeingCoadd",
                               cls.patchId.keys(), "ExposureF")
    butlerTests.addDatasetType(cls.repo, "deepDiff_differenceExp",
                               cls.visitId.keys(), "ExposureF")
    butlerTests.addDatasetType(cls.repo, "deepDiff_differenceTempExp",
                               cls.visitId.keys(), "ExposureF")
    butlerTests.addDatasetType(cls.repo, "deepDiff_templateExp",
                               cls.visitId.keys(), "ExposureF")
    butlerTests.addDatasetType(cls.repo, "goodSeeingDiff_templateExp",
                               cls.visitId.keys(), "ExposureF")
    butlerTests.addDatasetType(cls.repo, "deepDiff_matchedExp",
                               cls.visitId.keys(), "ExposureF")
    butlerTests.addDatasetType(cls.repo, "deepDiff_diaSrc",
                               cls.visitId.keys(), "SourceCatalog")
    butlerTests.addDatasetType(cls.repo, "deepDiff_diaSrcTable",
                               cls.visitId.keys(), "DataFrame")
    butlerTests.addDatasetType(cls.repo, "visitSsObjects",
                               cls.visitOnlyId.keys(), "DataFrame")
    butlerTests.addDatasetType(cls.repo, "apdb_marker",
                               cls.visitId.keys(), "Config")
    butlerTests.addDatasetType(cls.repo, "deepDiff_associDiaSrc",
                               cls.visitId.keys(), "DataFrame")
def testButlerKwargs(self):
    """Verify that keyword arguments reach Butler.makeRepo.

    ``outfile`` has the most obvious effects of any Butler.makeRepo
    keyword, so it serves as the probe.
    """
    with safeTestTempDir(TESTDIR) as tempDir:
        configPath = os.path.join(tempDir, 'oddConfig.json')
        makeTestRepo(tempDir, {}, outfile=configPath)
        self.assertTrue(os.path.isfile(configPath))
def testRunQuantum(self):
    """Test the run quantum method with a gen3 butler."""
    root = tempfile.mkdtemp()
    try:
        dimensions = {
            "instrument": ["notACam"],
            "skymap": ["deepCoadd_skyMap"],
            "tract": [0, 42],
            "visit": [1234, 4321],
            "detector": [25, 26]
        }
        testRepo = butlerTests.makeTestRepo(root, dimensions)
        matchTask = MatchApFakesTask()
        connections = matchTask.config.ConnectionsClass(
            config=matchTask.config)
        dataId = {
            "instrument": "notACam",
            "skymap": "deepCoadd_skyMap",
            "tract": 0,
            "visit": 1234,
            "detector": 25
        }

        # Register the task's input and output dataset types.
        butlerTests.addDatasetType(testRepo,
                                   connections.fakeCat.name,
                                   connections.fakeCat.dimensions,
                                   connections.fakeCat.storageClass)
        butlerTests.addDatasetType(testRepo,
                                   connections.diffIm.name,
                                   connections.diffIm.dimensions,
                                   connections.diffIm.storageClass)
        butlerTests.addDatasetType(
            testRepo,
            connections.associatedDiaSources.name,
            connections.associatedDiaSources.dimensions,
            connections.associatedDiaSources.storageClass)
        butlerTests.addDatasetType(testRepo,
                                   connections.matchedDiaSources.name,
                                   connections.matchedDiaSources.dimensions,
                                   connections.matchedDiaSources.storageClass)

        butler = butlerTests.makeTestCollection(testRepo)
        butler.put(self.fakeCat, connections.fakeCat.name, {
            "tract": dataId["tract"],
            "skymap": dataId["skymap"]
        })
        butler.put(
            self.exposure, connections.diffIm.name, {
                "instrument": dataId["instrument"],
                "visit": dataId["visit"],
                "detector": dataId["detector"]
            })
        butler.put(
            self.sourceCat, connections.associatedDiaSources.name, {
                "instrument": dataId["instrument"],
                "visit": dataId["visit"],
                "detector": dataId["detector"]
            })

        quantum = testUtils.makeQuantum(
            matchTask, butler, dataId, {
                key: dataId
                for key in {
                    "fakeCat", "diffIm", "associatedDiaSources",
                    "matchedDiaSources"
                }
            })
        run = testUtils.runTestQuantum(matchTask, butler, quantum)
        # Actual input dataset omitted for simplicity
        run.assert_called_once()
    finally:
        # Clean up even when an assertion fails; the original only
        # removed the directory on success, leaking it otherwise.
        shutil.rmtree(root, ignore_errors=True)
def makeSimpleQGraph(nQuanta=5, pipeline=None, butler=None, root=None,
                     skipExisting=False, inMemory=True, userQuery=""):
    """Make simple QuantumGraph for tests.

    Makes simple one-task pipeline with AddTask, sets up in-memory
    registry and butler, fills them with minimal data, and generates
    QuantumGraph with all of that.

    Parameters
    ----------
    nQuanta : `int`
        Number of quanta in a graph.
    pipeline : `~lsst.pipe.base.Pipeline`
        If `None` then one-task pipeline is made with `AddTask` and
        default `AddTaskConfig`.
    butler : `~lsst.daf.butler.Butler`, optional
        Data butler instance, this should be an instance returned from a
        previous call to this method.
    root : `str`
        Path or URI to the root location of the new repository. Only used
        if ``butler`` is None.
    skipExisting : `bool`, optional
        If `True`, a Quantum is not created if all its outputs already
        exist. Default is `False`.
    inMemory : `bool`, optional
        If true make in-memory repository.
    userQuery : `str`, optional
        The user query to pass to ``makeGraph``, by default an empty
        string.

    Returns
    -------
    butler : `~lsst.daf.butler.Butler`
        Butler instance
    qgraph : `~lsst.pipe.base.QuantumGraph`
        Quantum graph instance
    """
    if pipeline is None:
        pipeline = makeSimplePipeline(nQuanta=nQuanta)

    if butler is None:
        if root is None:
            raise ValueError("Must provide `root` when `butler` is None")

        config = Config()
        if not inMemory:
            # File-backed registry and datastore instead of in-memory.
            config["registry", "db"] = f"sqlite:///{root}/gen3.sqlite"
            config[
                "datastore", "cls"
            ] = "lsst.daf.butler.datastores.posixDatastore.PosixDatastore"
        repo = butlerTests.makeTestRepo(root, {}, config=config)
        collection = "test"
        butler = Butler(butler=repo, run=collection)

        # Add dataset types to registry
        registerDatasetTypes(butler.registry, pipeline.toExpandedPipeline())

        instrument = pipeline.getInstrument()
        if instrument is not None:
            # ``instrument`` may be given as a dotted class name.
            if isinstance(instrument, str):
                instrument = doImport(instrument)
            instrumentName = instrument.getName()
        else:
            instrumentName = "INSTR"

        # Add all needed dimensions to registry
        butler.registry.insertDimensionData("instrument",
                                            dict(name=instrumentName))
        butler.registry.insertDimensionData(
            "detector", dict(instrument=instrumentName, id=0,
                             full_name="det0"))

        # Add inputs to butler
        data = numpy.array([0., 1., 2., 5.])
        butler.put(data, "add_dataset0", instrument=instrumentName,
                   detector=0)

    # Make the graph
    builder = pipeBase.GraphBuilder(registry=butler.registry,
                                    skipExisting=skipExisting)
    qgraph = builder.makeGraph(pipeline, collections=[butler.run],
                               run=butler.run, userQuery=userQuery)

    return butler, qgraph