def testMerge(self):
    """Policy.merge() keeps keys already in self and only fills in missing
    ones; the merged-in policy must not be mutated."""
    target = dafPersist.Policy()
    source = dafPersist.Policy()
    target['a.b.c'] = 1
    source['a.b.c'] = 2
    source['a.b.d'] = 3
    target.merge(source)
    # Existing key wins; absent key is copied from source.
    for key, want in (('a.b.c', 1), ('a.b.d', 3)):
        self.assertEqual(target[key], want)
    # Source policy must remain untouched by the merge.
    for key, want in (('a.b.c', 2), ('a.b.d', 3)):
        self.assertEqual(source[key], want)
def __init__(self, root, policy=None, **kwargs):
    """Construct the mapper, defaulting to an empty Policy when none is given.

    ``root`` is used both as the repository directory and the data root.
    """
    effectivePolicy = dafPersist.Policy() if policy is None else policy
    super(CompositeMapper, self).__init__(
        effectivePolicy, repositoryDir=root, root=root, **kwargs)
def __init__(self, root, parentRegistry=None, repositoryCfg=None):
    """Construct the test mapper from its packaged policy file.

    Parameters
    ----------
    root : `str`
        Repository root; used as both repositoryDir and root.
    parentRegistry : `Registry` subclass, optional
        Registry from a parent repository, forwarded to the base class.
    repositoryCfg : `RepositoryCfg` or `None`, optional
        Repository configuration, forwarded to the base class.
    """
    policyFilePath = dafPersist.Policy.defaultPolicyFile(
        self.packageName, "testCalexpMetadataObjects.yaml", "policy")
    policy = dafPersist.Policy(policyFilePath)
    # FIX: previously the super() call hardcoded parentRegistry=None and
    # repositoryCfg=None, silently discarding caller-supplied values.
    super(MapperForTestCalexpMetadataObjects, self).__init__(
        policy, repositoryDir=root, root=root,
        parentRegistry=parentRegistry, repositoryCfg=repositoryCfg)
    self.filterIdMap = {'u': 0, 'g': 1, 'r': 2, 'i': 3, 'z': 4, 'y': 5, 'i2': 5}
    # The LSST Filters from L. Jones 04/07/10
    afwImageUtils.defineFilter('u', 364.59)
    afwImageUtils.defineFilter('g', 476.31)
    afwImageUtils.defineFilter('r', 619.42)
    afwImageUtils.defineFilter('i', 752.06)
    afwImageUtils.defineFilter('z', 866.85)
    afwImageUtils.defineFilter('y', 971.68, alias=['y4'])  # official y filter
def __init__(self, inputPolicy=None, **kwargs):
    """Build the mapper from LsstSimMapper.yaml, folding any ``inputPolicy``
    parameters into the base-class kwargs (``doFootprints`` is kept local)."""
    policyFile = dafPersist.Policy.defaultPolicyFile(
        self.packageName, "LsstSimMapper.yaml", "policy")
    policy = dafPersist.Policy(policyFile)
    self.doFootprints = False
    if inputPolicy is not None:
        for name in inputPolicy.paramNames(True):
            if name == "doFootprints":
                self.doFootprints = True
            else:
                kwargs[name] = inputPolicy.get(name)
    super(LsstSimMapper, self).__init__(policy, os.path.dirname(policyFile), **kwargs)
    self.filterIdMap = {'u': 0, 'g': 1, 'r': 2, 'i': 3, 'z': 4, 'y': 5, 'i2': 5}
    # The LSST Filters from L. Jones 04/07/10
    afwImageUtils.resetFilters()
    for band, lambdaEff in (('u', 364.59), ('g', 476.31), ('r', 619.42),
                            ('i', 752.06), ('z', 866.85)):
        afwImageUtils.defineFilter(band, lambdaEff)
    afwImageUtils.defineFilter('y', 971.68, alias=['y4'])  # official y filter
def __init__(self, root, parentRegistry=None, repositoryCfg=None):
    """Construct the test mapper from its packaged policy file.

    Parameters
    ----------
    root : `str`
        Repository root; used as both repositoryDir and root.
    parentRegistry : `Registry` subclass, optional
        Registry from a parent repository, forwarded to the base class.
    repositoryCfg : `RepositoryCfg` or `None`, optional
        Repository configuration, forwarded to the base class.
    """
    policyFilePath = dafPersist.Policy.defaultPolicyFile(
        self.packageName, "testCalexpMetadataObjects.yaml", "policy")
    policy = dafPersist.Policy(policyFilePath)
    # FIX: previously the super() call hardcoded parentRegistry=None and
    # repositoryCfg=None, silently discarding caller-supplied values.
    super(MapperForTestCalexpMetadataObjects, self).__init__(
        policy, repositoryDir=root, root=root,
        parentRegistry=parentRegistry, repositoryCfg=repositoryCfg)
    self.filterIdMap = {'u': 0, 'g': 1, 'r': 2, 'i': 3, 'z': 4, 'y': 5, 'i2': 5}
    with warnings.catch_warnings():
        # suppress Filter warnings; we already know this is deprecated
        warnings.simplefilter('ignore', category=FutureWarning)
        # The LSST Filters from L. Jones 04/07/10
        afwImageUtils.defineFilter('u', 364.59)
        afwImageUtils.defineFilter('g', 476.31)
        afwImageUtils.defineFilter('r', 619.42)
        afwImageUtils.defineFilter('i', 752.06)
        afwImageUtils.defineFilter('z', 866.85)
        afwImageUtils.defineFilter('y', 971.68, alias=['y4'])  # official y filter
def __init__(self):
    """Wire the mapper to the BaseMapper.paf policy rooted at ROOT."""
    pafPath = os.path.join(ROOT, "BaseMapper.paf")
    lsst.obs.base.CameraMapper.__init__(
        self, policy=dafPersist.Policy(pafPath), repositoryDir=ROOT, root=ROOT)
def __init__(self, inputPolicy=None, **kwargs):
    """Build the mapper from LsstSimMapper.yaml; set up the defect registry
    (if the policy names one) and the LSST filter set.

    Parameters
    ----------
    inputPolicy : `dafPersist.Policy`, optional
        Extra parameters; ``doFootprints`` is kept on the instance, all
        others are forwarded as keyword arguments to the base class.
    """
    policyFile = dafPersist.Policy.defaultPolicyFile(self.packageName, "LsstSimMapper.yaml", "policy")
    policy = dafPersist.Policy(policyFile)
    repositoryDir = os.path.join(getPackageDir(self.packageName), 'policy')
    # Defects live inside the camera package, addressed relative to its
    # policy directory; the registry is only created when the policy asks.
    self.defectRegistry = None
    if 'defects' in policy:
        self.defectPath = os.path.join(repositoryDir, policy['defects'])
        defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
        self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
    self.doFootprints = False
    if inputPolicy is not None:
        for kw in inputPolicy.paramNames(True):
            if kw == "doFootprints":
                self.doFootprints = True
            else:
                kwargs[kw] = inputPolicy.get(kw)
    super(LsstSimMapper, self).__init__(policy, os.path.dirname(policyFile), **kwargs)
    self.filterIdMap = {'u': 0, 'g': 1, 'r': 2, 'i': 3, 'z': 4, 'y': 5, 'i2': 5}
    # The LSST Filters from L. Jones 04/07/10
    afwImageUtils.resetFilters()
    afwImageUtils.defineFilter('u', lambdaEff=364.59, lambdaMin=324.0, lambdaMax=395.0)
    afwImageUtils.defineFilter('g', lambdaEff=476.31, lambdaMin=405.0, lambdaMax=552.0)
    afwImageUtils.defineFilter('r', lambdaEff=619.42, lambdaMin=552.0, lambdaMax=691.0)
    # NOTE(review): for 'i' and 'z' below, lambdaEff lies *outside* the
    # [lambdaMin, lambdaMax] range (752.06 vs 818-921; 866.85 vs 922-997).
    # Looks like the bounds may have been shifted by one band — confirm
    # against the camera's filter curves before relying on them.
    afwImageUtils.defineFilter('i', lambdaEff=752.06, lambdaMin=818.0, lambdaMax=921.0)
    afwImageUtils.defineFilter('z', lambdaEff=866.85, lambdaMin=922.0, lambdaMax=997.0)
    # official y filter
    afwImageUtils.defineFilter('y', lambdaEff=971.68, lambdaMin=975.0, lambdaMax=1075.0, alias=['y4'])
def __init__(self, **kwargs):
    """Wire the mapper to the MinMapper1.yaml policy rooted at ROOT."""
    minPolicy = dafPersist.Policy(os.path.join(ROOT, "MinMapper1.yaml"))
    lsst.obs.base.CameraMapper.__init__(
        self, policy=minPolicy, repositoryDir=ROOT, **kwargs)
def __init__(self, inputPolicy=None, **kwargs):
    """Initialization for the T80cam Mapper."""
    policyFile = dafPersist.Policy.defaultPolicyFile(self.packageName, "t80camMapper.yaml", "policy")
    policy = dafPersist.Policy(policyFile)
    CameraMapper.__init__(self, policy, os.path.dirname(policyFile), **kwargs)
    # The composite objects don't seem to set these
    for mappingDict in (self.mappings, self.exposures):
        mappingDict['raw'] = mappingDict['_raw']
    # Open/clear filter first, then the plain band list (all unknown
    # effective wavelengths, hence 0.0).
    afwImageUtils.defineFilter('NONE', 0.0, alias=['no_filter', 'OPEN', 'empty'])
    for band in ('c', 'o', 't', 'B', 'V', 'R', 'I',
                 'u', 'v', 'g', 'r', 'i', 'z', 'h', 'oiii'):
        afwImageUtils.defineFilter(band, 0.0, alias=[])
def testConflictRaises(self):
    """A policy with duplicate dataset-type mappings must raise ValueError."""
    conflictPolicy = dafPersist.Policy(os.path.join(ROOT, "ConflictMapper.yaml"))
    expected = r"Duplicate mapping policy for dataset type packages"
    with self.assertRaisesRegex(ValueError, expected):
        lsst.obs.base.CameraMapper(policy=conflictPolicy, repositoryDir=ROOT, root=ROOT)
def __init__(self):
    """Wire the mapper to MinMapper2.yaml with the cfhtls test registry."""
    yamlPath = os.path.join(ROOT, 'MinMapper2.yaml')
    registryPath = os.path.join(ROOT, 'cfhtls.sqlite3')
    lsst.obs.base.CameraMapper.__init__(
        self, policy=dafPersist.Policy(yamlPath),
        repositoryDir=ROOT, root=ROOT, registry=registryPath)
def __init__(self, **kwargs):
    """Wire the mapper to MinMapper2.paf with the cfhtls test registry.

    Note the registry is given as a bare filename (resolved by the base
    class), unlike the yaml variant which joins it onto ROOT.
    """
    pafPolicy = dafPersist.Policy(os.path.join(ROOT, "MinMapper2.paf"))
    lsst.obs.base.CameraMapper.__init__(
        self, policy=pafPolicy, repositoryDir=ROOT,
        registry="cfhtls.sqlite3", **kwargs)
def testUpdateWithPolicy(self):
    """update() overrides nested scalars while keeping sibling keys intact."""
    override = dafPersist.Policy({'body': {'job': {'position': 'Manager'}}})
    self.policy.update(override)
    # 'position' is replaced; 'company' and 'name' survive the update.
    expected = {
        'job': {'position': 'Manager', 'company': 'Microsoft'},
        'name': 'John',
    }
    self.assertEqual(self.policy['body'], expected)
def setUp(self):
    """Create the baseline nested Policy used by the update tests."""
    initial = {
        'body': {
            'job': {'position': 'Developer', 'company': 'Microsoft'},
            'name': 'John',
        },
        'error': False,
    }
    self.policy = dafPersist.Policy(initial)
def __init__(self, *args, **kwargs):
    """Mapper with a minimal inline policy (no on-disk policy file needed).

    Note: positional ``*args`` are accepted for signature compatibility
    but are not forwarded to the base class.
    """
    # FIX: yaml.load() without an explicit Loader is deprecated (PyYAML
    # >= 5.1) and unsafe on untrusted input; safe_load parses this plain
    # mapping document identically.
    policy = dafPersist.Policy(
        yaml.safe_load("""
camera: "camera"
defaultLevel: "sensor"
datasets: {}
exposures: {}
calibrations: {}
images: {}"""))
    super(MyMapper, self).__init__(policy, repositoryDir=ROOT, **kwargs)
def testInRepoPolicyOverrides(self):
    """Verify that the template value specified in the policy file in the
    repository overrides the template value set in the policy file in the
    package.

    Checks that child repositories do not get the policy from the parent
    (per specification). Checks that values not specified in the local
    _policy file are set with those of the package's _policy file.
    """
    # Override only the 'raw' template; everything else should fall
    # through to the package policy file.
    policyOverride = {'exposures': {'raw': {'template': "raw/v%(visit)d_f%(filter)s.fits.gz"}}}
    policyPath = os.path.join(ROOT, 'policy', 'testMapper.yaml')
    policy = dafPersist.Policy(policyPath)
    # Remember a value the override does NOT touch, for comparison below.
    postISRCCDtemplate = policy.get('exposures.postISRCCD.template')
    butler = dafPersist.Butler(outputs={'root': self.repoARoot,
                                        'mapper': lsst.obs.test.TestMapper,
                                        'policy': policyOverride})
    # check that the declared policy got used in the mapper
    mapper = butler._repos.outputs()[0].repo._mapper
    self.assertEqual(mapper.mappings['raw'].template, "raw/v%(visit)d_f%(filter)s.fits.gz")
    # Run a simple test case to verify that although the package's policy
    # was overloaded with some values, other values specified in the
    # policy file in the package are loaded.
    self.assertEqual(postISRCCDtemplate, mapper.mappings['postISRCCD'].template)
    del butler
    del mapper
    # Reopen repo A as an input of a new repo B: A's persisted in-repo
    # policy must be reloaded, but must not propagate into B.
    repoBRoot = os.path.join(self.testDir, 'b')
    butler = dafPersist.Butler(inputs=self.repoARoot, outputs=repoBRoot)
    # check that the reloaded policy got used in the mapper for repo A
    mapper = butler._repos.inputs()[0].repo._mapper
    self.assertEqual(mapper.mappings['raw'].template, "raw/v%(visit)d_f%(filter)s.fits.gz")
    # again, test that another value loaded from the package policy file
    # is loaded correctly.
    self.assertEqual(postISRCCDtemplate, mapper.mappings['postISRCCD'].template)
    # also check that repo B does not get the in-repo policy from A
    mapper = butler._repos.outputs()[0].repo._mapper
    self.assertNotEqual(mapper.mappings['raw'].template, "raw/v%(visit)d_f%(filter)s.fits.gz")
    # and again, test that another value loaded from the package policy
    # file is loaded correctly.
    self.assertEqual(postISRCCDtemplate, mapper.mappings['postISRCCD'].template)
def __init__(self, inputPolicy=None, **kwargs):
    """Build the mapper by merging the class's list of .yaml policy files,
    locating a calibration root if none was supplied, and defining filters.

    Parameters
    ----------
    inputPolicy : optional
        Accepted for interface compatibility; not used in this body.
    **kwargs
        Forwarded to the base CameraMapper; ``calibRoot`` may be filled
        in here from a search relative to ``root``.
    """
    #
    # Merge the list of .yaml files
    #
    policy = None
    for yamlFile in self.yamlFileList:
        policyFile = dafPersist.Policy.defaultPolicyFile(self.packageName, yamlFile, "policy")
        npolicy = dafPersist.Policy(policyFile)
        if policy is None:
            policy = npolicy
        else:
            # merge() keeps existing keys, so earlier files take precedence.
            policy.merge(npolicy)
    #
    # Look for the calibrations root "root/CALIB" if not supplied
    #
    if kwargs.get('root', None) and not kwargs.get('calibRoot', None):
        # Candidates: root/CALIB first, then parents' CALIB dirs, then
        # the parent roots themselves; first one with a calib registry wins.
        calibSearch = [os.path.join(kwargs['root'], 'CALIB')]
        if "repositoryCfg" in kwargs:
            calibSearch += [os.path.join(cfg.root, 'CALIB') for cfg in kwargs["repositoryCfg"].parents
                            if hasattr(cfg, "root")]
            calibSearch += [cfg.root for cfg in kwargs["repositoryCfg"].parents if hasattr(cfg, "root")]
        for calibRoot in calibSearch:
            if os.path.exists(os.path.join(calibRoot, "calibRegistry.sqlite3")):
                kwargs['calibRoot'] = calibRoot
                break
        if not kwargs.get('calibRoot', None):
            lsst.log.Log.getLogger("LsstCamMapper").warn("Unable to find valid calib root directory")
    # policyFile here is the last file from the merge loop above; its
    # directory serves as the repositoryDir for the base class.
    super().__init__(policy, os.path.dirname(policyFile), **kwargs)
    #
    # The composite objects don't seem to set these
    #
    for d in (self.mappings, self.exposures):
        d['raw'] = d['_raw']
    self.defineFilters()
    # Bit budget for object IDs: tract + 2*patch + filter must leave room
    # in 64 bits for the per-object counter.
    LsstCamMapper._nbit_tract = 16
    LsstCamMapper._nbit_patch = 5
    LsstCamMapper._nbit_filter = 6
    LsstCamMapper._nbit_id = 64 - (LsstCamMapper._nbit_tract + 2*LsstCamMapper._nbit_patch +
                                   LsstCamMapper._nbit_filter)
    if len(afwImage.Filter.getNames()) >= 2**LsstCamMapper._nbit_filter:
        raise RuntimeError("You have more filters defined than fit into the %d bits allocated" %
                           LsstCamMapper._nbit_filter)
def setUp(self):
    """Create a scratch repo with a composite dataset ('basicPair', whose
    'b' component is input-only) and persist one object of each type."""
    self.testData = tempfile.mkdtemp(dir=ROOT, prefix='TestInputOnly-')
    self.firstRepoPath = os.path.join(self.testData, 'repo1')
    self.objA = dpTest.TestObject("abc")
    self.objB = dpTest.TestObject("def")
    datasets = {
        'basicObject1': {
            'python': 'lsst.daf.persistence.test.TestObject',
            'template': 'basic/id%(id)s.pickle',
            'storage': 'PickleStorage',
        },
        'basicObject2': {
            'python': 'lsst.daf.persistence.test.TestObject',
            'template': 'basic/name%(name)s.pickle',
            'storage': 'PickleStorage',
        },
        'basicPair': {
            'python': 'lsst.daf.persistence.test.TestObjectPair',
            'composite': {
                'a': {'datasetType': 'basicObject1'},
                'b': {'datasetType': 'basicObject2', 'inputOnly': True},
            },
            'assembler': 'lsst.daf.persistence.test.TestObjectPair.assembler',
            'disassembler': 'lsst.daf.persistence.test.TestObjectPair.disassembler',
        },
    }
    self.policy = dafPersist.Policy({'camera': 'lsst.afw.cameraGeom.Camera',
                                     'datasets': datasets})
    repoArgs = dafPersist.RepositoryArgs(root=self.firstRepoPath,
                                         mapper='lsst.obs.base.test.CompositeMapper',
                                         policy=self.policy)
    butler = dafPersist.Butler(outputs=repoArgs)
    butler.put(self.objA, 'basicObject1', dataId={'id': 'foo'})
    butler.put(self.objB, 'basicObject2', dataId={'name': 'bar'})
    del butler
    del repoArgs
def __init__(self, inputPolicy=None, **kwargs):
    """Initialization for the AuxTel Mapper."""
    policyFile = dafPersist.Policy.defaultPolicyFile(self.packageName, "auxTelMapper.yaml", "policy")
    policy = dafPersist.Policy(policyFile)
    CameraMapper.__init__(self, policy, os.path.dirname(policyFile), **kwargs)
    # The composite objects don't seem to set these
    for mappingDict in (self.mappings, self.exposures):
        mappingDict['raw'] = mappingDict['_raw']
    # Open position first, then the plain filter list (effective
    # wavelengths unknown, hence 0.0).
    afwImageUtils.defineFilter('NONE', 0.0, alias=['no_filter', 'OPEN', 'empty'])
    for filterName in ('275CutOn', '550CutOn', 'green', 'blue'):
        afwImageUtils.defineFilter(filterName, 0.0, alias=[])
def __init__(self, inputPolicy=None, **kwargs):
    """Initialization for the ComCam Mapper."""
    policyFile = dafPersist.Policy.defaultPolicyFile(self.packageName, "comCamMapper.yaml", "policy")
    policy = dafPersist.Policy(policyFile)
    CameraMapper.__init__(self, policy, os.path.dirname(policyFile), **kwargs)
    # The composite objects don't seem to set these
    for mappingDict in (self.mappings, self.exposures):
        mappingDict['raw'] = mappingDict['_raw']
    # Open position first, then the two cut-on filters (effective
    # wavelengths unknown, hence 0.0).
    afwImageUtils.defineFilter('NONE', 0.0, alias=['no_filter', "OPEN"])
    for filterName in ('275CutOn', '550CutOn'):
        afwImageUtils.defineFilter(filterName, 0.0, alias=[])
def __init__(self, **kwargs):
    """Build the ZTF mapper from ztfMapper.yaml, locating a calibration
    root if none was supplied, and (re)defining the ZTF filter set.

    Parameters
    ----------
    **kwargs
        Forwarded to the base class; ``calibRoot`` may be filled in here
        from a search relative to ``root``.
    """
    policyFile = dafPersistence.Policy.defaultPolicyFile(
        self.packageName, "ztfMapper.yaml", "policy")
    policy = dafPersistence.Policy(policyFile)
    #
    # Look for the calibrations root "root/CALIB" if not supplied
    #
    if kwargs.get('root', None) and not kwargs.get('calibRoot', None):
        # Candidates: root/CALIB first, then parents' CALIB dirs, then
        # the parent roots themselves; first with a calib registry wins.
        calibSearch = [os.path.join(kwargs['root'], 'CALIB')]
        if "repositoryCfg" in kwargs:
            calibSearch += [
                os.path.join(cfg.root, 'CALIB')
                for cfg in kwargs["repositoryCfg"].parents
                if hasattr(cfg, "root")
            ]
            calibSearch += [
                cfg.root for cfg in kwargs["repositoryCfg"].parents
                if hasattr(cfg, "root")
            ]
        for calibRoot in calibSearch:
            if os.path.exists(
                    os.path.join(calibRoot, "calibRegistry.sqlite3")):
                kwargs['calibRoot'] = calibRoot
                break
        if not kwargs.get('calibRoot', None):
            lsst.log.Log.getLogger("ZtfCamMapper").warn(
                "Unable to find valid calib root directory")
    super(ZtfMapper, self).__init__(policy, os.path.dirname(policyFile), **kwargs)
    # Clear any previously defined filters, then register ZTF's bands
    # (effective wavelengths not recorded here, hence 0.0).
    afwImageUtils.resetFilters()
    afwImageUtils.defineFilter('NONE', 0.0)
    afwImageUtils.defineFilter('ZTF_g', 0.0, alias=['g'])
    afwImageUtils.defineFilter('ZTF_r', 0.0, alias=['r'])
    afwImageUtils.defineFilter('ZTF_i', 0.0, alias=['i'])
    # Bit budget for object IDs: tract + 2*patch + filter must leave room
    # in 64 bits for the per-object counter.
    ZtfMapper._nbit_tract = 16
    ZtfMapper._nbit_patch = 5
    ZtfMapper._nbit_filter = 6
    ZtfMapper._nbit_id = 64 - (ZtfMapper._nbit_tract + 2 * ZtfMapper._nbit_patch + ZtfMapper._nbit_filter)
def test(self):
    """Verify that when specifying a repo policy that the policy gets
    written & loaded correctly.

    Removed dead code from the original: a discarded
    ``dpTest.TestObject("def")``, an unused ``os.path.join(..., 'repo2')``
    expression, and a duplicate ``firstRepoPath`` assignment.
    """
    objA = dpTest.TestObject("abc")
    firstRepoPath = os.path.join(self.testData, 'repo1')
    policy = dafPersist.Policy({
        'camera': 'lsst.afw.cameraGeom.Camera',
        'datasets': {
            'basicObject1': {
                'python': 'lsst.daf.persistence.test.TestObject',
                'template': 'basic/id%(id)s.pickle',
                'storage': 'PickleStorage'
            },
        }
    })
    repoArgs = dafPersist.RepositoryArgs(root=firstRepoPath,
                                         mapper='lsst.obs.base.test.CompositeMapper',
                                         policy=policy)
    butler = dafPersist.Butler(outputs=repoArgs)
    # The declared policy must have been serialized into the new repo's
    # cfg file. UnsafeLoader is required to reconstruct the cfg objects;
    # safe for this locally-written file.
    with open(os.path.join(firstRepoPath, 'repositoryCfg.yaml')) as f:
        cfg = yaml.load(f, Loader=yaml.UnsafeLoader)
    self.assertEqual(cfg.policy, policy)
    butler.put(objA, 'basicObject1', {'id': 1})
    del butler
    del repoArgs
    # Test that a newly-initialized butler can find the policy in the
    # repositoryCfg.
    repoArgs = dafPersist.RepositoryArgs(root=firstRepoPath)
    butler = dafPersist.Butler(inputs=repoArgs)
    reloadedObjA = butler.get('basicObject1', {'id': 1})
    self.assertEqual(reloadedObjA, objA)
def __init__(self, inputPolicy=None, **kwargs):
    """Build the mapper from SdssMapper.yaml, folding any ``inputPolicy``
    parameters into the base-class kwargs (``doFootprints`` is kept local)."""
    policyFile = dafPersist.Policy.defaultPolicyFile(self.packageName, "SdssMapper.yaml", "policy")
    policy = dafPersist.Policy(policyFile)
    self.doFootprints = False
    if inputPolicy is not None:
        for name in inputPolicy.paramNames(True):
            if name == "doFootprints":
                self.doFootprints = True
            else:
                kwargs[name] = inputPolicy.get(name)
    super(SdssMapper, self).__init__(policy, os.path.dirname(policyFile), **kwargs)
    # define filters?
    self.filterIdMap = {'u': 0, 'g': 1, 'r': 2, 'i': 3, 'z': 4}
    for band, eff in (('u', 380), ('g', 450), ('r', 600), ('i', 770), ('z', 900)):
        afwImageUtils.defineFilter(band, lambdaEff=eff)
def __init__(self, inputPolicy=None, **kwargs):
    """Build the test mapper from testMapper.yaml, folding any
    ``inputPolicy`` parameters into the base-class kwargs
    (``doFootprints`` is kept local) and defining the LSST filter set.
    """
    policyFilePath = dafPersist.Policy.defaultPolicyFile(
        self.packageName, "testMapper.yaml", "policy")
    policy = dafPersist.Policy(policyFilePath)
    self.doFootprints = False
    if inputPolicy is not None:
        for kw in inputPolicy.paramNames(True):
            if kw == "doFootprints":
                self.doFootprints = True
            else:
                kwargs[kw] = inputPolicy.get(kw)
    # FIX: pass the policy *directory* (not the .yaml file path) as
    # repositoryDir, consistent with the other mappers in this codebase;
    # repositoryDir is used to resolve paths relative to the policy dir.
    CameraMapper.__init__(self, policy, os.path.dirname(policyFilePath), **kwargs)
    self.filterIdMap = {'u': 0, 'g': 1, 'r': 2, 'i': 3, 'z': 4, 'y': 5, 'i2': 5}
    with warnings.catch_warnings():
        # suppress Filter warnings; we already know this is deprecated
        warnings.simplefilter('ignore', category=FutureWarning)
        # The LSST Filters from L. Jones 04/07/10
        afwImageUtils.defineFilter('u', 364.59)
        afwImageUtils.defineFilter('g', 476.31)
        afwImageUtils.defineFilter('r', 619.42)
        afwImageUtils.defineFilter('i', 752.06)
        afwImageUtils.defineFilter('z', 866.85)
        afwImageUtils.defineFilter('y', 971.68, alias=['y4'])  # official y filter
def __init__(self, root, registry=None, calibRoot=None, calibRegistry=None,
             provided=None, parentRegistry=None, repositoryCfg=None):
    """Initialize the CameraMapper.

    The per-camera policy is loaded internally from obs_hsc_sims'
    ``HscSimsMapper.yaml`` and updated with any in-repository policy;
    unlike the stock CameraMapper, ``policy``/``repositoryDir`` are not
    constructor parameters here.

    Parameters
    ----------
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
    """
    policyFile = Policy.defaultPolicyFile("obs_hsc_sims", "HscSimsMapper.yaml", "policy")
    policy = Policy(policyFile)
    dafPersist.Mapper.__init__(self)
    self.log = lsstLog.Log.getLogger("HscSimsMapper")
    # Root preference order: explicit argument, then repositoryCfg, else None.
    if root:
        self.root = root
    elif repositoryCfg:
        self.root = repositoryCfg.root
    else:
        self.root = None
    # Backward compatibility: convert an old pex Policy into a daf one.
    if isinstance(policy, pexPolicy.Policy):
        policy = dafPersist.Policy(policy)
    # An in-repository policy overrides values from the package policy.
    repoPolicy = repositoryCfg.policy if repositoryCfg else None
    if repoPolicy is not None:
        policy.update(repoPolicy)
    # Don't load the default policy from obs_base
    # defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
    #                                                         "MapperDictionary.paf",
    #                                                         "policy")
    # dictPolicy = dafPersist.Policy(defaultPolicyFile)
    # policy.merge(dictPolicy)
    # Levels
    self.levels = dict()
    if 'levels' in policy:
        levelsPolicy = policy['levels']
        for key in levelsPolicy.names(True):
            self.levels[key] = set(levelsPolicy.asArray(key))
    self.defaultLevel = policy['defaultLevel']
    self.defaultSubLevels = dict()
    if 'defaultSubLevels' in policy:
        self.defaultSubLevels = policy['defaultSubLevels']
    # Root directories
    if root is None:
        root = "."
    root = dafPersist.LogicalLocation(root).locString()
    self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
    # If the calibRoot is passed in, use that. If not and it's indicated in
    # the policy, use that. And otherwise, the calibs are in the regular
    # root.
    # If the location indicated by the calib root does not exist, do not
    # create it.
    calibStorage = None
    if calibRoot is not None:
        calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
        calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                      create=False)
    else:
        calibRoot = policy.get('calibRoot', None)
        if calibRoot:
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
    if calibStorage is None:
        calibStorage = self.rootStorage
    self.root = root
    # Registries
    self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                        self.rootStorage, searchParents=False, posixIfNoSql=True)
    if not self.registry:
        self.registry = parentRegistry
    needCalibRegistry = policy.get('needCalibRegistry', None)
    if needCalibRegistry:
        if calibStorage:
            self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                     "calibRegistryPath", calibStorage,
                                                     posixIfNoSql=False)  # NB never use posix for calibs
        else:
            raise RuntimeError(
                "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                "calibRoot ivar:%s or policy['calibRoot']:%s" %
                (calibRoot, policy.get('calibRoot', None)))
    else:
        self.calibRegistry = None
    # Dict of valid keys and their value types
    self.keyDict = dict()
    # NOTE(review): the 'provided' parameter is accepted by this __init__
    # but _initMappings is called with provided=None — confirm whether it
    # should be forwarded.
    self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
    self._initWriteRecipes()
    # Camera geometry
    # #self.cameraDataLocation = None  # path to camera geometry config file
    # #self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)
    # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
    # camera package, which is on the local filesystem.
    # #self.defectRegistry = None
    # #if 'defects' in policy:
    # #    self.defectPath = os.path.join(repositoryDir, policy['defects'])
    # #    defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
    # #    self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
    # Filter translation table
    self.filters = None
def __init__(self, inputPolicy=None, **kwargs):
    """Build the mapper from LsstSimMapper.yaml, define the LSST filter
    set, and set the object-ID bit allocation.

    Parameters
    ----------
    inputPolicy : `dafPersist.Policy`, optional
        Extra parameters; ``doFootprints`` is kept on the instance, all
        others are forwarded as keyword arguments to the base class.
    """
    policyFile = dafPersist.Policy.defaultPolicyFile(
        self.packageName, "LsstSimMapper.yaml", "policy")
    policy = dafPersist.Policy(policyFile)
    self.doFootprints = False
    if inputPolicy is not None:
        for kw in inputPolicy.paramNames(True):
            if kw == "doFootprints":
                self.doFootprints = True
            else:
                kwargs[kw] = inputPolicy.get(kw)
    super(LsstSimMapper, self).__init__(policy, os.path.dirname(policyFile), **kwargs)
    self.filterIdMap = {'u': 0, 'g': 1, 'r': 2, 'i': 3, 'z': 4, 'y': 5, 'i2': 5}
    # The LSST Filters from L. Jones 04/07/10
    afwImageUtils.resetFilters()
    afwImageUtils.defineFilter('u', lambdaEff=364.59, lambdaMin=324.0, lambdaMax=395.0)
    afwImageUtils.defineFilter('g', lambdaEff=476.31, lambdaMin=405.0, lambdaMax=552.0)
    afwImageUtils.defineFilter('r', lambdaEff=619.42, lambdaMin=552.0, lambdaMax=691.0)
    # NOTE(review): for 'i' and 'z' below, lambdaEff lies *outside* the
    # [lambdaMin, lambdaMax] range — confirm these bounds against the
    # camera's filter curves.
    afwImageUtils.defineFilter('i', lambdaEff=752.06, lambdaMin=818.0, lambdaMax=921.0)
    afwImageUtils.defineFilter('z', lambdaEff=866.85, lambdaMin=922.0, lambdaMax=997.0)
    # official y filter
    afwImageUtils.defineFilter('y', lambdaEff=971.68, lambdaMin=975.0, lambdaMax=1075.0, alias=['y4'])
    # If/when y3 sim data becomes available, uncomment this and
    # modify the schema appropriately
    # afwImageUtils.defineFilter('y3', 1002.44) # candidate y-band
    # Taken from hscMapper.py
    # The number of bits allocated for fields in object IDs, appropriate for
    # the default-configured Rings skymap.
    #
    # This shouldn't be the mapper's job at all; see #2797.
    LsstSimMapper._nbit_tract = 16
    LsstSimMapper._nbit_patch = 5
    LsstSimMapper._nbit_filter = 6
    LsstSimMapper._nbit_id = 64 - (LsstSimMapper._nbit_tract + 2 * LsstSimMapper._nbit_patch +
                                   LsstSimMapper._nbit_filter)
def __init__(self, inputPolicy=None, **kwargs):
    """Build the mapper by merging the class's list of .yaml policy files,
    locating a calibration root if none was supplied, defining filters,
    and checking the filter count against the object-ID bit budget.

    Parameters
    ----------
    inputPolicy : optional
        Accepted for interface compatibility; not used in this body.
    **kwargs
        Forwarded to the base class; ``calibRoot`` may be filled in here
        from a search relative to ``root``.
    """
    #
    # Merge the list of .yaml files
    #
    policy = None
    for yamlFile in self.yamlFileList:
        policyFile = dafPersist.Policy.defaultPolicyFile(
            self.packageName, yamlFile, "policy")
        npolicy = dafPersist.Policy(policyFile)
        if policy is None:
            policy = npolicy
        else:
            # merge() keeps existing keys, so earlier files take precedence.
            policy.merge(npolicy)
    #
    # Look for the calibrations root "root/CALIB" if not supplied
    #
    if kwargs.get('root', None) and not kwargs.get('calibRoot', None):
        # Candidates: root/CALIB first, then parents' CALIB dirs, then
        # the parent roots themselves; first with a calib registry wins.
        calibSearch = [os.path.join(kwargs['root'], 'CALIB')]
        if "repositoryCfg" in kwargs:
            calibSearch += [
                os.path.join(cfg.root, 'CALIB')
                for cfg in kwargs["repositoryCfg"].parents
                if hasattr(cfg, "root")
            ]
            calibSearch += [
                cfg.root for cfg in kwargs["repositoryCfg"].parents
                if hasattr(cfg, "root")
            ]
        for calibRoot in calibSearch:
            if os.path.exists(
                    os.path.join(calibRoot, "calibRegistry.sqlite3")):
                kwargs['calibRoot'] = calibRoot
                break
        if not kwargs.get('calibRoot', None):
            lsst.log.Log.getLogger("LsstCamMapper").warn(
                "Unable to find valid calib root directory")
    # policyFile here is the last file from the merge loop above; its
    # directory serves as the repositoryDir for the base class.
    super().__init__(policy, os.path.dirname(policyFile), **kwargs)
    #
    # The composite objects don't seem to set these
    #
    for d in (self.mappings, self.exposures):
        d['raw'] = d['_raw']
    self.filterDefinitions.reset()
    self.filterDefinitions.defineFilters()
    # Bit budget for object IDs: tract + 2*patch + filter must leave room
    # in 64 bits for the per-object counter.
    LsstCamMapper._nbit_tract = 16
    LsstCamMapper._nbit_patch = 5
    LsstCamMapper._nbit_filter = 7
    LsstCamMapper._nbit_id = 64 - (LsstCamMapper._nbit_tract + 2 * LsstCamMapper._nbit_patch +
                                   LsstCamMapper._nbit_filter)
    #
    # The BOT has many ND filters in a second filter wheel, resulting in
    # more than 128 composite filters. However, we're never going to
    # build coadds with the BOT. So let's ignore the qualifier after
    # the ~ in filter names when we're calculating the number of filters
    #
    # Because the first filter wheel can be empty some of baseFilters are
    # actually in the second wheel, but that's OK -- we still easily fit
    # in 7 bits (5 would actually be enough)
    baseFilters = set()
    for n in afwImage.Filter.getNames():
        i = n.find('~')
        if i >= 0:
            n = n[:i]  # strip the second-wheel qualifier
        baseFilters.add(n)
    nFilter = len(baseFilters)
    if nFilter >= 2**LsstCamMapper._nbit_filter:
        raise RuntimeError(
            "You have more filters (%d) defined than fit into the %d bits allocated" %
            (nFilter, LsstCamMapper._nbit_filter))
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None,
                  use_default=True):
    """Initialize mappings

    For each of the dataset types that we want to be able to read, there
    are methods that can be created to support them:
    * map_<dataset> : determine the path for dataset
    * std_<dataset> : standardize the retrieved dataset
    * bypass_<dataset> : retrieve the dataset (bypassing the usual
      retrieval machinery)
    * query_<dataset> : query the registry

    Besides the dataset types explicitly listed in the policy, we create
    additional, derived datasets for additional conveniences,
    e.g., reading the header of an image, retrieving only the size of a
    catalog.

    Parameters
    ----------
    policy : `lsst.daf.persistence.Policy`
        Policy with per-camera defaults already merged
    rootStorage : `Storage subclass instance`
        Interface to persisted repository data.
    calibStorage : `Storage subclass instance`
        Interface to persisted calib repository data
    provided : `list` of `str`
        Keys provided by the mapper
    use_default : `bool`
        Load default camera mappings
    """
    # Sub-dictionaries (for exposure/calibration/dataset types)
    imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
        "obs_base", "ImageMappingDictionary.paf", "policy"))
    expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
        "obs_base", "ExposureMappingDictionary.paf", "policy"))
    calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
        "obs_base", "CalibrationMappingDictionary.paf", "policy"))
    dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
        "obs_base", "DatasetMappingDictionary.paf", "policy"))
    # Mappings: each category pairs its defaults dictionary with the
    # Mapping subclass used to instantiate its dataset types.
    mappingList = (
        ("images", imgMappingPolicy, ImageMapping),
        ("exposures", expMappingPolicy, ExposureMapping),
        ("calibrations", calMappingPolicy, CalibrationMapping),
        ("datasets", dsMappingPolicy, DatasetMapping)
    )
    self.mappings = dict()
    for name, defPolicy, cls in mappingList:
        if name in policy:
            datasets = policy[name]
            # Centrally-defined datasets
            defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
            if os.path.exists(defaultsPath) and use_default:
                datasets.merge(dafPersist.Policy(defaultsPath))
            mappings = dict()
            setattr(self, name, mappings)
            for datasetType in datasets.names(True):
                subPolicy = datasets[datasetType]
                subPolicy.merge(defPolicy)
                # Composite datasets get a map_ method that returns a
                # ButlerComposite instead of a Mapping-backed location.
                if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                    def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                         subPolicy=subPolicy):
                        # subPolicy is bound as a default arg to avoid the
                        # late-binding closure pitfall in this loop.
                        components = subPolicy.get('composite')
                        assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                        disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                        python = subPolicy['python']
                        butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                     disassembler=disassembler,
                                                                     python=python,
                                                                     dataId=dataId,
                                                                     mapper=self)
                        for name, component in components.items():
                            butlerComposite.add(id=name,
                                                datasetType=component.get('datasetType'),
                                                setter=component.get('setter', None),
                                                getter=component.get('getter', None),
                                                subset=component.get('subset', False),
                                                inputOnly=component.get('inputOnly', False))
                        return butlerComposite
                    setattr(self, "map_" + datasetType, compositeClosure)
                    # for now at least, don't set up any other handling for
                    # this dataset type.
                    continue
                if name == "calibrations":
                    mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                  calibStorage, provided=provided, dataRoot=rootStorage)
                else:
                    mapping = cls(datasetType, subPolicy, self.registry, rootStorage,
                                  provided=provided)
                self.keyDict.update(mapping.keys())
                mappings[datasetType] = mapping
                self.mappings[datasetType] = mapping
                # weakref.proxy(self) in the closures below avoids a
                # reference cycle mapper -> bound closure -> mapper.
                if not hasattr(self, "map_" + datasetType):
                    def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                        return mapping.map(mapper, dataId, write)
                    setattr(self, "map_" + datasetType, mapClosure)
                if not hasattr(self, "query_" + datasetType):
                    def queryClosure(format, dataId, mapping=mapping):
                        return mapping.lookup(format, dataId)
                    setattr(self, "query_" + datasetType, queryClosure)
                if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                    def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                        return mapping.standardize(mapper, item, dataId)
                    setattr(self, "std_" + datasetType, stdClosure)

                def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                    """Set convenience methods on CameraMapper"""
                    # Only installs a method when the subclass hasn't
                    # already defined one with the same name.
                    mapName = "map_" + datasetType + "_" + suffix
                    bypassName = "bypass_" + datasetType + "_" + suffix
                    queryName = "query_" + datasetType + "_" + suffix
                    if not hasattr(self, mapName):
                        setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                    if not hasattr(self, bypassName):
                        if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                            bypassImpl = getattr(self, "bypass_" + datasetType)
                        if bypassImpl is not None:
                            setattr(self, bypassName, bypassImpl)
                    if not hasattr(self, queryName):
                        setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                # Filename of dataset
                setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                           [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                # Metadata from FITS file
                if subPolicy["storage"] == "FitsStorage":  # a FITS image
                    setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               readMetadata(location.getLocationsWithRoot()[0]))
                    # Add support for configuring FITS compression
                    addName = "add_" + datasetType
                    if not hasattr(self, addName):
                        setattr(self, addName, self.getImageCompressionSettings)
                    if name == "exposures":
                        # Derived convenience accessors that read only the
                        # FITS header rather than the full exposure.
                        setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
                        setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
                        setMethods("visitInfo",
                                   bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
                        setMethods("filter",
                                   bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
                        setMethods("detector",
                                   mapImpl=lambda dataId, write=False:
                                       dafPersist.ButlerLocation(
                                           pythonType="lsst.afw.cameraGeom.CameraConfig",
                                           cppType="Config",
                                           storageName="Internal",
                                           locationList="ignored",
                                           dataId=dataId,
                                           mapper=self,
                                           storage=None,
                                       ),
                                   bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       self.camera[self._extractDetectorName(dataId)]
                                   )
                        setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                   afwImage.bboxFromMetadata(
                                       readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
                    elif name == "images":
                        setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                   afwImage.bboxFromMetadata(
                                       readMetadata(location.getLocationsWithRoot()[0])))
                if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                    setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               readMetadata(os.path.join(location.getStorage().root,
                                                         location.getLocations()[0]), hdu=1))
                # Sub-images
                if subPolicy["storage"] == "FitsStorage":
                    def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                        # Map the parent image, then record the bbox in the
                        # location's additionalData so the reader can cut out
                        # the requested sub-region.
                        subId = dataId.copy()
                        del subId['bbox']
                        loc = mapping.map(mapper, subId, write)
                        bbox = dataId['bbox']
                        llcX = bbox.getMinX()
                        llcY = bbox.getMinY()
                        width = bbox.getWidth()
                        height = bbox.getHeight()
                        loc.additionalData.set('llcX', llcX)
                        loc.additionalData.set('llcY', llcY)
                        loc.additionalData.set('width', width)
                        loc.additionalData.set('height', height)
                        if 'imageOrigin' in dataId:
                            loc.additionalData.set('imageOrigin',
                                                   dataId['imageOrigin'])
                        return loc

                    def querySubClosure(key, format, dataId, mapping=mapping):
                        subId = dataId.copy()
                        del subId['bbox']
                        return mapping.lookup(format, subId)
                    setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
                if subPolicy["storage"] == "FitsCatalogStorage":
                    # Length of catalog
                    setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               readMetadata(os.path.join(location.getStorage().root,
                                                         location.getLocations()[0]),
                                            hdu=1).get("NAXIS2"))
                    # Schema of catalog
                    if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                        setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                         location.getLocations()[0])))
def _initWriteRecipes(self):
    """Read the recipes for writing files

    These recipes are currently used for configuring FITS compression,
    but they could have wider uses for configuring different flavors
    of the storage types. A recipe is referred to by a symbolic name,
    which has associated settings. These settings are stored as a
    `PropertySet` so they can easily be passed down to the
    boost::persistence framework as the "additionalData" parameter.

    The list of recipes is written in YAML. A default recipe and
    some other convenient recipes are in obs_base/policy/writeRecipes.yaml
    and these may be overridden or supplemented by the individual obs_*
    packages' own policy/writeRecipes.yaml files.

    Recipes are grouped by the storage type. Currently, only the
    ``FitsStorage`` storage type uses recipes, which uses it to
    configure FITS image compression.

    Each ``FitsStorage`` recipe for FITS compression should define
    "image", "mask" and "variance" entries, each of which may contain
    "compression" and "scaling" entries. Defaults will be provided for
    any missing elements under "compression" and "scaling".

    The allowed entries under "compression" are:

    * algorithm (string): compression algorithm to use
    * rows (int): number of rows per tile (0 = entire dimension)
    * columns (int): number of columns per tile (0 = entire dimension)
    * quantizeLevel (float): cfitsio quantization level

    The allowed entries under "scaling" are:

    * algorithm (string): scaling algorithm to use
    * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
    * fuzz (bool): fuzz the values when quantising floating-point values?
    * seed (long): seed for random number generator when fuzzing
    * maskPlanes (list of string): mask planes to ignore when doing statistics
    * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
    * quantizePad: number of stdev to allow on the low side (for STDEV_POSITIVE/NEGATIVE)
    * bscale: manually specified BSCALE (for MANUAL scaling)
    * bzero: manually specified BZERO (for MANUAL scaling)

    A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
    """
    recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
    recipes = dafPersist.Policy(recipesFile)
    supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
    # Maps each supported storage type to the validator that fills in
    # defaults and converts the recipe to its final form.
    validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
    if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
        supplements = dafPersist.Policy(supplementsFile)
        # Don't allow overrides, only supplements.
        # NOTE(review): compare recipe names *within the same storage-type
        # section* on both sides. The previous code intersected
        # recipes[entry].names() with the supplements' top-level names,
        # whose entries carry the storage-type prefix (e.g.
        # "FitsStorage.default..."), so the sets could never match and the
        # override check was dead code.
        for entry in validationMenu:
            # Skip storage types missing from either policy; there is
            # nothing to collide in that case.
            if entry not in recipes or entry not in supplements:
                continue
            intersection = set(recipes[entry].names()).intersection(set(supplements[entry].names()))
            if intersection:
                raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                   (supplementsFile, entry, recipesFile, intersection))
        recipes.update(supplements)

    self._writeRecipes = {}
    for storageType in recipes.names(True):
        # Every storage type must provide a "default" recipe; other
        # recipes are selected by name at write time.
        if "default" not in recipes[storageType]:
            raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                               (storageType, recipesFile))
        self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
def setUp(self):
    """Create a scratch repository populated with two pickled test
    objects, against which the generic-assembler tests run.
    """
    self.testData = tempfile.mkdtemp(dir=ROOT, prefix='TestGenericAssembler-')
    self.firstRepoPath = os.path.join(self.testData, 'repo1')
    self.secondRepoPath = os.path.join(self.testData, 'repo2')
    self.objA = dpTest.TestObject("abc")
    self.objB = dpTest.TestObject("def")

    # Leaf dataset types, written via pickle. Every composite dataset
    # below is assembled from these two.
    basicObject1 = {
        'python': 'lsst.daf.persistence.test.TestObject',
        'template': 'basic/id%(id)s.pickle',
        'storage': 'PickleStorage'
    }
    basicObject2 = {
        'python': 'lsst.daf.persistence.test.TestObject',
        'template': 'basic/name%(name)s.pickle',
        'storage': 'PickleStorage'
    }
    # No assembler or disassembler is specified: the component names
    # ('a', 'b') let setter/getter names be inferred.
    basicPair = {
        'python': 'lsst.daf.persistence.test.TestObjectPair',
        'composite': {
            'a': {'datasetType': 'basicObject1'},
            'b': {'datasetType': 'basicObject2'}
        }
    }
    # "generic assembler default constructor pair": the component names
    # match the argument names of TestObjectPair.__init__, so the generic
    # assembler can build the object through its constructor. Again no
    # assembler/disassembler is given.
    gaDefCtorPair = {
        'python': 'lsst.daf.persistence.test.TestObjectPair',
        'composite': {
            'objA': {'datasetType': 'basicObject1', 'getter': 'get_a'},
            'objB': {'datasetType': 'basicObject2', 'getter': 'get_b'}
        }
    }
    # Component names ('z', 'x') match neither the __init__ argument
    # names nor the set functions on the python object, so explicit
    # setters and getters are named for each component.
    gaPairWithSetter = {
        'python': 'lsst.daf.persistence.test.TestObjectPair',
        'composite': {
            'z': {'datasetType': 'basicObject1', 'setter': 'set_a', 'getter': 'get_a'},
            'x': {'datasetType': 'basicObject2', 'setter': 'set_b', 'getter': 'get_b'}
        }
    }
    # Simple object whose setter and getter use underscore-separated names.
    underscoreSetter = {
        'python': 'lsst.daf.persistence.test.TestObjectUnderscoreSetter',
        'composite': {
            'foo': {'datasetType': 'basicObject1'}
        }
    }
    # Simple object whose setter and getter use camelCase names.
    camelCaseSetter = {
        'python': 'lsst.daf.persistence.test.TestObjectCamelCaseSetter',
        'composite': {
            'foo': {'datasetType': 'basicObject1'}
        }
    }
    self.policy = dafPersist.Policy({
        'camera': 'lsst.afw.cameraGeom.Camera',
        'datasets': {
            'basicObject1': basicObject1,
            'basicObject2': basicObject2,
            'basicPair': basicPair,
            'gaDefCtorPair': gaDefCtorPair,
            'gaPairWithSetter': gaPairWithSetter,
            'underscoreSetter': underscoreSetter,
            'camelCaseSetter': camelCaseSetter
        }
    })

    repoArgs = dafPersist.RepositoryArgs(root=self.firstRepoPath,
                                         policy=self.policy,
                                         mapper='lsst.obs.base.test.CompositeMapper')
    butler = dafPersist.Butler(outputs=repoArgs)
    butler.put(self.objA, 'basicObject1', dataId={'id': 'foo'})
    butler.put(self.objB, 'basicObject2', dataId={'name': 'bar'})
    # Drop local references so the output repository is released before
    # the individual tests open their own butlers on it.
    del butler
    del repoArgs