def setUp(self):
    """Create a scratch repository holding two pickled test objects and a
    composite-dataset policy whose 'b' component is flagged input-only."""
    self.testData = tempfile.mkdtemp(dir=ROOT, prefix='TestInputOnly-')
    self.firstRepoPath = os.path.join(self.testData, 'repo1')
    self.objA = dpTest.TestObject("abc")
    self.objB = dpTest.TestObject("def")
    # Dataset definitions: two simple pickled objects plus a composite
    # pair whose 'b' component is read on input but never written.
    datasets = {
        'basicObject1': {
            'python': 'lsst.daf.persistence.test.TestObject',
            'template': 'basic/id%(id)s.pickle',
            'storage': 'PickleStorage'},
        'basicObject2': {
            'python': 'lsst.daf.persistence.test.TestObject',
            'template': 'basic/name%(name)s.pickle',
            'storage': 'PickleStorage'},
        'basicPair': {
            'python': 'lsst.daf.persistence.test.TestObjectPair',
            'composite': {
                'a': {'datasetType': 'basicObject1'},
                'b': {'datasetType': 'basicObject2', 'inputOnly': True}},
            'assembler': 'lsst.daf.persistence.test.TestObjectPair.assembler',
            'disassembler':
                'lsst.daf.persistence.test.TestObjectPair.disassembler'},
    }
    self.policy = dafPersist.Policy({'camera': 'lsst.afw.cameraGeom.Camera',
                                     'datasets': datasets})
    repoArgs = dafPersist.RepositoryArgs(
        root=self.firstRepoPath,
        mapper='lsst.obs.base.test.CompositeMapper',
        policy=self.policy)
    butler = dafPersist.Butler(outputs=repoArgs)
    butler.put(self.objA, 'basicObject1', dataId={'id': 'foo'})
    butler.put(self.objB, 'basicObject2', dataId={'name': 'bar'})
    # Drop the butler so the repository is finalized before any test runs.
    del butler
    del repoArgs
def test(self):
    """Verify that when specifying a repo policy the policy gets written
    to the repositoryCfg and loaded back correctly.
    """
    objA = dpTest.TestObject("abc")
    firstRepoPath = os.path.join(self.testData, 'repo1')
    policy = dafPersist.Policy({
        'camera': 'lsst.afw.cameraGeom.Camera',
        'datasets': {
            'basicObject1': {
                'python': 'lsst.daf.persistence.test.TestObject',
                'template': 'basic/id%(id)s.pickle',
                'storage': 'PickleStorage'
            },
        }
    })
    repoArgs = dafPersist.RepositoryArgs(
        root=firstRepoPath,
        mapper='lsst.obs.base.test.CompositeMapper',
        policy=policy)
    butler = dafPersist.Butler(outputs=repoArgs)
    # The cfg file read here was just written by this test's own Butler,
    # so UnsafeLoader is acceptable; never use it on untrusted input.
    with open(os.path.join(firstRepoPath, 'repositoryCfg.yaml')) as f:
        cfg = yaml.load(f, Loader=yaml.UnsafeLoader)
    self.assertEqual(cfg.policy, policy)
    butler.put(objA, 'basicObject1', {'id': 1})
    del butler
    del repoArgs
    # Test that a newly-initialized butler can find the policy in the
    # repositoryCfg.
    repoArgs = dafPersist.RepositoryArgs(root=firstRepoPath)
    butler = dafPersist.Butler(inputs=repoArgs)
    reloadedObjA = butler.get('basicObject1', {'id': 1})
    self.assertEqual(reloadedObjA, objA)
def testDatasetDoesNotExist(self):
    """Butler.datasetExists must report False for a composite dataset
    when some of its components are missing from the repository."""
    repoPath = os.path.join(self.testData, 'repo')
    args = dafPersist.RepositoryArgs(
        root=repoPath,
        policy=self.policy,
        mapper='lsst.obs.base.test.CompositeMapper')
    butler = dafPersist.Butler(outputs=args)
    # Store only component 'a'; component 'b' (basicObject2) is never put.
    self.objA = dpTest.TestObject("abc")
    butler.put(self.objA, 'basicObject1', dataId={'id': 'foo'})
    pairExists = butler.datasetExists(
        'basicPair', dataId={'id': 'foo', 'name': 'bar'})
    self.assertFalse(pairExists)
def setUp(self):
    """Create a scratch repository seeded with two pickled test objects
    and a policy that exercises the generic assembler in several
    setter/getter-naming configurations."""
    self.testData = tempfile.mkdtemp(dir=ROOT,
                                     prefix='TestGenericAssembler-')
    self.firstRepoPath = os.path.join(self.testData, 'repo1')
    self.secondRepoPath = os.path.join(self.testData, 'repo2')
    self.objA = dpTest.TestObject("abc")
    self.objB = dpTest.TestObject("def")
    datasets = {}
    datasets['basicObject1'] = {
        'python': 'lsst.daf.persistence.test.TestObject',
        'template': 'basic/id%(id)s.pickle',
        'storage': 'PickleStorage'}
    datasets['basicObject2'] = {
        'python': 'lsst.daf.persistence.test.TestObject',
        'template': 'basic/name%(name)s.pickle',
        'storage': 'PickleStorage'}
    # No assembler or disassembler specified: setter names are inferred
    # from the component names.
    datasets['basicPair'] = {
        'python': 'lsst.daf.persistence.test.TestObjectPair',
        'composite': {'a': {'datasetType': 'basicObject1'},
                      'b': {'datasetType': 'basicObject2'}}}
    # "generic assembler default constructor pair": component names match
    # the argument names of TestObjectPair.__init__, so the generic
    # assembler can use the default ctor; no assembler/disassembler given.
    datasets['gaDefCtorPair'] = {
        'python': 'lsst.daf.persistence.test.TestObjectPair',
        'composite': {
            'objA': {'datasetType': 'basicObject1', 'getter': 'get_a'},
            'objB': {'datasetType': 'basicObject2', 'getter': 'get_b'}}}
    # Component names here match neither __init__ argument names nor the
    # python object's set functions, so setters/getters are explicit.
    datasets['gaPairWithSetter'] = {
        'python': 'lsst.daf.persistence.test.TestObjectPair',
        'composite': {
            'z': {'datasetType': 'basicObject1',
                  'setter': 'set_a', 'getter': 'get_a'},
            'x': {'datasetType': 'basicObject2',
                  'setter': 'set_b', 'getter': 'get_b'}}}
    # Simple object whose setter and getter use underscore-separated names.
    datasets['underscoreSetter'] = {
        'python': 'lsst.daf.persistence.test.TestObjectUnderscoreSetter',
        'composite': {'foo': {'datasetType': 'basicObject1'}}}
    # Simple object whose setter and getter use camelCase names.
    datasets['camelCaseSetter'] = {
        'python': 'lsst.daf.persistence.test.TestObjectCamelCaseSetter',
        'composite': {'foo': {'datasetType': 'basicObject1'}}}
    self.policy = dafPersist.Policy({'camera': 'lsst.afw.cameraGeom.Camera',
                                     'datasets': datasets})
    repoArgs = dafPersist.RepositoryArgs(
        root=self.firstRepoPath,
        policy=self.policy,
        mapper='lsst.obs.base.test.CompositeMapper')
    butler = dafPersist.Butler(outputs=repoArgs)
    butler.put(self.objA, 'basicObject1', dataId={'id': 'foo'})
    butler.put(self.objB, 'basicObject2', dataId={'name': 'bar'})
    # Drop the butler so the repository is finalized before any test runs.
    del butler
    del repoArgs
def testSwiftStorage(self):
    """Verify that SwiftStorage implements all the StorageInterface
    functions.
    """
    storage = SwiftStorage(uri=self.uri, create=True)
    self.assertEqual(storage._containerName, self.container1Name)
    self.assertTrue(storage.containerExists())
    # Test containerExists by changing the container name so that it will
    # return false, and then put the name back.
    containerName = storage._containerName
    storage._containerName = "foo"
    self.assertFalse(storage.containerExists())
    storage._containerName = containerName

    testObject = dpTest.TestObject("abc")
    butlerLocation = dp.ButlerLocation(
        pythonType='lsst.daf.persistence.test.TestObject', cppType=None,
        storageName='PickleStorage', locationList='firstTestObject',
        dataId={}, mapper=None, storage=storage)

    # Test writing an object to storage
    storage.write(butlerLocation, testObject)

    # Test getting a local copy of the file in storage.
    localFile = storage.getLocalFile('firstTestObject')
    # Test reading the file in a new object using the localFile's name, as
    # well as using the localFile handle directly. Pickle data must be
    # read in binary mode on Python 3 ('r' text mode would fail in
    # pickle.load); the by-name handle is closed via the context manager
    # instead of being leaked.
    with open(localFile.name, 'rb') as namedHandle:
        for f in (namedHandle, localFile):
            if sys.version_info.major >= 3:
                obj = pickle.load(f, encoding="latin1")
            else:
                obj = pickle.load(f)
            self.assertEqual(testObject, obj)

    # Test reading the butlerLocation, should return the object instance.
    reloadedObject = storage.read(butlerLocation)
    self.assertEqual(testObject, reloadedObject[0])

    # Test the 'exists' function with a string
    self.assertTrue(storage.exists('firstTestObject'))
    self.assertFalse(storage.exists('secondTestObject'))

    # Test the 'exists' function with a ButlerLocation. (note that most of
    # the butler location fields are unused in exists and so are set to
    # None here.)
    location = dp.ButlerLocation(pythonType=None, cppType=None,
                                 storageName=None,
                                 locationList=['firstTestObject'],
                                 dataId={}, mapper=None, storage=None)
    self.assertTrue(storage.exists(location))
    location = dp.ButlerLocation(pythonType=None, cppType=None,
                                 storageName=None,
                                 locationList=['secondTestObject'],
                                 dataId={}, mapper=None, storage=None)
    self.assertFalse(storage.exists(location))

    # Test the 'instanceSearch' function, with and without the fits header
    # extension
    self.assertEqual(storage.instanceSearch('firstTestObject'),
                     ['firstTestObject'])
    self.assertEqual(storage.instanceSearch('firstTestObject[1]'),
                     ['firstTestObject[1]'])
    self.assertEqual(storage.instanceSearch('first*Object'),
                     ['firstTestObject'])
    self.assertEqual(storage.instanceSearch('*TestObject[1]'),
                     ['firstTestObject[1]'])
    self.assertIsNone(storage.instanceSearch('secondTestObject'))
    self.assertIsNone(storage.instanceSearch('secondTestObject[1]'))

    # Test the 'search' function
    self.assertEqual(storage.search(self.uri, 'firstTestObject'),
                     ['firstTestObject'])

    # Test the copy function
    storage.copyFile('firstTestObject', 'secondTestObject')
    with self.assertRaises(RuntimeError):
        storage.copyFile('thirdTestObject', 'fourthTestObject')

    # Test locationWithRoot
    self.assertEqual(storage.locationWithRoot('firstTestObject'),
                     self.uri + '/' + 'firstTestObject')

    # Test getRepositoryCfg and putRepositoryCfg
    repositoryCfg = dp.RepositoryCfg.makeFromArgs(
        dp.RepositoryArgs(root=self.uri, mapper=TestMapper), parents=None)
    storage.putRepositoryCfg(repositoryCfg)
    reloadedRepoCfg = storage.getRepositoryCfg(self.uri)
    self.assertEqual(repositoryCfg, reloadedRepoCfg)

    # Test getting a non-existant RepositoryCfg
    self.assertIsNone(storage.getRepositoryCfg(self.uri2))

    # Test getting the mapper class from the repoCfg in the repo.
    mapper = SwiftStorage.getMapperClass(self.uri)
    self.assertEqual(mapper, TestMapper)

    # Test for a repoCfg that resides outside its repository; it has a
    # root that is not the same as its location.
    repositoryCfg = dp.RepositoryCfg.makeFromArgs(
        dp.RepositoryArgs(root='foo/bar/baz',
                          mapper='lsst.obs.base.CameraMapper'),
        parents=None)
    storage.putRepositoryCfg(repositoryCfg, loc=self.uri)
    reloadedRepoCfg = storage.getRepositoryCfg(self.uri)
    self.assertEqual(repositoryCfg, reloadedRepoCfg)

    storage.deleteContainer()
    self.assertFalse(storage.containerExists())