# Imports assumed from the surrounding test module (hedged: the hdmf paths
# below are the usual ones, but the module defining Foo, FooBucket, FooFile,
# CORE_NAMESPACE and _get_manager is project-specific and not shown here):
import tempfile

from h5py import File, HardLink, ExternalLink

from hdmf.backends.hdf5 import HDF5IO
from hdmf.build import BuildManager, GroupBuilder, DatasetBuilder, ObjectMapper
from hdmf.spec import GroupSpec, NamespaceCatalog


def test_link_resolve(self):
    foo1 = Foo('foo1', [0, 1, 2, 3, 4], "I am foo1", 17, 3.14)
    bucket1 = FooBucket('test_bucket1', [foo1])
    foo2 = Foo('foo2', [5, 6, 7, 8, 9], "I am foo2", 34, 6.28)
    # foo1 is placed in both buckets, so its second occurrence is written
    # as a link rather than as a second copy
    bucket2 = FooBucket('test_bucket2', [foo1, foo2])
    foofile = FooFile('test_foofile', [bucket1, bucket2])

    with HDF5IO(self.path, 'w', manager=_get_manager()) as io:
        io.write(foofile)

    with HDF5IO(self.path, 'r', manager=_get_manager()) as io:
        foofile_read = io.read()
        b = foofile_read.buckets
        b1, b2 = (b[0], b[1]) if b[0].name == 'test_bucket1' else (b[1], b[0])
        f = b2.foos
        f1, f2 = (f[0], f[1]) if f[0].name == 'foo1' else (f[1], f[0])
        # the link must resolve to the same container instance in both buckets
        self.assertIs(b1.foos[0], f1)
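# A quick way to see the on-disk effect of sharing foo1 across buckets is
# to open the raw file with h5py. This is a sketch, not part of the suite:
# the helper name is made up, and the link class h5py reports for the
# second occurrence depends on how the backend writes shared containers.
def check_shared_foo_link(self):
    with File(self.path, 'r') as f:
        link = f.get('/buckets/test_bucket2/foo_holder/foo1', getlink=True)
        print(type(link))  # e.g. h5py.SoftLink for the linked occurrence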
def setUp(self):
    super(TestNestedBase, self).setUp()
    self.foo_bucket = FooBucket('test_foo_bucket', [
        Foo('my_foo1', list(range(10)), 'value1', 10),
        Foo('my_foo2', list(range(10, 20)), 'value2', 20)
    ])
    self.foo_builders = {
        'my_foo1': GroupBuilder(
            'my_foo1',
            datasets={'my_data': DatasetBuilder('my_data',
                                                list(range(10)),
                                                attributes={'attr2': 10})},
            attributes={'attr1': 'value1',
                        'namespace': CORE_NAMESPACE,
                        'data_type': 'Foo'}),
        'my_foo2': GroupBuilder(
            'my_foo2',
            datasets={'my_data': DatasetBuilder('my_data',
                                                list(range(10, 20)),
                                                attributes={'attr2': 20})},
            attributes={'attr1': 'value2',
                        'namespace': CORE_NAMESPACE,
                        'data_type': 'Foo'}),
    }
    self.setUpBucketBuilder()
    self.setUpBucketSpec()
    self.spec_catalog.register_spec(self.bucket_spec, 'test.yaml')
    self.type_map.register_container_type(CORE_NAMESPACE, 'FooBucket', FooBucket)
    self.type_map.register_map(FooBucket, ObjectMapper)
    self.manager = BuildManager(self.type_map)
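# setUpBucketBuilder and setUpBucketSpec are hooks left to subclasses. A
# minimal sketch of what a subclass might provide is below; the GroupSpec
# arguments follow the hdmf spec API, but the doc strings and the exact
# spec layout are illustrative assumptions, not the project's real specs.
def setUpBucketBuilder(self):
    # the expected builder tree for the bucket, assembled by hand
    self.bucket_builder = GroupBuilder(
        'test_foo_bucket',
        groups=self.foo_builders,
        attributes={'namespace': CORE_NAMESPACE, 'data_type': 'FooBucket'})

def setUpBucketSpec(self):
    # a FooBucket group that may contain any number of Foo groups
    self.bucket_spec = GroupSpec(
        doc='a bucket for holding Foo objects',
        data_type_def='FooBucket',
        groups=[GroupSpec(doc='a Foo', data_type_inc='Foo', quantity='*')])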
def test_copy_file_with_external_links(self):
    # Set up all the data we need
    foo1 = Foo('foo1', [0, 1, 2, 3, 4], "I am foo1", 17, 3.14)
    bucket1 = FooBucket('test_bucket1', [foo1])
    foofile1 = FooFile('test_foofile1', buckets=[bucket1])

    # Write the first file
    self.io[0].write(foofile1)
    bucket1_read = self.io[0].read()

    # Create the second file, reusing the dataset read from the first so
    # that it is written as an external link
    foo2 = Foo('foo2', bucket1_read.buckets[0].foos[0].my_data, "I am foo2", 34, 6.28)
    bucket2 = FooBucket('test_bucket2', [foo2])
    foofile2 = FooFile('test_foofile2', buckets=[bucket2])

    # Write the second file
    self.io[1].write(foofile2)
    self.io[1].close()
    self.io[0].close()  # Don't forget to close the first file too

    # Copy the second file, expanding external links into hard links
    self.io[2].close()
    HDF5IO.copy_file(source_filename=self.test_temp_files[1],
                     dest_filename=self.test_temp_files[2],
                     expand_external=True,
                     expand_soft=False,
                     expand_refs=False)

    # Test that everything is working as expected.
    # Confirm that our original data file is correct
    f1 = File(self.test_temp_files[0])
    self.assertIsInstance(f1.get('/buckets/test_bucket1/foo_holder/foo1/my_data', getlink=True),
                          HardLink)

    # Confirm that we successfully created an ExternalLink in our second file
    f2 = File(self.test_temp_files[1])
    self.assertIsInstance(f2.get('/buckets/test_bucket2/foo_holder/foo2/my_data', getlink=True),
                          ExternalLink)

    # Confirm that the ExternalLink was resolved when we copied the second file
    f3 = File(self.test_temp_files[2])
    self.assertIsInstance(f3.get('/buckets/test_bucket2/foo_holder/foo2/my_data', getlink=True),
                          HardLink)
def test_cache_spec(self):
    self.test_temp_file = tempfile.NamedTemporaryFile()
    # On Windows h5py cannot truncate an open file in write mode, so the
    # temp file is closed here before h5py truncates it; it will be
    # removed during the tearDown step.
    self.test_temp_file.close()
    self.io = HDF5IO(self.test_temp_file.name, manager=self.manager, mode='w')

    # Set up all the data we need
    foo1 = Foo('foo1', [0, 1, 2, 3, 4], "I am foo1", 17, 3.14)
    foo2 = Foo('foo2', [5, 6, 7, 8, 9], "I am foo2", 34, 6.28)
    foobucket = FooBucket('test_bucket', [foo1, foo2])
    foofile = FooFile('test_foofile', [foobucket])

    # Write the file with the spec cached alongside the data
    self.io.write(foofile, cache_spec=True)
    self.io.close()

    # Read the cached namespaces back and compare the type catalogs
    ns_catalog = NamespaceCatalog()
    HDF5IO.load_namespaces(ns_catalog, self.test_temp_file.name)
    self.assertEqual(ns_catalog.namespaces, ('test_core',))
    source_types = self.__get_types(self.io.manager.namespace_catalog)
    read_types = self.__get_types(ns_catalog)
    self.assertSetEqual(source_types, read_types)
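# The comparison above relies on a __get_types helper that is not shown in
# this excerpt. A minimal sketch, assuming get_namespace returns a dict-like
# namespace whose 'schema' entries may carry a 'data_types' list:
def __get_types(self, catalog):
    types = set()
    for ns_name in catalog.namespaces:
        ns = catalog.get_namespace(ns_name)
        for source in ns['schema']:
            types.update(source.get('data_types') or [])
    return types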
def test_build(self):
    container_inst = Foo('my_foo', list(range(10)), 'value1', 10)
    expected = GroupBuilder(
        'my_foo',
        datasets={'my_data': DatasetBuilder('my_data',
                                            list(range(10)),
                                            attributes={'attr2': 10})},
        attributes={'attr1': 'value1',
                    'namespace': CORE_NAMESPACE,
                    'data_type': 'Foo'})
    builder1 = self.manager.build(container_inst)
    self.assertDictEqual(builder1, expected)
def test_construct_memoization(self):
    builder = GroupBuilder(
        'my_foo',
        datasets={'my_data': DatasetBuilder('my_data',
                                            list(range(10)),
                                            attributes={'attr2': 10})},
        attributes={'attr1': 'value1',
                    'namespace': CORE_NAMESPACE,
                    'data_type': 'Foo'})
    expected = Foo('my_foo', list(range(10)), 'value1', 10)  # noqa: F841
    container1 = self.manager.construct(builder)
    container2 = self.manager.construct(builder)
    # constructing the same builder twice must return the same instance
    self.assertIs(container1, container2)
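# BuildManager memoizes in the build direction as well. A companion check
# would look like this (a sketch; test_build_memoization is an assumed
# name, not necessarily present in the original suite):
def test_build_memoization(self):
    container_inst = Foo('my_foo', list(range(10)), 'value1', 10)
    builder1 = self.manager.build(container_inst)
    builder2 = self.manager.build(container_inst)
    # building the same container twice must return the same builder
    self.assertIs(builder1, builder2)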
def setUp(self):
    self.manager = _get_manager()
    self.path = "test_io_hdf5.h5"
    self.foo_builder = GroupBuilder(
        'foo1',
        attributes={'data_type': 'Foo',
                    'namespace': 'test_core',
                    'attr1': "bar"},
        datasets={'my_data': DatasetBuilder('my_data',
                                            list(range(100, 200, 10)),
                                            attributes={'attr2': 17})})
    self.foo = Foo('foo1', list(range(100, 200, 10)), attr1="bar", attr2=17, attr3=3.14)
    # pre-register the container/builder pair so the manager treats the
    # builder as already built for this container
    self.manager.prebuilt(self.foo, self.foo_builder)
    self.builder = GroupBuilder(
        'root',
        source=self.path,
        groups={'test_bucket': GroupBuilder(
            'test_bucket',
            groups={'foo_holder': GroupBuilder(
                'foo_holder',
                groups={'foo1': self.foo_builder})})},
        attributes={'data_type': 'FooFile'})
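# A representative test built on this fixture (a sketch -- the test name
# and the use of write_builder reflect the usual hdmf HDF5IO API, but the
# original suite's tests for this fixture are not shown in this excerpt):
def test_write_builder(self):
    with HDF5IO(self.path, manager=self.manager, mode='w') as io:
        io.write_builder(self.builder)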