Example #1
    def test_append_1_0_5(self):
        '''Test whether we can append to files made by hdmf version 1.0.5'''
        foo = Foo('foo3', [10, 20, 30, 40, 50], "I am foo3", 17, 3.14)
        foobucket = FooBucket('foobucket2', [foo])

        with HDF5IO(self.path_1_0_5, manager=self.manager, mode='a') as io:
            read_foofile = io.read()
            read_foofile.add_bucket(foobucket)
            io.write(read_foofile)

        with HDF5IO(self.path_1_0_5, manager=self.manager, mode='r') as io:
            read_foofile = io.read()
            self.assertListEqual(
                read_foofile.buckets['foobucket2'].foos['foo3'].my_data[:].tolist(),
                foo.my_data)
Example #2
    def test_append_1_0_5(self):
        '''Test whether we can append to files made by hdmf version 1.0.5'''
        foo = Foo('foo3', [10, 20, 30, 40, 50], "I am foo3", 17, 3.14)
        foobucket = FooBucket('foobucket2', [foo])

        with HDF5IO(self.path_1_0_5, manager=self.manager, mode='a') as io:
            read_foofile = io.read()
            read_foofile.buckets.append(foobucket)
            foobucket.parent = read_foofile
            io.write(read_foofile)

        with HDF5IO(self.path_1_0_5, manager=self.manager, mode='r') as io:
            read_foofile = io.read()
            # workaround for the fact that order of buckets is not maintained
            for bucket in read_foofile.buckets:
                if bucket.name == 'foobucket2':
                    self.assertListEqual(bucket.foos[0].my_data[:].tolist(), foo.my_data)
Example #3
    def test_link_resolve(self):
        foo1 = Foo('foo1', [0, 1, 2, 3, 4], "I am foo1", 17, 3.14)
        bucket1 = FooBucket('test_bucket1', [foo1])
        foo2 = Foo('foo2', [5, 6, 7, 8, 9], "I am foo2", 34, 6.28)
        bucket2 = FooBucket('test_bucket2', [foo1, foo2])
        foofile = FooFile('test_foofile', [bucket1, bucket2])

        with HDF5IO(self.path, 'w', manager=_get_manager()) as io:
            io.write(foofile)

        with HDF5IO(self.path, 'r', manager=_get_manager()) as io:
            foofile_read = io.read()
        b = foofile_read.buckets
        b1, b2 = (b[0], b[1]) if b[0].name == 'test_bucket1' else (b[1], b[0])
        f = b2.foos
        f1, f2 = (f[0], f[1]) if f[0].name == 'foo1' else (f[1], f[0])
        self.assertIs(b1.foos[0], f1)
Example #4
    def setUp(self):
        self.test_temp_file = tempfile.NamedTemporaryFile()

        # On Windows h5py cannot truncate an open file in write mode.
        # The temp file will be closed before h5py truncates it
        # and will be removed during the tearDown step.
        self.test_temp_file.close()
        self.io = HDF5IO(self.test_temp_file.name, mode='a')
        self.f = self.io._file
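A matching tearDown is not part of this excerpt. A minimal sketch of what the cleanup could look like, assuming os is imported and the attribute names from the setUp above:

    def tearDown(self):
        # Close the HDF5IO (and its underlying h5py file) before deleting the temp file
        self.io.close()
        if os.path.exists(self.test_temp_file.name):
            os.remove(self.test_temp_file.name)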
Example #5
    def setUp(self):
        numfiles = 3
        base_name = "test_multifile_hdf5_%d.h5"
        self.test_temp_files = [base_name % i for i in range(numfiles)]

        # On Windows h5py cannot truncate an open file in write mode, so plain
        # file names are used here instead of open NamedTemporaryFile handles;
        # the files will be removed during the tearDown step.
        self.io = [HDF5IO(i, mode='w', manager=_get_manager()) for i in self.test_temp_files]
        self.f = [i._file for i in self.io]
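Again, the tearDown is not shown. A minimal sketch, assuming os is imported and the attributes from the setUp above:

    def tearDown(self):
        # Close every HDF5IO (each holds an open h5py file), then delete the files
        for io in self.io:
            io.close()
        for path in self.test_temp_files:
            if os.path.exists(path):
                os.remove(path)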
Example #6
    def test_read_1_0_5(self):
        '''Test whether we can read files made by hdmf version 1.0.5'''
        with HDF5IO(self.path_1_0_5, manager=self.manager, mode='r') as io:
            read_foofile = io.read()
            self.assertTrue(len(read_foofile.buckets) == 1)
            # workaround for the fact that order of foos is not maintained
            for foo in read_foofile.buckets[0].foos:
                if foo.name == 'foo1':
                    self.assertListEqual(foo.my_data[:].tolist(), [0, 1, 2, 3, 4])
                if foo.name == 'foo2':
                    self.assertListEqual(foo.my_data[:].tolist(), [5, 6, 7, 8, 9])
Example #7
    def test_read_1_0_5(self):
        '''Test whether we can read files made by hdmf version 1.0.5'''
        with HDF5IO(self.path_1_0_5, manager=self.manager, mode='r') as io:
            read_foofile = io.read()
            self.assertTrue(len(read_foofile.buckets) == 1)
            self.assertListEqual(
                read_foofile.buckets['test_bucket'].foos['foo1'].my_data[:].tolist(),
                [0, 1, 2, 3, 4])
            self.assertListEqual(
                read_foofile.buckets['test_bucket'].foos['foo2'].my_data[:].tolist(),
                [5, 6, 7, 8, 9])
Example #8
    def test_copy_file_with_external_links(self):

        # Setup all the data we need
        foo1 = Foo('foo1', [0, 1, 2, 3, 4], "I am foo1", 17, 3.14)
        bucket1 = FooBucket('test_bucket1', [foo1])

        foofile1 = FooFile('test_foofile1', buckets=[bucket1])

        # Write the first file
        self.io[0].write(foofile1)
        bucket1_read = self.io[0].read()

        # Create the second file

        foo2 = Foo('foo2', bucket1_read.buckets[0].foos[0].my_data, "I am foo2", 34, 6.28)

        bucket2 = FooBucket('test_bucket2', [foo2])
        foofile2 = FooFile('test_foofile2', buckets=[bucket2])
        # Write the second file
        self.io[1].write(foofile2)
        self.io[1].close()
        self.io[0].close()  # Don't forget to close the first file too

        # Copy the file
        self.io[2].close()
        HDF5IO.copy_file(source_filename=self.test_temp_files[1],
                         dest_filename=self.test_temp_files[2],
                         expand_external=True,
                         expand_soft=False,
                         expand_refs=False)

        # Test that everything is working as expected
        # Confirm that our original data file is correct
        f1 = File(self.test_temp_files[0])
        self.assertIsInstance(f1.get('/buckets/test_bucket1/foo_holder/foo1/my_data', getlink=True), HardLink)
        # Confirm that we successfully created an External Link in our second file
        f2 = File(self.test_temp_files[1])
        self.assertIsInstance(f2.get('/buckets/test_bucket2/foo_holder/foo2/my_data', getlink=True), ExternalLink)
        # Confirm that we successfully resolved the External Link when we copied our second file
        f3 = File(self.test_temp_files[2])
        self.assertIsInstance(f3.get('/buckets/test_bucket2/foo_holder/foo2/my_data', getlink=True), HardLink)
Example #9
    def test_cache_spec(self):
        self.test_temp_file = tempfile.NamedTemporaryFile()
        self.test_temp_file.close()
        # On Windows h5py cannot truncate an open file in write mode.
        # The temp file will be closed before h5py truncates it
        # and will be removed during the tearDown step.
        self.io = HDF5IO(self.test_temp_file.name, manager=self.manager, mode='w')

        # Setup all the data we need
        foo1 = Foo('foo1', [0, 1, 2, 3, 4], "I am foo1", 17, 3.14)
        foo2 = Foo('foo2', [5, 6, 7, 8, 9], "I am foo2", 34, 6.28)
        foobucket = FooBucket('test_bucket', [foo1, foo2])
        foofile = FooFile('test_foofile', [foobucket])

        # Write the first file
        self.io.write(foofile, cache_spec=True)
        self.io.close()
        ns_catalog = NamespaceCatalog()
        HDF5IO.load_namespaces(ns_catalog, self.test_temp_file.name)
        self.assertEqual(ns_catalog.namespaces, ('test_core',))
        source_types = self.__get_types(self.io.manager.namespace_catalog)
        read_types = self.__get_types(ns_catalog)
        self.assertSetEqual(source_types, read_types)
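The __get_types helper is not included in this excerpt. A plausible sketch, assuming NamespaceCatalog.get_namespace(name) returns a dict-like namespace spec with a 'schema' list and NamespaceCatalog.get_types(source) lists the type names defined in a given schema source:

    def __get_types(self, catalog):
        # Collect every type name registered across all namespaces in the catalog
        types = set()
        for ns_name in catalog.namespaces:
            ns = catalog.get_namespace(ns_name)
            for source in ns['schema']:
                types.update(catalog.get_types(source['source']))
        return types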
Example #10
    def test__chunked_iter_fill_list_matched_buffer_size(self):
        a = np.arange(30).reshape(5, 2, 3)
        dci = DataChunkIterator(data=a.tolist(), buffer_size=1)
        my_dset = HDF5IO.__chunked_iter_fill__(self.f, 'test_dataset', dci)
        self.assertTrue(np.all(my_dset[:] == a))
        self.assertTupleEqual(my_dset.shape, a.shape)
Example #11
    def test__chunked_iter_fill_iterator_unmatched_buffer_size(self):
        dci = DataChunkIterator(data=range(10), buffer_size=3)
        my_dset = HDF5IO.__chunked_iter_fill__(self.f, 'test_dataset', dci)
        self.assertListEqual(my_dset[:].tolist(), list(range(10)))
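The __chunked_iter_fill__ examples above assume self.f is an open, writable h5py File. A minimal setUp sketch, assuming File is h5py.File and tempfile is imported:

    def setUp(self):
        self.test_temp_file = tempfile.NamedTemporaryFile()
        # Close the handle so h5py can truncate the file on Windows;
        # the file itself is removed in tearDown.
        self.test_temp_file.close()
        self.f = File(self.test_temp_file.name, 'w')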