def test_copy_file_with_external_links(self):
    """Verify that HDF5IO.copy_file resolves external links when expand_external=True.

    Builds two NWB files: file 0 holds real data, file 1 reuses the dataset
    read back from file 0 (which the backend stores as an HDF5 ExternalLink).
    File 1 is then copied to file 2 with ``expand_external=True`` and the test
    checks the link type in each file via h5py.

    Relies on fixtures ``self.io`` (three HDF5IO instances) and
    ``self.test_temp_files`` (three matching temp files) — presumably created
    in this test case's setUp; verify against the enclosing class.
    """
    # Setup all the data we need
    start_time = datetime(2017, 4, 3, 11, 0, 0)
    create_date = datetime(2017, 4, 15, 12, 0, 0)
    data = np.arange(1000).reshape((100, 10))
    timestamps = np.arange(100)

    # Create the first file
    nwbfile1 = NWBFile(source='PyNWB tutorial',
                       session_description='demonstrate external files',
                       identifier='NWBE1',
                       session_start_time=start_time,
                       file_create_date=create_date)
    test_ts1 = TimeSeries(name='test_timeseries',
                          source='PyNWB tutorial',
                          data=data,
                          unit='SIunit',
                          timestamps=timestamps)
    nwbfile1.add_acquisition(test_ts1)

    # Write the first file
    self.io[0].write(nwbfile1)
    nwbfile1_read = self.io[0].read()

    # Create the second file.
    # NOTE(review): the identifier is 'NWBE1' again, same as the first file —
    # looks unintentional but does not affect what this test asserts.
    nwbfile2 = NWBFile(source='PyNWB tutorial',
                       session_description='demonstrate external files',
                       identifier='NWBE1',
                       session_start_time=start_time,
                       file_create_date=create_date)
    # Reusing the dataset read from file 0 is what should make the backend
    # write an ExternalLink instead of copying the data.
    test_ts2 = TimeSeries(name='test_timeseries',
                          source='PyNWB tutorial',
                          data=nwbfile1_read.get_acquisition('test_timeseries').data,
                          unit='SIunit',
                          timestamps=timestamps)
    nwbfile2.add_acquisition(test_ts2)

    # Write the second file
    self.io[1].write(nwbfile2)
    self.io[1].close()
    self.io[0].close()  # Don't forget to close the first file too

    # Copy the file; the destination handle must be closed first so copy_file
    # can write to it.
    self.io[2].close()
    HDF5IO.copy_file(source_filename=self.test_temp_files[1].name,
                     dest_filename=self.test_temp_files[2].name,
                     expand_external=True,
                     expand_soft=False,
                     expand_refs=False)

    # Test that everything is working as expected
    # Confirm that our original data file is correct
    f1 = File(self.test_temp_files[0].name)
    self.assertTrue(isinstance(f1.get('/acquisition/test_timeseries/data', getlink=True),
                               HardLink))
    # Confirm that we successfully created an External Link in our second file
    f2 = File(self.test_temp_files[1].name)
    self.assertTrue(isinstance(f2.get('/acquisition/test_timeseries/data', getlink=True),
                               ExternalLink))
    # Confirm that we successfully resolved the External Link when we copied our second file
    f3 = File(self.test_temp_files[2].name)
    self.assertTrue(isinstance(f3.get('/acquisition/test_timeseries/data', getlink=True),
                               HardLink))
def test__chunked_iter_fill_numpy_unmatched_buffer_size(self):  # noqa: F811
    """Write a 3-D list through DataChunkIterator with a buffer size that
    does not divide the data evenly, and check values and shape round-trip."""
    expected = np.arange(30).reshape(5, 2, 3)
    chunk_iter = DataChunkIterator(data=expected.tolist(), buffer_size=3)
    dset = HDF5IO.__chunked_iter_fill__(self.f, 'test_dataset', chunk_iter)
    self.assertTrue(np.all(dset[:] == expected))
    self.assertTupleEqual(dset.shape, expected.shape)
def setUp(self):
    """Create a closed temp file and open an HDF5IO backend on its path."""
    temp_file = tempfile.NamedTemporaryFile()
    # h5py cannot truncate a file that is still open in write mode on
    # Windows, so close the OS handle now; the file itself is removed in
    # tearDown.
    temp_file.close()
    self.test_temp_file = temp_file
    self.io = HDF5IO(temp_file.name)
    self.f = self.io._file
def test__chunked_iter_fill_iterator_unmatched_buffer_size(self):
    """Write a plain range iterator through DataChunkIterator with a buffer
    size that does not divide the data evenly, and check the stored values."""
    chunk_iter = DataChunkIterator(data=range(10), buffer_size=3)
    dset = HDF5IO.__chunked_iter_fill__(self.f, 'test_dataset', chunk_iter)
    self.assertListEqual(dset[:].tolist(), list(range(10)))