def test_ro(self):
    '''
    Read-only mode: any changes in the uncompressed file will be lost on
    exit from the context manager.
    '''
    # Read-only mode: the file is uncompressed, but it is not recompressed
    # on exit from the `with` block, so any changes performed on the file
    # are lost.
    for filename in self.filenames:
        # Create the contents of the file
        self.generateContent(filename)

        # Now let's open it in read-only mode:
        with ReadOnlyCompressedFile(filename) as uncompressed:
            with open(uncompressed, 'a') as f:
                # This is ineffective: the compressed file will not be
                # changed
                text = 'WARNING: this will not be saved in the file!\n'
                f.write(text)

        # The content of the file should be as created by generateContent()
        expected = [
            ' 1. This is the first line of content\n',
        ]
        with ReadOnlyCompressedFile(filename) as uncompressed:
            with open(uncompressed) as f:
                self.assertListEqual(f.readlines(), expected)
def test_exc(self):
    '''
    Any exception raised in the context manager should be propagated
    through. Changes made in the context manager should not be saved.
    '''
    def _raiseValueError(filename):
        with CompressedFile(filename, create=True) as uncompressed:
            with open(uncompressed, 'w+') as f:
                text = ' This is a failed content, ' \
                    'it should not appear in the compressed file\n'
                f.write(text)
            raise ValueError

    for filename in self.filenames:
        self.assertRaises(ValueError, _raiseValueError, filename)
        # File should not have been created
        self.assertFalse(os.path.exists(filename))

        self.generateContent(filename)
        self.assertRaises(ValueError, _raiseValueError, filename)
        # File should still exist
        self.assertTrue(os.path.exists(filename))
        # The content of the file should not have changed
        expected = [
            ' 1. This is the first line of content\n',
        ]
        with ReadOnlyCompressedFile(filename) as uncompressed:
            with open(uncompressed) as f:
                self.assertListEqual(f.readlines(), expected)
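# The two tests above exercise a compress-on-exit context manager. A minimal
# sketch of that idea follows, assuming gzip compression and a temporary
# uncompressed working copy; the real CompressedFile / ReadOnlyCompressedFile
# implementations may differ, and the _Sketch* names are illustrative only.
import gzip
import os
import shutil
import tempfile


class _SketchCompressedFile(object):
    '''Decompress on entry, recompress only on a clean exit.'''

    def __init__(self, file_path, create=False):
        self.file_path = file_path
        self.create = create
        self.uncompressed_path = None

    def load(self):
        # Unpack the archive (if any) into a temporary working copy and
        # return the path of that copy.
        handle, self.uncompressed_path = tempfile.mkstemp()
        os.close(handle)
        if os.path.isfile(self.file_path):
            with gzip.open(self.file_path, 'rb') as source:
                with open(self.uncompressed_path, 'wb') as target:
                    shutil.copyfileobj(source, target)
        elif not self.create:
            raise IOError('File not found: %s' % self.file_path)
        return self.uncompressed_path

    def save(self):
        # Recompress the working copy back over the original archive.
        with open(self.uncompressed_path, 'rb') as source:
            with gzip.open(self.file_path, 'wb') as target:
                shutil.copyfileobj(source, target)

    def __enter__(self):
        return self.load()

    def __exit__(self, exc_type, exc_value, traceback):
        try:
            if exc_type is None:
                # Only a clean exit is saved, so an exception raised inside
                # the `with` block discards any changes (see test_exc).
                self.save()
        finally:
            os.remove(self.uncompressed_path)
        return False  # propagate any exception from the `with` block


class _SketchReadOnlyCompressedFile(_SketchCompressedFile):
    '''Never recompress: changes to the working copy are always discarded
    (see test_ro).'''

    def save(self):
        pass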
def __init__(self, file_path_or_obj, cache_param_list=False, create=False,
             read_only=False):
    '''
    Opens an HDF file (or accepts an already open h5py.File object) -
    will create the file if it does not exist when create=True!

    :param file_path_or_obj: Can be either the path to an HDF file or an
        already opened HDF file object.
    :type file_path_or_obj: str or h5py.File
    :param cache_param_list: Names of parameters to cache when accessed.
        A value of True will result in all parameters being cached.
    :type cache_param_list: [str] or bool
    :param create: Allow creation of the file if it does not exist.
    :type create: bool
    :param read_only: Open the file for reading only; changes made to the
        uncompressed file are not recompressed on exit.
    :type read_only: bool
    '''
    if isinstance(file_path_or_obj, h5py.File):
        hdf_exists = True
        self.hdf = file_path_or_obj
        if self.hdf.mode != 'r+':
            raise ValueError("hdf_file requires mode 'r+'.")
        self.file_path = os.path.abspath(self.hdf.filename)
    else:
        hdf_exists = os.path.isfile(file_path_or_obj)
        if not create and not hdf_exists:
            raise IOError('File not found: %s' % file_path_or_obj)
        self.file_path = os.path.abspath(file_path_or_obj)
        if read_only:
            self.compressor = ReadOnlyCompressedFile(self.file_path)
            mode = 'r'
        else:
            self.compressor = CompressedFile(self.file_path)
            mode = 'a'
        uncompressed_path = self.compressor.load()
        self.hdf = h5py.File(uncompressed_path, mode=mode)

    self.hdfaccess_version = self.hdf.attrs.get('hdfaccess_version', 1)
    if hdf_exists:
        # default version is 1
        assert self.hdfaccess_version == HDFACCESS_VERSION
    else:
        # just created this file, add the current version
        self.hdf.attrs['hdfaccess_version'] = HDFACCESS_VERSION

    if 'series' not in self.hdf.keys():
        # The 'series' group is required for storing parameters.
        self.hdf.create_group('series')

    # cache keys as accessing __iter__ on hdf groups is very slow
    self._cache = defaultdict(SortedSet)
    # cache parameters that are used often
    self._params_cache = {}
    # this is the list of parameters to cache
    if cache_param_list is True:
        cache_param_list = self.keys()
    elif cache_param_list is False:
        cache_param_list = []
    self.cache_param_list = cache_param_list
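# A short usage sketch of the constructor above. The class name hdf_file is
# inferred from the "hdf_file requires mode 'r+'." error message; the helper
# below is hypothetical and relies only on behaviour visible in __init__
# (how the accessor is closed and recompressed is outside this excerpt).
def _open_for_reading(path):
    # read_only=True selects ReadOnlyCompressedFile and mode 'r', so any
    # changes made to the uncompressed working copy are discarded rather
    # than recompressed; a missing file raises IOError because create
    # defaults to False.
    hdf = hdf_file(path, read_only=True)
    # After construction the version has been checked against
    # HDFACCESS_VERSION and a 'series' group is guaranteed to exist.
    return hdf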