Example #1
    def test_save_state(self):
        file_size = len(self.cache_data)
        max_file_size = 2*file_size
        sim_data = array.array('c', self.cache_data + "\x00"*(max_file_size-file_size))

        # Do random I/O on a file
        tmpf = CryptFile(self.file_name, key=b"a"*32, mode='w+b')
        f = BlockCachedFile(tmpf, file_size, block_size=7)
        file_size = self._do_random_rw(f, sim_data, file_size, max_file_size, count=17)

        # Save state
        state_file = CryptFile(self.file_name + '.state', key=b"b"*32, mode='w+b')
        f.save_state(state_file)
        state_file.close()
        f.close()

        # Restore state
        state_file = CryptFile(self.file_name + '.state', key=b"b"*32, mode='rb')
        tmpf = CryptFile(self.file_name, key=b"a"*32, mode='r+b')
        f = BlockCachedFile.restore_state(tmpf, state_file)
        state_file.close()

        # More random I/O
        for k in range(3):
            file_size = self._do_random_rw(f, sim_data, file_size, max_file_size, count=15)
        f.close()
Example #2
    def test_save_state(self):
        file_size = len(self.cache_data)
        max_file_size = 2 * file_size
        sim_data = array.array(
            'c', self.cache_data + "\x00" * (max_file_size - file_size))

        # Do random I/O on a file
        tmpf = CryptFile(self.file_name, key=b"a" * 32, mode='w+b')
        f = BlockCachedFile(tmpf, file_size, block_size=7)
        file_size = self._do_random_rw(f,
                                       sim_data,
                                       file_size,
                                       max_file_size,
                                       count=17)

        # Save state
        state_file = CryptFile(self.file_name + '.state',
                               key=b"b" * 32,
                               mode='w+b')
        f.save_state(state_file)
        state_file.close()
        f.close()

        # Restore state
        state_file = CryptFile(self.file_name + '.state',
                               key=b"b" * 32,
                               mode='rb')
        tmpf = CryptFile(self.file_name, key=b"a" * 32, mode='r+b')
        f = BlockCachedFile.restore_state(tmpf, state_file)
        state_file.close()

        # More random I/O
        for k in range(3):
            file_size = self._do_random_rw(f,
                                           sim_data,
                                           file_size,
                                           max_file_size,
                                           count=15)
        f.close()
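
Examples 1 and 2 are the same test shown with two different line-wrapping styles: write through a BlockCachedFile layered on an encrypted CryptFile, persist the cache bookkeeping with save_state(), and later rebuild the cache with BlockCachedFile.restore_state(). The sketch below strips away the test harness to show just that round trip; the import paths, file names, and the size/block_size values are illustrative assumptions, not taken from the examples.

    # Minimal save/restore round trip distilled from Examples 1-2.
    from cryptfile import CryptFile               # hypothetical import path
    from blockcachedfile import BlockCachedFile   # hypothetical import path

    DATA_KEY = b"a" * 32    # CryptFile keys are 32 bytes in the examples
    STATE_KEY = b"b" * 32   # a separate key for the state file

    # First session: an encrypted backing file plus a block cache over it.
    backing = CryptFile('cache.data', key=DATA_KEY, mode='w+b')
    cache = BlockCachedFile(backing, 1024, block_size=7)  # 1024 = logical file size
    # ... read/write through `cache` here ...

    # Persist the cache state into a second encrypted file, then close.
    state = CryptFile('cache.state', key=STATE_KEY, mode='w+b')
    cache.save_state(state)
    state.close()
    cache.close()  # the tests close only the BlockCachedFile, not the backing file

    # Later session: reopen both files and rebuild the cache from the saved state.
    state = CryptFile('cache.state', key=STATE_KEY, mode='rb')
    backing = CryptFile('cache.data', key=DATA_KEY, mode='r+b')
    cache = BlockCachedFile.restore_state(backing, state)
    state.close()
    # ... continue reading/writing through `cache`, then cache.close() when done ...
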
Example #3
    def __init__(self, cachedb, upath, io, filecap, persistent=False):
        self.upath = upath
        self.closed = False
        self.refcnt = 0
        self.persistent = persistent
        self.invalidated = False

        # Use per-file keys for different files, for safer fallback
        # in the extremely unlikely event of SHA512 hash collisions
        filename, key = cachedb.get_filename_and_key(upath)
        filename_state, key_state = cachedb.get_filename_and_key(
            upath, b'state')
        filename_data, key_data = cachedb.get_filename_and_key(upath, b'data')

        self.lock = threading.RLock()
        self.dirty = False
        self.f = None
        self.f_state = None
        self.f_data = None

        self.stream_f = None
        self.stream_offset = 0
        self.stream_data = []

        open_complete = False

        try:
            if filecap is None:
                # Create new file
                raise ValueError()

            # Reuse cached metadata
            self.f = CryptFile(filename, key=key, mode='r+b')
            self.info = json_zlib_load(self.f)

            if persistent:
                # Reuse cached data
                self.f_state = CryptFile(filename_state,
                                         key=key_state,
                                         mode='r+b')
                self.f_data = CryptFile(filename_data,
                                        key=key_data,
                                        mode='r+b')
                self.block_cache = BlockCachedFile.restore_state(
                    self.f_data, self.f_state)
                open_complete = True
        except (IOError, OSError, ValueError):
            open_complete = False
            if self.f is not None:
                self.f.close()
                self.f = None
            if self.f_state is not None:
                self.f_state.close()
            if self.f_data is not None:
                self.f_data.close()

        if not open_complete:
            if self.f is None:
                self.f = CryptFile(filename, key=key, mode='w+b')
                try:
                    if filecap is not None:
                        self._load_info(filecap, io, iscap=True)
                    else:
                        self.info = ['file', {u'size': 0}]
                        self.dirty = True
                except IOError as err:
                    os.unlink(filename)
                    self.f.close()
                    raise

            # Create a data file
            self.f_data = CryptFile(filename_data, key=key_data, mode='w+b')

            # Block cache on top of data file
            self.block_cache = BlockCachedFile(self.f_data,
                                               self.info[1][u'size'])

            # Block data state file
            self.f_state = CryptFile(filename_state, key=key_state, mode='w+b')

        os.utime(self.f.path, None)
        os.utime(self.f_data.path, None)
        os.utime(self.f_state.path, None)
Example #4
    def __init__(self, cachedb, upath, io, filecap, persistent=False):
        self.upath = upath
        self.closed = False
        self.refcnt = 0
        self.persistent = persistent
        self.invalidated = False

        # Use per-file keys for different files, for safer fallback
        # in the extremely unlikely event of SHA512 hash collisions
        filename, key = cachedb.get_filename_and_key(upath)
        filename_state, key_state = cachedb.get_filename_and_key(upath, b'state')
        filename_data, key_data = cachedb.get_filename_and_key(upath, b'data')

        self.lock = threading.RLock()
        self.dirty = False
        self.f = None
        self.f_state = None
        self.f_data = None

        self.stream_f = None
        self.stream_offset = 0
        self.stream_data = []

        open_complete = False

        try:
            if filecap is None:
                # Create new file
                raise ValueError()

            # Reuse cached metadata
            self.f = CryptFile(filename, key=key, mode='r+b')
            self.info = json_zlib_load(self.f)

            if persistent:
                # Reuse cached data
                self.f_state = CryptFile(filename_state, key=key_state, mode='r+b')
                self.f_data = CryptFile(filename_data, key=key_data, mode='r+b')
                self.block_cache = BlockCachedFile.restore_state(self.f_data, self.f_state)
                open_complete = True
        except (IOError, OSError, ValueError):
            open_complete = False
            if self.f is not None:
                self.f.close()
                self.f = None
            if self.f_state is not None:
                self.f_state.close()
            if self.f_data is not None:
                self.f_data.close()

        if not open_complete:
            if self.f is None:
                self.f = CryptFile(filename, key=key, mode='w+b')
                try:
                    if filecap is not None:
                        self._load_info(filecap, io, iscap=True)
                    else:
                        self.info = ['file', {u'size': 0}]
                        self.dirty = True
                except IOError as err:
                    os.unlink(filename)
                    self.f.close()
                    raise

            # Create a data file
            self.f_data = CryptFile(filename_data, key=key_data, mode='w+b')

            # Block cache on top of data file
            self.block_cache = BlockCachedFile(self.f_data, self.info[1][u'size'])

            # Block data state file
            self.f_state = CryptFile(filename_state, key=key_state, mode='w+b')

        os.utime(self.f.path, None)
        os.utime(self.f_data.path, None)
        os.utime(self.f_state.path, None)
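
Examples 3 and 4 show the same constructor with two wrapping styles. The pattern it implements: derive per-file cache filenames and keys from the cache database, try to reuse the existing encrypted metadata, data, and state files, and fall back to creating fresh ones when anything is missing or unreadable. The condensed sketch below keeps only the data/state half of that control flow; open_block_cache and its size parameter are illustrative names not present in the source, the CryptFile/BlockCachedFile imports are assumed to be the same as in the sketch after Example 2, and the metadata (info) file handling of the full constructor is omitted.

    # Reuse-or-rebuild pattern for the encrypted data/state pair (Examples 3-4).
    def open_block_cache(cachedb, upath, size):   # illustrative helper, not from the source
        # Per-file cache filenames and keys, derived as in the constructor above.
        filename_state, key_state = cachedb.get_filename_and_key(upath, b'state')
        filename_data, key_data = cachedb.get_filename_and_key(upath, b'data')

        f_state = None
        f_data = None
        try:
            # Fast path: reuse the cached data file and its saved block-cache state.
            f_state = CryptFile(filename_state, key=key_state, mode='r+b')
            f_data = CryptFile(filename_data, key=key_data, mode='r+b')
            block_cache = BlockCachedFile.restore_state(f_data, f_state)
        except (IOError, OSError, ValueError):
            # Anything missing or unreadable: close partial handles and start fresh.
            if f_state is not None:
                f_state.close()
            if f_data is not None:
                f_data.close()
            f_data = CryptFile(filename_data, key=key_data, mode='w+b')
            f_state = CryptFile(filename_state, key=key_state, mode='w+b')
            block_cache = BlockCachedFile(f_data, size)
        return f_data, f_state, block_cache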