def snapshot(self, data):
    """Append an immutable copy of *data* to the archive and return its carvpath.

    Grows the underlying archive by ``len(data)`` bytes, writes the data
    at the newly allocated offset and returns the string carvpath that
    designates the written region.
    """
    chunksize = len(data)
    # Allocate room at the end of the underlying archive.
    offset = self._grow(chunksize=chunksize)
    os.lseek(self.fd, offset, 0)
    # os.write may perform a partial write; loop until every byte of
    # *data* has actually been handed to the kernel.
    written = 0
    while written < chunksize:
        written += os.write(self.fd, data[written:])
    # Build the carvpath string for the freshly written chunk.
    cp = str(
        carvpath._Entity(
            lpmap=self.context.longpathmap,
            maxfstoken=self.context.maxfstoken,
            fragments=[carvpath.Fragment(offset=offset, size=chunksize)]))
    return cp
def newmutable(self, chunksize):
    """Allocate a new mutable chunk of *chunksize* bytes and return its carvpath."""
    # Extend the underlying archive to make room for the new chunk.
    chunkoffset = self._grow(chunksize=chunksize)
    # Wrap the allocated region in a (currently still secret) carvpath
    # entity and render it as a string.
    fragment = carvpath.Fragment(offset=chunkoffset, size=chunksize)
    entity = carvpath._Entity(lpmap=self.context.longpathmap,
                              maxfstoken=self.context.maxfstoken,
                              fragments=[fragment])
    return str(entity)
def newmutable(self, chunksize):
    """Grow the archive by *chunksize* bytes and return the carvpath of the
    newly allocated, not-yet-frozen chunk."""
    chunkoffset = self._grow(chunksize=chunksize)
    # The carvpath remains secret until the caller chooses to publish it.
    return str(carvpath._Entity(
        lpmap=self.context.longpathmap,
        maxfstoken=self.context.maxfstoken,
        fragments=[carvpath.Fragment(offset=chunkoffset, size=chunksize)]))
def read(self, offset, size):
    """Read *size* bytes starting at *offset* within this entity.

    The requested range is mapped through the entity's fragment list;
    each resulting chunk is read from the underlying archive and, for
    non-sparse chunks, fed to the opportunistic hashing collection.
    """
    # Create an entity object for the range we need to read, truncated
    # to the bounds of this entity.
    readent = self.entity.subentity(
        childent=carvpath._Entity(
            lpmap=self.entity.longpathmap,
            maxfstoken=self.entity.maxfstoken,
            fragments=[carvpath.Fragment(offset=offset, size=size)]),
        truncate=True)
    # Collect chunks in a list and join once at the end; repeated
    # bytes += concatenation is quadratic in the number of chunks.
    parts = []
    for chunk in readent:  # One entity chunk at a time.
        datachunk = self.pread(chunk=chunk)  # Read chunk from its offset.
        parts.append(datachunk)
        if not chunk.issparse():
            # Do opportunistic hashing if possible.
            self.ohashcollection.lowlevel_read_data(
                offset=chunk.offset, data=datachunk)
    return b''.join(parts)
def read(self, offset, size):
    """Return *size* bytes of this entity starting at *offset*."""
    # Describe the requested range as a carvpath entity, truncated to
    # this entity's bounds.
    wanted = carvpath._Entity(
        lpmap=self.entity.longpathmap,
        maxfstoken=self.entity.maxfstoken,
        fragments=[carvpath.Fragment(offset=offset, size=size)])
    readent = self.entity.subentity(childent=wanted, truncate=True)
    result = b''
    # Walk the mapped chunks one at a time and stitch the data together.
    for chunk in readent:
        piece = self.pread(chunk=chunk)
        result += piece
        # Feed non-sparse chunks to the opportunistic hashing collection.
        if not chunk.issparse():
            self.ohashcollection.lowlevel_read_data(offset=chunk.offset,
                                                    data=piece)
    return result
def write(self, offset, data):
    """Write *data* at *offset* within this entity; return the byte count."""
    size = len(data)
    # Describe the target range as a carvpath entity, truncated to this
    # entity's bounds.
    target = carvpath._Entity(
        lpmap=self.entity.longpathmap,
        maxfstoken=self.entity.maxfstoken,
        fragments=[carvpath.Fragment(offset=offset, size=size)])
    writeent = self.entity.subentity(childent=target, truncate=True)
    # Start at index zero within the writable data.
    dataindex = 0
    for chunk in writeent:  # One fragment at a time.
        # Slice out the part of *data* destined for this fragment.
        chunkdata = data[dataindex:dataindex + chunk.size]
        dataindex += chunk.size  # Advance for the next fragment.
        # Write the fragment to its proper offset in the archive.
        self.pwrite(chunk=chunk, chunkdata=chunkdata)
        # Update opportunistic hashing if possible.
        self.ohashcollection.lowlevel_written_data(offset=chunk.offset,
                                                   data=chunkdata)
    return size
def write(self, offset, data):
    """Write *data* into this entity at *offset* and return ``len(data)``."""
    size = len(data)
    # Map the requested range through this entity's fragment list,
    # truncating to the entity's bounds.
    writeent = self.entity.subentity(
        childent=carvpath._Entity(
            lpmap=self.entity.longpathmap,
            maxfstoken=self.entity.maxfstoken,
            fragments=[carvpath.Fragment(offset=offset, size=size)]),
        truncate=True)
    consumed = 0  # Bytes of *data* handed out so far.
    for chunk in writeent:
        upto = consumed + chunk.size
        fragment_data = data[consumed:upto]
        consumed = upto
        # Store the fragment at its proper archive offset.
        self.pwrite(chunk=chunk, chunkdata=fragment_data)
        # Keep opportunistic hashing up to date where possible.
        self.ohashcollection.lowlevel_written_data(offset=chunk.offset,
                                                   data=fragment_data)
    return size