def __init__(self, file, size, cache_size=DEFAULT_CACHE_SIZE, block_size=DEFAULT_BLOCK_SIZE):
    """
    Wrap the file-like object `file` (total size `size` bytes) with an
    LRU-backed block cache holding at most `cache_size` blocks of
    `block_size` bytes each.
    """
    self.file = file
    self.size = size
    self.cache_size = cache_size
    self.block_size = block_size
    # Number of blocks covering the file; the +1 accounts for a trailing
    # partial block.  NOTE(review): this also counts a phantom empty block
    # when `size` is an exact multiple of `block_size` -- behavior kept
    # as-is since other code may depend on it.
    self.nblocks = (self.size // self.block_size) + 1
    self.cache = LRUCache(self.cache_size)
    # Read-position state: start at offset 0 with no block loaded yet.
    self.dirty = True
    self.at_eof = False
    self.file_pos = 0
    self.current_block_index = -1
    self.current_block = None
def __init__(self, f, cache=32):
    """
    Open a binned-array file on the file-like object `f`: read the fixed
    header, typecode, compression type, default value, and the per-bin
    (position, size) index.  At most `cache` decompressed bins are kept
    resident at once via an LRUCache.  (With cache=None everything is
    allowed to stay in memory.)
    """
    self.f = f
    # --- Fixed header: magic number, format version, array geometry ---
    M, V, max_size, bin_size, nbins = read_packed(f, ">5I")
    assert M == MAGIC
    # Refuse files written by a newer format version than we understand.
    assert V <= VERSION, "File is version %d but I don't know about anything beyond %d" % (V, VERSION)
    self.max_size = max_size
    self.bin_size = bin_size
    self.nbins = nbins
    self.bins = LRUCache(size=cache)
    # --- Typecode (present from format version 1; older files are 'f') ---
    if V >= 1:
        self.typecode = unpack('c', f.read(1))[0]
    else:
        self.typecode = 'f'
    # --- Compression scheme (present from version 2; older files: zlib) ---
    if V >= 2:
        self.comp_type = f.read(4).strip()
    else:
        self.comp_type = 'zlib'
    self.decompress = comp_types[self.comp_type][1]
    # --- Default value for unset positions (stored big-endian on disk) ---
    raw = f.read(calcsize(self.typecode))
    arr = fromstring(raw, self.typecode)
    if platform_is_little_endian:
        arr = arr.byteswap()
    self.default = arr[0]
    # --- Per-bin index: one (offset, byte-length) pair per bin, in order ---
    index = [read_packed(f, ">2I") for _ in range(nbins)]
    self.bin_pos = [pos for pos, _ in index]
    self.bin_sizes = [size for _, size in index]