def write(self, cacheId, content, memory=False, writeToFile=True, keepLock=False): filetool.directory(self._path) cacheFile = os.path.join(self._path, self.filename(cacheId)) if writeToFile: try: if not cacheFile in self._locked_files: self._locked_files.add( cacheFile) # this is not atomic with the next one! filetool.lock(cacheFile) fobj = open(cacheFile, 'wb') pickle.dump(content, fobj, 2) fobj.close() if not keepLock: filetool.unlock(cacheFile) self._locked_files.remove( cacheFile) # not atomic with the previous one! except (IOError, EOFError, pickle.PickleError, pickle.PicklingError), e: e.args = ("Could not store cache to %s\n" % self._path + e.args[0], ) + e.args[1:] raise e
def write(self, cacheId, content, memory=False, writeToFile=True):
    """Store *content* under *cacheId* on disk and, optionally, in memcache.

    On any I/O or pickling error the console error is printed and the process
    terminates via sys.exit(1) (legacy behavior, preserved).

    :param memory: when True, also keep the raw content in the module-level
        memcache dict
    """
    filetool.directory(self._path)
    cacheFile = os.path.join(self._path, self.filename(cacheId))
    if writeToFile:
        try:
            self._locked_files.add(cacheFile)  # this is not atomic with the next one!
            filetool.lock(cacheFile)
            fobj = open(cacheFile, 'wb')
            try:
                pickle.dump(content, fobj, 2)
            finally:
                fobj.close()  # fix: always close, even when pickle.dump raises
            filetool.unlock(cacheFile)
            self._locked_files.remove(cacheFile)  # not atomic with the previous one!
        except (IOError, EOFError, pickle.PickleError, pickle.PicklingError):
            self._console.error("Could not store cache to %s" % self._path)
            sys.exit(1)
    if memory:
        memcache[cacheId] = content
def _unlock_files(self):
    """Release every file lock this cache instance still tracks.

    Intended for shutdown/interrupt cleanup; every failure is swallowed on
    purpose, because a tracked file may never have been locked at all.
    """
    for path in self._locked_files:
        try:
            filetool.unlock(path)
            self._console.debug("Cleaned up lock for file: %r" % path)
        except:
            # adding to _locked_files and actually taking the lock is not
            # atomic, so the lock may not exist; an interrupt handler is no
            # place for anything fancier than ignoring the error
            pass
def write(self, cacheId, content, memory=False, writeToFile=True, keepLock=False):
    """Pickle *content* into the cache file derived from *cacheId*.

    Prints debug progress when writeCond(cacheId) matches. Errors are
    re-raised with the cache path prepended to the message.

    :param memory: accepted but unused here — NOTE(review): confirm whether
        a memcache write was intended
    :param keepLock: leave the file lock held after a successful write
    """
    filetool.directory(self._path)
    cacheFile = os.path.join(self._path, self.filename(cacheId))
    if writeCond(cacheId):
        print "\nWriting %s ..." % (cacheId,),
    if writeToFile:
        try:
            if not cacheFile in self._locked_files:
                self._locked_files.add(cacheFile)  # this is not atomic with the next one!
                filetool.lock(cacheFile)
            fobj = open(cacheFile, 'wb')
            pickle.dump(content, fobj, 2)  # protocol 2: compact binary pickle
            fobj.close()
            if not keepLock:
                filetool.unlock(cacheFile)
                self._locked_files.remove(cacheFile)  # not atomic with the previous one!
            #print "wrote cacheId: %s" % cacheId
            if writeCond(cacheId):
                print "to disk"
        except (IOError, EOFError, pickle.PickleError, pickle.PicklingError), e:
            # prepend context to the original message, keep remaining args
            e.args = ("Could not store cache to %s\n" % self._path + e.args[0], ) + e.args[1:]
            raise e
def read(self, cacheId, dependsOn=None, memory=False, keepLock=False):
    """Look up *cacheId*, first in memcache, then in the zlib file cache.

    :param dependsOn: path whose mtime invalidates older cache entries
    :param memory: also store a file-cache hit in memcache
    :param keepLock: keep holding the file lock after a successful read
    :returns: (content, modTime); content is None on miss, staleness or error
    """
    if dependsOn:
        dependsModTime = os.stat(dependsOn).st_mtime

    # Mem cache
    if cacheId in memcache:
        memitem = memcache[cacheId]
        if not dependsOn or dependsModTime < memitem['time']:
            return memitem['content'], memitem['time']

    # File cache
    cacheFile = os.path.join(self._path, self.filename(cacheId))
    try:
        cacheModTime = os.stat(cacheFile).st_mtime
    except OSError:
        return None, None

    # out of date check
    if dependsOn and dependsModTime > cacheModTime:
        return None, cacheModTime

    try:
        try:
            if not cacheFile in self._locked_files:
                self._locked_files.add(cacheFile)
                filetool.lock(cacheFile)
            fobj = open(cacheFile, 'rb')
            fcontent = fobj.read().decode('zlib')
            fobj.close()
        finally:
            if not keepLock:
                filetool.unlock(cacheFile)
                self._locked_files.remove(cacheFile)
    except (IOError, ):
        self._console.warn("Could not read cache object %s" % cacheFile)
        # fix: the finally clause above may already have unlocked and
        # untracked the file; unconditionally calling set.remove() again
        # raised KeyError here and masked the original IOError
        if cacheFile in self._locked_files:
            filetool.unlock(cacheFile)
            self._locked_files.remove(cacheFile)
        return None, cacheModTime

    try:
        # gc is disabled around unpickling (presumably for speed on large
        # object graphs); finally guarantees it is re-enabled
        gc.disable()
        try:
            content = pickle.loads(fcontent)
        finally:
            gc.enable()
        if memory:
            memcache[cacheId] = {'content': content, 'time': time.time()}
        return content, cacheModTime
    except (EOFError, pickle.PickleError, pickle.UnpicklingError):
        self._console.warn("Could not unpickle cache object %s" % cacheFile)
        return None, cacheModTime
def _unlock_files(self):
    """Unlock and delete every cache file this instance still has locked.

    The cache file itself is removed as well, because an interrupted write
    may have left it corrupted. All errors are deliberately ignored.
    """
    for fname in self._locked_files:
        try:
            filetool.unlock(fname)
            os.unlink(fname)  # drop the file too - the pending write might be corrupted
            self._console.debug("Cleaned up lock and file: %r" % fname)
        except:
            # tracking and locking are not atomic, so lock or file may not
            # exist; nothing fancy to do in an interrupt handler
            pass
def read(self, cacheId, dependsOn=None, memory=False, keepLock=False):
    """Look up *cacheId*, first in memcache, then in the zlib file cache.

    :param dependsOn: path whose mtime invalidates older cache entries
    :param memory: also store a file-cache hit in memcache
    :param keepLock: keep holding the file lock after a successful read
    :returns: (content, modTime); content is None on miss, staleness or error
    """
    if dependsOn:
        dependsModTime = os.stat(dependsOn).st_mtime

    # Mem cache
    if cacheId in memcache:
        memitem = memcache[cacheId]
        if not dependsOn or dependsModTime < memitem['time']:
            return memitem['content'], memitem['time']

    # File cache
    cacheFile = os.path.join(self._path, self.filename(cacheId))
    try:
        cacheModTime = os.stat(cacheFile).st_mtime
    except OSError:
        return None, None

    # out of date check
    if dependsOn and dependsModTime > cacheModTime:
        return None, cacheModTime

    try:
        try:
            if not cacheFile in self._locked_files:
                self._locked_files.add(cacheFile)
                filetool.lock(cacheFile)
            fobj = open(cacheFile, 'rb')
            fcontent = fobj.read().decode('zlib')
            fobj.close()
        finally:
            if not keepLock:
                filetool.unlock(cacheFile)
                self._locked_files.remove(cacheFile)
    except (IOError, ):
        self._console.warn("Could not read cache object %s" % cacheFile)
        # fix: when not keepLock the finally clause already unlocked and
        # untracked the file; a second unconditional set.remove() raised
        # KeyError here and masked the original IOError
        if cacheFile in self._locked_files:
            filetool.unlock(cacheFile)
            self._locked_files.remove(cacheFile)
        return None, cacheModTime

    try:
        # gc is disabled around unpickling (presumably for speed on large
        # object graphs); finally guarantees it is re-enabled
        gc.disable()
        try:
            content = pickle.loads(fcontent)
        finally:
            gc.enable()
        if memory:
            memcache[cacheId] = {'content':content, 'time': time.time()}
        return content, cacheModTime
    except (EOFError, pickle.PickleError, pickle.UnpicklingError):
        self._console.warn("Could not unpickle cache object %s" % cacheFile)
        return None, cacheModTime
def read(self, cacheId, dependsOn=None, memory=False, keepLock=False):
    """Look up *cacheId*, first in memcache, then in the pickle file cache.

    Prints debug progress when writeCond(cacheId) matches.

    :param dependsOn: path whose mtime invalidates older cache entries
    :param memory: also store a file-cache hit in memcache
    :param keepLock: keep holding the file lock after a successful read
    :returns: (content, modTime); content is None on miss, staleness or error

    NOTE(review): if an exception fires after filetool.lock(), the except
    clause returns without unlocking — the lock lingers until
    _unlock_files(); confirm that is acceptable.
    """
    if dependsOn:
        dependsModTime = os.stat(dependsOn).st_mtime
    if writeCond(cacheId):
        print "\nReading %s ..." % (cacheId, ),
    # Mem cache
    if cacheId in memcache:
        memitem = memcache[cacheId]
        if not dependsOn or dependsModTime < memitem['time']:
            if writeCond(cacheId):
                print "from memcache"
            return memitem['content'], memitem['time']
    # File cache
    filetool.directory(self._path)
    cacheFile = os.path.join(self._path, self.filename(cacheId))
    try:
        cacheModTime = os.stat(cacheFile).st_mtime
    except OSError:
        # no cache file yet
        return None, None
    # out of date check
    if dependsOn and dependsModTime > cacheModTime:
        return None, cacheModTime
    try:
        if not cacheFile in self._locked_files:
            self._locked_files.add(cacheFile)
            filetool.lock(cacheFile)
        fobj = open(cacheFile, 'rb')
        #filetool.lock(fobj.fileno())
        content = pickle.load(fobj)
        #filetool.unlock(fobj.fileno())
        fobj.close()
        if not keepLock:
            filetool.unlock(cacheFile)
            self._locked_files.remove(cacheFile)
        if memory:
            memcache[cacheId] = {'content': content, 'time': time.time()}
        #print "read cacheId: %s" % cacheId
        if writeCond(cacheId):
            print "from disk"
        return content, cacheModTime
    except (IOError, EOFError, pickle.PickleError, pickle.UnpicklingError):
        self._console.warn("Could not read cache object from %s, recalculating..." % self._path)
        return None, cacheModTime
def read(self, cacheId, dependsOn=None, memory=False, keepLock=False):
    """Look up *cacheId*, first in memcache, then in the pickle file cache.

    Prints debug progress when writeCond(cacheId) matches.

    :param dependsOn: path whose mtime invalidates older cache entries
    :param memory: also store a file-cache hit in memcache
    :param keepLock: keep holding the file lock after a successful read
    :returns: (content, modTime); content is None on miss, staleness or error

    NOTE(review): if an exception fires after filetool.lock(), the except
    clause returns without unlocking — the lock lingers until
    _unlock_files(); confirm that is acceptable.
    """
    if dependsOn:
        dependsModTime = os.stat(dependsOn).st_mtime
    if writeCond(cacheId):
        print "\nReading %s ..." % (cacheId,),
    # Mem cache
    if cacheId in memcache:
        memitem = memcache[cacheId]
        if not dependsOn or dependsModTime < memitem['time']:
            if writeCond(cacheId):
                print "from memcache"
            return memitem['content'], memitem['time']
    # File cache
    filetool.directory(self._path)
    cacheFile = os.path.join(self._path, self.filename(cacheId))
    try:
        cacheModTime = os.stat(cacheFile).st_mtime
    except OSError:
        # no cache file yet
        return None, None
    # out of date check
    if dependsOn and dependsModTime > cacheModTime:
        return None, cacheModTime
    try:
        if not cacheFile in self._locked_files:
            self._locked_files.add(cacheFile)
            filetool.lock(cacheFile)
        fobj = open(cacheFile, 'rb')
        #filetool.lock(fobj.fileno())
        content = pickle.load(fobj)
        #filetool.unlock(fobj.fileno())
        fobj.close()
        if not keepLock:
            filetool.unlock(cacheFile)
            self._locked_files.remove(cacheFile)
        if memory:
            memcache[cacheId] = {'content':content, 'time': time.time()}
        #print "read cacheId: %s" % cacheId
        if writeCond(cacheId):
            print "from disk"
        return content, cacheModTime
    except (IOError, EOFError, pickle.PickleError, pickle.UnpicklingError):
        self._console.error("Could not read cache from %s" % self._path)
        return None, cacheModTime
def read(self, cacheId, dependsOn=None, memory=False):
    """Look up *cacheId*, first in memcache, then in the pickle file cache.

    :param dependsOn: path whose mtime invalidates older cache entries
    :param memory: also store a file-cache hit in memcache
    :returns: (content, modTime); content is None on miss, staleness or error
    """
    if dependsOn:
        dependsModTime = os.stat(dependsOn).st_mtime

    # Mem cache
    if cacheId in memcache:
        memitem = memcache[cacheId]
        if not dependsOn or dependsModTime < memitem['time']:
            return memitem['content'], memitem['time']

    # File cache
    filetool.directory(self._path)
    cacheFile = os.path.join(self._path, self.filename(cacheId))
    try:
        cacheModTime = os.stat(cacheFile).st_mtime
    except OSError:
        return None, None

    # out of date check
    if dependsOn and dependsModTime > cacheModTime:
        return None, cacheModTime

    try:
        self._locked_files.add(cacheFile)
        filetool.lock(cacheFile)
        fobj = open(cacheFile, 'rb')
        content = pickle.load(fobj)
        fobj.close()
        filetool.unlock(cacheFile)
        self._locked_files.remove(cacheFile)
        if memory:
            memcache[cacheId] = {'content': content, 'time': time.time()}
        return content, cacheModTime
    except (IOError, EOFError, pickle.PickleError, pickle.UnpicklingError):
        # fix: release the lock taken above instead of leaking it until
        # _unlock_files(); best effort, mirroring the interrupt handler
        try:
            filetool.unlock(cacheFile)
            self._locked_files.remove(cacheFile)
        except:
            pass
        self._console.error("Could not read cache from %s" % self._path)
        return None, cacheModTime
def write(self, cacheId, content, memory=False, writeToFile=True, keepLock=False): filetool.directory(self._path) cacheFile = os.path.join(self._path, self.filename(cacheId)) if writeCond(cacheId): print "\nWriting %s ..." % (cacheId, ), if writeToFile: try: if not cacheFile in self._locked_files: self._locked_files.add( cacheFile) # this is not atomic with the next one! filetool.lock(cacheFile) fobj = open(cacheFile, 'wb') pickle.dump(content, fobj, 2) fobj.close() if not keepLock: filetool.unlock(cacheFile) self._locked_files.remove( cacheFile) # not atomic with the previous one! #print "wrote cacheId: %s" % cacheId if writeCond(cacheId): print "to disk" except (IOError, EOFError, pickle.PickleError, pickle.PicklingError), e: try: os.unlink( cacheFile ) # try remove cache file, Pickle might leave incomplete files except: e.args = ("Cache file might be crippled.\n" % self._path + e.args[0], ) + e.args[1:] e.args = ("Could not store cache to %s.\n" % self._path + e.args[0], ) + e.args[1:] raise e
def write(self, cacheId, content, memory=False, writeToFile=True): filetool.directory(self._path) cacheFile = os.path.join(self._path, self.filename(cacheId)) if writeToFile: try: self._locked_files.add(cacheFile) # this is not atomic with the next one! filetool.lock(cacheFile) fobj = open(cacheFile, 'wb') pickle.dump(content, fobj, 2) fobj.close() filetool.unlock(cacheFile) self._locked_files.remove(cacheFile) # not atomic with the previous one! except (IOError, EOFError, pickle.PickleError, pickle.PicklingError), e: e.args = ("Could not store cache to %s\n" % self._path + e.args[0], ) + e.args[1:] raise e
def read(self, cacheId, dependsOn=None, memory=False):
    """Look up *cacheId*, first in memcache, then in the pickle file cache.

    :param dependsOn: path whose mtime invalidates an older cache entry
    :param memory: also store a file-cache hit in memcache
    :returns: the cached content, or None on miss, staleness or error
    """
    # fix: "key in dict" instead of the deprecated dict.has_key()
    if cacheId in memcache:
        return memcache[cacheId]

    filetool.directory(self._path)
    cacheFile = os.path.join(self._path, self.filename(cacheId))
    try:
        cacheModTime = os.stat(cacheFile).st_mtime
    except OSError:
        return None

    # Out of date check
    if dependsOn:
        fileModTime = os.stat(dependsOn).st_mtime
        if fileModTime > cacheModTime:
            return None

    try:
        self._locked_files.add(cacheFile)
        filetool.lock(cacheFile)
        fobj = open(cacheFile, 'rb')
        content = pickle.load(fobj)
        fobj.close()
        filetool.unlock(cacheFile)
        self._locked_files.remove(cacheFile)
        if memory:
            memcache[cacheId] = content
        return content
    except (IOError, EOFError, pickle.PickleError, pickle.UnpicklingError):
        # fix: release the lock taken above instead of leaking it until
        # _unlock_files(); best effort, mirroring the interrupt handler
        try:
            filetool.unlock(cacheFile)
            self._locked_files.remove(cacheFile)
        except:
            pass
        self._console.error("Could not read cache from %s" % self._path)
        return None
def write(self, cacheId, content, memory=False, writeToFile=True, keepLock=False): cacheFile = os.path.join(self._path, self.filename(cacheId)) if writeToFile: try: if not cacheFile in self._locked_files: self._locked_files.add( cacheFile) # this is not atomic with the next one! filetool.lock(cacheFile) fobj = open(cacheFile, 'wb') fobj.write(pickle.dumps(content, 2).encode('zlib')) fobj.close() if not keepLock: filetool.unlock(cacheFile) self._locked_files.remove( cacheFile) # not atomic with the previous one! except (IOError, EOFError, pickle.PickleError, pickle.PicklingError), e: try: os.unlink( cacheFile ) # try remove cache file, Pickle might leave incomplete files except: e.args = ("Cache file might be crippled.\n" % self._path + e.args[0], ) + e.args[1:] e.args = ("Could not store cache to %s.\n" % self._path + e.args[0], ) + e.args[1:] raise e
def write(self, cacheId, content, memory=False, writeToFile=True, keepLock=False): cacheFile = os.path.join(self._path, self.filename(cacheId)) if writeToFile: try: if not cacheFile in self._locked_files: self._locked_files.add(cacheFile) # this is not atomic with the next one! filetool.lock(cacheFile) fobj = open(cacheFile, 'wb') fobj.write(pickle.dumps(content, 2).encode('zlib')) fobj.close() if not keepLock: filetool.unlock(cacheFile) self._locked_files.remove(cacheFile) # not atomic with the previous one! except (IOError, EOFError, pickle.PickleError, pickle.PicklingError), e: try: os.unlink(cacheFile) # try remove cache file, Pickle might leave incomplete files except: e.args = ("Cache file might be crippled.\n" % self._path + e.args[0], ) + e.args[1:] e.args = ("Could not store cache to %s.\n" % self._path + e.args[0], ) + e.args[1:] raise e