def save(self):
    """
    Save caches to the persistent cache file.

    No-op unless a cache filename is configured and the cache level
    is at least CACHE_PERSISTENT. Expired objects are wiped first,
    then all class caches are pickled into a gzipped temporary file
    which is renamed over the real cache file so that readers never
    see a half-written cache.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Clear expired items and gather all caches into a single object
    self.expire()
    log.cache("Cache dump stats:\n" + self.stats().strip())
    data = {}
    for current_class in self._classes:
        # Put container classes into id-sleep
        if issubclass(current_class, containers.Container):
            for container in current_class._cache.values():
                container._sleep()
        data[current_class.__name__] = current_class._cache
    # Dump the cache object into file
    try:
        # Use temporary file to minimize the time during which
        # the real cache is inconsistent
        temp_file = tempfile.NamedTemporaryFile(
                mode="wb", delete=False, prefix="nitrate-cache.",
                dir=os.path.dirname(self._filename))
        log.cache("Temporary cache file: {0}".format(temp_file.name))
        temp_name = temp_file.name
        # fix: close the raw descriptor before gzip reopens the file
        # by name --- the original rebound 'output_file' and leaked it
        temp_file.close()
        log.debug("Saving persistent cache into {0}".format(
                self._filename))
        with gzip.open(temp_name, "wb") as output_file:
            pickle.dump(data, output_file)
        os.rename(temp_name, self._filename)
        log.debug("Persistent cache successfully saved")
    except IOError as error:
        log.error("Failed to save persistent cache ({0})".format(error))
def clear(self, classes=None):
    """
    Completely wipe out cache of all (or selected) classes

    Accepts a class or a list of classes. Clears all given classes
    and their subclasses. For example Cache().clear(Mutable) will
    empty the cache of all mutable objects.
    """
    # Wipe everything
    # fix: compare with None by identity, not equality (PEP 8)
    if classes is None:
        log.cache("Wiping out all objects memory cache")
        classes = self._classes
    # Wipe selected classes only
    else:
        # Convert single class into a list
        if isinstance(classes, type):
            classes = [classes]
        # Prepare the list of given classes and their subclasses
        classes = [cls for cls in self._classes
                if any([issubclass(cls, klass) for klass in classes])]
        log.cache("Wiping out {0} memory cache".format(
                listed([klass.__name__ for klass in classes])))
    # For each class re-initialize objects and remove from index
    for current_class in classes:
        # Snapshot with list() so _init() cannot invalidate the view
        for current_object in list(current_class._cache.values()):
            # Reset the object to the initial state
            current_object._init()
        current_class._cache = {}
def clear(self, classes=None):
    """
    Completely wipe out cache of all (or selected) classes

    Accepts a class or a list of classes. Clears all given classes
    and their subclasses. For example Cache().clear(Mutable) will
    empty the cache of all mutable objects.
    """
    # Wipe everything
    # fix: compare with None by identity, not equality (PEP 8)
    if classes is None:
        log.cache("Wiping out all objects memory cache")
        classes = self._classes
    # Wipe selected classes only
    else:
        # Convert single class into a list
        if isinstance(classes, type):
            classes = [classes]
        # Prepare the list of given classes and their subclasses
        classes = [cls for cls in self._classes
                if any([issubclass(cls, klass) for klass in classes])]
        log.cache("Wiping out {0} memory cache".format(
                listed([klass.__name__ for klass in classes])))
    # For each class re-initialize objects and remove from index
    for current_class in classes:
        # fix: dict.itervalues() was removed in Python 3; snapshot
        # with list() so _init() cannot invalidate the iteration
        for current_object in list(current_class._cache.values()):
            # Reset the object to the initial state
            current_object._init()
        current_class._cache = {}
def lock(self):
    """
    Create the cache lock unless it exists, set mode appropriately.

    If a lock file owned by a still-running process is found, the
    cache is opened read-only. A malformed or stale lock is broken
    and a fresh read-write lock holding our PID is written.
    """
    try:
        # Attempt to extract the PID from the lock file
        # fix: use a context manager so the handle is closed even
        # when reading raises (the original could leak it)
        with open(self._lock) as lock:
            pid = lock.readline().strip()
        # Make sure the PID is sane (otherwise ignore it)
        try:
            pid = int(pid)
        except ValueError:
            log.warn("Malformed cache lock ({0}), ignoring".format(pid))
            # Re-use the outer IOError handler to re-create the lock
            raise IOError
        # Check that the process is still running (Linux /proc check)
        if not os.path.exists("/proc/{0}".format(pid)):
            log.cache("Breaking stale lock (process {0} dead)".format(pid))
            raise IOError
        log.info("Found lock {0}, opening read-only".format(self._lock))
        self._mode = "read-only"
    except IOError:
        log.cache("Creating cache lock {0}".format(self._lock))
        with open(self._lock, "w") as lock:
            lock.write("{0}\n".format(os.getpid()))
        self._mode = "read-write"
def save(self):
    """
    Save caches to the persistent cache file.

    No-op unless a cache filename is configured and the cache level
    is at least CACHE_PERSISTENT. Expired objects are wiped first,
    then all class caches are pickled into a gzipped temporary file
    which is renamed over the real cache file so that readers never
    see a half-written cache.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Clear expired items and gather all caches into a single object
    self.expire()
    log.cache("Cache dump stats:\n" + self.stats().strip())
    data = {}
    for current_class in self._classes:
        # Put container classes into id-sleep
        if issubclass(current_class, containers.Container):
            # fix: dict.itervalues() was removed in Python 3,
            # use values() instead
            for container in current_class._cache.values():
                container._sleep()
        data[current_class.__name__] = current_class._cache
    # Dump the cache object into file
    try:
        # Use temporary file to minimize the time during which
        # the real cache is inconsistent
        output_file = tempfile.NamedTemporaryFile(
                mode="wb", delete=False, prefix="nitrate-cache.",
                dir=os.path.dirname(self._filename))
        log.cache("Temporary cache file: {0}".format(output_file.name))
        output_file = gzip.open(output_file.name, "wb")
        log.debug("Saving persistent cache into {0}".format(
                self._filename))
        pickle.dump(data, output_file)
        output_file.close()
        os.rename(output_file.name, self._filename)
        log.debug("Persistent cache successfully saved")
    except IOError as error:
        log.error("Failed to save persistent cache ({0})".format(error))
def load(self):
    """
    Load caches from the persistent cache file.

    No-op unless a cache filename is configured and the cache level
    is at least CACHE_PERSISTENT. When the cache file is unreadable
    or corrupt the level is downgraded to CACHE_OBJECTS and the lock
    is released.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Load the saved cache from file
    try:
        log.debug("Loading persistent cache from {0}".format(
                self._filename))
        input_file = gzip.open(self._filename, 'rb')
        data = pickle.load(input_file)
        input_file.close()
    except EOFError:
        log.cache("Cache file empty, will fill it upon exit")
        return
    # fix: 'except Class, name' is Python-2-only syntax and a
    # SyntaxError in Python 3; use the 'as' form
    except (IOError, zlib.error) as error:
        # errno 2 == ENOENT, the file simply does not exist yet
        if getattr(error, "errno", None) == 2:
            log.warn("Cache file not found, will create one on exit")
            return
        else:
            log.error("Failed to load the cache ({0})".format(error))
            log.warn("Going on but switching to the CACHE_OBJECTS level")
            set_cache_level(config.CACHE_OBJECTS)
            self.unlock()
            return
def unlock(self):
    """ Remove the cache lock """
    # Read-only sessions do not own the lock --- leave it in place
    if self._mode != "read-only":
        try:
            log.cache("Removing cache lock {0}".format(self._lock))
            os.remove(self._lock)
        except OSError as error:
            # Best effort only: report the failure and carry on
            log.error("Failed to remove the cache lock {0} ({1})".format(
                    self._lock, error))
def expire(self):
    """
    Remove all out-of-date objects from the cache

    All expired objects are wiped out as well as those mutable
    objects which are in modified state (hold different information
    from what is on the server and thus could cause inconsistencies).
    Also all uninitialized objects are removed from the cache.
    """
    for current_class in self._classes:
        expired = []
        # fix: dict.iteritems() was removed in Python 3; iterate a
        # list() snapshot so _init() below cannot invalidate the view
        for id, current_object in list(current_class._cache.items()):
            expire = False
            # Check if object is uninitialized
            if (current_object._id is NitrateNone or
                    current_object._fetched is None):
                log.all("Wiping uninitialized {0} {1} from cache".format(
                        current_object.__class__.__name__,
                        current_object.identifier))
                expire = True
            # Check if object is expired
            elif current_object._is_expired:
                log.all("Wiping expired {0} {1} from cache".format(
                        current_object.__class__.__name__,
                        current_object.identifier))
                expire = True
            # Check if object is modified
            elif (isinstance(current_object, mutable.Mutable) and
                    current_object._modified):
                log.all("Wiping modified {0} {1} from cache".format(
                        current_object.__class__.__name__,
                        current_object.identifier))
                expire = True
            # Expire containers with uncached items
            elif (isinstance(current_object, containers.Container) and
                    not current_object._class._is_cached(
                        current_object._current)):
                log.all("Wiping {0} {1} with uncached items".format(
                        current_object.__class__.__name__,
                        current_object.identifier))
                expire = True
            if expire:
                # Reset the object to the initial state
                current_object._init()
                expired.append(id)
        before = len(current_class._cache)
        for id in expired:
            del current_class._cache[id]
        after = len(current_class._cache)
        if before != after:
            log.cache("Wiped {0} from the {1} cache".format(
                    listed(before - after, "expired object"),
                    current_class.__name__))
def expire(self):
    """
    Remove all out-of-date objects from the cache

    All expired objects are wiped out as well as those mutable
    objects which are in modified state (hold different information
    from what is on the server and thus could cause inconsistencies).
    Also all uninitialized objects are removed from the cache.
    """
    for current_class in self._classes:
        expired = []
        # fix: iterate a list() snapshot --- _init() resets objects
        # while we iterate and, should it touch the class cache, a
        # live .items() view would raise RuntimeError in Python 3
        for id, current_object in list(current_class._cache.items()):
            expire = False
            # Check if object is uninitialized
            if (current_object._id is NitrateNone or
                    current_object._fetched is None):
                log.all("Wiping uninitialized {0} {1} from cache".format(
                        current_object.__class__.__name__,
                        current_object.identifier))
                expire = True
            # Check if object is expired
            elif current_object._is_expired:
                log.all("Wiping expired {0} {1} from cache".format(
                        current_object.__class__.__name__,
                        current_object.identifier))
                expire = True
            # Check if object is modified
            elif (isinstance(current_object, mutable.Mutable) and
                    current_object._modified):
                log.all("Wiping modified {0} {1} from cache".format(
                        current_object.__class__.__name__,
                        current_object.identifier))
                expire = True
            # Expire containers with uncached items
            elif (isinstance(current_object, containers.Container) and
                    not current_object._class._is_cached(
                        current_object._current)):
                log.all("Wiping {0} {1} with uncached items".format(
                        current_object.__class__.__name__,
                        current_object.identifier))
                expire = True
            if expire:
                # Reset the object to the initial state
                current_object._init()
                expired.append(id)
        before = len(current_class._cache)
        for id in expired:
            del current_class._cache[id]
        after = len(current_class._cache)
        if before != after:
            log.cache("Wiped {0} from the {1} cache".format(
                    listed(before - after, "expired object"),
                    current_class.__name__))
def exit(self):
    """ Save the cache and remove the lock """
    # Persistent caching disabled --- nothing to clean up
    if get_cache_level() < config.CACHE_PERSISTENT:
        return
    if self._mode == "read-only":
        # Another process owns the lock; do not overwrite its cache
        log.cache("Skipping persistent cache save in read-only mode")
        return
    # Persist the cache, then release the lock
    self.save()
    self.unlock()
def _wake(self):
    """ Restore container object after loading from cache """
    # See _sleep() method above for explanation why this is necessary
    if self._current is NitrateNone:
        return
    # Only wake up when every referenced id is already cached,
    # otherwise reset the container to its initial state
    if not self._class._is_cached(list(self._original)):
        log.cache("Skipping wake up of {0} for {1}".format(
                self.__class__.__name__, self._identifier))
        self._init()
        return
    log.cache("Waking up {0} for {1}".format(
            self.__class__.__name__, self._identifier))
    self._original = set(self._class(id) for id in self._original)
    self._current = set(self._class(id) for id in self._current)
def _wake(self):
    """ Restore container object after loading from cache """
    # See _sleep() method above for explanation why this is necessary
    if self._current is NitrateNone:
        return
    class_name = self.__class__.__name__
    if self._class._is_cached(list(self._original)):
        # All referenced ids are cached, rebuild the object sets
        log.cache("Waking up {0} for {1}".format(
                class_name, self._identifier))
        self._original = {self._class(id) for id in self._original}
        self._current = {self._class(id) for id in self._current}
    else:
        # Some items are missing from the cache, start over
        log.cache("Skipping wake up of {0} for {1}".format(
                class_name, self._identifier))
        self._init()
def __new__(cls, id=None, *args, **kwargs):
    """ Create a new object, handle caching if enabled """
    # No caching when turned off or class does not support it
    if (config.get_cache_level() < config.CACHE_OBJECTS or
            getattr(cls, "_cache", None) is None):
        return super(Nitrate, cls).__new__(cls)
    # Make sure that cache has been initialized
    Cache()
    # Look up cached object by id (or other arguments in kwargs)
    try:
        # If found, we get instance and key by which it was found
        instance, key = cls._cache_lookup(id, **kwargs)
        if isinstance(key, int):
            log.cache("Using cached {0} ID#{1}".format(cls.__name__, key))
        else:
            log.cache("Using cached {0} '{1}'".format(cls.__name__, key))
        return instance
    # Object not cached yet, create a new one and cache it
    except KeyError:
        new = super(Nitrate, cls).__new__(cls)
        if isinstance(id, int):
            log.cache("Caching {0} ID#{1}".format(cls.__name__, id))
            cls._cache[id] = new
        # fix: 'basestring' does not exist in Python 3 (NameError);
        # use six.string_types as the sibling implementation does
        elif isinstance(id, six.string_types) or "name" in kwargs:
            # NOTE(review): logged as cached but not stored in _cache
            # here --- presumably indexed by name later; confirm
            log.cache("Caching {0} '{1}'".format(
                    cls.__name__, (id or kwargs.get("name"))))
        return new
def __new__(cls, id=None, *args, **kwargs):
    """ Create a new object, handle caching if enabled """
    # Bypass caching entirely when disabled or unsupported by class
    caching_enabled = (config.get_cache_level() >= config.CACHE_OBJECTS
            and getattr(cls, "_cache", None) is not None)
    if not caching_enabled:
        return super(Nitrate, cls).__new__(cls)
    # Make sure that cache has been initialized
    Cache()
    try:
        # Cache hit: _cache_lookup() gives the instance plus the key
        # (id or other kwargs) under which it was found
        instance, key = cls._cache_lookup(id, **kwargs)
        if isinstance(key, int):
            log.cache("Using cached {0} ID#{1}".format(cls.__name__, key))
        else:
            log.cache("Using cached {0} '{1}'".format(cls.__name__, key))
        return instance
    except KeyError:
        # Cache miss: build a fresh object and register it
        fresh = super(Nitrate, cls).__new__(cls)
        if isinstance(id, int):
            log.cache("Caching {0} ID#{1}".format(cls.__name__, id))
            cls._cache[id] = fresh
        elif isinstance(id, six.string_types) or "name" in kwargs:
            log.cache("Caching {0} '{1}'".format(
                    cls.__name__, (id or kwargs.get("name"))))
        return fresh
def load(self):
    """
    Load caches from the persistent cache file.

    No-op unless a cache filename is configured and the cache level
    is at least CACHE_PERSISTENT. On unreadable or corrupt cache the
    level is downgraded to CACHE_OBJECTS and the lock released.
    Containers are restored last and woken up from their id-sleep.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Load the saved cache from file
    try:
        log.debug("Loading persistent cache from {0}".format(
                self._filename))
        input_file = gzip.open(self._filename, 'rb')
        data = pickle.load(input_file)
        input_file.close()
    except EOFError:
        log.cache("Cache file empty, will fill it upon exit")
        return
    except (IOError, zlib.error) as error:
        # errno 2 == ENOENT, the file simply does not exist yet
        if getattr(error, "errno", None) == 2:
            log.warn("Cache file not found, will create one on exit")
            return
        else:
            log.error("Failed to load the cache ({0})".format(error))
            log.warn("Going on but switching to the CACHE_OBJECTS level")
            set_cache_level(config.CACHE_OBJECTS)
            self.unlock()
            return
    # Restore cache for immutable & mutable classes first
    for current_class in self._immutable + self._mutable:
        try:
            log.cache("Loading cache for {0}".format(
                    current_class.__name__))
            current_class._cache = data[current_class.__name__]
        except KeyError:
            log.cache("Failed to load cache for {0}, starting "
                    "with empty".format(current_class.__name__))
            current_class._cache = {}
    # Containers to be loaded last (to prevent object duplicates)
    for current_class in self._containers:
        try:
            log.cache("Loading cache for {0}".format(
                    current_class.__name__))
            current_class._cache = data[current_class.__name__]
        except KeyError:
            log.cache("Failed to load cache for {0}, starting "
                    "with empty".format(current_class.__name__))
            current_class._cache = {}
        # Wake up container objects from the id-sleep
        # fix: dict.itervalues() was removed in Python 3
        for container in current_class._cache.values():
            container._wake()
    # Clear expired items and give a short summary for debugging
    self.expire()
    log.cache("Cache restore stats:\n" + self.stats().strip())
return except (IOError, zlib.error), error: if getattr(error, "errno", None) == 2: log.warn("Cache file not found, will create one on exit") return else: log.error("Failed to load the cache ({0})".format(error)) log.warn("Going on but switching to the CACHE_OBJECTS level") set_cache_level(config.CACHE_OBJECTS) self.unlock() return # Restore cache for immutable & mutable classes first for current_class in self._immutable + self._mutable: try: log.cache("Loading cache for {0}".format( current_class.__name__)) current_class._cache = data[current_class.__name__] except KeyError: log.cache("Failed to load cache for {0}, starting " "with empty".format(current_class.__name__)) current_class._cache = {} # Containers to be loaded last (to prevent object duplicates) for current_class in self._containers: try: log.cache("Loading cache for {0}".format( current_class.__name__)) current_class._cache = data[current_class.__name__] except KeyError: log.cache("Failed to load cache for {0}, starting " "with empty".format(current_class.__name__)) current_class._cache = {}
def load(self):
    """
    Load caches from the persistent cache file.

    No-op unless a cache filename is configured and the cache level
    is at least CACHE_PERSISTENT. On unreadable or corrupt cache the
    level is downgraded to CACHE_OBJECTS and the lock released.
    Containers are restored last and woken up from their id-sleep.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Load the saved cache from file
    try:
        log.debug("Loading persistent cache from {0}".format(
                self._filename))
        # fix: context manager closes the handle even when unpickling
        # raises (the original leaked it on the EOFError path)
        with gzip.open(self._filename, 'rb') as input_file:
            data = pickle.load(input_file)
    except EOFError:
        log.cache("Cache file empty, will fill it upon exit")
        return
    except (IOError, zlib.error) as error:
        # errno 2 == ENOENT, the file simply does not exist yet
        if getattr(error, "errno", None) == 2:
            log.warn("Cache file not found, will create one on exit")
            return
        else:
            log.error("Failed to load the cache ({0})".format(error))
            log.warn("Going on but switching to the CACHE_OBJECTS level")
            set_cache_level(config.CACHE_OBJECTS)
            self.unlock()
            return
    # Restore cache for immutable & mutable classes first
    for current_class in self._immutable + self._mutable:
        try:
            log.cache("Loading cache for {0}".format(
                    current_class.__name__))
            current_class._cache = data[current_class.__name__]
        except KeyError:
            log.cache("Failed to load cache for {0}, starting "
                    "with empty".format(current_class.__name__))
            current_class._cache = {}
    # Containers to be loaded last (to prevent object duplicates)
    for current_class in self._containers:
        try:
            log.cache("Loading cache for {0}".format(
                    current_class.__name__))
            current_class._cache = data[current_class.__name__]
        except KeyError:
            log.cache("Failed to load cache for {0}, starting "
                    "with empty".format(current_class.__name__))
            current_class._cache = {}
        # Wake up container objects from the id-sleep
        for container in current_class._cache.values():
            container._wake()
    # Clear expired items and give a short summary for debugging
    self.expire()
    log.cache("Cache restore stats:\n" + self.stats().strip())