def load(self):
    """
    Load caches from specified file.

    No-op when persistent caching is disabled or no cache filename
    has been configured. On a corrupted/unreadable cache file the
    cache level is downgraded to CACHE_OBJECTS and the lock released.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Load the saved cache from file
    try:
        log.debug("Loading persistent cache from {0}".format(
                self._filename))
        input_file = gzip.open(self._filename, 'rb')
        data = pickle.load(input_file)
        input_file.close()
    except EOFError:
        # A zero-length file is normal on the very first run
        log.cache("Cache file empty, will fill it upon exit")
        return
    # FIX: Python 3 exception syntax ("as error" instead of ", error")
    except (IOError, zlib.error) as error:
        # errno 2 (ENOENT) ---> missing file, expected on first run
        if getattr(error, "errno", None) == 2:
            log.warn("Cache file not found, will create one on exit")
            return
        else:
            log.error("Failed to load the cache ({0})".format(error))
            log.warn("Going on but switching to the CACHE_OBJECTS level")
            set_cache_level(config.CACHE_OBJECTS)
            self.unlock()
            return
def _fetch(self, inset=None):
    """ Fetch currently linked test cases from the server """
    # Inset-based initialization already done ---> nothing to fetch
    if Container._fetch(self, inset):
        return
    # Prefetch all plan-case tags in one call (skipped when caching
    # persistently, where this would be an unnecessary extra call)
    if config.get_cache_level() == config.CACHE_OBJECTS:
        log.info("Fetching tags for all {0}'s test cases".format(
                self._object.identifier))
        for tag_inject in self._server.TestPlan.get_all_cases_tags(self.id):
            Tag(tag_inject)
    # Ask the server for the currently linked test cases
    log.info("Fetching {0}'s cases".format(self._identifier))
    case_injects = self._server.TestPlan.get_test_cases(self.id)
    log.data("Fetched {0}".format(listed(case_injects, "inject")))
    self._current = {TestCase(inject) for inject in case_injects}
    self._original = set(self._current)
    # Build case plans from the fetched data unless already cached
    if not PlanCasePlans._is_cached(self._object.caseplans):
        caseplan_inset = []
        for inject in case_injects:
            caseplan_inset.append(CasePlan({
                    # Fake our own internal id from testplan & testcase
                    "id": _idify([self._object.id, inject["case_id"]]),
                    "case_id": inject["case_id"],
                    "plan_id": self._object.id,
                    "sortkey": inject["sortkey"]}))
        self._object.caseplans._fetch(caseplan_inset)
def save(self):
    """
    Save caches to specified file.

    No-op when persistent caching is disabled or no cache filename is
    set. The dump is first written to a temporary file in the same
    directory and then atomically renamed over the real cache file so
    that the cache is never left half-written.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Clear expired items and gather all caches into a single object
    self.expire()
    log.cache("Cache dump stats:\n" + self.stats().strip())
    data = {}
    for current_class in self._classes:
        # Put container classes into id-sleep
        if issubclass(current_class, containers.Container):
            for container in current_class._cache.values():
                container._sleep()
        data[current_class.__name__] = current_class._cache
    # Dump the cache object into file
    try:
        # Use temporary file to minimize the time during which
        # the real cache is inconsistent
        temp_file = tempfile.NamedTemporaryFile(
                mode="wb", delete=False, prefix="nitrate-cache.",
                dir=os.path.dirname(self._filename))
        # FIX: close the plain handle right away (only the name is
        # needed); the original left this descriptor leaked after
        # rebinding output_file to the gzip stream
        temp_name = temp_file.name
        temp_file.close()
        log.cache("Temporary cache file: {0}".format(temp_name))
        log.debug("Saving persistent cache into {0}".format(
                self._filename))
        with gzip.open(temp_name, "wb") as output_file:
            pickle.dump(data, output_file)
        os.rename(temp_name, self._filename)
        log.debug("Persistent cache successfully saved")
    except IOError as error:
        log.error("Failed to save persistent cache ({0})".format(error))
def __new__(cls, id=None, *args, **kwargs): """ Create a new object, handle caching if enabled """ # No caching when turned of or class does not support it if (config.get_cache_level() < config.CACHE_OBJECTS or getattr(cls, "_cache", None) is None): return super(Nitrate, cls).__new__(cls) # Make sure that cache has been initialized Cache() # Look up cached object by id (or other arguments in kwargs) try: # If found, we get instance and key by which it was found instance, key = cls._cache_lookup(id, **kwargs) if isinstance(key, int): log.cache("Using cached {0} ID#{1}".format(cls.__name__, key)) else: log.cache("Using cached {0} '{1}'".format(cls.__name__, key)) return instance # Object not cached yet, create a new one and cache it except KeyError: new = super(Nitrate, cls).__new__(cls) if isinstance(id, int): log.cache("Caching {0} ID#{1}".format(cls.__name__, id)) cls._cache[id] = new elif isinstance(id, six.string_types) or "name" in kwargs: log.cache("Caching {0} '{1}'".format( cls.__name__, (id or kwargs.get("name")))) return new
def save(self):
    """
    Save caches to specified file.

    No-op when persistent caching is disabled or no cache filename is
    set. The dump goes to a temporary file first which is then renamed
    over the real cache file to minimize the inconsistency window.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Clear expired items and gather all caches into a single object
    self.expire()
    log.cache("Cache dump stats:\n" + self.stats().strip())
    data = {}
    for current_class in self._classes:
        # Put container classes into id-sleep
        # FIX: dict.itervalues() does not exist on Python 3 ---> values()
        if issubclass(current_class, containers.Container):
            for container in current_class._cache.values():
                container._sleep()
        data[current_class.__name__] = current_class._cache
    # Dump the cache object into file
    try:
        # Use temporary file to minimize the time during which
        # the real cache is inconsistent
        output_file = tempfile.NamedTemporaryFile(
                mode="wb", delete=False, prefix="nitrate-cache.",
                dir=os.path.dirname(self._filename))
        log.cache("Temporary cache file: {0}".format(output_file.name))
        output_file = gzip.open(output_file.name, "wb")
        log.debug("Saving persistent cache into {0}".format(
                self._filename))
        pickle.dump(data, output_file)
        output_file.close()
        os.rename(output_file.name, self._filename)
        log.debug("Persistent cache successfully saved")
    # FIX: Python 3 exception syntax ("as error" instead of ", error")
    except IOError as error:
        log.error("Failed to save persistent cache ({0})".format(error))
def _fetch(self, inset=None): """ Fetch currently linked test cases from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return # Initialize all plan-case tags (skip when caching persistently # as this an additional/unnecessary call in that case) if config.get_cache_level() == config.CACHE_OBJECTS: log.info("Fetching tags for all {0}'s test cases".format( self._object.identifier)) for tag in self._server.TestPlan.get_all_cases_tags(self.id): Tag(tag) # Fetch test cases from the server log.info("Fetching {0}'s cases".format(self._identifier)) injects = self._server.TestPlan.get_test_cases(self.id) log.data("Fetched {0}".format(listed(injects, "inject"))) self._current = set([TestCase(inject) for inject in injects]) self._original = set(self._current) # Initialize case plans if not already cached if not PlanCasePlans._is_cached(self._object.caseplans): inset = [ CasePlan({ # Fake our own internal id from testplan & testcase "id": _idify([self._object.id, inject["case_id"]]), "case_id": inject["case_id"], "plan_id": self._object.id, "sortkey": inject["sortkey"] }) for inject in injects ] self._object.caseplans._fetch(inset)
def __new__(cls, id=None, *args, **kwargs):
    """ Create a new object, handle caching if enabled """
    # No caching when turned off or class does not support it
    if (config.get_cache_level() < config.CACHE_OBJECTS or
            getattr(cls, "_cache", None) is None):
        return super(Nitrate, cls).__new__(cls)
    # Make sure that cache has been initialized
    Cache()
    # Look up cached object by id (or other arguments in kwargs)
    try:
        # If found, we get instance and key by which it was found
        instance, key = cls._cache_lookup(id, **kwargs)
        if isinstance(key, int):
            log.cache("Using cached {0} ID#{1}".format(cls.__name__, key))
        else:
            log.cache("Using cached {0} '{1}'".format(cls.__name__, key))
        return instance
    # Object not cached yet, create a new one and cache it
    except KeyError:
        new = super(Nitrate, cls).__new__(cls)
        # Integer ids are stored into the cache immediately
        if isinstance(id, int):
            log.cache("Caching {0} ID#{1}".format(cls.__name__, id))
            cls._cache[id] = new
        # FIX: basestring is Python 2 only (NameError on Python 3);
        # use six.string_types for consistency with the rest of the code
        elif isinstance(id, six.string_types) or "name" in kwargs:
            log.cache("Caching {0} '{1}'".format(
                    cls.__name__, (id or kwargs.get("name"))))
        return new
def enter(self, filename=None):
    """ Perform setup, create lock, load the cache """
    # Persistent caching disabled ---> nothing to do here
    if get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Prepare the cache, acquire the lock and read the saved data
    self.setup(filename)
    self.lock()
    self.load()
def load(self):
    """
    Load caches from specified file.

    No-op when persistent caching is disabled or no cache filename is
    set. Restores per-class caches (immutable & mutable classes first,
    containers last to avoid object duplicates), wakes containers from
    their id-sleep, then expires stale items.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Load the saved cache from file
    try:
        log.debug("Loading persistent cache from {0}".format(
                self._filename))
        input_file = gzip.open(self._filename, 'rb')
        data = pickle.load(input_file)
        input_file.close()
    except EOFError:
        # A zero-length file is normal on the very first run
        log.cache("Cache file empty, will fill it upon exit")
        return
    except (IOError, zlib.error) as error:
        # errno 2 (ENOENT) ---> missing file, expected on first run
        if getattr(error, "errno", None) == 2:
            log.warn("Cache file not found, will create one on exit")
            return
        else:
            log.error("Failed to load the cache ({0})".format(error))
            log.warn("Going on but switching to the CACHE_OBJECTS level")
            set_cache_level(config.CACHE_OBJECTS)
            self.unlock()
            return
    # Restore cache for immutable & mutable classes first
    for current_class in self._immutable + self._mutable:
        try:
            log.cache("Loading cache for {0}".format(
                    current_class.__name__))
            current_class._cache = data[current_class.__name__]
        except KeyError:
            log.cache("Failed to load cache for {0}, starting "
                    "with empty".format(current_class.__name__))
            current_class._cache = {}
    # Containers to be loaded last (to prevent object duplicates)
    for current_class in self._containers:
        try:
            log.cache("Loading cache for {0}".format(
                    current_class.__name__))
            current_class._cache = data[current_class.__name__]
        except KeyError:
            log.cache("Failed to load cache for {0}, starting "
                    "with empty".format(current_class.__name__))
            current_class._cache = {}
        # Wake up container objects from the id-sleep
        # FIX: dict.itervalues() does not exist on Python 3 ---> values()
        for container in current_class._cache.values():
            container._wake()
    # Clear expired items and give a short summary for debugging
    self.expire()
    log.cache("Cache restore stats:\n" + self.stats().strip())
def _index(self, *keys):
    """ Index self into the class cache if caching enabled """
    # Without object caching there is nothing to index
    if config.get_cache_level() < config.CACHE_OBJECTS:
        return
    cache = self.__class__._cache
    # Store under the object id (when known) and under each extra key
    if self._id is not NitrateNone:
        cache[self._id] = self
    for key in keys:
        cache[key] = self
def load(self): """ Load caches from specified file """ # Nothing to do when persistent caching is off if not self._filename or get_cache_level() < config.CACHE_PERSISTENT: return # Load the saved cache from file try: log.debug("Loading persistent cache from {0}".format( self._filename)) input_file = gzip.open(self._filename, 'rb') data = pickle.load(input_file) input_file.close() except EOFError: log.cache("Cache file empty, will fill it upon exit") return except (IOError, zlib.error) as error: if getattr(error, "errno", None) == 2: log.warn("Cache file not found, will create one on exit") return else: log.error("Failed to load the cache ({0})".format(error)) log.warn("Going on but switching to the CACHE_OBJECTS level") set_cache_level(config.CACHE_OBJECTS) self.unlock() return # Restore cache for immutable & mutable classes first for current_class in self._immutable + self._mutable: try: log.cache("Loading cache for {0}".format( current_class.__name__)) current_class._cache = data[current_class.__name__] except KeyError: log.cache("Failed to load cache for {0}, starting " "with empty".format(current_class.__name__)) current_class._cache = {} # Containers to be loaded last (to prevent object duplicates) for current_class in self._containers: try: log.cache("Loading cache for {0}".format( current_class.__name__)) current_class._cache = data[current_class.__name__] except KeyError: log.cache("Failed to load cache for {0}, starting " "with empty".format(current_class.__name__)) current_class._cache = {} # Wake up container objects from the id-sleep for container in current_class._cache.values(): container._wake() # Clear expired items and give a short summary for debugging self.expire() log.cache("Cache restore stats:\n" + self.stats().strip())
def exit(self):
    """ Save the cache and remove the lock """
    # Persistent caching disabled ---> nothing to do here
    if get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Read-only mode ---> leave the cache file untouched
    if self._mode == "read-only":
        log.cache("Skipping persistent cache save in read-only mode")
        return
    # Write the cache out and release the lock
    self.save()
    self.unlock()
def _fetch(self, inset=None):
    """ Save cache timestamp and initialize from inset if given """
    Nitrate._fetch(self)
    initialized = inset is not None
    # Copy the initial set when provided (two independent copies so
    # that later modifications can be detected)
    if initialized:
        log.debug("Initializing {0} for {1} from the inset".format(
                self.__class__.__name__, self._identifier))
        log.debug(pretty(inset))
        self._current = set(inset)
        self._original = set(inset)
    # Register self in the container class cache
    if config.get_cache_level() >= config.CACHE_OBJECTS:
        self.__class__._cache[self._id] = self
    # True ---> the data have already been initialized here
    return initialized
def setter(self, value):
    """ Update the attribute, remember or save the change """
    # Fetch the attribute first unless it is already initialized
    if getattr(self, "_" + field) is NitrateNone:
        self._fetch()
    # Value unchanged ---> nothing to do
    if getattr(self, "_" + field) == value:
        return
    setattr(self, "_" + field, value)
    log.info(u"Updating {0}'s {1} to '{2}'".format(
            self.identifier, field, value))
    # With caching on just mark as modified, otherwise save right away
    if config.get_cache_level() != config.CACHE_NONE:
        self._modified = True
    else:
        self._update()
def _fetch(self, inset=None):
    """ Save cache timestamp and initialize from inset if given """
    Nitrate._fetch(self)
    if inset is not None:
        # Keep two independent copies so that later modifications can
        # be diffed against the originally supplied state
        log.debug("Initializing {0} for {1} from the inset".format(
                self.__class__.__name__, self._identifier))
        log.debug(pretty(inset))
        self._original = set(inset)
        self._current = set(inset)
    # Cache the container itself when object caching is enabled
    if config.get_cache_level() >= config.CACHE_OBJECTS:
        self.__class__._cache[self._id] = self
    # Report whether the data have been initialized from the inset
    return inset is not None
def setup(self, filename=None):
    """ Set cache filename and initialize expiration times """
    # Persistent caching off ---> nothing to set up
    if get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Cache filename: explicit argument wins, config comes second
    if filename is not None:
        self._filename = filename
    else:
        try:
            self._filename = config.Config().cache.file
        except AttributeError:
            log.warn("Persistent caching off "
                    "(cache filename not found in the config)")
    self._lock = "{0}.lock".format(self._filename)
    # Pick up user-defined expiration times from the config
    expirable = self._classes + [
            Nitrate, mutable.Mutable, containers.Container]
    for klass in expirable:
        try:
            expiration = getattr(
                    config.Config().expiration, klass.__name__.lower())
        except AttributeError:
            continue
        # Plain seconds are converted, special string values translated
        if isinstance(expiration, int):
            expiration = datetime.timedelta(seconds=expiration)
        elif expiration == "NEVER_EXPIRE":
            expiration = config.NEVER_EXPIRE
        elif expiration == "NEVER_CACHE":
            expiration = config.NEVER_CACHE
        # Anything other than a timedelta by now is invalid
        if not isinstance(expiration, datetime.timedelta):
            log.warn("Invalid expiration time '{0}'".format(expiration))
            continue
        klass._expiration = expiration
        log.debug("User defined expiration for {0}: {1}".format(
                klass.__name__, expiration))
def setup(self, filename=None): """ Set cache filename and initialize expiration times """ # Nothing to do when persistent caching is off if get_cache_level() < config.CACHE_PERSISTENT: return # Detect cache filename, argument first, then config if filename is not None: self._filename = filename else: try: self._filename = config.Config().cache.file except AttributeError: log.warn("Persistent caching off " "(cache filename not found in the config)") self._lock = "{0}.lock".format(self._filename) # Initialize user-defined expiration times from the config for klass in self._classes + [Nitrate, mutable.Mutable, containers.Container]: try: expiration = getattr( config.Config().expiration, klass.__name__.lower()) except AttributeError: continue # Convert from seconds, handle special values if isinstance(expiration, int): expiration = datetime.timedelta(seconds=expiration) elif expiration == "NEVER_EXPIRE": expiration = config.NEVER_EXPIRE elif expiration == "NEVER_CACHE": expiration = config.NEVER_CACHE # Give warning for invalid expiration values if isinstance(expiration, datetime.timedelta): klass._expiration = expiration log.debug("User defined expiration for {0}: {1}".format( klass.__name__, expiration)) else: log.warn("Invalid expiration time '{0}'".format(expiration))
def remove(self, items):
    """ Remove an item or a list of items from the container """
    # Normalize the argument to a set
    items = set(items) if isinstance(items, list) else {items}
    # Figure out which of the items are actually present
    present = items.intersection(self._items)
    if not present:
        return
    log.info("Removing {0} from {1}'s {2}".format(
            listed([item.identifier for item in present],
            self._class.__name__, max=10),
            self._object.identifier,
            self.__class__.__name__))
    self._items.difference_update(items)
    # With caching on just mark as modified, otherwise save right away
    if config.get_cache_level() != config.CACHE_NONE:
        self._modified = True
    else:
        self._update()
def add(self, items):
    """ Add an item or a list of items to the container """
    # Normalize the argument to a set
    items = set(items) if isinstance(items, list) else {items}
    # Figure out which of the items are actually new
    fresh = items - self._items
    if not fresh:
        return
    log.info("Adding {0} to {1}'s {2}".format(
            listed([item.identifier for item in fresh],
            self._class.__name__, max=10),
            self._object.identifier,
            self.__class__.__name__))
    self._items.update(items)
    # With caching on just mark as modified, otherwise save right away
    if config.get_cache_level() != config.CACHE_NONE:
        self._modified = True
    else:
        self._update()