def _read_master_cache(self):
    """Load per-object cache entries from the 'master.idx' file under self.root.

    Populates self._cache_load_timestamp, self._cached_cat, self._cached_cls
    and self._cached_obj from the pickled master index. On any error the
    master index is treated as corrupt: the problem is logged and all four
    cache dictionaries are emptied.
    """
    try:
        _master_idx = os.path.join(self.root, 'master.idx')
        if os.path.isfile(_master_idx):
            logger.debug("Reading Master index")
            # Remember when the index was written so staleness can be detected later.
            self._master_index_timestamp = os.stat(_master_idx).st_ctime
            with open(_master_idx, 'r') as input_f:
                this_master_cache = pickle_from_file(input_f)[0]
            # Each entry is (id, load_timestamp, category, class_name, index_cache).
            for this_cache in this_master_cache:
                this_id = this_cache[0]
                self._cache_load_timestamp[this_id] = this_cache[1]
                self._cached_cat[this_id] = this_cache[2]
                self._cached_cls[this_id] = this_cache[3]
                self._cached_obj[this_id] = this_cache[4]
        else:
            logger.debug("Not Reading Master Index")
    except Exception as err:
        Ganga.Utility.logging.log_unknown_exception()
        logger.debug("Master Index corrupt, ignoring it")
        logger.debug("Exception: %s" % str(err))
        # A corrupt index invalidates anything partially loaded above.
        # The original popped keys while iterating iteritems(), which raises
        # RuntimeError ("dictionary changed size during iteration") in
        # Python 2; dict.clear() is the safe equivalent.
        self._cache_load_timestamp.clear()
        self._cached_cat.clear()
        self._cached_cls.clear()
        self._cached_obj.clear()
def index_load(self, id):
    """ load the index file for this object if necessary
        Loads if never loaded or timestamp changed. Creates object if necessary
        Returns True if this object has been changed, False if not
        Raise IOError on access or unpickling error
        Raise OSError on stat error
        Raise PluginManagerError if the class name is not found"""
    #logger.debug("Loading index %s" % id)
    fn = self.get_idxfn(id)
    # Stat the index file once so the timestamp we compare against is the
    # same one we record below (the original called os.stat twice, which
    # races against a concurrent writer touching the file in between).
    idx_ctime = os.stat(fn).st_ctime
    if self._cache_load_timestamp.get(id, 0) != idx_ctime:  # index timestamp changed
        try:
            with open(fn, 'r') as fobj:
                cat, cls, cache = pickle_from_file(fobj)[0]
        except Exception as x:
            logger.debug("index_load Exception: %s" % str(x))
            raise IOError("Error on unpickling: %s %s" % (getName(x), x))
        if id in self.objects:
            obj = self.objects[id]
            if obj._data:
                # Object is already materialised: flag it for a lazy refresh
                # rather than rebuilding it here.
                obj.__dict__["_registry_refresh"] = True
        else:
            obj = self._make_empty_object_(id, cat, cls)
        obj._index_cache = cache
        self._cache_load_timestamp[id] = idx_ctime
        self._cached_cat[id] = cat
        self._cached_cls[id] = cls
        self._cached_obj[id] = cache
        return True
    elif id not in self.objects:
        # Timestamp unchanged but the object was never materialised:
        # build an empty placeholder from the cached metadata.
        self.objects[id] = self._make_empty_object_(
            id, self._cached_cat[id], self._cached_cls[id])
        self.objects[id]._index_cache = self._cached_obj[id]
        return True
    return False
def index_load(self, this_id):
    """ load the index file for this object if necessary
        Loads if never loaded or timestamp changed. Creates object if necessary
        Returns True if this object has been changed, False if not
        Raise IOError on access or unpickling error
        Raise OSError on stat error
        Raise PluginManagerError if the class name is not found"""
    #logger.debug("Loading index %s" % this_id)
    fn = self.get_idxfn(this_id)
    # Stat the index file once so the timestamp we compare, log and record
    # is the same value (the original called os.stat three times, racing
    # against a concurrent writer).
    idx_ctime = os.stat(fn).st_ctime
    if self._cache_load_timestamp.get(this_id, 0) != idx_ctime:  # index timestamp changed
        logger.debug("%s != %s" % (str(self._cache_load_timestamp.get(this_id, 0)), str(idx_ctime)))
        try:
            with open(fn, 'r') as fobj:
                cat, cls, cache = pickle_from_file(fobj)[0]
        except Exception as x:
            logger.debug("index_load Exception: %s" % str(x))
            raise IOError("Error on unpickling: %s %s" % (getName(x), x))
        if this_id in self.objects:
            # Object already materialised: just flag it stale for lazy reload.
            obj = self.objects[this_id]
            setattr(obj, "_registry_refresh", True)
        else:
            try:
                obj = self._make_empty_object_(this_id, cat, cls)
            except Exception as err:
                raise IOError('Failed to Parse information in Index file: %s. Err: %s' % (str(fn), str(err)))
        # NOTE(review): the original also merged 'cache' into a copy of
        # obj.getNodeIndexCache() and then discarded the result (the
        # setNodeData call using it was commented out) -- dead code removed;
        # setNodeIndexCache(cache) below is what actually takes effect.
        obj.setNodeIndexCache(cache)
        self._cache_load_timestamp[this_id] = idx_ctime
        self._cached_cat[this_id] = cat
        self._cached_cls[this_id] = cls
        self._cached_obj[this_id] = cache
        return True
    elif this_id not in self.objects:
        # Timestamp unchanged but the object was never materialised:
        # build an empty placeholder from the cached metadata.
        self.objects[this_id] = self._make_empty_object_(
            this_id, self._cached_cat[this_id], self._cached_cls[this_id])
        self.objects[this_id].setNodeIndexCache(self._cached_obj[this_id])
        setattr(self.objects[this_id], '_registry_refresh', True)
        return True
    else:
        logger.debug("Doubly loading of object with ID: %s" % this_id)
        logger.debug("Just silently continuing")
    return False
def index_load(self, this_id):
    """ load the index file for this object if necessary
        Loads if never loaded or timestamp changed. Creates object if necessary
        Returns True if this object has been changed, False if not
        Raise IOError on access or unpickling error
        Raise OSError on stat error
        Raise PluginManagerError if the class name is not found"""
    #logger.debug("Loading index %s" % this_id)
    fn = self.get_idxfn(this_id)
    # Stat the index file once so the timestamp we compare, log and record
    # is the same value (the original called os.stat three times, racing
    # against a concurrent writer).
    idx_ctime = os.stat(fn).st_ctime
    if self._cache_load_timestamp.get(this_id, 0) != idx_ctime:  # index timestamp changed
        logger.debug("%s != %s" % (str(self._cache_load_timestamp.get(this_id, 0)), str(idx_ctime)))
        try:
            with open(fn, 'r') as fobj:
                cat, cls, cache = pickle_from_file(fobj)[0]
        except Exception as x:
            logger.debug("index_load Exception: %s" % str(x))
            raise IOError("Error on unpickling: %s %s" % (getName(x), x))
        if this_id in self.objects:
            # Object already materialised: just flag it stale for lazy reload.
            obj = self.objects[this_id]
            setattr(obj, "_registry_refresh", True)
        else:
            try:
                obj = self._make_empty_object_(this_id, cat, cls)
            except Exception as err:
                raise IOError('Failed to Parse information in Index file: %s. Err: %s' % (str(fn), str(err)))
        # NOTE(review): the original also merged 'cache' into a copy of
        # obj.getNodeIndexCache() and then discarded the result (the
        # setNodeData call using it was commented out) -- dead code removed;
        # setNodeIndexCache(cache) below is what actually takes effect.
        obj.setNodeIndexCache(cache)
        self._cache_load_timestamp[this_id] = idx_ctime
        self._cached_cat[this_id] = cat
        self._cached_cls[this_id] = cls
        self._cached_obj[this_id] = cache
        return True
    elif this_id not in self.objects:
        # Timestamp unchanged but the object was never materialised:
        # build an empty placeholder from the cached metadata.
        self.objects[this_id] = self._make_empty_object_(
            this_id, self._cached_cat[this_id], self._cached_cls[this_id])
        self.objects[this_id].setNodeIndexCache(self._cached_obj[this_id])
        setattr(self.objects[this_id], '_registry_refresh', True)
        return True
    else:
        logger.debug("Doubly loading of object with ID: %s" % this_id)
        logger.debug("Just silently continuing")
    return False
def _read_master_cache(self):
    """Populate the in-memory caches from the on-disk 'master.idx' file.

    Reads the pickled master index (if present) and fills
    self._cache_load_timestamp, self._cached_cat, self._cached_cls and
    self._cached_obj. Any failure is treated as a corrupt master index:
    it is logged and all stored cache state is wiped.
    """
    try:
        master_idx_path = os.path.join(self.root, 'master.idx')
        if not os.path.isfile(master_idx_path):
            logger.debug("Not Reading Master Index")
            return
        logger.debug("Reading Master index")
        # Record when the index was written so staleness can be detected later.
        self._master_index_timestamp = os.stat(master_idx_path).st_ctime
        with open(master_idx_path, 'r') as input_f:
            master_entries = pickle_from_file(input_f)[0]
        # Each entry is (id, load_timestamp, category, class_name, index_cache).
        for entry in master_entries:
            entry_id = entry[0]
            self._cache_load_timestamp[entry_id] = entry[1]
            self._cached_cat[entry_id] = entry[2]
            self._cached_cls[entry_id] = entry[3]
            self._cached_obj[entry_id] = entry[4]
    except Exception as err:
        Ganga.Utility.logging.log_unknown_exception()
        logger.debug("Master Index corrupt, ignoring it")
        logger.debug("Exception: %s" % str(err))
        self._clear_stored_cache()
def index_load(self,id): """ load the index file for this object if necessary Loads if never loaded or timestamp changed. Creates object if necessary Returns True if this object has been changed, False if not Raise IOError on access or unpickling error Raise OSError on stat error Raise PluginManagerError if the class name is not found""" logger.debug("Loading index %s" % id) fn = self.get_idxfn(id) if self._cache_load_timestamp.get(id,0) != os.stat(fn).st_ctime: # index timestamp changed fobj = file(fn) try: try: cat,cls,cache = pickle_from_file(fobj)[0] except Exception, x: raise IOError("Error on unpickling: %s %s" % (x.__class__.__name__, x)) if id in self.objects: obj = self.objects[id] if obj._data: obj.__dict__["_registry_refresh"] = True else: obj = self._make_empty_object_(id,cat,cls) obj._index_cache = cache finally: