def _write_master_cache(self, shutdown=False):
    """Write this registry's master index file ('master.idx' under self.root).

    Skips the rewrite when the existing master index is less than 5 minutes
    old, unless ``shutdown`` is True. Before writing, the per-object index
    file is refreshed for every object whose index cache has changed.

    Should not raise: all errors are logged and swallowed.
    """
    logger.debug("Updating master index")
    try:
        _master_idx = os.path.join(self.root, 'master.idx')
        this_master_cache = []
        if os.path.isfile(_master_idx) and not shutdown:
            # Existing master index is recent enough (< 300s): nothing to do.
            if abs(self._master_index_timestamp - os.stat(_master_idx).st_ctime) < 300:
                return
        items_to_save = self.objects.iteritems()
        for k, v in items_to_save:
            try:
                # Check and write the per-object index first.
                obj = self.objects[k]
                new_index = None
                if obj is not None:
                    new_index = self.registry.getIndexCache(obj)
                if new_index is not None and new_index != obj.getNodeIndexCache():
                    arr_k = [k]
                    if len(self.lock(arr_k)) != 0:
                        # BUG FIX: index_write expects a single id, not the
                        # list used by lock()/unlock(); passing arr_k raised a
                        # TypeError that was silently swallowed below.
                        self.index_write(k)
                        self.unlock(arr_k)
            except Exception as err:
                logger.debug("Failed to update index: %s on startup/shutdown" % str(k))
                logger.debug("Reason: %s" % str(err))
        import errno  # hoisted out of the exception handler below
        iterables = self._cache_load_timestamp.iteritems()
        for k, v in iterables:
            # BUG FIX: cached_list must be reset for every entry. Previously a
            # single list was created before the loop, so the master cache
            # accumulated N references to one ever-growing list.
            cached_list = []
            cached_list.append(k)
            try:
                fn = self.get_idxfn(k)
                time = os.stat(fn).st_ctime
            except OSError as err:
                logger.debug("_write_master_cache: %s" % str(err))
                logger.debug("_cache_load_timestamp: %s" % str(self._cache_load_timestamp))
                if err.errno == errno.ENOENT:  # If file is not found
                    time = -1
                else:
                    raise
            if time > 0:
                cached_list.append(time)
                cached_list.append(self._cached_cat[k])
                cached_list.append(self._cached_cls[k])
                cached_list.append(self._cached_obj[k])
                this_master_cache.append(cached_list)
        try:
            with open(_master_idx, 'w') as of:
                pickle_to_file(this_master_cache, of)
        except IOError as err:
            logger.debug("write_master: %s" % str(err))
            # Best effort: drop a possibly half-written master index.
            try:
                os.remove(os.path.join(self.root, 'master.idx'))
            except OSError as x:
                Ganga.Utility.logging.log_user_exception(debug=True)
    except Exception as err:
        logger.debug("write_error2: %s" % str(err))
        Ganga.Utility.logging.log_unknown_exception()
    return
def index_write(self, id):
    """ write an index file for this object (must be locked). Should not raise any Errors """
    obj = self.objects[id]
    try:
        ifn = self.get_idxfn(id)
        new_idx_cache = self.registry.getIndexCache(obj)
        # Only rewrite when the cache changed or the index file is missing.
        if new_idx_cache != obj._index_cache or not os.path.exists(ifn):
            obj._index_cache = new_idx_cache
            # BUG FIX: the original used file(ifn, "w") without ever closing
            # it, leaking the handle (and file() is Python-2-only); a context
            # manager closes the index file deterministically.
            with open(ifn, "w") as index_file:
                pickle_to_file((obj._category, obj._name, obj._index_cache), index_file)
    except IOError as x:
        logger.error("Index saving to '%s' failed: %s %s" % (ifn, x.__class__.__name__, x))
def index_write(self, id):
    """ write an index file for this object (must be locked). Should not raise any Errors """
    obj = self.objects[id]
    try:
        ifn = self.get_idxfn(id)
        fresh_cache = self.registry.getIndexCache(obj)
        # Nothing to do when the cache is unchanged and the file exists.
        if fresh_cache == obj._index_cache and os.path.exists(ifn):
            return
        obj._index_cache = fresh_cache
        index_payload = (obj._category, getName(obj), obj._index_cache)
        with open(ifn, "w") as out_file:
            pickle_to_file(index_payload, out_file)
    except IOError as err:
        logger.error("Index saving to '%s' failed: %s %s" % (ifn, getName(err), str(err)))
def index_write(self, this_id):
    """ write an index file for this object (must be locked). Should not raise any Errors """
    obj = self.objects[this_id]
    try:
        index_filename = self.get_idxfn(this_id)
        fresh_cache = self.registry.getIndexCache(stripProxy(obj))
        stale = fresh_cache != obj.getNodeIndexCache()
        if stale or not os.path.exists(index_filename):
            with open(index_filename, "w") as out_file:
                pickle_to_file((obj._category, getName(obj), fresh_cache), out_file)
            self._cached_obj[this_id] = fresh_cache
            # Drop the now-persisted attributes from the node's index cache.
            for attr_name in fresh_cache.keys():
                obj.removeNodeIndexCacheAttribute(attr_name)
            self._cached_obj[this_id] = fresh_cache
    except IOError as err:
        logger.error("Index saving to '%s' failed: %s %s" % (index_filename, getName(err), str(err)))
def index_write(self, this_id, shutdown=False):
    """ write an index file for this object (must be locked). Should not raise any Errors, shutdown=True causes this to always be written regardless of any checks"""
    logger.debug("Writing index: %s" % this_id)
    obj = self.objects[this_id]
    try:
        index_filename = self.get_idxfn(this_id)
        fresh_cache = self.registry.getIndexCache(stripProxy(obj))
        # Write when forced (shutdown) or when no index file exists yet.
        must_write = shutdown or not os.path.exists(index_filename)
        if must_write:
            index_tuple = (obj._category, getName(obj), fresh_cache)
            with open(index_filename, "w") as out_file:
                logger.debug("Writing: %s" % str(index_tuple))
                pickle_to_file(index_tuple, out_file)
            self._cached_obj[this_id] = fresh_cache
            obj._index_cache = {}
            self._cached_obj[this_id] = fresh_cache
    except IOError as err:
        logger.error("Index saving to '%s' failed: %s %s" % (index_filename, getName(err), err))
def index_write(self, this_id):
    """ write an index file for this object (must be locked). Should not raise any Errors """
    obj = self.objects[this_id]
    try:
        index_filename = self.get_idxfn(this_id)
        fresh_cache = self.registry.getIndexCache(stripProxy(obj))
        if fresh_cache == obj.getNodeIndexCache() and os.path.exists(index_filename):
            return
        with open(index_filename, "w") as out_file:
            pickle_to_file((obj._category, getName(obj), fresh_cache), out_file)
        self._cached_obj[this_id] = fresh_cache
        # The index is persisted, so clear the in-memory node cache.
        obj.setNodeIndexCache({})
        self._cached_obj[this_id] = fresh_cache
    except IOError as err:
        logger.error("Index saving to '%s' failed: %s %s" % (index_filename, getName(err), str(err)))
def _write_master_cache(self, shutdown=False):
    """Write this registry's master index file ('master.idx' under self.root).

    Skips the rewrite when the existing master index is less than 5 minutes
    old, unless ``shutdown`` is True. Before writing, the per-object index
    file is refreshed for every fully-loaded object with an index cache.

    Should not raise: all errors are logged and swallowed.
    """
    import errno  # hoisted: previously imported inside a per-item except handler
    try:
        _master_idx = os.path.join(self.root, 'master.idx')
        this_master_cache = []
        if os.path.isfile(_master_idx) and not shutdown:
            # Existing master index is recent enough (< 300s): nothing to do.
            if abs(self._master_index_timestamp - os.stat(_master_idx).st_ctime) < 300:
                return
        items_to_save = self.objects.iteritems()
        for k, v in items_to_save:
            try:
                # Only refresh indexes for objects actually loaded in memory
                # ('in' membership test instead of a linear scan of .keys()).
                if k in self._fully_loaded:
                    obj = v
                    new_index = None
                    if obj is not None:
                        new_index = self.registry.getIndexCache(stripProxy(obj))
                    if new_index is not None:
                        arr_k = [k]
                        if len(self.lock(arr_k)) != 0:
                            self.index_write(k)
                            self.unlock(arr_k)
                            self._cached_obj[k] = new_index
            except Exception as err:
                logger.debug("Failed to update index: %s on startup/shutdown" % str(k))
                logger.debug("Reason: %s" % str(err))
        iterables = self._cache_load_timestamp.iteritems()
        for k, v in iterables:
            cached_list = []
            cached_list.append(k)
            try:
                fn = self.get_idxfn(k)
                time = os.stat(fn).st_ctime
            except OSError as err:
                logger.debug("_write_master_cache: %s" % str(err))
                logger.debug("_cache_load_timestamp: %s" % str(self._cache_load_timestamp))
                if err.errno == errno.ENOENT:  # If file is not found
                    time = -1
                else:
                    raise
            if time > 0:
                cached_list.append(time)
                cached_list.append(self._cached_cat[k])
                cached_list.append(self._cached_cls[k])
                cached_list.append(self._cached_obj[k])
                this_master_cache.append(cached_list)
        try:
            with open(_master_idx, 'w') as of:
                pickle_to_file(this_master_cache, of)
        except IOError as err:
            logger.debug("write_master: %s" % str(err))
            # Best effort: drop a possibly half-written master index.
            try:
                os.remove(os.path.join(self.root, 'master.idx'))
            except OSError as x:
                Ganga.Utility.logging.log_user_exception(debug=True)
    except Exception as err:
        logger.debug("write_error2: %s" % str(err))
        Ganga.Utility.logging.log_unknown_exception()
    return