def refreshCatalog(self, clear=0, pghandler=None, quick=False):
    """Re-index every object whose path is recorded in the catalog.

    clear -- when true, snapshot the recorded paths, wipe the catalog,
             and re-catalog everything from the snapshot.
    pghandler -- optional progress handler with init/report/finish
             methods, notified once per object.
    quick -- passed through to catalog_object (which may skip slow
             indexes when set).
    """
    cat = self._catalog
    # Materialize the paths up front: this snapshots them before a
    # possible cat.clear(), and keeps indexed access valid on Python 3,
    # where dict.values() returns a non-indexable view.  (On Python 2,
    # values() already returned a fresh list, so behavior is unchanged.)
    paths = list(cat.paths.values())
    if clear:
        cat.clear()
    num_objects = len(paths)
    if pghandler:
        pghandler.init('Refreshing catalog: %s' % self.absolute_url(1),
                       num_objects)
    for i, p in enumerate(paths):
        if pghandler:
            pghandler.report(i)
        obj = self.resolve_path(p)
        if obj is None:
            # Fall back to URL traversal when direct path resolution
            # fails.
            obj = self.resolve_url(p, self.REQUEST)
        if obj is not None:
            try:
                self.catalog_object(obj, p, pghandler=pghandler,
                                    quick=quick)
            except ConflictError:
                # ZODB conflicts must propagate so the transaction can
                # be retried; never swallow them.
                raise
            except Exception:
                # Best-effort refresh: log the failure and keep going
                # with the remaining objects.
                LOG.error('Recataloging object at %s failed' % p,
                          exc_info=sys.exc_info())
    if pghandler:
        pghandler.finish()
def catalogObject(self, object, uid, threshold=None, idxs=None,
                  update_metadata=1, quick=False):
    """Catalog *object* under the unique identifier *uid*.

    The object is applied to each index in turn.  If 'idxs' is given
    (as a sequence), only the named indexes are updated.  If
    'update_metadata' is true (the default), the object's metadata
    record is refreshed as well; for objects new to the catalog the
    metadata is always created regardless of this flag.

    Returns the total number of index entries recorded.
    """
    if idxs is None:
        idxs = []

    rid = self.uids.get(uid, None)
    if rid is None:
        # First time we see this uid: create the metadata record, grow
        # the catalog length and register the uid <-> rid mapping.
        rid = self.updateMetadata(object, uid, None)
        self._length.change(1)
        self.uids[uid] = rid
        self.paths[rid] = uid
    elif update_metadata:
        # Existing record: refresh its metadata on request.
        self.updateMetadata(object, uid, rid)

    # NOTE: an explicitly empty *list* means "all indexes"; any other
    # sequence (even an empty tuple) is taken literally.
    use_indexes = self.indexes.keys() if idxs == [] else idxs

    total = 0
    for name in use_indexes:
        index = self.getIndex(name)
        if quick and index.meta_type in SLOW_INDEXES:
            # Quick mode skips the expensive index types entirely.
            continue
        if hasattr(index, 'index_object'):
            total += index.index_object(rid, object, threshold)
        else:
            LOG.error('catalogObject was passed bad index '
                      'object %s.' % str(index))
    return total
def manage_convertIndexes(self, REQUEST=None, RESPONSE=None, URL1=None):
    """Recreate indexes derived from UnIndex.

    The implementation of __len__ changed in Zope 2.8: pre-Zope 2.7
    installations stored __len__ as a persistent attribute on the index
    instance, which is incompatible with the new extension-class
    implementation based on new-style classes.  Each affected index is
    deleted and re-added with the same id and type, its relevant
    settings are copied over, and everything is reindexed at the end.
    """
    LOG.info('Start migration of indexes for %s' % self.absolute_url(1))
    converted = []
    for old_index in self.Indexes.objectValues():
        base_names = [str(b) for b in old_index.__class__.__bases__]
        # PathIndex is rebuilt unconditionally; everything else only if
        # it derives from an UnIndex variant.
        rebuild = (old_index.meta_type == 'PathIndex'
                   or any('UnIndex' in b for b in base_names))
        if not rebuild:
            continue
        meta_type = old_index.meta_type
        index_id = old_index.getId()
        LOG.info('processing index %s' % index_id)
        # Preserve the settings the new instance will need.
        attrs = getattr(old_index, 'indexed_attrs', None)
        if meta_type == 'DateRangeIndex':
            since = getattr(old_index, '_since_field', None)
            until = getattr(old_index, '_until_field', None)
        self.delIndex(old_index.getId())
        self.addIndex(index_id, meta_type)
        replacement = self.Indexes[index_id]
        if attrs:
            setattr(replacement, 'indexed_attrs', attrs)
        if meta_type == 'DateRangeIndex':
            setattr(replacement, '_since_field', since)
            setattr(replacement, '_until_field', until)
        converted.append(index_id)
    if converted:
        LOG.info('Reindexing %s' % ', '.join(converted))
        self.manage_reindexIndex(converted, REQUEST)
    self._migrated_280 = True
    LOG.info('Finished migration of indexes for %s' % self.absolute_url(1))
    if RESPONSE:
        RESPONSE.redirect(
            URL1 +
            '/manage_main?manage_tabs_message=Indexes%20converted%20and%20reindexed')
def manage_convertIndexes(self, REQUEST=None, RESPONSE=None, URL1=None):
    """Recreate indexes derived from UnIndex.

    The implementation of __len__ changed in Zope 2.8: pre-Zope 2.7
    installations stored __len__ as a persistent attribute on the index
    instance, which is incompatible with the new extension-class
    implementation based on new-style classes.  Affected indexes are
    dropped, recreated with the same id/type, have their settings
    restored, and are reindexed in one batch at the end.

    NOTE(review): this definition duplicates an earlier
    manage_convertIndexes in the same file; the later one shadows the
    earlier — consider removing one of them.
    """
    LOG.info('Start migration of indexes for %s' % self.absolute_url(1))
    to_reindex = []
    for current in self.Indexes.objectValues():
        ancestry = [str(b) for b in current.__class__.__bases__]
        # Rebuild PathIndex always; other indexes only when they derive
        # from an UnIndex variant.
        affected = (current.meta_type == 'PathIndex'
                    or any('UnIndex' in b for b in ancestry))
        if not affected:
            continue
        kind = current.meta_type
        the_id = current.getId()
        LOG.info('processing index %s' % the_id)
        # Capture settings to restore on the recreated index.
        saved_attrs = getattr(current, 'indexed_attrs', None)
        if kind == 'DateRangeIndex':
            saved_since = getattr(current, '_since_field', None)
            saved_until = getattr(current, '_until_field', None)
        self.delIndex(current.getId())
        self.addIndex(the_id, kind)
        fresh = self.Indexes[the_id]
        if saved_attrs:
            setattr(fresh, 'indexed_attrs', saved_attrs)
        if kind == 'DateRangeIndex':
            setattr(fresh, '_since_field', saved_since)
            setattr(fresh, '_until_field', saved_until)
        to_reindex.append(the_id)
    if to_reindex:
        LOG.info('Reindexing %s' % ', '.join(to_reindex))
        self.manage_reindexIndex(to_reindex, REQUEST)
    self._migrated_280 = True
    LOG.info('Finished migration of indexes for %s' % self.absolute_url(1))
    if RESPONSE:
        RESPONSE.redirect(
            URL1 +
            '/manage_main?manage_tabs_message=Indexes%20converted%20and%20reindexed')
def uncatalogObject(self, uid):
    """Remove the object known by *uid* from the catalog.

    The uid must be the same one used when the object was cataloged,
    otherwise nothing is removed.  A uid that is not in the catalog is
    logged as an error rather than raising an exception.
    """
    rid = self.uids.get(uid, None)
    if rid is None:
        LOG.error('uncatalogObject unsuccessfully '
                  'attempted to uncatalog an object '
                  'with a uid of %s. ' % str(uid))
        return

    for name in self.indexes.keys():
        index = self.getIndex(name)
        if not hasattr(index, 'unindex_object'):
            continue
        try:
            index.unindex_object(rid)
        except KeyError:
            # An index missing this rid is not fatal; log it and keep
            # unindexing the remaining indexes.
            LOG.error('uncatalogObject unsuccessfully '
                      'attempted to unindex uid %s '
                      'for index %s. ' % (str(uid), name))

    # Drop the record itself and both directions of the mapping.
    del self.data[rid]
    del self.paths[rid]
    del self.uids[uid]
    self._length.change(-1)