def handle_label(self, label, **options): for model in get_models_from_label(label): try: index = get_index(model) except NotHandled: if self.verbosity >= 2: print "Skipping '%s' - no index." % model else: self.handle_model(model, index)
def update_object(item, remove=False, exception_handling=True):
    """ Update or remove an object from the search index.

    Accepts an identifier string, a Model instance, or a LazyModel
    instance. Runs after the transaction is committed, allowing for
    related data to be saved before indexing the object.

    :param item: identifier string, Model instance, or LazyModel instance.
    :param remove: when True, delete the object from the index instead of
        updating it.
    :param exception_handling: when True, swallow and log any error; when
        False, re-raise it to the caller.
    """
    try:
        index = get_index(item)
        if isinstance(item, basestring):
            # Dealing with an identifier string.
            if not remove:
                # Create a lazy instance with the read only cache. This means
                # that it can benefit from existing cached objects, but won't
                # fill up the cache with everything that gets indexed here.
                item = LazyModel(item, cache_backend=read_only_cache)
        # NOTE(review): a falsy LazyModel appears to mean the underlying row
        # could not be loaded — confirm against LazyModel.__nonzero__.
        if not remove and isinstance(item, LazyModel) and not item:
            # The identifier was for an object that does not exist any
            # more, so change this to a remove operation.
            logging.warning('Could not access %r for indexing' % LazyModel.get_identifier(item))
            remove = True
        if remove:
            # Remove this object from the index.
            identifier = LazyModel.get_identifier(item)
            index.remove_object(identifier)
        else:
            # Update this object in the index. This can actually remove the
            # object from the index, if the result of should_index is False.
            index.update_object(item)
    except Exception:
        if exception_handling:
            # debug_raise is presumably a kwarg of a project-local logging
            # wrapper (not stdlib logging) — TODO confirm.
            logging.exception('Error running update_object(%r)' % item, debug_raise=True)
        else:
            raise
def model_search(*models):
    """Create the basic combined search for the specified models."""
    if not models:
        # No models given: search across every registered index.
        unified = get_unified_index()
        unified.build()
        models = unified.indexes.keys()

    queryset = SearchQuerySet().models(*models)

    # Collect each index's filters, wrapping values as Optional so a
    # lookup only applies to documents that contain the field.
    lookups = {
        lookup: Optional(value)
        for model in models
        for lookup, value in get_index(model).filters().items()
    }
    if lookups:
        queryset = queryset.filter(**lookups)
    return queryset
# Tail of the preceding method — its `def` line is outside this chunk.
self.conn.refresh(indexes=[index_name])

def remove(self, obj_or_string, commit=True):
    """Delete a document from Elasticsearch.

    :param obj_or_string: a model instance or identifier string; resolved
        to a document id via ``get_identifier``.
    :param commit: when True, refresh the index after the delete so the
        removal is immediately visible to searches.
    """
    doc_id = get_identifier(obj_or_string)

    if not self.setup_complete:
        try:
            self.setup()
        except pyelasticsearch.ElasticSearchError, e:
            if not self.silently_fail:
                raise
            # Best-effort mode: log the setup failure and bail out.
            self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e)
            return

    # Map the document's index class to its physical index name.
    index = get_index(doc_id)
    index_name = self.index_names[index]
    try:
        self.conn.delete(index_name, "modelresult", doc_id)
        if commit:
            self.conn.refresh(indexes=[index_name])
    except (requests.RequestException, pyelasticsearch.ElasticSearchError), e:
        if not self.silently_fail:
            raise
        self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e)

# Header of the next method — its body is outside this chunk.
def clear(self, models=[], commit=True):
def remove(self, obj_or_string, commit=True): doc_id = get_identifier(obj_or_string) if not self.setup_complete: try: self.setup() except pyelasticsearch.ElasticSearchError, e: if not self.silently_fail: raise self.log.error( "Failed to remove document '%s' from Elasticsearch: %s", doc_id, e) return index = get_index(doc_id) index_name = self.index_names[index] try: self.conn.delete(index_name, 'modelresult', doc_id) if commit: self.conn.refresh(indexes=[index_name]) except (requests.RequestException, pyelasticsearch.ElasticSearchError), e: if not self.silently_fail: raise self.log.error( "Failed to remove document '%s' from Elasticsearch: %s", doc_id, e)