def refresh_index(self):
    """Refresh each search index exactly once.

    Several models may share one index, so indexes already refreshed
    are tracked in a list and skipped on subsequent models.
    """
    already_refreshed = []
    for indexed_model in get_indexed_models():
        model_index = self.get_index_for_model(indexed_model)
        # A list (not a set) is used on purpose: index objects may not be hashable.
        if model_index in already_refreshed:
            continue
        model_index.refresh()
        already_refreshed.append(model_index)
def register_signal_handlers():
    """Attach save/delete signal handlers to every indexed model.

    Models can opt out of automatic index updates by setting
    ``search_auto_update = False`` on the model class.
    """
    for model in index.get_indexed_models():
        # The opt-out flag defaults to enabled when absent.
        if getattr(model, 'search_auto_update', True):
            post_save.connect(post_save_signal_handler, sender=model)
            post_delete.connect(post_delete_signal_handler, sender=model)
def update_backend(self, backend_name, schema_only=False, chunk_size=DEFAULT_CHUNK_SIZE):
    """Rebuild every search index of the named backend.

    Args:
        backend_name: key identifying the backend in the search settings.
        schema_only: when True, register model schemas but index no objects.
        chunk_size: number of objects passed to the index per add_items call.
    """
    self.stdout.write("Updating backend: " + backend_name)
    backend = get_search_backend(backend_name)

    # Backends without a rebuilder class have nothing to rebuild.
    if not backend.rebuilder_class:
        self.stdout.write("Backend '%s' doesn't require rebuilding" % backend_name)
        return

    models_grouped_by_index = group_models_by_index(backend, get_indexed_models()).items()
    if not models_grouped_by_index:
        self.stdout.write(backend_name + ": No indices to rebuild")

    for index, models in models_grouped_by_index:
        self.stdout.write(backend_name + ": Rebuilding index %s" % index.name)

        # Start the rebuild — the rebuilder may hand back a fresh index object.
        rebuilder = backend.rebuilder_class(index)
        index = rebuilder.start()

        # Register each model's schema with the new index.
        for model in models:
            index.add_model(model)

        # Index the objects themselves unless only the schema was requested.
        object_count = 0
        if not schema_only:
            for model in models:
                self.stdout.write(
                    "{}: {}.{} ".format(backend_name, model._meta.app_label, model.__name__).ljust(35),
                    ending="",
                )

                # Feed objects to the index chunk_size at a time, in pk order.
                indexed_queryset = model.get_indexed_objects().order_by("pk")
                for chunk in self.print_iter_progress(self.queryset_chunks(indexed_queryset, chunk_size)):
                    index.add_items(model, chunk)
                    object_count += len(chunk)
                self.print_newline()

        # Finish the rebuild and report the total.
        rebuilder.finish()
        self.stdout.write(backend_name + ": indexed %d objects" % object_count)
        self.print_newline()
def update_backend(self, backend_name, schema_only=False, chunk_size=DEFAULT_CHUNK_SIZE):
    """Rebuild all indices belonging to one search backend.

    Args:
        backend_name: key identifying the backend in the search settings.
        schema_only: when True, only model schemas are registered; no
            objects are indexed.
        chunk_size: how many objects are handed to add_items per call.
    """
    self.stdout.write("Updating backend: " + backend_name)
    backend = get_search_backend(backend_name)

    # Nothing to do for backends that do not define a rebuilder.
    if not backend.rebuilder_class:
        self.stdout.write("Backend '%s' doesn't require rebuilding" % backend_name)
        return

    grouped = group_models_by_index(backend, get_indexed_models()).items()
    if not grouped:
        self.stdout.write(backend_name + ": No indices to rebuild")

    for index, models in grouped:
        self.stdout.write(backend_name + ": Rebuilding index %s" % index.name)

        # Kick off the rebuild; start() may return a replacement index.
        rebuilder = backend.rebuilder_class(index)
        index = rebuilder.start()

        # Declare every model's schema on the rebuilt index.
        for model in models:
            index.add_model(model)

        object_count = 0
        if not schema_only:
            # Populate the index with each model's objects, chunk by chunk.
            for model in models:
                label = '{}: {}.{} '.format(backend_name, model._meta.app_label, model.__name__)
                self.stdout.write(label.ljust(35), ending='')

                chunks = self.queryset_chunks(model.get_indexed_objects().order_by('pk'), chunk_size)
                for chunk in self.print_iter_progress(chunks):
                    index.add_items(model, chunk)
                    object_count += len(chunk)
                self.print_newline()

        # Close out the rebuild and report how much was indexed.
        rebuilder.finish()
        self.stdout.write(backend_name + ": indexed %d objects" % object_count)
        self.print_newline()
def delete_stale_entries(self):
    """Remove stale index entries for every root indexed model.

    Child (multi-table-inherited) models are skipped: deleting a root
    model's entries already removes its children's entries.
    """
    for indexed_model in get_indexed_models():
        # Models with parents are non-root; their roots handle cleanup.
        if indexed_model._meta.parents:
            continue
        self.delete_stale_model_entries(indexed_model)
def register_signal_handlers():
    """Attach save/delete signal handlers to every indexed model.

    Models can opt out of automatic index updates by setting
    ``search_auto_update = False`` on the model class; such models are
    skipped here so they are never auto-indexed on save/delete.
    """
    # Loop through list and register signal handlers for each one
    for model in index.get_indexed_models():
        # Honour the per-model opt-out flag (defaults to enabled) —
        # the original version connected handlers unconditionally,
        # inconsistent with the guarded registration used elsewhere.
        if not getattr(model, 'search_auto_update', True):
            continue
        post_save.connect(post_save_signal_handler, sender=model)
        post_delete.connect(post_delete_signal_handler, sender=model)
def delete_stale_entries(self):
    """Delete stale index entries, visiting root models only.

    Entries for child models disappear together with their root model's
    entries, so non-root models need no separate pass.
    """
    for model in get_indexed_models():
        if model._meta.parents:
            # Non-root model: handled via its root's deletion.
            continue
        self.delete_stale_model_entries(model)