def setUp(self):
    super(QueuedSearchIndexTestCase, self).setUp()

    # Nuke the queue.
    queues.delete_queue(get_queue_name())

    # Nuke the index.
    call_command('clear_index', interactive=False, verbosity=0)

    # Get a queue connection so we can poke at it.
    self.queue = queues.Queue(get_queue_name())
def setUp(self):
    super(QueuedSearchIndexTestCase, self).setUp()

    # Nuke the queue.
    queues.delete_queue(get_queue_name())

    # Nuke the index.
    back = backend.SearchBackend()
    back.clear()

    # Get a queue connection so we can poke at it.
    self.queue = queues.Queue(get_queue_name())
def handle_noargs(self, **options):
    self.batchsize = options.get('batchsize', DEFAULT_BATCH_SIZE)

    # Set up the queue.
    self.queue = queues.Queue(get_queue_name())  #@UndefinedVariable

    # Check whether there's anything to process.
    if not len(self.queue):
        self.log.info("Not enough items in the queue to process.")
        return

    self.log.info("Starting to process the queue.")

    # Consume the whole queue first so that we can group update/deletes
    # for efficiency.
    try:
        while True:
            message = self.queue.read()

            if not message:
                break

            self.process_message(message)
    except QueueException:
        # We've run out of items in the queue.
        pass

    self.log.info("Queue consumed.")

    try:
        self.handle_updates()
        self.handle_deletes()
    except Exception as e:
        self.log.error('Exception seen during processing: %s' % e)
        self.requeue()
        # Re-raise bare so the original traceback is preserved.
        raise
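# ``process_message`` is called above but not shown in this excerpt. A
# minimal sketch, assuming messages follow the ``action:app.model.pk``
# format written by ``enqueue`` below, and that ``handle_noargs`` has
# initialized a hypothetical ``self.actions = {'update': set(),
# 'delete': set()}`` for grouping:
def process_message(self, message):
    """
    Parses a queued message and buckets it as an update or a delete.
    """
    if ':' not in message:
        self.log.error("Unable to parse message '%s'. Moving on..." % message)
        return

    action, obj_identifier = message.split(':', 1)

    if action == 'update':
        # A later update supersedes any earlier delete for the same object.
        self.actions['delete'].discard(obj_identifier)
        self.actions['update'].add(obj_identifier)
    elif action == 'delete':
        self.actions['update'].discard(obj_identifier)
        self.actions['delete'].add(obj_identifier)
    else:
        self.log.error("Unknown action '%s'. Moving on..." % action)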
def setUp(self):
    super(ProcessSearchQueueTestCase, self).setUp()

    # Nuke the queue.
    queues.delete_queue(get_queue_name())

    # Nuke the index.
    call_command('clear_index', interactive=False, verbosity=0)

    # Get a queue connection so we can poke at it.
    self.queue = queues.Queue(get_queue_name())

    # Clear out and capture log messages.
    AssertableHandler.stowed_messages = []
    self.psqc = ProcessSearchQueueCommand()
def handle_noargs(self, **options):
    self.batchsize = options.get('batchsize', DEFAULT_BATCH_SIZE) or 1000
    self.using = options.get('using')

    # Set up the queue.
    self.queue = queues.Queue(get_queue_name())

    # Check whether there's anything to process.
    if not len(self.queue):
        self.log.info("Not enough items in the queue to process.")
        return

    self.log.info("Starting to process the queue.")

    # Consume the whole queue first so that we can group update/deletes
    # for efficiency.
    try:
        while True:
            message = self.queue.read()

            if not message:
                break

            self.process_message(message)
    except QueueException:
        # We've run out of items in the queue.
        pass

    self.log.info("Queue consumed.")

    try:
        self.handle_updates()
        self.handle_deletes()
    except Exception as e:
        self.log.error('Exception seen during processing: %s' % e)
        self.requeue()
        # Re-raise bare so the original traceback is preserved.
        raise
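# ``requeue`` is invoked in the except clause above but not shown. A minimal
# sketch, assuming the same hypothetical ``self.actions`` grouping as in
# ``process_message``, written back with the same ``action:identifier``
# message format so a later run can retry the failed batch:
def requeue(self):
    """
    Writes any unprocessed identifiers back onto the queue so nothing is lost.
    """
    self.log.info("Requeuing unprocessed messages.")
    requeued = 0

    for obj_identifier in self.actions['update']:
        self.queue.write('update:%s' % obj_identifier)
        requeued += 1

    for obj_identifier in self.actions['delete']:
        self.queue.write('delete:%s' % obj_identifier)
        requeued += 1

    self.log.info("Requeued %d messages." % requeued)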
def setUp(self):
    super(ProcessSearchQueueTestCase, self).setUp()

    # Nuke the queue.
    queues.delete_queue(get_queue_name())

    # Nuke the index.
    back = backend.SearchBackend()
    back.clear()

    # Get a queue connection so we can poke at it.
    self.queue = queues.Queue(get_queue_name())

    # Clear out and capture log messages.
    AssertableHandler.stowed_messages = []
    self.psqc = ProcessSearchQueueCommand()
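# Both setUp() methods above reset ``AssertableHandler.stowed_messages``,
# but the handler itself isn't shown. A minimal sketch, assuming it is a
# ``logging.Handler`` subclass that stows each record's message on a
# class-level list so tests can assert against captured log output:
import logging

class AssertableHandler(logging.Handler):
    stowed_messages = []

    def emit(self, record):
        # Append the rendered message for later inspection by the tests.
        AssertableHandler.stowed_messages.append(record.getMessage())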
def test_delete(self):
    note1 = Note.objects.create(
        title='A test note',
        content='Because everyone loves test data.',
        author='Daniel'
    )
    note2 = Note.objects.create(
        title='Another test note',
        content='More test data.',
        author='Daniel'
    )
    note3 = Note.objects.create(
        title='Final test note',
        content='The test data. All done.',
        author='Joe'
    )

    # Dump the queue in preparation for the deletes.
    queues.delete_queue(get_queue_name())
    self.queue = queues.Queue(get_queue_name())
    self.assertEqual(len(self.queue), 0)

    note1.delete()
    self.assertEqual(len(self.queue), 1)
    note2.delete()
    self.assertEqual(len(self.queue), 2)
    note3.delete()
    self.assertEqual(len(self.queue), 3)

    # Pull the whole queue.
    messages = []

    try:
        while True:
            messages.append(self.queue.read())
    except QueueException:
        # We're out of queued bits.
        pass

    self.assertEqual(messages, [u'delete:notes.note.1', u'delete:notes.note.2', u'delete:notes.note.3'])
def enqueue(self, action, instance):
    """
    Shoves a message about how to update the index into the queue.

    This is a standardized string, resembling something like::

        ``update:notes.note.23``
        # ...or...
        ``delete:weblog.entry.8``
    """
    message = "%s:%s" % (action, get_identifier(instance))
    queue = queues.Queue(get_queue_name())
    return queue.write(message)
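# ``get_queue_name`` is imported from ``queued_search`` throughout these
# snippets but never shown. A minimal sketch, assuming it resolves an
# optional settings override with a sensible default (the setting name
# ``SEARCH_QUEUE_NAME`` and the default value here are assumptions):
from django.conf import settings

def get_queue_name():
    """
    Returns the queue name to use, honoring a settings override if present.
    """
    return getattr(settings, 'SEARCH_QUEUE_NAME', 'haystack_search_queue')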
def test_delete(self):
    note1 = Note.objects.create(
        title='A test note',
        content='Because everyone loves test data.',
        author='Daniel')
    note2 = Note.objects.create(title='Another test note',
                                content='More test data.',
                                author='Daniel')
    note3 = Note.objects.create(title='Final test note',
                                content='The test data. All done.',
                                author='Joe')

    # Dump the queue in preparation for the deletes.
    queues.delete_queue(get_queue_name())
    self.queue = queues.Queue(get_queue_name())
    self.assertEqual(len(self.queue), 0)

    note1.delete()
    self.assertEqual(len(self.queue), 1)
    note2.delete()
    self.assertEqual(len(self.queue), 2)
    note3.delete()
    self.assertEqual(len(self.queue), 3)

    # Pull the whole queue.
    messages = []

    try:
        while True:
            messages.append(self.queue.read())
    except QueueException:
        # We're out of queued bits.
        pass

    self.assertEqual(messages, [
        u'delete:notes.note.1',
        u'delete:notes.note.2',
        u'delete:notes.note.3'
    ])
from queues import queues
from queues import QueueException
from django.conf import settings
from django.db.models import signals
from haystack import indexes
from haystack.utils import get_identifier
from queued_search import get_queue_name

queue = queues.Queue(get_queue_name())


class QueuedSearchIndex(indexes.SearchIndex):
    """
    A ``SearchIndex`` subclass that enqueues updates/deletes for later
    processing.

    This allows page loads to remain snappy (appending to a queue is a very
    fast operation) while the index is updated in the background, letting
    you maintain near real-time results without impacting the user
    experience. Make it fast, go go go.
    """
    # We override the built-in _setup_* methods to connect the enqueuing
    # operation.
    def _setup_save(self, model):
        signals.post_save.connect(self.enqueue_save, sender=model)

    def _setup_delete(self, model):
        signals.post_delete.connect(self.enqueue_delete, sender=model)

    def _teardown_save(self, model):
        signals.post_save.disconnect(self.enqueue_save, sender=model)
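    # The handlers connected above (and the symmetric delete teardown) are
    # not part of this excerpt. A minimal sketch, assuming the signal
    # handlers simply delegate to the ``enqueue`` method shown earlier with
    # the matching action string:
    def _teardown_delete(self, model):
        signals.post_delete.disconnect(self.enqueue_delete, sender=model)

    def enqueue_save(self, instance, **kwargs):
        # post_save fired: mark this object for (re)indexing.
        return self.enqueue('update', instance)

    def enqueue_delete(self, instance, **kwargs):
        # post_delete fired: mark this object for removal from the index.
        return self.enqueue('delete', instance)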