def get_queue_list(request):
    """
    Render the archive-admin queues page listing the queues used by the archive.

    :param request: Django HTTP request; ``request.user`` is logged for auditing.
    :returns: HttpResponse rendering ``archive_admin/queues.html`` with a
        ``queues`` context entry containing the single SQS queue.
    """
    # logging.warn is a deprecated alias of logging.warning; use lazy %s args
    # so the message is only formatted when the record is actually emitted.
    logging.warning('Retrieving queue list for %s', request.user)
    return render_to_response('archive_admin/queues.html',
                              {'queues': [sqs._get_queue()]})
def get_queue_list(request):
    """
    Render the archive-admin queues page listing the queues used by the archive.

    NOTE(review): this is a duplicate of another ``get_queue_list`` definition
    in this file — in Python the later definition silently wins; confirm which
    copy is intended and delete the other.

    :param request: Django HTTP request; ``request.user`` is logged for auditing.
    :returns: HttpResponse rendering ``archive_admin/queues.html``.
    """
    # Replaced deprecated logging.warn with logging.warning and switched to
    # lazy %s formatting (the logging module formats only if the record fires).
    logging.warning('Retrieving queue list for %s', request.user)
    return render_to_response('archive_admin/queues.html',
                              {'queues': [sqs._get_queue()]})
def delete_queue(request, queue_name):
    """
    Delete the archive's work queue and report how many messages it held.

    :param request: Django HTTP request; ``request.user`` is logged for auditing.
    :param queue_name: name of the queue being deleted — NOTE(review): this
        value is logged and echoed in the response but is NOT passed to
        ``sqs._get_queue()``, so the queue actually deleted may not be the one
        named; confirm against the sqs helper's contract.
    :returns: HttpResponse summarizing the deleted queue and its message count.
    """
    # logging.warn is deprecated; lazy %s args avoid formatting suppressed records.
    logging.warning('%s requesting delete of %s', request.user, queue_name)
    queue = sqs._get_queue()
    # Capture the count before deletion so the response can report it.
    count = queue.count()
    queue.delete()
    return HttpResponse('Queue %s with %s messages was deleted' % (queue_name, count))
def delete_all_queues(request=None):
    """
    Wipe all queues used by the archive.

    :param request: optional Django HTTP request; when present its user is
        logged for auditing.
    :returns: HttpResponse confirming deletion.
    """
    # TODO: delete_all_queues really shouldn't be exposed!
    # BUG FIX: the original string had no %s placeholder, so
    # 'Delete of all queues' % request.user raised TypeError; it also
    # dereferenced request.user even though request defaults to None.
    logging.critical('Delete of all queues requested by %s',
                     request.user if request is not None else '<no request>')
    for queue in [sqs._get_queue()]:
        if queue is not None:
            queue.delete()
    return HttpResponse('All queues deleted')
def delete_all_queues(request=None):
    """
    Wipe all queues used by the archive.

    NOTE(review): duplicate of another ``delete_all_queues`` definition in this
    file — the later definition silently shadows the earlier one; confirm which
    copy is intended and delete the other.

    :param request: optional Django HTTP request; its user is logged when given.
    :returns: HttpResponse confirming deletion.
    """
    # TODO: delete_all_queues really shouldn't be exposed!
    # BUG FIX: 'Delete of all queues' % request.user raised TypeError (no %s
    # placeholder) and blew up with AttributeError when request was None.
    logging.critical('Delete of all queues requested by %s',
                     request.user if request is not None else '<no request>')
    for queue in [sqs._get_queue()]:
        if queue is not None:
            queue.delete()
    return HttpResponse('All queues deleted')
def delete_queue(request, queue_name):
    """
    Delete the archive's work queue and report how many messages it held.

    NOTE(review): duplicate of another ``delete_queue`` definition in this
    file — the later definition silently shadows the earlier one.

    :param request: Django HTTP request; ``request.user`` is logged for auditing.
    :param queue_name: queue name used only in the log and response text —
        it is NOT passed to ``sqs._get_queue()``; verify that is intentional.
    :returns: HttpResponse summarizing the deleted queue and its message count.
    """
    # Deprecated logging.warn replaced; lazy %s args defer string formatting.
    logging.warning('%s requesting delete of %s', request.user, queue_name)
    queue = sqs._get_queue()
    # Count before delete so the response can include the figure.
    count = queue.count()
    queue.delete()
    return HttpResponse('Queue %s with %s messages was deleted' % (queue_name, count))
def test_create_infra(self):
    """Verify the archive's SQS queue and S3 bucket infrastructure exist."""
    from donomo.archive.utils import s3, sqs
    # self.assert_ is a deprecated unittest alias (removed in Python 3.12);
    # assertIsNotNone states the intent directly and gives a better message.
    self.assertIsNotNone(sqs._get_queue())
    self.assertIsNotNone(s3._get_bucket())
def queue_length():
    """Return the number of work items currently pending in the archive queue."""
    queue = sqs._get_queue()
    return queue.count()