def test_insert_max_priority_capped(self):
        q = StablePriorityQueue(maxsize=10, max_priority=20)
        a = mock.Mock()
        a.PRIORITY = 100
        q.put(a)

        self.assertIs(q.get(), a)
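
The tests on this page exercise three behaviours of ``StablePriorityQueue``: priorities above ``max_priority`` are capped, items without a ``PRIORITY`` attribute fall to the lowest priority, and items of equal priority come out in FIFO order. The class below is only a minimal sketch with those properties (built on ``queue.Queue`` and ``heapq``); it is an illustrative assumption, not the actual awscli implementation, which may differ internally.

import heapq
import queue


class _SketchStablePriorityQueue(queue.Queue):
    """Illustrative sketch only -- not the awscli implementation.

    Lower PRIORITY values are served first, ties keep insertion (FIFO)
    order, priorities above ``max_priority`` are capped, and items
    without a ``PRIORITY`` attribute fall to the lowest priority.
    """

    def __init__(self, maxsize=0, max_priority=20):
        self._max_priority = max_priority
        queue.Queue.__init__(self, maxsize=maxsize)

    def _init(self, maxsize):
        # Called by queue.Queue.__init__ to set up internal storage.
        self._heap = []
        self._counter = 0  # insertion order breaks priority ties

    def _qsize(self):
        return len(self._heap)

    def _put(self, item):
        # Missing PRIORITY -> lowest priority; anything larger than
        # max_priority is capped to max_priority.
        priority = min(getattr(item, 'PRIORITY', self._max_priority),
                       self._max_priority)
        heapq.heappush(self._heap, (priority, self._counter, item))
        self._counter += 1

    def _get(self):
        return heapq.heappop(self._heap)[2]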
Example #2
    def test_insert_max_priority_capped(self):
        q = StablePriorityQueue(maxsize=10, max_priority=20)
        a = mock.Mock()
        a.PRIORITY = 100
        q.put(a)

        self.assertIs(q.get(), a)
Example #3
    def test_priority_attr_is_missing(self):
        # If priority attr is missing, we should add it
        # to the lowest priority.
        q = StablePriorityQueue(maxsize=10, max_priority=20)
        a = object()
        b = mock.Mock()
        b.PRIORITY = 5

        q.put(a)
        q.put(b)

        self.assertIs(q.get(), b)
        self.assertIs(q.get(), a)
Example #4
 def __init__(self, num_threads, result_queue, quiet, max_queue_size,
              write_queue):
     self._max_queue_size = max_queue_size
     self.queue = StablePriorityQueue(maxsize=self._max_queue_size,
                                      max_priority=20)
     self.num_threads = num_threads
     self.result_queue = result_queue
     self.quiet = quiet
     self.threads_list = []
     self.write_queue = write_queue
     self.print_thread = PrintThread(self.result_queue, self.quiet)
     self.print_thread.daemon = True
     self.io_thread = IOWriterThread(self.write_queue)
Example #5
    def test_priority_attr_is_missing(self):
        # If priority attr is missing, we should add it
        # to the lowest priority.
        q = StablePriorityQueue(maxsize=10, max_priority=20)
        a = object()
        b = mock.Mock()
        b.PRIORITY = 5

        q.put(a)
        q.put(b)

        self.assertIs(q.get(), b)
        self.assertIs(q.get(), a)
Example #6
    def test_queue_length(self):
        a = mock.Mock()
        a.PRIORITY = 5

        q = StablePriorityQueue(maxsize=10, max_priority=20)
        self.assertEqual(q.qsize(), 0)

        q.put(a)
        self.assertEqual(q.qsize(), 1)

        q.get()
        self.assertEqual(q.qsize(), 0)
Example #7
 def _put(self, item):
     if isinstance(item, CompleteMultipartUploadTask):
         # Raising this exception will trigger the
         # "error" case shutdown in the executor.
         raise RuntimeError(
             "Forced error on enqueue of complete task.")
     return StablePriorityQueue._put(self, item)
Example #8
 def _put(self, item):
     if isinstance(item, CompleteMultipartUploadTask):
         # Raising this exception will trigger the
         # "error" case shutdown in the executor.
         raise RuntimeError(
             "Forced error on enqueue of complete task.")
     return StablePriorityQueue._put(self, item)
Example #9
 def __init__(self, num_threads, result_queue, quiet, only_show_errors,
              max_queue_size, write_queue):
     self._max_queue_size = max_queue_size
     LOGGER.debug("Using max queue size for s3 tasks of: %s",
                  self._max_queue_size)
     self.queue = StablePriorityQueue(maxsize=self._max_queue_size,
                                      max_priority=20)
     self.num_threads = num_threads
     self.result_queue = result_queue
     self.quiet = quiet
     self.only_show_errors = only_show_errors
     self.threads_list = []
     self.write_queue = write_queue
     self.print_thread = PrintThread(self.result_queue, self.quiet,
                                     self.only_show_errors)
     self.print_thread.daemon = True
     self.io_thread = IOWriterThread(self.write_queue)
Example #10
    def test_queue_length(self):
        a = mock.Mock()
        a.PRIORITY = 5

        q = StablePriorityQueue(maxsize=10, max_priority=20)
        self.assertEqual(q.qsize(), 0)

        q.put(a)
        self.assertEqual(q.qsize(), 1)

        q.get()
        self.assertEqual(q.qsize(), 0)
Example #11
 def __init__(self, num_threads, result_queue,
              quiet, max_queue_size, write_queue):
     self._max_queue_size = max_queue_size
     self.queue = StablePriorityQueue(maxsize=self._max_queue_size,
                                      max_priority=20)
     self.num_threads = num_threads
     self.result_queue = result_queue
     self.quiet = quiet
     self.threads_list = []
     self.write_queue = write_queue
     self.print_thread = PrintThread(self.result_queue,
                                     self.quiet)
     self.print_thread.daemon = True
     self.io_thread = IOWriterThread(self.write_queue)
Example #12
 def __init__(self, num_threads, result_queue, quiet,
              only_show_errors, max_queue_size, write_queue):
     self._max_queue_size = max_queue_size
     LOGGER.debug("Using max queue size for s3 tasks of: %s",
                  self._max_queue_size)
     self.queue = StablePriorityQueue(maxsize=self._max_queue_size,
                                      max_priority=20)
     self.num_threads = num_threads
     self.result_queue = result_queue
     self.quiet = quiet
     self.only_show_errors = only_show_errors
     self.threads_list = []
     self.write_queue = write_queue
     self.print_thread = PrintThread(self.result_queue, self.quiet,
                                     self.only_show_errors)
     self.print_thread.daemon = True
     self.io_thread = IOWriterThread(self.write_queue)
Example #13
    def test_fifo_order_of_same_priorities(self):
        a = mock.Mock()
        a.PRIORITY = 5
        b = mock.Mock()
        b.PRIORITY = 5
        c = mock.Mock()
        c.PRIORITY = 1

        q = StablePriorityQueue(maxsize=10, max_priority=20)
        q.put(a)
        q.put(b)
        q.put(c)

        # First we should get c because it's the lowest priority.
        # We're using assertIs because we want the *exact* object.
        self.assertIs(q.get(), c)
        # Then a and b are the same priority, but we should get
        # a first because it was inserted first.
        self.assertIs(q.get(), a)
        self.assertIs(q.get(), b)
Example #14
    def test_fifo_order_of_same_priorities(self):
        a = mock.Mock()
        a.PRIORITY = 5
        b = mock.Mock()
        b.PRIORITY = 5
        c = mock.Mock()
        c.PRIORITY = 1

        q = StablePriorityQueue(maxsize=10, max_priority=20)
        q.put(a)
        q.put(b)
        q.put(c)

        # First we should get c because it's the lowest priority.
        # We're using assertIs because we want the *exact* object.
        self.assertIs(q.get(), c)
        # Then a and b are the same priority, but we should get
        # a first because it was inserted first.
        self.assertIs(q.get(), a)
        self.assertIs(q.get(), b)
Example #15
class Executor(object):
    """
    This class is in charge of all of the threads.  It starts up the threads
    and cleans up the threads when finished.  The two types of threads the
    ``Executor`` runs are a worker thread and a print thread.
    """
    STANDARD_PRIORITY = 11
    IMMEDIATE_PRIORITY = 1

    def __init__(self, num_threads, result_queue, quiet, only_show_errors,
                 max_queue_size, write_queue):
        self._max_queue_size = max_queue_size
        LOGGER.debug("Using max queue size for s3 tasks of: %s",
                     self._max_queue_size)
        self.queue = StablePriorityQueue(maxsize=self._max_queue_size,
                                         max_priority=20)
        self.num_threads = num_threads
        self.result_queue = result_queue
        self.quiet = quiet
        self.only_show_errors = only_show_errors
        self.threads_list = []
        self.write_queue = write_queue
        self.print_thread = PrintThread(self.result_queue, self.quiet,
                                        self.only_show_errors)
        self.print_thread.daemon = True
        self.io_thread = IOWriterThread(self.write_queue)

    @property
    def num_tasks_failed(self):
        tasks_failed = 0
        if self.print_thread is not None:
            tasks_failed = self.print_thread.num_errors_seen
        return tasks_failed

    @property
    def num_tasks_warned(self):
        tasks_warned = 0
        if self.print_thread is not None:
            tasks_warned = self.print_thread.num_warnings_seen
        return tasks_warned

    def start(self):
        self.io_thread.start()
        # Note that we're *not* adding the IO thread to the threads_list.
        # There's a specific shutdown order we need and we're going to be
        # explicit about it rather than relying on the threads_list order.
        # See .join() for more info.
        self.print_thread.start()
        LOGGER.debug("Using a threadpool size of: %s", self.num_threads)
        for i in range(self.num_threads):
            worker = Worker(queue=self.queue)
            worker.setDaemon(True)
            self.threads_list.append(worker)
            worker.start()

    def submit(self, task):
        """
        This is the function used to submit a task to the ``Executor``.
        """
        LOGGER.debug("Submitting task: %s", task)
        self.queue.put(task)

    def initiate_shutdown(self, priority=STANDARD_PRIORITY):
        """Instruct all threads to shutdown.

        This is a graceful shutdown.  It will wait until all
        currently queued tasks have been completed before the threads
        shutdown.  If the task queue is completely full, it may
        take a while for the threads to shutdown.

        This method does not block.  Once ``initiate_shutdown`` has
        been called, you can call ``wait_until_shutdown`` to block
        until the Executor has shut down.

        """
        # Implementation detail:  we only queue the worker threads
        # to shutdown.  The print/io threads are shutdown in the
        # ``wait_until_shutdown`` method.
        for i in range(self.num_threads):
            LOGGER.debug(
                "Queueing end sentinel for worker thread (priority: %s)",
                priority)
            self.queue.put(ShutdownThreadRequest(priority))

    def wait_until_shutdown(self):
        """Block until the Executor is fully shutdown.

        This will wait until all worker threads are shutdown, along
        with any additional helper threads used by the executor.

        """
        for thread in self.threads_list:
            LOGGER.debug("Waiting for thread to shutdown: %s", thread)
            while True:
                thread.join(timeout=1)
                if not thread.is_alive():
                    break
            LOGGER.debug("Thread has been shutdown: %s", thread)

        LOGGER.debug("Queueing end sentinel for result thread.")
        self.result_queue.put(ShutdownThreadRequest())
        LOGGER.debug("Queueing end sentinel for IO thread.")
        self.write_queue.put(ShutdownThreadRequest())

        LOGGER.debug("Waiting for result thread to shutdown.")
        self.print_thread.join()
        LOGGER.debug("Waiting for IO thread to shutdown.")
        self.io_thread.join()
        LOGGER.debug("All threads have been shutdown.")
Example #16
class Executor(object):
    """
    This class is in charge of all of the threads.  It starts up the threads
    and cleans up the threads when finished.  The two types of threads the
    ``Executor`` runs are a worker thread and a print thread.
    """
    STANDARD_PRIORITY = 11
    IMMEDIATE_PRIORITY = 1

    def __init__(self, num_threads, result_queue, quiet,
                 only_show_errors, max_queue_size, write_queue):
        self._max_queue_size = max_queue_size
        LOGGER.debug("Using max queue size for s3 tasks of: %s",
                     self._max_queue_size)
        self.queue = StablePriorityQueue(maxsize=self._max_queue_size,
                                         max_priority=20)
        self.num_threads = num_threads
        self.result_queue = result_queue
        self.quiet = quiet
        self.only_show_errors = only_show_errors
        self.threads_list = []
        self.write_queue = write_queue
        self.print_thread = PrintThread(self.result_queue, self.quiet,
                                        self.only_show_errors)
        self.print_thread.daemon = True
        self.io_thread = IOWriterThread(self.write_queue)

    @property
    def num_tasks_failed(self):
        tasks_failed = 0
        if self.print_thread is not None:
            tasks_failed = self.print_thread.num_errors_seen
        return tasks_failed

    @property
    def num_tasks_warned(self):
        tasks_warned = 0
        if self.print_thread is not None:
            tasks_warned = self.print_thread.num_warnings_seen
        return tasks_warned

    def start(self):
        self.io_thread.start()
        # Note that we're *not* adding the IO thread to the threads_list.
        # There's a specific shutdown order we need and we're going to be
        # explicit about it rather than relying on the threads_list order.
        # See .join() for more info.
        self.print_thread.start()
        LOGGER.debug("Using a threadpool size of: %s", self.num_threads)
        for i in range(self.num_threads):
            worker = Worker(queue=self.queue)
            worker.setDaemon(True)
            self.threads_list.append(worker)
            worker.start()

    def submit(self, task):
        """
        This is the function used to submit a task to the ``Executor``.
        """
        LOGGER.debug("Submitting task: %s", task)
        self.queue.put(task)

    def initiate_shutdown(self, priority=STANDARD_PRIORITY):
        """Instruct all threads to shutdown.

        This is a graceful shutdown.  It will wait until all
        currently queued tasks have been completed before the threads
        shutdown.  If the task queue is completely full, it may
        take a while for the threads to shutdown.

        This method does not block.  Once ``initiate_shutdown`` has
        been called, you can call ``wait_until_shutdown`` to block
        until the Executor has shut down.

        """
        # Implementation detail:  we only queue the worker threads
        # to shutdown.  The print/io threads are shutdown in the
        # ``wait_until_shutdown`` method.
        for i in range(self.num_threads):
            LOGGER.debug(
                "Queueing end sentinel for worker thread (priority: %s)",
                priority)
            self.queue.put(ShutdownThreadRequest(priority))

    def wait_until_shutdown(self):
        """Block until the Executor is fully shutdown.

        This will wait until all worker threads are shutdown, along
        with any additional helper threads used by the executor.

        """
        for thread in self.threads_list:
            LOGGER.debug("Waiting for thread to shutdown: %s", thread)
            while True:
                thread.join(timeout=1)
                if not thread.is_alive():
                    break
            LOGGER.debug("Thread has been shutdown: %s", thread)

        LOGGER.debug("Queueing end sentinel for result thread.")
        self.result_queue.put(ShutdownThreadRequest())
        LOGGER.debug("Queueing end sentinel for IO thread.")
        self.write_queue.put(ShutdownThreadRequest())

        LOGGER.debug("Waiting for result thread to shutdown.")
        self.print_thread.join()
        LOGGER.debug("Waiting for IO thread to shutdown.")
        self.io_thread.join()
        LOGGER.debug("All threads have been shutdown.")
Example #17
class TestTaskOrdering(unittest.TestCase):
    def setUp(self):
        self.q = StablePriorityQueue(maxsize=10, max_priority=20)

    def create_task(self):
        # We don't actually care about the arguments, we just want to test
        # the ordering of the tasks.
        return CreateLocalFileTask(None, None)

    def complete_task(self):
        return CompleteDownloadTask(None, None, None, None, None)

    def download_task(self):
        return DownloadPartTask(None, None, None, None, mock.Mock(), None,
                                None)

    def shutdown_task(self, priority=None):
        return ShutdownThreadRequest(priority)

    def test_order_unchanged_in_same_priority(self):
        create = self.create_task()
        download = self.download_task()
        complete = self.complete_task()

        self.q.put(create)
        self.q.put(download)
        self.q.put(complete)

        self.assertIs(self.q.get(), create)
        self.assertIs(self.q.get(), download)
        self.assertIs(self.q.get(), complete)

    def test_multiple_tasks(self):
        create = self.create_task()
        download = self.download_task()
        complete = self.complete_task()

        create2 = self.create_task()
        download2 = self.download_task()
        complete2 = self.complete_task()

        self.q.put(create)
        self.q.put(download)
        self.q.put(complete)

        self.q.put(create2)
        self.q.put(download2)
        self.q.put(complete2)

        self.assertIs(self.q.get(), create)
        self.assertIs(self.q.get(), download)
        self.assertIs(self.q.get(), complete)

        self.assertIs(self.q.get(), create2)
        self.assertIs(self.q.get(), download2)
        self.assertIs(self.q.get(), complete2)

    def test_shutdown_tasks_are_last(self):
        create = self.create_task()
        download = self.download_task()
        complete = self.complete_task()
        shutdown = self.shutdown_task(priority=11)

        self.q.put(create)
        self.q.put(download)
        self.q.put(complete)
        self.q.put(shutdown)

        self.assertIs(self.q.get(), create)
        self.assertIs(self.q.get(), download)
        self.assertIs(self.q.get(), complete)
        self.assertIs(self.q.get(), shutdown)
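
``test_shutdown_tasks_are_last`` relies on the convention that items carry a ``PRIORITY`` attribute and that the shutdown sentinel is queued with a later (higher) priority value than regular work. The snippet below is a toy illustration of that convention with made-up classes and priority values; it assumes ``StablePriorityQueue`` is importable as in the examples above.

class _DemoTask(object):
    PRIORITY = 10         # made-up value for a regular work item


class _DemoShutdown(object):
    PRIORITY = 11         # made-up value; should drain after regular work


q = StablePriorityQueue(maxsize=10, max_priority=20)
q.put(_DemoShutdown())
q.put(_DemoTask())

assert isinstance(q.get(), _DemoTask)       # lower PRIORITY value is served first
assert isinstance(q.get(), _DemoShutdown)   # shutdown sentinel comes out last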
Example #18
 def setUp(self):
     self.q = StablePriorityQueue(maxsize=10, max_priority=20)
Example #19
class TestTaskOrdering(unittest.TestCase):
    def setUp(self):
        self.q = StablePriorityQueue(maxsize=10, max_priority=20)

    def create_task(self):
        # We don't actually care about the arguments, we just want to test
        # the ordering of the tasks.
        return CreateLocalFileTask(None, None)

    def complete_task(self):
        return CompleteDownloadTask(None, None, None, None, None)

    def download_task(self):
        return DownloadPartTask(None, None, None, None, mock.Mock(), None,
                                None)

    def shutdown_task(self, priority=None):
        return ShutdownThreadRequest(priority)

    def test_order_unchanged_in_same_priority(self):
        create = self.create_task()
        download = self.download_task()
        complete = self.complete_task()

        self.q.put(create)
        self.q.put(download)
        self.q.put(complete)

        self.assertIs(self.q.get(), create)
        self.assertIs(self.q.get(), download)
        self.assertIs(self.q.get(), complete)

    def test_multiple_tasks(self):
        create = self.create_task()
        download = self.download_task()
        complete = self.complete_task()

        create2 = self.create_task()
        download2 = self.download_task()
        complete2 = self.complete_task()

        self.q.put(create)
        self.q.put(download)
        self.q.put(complete)

        self.q.put(create2)
        self.q.put(download2)
        self.q.put(complete2)

        self.assertIs(self.q.get(), create)
        self.assertIs(self.q.get(), download)
        self.assertIs(self.q.get(), complete)

        self.assertIs(self.q.get(), create2)
        self.assertIs(self.q.get(), download2)
        self.assertIs(self.q.get(), complete2)

    def test_shutdown_tasks_are_last(self):
        create = self.create_task()
        download = self.download_task()
        complete = self.complete_task()
        shutdown = self.shutdown_task(priority=11)

        self.q.put(create)
        self.q.put(download)
        self.q.put(complete)
        self.q.put(shutdown)

        self.assertIs(self.q.get(), create)
        self.assertIs(self.q.get(), download)
        self.assertIs(self.q.get(), complete)
        self.assertIs(self.q.get(), shutdown)
Example #20
 def setUp(self):
     self.q = StablePriorityQueue(maxsize=10, max_priority=20)