Example #1
 def base_case(self):
     queue = Queue()
     t = self.klass(queue=queue)
     t.start()
     t.join()
     eq_(queue.get(block=False), 7)
     ok_(queue.empty())
Example #2
 def base_case(self):
     queue = Queue()
     t = EHThread(target=self.worker, args=[queue])
     t.start()
     t.join()
     eq_(queue.get(block=False), 7)
     ok_(queue.empty())
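Both base_case variants above come from invoke's concurrency tests and lean on fixtures defined elsewhere: self.klass (a subclass that knows which queue to write to), self.worker (a plain function handed to the thread as target), and nose's eq_/ok_ assertion helpers. Below is a minimal sketch of what those fixtures plausibly look like; the worker simply puts 7 onto the shared queue, and the _run hook name is an assumption about ExceptionHandlingThread's subclassing API, not something shown on this page.

 # Hedged reconstruction of the assumed test fixtures; names are guesses.
 from queue import Queue
 from nose.tools import eq_, ok_
 from invoke.util import ExceptionHandlingThread as EHThread

 def worker(queue):
     queue.put(7)  # what eq_(queue.get(block=False), 7) later checks for

 class MyThread(EHThread):
     # Subclass style: remember the queue, do the work in the thread body.
     def __init__(self, *args, **kwargs):
         self.queue = kwargs.pop('queue')
         super(MyThread, self).__init__(*args, **kwargs)

     def _run(self):  # assumed hook used when no explicit target is given
         self.queue.put(7)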
Example #3
 def base_case(self):
     queue = Queue()
     t = self.klass(queue=queue)
     t.start()
     t.join()
     assert queue.get(block=False) == 7
     assert queue.empty()
Example #4
 def base_case(self):
     queue = Queue()
     t = EHThread(target=self.worker, args=[queue])
     t.start()
     t.join()
     assert queue.get(block=False) == 7
     assert queue.empty()
Example #5
 def run(self, *args, **kwargs):
     results = GroupResult()
     queue = Queue()
     threads = []
     for cxn in self:
         my_kwargs = dict(
             cxn=cxn,
             queue=queue,
             args=args,
             kwargs=kwargs,
         )
         thread = ExceptionHandlingThread(
             target=thread_worker,
             kwargs=my_kwargs,
         )
         threads.append(thread)
     for thread in threads:
         thread.start()
     for thread in threads:
         # TODO: configurable join timeout
         # TODO: (in sudo's version) configurability around interactive
         # prompting resulting in an exception instead, as in v1
         thread.join()
     # Get non-exception results from queue
     while not queue.empty():
         # TODO: io-sleep? shouldn't matter if all threads are now joined
         cxn, result = queue.get(block=False)
         # TODO: outstanding musings about how exactly aggregate results
         # ought to ideally operate...heterogenous obj like this, multiple
         # objs, ??
         results[cxn] = result
     # Get exceptions from the threads themselves.
     # TODO: in a non-thread setup, this would differ, e.g.:
     # - a queue if using multiprocessing
     # - some other state-passing mechanism if using e.g. coroutines
     # - ???
     excepted = False
     for thread in threads:
         wrapper = thread.exception()
         if wrapper is not None:
             # Outer kwargs is Thread instantiation kwargs, inner is kwargs
             # passed to thread target/body.
             cxn = wrapper.kwargs['kwargs']['cxn']
             results[cxn] = wrapper.value
             excepted = True
     if excepted:
         raise GroupException(results)
     return results
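This run() is essentially Fabric 2's ThreadingGroup.run: one ExceptionHandlingThread per Connection, non-exception results pulled off a shared queue, and a GroupException raised if any thread died. A hedged usage sketch of how a caller sees that behavior (host names are placeholders; import paths assume Fabric 2.x):

 from fabric import ThreadingGroup
 from fabric.exceptions import GroupException

 group = ThreadingGroup('web1', 'web2', 'db1')
 try:
     results = group.run('uname -s', hide=True)
     for cxn, result in results.items():
         print(cxn.host, result.stdout.strip())
 except GroupException as e:
     # e.result maps each Connection to either its Result or the exception
     # that killed its thread (the wrapper.value stored above).
     for cxn, outcome in e.result.items():
         print(cxn.host, type(outcome).__name__)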
Example #6
 def manual_threading_works_okay(self):
     # TODO: needs https://github.com/pyinvoke/invoke/issues/438 fixed
     # before it will reliably pass
     skip()
     # Kind of silly but a nice base case for "how would someone thread this
     # stuff; and are there any bizarre gotchas lurking in default
     # config/context/connection state?"
     # Specifically, cut up the local (usually 100k's long) words dict into
     # per-thread chunks, then read those chunks via shell command, as a
     # crummy "make sure each thread isn't polluting things like stored
     # stdout" sanity test
     queue = Queue()
     # TODO: skip test on Windows or find suitable alternative file
     with codecs.open(_words, encoding='utf-8') as fd:
         data = [x.strip() for x in fd.readlines()]
     threads = []
     num_words = len(data)
     chunksize = len(data) // len(self.cxns)  # floor division: slice indices must stay ints
     for i, cxn in enumerate(self.cxns):
         start = i * chunksize
         end = max([start + chunksize, num_words])
         chunk = data[start:end]
         kwargs = dict(
             queue=queue,
             cxn=cxn,
             start=start,
             num_words=num_words,
             count=len(chunk),
             expected=chunk,
         )
         thread = ExceptionHandlingThread(target=_worker, kwargs=kwargs)
         threads.append(thread)
     for t in threads:
         t.start()
     for t in threads:
         t.join(5)  # Kinda slow, but hey, maybe the test runner is hot
     while not queue.empty():
         cxn, result, expected = queue.get(block=False)
         for resultword, expectedword in zip_longest(result, expected):
             err = u"({2!r}, {3!r}->{4!r}) {0!r} != {1!r}".format(
                 resultword,
                 expectedword,
                 cxn,
                 expected[0],
                 expected[-1],
             )
             assert resultword == expectedword, err
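The test above depends on a module-level _worker helper and _words path that are not shown here. Given the (cxn, result, expected) tuples it pulls off the queue, the worker must roughly do the following; the exact shell command is a guess, not the project's actual helper:

 def _worker(queue, cxn, start, num_words, count, expected):
     # Read this thread's slice of the words file over its Connection:
     # skip `start` lines, then keep `count` lines.
     cmd = "tail -n +{} {} | head -n {}".format(start + 1, _words, count)
     stdout = cxn.run(cmd, hide=True).stdout
     result = [x.strip() for x in stdout.splitlines()]
     queue.put((cxn, result, expected))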
Example #7
    def keeps_track_of_seen_index_per_thread(self):
        # Instantiate a single object which will be used in >1 thread
        r = Responder(pattern="foo", response="bar fight")  # meh
        # Thread body func allowing us to mimic actual IO thread behavior, with
        # Queues used in place of actual pipes/files
        def body(responder, in_q, out_q, finished):
            while not finished.is_set():
                try:
                    # NOTE: use nowait() so our loop is hot & can shutdown ASAP
                    # if finished gets set.
                    stream = in_q.get_nowait()
                    for response in responder.submit(stream):
                        out_q.put_nowait(response)
                except Empty:
                    pass

        # Create two threads from that body func, and queues/etc for each
        t1_in, t1_out, t1_finished = Queue(), Queue(), Event()
        t2_in, t2_out, t2_finished = Queue(), Queue(), Event()
        t1 = Thread(target=body, args=(r, t1_in, t1_out, t1_finished))
        t2 = Thread(target=body, args=(r, t2_in, t2_out, t2_finished))
        # Start the threads
        t1.start()
        t2.start()
        try:
            stream = "foo fighters"
            # First thread will basically always work
            t1_in.put(stream)
            assert t1_out.get() == "bar fight"
            # Second thread get() will block/timeout if threadlocals aren't in
            # use, because the 2nd thread's copy of the responder will not have
            # its own index & will thus already be 'past' the `foo` in the
            # stream.
            t2_in.put(stream)
            assert t2_out.get(timeout=1) == "bar fight"
        except Empty:
            assert (
                False
            ), "Unable to read from thread 2 - implies threadlocal indices are broken!"  # noqa
        # Close up.
        finally:
            t1_finished.set()
            t2_finished.set()
            t1.join()
            t2.join()
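For context, Responder is invoke's stream watcher: each submit() call receives the stream contents accumulated so far and yields the canned response once per new match of pattern, using a per-thread index to remember how far it has already scanned. That index is exactly what the test exercises; a standalone illustration of the single-thread behavior:

 from invoke.watchers import Responder

 r = Responder(pattern="foo", response="bar fight")
 # First submission contains one new match -> one response is yielded.
 print(list(r.submit("foo fighters")))  # ["bar fight"]
 # Re-submitting the same accumulated stream yields nothing new, because the
 # watcher's index already points past that first "foo".
 print(list(r.submit("foo fighters")))  # []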
Example #8
 def exhibits_is_dead_flag(self):
     t = self.klass(queue=None)
     t.start()
     t.join()
     ok_(t.is_dead)
     t = self.klass(queue=Queue())
     t.start()
     t.join()
     ok_(not t.is_dead)
Example #9
 def exhibits_is_dead_flag(self):
     t = EHThread(target=self.worker, args=[None])
     t.start()
     t.join()
     ok_(t.is_dead)
     t = EHThread(target=self.worker, args=[Queue()])
     t.start()
     t.join()
     ok_(not t.is_dead)
Example #10
 def exhibits_is_dead_flag(self):
     t = self.klass(queue=None)
     t.start()
     t.join()
     assert t.is_dead
     t = self.klass(queue=Queue())
     t.start()
     t.join()
     assert not t.is_dead
Example #11
 def exhibits_is_dead_flag(self):
     # Spin up a thread that will except internally (can't put() on a
     # None object)
     t = self.klass(queue=None)
     t.start()
     t.join()
     # Excepted -> it's dead
     assert t.is_dead
     # Spin up a happy thread that can exit peacefully (it's not "dead",
     # though...maybe we should change that terminology)
     t = self.klass(queue=Queue())
     t.start()
     t.join()
     # Not dead, just uh...sleeping?
     assert not t.is_dead
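is_dead pairs naturally with the exception() method seen in the Group.run example above: after join(), a dead thread can hand back a wrapper describing what killed it. A small sketch reusing the same "put() on None" trick as this test; the worker function is illustrative, not part of the original suite:

 from invoke.util import ExceptionHandlingThread

 def worker(queue):
     queue.put(7)  # raises AttributeError when queue is None

 t = ExceptionHandlingThread(target=worker, args=[None])
 t.start()
 t.join()
 if t.is_dead:
     wrapper = t.exception()  # wrapper exposes .value and .kwargs (see run() above)
     print(type(wrapper.value).__name__)  # AttributeError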