Example #1
    def __init__(self, outputSlot, roiIterator, totalVolume=None, batchSize=2, allowParallelResults=False):
        """
        Constructor.

        :param outputSlot: The slot to request data from.
        :param roiIterator: An iterator providing new rois.
        :param totalVolume: The total volume to be processed.
                            Used to provide the progress reporting signal.
                            If not provided, then no intermediate progress will be signaled.
        :param batchSize: The maximum number of requests to launch in parallel.
        :param allowParallelResults: If False, the resultSignal will not be called in parallel.
                                     In that case, your handler function does not need to use locks.
        """
        self._resultSignal = OrderedSignal()
        self._progressSignal = OrderedSignal()

        assert isinstance(
            outputSlot.stype, lazyflow.stype.ArrayLike
        ), "Only Array-like slots supported."  # Because progress reporting depends on the roi shape
        self._outputSlot = outputSlot
        self._roiIter = roiIterator
        self._batchSize = batchSize
        self._allowParallelResults = allowParallelResults

        self._condition = SimpleRequestCondition()

        self._activated_count = 0
        self._completed_count = 0

        self._failure_excinfo = None

        # Progress bookkeeping
        self._totalVolume = totalVolume
        self._processedVolume = 0
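
The constructor above only records its configuration; to make the parameters concrete, here is a small standalone sketch (plain threading, no lazyflow) of the pattern it sets up: at most batchSize requests in flight at once, results delivered one at a time unless parallel delivery is allowed, and progress reported as a fraction of totalVolume. Every name in the sketch is illustrative and not part of the library's API.

# Standalone sketch of the batching pattern the constructor configures.
# All names here are illustrative -- this is NOT the lazyflow API.
import threading


def stream_rois(roi_iterator, fetch, on_result, on_progress=None,
                total_volume=None, batch_size=2, allow_parallel_results=False):
    """Launch at most `batch_size` fetches in parallel and hand each result to
    `on_result`. If `allow_parallel_results` is False, results are delivered
    under a lock so the handler never runs concurrently with itself."""
    throttle = threading.Semaphore(batch_size)  # limits requests in flight
    result_lock = threading.Lock()              # serializes result delivery
    progress_lock = threading.Lock()
    processed = [0]                             # mutable progress counter
    threads = []

    def roi_volume(roi):
        start, stop = roi
        volume = 1
        for lo, hi in zip(start, stop):
            volume *= hi - lo
        return volume

    def worker(roi):
        try:
            data = fetch(roi)
            if allow_parallel_results:
                on_result(roi, data)
            else:
                with result_lock:
                    on_result(roi, data)
            if on_progress is not None and total_volume:
                with progress_lock:
                    processed[0] += roi_volume(roi)
                    on_progress(100 * processed[0] // total_volume)
        finally:
            throttle.release()                  # make room for the next request

    for roi in roi_iterator:
        throttle.acquire()                      # blocks while batch_size are running
        thread = threading.Thread(target=worker, args=(roi,))
        threads.append(thread)
        thread.start()

    for thread in threads:
        thread.join()


# Example: process a 1-D volume of 100 voxels in rois of 10 voxels, four at a time.
rois = [((i,), (i + 10,)) for i in range(0, 100, 10)]
stream_rois(rois, fetch=lambda roi: roi, on_result=print,
            on_progress=print, total_volume=100, batch_size=4)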
Example #2
    def testBasic(self):
        """
        Test the SimpleRequestCondition, which is like threading.Condition, but with a subset of the functionality.
        (See the docs for details.)
        """
        # num_workers = Request.global_thread_pool.num_workers
        # Request.reset_thread_pool(num_workers=1)
        N_ELEMENTS = 100

        # It's tempting to simply use threading.Condition here,
        #  but that doesn't quite work if the thread calling wait() is also a worker thread.
        # (threading.Condition uses threading.Lock() as its 'waiter' lock, which blocks the entire worker.)
        # cond = threading.Condition( RequestLock() )
        cond = SimpleRequestCondition()

        produced = []
        consumed = []

        def wait_for_all():
            def f(i):
                time.sleep(0.2 * random.random())
                with cond:
                    produced.append(i)
                    cond.notify()

            reqs = []
            for i in range(N_ELEMENTS):
                req = Request(partial(f, i))
                reqs.append(req)

            for req in reqs:
                req.submit()

            _consumed = consumed  # local alias for the shared list; the += below mutates it in place
            with cond:
                while len(_consumed) < N_ELEMENTS:
                    while len(_consumed) == len(produced):
                        cond.wait()
                    logger.debug("copying {} elements".format(len(produced) - len(consumed)))
                    _consumed += produced[len(_consumed):]

        # Force the request to run in a worker thread.
        # This should catch failures that can occur if the Condition's "waiter" lock isn't a request lock.
        req = Request(wait_for_all)
        req.submit()

        # Now block for completion
        req.wait()

        logger.debug("produced: {}".format(produced))
        logger.debug("consumed: {}".format(consumed))
        assert set(consumed) == set(
            range(N_ELEMENTS)
        ), "Expected set(range(N_ELEMENTS)), got {}".format(consumed)