Example #1
    def aggregate_result_set(self, agg, acc):
        """
        Loop on a set of celery AsyncResults and update the accumulator
        by using the aggregation function.

        :param agg: the aggregation function, (acc, val) -> new acc
        :param acc: the initial value of the accumulator
        :returns: the final value of the accumulator
        """
        if not self.results:
            return acc
        backend = current_app().backend
        amqp_backend = backend.__class__.__name__.startswith("AMQP")
        rset = ResultSet(self.results)
        for task_id, result_dict in rset.iter_native():
            check_mem_usage()  # warn if too much memory is used
            result = result_dict["result"]
            if isinstance(result, BaseException):
                raise result
            self.received.append(len(result))
            acc = agg(acc, result.unpickle())
            if amqp_backend:
                # work around a celery bug
                del backend._cache[task_id]
        return acc
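
For context, the agg callable in these examples follows a reduce-style contract: it takes the current accumulator and one unpickled task result and returns the new accumulator. A minimal, hypothetical usage sketch (the manager instance and the count-summing logic are illustrative, not part of the examples above):

    def agg(acc, val):
        # reduce-style step: fold one task result into the accumulator
        return acc + val

    # hypothetical: 'manager' is an instance whose .results holds the
    # celery AsyncResults produced by previously submitted tasks
    total = manager.aggregate_result_set(agg, 0)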
Example #2
        def aggregate_result_set(self, agg, acc):
            """
            Loop on a set of celery AsyncResults and update the accumulator
            by using the aggregation function.

            :param agg: the aggregation function, (acc, val) -> new acc
            :param acc: the initial value of the accumulator
            :returns: the final value of the accumulator
            """
            if isinstance(self.oqtask, types.FunctionType):
                # don't use celery
                return super(OqTaskManager, self).aggregate_result_set(
                    agg, acc)
            if not self.results:
                return acc
            backend = current_app().backend
            amqp_backend = backend.__class__.__name__.startswith('AMQP')
            rset = ResultSet(self.results)
            for task_id, result_dict in rset.iter_native():
                idx = self.task_ids.index(task_id)
                self.task_ids.pop(idx)
                parallel.check_mem_usage()  # warn if too much memory is used
                result = result_dict['result']
                if isinstance(result, BaseException):
                    raise result
                self.received.append(len(result))
                acc = agg(acc, result.unpickle())
                if amqp_backend:
                    # work around a celery bug
                    del backend._cache[task_id]
            return acc
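
check_mem_usage (called as parallel.check_mem_usage here) is only referenced, never defined, in these excerpts. A hedged sketch of what such a guard might look like, assuming a psutil-based implementation; the engine's actual function may differ:

    import logging
    import psutil

    def check_mem_usage(soft_percent=90):
        # log a warning when system memory usage crosses the threshold
        used = psutil.virtual_memory().percent
        if used > soft_percent:
            logging.warning('Using over %d%% of the memory!', used)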
Example #3
    def aggregate_result_set(self, agg, acc):
        """
        Loop on a set of results and update the accumulator
        by using the aggregation function.

        :param agg: the aggregation function, (acc, val) -> new acc
        :param acc: the initial value of the accumulator
        :returns: the final value of the accumulator
        """
        if not self.results:
            return acc

        distribute = oq_distribute()  # not called for distribute == 'no'

        if distribute == 'celery':

            backend = current_app().backend
            amqp_backend = backend.__class__.__name__.startswith('AMQP')
            rset = ResultSet(self.results)
            for task_id, result_dict in rset.iter_native():
                idx = self.task_ids.index(task_id)
                self.task_ids.pop(idx)
                check_mem_usage()  # warn if too much memory is used
                result = result_dict['result']
                if isinstance(result, BaseException):
                    raise result
                self.received.append(len(result))
                acc = agg(acc, result.unpickle())
                if amqp_backend:
                    # work around a celery bug
                    del backend._cache[task_id]
            return acc

        elif distribute == 'futures':

            for future in as_completed(self.results):
                check_mem_usage()
                # log a warning if too much memory is used
                result = future.result()
                if isinstance(result, BaseException):
                    raise result
                self.received.append(len(result))
                acc = agg(acc, result.unpickle())
            return acc
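
oq_distribute selects the concurrency backend for the branches above. A plausible sketch, assuming the mode is read from an OQ_DISTRIBUTE environment variable with a default; this is an assumption based on the usage here, not the engine's actual helper:

    import os

    def oq_distribute():
        # assumed: the distribution mode comes from the environment,
        # e.g. 'no', 'celery' or 'futures'
        return os.environ.get('OQ_DISTRIBUTE', 'futures')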
Example #4
    def _iterfutures(self):
        # compatibility wrapper for different concurrency frameworks

        if self.distribute == 'no':
            for result in self.results:
                yield mkfuture(result)

        elif self.distribute == 'celery':
            rset = ResultSet(self.results)
            for task_id, result_dict in rset.iter_native():
                idx = self.task_ids.index(task_id)
                self.task_ids.pop(idx)
                fut = mkfuture(result_dict['result'])
                # work around a celery bug
                del app.backend._cache[task_id]
                yield fut

        else:  # future interface
            for fut in as_completed(self.results):
                yield fut
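
mkfuture is referenced but not defined in these excerpts. A minimal sketch of such a helper, assuming it wraps an already-computed value in a concurrent.futures.Future so that local and distributed results share one interface (the name comes from the code above; the body is an assumption):

    from concurrent.futures import Future

    def mkfuture(result):
        # wrap a ready value so callers can treat it like a pending future
        fut = Future()
        fut.set_result(result)
        return fut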
Example #5
    def aggregate_result_set(self, agg, acc):
        """
        Loop on a set of celery AsyncResults and update the accumulator
        by using the aggregation function.

        :param agg: the aggregation function, (acc, val) -> new acc
        :param acc: the initial value of the accumulator
        :returns: the final value of the accumulator
        """
        if not self.results:
            return acc
        backend = current_app().backend
        rset = ResultSet(self.results)
        for task_id, result_dict in rset.iter_native():
            check_mem_usage()  # log a warning if too much memory is used
            result = result_dict['result']
            if isinstance(result, BaseException):
                raise result
            acc = agg(acc, result.unpickle())
            del backend._cache[task_id]  # work around a celery bug
        return acc
Example #6
    def aggregate_result_set(self, agg, acc):
        """
        Loop on a set of celery AsyncResults and update the accumulator
        by using the aggregation function.

        :param agg: the aggregation function, (acc, val) -> new acc
        :param acc: the initial value of the accumulator
        :returns: the final value of the accumulator
        """
        if not self.results:
            return acc
        backend = current_app().backend
        rset = ResultSet(self.results)
        for task_id, result_dict in rset.iter_native():
            check_mem_usage()  # warn if too much memory is used
            result = result_dict['result']
            if isinstance(result, BaseException):
                raise result
            self.received += len(result)
            acc = agg(acc, result.unpickle())
            del backend._cache[task_id]  # work around a celery bug
        return acc
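
The result objects in these examples support both len(result) and result.unpickle(), which suggests a wrapper holding a pickled payload. A hedged sketch consistent with that usage; the class name and details are inferred from the calls above, not taken from the engine's actual implementation:

    import pickle

    class Pickled(object):
        # hypothetical wrapper: stores the pickled payload so len() gives
        # the size in bytes and unpickle() restores the original object
        def __init__(self, obj):
            self.pik = pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)

        def __len__(self):
            return len(self.pik)

        def unpickle(self):
            return pickle.loads(self.pik)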