Example No. 1
import abc
from multiprocessing import cpu_count

# NOTE: BasePlugin, Queue, and log are provided by the surrounding project;
# their imports are not shown in this excerpt.


class AsyncPlugin(BasePlugin):
    """ Abstract base class for plugins utilizing asynchronous workers """

    __metaclass__ = abc.ABCMeta

    def __init__(self, *args, **kwargs):
        super(AsyncPlugin, self).__init__(*args, **kwargs)
        self.workers = cpu_count()
        self.queue = Queue(backlog=self.workers) # work queue

    def launch(self, session):
        def spawn(e):
            # An error raised by the previous worker is logged before the
            # next item is pulled from the queue.
            if e is not None:
                log.exception("error in '{}': '{}'".format(self.ident, e.error))
            # Dequeue the next worker and wire up its callbacks: process the
            # result with self.work, then chain another spawn; on failure,
            # spawn is re-entered with the error so it gets logged.
            worker = self.queue.dequeue()
            worker.on_success(self.work)
            worker.on_success(lambda _: spawn(None))
            worker.on_failure(spawn)

        # Seed the queue from the current model state, then start one
        # callback chain per available CPU.
        self.fill(session)

        for _ in xrange(self.workers):
            spawn(None)

        super(AsyncPlugin, self).launch(session)

    @abc.abstractmethod
    def work(self, item):
        """ Perform further processing on the model item, synchronously

        How the results of this processing are stored for eventual commit by
        'self.update()' is entirely up to the individual plugin
        implementation. An easy way to manage this is to place model
        instances on the queue, so that simply mutating them is reflected in
        the next database commit.

        Note that this method is called in its own thread and may therefore
        use blocking I/O without disrupting any other components.
        """
        pass

    @abc.abstractmethod
    def fill(self, session):
        """ Fill the work queue 'self.queue' using the initial model state """
        pass
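
To make the abstract contract concrete, here is a minimal sketch of what a
subclass could look like. It assumes an SQLAlchemy-style session and a
hypothetical enqueue() method on the work queue (only dequeue() appears in
the excerpt above); FetchTitlePlugin, Page, and fetch_title are invented
names used purely for illustration.

class FetchTitlePlugin(AsyncPlugin):
    """ Hypothetical plugin: fetch a missing title for every Page row """

    def fill(self, session):
        # Assumption: the project's Queue exposes an enqueue() method for
        # adding work items; the excerpt above only shows dequeue().
        for page in session.query(Page).filter(Page.title == None):
            self.queue.enqueue(page)

    def work(self, page):
        # Runs in its own worker thread, so blocking I/O is acceptable here.
        # Mutating the model instance is enough for the change to be picked
        # up by the next database commit.
        page.title = fetch_title(page.url)  # fetch_title: hypothetical helper

With a subclass like this, the base class handles the threading: fill() runs
once to seed the queue, after which launch() starts cpu_count() callback
chains that each process one item at a time through work().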