import asyncio
import json
import uuid

# Note: project-internal names used below (RedisQueue, settings, Job,
# MetaRegister, logger, TMQNoNamespaceHandler, TMQNoActionMethod) are assumed
# to be imported elsewhere in this module.


class Producer:
    """
    Producer for the simple message queue.

    To use a producer you need the RedisQMixin set up first. The message
    queue uses Redis as message storage, so a Redis connection must be
    available.

    >>> namespace_producer = Producer(conn, "some_namespace")
    >>> namespace_producer.do_smth_action(a=1, b=2)

    The following message will be constructed and pushed onto the Redis list:

    ``{"id": "<uuid4>", "namespace": "some_namespace",
    "action": "do_smth_action", "data": {"a": 1, "b": 2}}``
    """

    serializer = staticmethod(json.dumps)

    def __init__(self, conn, namespace, qname=None):
        """
        :param conn: Redis connection used by the underlying queue
        :param namespace: name of the namespace for this handler container
        :type namespace: ``str``
        :param qname: name of the queue to push jobs to
        :type qname: ``str``
        """
        self.namespace = namespace
        self._queue = RedisQueue(conn, qname)

    def __getattr__(self, action):
        """Create a coroutine for the requested action and return it."""
        @asyncio.coroutine
        def add_job(**kw):
            msg = self._build_msg(action, **kw)
            yield from self._queue.put(msg)
        return add_job

    def _build_msg(self, action, **kw):
        """Build job metadata as a JSON string."""
        job_id = kw.pop('id', None) or str(uuid.uuid4())
        data = {
            'id': job_id,
            'namespace': self.namespace,
            'action': action,
            'data': kw,
        }
        return self.serializer(data)
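
# Illustrative usage sketch, not part of the original API: it shows how a
# caller might enqueue a job through Producer. The connection object `conn`,
# the "emails" namespace and the `send_welcome` action are assumptions made
# for this example only.
@asyncio.coroutine
def _example_enqueue(conn):
    """Sketch: enqueue a hypothetical 'send_welcome' job via Producer."""
    producer = Producer(conn, 'emails')
    # Attribute access is intercepted by Producer.__getattr__, which returns a
    # coroutine that serializes {"id": ..., "namespace": "emails",
    # "action": "send_welcome", "data": {...}} and pushes it onto the queue.
    yield from producer.send_welcome(user_id=42, template='welcome')
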
class Consumer:
    """
    Consumer for the simple message queue.
    """

    def __init__(self, conn, concurrency=1, qname=None, loop=None):
        self._qname = qname or settings.DEFAULT_Q
        self._queue = RedisQueue(conn, self._qname)
        self.concurrency = concurrency
        self._loop = loop or asyncio.get_event_loop()
        self._is_running = True

    def wait_for_data(self):
        """Fetch the next message from the queue and return it as a task."""
        task = asyncio.Task(self._queue.get(), loop=self._loop)
        return task

    def _create_generator(self):
        """
        Create an infinite generator.

        The method pulls data from the queue and yields it for further
        processing.

        :return: ``iterable`` generator that yields job-fetching tasks
        """
        while self._is_running:
            task = self.wait_for_data()
            yield task

    @asyncio.coroutine
    def return_msg_back(self, msg):
        """Push a message back onto the queue so it can be retried."""
        yield from self._queue.put(msg)

    @asyncio.coroutine
    def cooperate(self, tasks):
        for future in tasks:
            msg_raw = yield from future
            logger.debug("Got message from queue: {}".format(msg_raw))
            try:
                yield from self._dispatch(msg_raw)
            except (TMQNoNamespaceHandler, TMQNoActionMethod):
                log_msg = 'No handlers for msg: {}'.format(msg_raw)
                logger.error(log_msg)
            except Exception as e:
                # TODO: there should be a better way to handle this
                logger.error('Unhandled error occurred: {}, '
                             'wait {} sec until next job'.format(e, 5))
                yield from self.return_msg_back(msg_raw)
                yield from asyncio.sleep(5, loop=self._loop)

    @asyncio.coroutine
    def _dispatch(self, msg_raw):
        job = Job(msg_raw)
        handle_class = MetaRegister.REGISTRY.get(job.namespace, None)
        if not handle_class:
            logger.warning("Namespace not recognized: there is no "
                           "handler class registered for this namespace")
            raise TMQNoNamespaceHandler()
        handler = handle_class(job)
        yield from handler._handle()

    def work(self, generator=None):
        """Start listening for and processing tasks."""
        job_gen = generator or self._create_generator()
        futures = []
        for i in range(self.concurrency):
            f = asyncio.ensure_future(self.cooperate(job_gen), loop=self._loop)
            futures.append(f)
        return futures
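
# Illustrative wiring sketch, not part of the original module: it shows one
# way the Consumer might be started. The connection object `conn` is an
# assumption for this example; the concurrency level is arbitrary, and the
# loop runs until interrupted because the job generator is infinite.
def _example_run_consumer(conn):
    """Sketch: run a Consumer with two workers on the default event loop."""
    loop = asyncio.get_event_loop()
    consumer = Consumer(conn, concurrency=2, loop=loop)
    # work() schedules `concurrency` cooperate() coroutines, each of which
    # keeps pulling jobs from the queue and dispatching them to handlers
    # registered in MetaRegister.REGISTRY.
    futures = consumer.work()
    try:
        loop.run_until_complete(asyncio.gather(*futures))
    finally:
        loop.close()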