Example #1
    async def _rpc_handle(self, message: aio_pika.IncomingMessage) -> None:
        """
        Handles JSON-RPC request.

        :param message: incoming message
        """

        try:
            reply_to = message.reply_to
            response_text = await self._dispatcher.dispatch(message.body,
                                                            context=message)

            if response_text is not None:
                if reply_to is None:
                    logger.warning("property 'reply_to' is missing")
                else:
                    async with self._connection.channel() as channel:
                        await channel.default_exchange.publish(
                            aio_pika.Message(
                                body=response_text.encode(),
                                reply_to=reply_to,
                                correlation_id=message.correlation_id,
                                content_type=pjrpc.common.DEFAULT_CONTENT_TYPE,
                            ),
                            routing_key=reply_to,
                        )

            message.ack()

        except Exception as e:
            logger.exception("jsonrpc request handling error: %s", e)
Example #2
    async def handle_message(self, message: IncomingMessage) -> None:
        message.ack()

        correlation_id = message.correlation_id
        future = self._reply_futures.pop(correlation_id, None)

        if future is not None:
            try:
                try:
                    accept = self.accept = prepare_accept_content(
                        self.cluster_proxy.accept)
                    body = loads(message.body,
                                 content_type=message.content_type,
                                 content_encoding=message.content_encoding,
                                 accept=accept)
                except Exception as e:
                    future.set_exception(e)
                else:
                    future.set_result(body)
            except asyncio.InvalidStateError as e:
                # Catch errors raised after the future was cancelled by an asyncio.wait_for timeout.
                sys.stdout.write("{}, correlation id: {}".format(
                    e, correlation_id))
        else:
            sys.stdout.write(
                "Unknown correlation id: {}".format(correlation_id))
Example #3
 async def _process_message(self, message: IncomingMessage,
                            registered_obj: RegisteredCoroOrGen,
                            **callable_kwargs):
     logger.debug(f"Processing message {message.delivery_tag}")
     try:
         if inspect.iscoroutinefunction(registered_obj.coro_or_gen):
             # it returns
             return_val = await registered_obj.coro_or_gen(**callable_kwargs)
             if return_val is not None:
                 await self._handle_return_value(message, return_val)
         else:
             # it yields
             async for v in registered_obj.coro_or_gen(**callable_kwargs):
                 if v is not None:
                     await self._handle_return_value(message, v)
         # At this point, we've processed the message and sent along new ones successfully, so we can ack the original message.
         # TODO consider doing all of this transactionally
         logger.debug(f"Acking message {message.delivery_tag}")
         message.ack()
     except asyncio.CancelledError:
         # We were told to cancel. nack with requeue so someone else will pick up the work
         message.nack(requeue=True)
         logger.info("Cancellation requested.")
         raise
     except Exception as e_:
         await self._handle_message_exception(message, e_)
Example #4
async def consumer_func(message: aio_pika.IncomingMessage):
    json_body = json.loads(message.body.decode('utf-8'))
    # run the blocking check() in the thread executor and await its result
    result = await loop.run_in_executor(executor, check, json_body['ip'], json_body['port'])
    if result:
        message.delivery_mode = 2
        await pika_channel.default_exchange.publish(message, routing_key=os.environ['RABBIT_MOTD_QUEUE'])
    message.ack()
Example #5
def on_message(message: IncomingMessage):
    try:
        data_dict = json.loads(message.body.decode())
        # check if the packet={'key':[rg]<String>,'path_param':[_SHA1_attr/d]<String>,'data':[adapter packet]<JSON>} from the queue is empty
        if data_dict is None or not bool(data_dict):
            print('> data_dict is empty. data_dict---- ' + str(data_dict))
            return 'failed'
        else:
            # print('> redis_key: ' + data_dict['key'] + '\nredis_path_param: ' + data_dict['path_param'] + '\nadapter_data_packet: ' + data_dict['data'])
            # this needs to be tested
            exp_val = data_dict['data']['id'] + '_' + data_dict['data'][
                'observationDateTime']
    except json.decoder.JSONDecodeError as json_error:
        print('> JsonDecodeError!!!!' + str(json_error))
        return 'failed'

    try:
        # check if redis already has existing data?
        chck_if_rg_exists = redis_client.jsonget(data_dict['key'],
                                                 Path.rootPath())
        # (nil) - never seen packet -> None, Insert
        # {}, !exists
        # data, upsert
        # redis already has previous data
        if chck_if_rg_exists is not None:
            print('> ' + str(data_dict['key']) + ' exists.')
            print('> Upserting ' + str(data_dict['data']) + ' at .' +
                  data_dict['path_param'])
            print('> Upsertion still in progress...')
            redis_client.jsonset(
                data_dict['key'],
                '.' + data_dict['key'] + '.' + data_dict['path_param'],
                data_dict['data'])
            print('> Upsertion successful!')
            message.ack()
        # First time the ingestor receives a packet belonging to RG
        else:
            print('> RG=' + data_dict['key'] +
                  ' is not present in Redis. Inserting RG with {} at root.')
            # create first entry in the redis server
            # origin = {rg: {SHA : {}}}
            origin = {data_dict['key']: {data_dict['path_param']: {}}}
            redis_client.jsonset(data_dict['key'], Path.rootPath(), origin)
            print('> Insertion still in progress...')
            # insert data now
            # JSON.GET resource-group-key/redis-key .path_param (SHA1...)
            # JSON.GET resource-group .resource-group.SHA1.... {adapter_data}
            redis_client.jsonset(
                data_dict['key'],
                '.' + data_dict['key'] + '.' + data_dict['path_param'],
                data_dict['data'])
            print('> Insertion successful!')
            message.ack()

    except redis.exceptions.ResponseError as r_error:
        print('> Response Error from Redis!!!! ' + str(r_error))
        return 'failed'
Example #6
 async def _on_message(self, message: aio_pika.IncomingMessage):
     logging.debug(f'Received message {str(message.body)}.')
     if getattr(self, 'custom_consume_function', None):
         if not await self.custom_consume_function(**self.custom_consume_function_kwargs):
             self.logger.error('Failed process message.')
             message.nack()
             return
     self.logger.info('Message has been successfully processed.')
     message.ack()
Example #7
 def __call__(self, message: IncomingMessage):
     value = int(message.body.decode())
     if value == 3 and self.rejected < 3:
         print('[blue]Nacking[/blue] 3')
         message.nack()
         self.rejected += 1
     elif value == 5:
         print('[red]Rejecting[/red] 5')
         message.reject()
     else:
         print(f'[green]Processed[/green] {value}')
         message.ack()
Example #8
async def on_message(message: IncomingMessage):
    print(message.body)
    body = loads(message.body)
    event = body['event']
    #print('# {} [received]'.format(event))
    print('# {} [received] sleep 1 sec..'.format(event))
    await asyncio.sleep(1)  # Represents async I/O operations
    del body['event']
    await dispatch(event, body)
    # Manual ack here, not via the message.process() context manager,
    # which auto-acks even when an error occurs.
    message.ack()
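
The comments above contrast a manual ack with aio_pika's message.process() context manager. For comparison, here is a minimal sketch of the same handler using process(), assuming a recent aio_pika where process() is an async context manager; dispatch() is a stand-in for the application-level dispatcher used above, and the exact behaviour when an exception escapes the block depends on the requeue/reject options, so check the docs for the version in use.

import asyncio
import json

import aio_pika


async def dispatch(event: str, payload: dict) -> None:
    # stand-in for the application-level dispatch() used in the example above
    print(event, payload)


async def on_message_with_process(message: aio_pika.IncomingMessage) -> None:
    # process() acknowledges the message when the block exits cleanly,
    # so no manual ack is needed.
    async with message.process(requeue=True):
        body = json.loads(message.body)
        event = body.pop('event')
        await asyncio.sleep(1)  # placeholder for real async I/O
        await dispatch(event, body)
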
Example #9
    async def orderValidation(self, msg: IncomingMessage):
        startTime = time.time()
        self.app['EVENT_PROGRESS'].labels('ORDER').inc()
        data = msg.body.decode()
        now = datetime.now()
        now = now.strftime('%Y-%m-%d %H:%M:%S')
        data = data.split('|')
        account = data[0]
        action = data[1]
        order = data[2].split('.')
        order = {
            'orderId': order[3],
            'timestamp': now,
            'account': account,
            'action': action,
            'stock': order[0],
            'price': int(order[1]),
            'vol': int(order[2]),
            'cumVol': 0,
            'status': '0',
            'selected': False
        }

        params = {
            'account': account if action == 'BUY' else f"{account}.{order['stock']}",
            'amount': order['price'] * order['vol'] if action == 'BUY' else order['vol'],
        }

        # Check account availability to account services API
        async with self.webservice.request('POST',
                                           'http://localhost:8001/order',
                                           json=params) as resp:
            valid = await resp.text()
            if valid == 'OK':
                self.app['EVENT_COUNTER'].labels('ORDER', 'SUCCESS').inc()
                await self.orderCollection.insert_one(order)
                order['eventLatency'] = self.app['EVENT_LATENCY']
                order['eventProgress'] = self.app['EVENT_PROGRESS']
                order['orderCounter'] = self.app['ORDER_COUNTER']
                order['eventTime'] = startTime
                # Send valid order to order observable
                self.messages.on_next((msg, order))
            else:
                self.app['EVENT_COUNTER'].labels('ORDER', 'FAIL').inc()
                self.app['EVENT_PROGRESS'].labels('ORDER').dec()
                order['status'] = 'R'
                await self.orderCollection.insert_one(order)
                msg.ack()
                latency = time.time() - startTime
                self.app['EVENT_LATENCY'].labels('ORDER',
                                                 'FAIL').observe(latency)
Example #10
async def on_message(message: IncomingMessage):
    print('> *****Inside key_generator*****')
    global data_redis_q, rg_dict

    data_redis_q = {}

    # data_dict is actual adapter packet
    data_dict = json.loads(message.body.decode())
    # default value is for all aqm/flood type sensors
    default = '_d'

    # extract resource-group from the data packet
    res_id = data_dict['id']
    rg = res_id.split('/')[3]

    # generate the SHA1 of the id
    sha_id = hashlib.sha1(res_id.encode())

    # print("> RG from data---- " + rg)
    # print("> rg_dict---- " + str(rg_dict))
    # print("> SHA1[res_id]---- " + sha_id.hexdigest())

    # Check if _rg is present in rg_dict{}
    if rg in rg_dict.keys():
        print('> RG is present.')

        # encode SHA1 of resource-id
        attribute = rg_dict[rg]
        path_param = '_' + sha_id.hexdigest() + '_' + data_dict[attribute]
    else:
        print('> RG is not present.')
        path_param = '_' + sha_id.hexdigest() + '_' + default

    # generate a dict = { 'key' : <resource-group-name>, 'path_param': <_SHA1(id)_attr/d>, 'data': adapter packet }
    data_redis_q['key'] = rg.replace('-', '_')
    data_redis_q['path_param'] = path_param
    data_redis_q['data'] = data_dict

    # print('> (on_message) routing_key---- '+routing_key)

    # publish the data into redis-ingestion-queue
    print('> bool(data_redis_q):---- ' + str(bool(data_redis_q)))
    if data_redis_q is not None and bool(data_redis_q):
        await latest_exchange.publish(
            message=Message(json.dumps(data_redis_q).encode()),
            routing_key=routing_key)
        print('> Message published.')
        # message.ack()
    else:
        # do nothing
        pass
    message.ack()
Example #11
 async def handle_msg(msg: IncomingMessage):
     print(f"dcmq: got message with routing key {msg.routing_key}")
     ds = datasetFromBinary(msg.body)
     uri = ""
     if "uri" in msg.headers:
         uri = msg.headers["uri"]
     if ds is not None:
         try:
             await dcmhandler(channel, ds, uri, msg.routing_key, *additional_args)
             msg.ack()
         except Exception as e:
             msg.reject(requeue=True)
             raise
Example #12
File: client.py Project: sdss/clu
    def __init__(
        self,
        message: apika.IncomingMessage,
        log: Optional[logging.Logger] = None,
    ):

        self.command_id: str | None = None
        self.sender: str | None = None
        self.body = {}

        self.message = message
        self._log = log

        self.is_valid = True

        # Acknowledges receipt of message
        message.ack()

        self.info: Dict[Any, Any] = message.info()

        self.headers = self.info["headers"]
        for key in self.headers:
            if isinstance(self.headers[key], bytes):
                self.headers[key] = self.headers[key].decode()

        self.message_code = self.headers.get("message_code", None)

        if self.message_code is None:
            self.is_valid = False
            if self._log:
                self._log.warning(
                    f"received message without message_code: {message}")
            return

        self.sender = self.headers.get("sender", None)
        if self.sender is None and self._log:
            self._log.warning(f"received message without sender: {message}")

        self.command_id = message.correlation_id

        command_id_header = self.headers.get("command_id", None)
        if command_id_header and command_id_header != self.command_id:
            if self._log:
                self._log.error(f"mismatch between message "
                                f"correlation_id={self.command_id} "
                                f"and header command_id={command_id_header} "
                                f"in message {message}")
            self.is_valid = False
            return

        self.body = json.loads(self.message.body.decode())
Example #13
 async def on_rabbitmq_message(self, message: aio_pika.IncomingMessage):
     self.concurrent_requests += 1
     if self.concurrent_requests >= self.max_concurrent_requests:
         self.logger.warning('TOO MANY CONCURRENT REQUESTS... requeuing')
         message.reject(requeue=True)
         self.concurrent_requests -= 1
         return
     if not self.status.is_active():
         message.reject(requeue=True)
         self.concurrent_requests -= 1
         return
     callback_url = self.get_callback_for_message(message)
     if not callback_url:
         self.logger.error('WTF? unknown routing key: {}'.format(
             message.routing_key))
         message.reject(requeue=True)
         self.concurrent_requests -= 1
         return
     try:
         msg = json.dumps({
             'headers': message.headers,
             'content_encoding': message.content_encoding,
             'message_id': message.message_id,
             'type': message.type,
             'routing_key': message.routing_key,
             'body': message.body.decode(message.content_encoding or 'utf8'),
         })
         await self.kiss_api.send_msg(msg, callback_url)
         message.ack()
     except KissApiException as e:
         message.reject(requeue=True)
         raise e
     except KissOfflineException as e:
         message.reject(requeue=True)
         self.root_service.loop.create_task(
             self.take_a_break(
                 60, 'Kiss is Offline. Stopping {} for 1 min. Details: {}'.
                 format(self, e)))
     finally:
         self.concurrent_requests -= 1
Example #14
 async def callback(self, message: IncomingMessage):
     """
     Process all messages from the 'core' queue via the self.api object.

     :param message: incoming message from the 'core' queue
     """
     with message.process():
         try:
             logging.debug(" [x] Received %r" % message.body)
             await self.api.process(message.body.decode("utf-8"))
             # no explicit ack needed here: message.process() acks on clean exit
         except Exception as e:
             logging.error("Broker callback error")
             logging.error(e)
Example #15
async def received_job(message: IncomingMessage) -> None:
    """ AMQP job hook """

    log.info("Received new job", extra=message.info())
    try:
        job = _validate_message(message)
    except ValidationError as e:
        log.error(f"{e}. Rejecting without requeuing", extra=message.info())
        message.reject(requeue=False)
        return

    result = await _process_job(job)
    message.ack()

    log.info("Job completed",
             extra={
                 "job_id": result.job.id,
                 "url": result.job.url,
                 "status": result.status,
                 "size": result.size
             })
Example #16
File: aiopika.py Project: dyus/sbus
    async def on_message(self, subscriber: AbstractSubscriber, message: IncomingMessage):
        try:
            context_data = {
                Headers.message_id: message.message_id or str(uuid.uuid4()),
                Headers.routing_key: message.routing_key,
            }
            context_data.update(**message.headers or {})
            context = Context(**context_data)

            if message.type == 'call':
                result = await self.rpc.on_call_message(message.routing_key, message, context,
                                                        subscriber, self.serializer)
            else:
                logger.info('sbus <~~~ %s: %s', message.routing_key, message.body[:256])
                result = await subscriber.on_message(message.body, message.routing_key, context,
                                                     serializer=self.serializer)

            message.ack()
            return result

        except Exception as error:
            await self._handle_exception(message, error)
Example #17
    async def process_incoming_message(self,
                                       message: aio_pika.IncomingMessage):
        request_payload = message.body
        request = None
        if self.request_class:
            request = self.request_class()
            request.ParseFromString(request_payload)
        else:
            # no request_class configured: eval() the raw payload
            # (assumes the payload comes from a trusted producer)
            request = eval(request_payload.decode())

        result = await self.handler(request)

        if self.response_class:
            result = self.generate_response(result)
        else:
            result = str(result).encode()

        await self.publisher.default_exchange.publish(
            aio_pika.Message(body=result,
                             correlation_id=message.correlation_id),
            routing_key=message.reply_to,
        )
        message.ack()
Example #18
    async def _handle_msg(
        self,
        action: BindConsumer,
        pika_msg: PikaIncomingMessage,
        msg: AmqpMsg,
    ):
        self.log.info('received msg {} in queue {} '
                      'from exchange {} topic {}'.format(
                          pika_msg.delivery_tag,
                          action.queue,
                          msg.exchange,
                          msg.topic,
                      ))
        self.log.debug('msg received: {}'.format(str(msg)))

        result = True
        try:
            result = await action.callback(msg)
            self.log.debug('msg processed')
        except Exception as e:
            self.log.error('an error occurred when processing message')
            result = False
            if self._consumer_error_handlers:
                for handler in self._consumer_error_handlers:
                    handler(e)
            else:
                traceback.print_exc()

        if not action.auto_ack and result:
            self.log.debug('sending ack for message {}'.format(
                pika_msg.delivery_tag))
            pika_msg.ack()
        elif not action.auto_ack:
            self.log.debug('sending nack for message {}'.format(
                pika_msg.delivery_tag))
            pika_msg.nack(requeue=action.nack_requeue)
Example #19
 async def message_consume(self, exchange: aio_pika.Exchange,
                           message: aio_pika.IncomingMessage):
     await self.work(message)
     message.ack()
Example #20
 def on_response(self, message: IncomingMessage):
     message.ack()
     if message.correlation_id in self.futures:
         future = self.futures.pop(message.correlation_id)
         future.set_result(message.body)
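
The handler above and Example #2 both resolve a pending future by correlation id; the InvalidStateError handling in Example #2 exists because the requesting side typically awaits that future under a timeout. A minimal caller-side sketch of that pattern, where publish_request and the module-level _reply_futures mapping are illustrative stand-ins for whatever the surrounding class actually uses:

import asyncio
import uuid

_reply_futures: dict = {}  # the mapping the reply handler pops from


async def call_and_wait(publish_request, payload: bytes, timeout: float = 5.0):
    # publish_request is a hypothetical coroutine that sends the request with
    # reply_to and the given correlation_id set on the outgoing message.
    correlation_id = str(uuid.uuid4())
    future = asyncio.get_running_loop().create_future()
    _reply_futures[correlation_id] = future
    await publish_request(payload, correlation_id)
    # If this times out, wait_for() cancels the future but leaves it registered,
    # so a late reply hits the InvalidStateError branch of Example #2's handler.
    return await asyncio.wait_for(future, timeout)
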
Example #21
 async def on_message(self, message: IncomingMessage):
     message.ack()
     routing_key = message.routing_key
     body = message.body.decode()
     await self.listener(routing_key, json.loads(body))
Example #22
 async def consumer(message: IncomingMessage):
     entry = json.loads(message.body.decode('utf-8'))
     await db.save(entry)
     message.ack()
Example #23
async def on_message(message: IncomingMessage):
    print(" [x] Received message")
    event = json.loads(message.body.decode('utf-8'))
    print("Message body is:", message.body)
    await bot.check_message(event)
    message.ack()
Example #24
async def on_message(message: aio_pika.IncomingMessage):
    # TODO process messages
    print(message.body)
    message.ack()
    return message