async def qos(
    self,
    prefetch_size: int = 0,
    prefetch_count: int = 0,
    global_qos: bool = False,
) -> pika.frame.Method:
    """Apply Basic.Qos on this channel and wait for Basic.QosOk.

    :param prefetch_size: prefetch window in octets (0 = no limit).
    :param prefetch_count: max unacknowledged deliveries (0 = no limit).
    :param global_qos: apply per-connection rather than per-consumer.
    :return: the Basic.QosOk method frame.
    """
    with wrap2span(
        name=AmqpSpan.NAME_QOS,
        kind=AmqpSpan.KIND_CLIENT,
        cls=AmqpSpan,
        app=self.amqp.app,
    ) as span:
        async with self._lock:
            span.tag(AmqpSpan.TAG_CHANNEL_NUMBER,
                     str(self._ch.channel_number))
            # Resolved by _on_basic_qos_ok when the broker replies.
            fut: asyncio.Future = asyncio.Future()
            cb = partial(self._on_basic_qos_ok, fut)
            self._ch.basic_qos(
                prefetch_size=prefetch_size,
                prefetch_count=prefetch_count,
                global_qos=global_qos,
                callback=cb,
            )
            # Wait for QosOk or channel closure, whichever comes first.
            # NOTE(review): reuses bind_timeout — confirm a dedicated
            # qos timeout is not intended.
            await asyncio.wait(
                [fut, self._close_fut],
                timeout=self.amqp.cfg.bind_timeout,
                return_when=asyncio.FIRST_COMPLETED,
            )
            if self._close_fut.done():
                # Channel closed while waiting: surface its error.
                self._close_fut.result()
            return fut.result()
async def channel(
    self,
    on_close: Optional[Callable[[pika.channel.Channel, Exception],
                                Awaitable[None]]],
    name: Optional[str],
) -> pika.channel.Channel:
    """Open a new pika channel on the current connection.

    :param on_close: forwarded to the channel-open handler; may be None.
    :param name: optional channel name, recorded as a span tag.
    :return: the opened channel.
    :raises pika.exceptions.AMQPConnectionError: if not connected.
    :raises RuntimeError: if the open future resolved with something
        that is not a pika channel.
    """
    with wrap2span(
        name=AmqpSpan.NAME_CHANNEL,
        kind=AmqpSpan.KIND_CLIENT,
        cls=AmqpSpan,
        app=self.pika.app,
    ) as span:
        if name is not None:
            span.tag(AmqpSpan.TAG_CHANNEL_NAME, name)
        async with self._lock:
            if self._conn is None:
                raise pika.exceptions.AMQPConnectionError()
            # _on_channel_open resolves this future with the channel.
            self._fut = asyncio.Future()
            self._conn.channel(
                on_open_callback=partial(self._on_channel_open, on_close))
            try:
                channel: Any = await asyncio.wait_for(
                    self._fut, timeout=self.cfg.channel_open_timeout)
                if not isinstance(channel, pika.channel.Channel):
                    raise RuntimeError()
            finally:
                # Clear the shared future even on timeout/failure.
                self._fut = None
            span.tag(AmqpSpan.TAG_CHANNEL_NUMBER,
                     str(channel.channel_number))
            return channel
async def queue_bind(
    self,
    queue: str,
    exchange: str,
    routing_key: Optional[str] = None,
    arguments: Optional[dict] = None,
) -> pika.frame.Method:
    """Bind *queue* to *exchange* and wait for Queue.BindOk.

    :param queue: queue name.
    :param exchange: exchange name.
    :param routing_key: optional routing key for the binding.
    :param arguments: optional binding arguments.
    :return: the Queue.BindOk method frame.
    """
    with wrap2span(
        name=AmqpSpan.NAME_BIND,
        kind=AmqpSpan.KIND_CLIENT,
        cls=AmqpSpan,
        app=self.amqp.app,
    ) as span:
        async with self._lock:
            span.tag(AmqpSpan.TAG_CHANNEL_NUMBER,
                     str(self._ch.channel_number))
            # Resolved by _on_bindok when the broker confirms.
            fut: asyncio.Future = asyncio.Future()
            cb = partial(self._on_bindok, fut)
            self._ch.queue_bind(
                queue=queue,
                exchange=exchange,
                routing_key=routing_key,
                arguments=arguments,
                callback=cb,
            )
            # Wait for BindOk or channel closure, whichever comes first.
            await asyncio.wait(
                [fut, self._close_fut],
                timeout=self.amqp.cfg.bind_timeout,
                return_when=asyncio.FIRST_COMPLETED,
            )
            if self._close_fut.done():
                # Channel closed while waiting: surface its error.
                self._close_fut.result()
            return fut.result()
async def connect(
    self,
    on_close: Callable[['_Connection', Exception], Awaitable[None]],
) -> None:
    """Establish the AMQP connection and track its state.

    :param on_close: awaited when the connection is lost.
    :raises RuntimeError: if the open callback resolved the future with
        anything other than 1.
    :raises asyncio.TimeoutError: if connecting exceeds connect_timeout.
    """
    with wrap2span(
        name=AmqpSpan.NAME_CONNECT,
        kind=AmqpSpan.KIND_CLIENT,
        cls=AmqpSpan,
        app=self.pika.app,
    ) as span:
        # Masked URL so credentials are not recorded in the span.
        span.tag(AmqpSpan.TAG_URL, self.pika._masked_url)
        async with self._lock:
            self._state = self.STATE_CONNECTING
            self._on_close = on_close
            # Resolved by the open / open-error callbacks below.
            self._fut = asyncio.Future()
            self._conn = AsyncioConnection(
                parameters=pika.URLParameters(self.cfg.url),
                on_open_callback=self._on_connection_open,
                on_open_error_callback=self._on_connection_open_error,
                on_close_callback=self._on_connection_closed,
            )
            try:
                await asyncio.wait_for(self._fut,
                                       timeout=self.cfg.connect_timeout)
                # presumably the open callback sets 1 on success —
                # TODO confirm against _on_connection_open.
                if self._fut.result() != 1:
                    raise RuntimeError()
                self._state = self.STATE_CONNECTED
            except Exception:
                self._state = self.STATE_CLOSED
                raise
            finally:
                self._fut = None
async def cancel(self,
                 consumer_tag: Optional[str] = None) -> pika.frame.Method:
    """Cancel a consumer (Basic.Cancel) and wait for Basic.CancelOk.

    :param consumer_tag: tag to cancel; defaults to the tag saved by
        ``consume``.
    :raises UserWarning: if no consumer tag is known.
    :return: the Basic.CancelOk method frame.
    """
    with wrap2span(
        name=AmqpSpan.NAME_CANCEL,
        kind=AmqpSpan.KIND_CLIENT,
        cls=AmqpSpan,
        app=self.amqp.app,
    ) as span:
        async with self._lock:
            span.tag(AmqpSpan.TAG_CHANNEL_NUMBER,
                     str(self._ch.channel_number))
            if consumer_tag is None:
                consumer_tag = self._consumer_tag
            if consumer_tag is None:
                raise UserWarning('consumer_tag is empty')
            # Resolved by _on_cancel_ok when the broker confirms.
            fut: asyncio.Future = asyncio.Future()
            cb = partial(self._on_cancel_ok, fut)
            self._ch.basic_cancel(consumer_tag=consumer_tag, callback=cb)
            # NOTE(review): uses exchange_declare_timeout here — looks
            # like a copy-paste; confirm a cancel timeout is not meant.
            await asyncio.wait(
                [fut, self._close_fut],
                timeout=self.amqp.cfg.exchange_declare_timeout,
                return_when=asyncio.FIRST_COMPLETED,
            )
            if self._close_fut.done():
                # Channel closed while waiting: surface its error.
                self._close_fut.result()
            return fut.result()
async def execute(self, query: str, *args: Any,
                  query_name: Optional[str] = None) -> Optional[int]:
    """Execute *query* on the cursor and return its rowcount.

    The underlying cursor call is blocking, so it is dispatched to the
    default executor.

    :param query: SQL text to execute.
    :param args: positional query parameters.
    :param query_name: optional name recorded as a span tag.
    :return: the cursor's rowcount after execution.
    """
    db = self._conn._db
    with wrap2span(
        name=OraSpan.NAME_EXECUTE,
        kind=OraSpan.KIND_CLIENT,
        cls=OraSpan,
        app=db.app,
    ) as span:
        span.set_name4adapter(
            db.app.logger.ADAPTER_PROMETHEUS,
            OraSpan.P8S_NAME_EXECUTE,
        )
        if query_name is not None:
            span.tag(OraSpan.TAG_QUERY_NAME, query_name)
        async with self._lock:
            if db.cfg.log_query:
                span.annotate(OraSpan.ANN_QUERY, query)
                span.annotate4adapter(
                    db.app.logger.ADAPTER_ZIPKIN,
                    OraSpan.ANN_QUERY,
                    json_encode({'query': str(query)}),
                )
                params_repr = str(args)
                span.annotate(OraSpan.ANN_PARAMS, params_repr)
                span.annotate4adapter(
                    db.app.logger.ADAPTER_ZIPKIN,
                    OraSpan.ANN_PARAMS,
                    json_encode({'query_params': params_repr}),
                )
            await self._loop.run_in_executor(
                None, self._ora_cur.execute, query, args)
            return self._ora_cur.rowcount
async def _connect(self, max_attempts: Optional[int] = None) -> None:
    """Connect to AMQP with retries, then (re)open the channels.

    :param max_attempts: maximum connection attempts; ``None`` retries
        forever.
    :raises PrepareError: if ``max_attempts`` attempts all fail.
    """
    with wrap2span(
        name=AmqpSpan.NAME_PREPARE,
        kind=AmqpSpan.KIND_CLIENT,
        cls=AmqpSpan,
        app=self.app,
    ):
        attempt: int = 0
        connected = False
        while max_attempts is None or attempt < max_attempts:
            attempt += 1
            try:
                self._conn = _Connection(self, self.cfg)
                self.app.log_info("Connecting to %s", self._masked_url)
                await self._conn.connect(self._on_disconnect)
                self.app.log_info("Connected to %s", self._masked_url)
                connected = True
                break
            except Exception as err:
                self.app.log_err(err)
                if self._conn is not None:
                    try:
                        # Best-effort cleanup of the failed connection.
                        await self._conn.close()
                    except Exception:  # nosec
                        pass
                await asyncio.sleep(self.cfg.connect_retry_delay)
        # Fix: the original checked ``attempt >= max_attempts`` after
        # the loop, raising PrepareError even when the final allowed
        # attempt actually succeeded. Track success explicitly instead.
        if not connected:
            raise PrepareError("Could not connect to %s" % self._masked_url)
        await self._open_channels()
        if self._started:
            # Resume consumers on the freshly opened channels.
            await asyncio.gather(*[ch.start() for ch in self._channels])
async def ack(self, delivery_tag: int, multiple: bool = False) -> None:
    """Acknowledge a delivered message on this channel.

    :param delivery_tag: server-assigned tag of the delivery to ack.
    :param multiple: if True, acknowledge all deliveries up to and
        including ``delivery_tag``.
    """
    with wrap2span(
        name=AmqpSpan.NAME_ACK,
        kind=AmqpSpan.KIND_CLIENT,
        cls=AmqpSpan,
        app=self.amqp.app,
    ) as ack_span:
        channel = self._ch
        ack_span.tag(
            AmqpSpan.TAG_CHANNEL_NUMBER,
            str(channel.channel_number),
        )
        channel.basic_ack(delivery_tag=delivery_tag, multiple=multiple)
async def query_all(
    self,
    query: str,
    *args: Any,
    timeout: Optional[float] = None,
    query_name: Optional[str] = None,
    model_cls: Optional[Type[BaseModel]] = None,
) -> List[Union[asyncpg.Record, BaseModel]]:
    """Run *query* and fetch all resulting rows.

    :param query: SQL text.
    :param args: positional query parameters.
    :param timeout: per-query timeout in seconds, passed to asyncpg.
    :param query_name: optional name recorded as a span tag.
    :param model_cls: if given, each row dict is converted to this model.
    :return: records, or model instances when ``model_cls`` is set.
    """
    with wrap2span(
        name=PgSpan.NAME_QUERY_ALL,
        kind=PgSpan.KIND_CLIENT,
        cls=PgSpan,
        app=self._db.app,
    ) as span:
        span.set_name4adapter(
            self._db.app.logger.ADAPTER_PROMETHEUS,
            PgSpan.P8S_NAME_QUERY_ALL,
        )
        if query_name is not None:
            span.tag(PgSpan.TAG_QUERY_NAME, query_name)
        async with self._lock:
            span.annotate(PgSpan.ANN_PID, self.pid)
            span.annotate4adapter(
                self._db.app.logger.ADAPTER_ZIPKIN,
                PgSpan.ANN_PID,
                self._json_encode({'pid': str(self.pid)}),
            )
            if self._db.cfg.log_query:
                span.annotate(PgSpan.ANN_QUERY, query)
                span.annotate4adapter(
                    self._db.app.logger.ADAPTER_ZIPKIN,
                    PgSpan.ANN_QUERY,
                    self._json_encode({'query': dedent(query).strip()}),
                )
                args_enc = self._json_encode(args)
                span.annotate(PgSpan.ANN_PARAMS, args_enc)
                span.annotate4adapter(
                    self._db.app.logger.ADAPTER_ZIPKIN,
                    PgSpan.ANN_PARAMS,
                    self._json_encode({'query_params': args_enc}),
                )
            res = await self._conn.fetch(query, *args, timeout=timeout)
            if self._db.cfg.log_result:
                res_dict = [dict(row) for row in res]
                res_enc = self._json_encode(res_dict)
                span.annotate(PgSpan.ANN_RESULT, res_enc)
                span.annotate4adapter(
                    self._db.app.logger.ADAPTER_ZIPKIN,
                    PgSpan.ANN_RESULT,
                    self._json_encode({'result': res_enc}),
                )
            if model_cls is not None:
                return [model_cls(**(dict(row))) for row in res]
            else:
                return res
async def executemany(
    self,
    query: str,
    args: Any,
    timeout: Optional[float] = None,
    query_name: Optional[str] = None,
) -> str:
    """Execute *query* for each element of *args* (asyncpg executemany).

    :param query: SQL text.
    :param args: iterable of parameter tuples, one per execution.
    :param timeout: timeout in seconds, passed to asyncpg.
    :param query_name: optional name recorded as a span tag.
    :return: the command status string returned by asyncpg.
    """
    with wrap2span(
        name=PgSpan.NAME_EXECUTEMANY,
        kind=PgSpan.KIND_CLIENT,
        cls=PgSpan,
        app=self._db.app,
    ) as span:
        span.set_name4adapter(
            self._db.app.logger.ADAPTER_PROMETHEUS,
            PgSpan.P8S_NAME_EXECUTEMANY,
        )
        if query_name is not None:
            span.tag(PgSpan.TAG_QUERY_NAME, query_name)
        async with self._lock:
            span.annotate(PgSpan.ANN_PID, self.pid)
            span.annotate4adapter(
                self._db.app.logger.ADAPTER_ZIPKIN,
                PgSpan.ANN_PID,
                self._json_encode({'pid': str(self.pid)}),
            )
            if self._db.cfg.log_query:
                span.annotate(PgSpan.ANN_QUERY, query)
                span.annotate4adapter(
                    self._db.app.logger.ADAPTER_ZIPKIN,
                    PgSpan.ANN_QUERY,
                    self._json_encode({'query': dedent(query).strip()}),
                )
                args_enc = self._json_encode(args)
                span.annotate(PgSpan.ANN_PARAMS, args_enc)
                span.annotate4adapter(
                    self._db.app.logger.ADAPTER_ZIPKIN,
                    PgSpan.ANN_PARAMS,
                    self._json_encode({'query_params': args_enc}),
                )
            res = await self._conn.executemany(query, args, timeout=timeout)
            if self._db.cfg.log_result:
                span.annotate(PgSpan.ANN_RESULT, str(res))
                span.annotate4adapter(
                    self._db.app.logger.ADAPTER_ZIPKIN,
                    PgSpan.ANN_RESULT,
                    self._json_encode({'result': str(res)}),
                )
            return res
async def query_all(
    self, *args: Any, timeout: Optional[float] = None
) -> List[asyncpg.Record]:
    """Execute this prepared statement and fetch all rows.

    :param args: positional parameters for the statement.
    :param timeout: fetch timeout in seconds, passed to asyncpg.
    :return: all result records.
    """
    with wrap2span(
        name=PgSpan.NAME_QUERY_ALL_PREPARED,
        kind=PgSpan.KIND_CLIENT,
        cls=PgSpan,
        app=self._conn._db.app,
    ) as span:
        span.set_name4adapter(
            self._conn._db.app.logger.ADAPTER_PROMETHEUS,
            PgSpan.P8S_NAME_QUERY_ALL_PREPARED,
        )
        span.annotate(PgSpan.ANN_PID, self._conn.pid)
        span.annotate4adapter(
            self._conn._db.app.logger.ADAPTER_ZIPKIN,
            PgSpan.ANN_PID,
            self._json_encode({'pid': str(self._conn.pid)}),
        )
        span.annotate(PgSpan.ANN_STMT_NAME, self.stmt_name)
        span.annotate4adapter(
            self._conn._db.app.logger.ADAPTER_ZIPKIN,
            PgSpan.ANN_STMT_NAME,
            self._json_encode({'statement_name': self.stmt_name}),
        )
        if self._query_name is not None:
            span.tag(PgSpan.TAG_QUERY_NAME, self._query_name)
        async with self._conn._lock:
            if self._conn._db.cfg.log_query:
                args_enc = self._json_encode(args)
                span.annotate(PgSpan.ANN_PARAMS, args_enc)
                span.annotate4adapter(
                    self._conn._db.app.logger.ADAPTER_ZIPKIN,
                    PgSpan.ANN_PARAMS,
                    self._json_encode({'query_params': args_enc}),
                )
            res = await self._pg_stmt.fetch(*args, timeout=timeout)
            if self._conn._db.cfg.log_result:
                res_dict = [dict(row) for row in res]
                res_enc = self._json_encode(res_dict)
                span.annotate(PgSpan.ANN_RESULT, res_enc)
                span.annotate4adapter(
                    self._conn._db.app.logger.ADAPTER_ZIPKIN,
                    PgSpan.ANN_RESULT,
                    self._json_encode({'result': res_enc}),
                )
            return res
async def callfunc(self, name: str, return_type: Type, args: list) -> Any:
    """Call stored function *name* and return its result.

    The underlying cursor call is blocking, so it runs in the default
    executor.

    :param name: name of the stored function.
    :param return_type: driver-level type of the function's result.
    :param args: positional arguments for the function.
    :return: the function's return value.
    """
    with wrap2span(
        name=OraSpan.NAME_CALLFUNC,
        kind=OraSpan.KIND_CLIENT,
        cls=OraSpan,
        app=self._conn._db.app,
    ) as span:
        span.set_name4adapter(
            self._conn._db.app.logger.ADAPTER_PROMETHEUS,
            OraSpan.P8S_NAME_CALLFUNC,
        )
        span.tag(OraSpan.TAG_FUNC_NAME, name)
        async with self._lock:
            if self._conn._db.cfg.log_query:
                span.annotate(OraSpan.ANN_QUERY,
                              '%s:%r' % (name, return_type))
                span.annotate4adapter(
                    self._conn._db.app.logger.ADAPTER_ZIPKIN,
                    OraSpan.ANN_QUERY,
                    json_encode({'proc': '%s:%r' % (name, return_type)}),
                )
                span.annotate(OraSpan.ANN_PARAMS, str(args))
                span.annotate4adapter(
                    self._conn._db.app.logger.ADAPTER_ZIPKIN,
                    OraSpan.ANN_PARAMS,
                    json_encode({'params': str(args)}),
                )
            # todo timeout
            res = await self._conn._db.loop.run_in_executor(
                None, self._ora_cur.callfunc, name, return_type, args)
            if self._conn._db.cfg.log_result:
                span.annotate(OraSpan.ANN_RESULT, str(res))
                span.annotate4adapter(
                    self._conn._db.app.logger.ADAPTER_ZIPKIN,
                    OraSpan.ANN_RESULT,
                    json_encode({
                        'result': str(res),
                        'args': str(args)
                    }),
                )
            return res
async def _scan(self) -> None:
    """Scanner loop: repeatedly search for due tasks, execute them,
    then sleep until the next scheduled scan.

    Runs until ``self._stopping`` becomes true.

    :raises UserWarning: if the component is not fully initialized.
    """
    if self.app is None or self._lock is None:  # pragma: no cover
        raise UserWarning
    while True:
        if self._stopping:
            return
        async with self._lock:
            delay = 1.0  # default: 1 second
            try:
                with wrap2span(
                    name=TaskManagerSpan.NAME_SCAN,
                    kind=Span.KIND_SERVER,
                    # ignore_ctx=True,
                    cls=TaskManagerSpan,
                    app=self.app,
                ) as span:
                    try:
                        tasks, delay = await self._search_and_exec()
                        if len(tasks) == 0:
                            # Nothing executed: drop the span.
                            span.skip()
                    except Exception as err:
                        span.error(err)
                        self.app.log_err(err)
                    finally:
                        if not self._stopping:
                            span.annotate(
                                TaskManagerSpan.ANN_NEXT_SCAN,
                                'next: %s' % delay,
                            )
            finally:
                if not self._stopping:
                    self._scan_fut = None
        # Sleep (outside the lock) until the next scan is due.
        eta = time() + delay
        self.stamp_early = eta
        sleep = self.stamp_early - time()
        if sleep > 0:
            try:
                # Stored so a stop request can cancel the sleep early.
                self._scan_sleep_fut = asyncio.create_task(
                    asyncio.sleep(sleep))
                await self._scan_sleep_fut
            except asyncio.CancelledError:
                pass
            self._scan_sleep_fut = None
async def prepare(self) -> None:
    """Connect to the database, retrying up to connect_max_attempts.

    The (presumably blocking) ``self._connect`` runs in the default
    executor so the event loop stays responsive.

    :raises UserWarning: if the component is not attached to an app.
    :raises PrepareError: if every connection attempt fails.
    """
    if self.app is None:  # pragma: no cover
        raise UserWarning('Unattached component')
    for attempt in range(self.cfg.connect_max_attempts):
        try:
            self.app.log_info("Connecting to %s", self.cfg.dsn)
            with wrap2span(
                name=OraSpan.NAME_CONNECT,
                kind=OraSpan.KIND_CLIENT,
                app=self.app,
            ):
                await self.app.loop.run_in_executor(None, self._connect)
            self.app.log_info("Connected to %s", self.cfg.dsn)
            return
        except Exception as e:
            self.app.log_err(str(e))
            # Fix: only sleep between attempts. The original also slept
            # after the final failed attempt, pointlessly delaying the
            # PrepareError below by connect_retry_delay seconds.
            if attempt + 1 < self.cfg.connect_max_attempts:
                await asyncio.sleep(self.cfg.connect_retry_delay)
    raise PrepareError("Could not connect to %s" % self.cfg.dsn)
async def prepare(
    self,
    query: str,
    timeout: Optional[float] = None,
    query_name: Optional[str] = None,
) -> PreparedStatement:
    """Prepare *query* on this connection.

    :param query: SQL text to prepare.
    :param timeout: prepare timeout in seconds, passed to asyncpg.
    :param query_name: optional name recorded as a span tag and kept on
        the returned statement.
    :return: a PreparedStatement wrapping the asyncpg statement.
    """
    with wrap2span(
        name=PgSpan.NAME_PREPARE,
        kind=PgSpan.KIND_CLIENT,
        cls=PgSpan,
        app=self._db.app,
    ) as span:
        span.set_name4adapter(self._db.app.logger.ADAPTER_PROMETHEUS,
                              PgSpan.P8S_NAME_PREPARE)
        if query_name is not None:
            span.tag(PgSpan.TAG_QUERY_NAME, query_name)
        async with self._lock:
            span.annotate(PgSpan.ANN_PID, self.pid)
            span.annotate4adapter(
                self._db.app.logger.ADAPTER_ZIPKIN,
                PgSpan.ANN_PID,
                self._json_encode({'pid': str(self.pid)}),
            )
            if self._db.cfg.log_query:
                span.annotate(PgSpan.ANN_QUERY, query)
                span.annotate4adapter(
                    self._db.app.logger.ADAPTER_ZIPKIN,
                    PgSpan.ANN_QUERY,
                    self._json_encode({'query': dedent(query).strip()}),
                )
            pg_stmt = await self._conn.prepare(query, timeout=timeout)
            # Reaches into asyncpg private state for the server-side
            # statement name.
            stmt_name = pg_stmt._state.name
            stmt = PreparedStatement(self, pg_stmt, stmt_name, query_name,
                                     self._json_encode)
            span.annotate(PgSpan.ANN_STMT_NAME, stmt_name)
            span.annotate4adapter(
                self._db.app.logger.ADAPTER_ZIPKIN,
                PgSpan.ANN_STMT_NAME,
                self._json_encode({'statement_name': stmt_name}),
            )
            return stmt
async def exchange_declare(
    self,
    exchange: str,
    exchange_type: str = 'direct',
    passive: bool = False,
    durable: bool = False,
    auto_delete: bool = False,
    internal: bool = False,
    arguments: Optional[dict] = None,
) -> pika.frame.Method:
    """Declare an exchange and wait for Exchange.DeclareOk.

    :param exchange: exchange name.
    :param exchange_type: exchange type ('direct' by default).
    :param passive: only check whether the exchange exists.
    :param durable: survive broker restarts.
    :param auto_delete: delete once no longer used.
    :param internal: exchange is for broker-internal use.
    :param arguments: optional declaration arguments.
    :return: the Exchange.DeclareOk method frame.
    :raises UserWarning: if no amqp component is attached.
    """
    # Fix: the original evaluated ``app=self.amqp.app`` in the
    # wrap2span() call before checking ``self.amqp is None``, so the
    # guard could never fire (an AttributeError would be raised first).
    # Check before the first dereference.
    if self.amqp is None:
        raise UserWarning
    with wrap2span(
        name=AmqpSpan.NAME_DECLARE_EXCHANGE,
        kind=AmqpSpan.KIND_CLIENT,
        cls=AmqpSpan,
        app=self.amqp.app,
    ) as span:
        async with self._lock:
            span.tag(AmqpSpan.TAG_CHANNEL_NUMBER,
                     str(self._ch.channel_number))
            # Resolved by _on_exchange_declareok on broker confirm.
            fut: asyncio.Future = asyncio.Future()
            cb = partial(self._on_exchange_declareok, fut)
            self._ch.exchange_declare(
                exchange=exchange,
                exchange_type=exchange_type,
                passive=passive,
                durable=durable,
                auto_delete=auto_delete,
                internal=internal,
                arguments=arguments,
                callback=cb,
            )
            # Wait for DeclareOk or channel closure, whichever is first.
            await asyncio.wait(
                [fut, self._close_fut],
                timeout=self.amqp.cfg.exchange_declare_timeout,
                return_when=asyncio.FIRST_COMPLETED,
            )
            if self._close_fut.done():
                # Channel closed while waiting: surface its error.
                self._close_fut.result()
            return fut.result()
async def consume(
    self,
    queue: str,
    on_message_callback: Callable[
        [bytes, pika.spec.Basic.Deliver, pika.spec.BasicProperties],
        Awaitable[None],
    ],
    auto_ack: bool = False,
    exclusive: bool = False,
    consumer_tag: Optional[str] = None,
    arguments: Optional[dict] = None,
) -> pika.frame.Method:
    """Start consuming *queue* and wait for Basic.ConsumeOk.

    The consumer tag returned by pika is stored on the channel so that
    ``cancel()`` can use it later.

    :param queue: queue name to consume from.
    :param on_message_callback: awaited for every delivered message.
    :param auto_ack: let the broker ack messages on delivery.
    :param exclusive: request exclusive consumer access.
    :param consumer_tag: optional explicit consumer tag.
    :param arguments: optional consume arguments.
    :return: the Basic.ConsumeOk method frame.
    """
    with wrap2span(
        name=AmqpSpan.NAME_CONSUME,
        kind=AmqpSpan.KIND_CLIENT,
        cls=AmqpSpan,
        app=self.amqp.app,
    ) as span:
        async with self._lock:
            span.tag(AmqpSpan.TAG_CHANNEL_NUMBER,
                     str(self._ch.channel_number))
            # Resolved by _on_basic_consume_ok on broker confirm.
            fut: asyncio.Future = asyncio.Future()
            cb = partial(self._on_basic_consume_ok, fut)
            self._consumer_tag = self._ch.basic_consume(
                queue=queue,
                on_message_callback=partial(self._on_message_callback,
                                            on_message_callback),
                auto_ack=auto_ack,
                exclusive=exclusive,
                consumer_tag=consumer_tag,
                arguments=arguments,
                callback=cb,
            )
            self.amqp.app.log_info('Consuming %s', queue)
            # NOTE(review): reuses bind_timeout — confirm a dedicated
            # consume timeout is not intended.
            await asyncio.wait(
                [fut, self._close_fut],
                timeout=self.amqp.cfg.bind_timeout,
                return_when=asyncio.FIRST_COMPLETED,
            )
            if self._close_fut.done():
                # Channel closed while waiting: surface its error.
                self._close_fut.result()
            return fut.result()
async def _connect(self) -> None:
    """Create the asyncpg connection pool for this component.

    :raises UserWarning: if the component is not attached to an app.
    """
    app = self.app
    if app is None:  # pragma: no cover
        raise UserWarning('Unattached component')
    app.log_info("Connecting to %s", self._masked_url)
    cfg = self.cfg
    # Collect pool settings first, then create the pool inside the span.
    pool_kwargs = dict(
        dsn=cfg.url,
        max_size=cfg.pool_max_size,
        min_size=cfg.pool_min_size,
        max_queries=cfg.pool_max_queries,
        max_inactive_connection_lifetime=(
            cfg.pool_max_inactive_connection_lifetime),
        statement_cache_size=cfg.statement_cache_size,
        init=Postgres._conn_init,
    )
    with wrap2span(
        name=PgSpan.NAME_CONNECT,
        kind=PgSpan.KIND_CLIENT,
        app=app,
    ):
        self._pool = await asyncpg.create_pool(**pool_kwargs)
    app.log_info("Connected to %s", self._masked_url)
async def fetchone(
    self,
    *,
    model_cls: Optional[Type[BaseModel]] = None,
    query_name: Optional[str] = None,
) -> Optional[Union[dict, BaseModel]]:
    """Fetch the next row from the cursor as a dict (or model).

    Column names are lower-cased to form the dict keys.

    :param model_cls: if given, the row dict is converted to this model.
    :param query_name: optional name recorded as a span tag.
    :return: the row as dict/model, or None when no rows remain.
    """
    with wrap2span(
        name=OraSpan.NAME_FETCH,
        kind=OraSpan.KIND_CLIENT,
        cls=OraSpan,
        app=self._conn._db.app,
    ) as span:
        span.set_name4adapter(
            self._conn._db.app.logger.ADAPTER_PROMETHEUS,
            OraSpan.P8S_NAME_FETCH,
        )
        if query_name is not None:
            span.tag(OraSpan.TAG_QUERY_NAME, query_name)
        async with self._lock:
            # The cursor fetch is blocking; run it in the executor.
            row = await self._loop.run_in_executor(
                None, self._ora_cur.fetchone
            )
            res: Optional[Union[dict, BaseModel]] = None
            if row is not None:
                column_names = [
                    d[0].lower() for d in self._ora_cur.description
                ]
                res = dict(zip(column_names, row))
            if self._conn._db.cfg.log_result:
                span.annotate(OraSpan.ANN_RESULT, json_encode(res))
                span.annotate4adapter(
                    self._conn._db.app.logger.ADAPTER_ZIPKIN,
                    OraSpan.ANN_RESULT,
                    json_encode({'result': str(res)}),
                )
            if model_cls is not None and res is not None:
                return model_cls(**res)  # type: ignore
            else:
                return res
async def queue_declare(
    self,
    queue: str,
    passive: bool = False,
    durable: bool = False,
    exclusive: bool = False,
    auto_delete: bool = False,
    arguments: Optional[dict] = None,
) -> pika.frame.Method:
    """Declare *queue* and wait for Queue.DeclareOk.

    :param queue: queue name.
    :param passive: only check whether the queue exists.
    :param durable: survive broker restarts.
    :param exclusive: restricted to this connection.
    :param auto_delete: delete when the last consumer disconnects.
    :param arguments: optional declaration arguments.
    :return: the Queue.DeclareOk method frame.
    """
    with wrap2span(
        name=AmqpSpan.NAME_DECLARE_QUEUE,
        kind=AmqpSpan.KIND_CLIENT,
        cls=AmqpSpan,
        app=self.amqp.app,
    ) as span:
        async with self._lock:
            span.tag(AmqpSpan.TAG_CHANNEL_NUMBER,
                     str(self._ch.channel_number))
            # Resolved by _on_queue_declareok on broker confirm.
            fut: asyncio.Future = asyncio.Future()
            cb = partial(self._on_queue_declareok, fut)
            self._ch.queue_declare(
                queue=queue,
                passive=passive,
                durable=durable,
                exclusive=exclusive,
                auto_delete=auto_delete,
                arguments=arguments,
                callback=cb,
            )
            # Wait for DeclareOk or channel closure, whichever is first.
            await asyncio.wait(
                [fut, self._close_fut],
                timeout=self.amqp.cfg.queue_declare_timeout,
                return_when=asyncio.FIRST_COMPLETED,
            )
            if self._close_fut.done():
                # Channel closed while waiting: surface its error.
                self._close_fut.result()
            return fut.result()
async def _scan(self) -> List[int]:
    """Run one scan: search for due tasks, execute them, and schedule
    the next scan via ``loop.call_at``.

    :return: ids of the executed tasks; empty when nothing was due or
        the scan failed (the error is logged, not raised).
    :raises UserWarning: if the component is not fully initialized.
    """
    if self.app is None or self._lock is None:  # pragma: no cover
        raise UserWarning
    if self._stopping:
        return []
    async with self._lock:
        delay = 1.0  # default: 1 second
        try:
            with wrap2span(
                name=TaskManagerSpan.NAME_SCAN,
                kind=Span.KIND_SERVER,
                # ignore_ctx=True,
                cls=TaskManagerSpan,
                app=self.app,
            ) as span:
                try:
                    tasks, delay = await self._search_and_exec()
                    if len(tasks) == 0:
                        # Nothing executed: drop the span from output.
                        span.skip()
                    return [task.id for task in tasks]
                except Exception as err:
                    span.error(err)
                    self.app.log_err(err)
                finally:
                    if not self._stopping:
                        span.annotate(
                            TaskManagerSpan.ANN_NEXT_SCAN,
                            'next: %s' % delay,
                        )
            # Reached only when _search_and_exec raised (error was
            # swallowed and logged above).
            return []
        finally:
            if not self._stopping:
                # Runs even on the early returns above: always schedule
                # the next scan.
                self._scan_fut = None
                eta = self.loop.time() + delay
                self.stamp_early = eta
                self.loop.call_at(eta, self._scan_later, eta)
async def _exec(self, parent_trace_id: str, task: Task) -> None:
    """Execute one scheduled task and persist its outcome.

    Runs the handler, writes a run log entry (task_log_add), then
    archives the task, schedules a retry, or marks it failed.

    :param parent_trace_id: trace id of the scan that picked this task.
    :param task: the task to execute.
    :raises UserWarning: if the db/executor are not initialized.
    """
    with wrap2span(
        name=TaskManagerSpan.NAME_EXEC,
        kind=Span.KIND_SERVER,
        ignore_ctx=True,
        cls=TaskManagerSpan,
        app=self.app,
    ) as span:
        if task.trace_id is not None:
            # Continue the trace the task was scheduled under.
            span.trace_id = task.trace_id
            span.parent_id = task.trace_span_id
        if self._db is None or self._executor is None:  # pragma: no cover
            raise UserWarning
        span.name = '%s::%s' % (TaskManagerSpan.NAME_EXEC, task.name)
        span.tag(TaskManagerSpan.TAG_PARENT_TRACE_ID, parent_trace_id)
        span.tag(TaskManagerSpan.TAG_TASK_ID, task.id)
        span.tag(TaskManagerSpan.TAG_TASK_NAME, task.name)
        try:
            err: Optional[Exception] = None
            err_str: Optional[str] = None
            err_trace: Optional[str] = None
            res: Any = None
            time_begin = time.time()
            try:
                res = await self._executor.exec(task.name,
                                                kwargs=task.params)
            except Exception as e:
                err = e
                if isinstance(err, Retry):
                    # Retry wraps the underlying error; log that one.
                    err_str = str(err.err)
                else:
                    err_str = str(err)
                err_trace = traceback.format_exc()
                span.error(err)
                self.app.log_err(err)
            time_finish = time.time()
            # Persist the run outcome regardless of success/failure.
            await self._db.task_log_add(
                task.id,
                task.eta,
                time_begin,
                time_finish,
                res,
                err_str,
                err_trace,
                lock=True,
            )
            if task.retries is None:
                retries = 0
            else:
                retries = task.retries + 1
            if err is not None:
                if isinstance(err, Retry):
                    if retries >= task.max_retries:
                        # Retries exhausted: archive as failed.
                        await self._db.task_move_arch(
                            task.id,
                            STATUS_ERROR,
                            retries,
                            lock=True,
                            with_trace_id=task.trace_id is not None,
                        )
                    else:
                        await self._db.task_retry(
                            task.id,
                            retries,
                            task.retry_delay.total_seconds(),
                            lock=True,
                        )
                else:
                    # Non-retryable error: archive as failed.
                    await self._db.task_move_arch(
                        task.id,
                        STATUS_ERROR,
                        retries,
                        lock=True,
                        with_trace_id=task.trace_id is not None,
                    )
            else:
                await self._db.task_move_arch(
                    task.id,
                    STATUS_SUCCESSFUL,
                    retries,
                    lock=True,
                    with_trace_id=task.trace_id is not None,
                )
        except Exception as err:
            span.error(err)
            self.app.log_err(err)
            raise
async def schedule(
    self,
    func: TaskHandler,
    params: dict,
    reference: Optional[str] = None,
    eta: Optional[ETA] = None,
    max_retries: Optional[int] = None,
    retry_delay: Optional[float] = None,
    propagate_trace: bool = False,
) -> int:
    """Schedule a task for (possibly deferred) execution.

    :param func: handler object or its registered rpc name.
    :param params: handler keyword arguments stored with the task.
    :param reference: optional external reference string.
    :param eta: earliest execution time — unix timestamp or datetime.
    :param max_retries: overrides the handler's __task_max_retries__
        (default 0).
    :param retry_delay: overrides the handler's __task_retry_delay__
        (default 60.0 seconds).
    :param propagate_trace: store the current span's trace ids with the
        task so execution continues the trace.
    :return: id of the created task.
    :raises UserWarning: on missing db, invalid handler or invalid eta.
    """
    with wrap2span(
        name=TaskManagerSpan.NAME_SCHEDULE,
        kind=Span.KIND_CLIENT,
        cls=TaskManagerSpan,
        app=self.app,
    ) as span:
        if self._db is None:  # pragma: no cover
            raise UserWarning
        if not isinstance(func, str):
            if not hasattr(func, '__rpc_name__'):  # pragma: no cover
                raise UserWarning('Invalid task handler')
            func_name = getattr(func, '__rpc_name__')
        else:
            func_name = func
        if max_retries is None:
            max_retries = getattr(func, '__task_max_retries__', 0)
        if retry_delay is None:
            retry_delay = getattr(func, '__task_retry_delay__', 60.0)
        span.name = '%s::%s' % (TaskManagerSpan.NAME_SCHEDULE, func_name)
        # Normalize eta to an aware UTC datetime (or None = now).
        eta_dt: Optional[datetime] = None
        if isinstance(eta, int) or isinstance(eta, float):
            eta_dt = datetime.fromtimestamp(eta, tz=timezone.utc)
        elif isinstance(eta, datetime):
            eta_dt = eta
        elif eta is not None:  # pragma: no cover
            raise UserWarning
        if eta_dt is not None:
            span.annotate(TaskManagerSpan.ANN_ETA,
                          'ETA: %s' % eta_dt.isoformat())
        add_params: List[Any] = [
            eta_dt,
            func_name,
            params,
            reference,
            max_retries,
            retry_delay,
        ]
        if propagate_trace:
            add_params.append(span.trace_id)
            add_params.append(span.id)
        task_id, task_delay = await self._db.task_add(
            *add_params,
            lock=True,
        )
        span.annotate(TaskManagerSpan.ANN_DELAY, 'Delay: %s' % task_delay)
        # Wake the scanner when this task becomes due.
        eta_float = self.loop.time() + task_delay
        self.stamp_early = eta_float
        self.loop.call_at(eta_float, self._scan_later, eta_float)
        return task_id
async def request(
    self,
    method: str,
    url: StrOrURL,
    *,
    body: Optional[bytes] = None,
    headers: Optional[Dict[str, str]] = None,
    timeout: Optional[ClientTimeout] = None,
    ssl: Optional[SSLContext] = None,
    session_kwargs: Optional[Dict[str, Any]] = None,
    request_kwargs: Optional[Dict[str, Any]] = None,
    propagate_trace: bool = True,
) -> ClientResponse:
    """Perform an HTTP request wrapped in a client span.

    :param method: HTTP method.
    :param url: target URL (str or yarl.URL).
    :param body: optional raw request body.
    :param headers: optional extra request headers (never mutated).
    :param timeout: aiohttp ClientTimeout; defaults to ClientTimeout().
    :param ssl: optional SSL context for the request.
    :param session_kwargs: extra keyword args for ClientSession().
    :param request_kwargs: extra keyword args for session.request().
    :param propagate_trace: inject tracing headers into the request.
    :return: the aiohttp ClientResponse.
    """
    span: 'ClientHttpSpan'
    with wrap2span(  # type: ignore
            kind=HttpSpan.KIND_CLIENT,
            cls=ClientHttpSpan,
            app=self.app) as span:
        if not isinstance(url, URL):
            url = URL(url)
        span.tag(HttpSpan.TAG_HTTP_URL, self._mask_url(url))
        span.tag(HttpSpan.TAG_HTTP_HOST, url.host)
        span.tag(HttpSpan.TAG_HTTP_METHOD, method)
        span.tag(HttpSpan.TAG_HTTP_PATH, url.path)
        if body is not None:
            span.tag(HttpSpan.TAG_HTTP_REQUEST_SIZE, len(body))
        else:
            span.tag(HttpSpan.TAG_HTTP_REQUEST_SIZE, 0)
        if timeout is None:
            timeout = ClientTimeout()
        # Fix: work on a copy — the original mutated the caller's dict,
        # leaking trace headers and the default User-Agent back into the
        # ``headers`` argument supplied by the caller.
        headers = dict(headers) if headers is not None else {}
        if propagate_trace:
            headers.update(span.to_headers())
        if 'User-Agent' not in headers:
            headers['User-Agent'] = USER_AGENT
        async with ClientSession(
            timeout=timeout,
            **(session_kwargs or {}),
        ) as session:
            ts1 = time.time()
            resp = await session.request(
                method=method,
                url=url,
                data=body,
                headers=headers,
                ssl=ssl,
                **(request_kwargs or {}),
            )
            ts2 = time.time()
            if self.cfg.log_req_hdrs:
                self._span_annotate_req_hdrs(
                    span, resp.request_info.headers, ts1)
            if self.cfg.log_req_body:
                self._span_annotate_req_body(span, body, ts1)
            if self.cfg.log_resp_hdrs:
                self._span_annotate_resp_hdrs(span, resp.headers, ts2)
            if self.cfg.log_resp_body:
                resp_body = await resp.read()
                self._span_annotate_resp_body(span, resp_body, ts2)
            span.tag(HttpSpan.TAG_HTTP_RESPONSE_SIZE, resp.content_length)
            span.tag(HttpSpan.TAG_HTTP_STATUS_CODE, str(resp.status))
            return resp
async def publish(
    self,
    exchange: str,
    routing_key: str,
    body: bytes,
    properties: Optional[pika.spec.BasicProperties] = None,
    mandatory: bool = False,
    propagate_trace: bool = True,
) -> None:
    """Publish a message, optionally injecting trace headers.

    If this channel is closed and it has a name, the publish is retried
    on a live channel with the same name.

    :param exchange: target exchange.
    :param routing_key: routing key.
    :param body: raw message payload.
    :param properties: AMQP properties; trace headers are merged in.
    :param mandatory: AMQP mandatory publish flag.
    :param propagate_trace: inject the current span's headers.
    """
    with wrap2span(
        name=AmqpSpan.NAME_PUBLISH,
        kind=AmqpSpan.KIND_CLIENT,
        cls=AmqpOutSpan,
        app=self.amqp.app,
    ) as span:
        span.tag(AmqpSpan.TAG_CHANNEL_NUMBER, str(self._ch.channel_number))
        span.tag(AmqpSpan.TAG_EXCHANGE, str(exchange))
        span.tag(AmqpSpan.TAG_ROUTING_KEY, str(routing_key))
        span.tag(AmqpSpan.TAG_URL, self._conn.pika._masked_url)
        with timeout(self.amqp.cfg.publish_timeout):
            # Publishes directly when the channel is open OR unnamed
            # (an unnamed channel cannot be looked up again below).
            # NOTE(review): confirm the `or` is intended and not `and`.
            if not self._ch.is_closed or self.name is None:
                if propagate_trace:
                    hdrs = span.to_headers()
                    if properties is None:
                        properties = pika.spec.BasicProperties(
                            headers=hdrs)
                    elif properties.headers is None:
                        properties.headers = hdrs
                    else:
                        properties.headers = dict_merge(
                            properties.headers, hdrs)
                if self.amqp.cfg.log_out_props:
                    span.annotate(AmqpSpan.ANN_OUT_PROPS,
                                  props2ann(properties))
                    span.annotate4adapter(
                        self.amqp.app.logger.ADAPTER_ZIPKIN,
                        AmqpSpan.ANN_OUT_PROPS,
                        self._json_encode({
                            "properties": repr({
                                k: v
                                for k, v in properties.__dict__.items()
                                if v is not None
                            })
                        }),
                    )
                if self.amqp.cfg.log_out_body:
                    _body = decode_bytes(body)
                    span.annotate(AmqpSpan.ANN_OUT_BODY, _body)
                    span.annotate4adapter(
                        self.amqp.app.logger.ADAPTER_ZIPKIN,
                        AmqpSpan.ANN_OUT_BODY,
                        self._json_encode({"message": _body}),
                    )
                self._ch.basic_publish(exchange, routing_key, body,
                                       properties, mandatory)
            else:
                # Channel closed: poll for a live channel with the same
                # name and delegate the publish to it.
                while True:
                    ch = self.amqp.channel(self.name)
                    if ch is None:
                        await asyncio.sleep(0.1)
                        continue
                    return await ch.publish(
                        exchange,
                        routing_key,
                        body,
                        properties,
                        mandatory,
                        propagate_trace,
                    )