async def handle_queue_loop() -> None:
    """Drain the event-persist queue for `room_id`, one batch at a time.

    NOTE(review): closure — relies on `self` and `room_id` from the
    enclosing scope; presumably launched as a background process by the
    enclosing method. Confirm at the definition site.
    """
    try:
        queue = self._get_drainining_queue(room_id)
        for item in queue:
            try:
                # Open a span that follows from every span which queued
                # events into this batch, so the traces stay linked.
                with opentracing.start_active_span_follows_from(
                    "persist_event_batch",
                    item.parent_opentracing_span_contexts,
                    inherit_force_tracing=True,
                ) as scope:
                    if scope:
                        # Stash the span context on the item so that waiters
                        # (see add_to_queue) can link back to this batch.
                        item.opentracing_span_context = scope.span.context

                    ret = await self._per_item_callback(
                        item.events_and_contexts, item.backfilled)
            except Exception:
                # Propagate the failure to everyone awaiting this batch.
                # PreserveLoggingContext: deferred callbacks must not run in
                # our logcontext (synapse logcontext rules).
                with PreserveLoggingContext():
                    item.deferred.errback()
            else:
                with PreserveLoggingContext():
                    item.deferred.callback(ret)
    finally:
        # Clean up our book-keeping. If new items were added while we were
        # finishing, put the queue back so a new processor can pick it up.
        remaining_queue = self._event_persist_queues.pop(room_id, None)
        if remaining_queue:
            self._event_persist_queues[room_id] = remaining_queue

        self._currently_persisting_rooms.discard(room_id)
async def add_to_queue(
    self,
    room_id: str,
    events_and_contexts: Iterable[Tuple[EventBase, EventContext]],
    backfilled: bool,
) -> _PersistResult:
    """Add events to the queue, with the given persist_event options.

    If we are not already processing events in this room, starts off a
    background process to do so, calling the per_item_callback for each
    item.

    Args:
        room_id: room the events belong to
        events_and_contexts: the events to persist, with their contexts
        backfilled: whether these are backfilled events

    Returns:
        the result returned by the `_per_item_callback` passed to
        `__init__`.
    """
    queue = self._event_persist_queues.setdefault(room_id, deque())

    # if the last item in the queue has the same `backfilled` setting,
    # we can just add these new events to that item.
    if queue and queue[-1].backfilled == backfilled:
        end_item = queue[-1]
    else:
        # need to make a new queue item
        deferred: ObservableDeferred[_PersistResult] = ObservableDeferred(
            defer.Deferred(), consumeErrors=True)

        end_item = _EventPersistQueueItem(
            events_and_contexts=[],
            backfilled=backfilled,
            deferred=deferred,
        )
        queue.append(end_item)

    # add our events to the queue item
    end_item.events_and_contexts.extend(events_and_contexts)

    # also add our active opentracing span to the item so that we get a link back
    span = opentracing.active_span()
    if span:
        end_item.parent_opentracing_span_contexts.append(span.context)

    # start a processor for the queue, if there isn't one already
    self._handle_queue(room_id)

    # wait for the queue item to complete
    res = await make_deferred_yieldable(end_item.deferred.observe())

    # add another opentracing span which links to the persist trace.
    # (the context was stashed on the item by the queue processor.)
    with opentracing.start_active_span_follows_from(
        "persist_event_batch_complete",
        (end_item.opentracing_span_context, )):
        pass

    return res
def test_nested_spans(self) -> None:
    """Two spans started one inside the other should nest correctly."""
    with LoggingContext("root context"):
        with start_active_span("root span", tracer=self._tracer) as outer_scope:
            # the root span must be active as soon as it is started
            self.assertEqual(self._tracer.active_span, outer_scope.span)

            child1_scope = start_active_span(
                "child1",
                tracer=self._tracer,
            )
            self.assertEqual(
                self._tracer.active_span,
                child1_scope.span,
                "child1 was not activated",
            )
            self.assertEqual(
                child1_scope.span.context.parent_id,
                outer_scope.span.context.span_id,
            )

            child2_scope = start_active_span_follows_from(
                "child2",
                contexts=(child1_scope,),
                tracer=self._tracer,
            )
            self.assertEqual(self._tracer.active_span, child2_scope.span)
            self.assertEqual(
                child2_scope.span.context.parent_id,
                child1_scope.span.context.span_id,
            )

            # closing both child scopes hands control back to the root span
            with child1_scope, child2_scope:
                pass

            self.assertEqual(self._tracer.active_span, outer_scope.span)
            self.assertIsNotNone(child2_scope.span.end_time)
            self.assertIsNotNone(child1_scope.span.end_time)

        self.assertIsNone(self._tracer.active_span)

    # spans are reported in the order in which they finished
    self.assertEqual(
        self._reporter.get_spans(),
        [child2_scope.span, child1_scope.span, outer_scope.span],
    )
async def new_func(request: SynapseRequest, *args: Any, **kwargs: str) -> Optional[Tuple[int, Any]]:
    """A callback which can be passed to HttpServer.RegisterPaths.

    Authenticates the incoming federation request, sets up opentracing
    scopes, applies rate limiting, and then dispatches to `func`.

    Args:
        request: the incoming request
        *args: unused?
        **kwargs: the dict mapping keys to path components as specified in
            the path match regexp.

    Returns:
        (response code, response object) as returned by the callback method.
        None if the request has already been handled.
    """
    content = None
    if request.method in [b"PUT", b"POST"]:
        # TODO: Handle other method types? other content types?
        content = parse_json_object_from_request(request)

    try:
        with start_active_span("authenticate_request"):
            origin: Optional[
                str] = await authenticator.authenticate_request(
                request, content)
    except NoAuthenticationError:
        origin = None
        if self.REQUIRE_AUTH:
            logger.warning(
                "authenticate_request failed: missing authentication")
            raise
    except Exception as e:
        logger.warning("authenticate_request failed: %s", e)
        raise

    # update the active opentracing span with the authenticated entity
    set_tag("authenticated_entity", origin)

    # if the origin is authenticated and whitelisted, use its span context
    # as the parent.
    context = None
    if origin and whitelisted_homeserver(origin):
        context = span_context_from_request(request)

    if context:
        servlet_span = active_span()
        # a scope which uses the origin's context as a parent
        processing_start_time = time.time()
        scope = start_active_span_follows_from(
            "incoming-federation-request",
            child_of=context,
            contexts=(servlet_span, ),
            start_time=processing_start_time,
        )
    else:
        # just use our context as a parent
        scope = start_active_span("incoming-federation-request", )

    try:
        with scope:
            if origin and self.RATELIMIT:
                with ratelimiter.ratelimit(origin) as d:
                    await d
                    if request._disconnected:
                        # no point doing the work if the client has gone away
                        logger.warning(
                            "client disconnected before we started processing "
                            "request")
                        return None
                    response = await func(origin, content, request.args,
                                          *args, **kwargs)
            else:
                response = await func(origin, content, request.args,
                                      *args, **kwargs)
    finally:
        # if we used the origin's context as the parent, add a new span using
        # the servlet span as a parent, so that we have a link
        if context:
            scope2 = start_active_span_follows_from(
                "process-federation_request",
                contexts=(scope.span, ),
                start_time=processing_start_time,
            )
            scope2.close()

    return response
def send_new_transaction(self, destination, pending_pdus, pending_edus):
    """Build and send a single federation transaction to `destination`.

    NOTE(review): this is a Twisted generator-style coroutine (it `yield`s
    a Deferred) — presumably decorated with `@defer.inlineCallbacks` at the
    definition site; confirm upstream.

    Args:
        destination: the server name to send to
        pending_pdus: list of (pdu, order) tuples; sorted by order below
        pending_edus: list of EDUs to send

    Returns:
        True iff the transaction was accepted by the remote (HTTP 200)
    """
    # Make a transaction-sending opentracing span. This span follows on from
    # all the edus in that transaction. This needs to be done since there is
    # no active span here, so if the edus were not received by the remote the
    # span would have no causality and it would be forgotten.
    # NOTE(review): span_contexts is a plain list here (an older comment
    # claimed it was a lazily-evaluated generator; it is not).
    span_contexts = []
    keep_destination = whitelisted_homeserver(destination)
    for edu in pending_edus:
        context = edu.get_context()
        if context:
            span_contexts.append(extract_text_map(json.loads(context)))
        if keep_destination:
            edu.strip_context()

    with start_active_span_follows_from("send_transaction", span_contexts):
        # Sort based on the order field
        pending_pdus.sort(key=lambda t: t[1])
        pdus = [x[0] for x in pending_pdus]
        edus = pending_edus
        success = True

        logger.debug("TX [%s] _attempt_new_transaction", destination)

        txn_id = str(self._next_txn_id)

        logger.debug(
            "TX [%s] {%s} Attempting new transaction (pdus: %d, edus: %d)",
            destination, txn_id, len(pdus), len(edus),
        )

        transaction = Transaction.create_new(
            origin_server_ts=int(self.clock.time_msec()),
            transaction_id=txn_id,
            origin=self._server_name,
            destination=destination,
            pdus=pdus,
            edus=edus,
        )

        self._next_txn_id += 1

        logger.info(
            "TX [%s] {%s} Sending transaction [%s], (PDUs: %d, EDUs: %d)",
            destination, txn_id, transaction.transaction_id,
            len(pdus), len(edus),
        )

        # Actually send the transaction

        # FIXME (erikj): This is a bit of a hack to make the Pdu age
        # keys work
        def json_data_cb():
            # Serialise the transaction lazily, rewriting absolute "age_ts"
            # timestamps into relative "age" values at send time.
            data = transaction.get_dict()
            now = int(self.clock.time_msec())
            if "pdus" in data:
                for p in data["pdus"]:
                    if "age_ts" in p:
                        unsigned = p.setdefault("unsigned", {})
                        unsigned["age"] = now - int(p["age_ts"])
                        del p["age_ts"]
            return data

        try:
            response = yield self._transport_layer.send_transaction(
                transaction, json_data_cb)
            code = 200
        except HttpResponseException as e:
            code = e.code
            response = e.response

            # auth errors / not-found / rate-limit / server errors: give up
            # on the whole transaction and let the caller handle it
            if e.code in (401, 404, 429) or 500 <= e.code:
                logger.info(
                    "TX [%s] {%s} got %d response", destination, txn_id, code)
                raise e

        logger.info(
            "TX [%s] {%s} got %d response", destination, txn_id, code)

        if code == 200:
            # per-event errors reported by the remote are logged but do not
            # mark the transaction as failed
            for e_id, r in response.get("pdus", {}).items():
                if "error" in r:
                    logger.warning(
                        "TX [%s] {%s} Remote returned error for %s: %s",
                        destination, txn_id, e_id, r,
                    )
        else:
            for p in pdus:
                logger.warning(
                    "TX [%s] {%s} Failed to send event %s",
                    destination, txn_id, p.event_id,
                )
            success = False

        set_tag(tags.ERROR, not success)
        return success
async def send_new_transaction(
    self,
    destination: str,
    pdus: List[EventBase],
    edus: List[Edu],
) -> None:
    """Build and send a single federation transaction to `destination`.

    Args:
        destination: The destination to send to (e.g. 'example.org')
        pdus: In-order list of PDUs to send
        edus: List of EDUs to send

    Raises:
        HttpResponseException: if the remote rejected the transaction.
    """
    # Make a transaction-sending opentracing span. This span follows on from
    # all the edus in that transaction. This needs to be done since there is
    # no active span here, so if the edus were not received by the remote the
    # span would have no causality and it would be forgotten.
    span_contexts = []
    keep_destination = whitelisted_homeserver(destination)
    for edu in edus:
        context = edu.get_context()
        if context:
            span_contexts.append(
                extract_text_map(json_decoder.decode(context)))
        if keep_destination:
            edu.strip_context()

    with start_active_span_follows_from("send_transaction", span_contexts):
        logger.debug("TX [%s] _attempt_new_transaction", destination)

        txn_id = str(self._next_txn_id)

        logger.debug(
            "TX [%s] {%s} Attempting new transaction (pdus: %d, edus: %d)",
            destination, txn_id, len(pdus), len(edus),
        )

        transaction = Transaction.create_new(
            origin_server_ts=int(self.clock.time_msec()),
            transaction_id=txn_id,
            origin=self._server_name,
            destination=destination,
            pdus=pdus,
            edus=edus,
        )

        self._next_txn_id += 1

        logger.info(
            "TX [%s] {%s} Sending transaction [%s], (PDUs: %d, EDUs: %d)",
            destination, txn_id, transaction.transaction_id,
            len(pdus), len(edus),
        )

        # Actually send the transaction

        # FIXME (erikj): This is a bit of a hack to make the Pdu age
        # keys work
        # FIXME (richardv): I also believe it no longer works. We (now?) store
        # "age_ts" in "unsigned" rather than at the top level. See
        # https://github.com/matrix-org/synapse/issues/8429.
        def json_data_cb():
            # Serialise the transaction lazily, rewriting absolute "age_ts"
            # timestamps into relative "age" values at send time.
            data = transaction.get_dict()
            now = int(self.clock.time_msec())
            if "pdus" in data:
                for p in data["pdus"]:
                    if "age_ts" in p:
                        unsigned = p.setdefault("unsigned", {})
                        unsigned["age"] = now - int(p["age_ts"])
                        del p["age_ts"]
            return data

        try:
            response = await self._transport_layer.send_transaction(
                transaction, json_data_cb)
        except HttpResponseException as e:
            # (removed a dead `response = e.response` assignment here: we
            # re-raise immediately, so the value was never read)
            set_tag(tags.ERROR, True)
            logger.info(
                "TX [%s] {%s} got %d response", destination, txn_id, e.code)
            raise

        logger.info("TX [%s] {%s} got 200 response", destination, txn_id)

        # per-event errors reported by the remote are logged but otherwise
        # ignored: the transaction as a whole succeeded
        for e_id, r in response.get("pdus", {}).items():
            if "error" in r:
                logger.warning(
                    "TX [%s] {%s} Remote returned error for %s: %s",
                    destination, txn_id, e_id, r,
                )

        if pdus and destination in self._federation_metrics_domains:
            last_pdu = pdus[-1]
            last_pdu_ts_metric.labels(server_name=destination).set(
                last_pdu.origin_server_ts / 1000)
async def send_new_transaction(
    self,
    destination: str,
    pdus: List[EventBase],
    edus: List[Edu],
) -> bool:
    """Build and send a single federation transaction to `destination`.

    Args:
        destination: The destination to send to (e.g. 'example.org')
        pdus: In-order list of PDUs to send
        edus: List of EDUs to send

    Returns:
        True iff the transaction was successful

    Raises:
        HttpResponseException: on auth errors, 404, rate-limiting, or 5xx
            responses (other error codes are reported via the return value).
    """
    # Make a transaction-sending opentracing span. This span follows on from
    # all the edus in that transaction. This needs to be done since there is
    # no active span here, so if the edus were not received by the remote the
    # span would have no causality and it would be forgotten.
    span_contexts = []
    keep_destination = whitelisted_homeserver(destination)
    for edu in edus:
        context = edu.get_context()
        if context:
            span_contexts.append(
                extract_text_map(json_decoder.decode(context)))
        if keep_destination:
            edu.strip_context()

    with start_active_span_follows_from("send_transaction", span_contexts):
        success = True

        logger.debug("TX [%s] _attempt_new_transaction", destination)

        txn_id = str(self._next_txn_id)

        logger.debug(
            "TX [%s] {%s} Attempting new transaction (pdus: %d, edus: %d)",
            destination, txn_id, len(pdus), len(edus),
        )

        transaction = Transaction.create_new(
            origin_server_ts=int(self.clock.time_msec()),
            transaction_id=txn_id,
            origin=self._server_name,
            destination=destination,
            pdus=pdus,
            edus=edus,
        )

        self._next_txn_id += 1

        logger.info(
            "TX [%s] {%s} Sending transaction [%s], (PDUs: %d, EDUs: %d)",
            destination, txn_id, transaction.transaction_id,
            len(pdus), len(edus),
        )

        # Actually send the transaction

        # FIXME (erikj): This is a bit of a hack to make the Pdu age
        # keys work
        def json_data_cb():
            # Serialise the transaction lazily, rewriting absolute "age_ts"
            # timestamps into relative "age" values at send time.
            data = transaction.get_dict()
            now = int(self.clock.time_msec())
            if "pdus" in data:
                for p in data["pdus"]:
                    if "age_ts" in p:
                        unsigned = p.setdefault("unsigned", {})
                        unsigned["age"] = now - int(p["age_ts"])
                        del p["age_ts"]
            return data

        try:
            response = await self._transport_layer.send_transaction(
                transaction, json_data_cb)
            code = 200
        except HttpResponseException as e:
            code = e.code
            response = e.response

            # auth errors / not-found / rate-limit / server errors: give up
            # on the whole transaction and let the caller handle it
            if e.code in (401, 404, 429) or 500 <= e.code:
                logger.info(
                    "TX [%s] {%s} got %d response", destination, txn_id, code)
                raise e

        logger.info(
            "TX [%s] {%s} got %d response", destination, txn_id, code)

        if code == 200:
            # per-event errors reported by the remote are logged but do not
            # mark the transaction as failed
            for e_id, r in response.get("pdus", {}).items():
                if "error" in r:
                    logger.warning(
                        "TX [%s] {%s} Remote returned error for %s: %s",
                        destination, txn_id, e_id, r,
                    )
        else:
            for p in pdus:
                logger.warning(
                    "TX [%s] {%s} Failed to send event %s",
                    destination, txn_id, p.event_id,
                )
            success = False

        set_tag(tags.ERROR, not success)

        return success
async def wrap(
    self,
    key: KV,
    callback: Callable[..., Awaitable[RV]],
    *args: Any,
    cache_context: bool = False,
    **kwargs: Any,
) -> RV:
    """Wrap together a *get* and *set* call, taking care of logcontexts

    First looks up the key in the cache, and if it is present makes it
    follow the synapse logcontext rules and returns it.

    Otherwise, makes a call to *callback(*args, **kwargs)*, which should
    follow the synapse logcontext rules, and adds the result to the cache.

    Example usage:

        async def handle_request(request):
            # etc
            return result

        result = await response_cache.wrap(
            key,
            handle_request,
            request,
        )

    Args:
        key: key to get/set in the cache

        callback: function to call if the key is not found in
            the cache

        *args: positional parameters to pass to the callback, if it is used

        cache_context: if set, the callback will be given a `cache_context`
            kw arg, which will be a ResponseCacheContext object.

        **kwargs: named parameters to pass to the callback, if it is used

    Returns:
        The result of the callback (from the cache, or otherwise)
    """
    entry = self._get(key)
    if not entry:
        logger.debug(
            "[%s]: no cached result for [%s], calculating new one", self._name, key
        )
        context = ResponseCacheContext(cache_key=key)
        if cache_context:
            kwargs["cache_context"] = context

        span_context: Optional[opentracing.SpanContext] = None

        async def cb() -> RV:
            # NB it is important that we do not `await` before setting span_context!
            # `run_in_background` below runs this coroutine synchronously up
            # to its first await, so `span_context` is already populated by
            # the time we pass it to `self._set`.
            nonlocal span_context
            with start_active_span(f"ResponseCache[{self._name}].calculate"):
                span = active_span()
                if span:
                    span_context = span.context

                return await callback(*args, **kwargs)

        d = run_in_background(cb)
        entry = self._set(context, d, span_context)
        return await make_deferred_yieldable(entry.result.observe())

    # cache hit: the entry may be complete or still in flight
    result = entry.result.observe()
    if result.called:
        logger.info("[%s]: using completed cached result for [%s]", self._name, key)
    else:
        logger.info(
            "[%s]: using incomplete cached result for [%s]", self._name, key
        )

    # link our trace to the span of the original calculation, if it had one
    span_context = entry.opentracing_span_context
    with start_active_span_follows_from(
        f"ResponseCache[{self._name}].wait",
        contexts=(span_context,) if span_context else (),
    ):
        return await make_deferred_yieldable(result)