def _check_sigs_and_hash(self, pdu): """Throws a SynapseError if the PDU does not have the correct signatures. Returns: FrozenEvent: Either the given event or it redacted if it failed the content hash check. """ # Check signatures are correct. redacted_event = prune_event(pdu) redacted_pdu_json = redacted_event.get_pdu_json() try: yield self.keyring.verify_json_for_server( pdu.origin, redacted_pdu_json ) except SynapseError: logger.warn( "Signature check failed for %s redacted to %s", encode_canonical_json(pdu.get_pdu_json()), encode_canonical_json(redacted_pdu_json), ) raise if not check_event_content_hash(pdu): logger.warn( "Event content has been tampered, redacting %s, %s", pdu.event_id, encode_canonical_json(pdu.get_dict()) ) defer.returnValue(redacted_event) defer.returnValue(pdu)
def _check_sigs_and_hash(self, pdu):
    """Throws a SynapseError if the PDU does not have the correct
    signatures.

    Returns:
        FrozenEvent: Either the given event or it redacted if it failed the
        content hash check.
    """
    # Signatures cover the redacted form of the event.
    pruned = prune_event(pdu)
    pruned_json = pruned.get_pdu_json()

    try:
        yield self.keyring.verify_json_for_server(pdu.origin, pruned_json)
    except SynapseError:
        logger.warn(
            "Signature check failed for %s redacted to %s",
            encode_canonical_json(pdu.get_pdu_json()),
            encode_canonical_json(pruned_json),
        )
        raise

    # Happy path: hash is intact, hand back the event untouched.
    if check_event_content_hash(pdu):
        defer.returnValue(pdu)

    # Content hash mismatch: keep the event, but only its redacted form.
    logger.warn(
        "Event content has been tampered, redacting %s, %s",
        pdu.event_id, encode_canonical_json(pdu.get_dict())
    )
    defer.returnValue(pruned)
def compute_event_signature(event, signature_name, signing_key):
    """Sign the redacted form of an event.

    Args:
        event: the event to sign.
        signature_name (str): name of the signing entity (server name).
        signing_key: the key to sign with.

    Returns:
        dict: the "signatures" mapping produced by signing.
    """
    redact_json = prune_event(event).get_pdu_json()
    # These keys are transient and never part of the signed bytes.
    for transient in ("age_ts", "unsigned"):
        redact_json.pop(transient, None)

    logger.debug("Signing event: %s", encode_canonical_json(redact_json))
    redact_json = sign_json(redact_json, signature_name, signing_key)
    logger.debug("Signed event: %s", encode_canonical_json(redact_json))

    return redact_json["signatures"]
def set_received_txn_response(self, transaction_id, origin, code,
                              response_dict):
    """Persist the response we returned for an incoming transaction, and
    should return for subsequent transactions with the same transaction_id
    and origin.

    Args:
        transaction_id (str): transaction id of the incoming transaction
        origin (str): the server that sent the transaction
        code (int): HTTP response code we returned
        response_dict (dict): the response body; stored canonically encoded
    """
    return self._simple_insert(
        table=ReceivedTransactionsTable.table_name,
        values={
            "transaction_id": transaction_id,
            "origin": origin,
            "response_code": code,
            # buffer() stores the canonical JSON as a BLOB (Python 2).
            "response_json": buffer(encode_canonical_json(response_dict)),
        },
        # Ignore if we already recorded a response for this (txn, origin).
        or_ignore=True,
        desc="set_received_txn_response",
    )
def add_pusher(self, user_name, access_token, profile_tag, kind, app_id,
               app_display_name, device_display_name,
               pushkey, pushkey_ts, lang, data):
    """Insert or update the pusher identified by (app_id, pushkey,
    user_name).

    Args:
        user_name (str): user the pusher belongs to
        access_token: token the pusher was registered with
        profile_tag (str), kind (str), app_id (str)
        app_display_name (str), device_display_name (str)
        pushkey (str): opaque routing key for the push gateway
        pushkey_ts (int): timestamp the pushkey was registered
        lang (str), data (dict): gateway-specific config (stored as JSON)

    Raises:
        StoreError: if the upsert fails for any reason.
    """
    try:
        next_id = yield self._pushers_id_gen.get_next()
        yield self._simple_upsert(
            PushersTable.table_name,
            dict(
                app_id=app_id,
                pushkey=pushkey,
                user_name=user_name,
            ),
            dict(
                access_token=access_token,
                kind=kind,
                profile_tag=profile_tag,
                app_display_name=app_display_name,
                device_display_name=device_display_name,
                ts=pushkey_ts,
                lang=lang,
                data=encode_canonical_json(data),
            ),
            insertion_values=dict(
                id=next_id,
            ),
            desc="add_pusher",
        )
    except Exception as e:
        # Log the underlying cause before masking it with a generic
        # StoreError.  (Fixed garbled message "create_pusher with failed".)
        logger.error("create_pusher failed: %s", e)
        raise StoreError(500, "Problem creating pusher.")
def add_pusher(self, user_name, access_token, profile_tag, kind, app_id,
               app_display_name, device_display_name,
               pushkey, pushkey_ts, lang, data):
    """Upsert the pusher keyed by (app_id, pushkey, user_name).

    Raises:
        StoreError: if the underlying upsert fails.
    """
    # Natural key of the pushers table.
    lookup = dict(
        app_id=app_id,
        pushkey=pushkey,
        user_name=user_name,
    )
    # Columns refreshed on every call.
    updates = dict(
        access_token=access_token,
        kind=kind,
        profile_tag=profile_tag,
        app_display_name=app_display_name,
        device_display_name=device_display_name,
        ts=pushkey_ts,
        lang=lang,
        data=encode_canonical_json(data),
    )
    try:
        next_id = yield self._pushers_id_gen.get_next()
        yield self._simple_upsert(
            PushersTable.table_name,
            lookup,
            updates,
            insertion_values=dict(
                id=next_id,
            ),
            desc="add_pusher",
        )
    except Exception as e:
        logger.error("create_pusher with failed: %s", e)
        raise StoreError(500, "Problem creating pusher.")
def sign_json(json_object, signature_name, signing_key):
    """Sign the JSON object. Stores the signature in
    json_object["signatures"].

    Args:
        json_object (dict): The JSON object to sign.
        signature_name (str): The name of the signing entity.
        signing_key (syutil.crypto.SigningKey): The key to sign the JSON with.

    Returns:
        The modified, signed JSON object."""
    # Existing signatures and the "unsigned" section are excluded from the
    # signed bytes; pull them out and restore them afterwards.
    existing_signatures = json_object.pop("signatures", {})
    unsigned_section = json_object.pop("unsigned", None)

    signed = signing_key.sign(encode_canonical_json(json_object))
    key_id = "%s:%s" % (signing_key.alg, signing_key.version)
    existing_signatures.setdefault(signature_name, {})[key_id] = (
        encode_base64(signed.signature)
    )

    json_object["signatures"] = existing_signatures
    if unsigned_section is not None:
        json_object["unsigned"] = unsigned_section

    return json_object
def __init__(self, hs):
    """Pre-compute the static canonical-JSON response body once."""
    self.hs = hs
    self.version_string = hs.version_string
    response = self.response_json_object(hs.config)
    self.response_body = encode_canonical_json(response)
    Resource.__init__(self)
def respond_with_json(request, code, json_object, send_cors=False,
                      response_code_message=None, pretty_print=False):
    """Encode json_object and write it back as the response to request.

    Args:
        request: the twisted request to respond to.
        code (int): HTTP status code.
        json_object (dict): body to serialise.
        send_cors (bool): whether to add CORS headers.
        response_code_message (str|None): custom status message.
        pretty_print (bool): if True, use human-readable formatting
            instead of canonical JSON.
    """
    # BUG FIX: the condition was inverted ("if not pretty_print") so
    # pretty_print=False produced pretty output and vice versa.  The other
    # variants of this helper in this file use "if pretty_print".
    if pretty_print:
        json_bytes = encode_pretty_printed_json(json_object)
    else:
        json_bytes = encode_canonical_json(json_object)

    return respond_with_json_bytes(request, code, json_bytes, send_cors,
                                   response_code_message=response_code_message)
def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
    """Hash the pruned, signature-stripped form of an event.

    Returns:
        tuple: (algorithm name, digest bytes) of the reference hash.
    """
    event_json = prune_event(event).get_pdu_json()
    # Signatures and transient keys are never part of the reference hash.
    for excluded in ("signatures", "age_ts", "unsigned"):
        event_json.pop(excluded, None)
    hashed = hash_algorithm(encode_canonical_json(event_json))
    return (hashed.name, hashed.digest())
def on_POST(self, request, device_id):
    """Handle an end-to-end key upload: store any device keys and
    one-time keys in the body, then return the one-time key counts.
    """
    auth_user, client_info = yield self.auth.get_user_by_req(request)
    user_id = auth_user.to_string()

    # TODO: Check that the device_id matches that in the authentication
    # or derive the device_id from the authentication instead.
    try:
        body = json.loads(request.content.read())
    except:
        raise SynapseError(400, "Invalid key JSON")

    time_now = self.clock.time_msec()

    # TODO: Validate the JSON to make sure it has the right keys.
    device_keys = body.get("device_keys", None)
    if device_keys:
        logger.info(
            "Updating device_keys for device %r for user %r at %d",
            device_id, auth_user, time_now
        )
        # TODO: Sign the JSON with the server key
        yield self.store.set_e2e_device_keys(
            user_id, device_id, time_now,
            encode_canonical_json(device_keys)
        )

    one_time_keys = body.get("one_time_keys", None)
    if one_time_keys:
        logger.info(
            "Adding %d one_time_keys for device %r for user %r at %d",
            len(one_time_keys), device_id, user_id, time_now
        )
        key_tuples = []
        # Keys arrive as "<algorithm>:<key_id>" -> key JSON.
        for full_id, key_json in one_time_keys.items():
            algorithm, key_id = full_id.split(":")
            key_tuples.append(
                (algorithm, key_id, encode_canonical_json(key_json))
            )
        yield self.store.add_e2e_one_time_keys(
            user_id, device_id, time_now, key_tuples
        )

    result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
    defer.returnValue((200, {"one_time_key_counts": result}))
def _compute_content_hash(event, hash_algorithm):
    """Return a hash object over the event's content, with transient and
    derived keys stripped out first.
    """
    event_json = event.get_pdu_json()
    for key in ("age_ts", "unsigned", "signatures", "hashes",
                "outlier", "destinations"):
        event_json.pop(key, None)
    return hash_algorithm(encode_canonical_json(event_json))
def delivered(self, transaction, response_code, response_dict):
    """ Marks the given `Transaction` as having been successfully
    delivered to the remote homeserver, and what the response was.

    Returns:
        Deferred
    """
    encoded_response = encode_canonical_json(response_dict)
    return self.store.delivered_txn(
        transaction.transaction_id,
        transaction.destination,
        response_code,
        encoded_response,
    )
def _async_render_GET(self, request):
    """Fetch another server's published keys, verify them end-to-end,
    counter-sign them with our own key and return the result.  Any
    failure is reported to the client as a 502 JSON error body.
    """
    try:
        server_keys, certificate = yield fetch_server_key(
            self.server_name,
            self.key_server.ssl_context_factory
        )

        resp_server_name = server_keys[u"server_name"]
        verify_key_b64 = server_keys[u"signature_verify_key"]
        tls_certificate_b64 = server_keys[u"tls_certificate"]
        verify_key = VerifyKey(decode_base64(verify_key_b64))

        # The response must be for the server we actually asked about.
        if resp_server_name != self.server_name:
            raise ValueError("Wrong server name '%s' != '%s'" %
                             (resp_server_name, self.server_name))

        # The certificate we saw on the wire must match the one embedded
        # in the key response.
        x509_certificate_bytes = crypto.dump_certificate(
            crypto.FILETYPE_ASN1,
            certificate
        )

        if encode_base64(x509_certificate_bytes) != tls_certificate_b64:
            raise ValueError("TLS certificate doesn't match")

        # Check the response is signed by the key it advertises, then
        # add our own signature before relaying it.
        verify_signed_json(server_keys, self.server_name, verify_key)

        signed_json = sign_json(
            server_keys,
            self.key_server.server_name,
            self.key_server.signing_key
        )

        json_bytes = encode_canonical_json(signed_json)
        respond_with_json_bytes(request, 200, json_bytes)

    except Exception as e:
        # NOTE: e.message is Python 2 only.
        json_bytes = encode_canonical_json({
            u"error": {u"code": 502, u"message": e.message}
        })
        respond_with_json_bytes(request, 502, json_bytes)
def set_response(self, transaction, code, response):
    """ Persist how we responded to a transaction.

    Returns:
        Deferred

    Raises:
        RuntimeError: if the transaction has no transaction_id.
    """
    if not transaction.transaction_id:
        raise RuntimeError("Cannot persist a transaction with no "
                           "transaction_id")

    encoded_response = encode_canonical_json(response)
    return self.store.set_received_txn_response(
        transaction.transaction_id,
        transaction.origin,
        code,
        encoded_response,
    )
def delivered(self, transaction, response_code, response_dict):
    """ Marks the given `Transaction` as having been successfully
    delivered to the remote homeserver, and what the response was.

    Returns:
        Deferred
    """
    txn_id = transaction.transaction_id
    destination = transaction.destination
    encoded = encode_canonical_json(response_dict)
    return self.store.delivered_txn(txn_id, destination,
                                    response_code, encoded)
def compute_content_hash(event, hash_algorithm):
    """Hash the event content with transient/derived keys removed.

    Returns:
        tuple: (algorithm name, digest bytes).
    """
    event_json = event.get_pdu_json()
    for key in ("age_ts", "unsigned", "signatures", "hashes",
                "outlier", "destinations"):
        event_json.pop(key, None)
    hashed = hash_algorithm(encode_canonical_json(event_json))
    return (hashed.name, hashed.digest())
def respond_with_json(request, code, json_object, send_cors=False,
                      response_code_message=None, pretty_print=False,
                      version_string=""):
    """Serialise json_object (pretty or canonical) and send it as the
    response body for request.
    """
    json_bytes = (
        encode_pretty_printed_json(json_object) + "\n"
        if pretty_print
        else encode_canonical_json(json_object)
    )

    return respond_with_json_bytes(
        request, code, json_bytes,
        send_cors=send_cors,
        response_code_message=response_code_message,
        version_string=version_string
    )
def on_POST(self, request, device_id):
    """Handle an end-to-end key upload for a device: persist any
    "device_keys" and "one_time_keys" in the request body, then return
    the current one-time-key counts.
    """
    auth_user, client_info = yield self.auth.get_user_by_req(request)
    user_id = auth_user.to_string()
    # TODO: Check that the device_id matches that in the authentication
    # or derive the device_id from the authentication instead.
    try:
        body = json.loads(request.content.read())
    except:
        raise SynapseError(400, "Invalid key JSON")
    time_now = self.clock.time_msec()

    # TODO: Validate the JSON to make sure it has the right keys.
    device_keys = body.get("device_keys", None)
    if device_keys:
        logger.info("Updating device_keys for device %r for user %r at %d",
                    device_id, auth_user, time_now)
        # TODO: Sign the JSON with the server key
        yield self.store.set_e2e_device_keys(
            user_id, device_id, time_now,
            encode_canonical_json(device_keys))
    one_time_keys = body.get("one_time_keys", None)
    if one_time_keys:
        logger.info(
            "Adding %d one_time_keys for device %r for user %r at %d",
            len(one_time_keys), device_id, user_id, time_now)
        key_list = []
        # One-time keys arrive keyed as "<algorithm>:<key_id>".
        for key_id, key_json in one_time_keys.items():
            algorithm, key_id = key_id.split(":")
            key_list.append(
                (algorithm, key_id, encode_canonical_json(key_json)))
        yield self.store.add_e2e_one_time_keys(user_id, device_id,
                                               time_now, key_list)

    result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
    defer.returnValue((200, {"one_time_key_counts": result}))
def delivered_txn(self, transaction_id, destination, code, response_dict):
    """Persists the response for an outgoing transaction.

    Args:
        transaction_id (str)
        destination (str)
        code (int)
        response_dict (dict): stored canonically encoded as a BLOB
    """
    # buffer() stores the canonical JSON as a BLOB (Python 2).
    response_blob = buffer(encode_canonical_json(response_dict))
    return self.runInteraction(
        "delivered_txn",
        self._delivered_txn,
        transaction_id, destination, code, response_blob,
    )
def set_response(self, transaction, code, response):
    """ Persist how we responded to a transaction.

    Returns:
        Deferred

    Raises:
        RuntimeError: if the transaction has no transaction_id.
    """
    if not transaction.transaction_id:
        raise RuntimeError("Cannot persist a transaction with no "
                           "transaction_id")

    return self.store.set_received_txn_response(
        transaction.transaction_id,
        transaction.origin,
        code,
        encode_canonical_json(response)
    )
def _add_pusher_to_store(self, user_name, profile_tag, kind, app_id,
                         app_display_name, device_display_name,
                         pushkey, lang, data):
    """Persist a pusher row, then (re)start its delivery loop."""
    serialised_data = encode_canonical_json(data).decode("UTF-8")
    yield self.store.add_pusher(
        user_name=user_name,
        profile_tag=profile_tag,
        kind=kind,
        app_id=app_id,
        app_display_name=app_display_name,
        device_display_name=device_display_name,
        pushkey=pushkey,
        pushkey_ts=self.hs.get_clock().time_msec(),
        lang=lang,
        data=serialised_data,
    )
    self._refresh_pusher((app_id, pushkey))
def post_json_get_json(self, uri, post_json):
    """POST a canonical-JSON body to uri and return the parsed JSON
    response body.
    """
    payload = encode_canonical_json(post_json)
    logger.info("HTTP POST %s -> %s", payload, uri)

    headers = Headers({"Content-Type": ["application/json"]})
    producer = FileBodyProducer(StringIO(payload))
    response = yield self.agent.request(
        "POST", uri.encode("ascii"),
        headers=headers, bodyProducer=producer
    )

    raw_body = yield readBody(response)
    defer.returnValue(json.loads(raw_body))
def post_json_get_json(self, uri, post_json):
    """POST a canonical-JSON body to uri and return the parsed JSON
    response body.
    """
    json_str = encode_canonical_json(post_json)

    logger.info("HTTP POST %s -> %s", json_str, uri)

    response = yield self.request(
        "POST",
        uri.encode("ascii"),
        headers=Headers({
            "Content-Type": ["application/json"]
        }),
        bodyProducer=FileBodyProducer(StringIO(json_str))
    )

    body = yield readBody(response)

    defer.returnValue(json.loads(body))
def verify_signed_json(json_object, signature_name, verify_key):
    """Check a signature on a signed JSON object.

    Args:
        json_object (dict): The signed JSON object to check.
        signature_name (str): The name of the signature to check.
        verify_key (syutil.crypto.VerifyKey): The key to verify the signature.

    Raises:
        SignatureVerifyException: if the signature is missing, malformed
            or does not verify.
    """
    try:
        signatures = json_object["signatures"]
    except KeyError:
        raise SignatureVerifyException("No signatures on this object")

    key_id = "%s:%s" % (verify_key.alg, verify_key.version)

    # Narrowed from bare "except:" so KeyboardInterrupt/SystemExit are not
    # swallowed; TypeError covers a non-dict "signatures" value.
    try:
        signature_b64 = signatures[signature_name][key_id]
    except (KeyError, TypeError):
        raise SignatureVerifyException("Missing signature for %s, %s" % (
            signature_name, key_id
        ))

    try:
        signature = decode_base64(signature_b64)
    except Exception:
        raise SignatureVerifyException("Invalid signature base64 for %s, %s" % (
            signature_name, key_id
        ))

    # The signed bytes exclude the signatures and "unsigned" sections.
    json_object_copy = dict(json_object)
    del json_object_copy["signatures"]
    json_object_copy.pop("unsigned", None)

    message = encode_canonical_json(json_object_copy)

    try:
        verify_key.verify(message, signature)
    except Exception:
        logger.exception("Error verifying signature")
        raise SignatureVerifyException("Unable to verify signature for %s " % (
            signature_name
        ))
def _send_response(self, request, code, response_json_object,
                   response_code_message=None):
    """Encode response_json_object and write it back on request, unless
    the client has already gone away.
    """
    # could alternatively use request.notifyFinish() and flip a flag when
    # the Deferred fires, but since the flag is RIGHT THERE it seems like
    # a waste.
    if request._disconnected:
        logger.warn(
            "Not sending response to request %s, already disconnected.",
            request)
        return

    # curl users get human-readable output; everyone else gets canonical.
    if self._request_user_agent_is_curl(request):
        json_bytes = encode_pretty_printed_json(response_json_object)
    else:
        json_bytes = encode_canonical_json(response_json_object)

    # TODO: Only enable CORS for the requests that need it.
    respond_with_json_bytes(
        request, code, json_bytes, send_cors=True,
        response_code_message=response_code_message
    )
def respond_with_json(request, code, json_object, send_cors=False,
                      response_code_message=None, pretty_print=False,
                      version_string="", canonical_json=True):
    """Serialise json_object and send it as the response for request.

    pretty_print wins over canonical_json; canonical_json=False uses the
    frozen-dict-aware encoder instead of canonical encoding.
    """
    if pretty_print:
        json_bytes = encode_pretty_printed_json(json_object) + "\n"
    elif canonical_json:
        json_bytes = encode_canonical_json(json_object)
    else:
        json_bytes = encode_json(
            json_object, using_frozen_dicts=synapse.events.USE_FROZEN_DICTS
        )

    return respond_with_json_bytes(
        request, code, json_bytes,
        send_cors=send_cors,
        response_code_message=response_code_message,
        version_string=version_string
    )
def put_json(self, uri, json_body, args={}):
    """ Puts some json to the given URI.

    Args:
        uri (str): The URI to request, not including query parameters
        json_body (dict): The JSON to put in the HTTP body,
        args (dict): A dictionary used to create query strings, defaults to
            None.
            **Note**: The value of each key is assumed to be an iterable
            and *not* a string.
    Returns:
        Deferred: Succeeds when we get *any* 2xx HTTP response, with the
        HTTP body as JSON.
    Raises:
        On a non-2xx HTTP response.
    """
    if args:
        uri = "%s?%s" % (uri, urllib.urlencode(args, True))

    json_str = encode_canonical_json(json_body)

    response = yield self.request(
        "PUT",
        uri.encode("ascii"),
        headers=Headers({
            b"User-Agent": [self.version_string],
            "Content-Type": ["application/json"]
        }),
        bodyProducer=FileBodyProducer(StringIO(json_str))
    )

    body = yield readBody(response)

    if 200 <= response.code < 300:
        defer.returnValue(json.loads(body))

    # NB: This is explicitly not json.loads(body)'d because the contract
    # of CodeMessageException is a *string* message. Callers can always
    # load it into JSON if they want.
    raise CodeMessageException(response.code, body)
def add_hashes_and_signatures(event, signature_name, signing_key,
                              hash_algorithm=hashlib.sha256):
    """Attach state hash, content hash and our signature to event,
    mutating it in place.
    """
    if hasattr(event, "old_state_events"):
        # Hash of the IDs of the state events this event supersedes.
        state_ids = [e.event_id for e in event.old_state_events.values()]
        state_hashed = hash_algorithm(encode_canonical_json(state_ids))
        event.state_hash = {
            state_hashed.name: encode_base64(state_hashed.digest())
        }

    content_hashed = _compute_content_hash(event, hash_algorithm=hash_algorithm)
    if not hasattr(event, "hashes"):
        event.hashes = {}
    event.hashes[content_hashed.name] = encode_base64(content_hashed.digest())

    event.signatures = compute_event_signature(
        event,
        signature_name=signature_name,
        signing_key=signing_key,
    )
def respond_with_json(request, code, json_object, send_cors=False,
                      response_code_message=None, pretty_print=False,
                      version_string="", canonical_json=True):
    """Serialise json_object and send it as the response for request.

    pretty_print takes precedence; otherwise canonical_json selects
    between the canonical encoder and the frozen-dict-aware encoder.
    """
    if pretty_print:
        json_bytes = encode_pretty_printed_json(json_object) + "\n"
    else:
        if canonical_json:
            json_bytes = encode_canonical_json(json_object)
        else:
            json_bytes = encode_json(
                json_object, using_frozen_dicts=synapse.events.USE_FROZEN_DICTS)

    return respond_with_json_bytes(request, code, json_bytes,
                                   send_cors=send_cors,
                                   response_code_message=response_code_message,
                                   version_string=version_string)
def _persist_event_txn(self, txn, event, context, backfilled,
                       stream_ordering=None, is_new_state=True,
                       current_state=None):
    """Persist an event inside an existing DB transaction: per-type side
    tables, full JSON, the main events row, state tables, hashes and
    auth edges.  Statement order matters — keep it.
    """
    # Per-event-type side tables.
    if event.type == EventTypes.Member:
        self._store_room_member_txn(txn, event)
    elif event.type == EventTypes.Feedback:
        self._store_feedback_txn(txn, event)
    elif event.type == EventTypes.Name:
        self._store_room_name_txn(txn, event)
    elif event.type == EventTypes.Topic:
        self._store_room_topic_txn(txn, event)
    elif event.type == EventTypes.Redaction:
        self._store_redaction(txn, event)

    outlier = event.internal_metadata.is_outlier()

    # Full event JSON minus redaction bookkeeping keys.
    event_dict = {
        k: v
        for k, v in event.get_dict().items()
        if k not in [
            "redacted",
            "redacted_because",
        ]
    }

    metadata_json = encode_canonical_json(
        event.internal_metadata.get_dict()
    )

    self._simple_insert_txn(
        txn,
        table="event_json",
        values={
            "event_id": event.event_id,
            "room_id": event.room_id,
            "internal_metadata": metadata_json.decode("UTF-8"),
            "json": encode_canonical_json(event_dict).decode("UTF-8"),
        },
        or_replace=True,
    )

    vals = {
        "topological_ordering": event.depth,
        "event_id": event.event_id,
        "type": event.type,
        "room_id": event.room_id,
        "content": json.dumps(event.get_dict()["content"]),
        "processed": True,
        "outlier": outlier,
        "depth": event.depth,
    }

    if stream_ordering is not None:
        vals["stream_ordering"] = stream_ordering

    # Anything not covered by a dedicated column goes into
    # unrecognized_keys.
    unrec = {
        k: v
        for k, v in event.get_dict().items()
        if k not in vals.keys() and k not in [
            "redacted",
            "redacted_because",
            "signatures",
            "hashes",
            "prev_events",
        ]
    }
    vals["unrecognized_keys"] = json.dumps(unrec)

    try:
        self._simple_insert_txn(
            txn,
            "events",
            vals,
            or_replace=(not outlier),
            or_ignore=bool(outlier),
        )
    except:
        # Most likely a duplicate insert; roll back this event only.
        logger.warn(
            "Failed to persist, probably duplicate: %s",
            event.event_id,
            exc_info=True,
        )
        raise _RollbackButIsFineException("_persist_event")

    self._handle_prev_events(
        txn,
        outlier=outlier,
        event_id=event.event_id,
        prev_events=event.prev_events,
        room_id=event.room_id,
    )

    if not outlier:
        self._store_state_groups_txn(txn, event, context)

    # Replace the room's current state wholesale when asked to.
    if current_state:
        txn.execute(
            "DELETE FROM current_state_events WHERE room_id = ?",
            (event.room_id,)
        )

        for s in current_state:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": s.event_id,
                    "room_id": s.room_id,
                    "type": s.type,
                    "state_key": s.state_key,
                },
                or_replace=True,
            )

    is_state = hasattr(event, "state_key") and event.state_key is not None
    if is_state:
        vals = {
            "event_id": event.event_id,
            "room_id": event.room_id,
            "type": event.type,
            "state_key": event.state_key,
        }

        if hasattr(event, "replaces_state"):
            vals["prev_state"] = event.replaces_state

        self._simple_insert_txn(
            txn,
            "state_events",
            vals,
            or_replace=True,
        )

        if is_new_state:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )

        for e_id, h in event.prev_state:
            self._simple_insert_txn(
                txn,
                table="event_edges",
                values={
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": 1,
                },
                or_ignore=True,
            )

        if not backfilled:
            self._simple_insert_txn(
                txn,
                table="state_forward_extremities",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )

            # This event supersedes its prev_state entries as forward
            # extremities.
            for prev_state_id, _ in event.prev_state:
                self._simple_delete_txn(
                    txn,
                    table="state_forward_extremities",
                    keyvalues={
                        "event_id": prev_state_id,
                    }
                )

    for hash_alg, hash_base64 in event.hashes.items():
        hash_bytes = decode_base64(hash_base64)
        self._store_event_content_hash_txn(
            txn, event.event_id, hash_alg, hash_bytes,
        )

    for prev_event_id, prev_hashes in event.prev_events:
        for alg, hash_base64 in prev_hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_prev_event_hash_txn(
                txn, event.event_id, prev_event_id, alg, hash_bytes
            )

    for auth_id, _ in event.auth_events:
        self._simple_insert_txn(
            txn,
            table="event_auth",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            },
            or_ignore=True,
        )

    (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
    self._store_event_reference_hash_txn(
        txn, event.event_id, ref_alg, ref_hash_bytes
    )

    if not outlier:
        self._update_min_depth_for_room_txn(
            txn,
            event.room_id,
            event.depth
        )
def on_receive_pdu(self, origin, pdu, backfilled, state=None):
    """ Called by the ReplicationLayer when we have a new pdu. We need to
    do auth checks and put it through the StateHandler.
    """
    event = pdu

    logger.debug("Got event: %s", event.event_id)

    # If we are currently in the process of joining this room, then we
    # queue up events for later processing.
    if event.room_id in self.room_queues:
        self.room_queues[event.room_id].append((pdu, origin))
        return

    logger.debug("Processing event: %s", event.event_id)

    # Signatures cover the redacted form of the event.
    redacted_event = prune_event(event)
    redacted_pdu_json = redacted_event.get_pdu_json()

    try:
        yield self.keyring.verify_json_for_server(
            event.origin, redacted_pdu_json
        )
    except SynapseError as e:
        logger.warn(
            "Signature check failed for %s redacted to %s",
            encode_canonical_json(pdu.get_pdu_json()),
            encode_canonical_json(redacted_pdu_json),
        )
        raise FederationError(
            "ERROR",
            e.code,
            e.msg,
            affected=event.event_id,
        )

    # Tampered content is not fatal: keep the redacted form.
    if not check_event_content_hash(event):
        logger.warn(
            "Event content has been tampered, redacting %s, %s",
            event.event_id, encode_canonical_json(event.get_full_dict())
        )
        event = redacted_event

    logger.debug("Event: %s", event)

    # FIXME (erikj): Awful hack to make the case where we are not currently
    # in the room work
    current_state = None
    is_in_room = yield self.auth.check_host_in_room(
        event.room_id,
        self.server_name
    )
    if not is_in_room and not event.outlier:
        logger.debug("Got event for room we're not in.")

        # Pull the auth chain and (if needed) the room state from the
        # sending server so the event can be authed.
        replication_layer = self.replication_layer
        auth_chain = yield replication_layer.get_event_auth(
            origin,
            context=event.room_id,
            event_id=event.event_id,
        )

        for e in auth_chain:
            e.outlier = True
            try:
                yield self._handle_new_event(e, fetch_missing=False)
            except:
                logger.exception(
                    "Failed to parse auth event %s",
                    e.event_id,
                )

        if not state:
            state = yield replication_layer.get_state_for_context(
                origin,
                context=event.room_id,
                event_id=event.event_id,
            )

        current_state = state

    if state:
        for e in state:
            e.outlier = True
            try:
                yield self._handle_new_event(e)
            except:
                logger.exception(
                    "Failed to parse state event %s",
                    e.event_id,
                )

    try:
        yield self._handle_new_event(
            event,
            state=state,
            backfilled=backfilled,
            current_state=current_state,
        )
    except AuthError as e:
        raise FederationError(
            "ERROR",
            e.code,
            e.msg,
            affected=event.event_id,
        )

    # Make sure we have a row for the room itself.
    room = yield self.store.get_room(event.room_id)

    if not room:
        try:
            yield self.store.store_room(
                room_id=event.room_id,
                room_creator_user_id="",
                is_public=False,
            )
        except StoreError:
            logger.exception("Failed to store room.")

    if not backfilled:
        extra_users = []
        if event.type == RoomMemberEvent.TYPE:
            target_user_id = event.state_key
            target_user = self.hs.parse_userid(target_user_id)
            extra_users.append(target_user)

        yield self.notifier.on_new_room_event(
            event, extra_users=extra_users
        )

    if event.type == RoomMemberEvent.TYPE:
        if event.membership == Membership.JOIN:
            user = self.hs.parse_userid(event.state_key)
            yield self.distributor.fire(
                "user_joined_room", user=user, room_id=event.room_id
            )
def update_response_body(self, time_now_msec):
    """Refresh the cached key response and its validity timestamp."""
    # Keys are advertised as valid for one refresh interval from now.
    self.valid_until_ts = int(
        time_now_msec + self.config.key_refresh_interval
    )
    self.response_body = encode_canonical_json(self.response_json_object())
def reinsert_events(cursor, server_name, signing_key):
    """Delta v10 migration (Python 2 script): re-verify every event's
    signatures, re-sign any event we have not correctly signed ourselves,
    and rewrite the event_json table.
    """
    print "Running delta: v10"

    cursor.executescript(delta_sql)

    cursor.execute(
        "SELECT * FROM events ORDER BY rowid ASC"
    )

    print "Getting events..."

    rows = store.cursor_to_dict(cursor)

    events = store._generate_event_json(cursor, rows)

    print "Got events from DB."

    algorithms = {
        "sha256": hashlib.sha256,
    }

    # Build a one-entry key map for our own server; remote keys would be
    # added lazily (see the get_key placeholder below).
    key_id = "%s:%s" % (signing_key.alg, signing_key.version)
    verify_key = signing_key.verify_key
    verify_key.alg = signing_key.alg
    verify_key.version = signing_key.version

    server_keys = {
        server_name: {
            key_id: verify_key
        }
    }

    i = 0
    N = len(events)

    for event in events:
        if i % 100 == 0:
            print "Processed: %d/%d events" % (i,N,)
        i += 1

        # for alg_name in event.hashes:
        #     if check_event_content_hash(event, algorithms[alg_name]):
        #         pass
        #     else:
        #         pass
        #         print "FAIL content hash %s %s" % (alg_name, event.event_id, )

        have_own_correctly_signed = False
        for host, sigs in event.signatures.items():
            pruned = prune_event(event)

            for key_id in sigs:
                if host not in server_keys:
                    server_keys[host] = {}  # get_key(host)
                if key_id in server_keys[host]:
                    try:
                        verify_signed_json(
                            pruned.get_pdu_json(),
                            host,
                            server_keys[host][key_id]
                        )

                        if host == server_name:
                            have_own_correctly_signed = True
                    except SignatureVerifyException:
                        print "FAIL signature check %s %s" % (
                            key_id, event.event_id
                        )

        # TODO: Re sign with our own server key
        if not have_own_correctly_signed:
            sigs = compute_event_signature(event, server_name, signing_key)
            event.signatures.update(sigs)

            # Sanity-check the fresh signature immediately.
            pruned = prune_event(event)

            for key_id in event.signatures[server_name]:
                verify_signed_json(
                    pruned.get_pdu_json(),
                    server_name,
                    server_keys[server_name][key_id]
                )

        event_json = encode_canonical_json(
            event.get_dict()
        ).decode("UTF-8")

        metadata_json = encode_canonical_json(
            event.internal_metadata.get_dict()
        ).decode("UTF-8")

        store._simple_insert_txn(
            cursor,
            table="event_json",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "internal_metadata": metadata_json,
                "json": event_json,
            },
            or_replace=True,
        )
def _persist_event_txn(self, txn, event, context, backfilled,
                       stream_ordering=None, is_new_state=True,
                       current_state=None):
    """Persist an event inside an existing DB transaction, allocating a
    stream ordering if none was supplied.  Handles the re-persist case
    (e.g. an event previously stored as an outlier).  Statement order
    matters — keep it.
    """
    # Remove the any existing cache entries for the event_id
    txn.call_after(self._invalidate_get_event_cache, event.event_id)

    # Allocate a stream ordering and recurse with it set.
    if stream_ordering is None:
        with self._stream_id_gen.get_next_txn(txn) as stream_ordering:
            return self._persist_event_txn(
                txn, event, context, backfilled,
                stream_ordering=stream_ordering,
                is_new_state=is_new_state,
                current_state=current_state,
            )

    # We purposefully do this first since if we include a `current_state`
    # key, we *want* to update the `current_state_events` table
    if current_state:
        self._simple_delete_txn(
            txn,
            table="current_state_events",
            keyvalues={"room_id": event.room_id},
        )

        for s in current_state:
            if s.type == EventTypes.Member:
                txn.call_after(self.get_rooms_for_user.invalidate, s.state_key)
                txn.call_after(self.get_joined_hosts_for_room.invalidate,
                               s.room_id)
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": s.event_id,
                    "room_id": s.room_id,
                    "type": s.type,
                    "state_key": s.state_key,
                }
            )

    outlier = event.internal_metadata.is_outlier()

    if not outlier:
        self._store_state_groups_txn(txn, event, context)

        self._update_min_depth_for_room_txn(
            txn,
            event.room_id,
            event.depth
        )

    have_persisted = self._simple_select_one_onecol_txn(
        txn,
        table="event_json",
        keyvalues={"event_id": event.event_id},
        retcol="event_id",
        allow_none=True,
    )

    metadata_json = encode_canonical_json(
        event.internal_metadata.get_dict()).decode("UTF-8")

    # If we have already persisted this event, we don't need to do any
    # more processing.
    # The processing above must be done on every call to persist event,
    # since they might not have happened on previous calls. For example,
    # if we are persisting an event that we had persisted as an outlier,
    # but is no longer one.
    if have_persisted:
        if not outlier:
            sql = ("UPDATE event_json SET internal_metadata = ?"
                   " WHERE event_id = ?")
            txn.execute(sql, (metadata_json, event.event_id,))

            sql = ("UPDATE events SET outlier = ?"
                   " WHERE event_id = ?")
            txn.execute(sql, (False, event.event_id,))
        return

    self._handle_prev_events(
        txn,
        outlier=outlier,
        event_id=event.event_id,
        prev_events=event.prev_events,
        room_id=event.room_id,
    )

    # Per-event-type side tables.
    if event.type == EventTypes.Member:
        self._store_room_member_txn(txn, event)
    elif event.type == EventTypes.Name:
        self._store_room_name_txn(txn, event)
    elif event.type == EventTypes.Topic:
        self._store_room_topic_txn(txn, event)
    elif event.type == EventTypes.Redaction:
        self._store_redaction(txn, event)

    event_dict = {
        k: v
        for k, v in event.get_dict().items()
        if k not in [
            "redacted",
            "redacted_because",
        ]
    }

    self._simple_insert_txn(
        txn,
        table="event_json",
        values={
            "event_id": event.event_id,
            "room_id": event.room_id,
            "internal_metadata": metadata_json,
            "json": encode_canonical_json(event_dict).decode("UTF-8"),
        },
    )

    content = encode_canonical_json(event.content).decode("UTF-8")

    vals = {
        "topological_ordering": event.depth,
        "event_id": event.event_id,
        "type": event.type,
        "room_id": event.room_id,
        "content": content,
        "processed": True,
        "outlier": outlier,
        "depth": event.depth,
    }

    # Anything without a dedicated column goes into unrecognized_keys.
    unrec = {
        k: v
        for k, v in event.get_dict().items()
        if k not in vals.keys() and k not in [
            "redacted",
            "redacted_because",
            "signatures",
            "hashes",
            "prev_events",
        ]
    }

    vals["unrecognized_keys"] = encode_canonical_json(unrec).decode(
        "UTF-8")

    sql = (
        "INSERT INTO events"
        " (stream_ordering, topological_ordering, event_id, type,"
        " room_id, content, processed, outlier, depth)"
        " VALUES (?,?,?,?,?,?,?,?,?)"
    )

    txn.execute(
        sql,
        (stream_ordering, event.depth, event.event_id, event.type,
         event.room_id, content, True, outlier, event.depth)
    )

    if context.rejected:
        self._store_rejections_txn(txn, event.event_id, context.rejected)

    for hash_alg, hash_base64 in event.hashes.items():
        hash_bytes = decode_base64(hash_base64)
        self._store_event_content_hash_txn(
            txn, event.event_id, hash_alg, hash_bytes,
        )

    for prev_event_id, prev_hashes in event.prev_events:
        for alg, hash_base64 in prev_hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_prev_event_hash_txn(txn, event.event_id,
                                            prev_event_id, alg, hash_bytes)

    self._simple_insert_many_txn(
        txn,
        table="event_auth",
        values=[
            {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            }
            for auth_id, _ in event.auth_events
        ],
    )

    (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
    self._store_event_reference_hash_txn(txn, event.event_id, ref_alg,
                                         ref_hash_bytes)

    if event.is_state():
        vals = {
            "event_id": event.event_id,
            "room_id": event.room_id,
            "type": event.type,
            "state_key": event.state_key,
        }

        # TODO: How does this work with backfilling?
        if hasattr(event, "replaces_state"):
            vals["prev_state"] = event.replaces_state

        self._simple_insert_txn(
            txn,
            "state_events",
            vals,
        )

        self._simple_insert_many_txn(
            txn,
            table="event_edges",
            values=[
                {
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": True,
                }
                for e_id, h in event.prev_state
            ],
        )

        # Rejected events must never become current state.
        if is_new_state and not context.rejected:
            self._simple_upsert_txn(
                txn,
                "current_state_events",
                keyvalues={
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                values={
                    "event_id": event.event_id,
                }
            )

    return
def rows_v2(server, json):
    """Yield one DB row per verify key found in a server-keys JSON blob.

    Each yielded tuple is
    ``(server, key_id, from_server, ts_added, ts_valid_until, key_json)``;
    the "from_server" column is filled with a placeholder ``"-"`` and both
    timestamp columns take the blob's ``valid_until_ts``.
    """
    expiry = json["valid_until_ts"]
    encoded = encode_canonical_json(json)
    for kid in json["verify_keys"]:
        yield (server, kid, "-", expiry, expiry, buffer(encoded))
def reset(self, jsn):
    """Replace the cached request body with *jsn* re-encoded as
    canonical JSON, updating the cached length to match."""
    encoded = encode_canonical_json(jsn)
    self.body = encoded
    self.length = len(encoded)
def __init__(self, jsn):
    """Encode *jsn* as canonical JSON once up front, caching both the
    byte body and its length."""
    encoded = encode_canonical_json(jsn)
    self.body = encoded
    self.length = len(encoded)
def _persist_event_txn(self, txn, event, context, backfilled,
                       stream_ordering=None, is_new_state=True,
                       current_state=None):
    """Persist a single event inside an already-open DB transaction.

    Writes the event into ``event_json``/``events`` plus the auxiliary
    state, hash, edge and auth tables. If the event was already
    persisted, only its outlier flag and internal metadata are
    refreshed and the rest is skipped.

    Args:
        txn: database transaction cursor.
        event: the event to store.
        context: event context (state groups, rejection status).
        backfilled (bool): whether the event arrived via backfill.
        stream_ordering (int|None): explicit stream position, if any.
        is_new_state (bool): whether the event should become part of the
            room's current state.
        current_state (list|None): if given, wholesale replaces the
            room's ``current_state_events`` rows with these events.
    """
    # Remove any existing cache entries for the event_id
    self._get_event_cache.pop(event.event_id)

    # We purposefully do this first since if we include a `current_state`
    # key, we *want* to update the `current_state_events` table
    if current_state:
        txn.execute(
            "DELETE FROM current_state_events WHERE room_id = ?",
            (event.room_id, )
        )
        for s in current_state:
            self._simple_insert_txn(
                txn, "current_state_events",
                {
                    "event_id": s.event_id,
                    "room_id": s.room_id,
                    "type": s.type,
                    "state_key": s.state_key,
                },
                or_replace=True,
            )

    if event.is_state() and is_new_state:
        if not backfilled and not context.rejected:
            # This event becomes a forward extremity of the room state...
            self._simple_insert_txn(
                txn,
                table="state_forward_extremities",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )
            # ...and the state events it supersedes stop being ones.
            for prev_state_id, _ in event.prev_state:
                self._simple_delete_txn(
                    txn,
                    table="state_forward_extremities",
                    keyvalues={
                        "event_id": prev_state_id,
                    }
                )

    outlier = event.internal_metadata.is_outlier()

    if not outlier:
        self._store_state_groups_txn(txn, event, context)
        self._update_min_depth_for_room_txn(txn, event.room_id, event.depth)

    self._handle_prev_events(
        txn,
        outlier=outlier,
        event_id=event.event_id,
        prev_events=event.prev_events,
        room_id=event.room_id,
    )

    have_persisted = self._simple_select_one_onecol_txn(
        txn,
        table="event_json",
        keyvalues={"event_id": event.event_id},
        retcol="event_id",
        allow_none=True,
    )

    metadata_json = encode_canonical_json(
        event.internal_metadata.get_dict())

    # If we have already persisted this event, we don't need to do any
    # more processing.
    # The processing above must be done on every call to persist event,
    # since they might not have happened on previous calls. For example,
    # if we are persisting an event that we had persisted as an outlier,
    # but is no longer one.
    if have_persisted:
        if not outlier:
            sql = ("UPDATE event_json SET internal_metadata = ?"
                   " WHERE event_id = ?")
            txn.execute(sql, (
                metadata_json.decode("UTF-8"),
                event.event_id,
            ))

            sql = ("UPDATE events SET outlier = 0"
                   " WHERE event_id = ?")
            txn.execute(sql, (event.event_id, ))
        return

    # Per-event-type side tables.
    if event.type == EventTypes.Member:
        self._store_room_member_txn(txn, event)
    elif event.type == EventTypes.Feedback:
        self._store_feedback_txn(txn, event)
    elif event.type == EventTypes.Name:
        self._store_room_name_txn(txn, event)
    elif event.type == EventTypes.Topic:
        self._store_room_topic_txn(txn, event)
    elif event.type == EventTypes.Redaction:
        self._store_redaction(txn, event)

    event_dict = {
        k: v
        for k, v in event.get_dict().items()
        if k not in [
            "redacted",
            "redacted_because",
        ]
    }

    self._simple_insert_txn(
        txn,
        table="event_json",
        values={
            "event_id": event.event_id,
            "room_id": event.room_id,
            "internal_metadata": metadata_json.decode("UTF-8"),
            "json": encode_canonical_json(event_dict).decode("UTF-8"),
        },
        or_replace=True,
    )

    content = encode_canonical_json(event.content).decode("UTF-8")

    vals = {
        "topological_ordering": event.depth,
        "event_id": event.event_id,
        "type": event.type,
        "room_id": event.room_id,
        "content": content,
        "processed": True,
        "outlier": outlier,
        "depth": event.depth,
    }

    if stream_ordering is not None:
        vals["stream_ordering"] = stream_ordering

    # Top-level keys without a dedicated column are preserved as
    # canonical JSON in `unrecognized_keys`.
    unrec = {
        k: v
        for k, v in event.get_dict().items()
        if k not in vals.keys() and k not in [
            "redacted",
            "redacted_because",
            "signatures",
            "hashes",
            "prev_events",
        ]
    }

    vals["unrecognized_keys"] = encode_canonical_json(unrec).decode(
        "UTF-8")

    try:
        self._simple_insert_txn(
            txn,
            "events",
            vals,
            or_replace=(not outlier),
            or_ignore=bool(outlier),
        )
    except:
        logger.warn(
            "Failed to persist, probably duplicate: %s",
            event.event_id,
            exc_info=True,
        )
        raise _RollbackButIsFineException("_persist_event")

    if context.rejected:
        self._store_rejections_txn(txn, event.event_id, context.rejected)

    if event.is_state():
        vals = {
            "event_id": event.event_id,
            "room_id": event.room_id,
            "type": event.type,
            "state_key": event.state_key,
        }

        # TODO: How does this work with backfilling?
        if hasattr(event, "replaces_state"):
            vals["prev_state"] = event.replaces_state

        self._simple_insert_txn(
            txn,
            "state_events",
            vals,
            or_replace=True,
        )

        if is_new_state and not context.rejected:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )

        for e_id, h in event.prev_state:
            self._simple_insert_txn(
                txn,
                table="event_edges",
                values={
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": 1,
                },
                or_ignore=True,
            )

    # Content hashes declared by this event.
    for hash_alg, hash_base64 in event.hashes.items():
        hash_bytes = decode_base64(hash_base64)
        self._store_event_content_hash_txn(
            txn, event.event_id, hash_alg, hash_bytes,
        )

    # Hashes of each referenced prev event.
    for prev_event_id, prev_hashes in event.prev_events:
        for alg, hash_base64 in prev_hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_prev_event_hash_txn(
                txn, event.event_id, prev_event_id, alg, hash_bytes
            )

    for auth_id, _ in event.auth_events:
        self._simple_insert_txn(
            txn,
            table="event_auth",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            },
            or_ignore=True,
        )

    (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
    self._store_event_reference_hash_txn(
        txn, event.event_id, ref_alg, ref_hash_bytes
    )
def _persist_event_txn(self, txn, event, context, backfilled,
                       stream_ordering=None, is_new_state=True,
                       current_state=None):
    """Persist a single event inside an already-open DB transaction.

    Stores the event JSON, the ``events`` row and the auxiliary state,
    hash, edge and auth tables.

    Args:
        txn: database transaction cursor.
        event: the event to store.
        context: event context (state groups, etc.).
        backfilled (bool): whether the event arrived via backfill.
        stream_ordering (int|None): explicit stream position, if any.
        is_new_state (bool): whether the event should become part of the
            room's current state.
        current_state (list|None): if given, wholesale replaces the
            room's ``current_state_events`` rows with these events.
    """
    # Per-event-type side tables.
    if event.type == EventTypes.Member:
        self._store_room_member_txn(txn, event)
    elif event.type == EventTypes.Feedback:
        self._store_feedback_txn(txn, event)
    elif event.type == EventTypes.Name:
        self._store_room_name_txn(txn, event)
    elif event.type == EventTypes.Topic:
        self._store_room_topic_txn(txn, event)
    elif event.type == EventTypes.Redaction:
        self._store_redaction(txn, event)

    outlier = event.internal_metadata.is_outlier()

    event_dict = {
        k: v
        for k, v in event.get_dict().items()
        if k not in [
            "redacted",
            "redacted_because",
        ]
    }

    metadata_json = encode_canonical_json(
        event.internal_metadata.get_dict())

    self._simple_insert_txn(
        txn,
        table="event_json",
        values={
            "event_id": event.event_id,
            "room_id": event.room_id,
            "internal_metadata": metadata_json.decode("UTF-8"),
            "json": encode_canonical_json(event_dict).decode("UTF-8"),
        },
        or_replace=True,
    )

    vals = {
        "topological_ordering": event.depth,
        "event_id": event.event_id,
        "type": event.type,
        "room_id": event.room_id,
        "content": json.dumps(event.get_dict()["content"]),
        "processed": True,
        "outlier": outlier,
        "depth": event.depth,
    }

    if stream_ordering is not None:
        vals["stream_ordering"] = stream_ordering

    # Top-level keys without a dedicated column are preserved in
    # `unrecognized_keys`.
    unrec = {
        k: v
        for k, v in event.get_dict().items()
        if k not in vals.keys() and k not in [
            "redacted",
            "redacted_because",
            "signatures",
            "hashes",
            "prev_events",
        ]
    }
    vals["unrecognized_keys"] = json.dumps(unrec)

    try:
        self._simple_insert_txn(
            txn,
            "events",
            vals,
            or_replace=(not outlier),
            or_ignore=bool(outlier),
        )
    except:
        logger.warn(
            "Failed to persist, probably duplicate: %s",
            event.event_id,
            exc_info=True,
        )
        raise _RollbackButIsFineException("_persist_event")

    self._handle_prev_events(
        txn,
        outlier=outlier,
        event_id=event.event_id,
        prev_events=event.prev_events,
        room_id=event.room_id,
    )

    if not outlier:
        self._store_state_groups_txn(txn, event, context)

    # A supplied `current_state` wholesale replaces the room's current
    # state rows.
    if current_state:
        txn.execute(
            "DELETE FROM current_state_events WHERE room_id = ?",
            (event.room_id, )
        )
        for s in current_state:
            self._simple_insert_txn(
                txn, "current_state_events",
                {
                    "event_id": s.event_id,
                    "room_id": s.room_id,
                    "type": s.type,
                    "state_key": s.state_key,
                },
                or_replace=True,
            )

    is_state = hasattr(event, "state_key") and event.state_key is not None
    if is_state:
        vals = {
            "event_id": event.event_id,
            "room_id": event.room_id,
            "type": event.type,
            "state_key": event.state_key,
        }

        if hasattr(event, "replaces_state"):
            vals["prev_state"] = event.replaces_state

        self._simple_insert_txn(
            txn,
            "state_events",
            vals,
            or_replace=True,
        )

        if is_new_state:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )

        for e_id, h in event.prev_state:
            self._simple_insert_txn(
                txn,
                table="event_edges",
                values={
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": 1,
                },
                or_ignore=True,
            )

        if not backfilled:
            # This event becomes a forward extremity of the room state;
            # the state it supersedes stops being one.
            self._simple_insert_txn(
                txn,
                table="state_forward_extremities",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )
            for prev_state_id, _ in event.prev_state:
                self._simple_delete_txn(
                    txn,
                    table="state_forward_extremities",
                    keyvalues={
                        "event_id": prev_state_id,
                    }
                )

    # Content hashes declared by this event.
    for hash_alg, hash_base64 in event.hashes.items():
        hash_bytes = decode_base64(hash_base64)
        self._store_event_content_hash_txn(
            txn, event.event_id, hash_alg, hash_bytes,
        )

    # Hashes of each referenced prev event.
    for prev_event_id, prev_hashes in event.prev_events:
        for alg, hash_base64 in prev_hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_prev_event_hash_txn(
                txn, event.event_id, prev_event_id, alg, hash_bytes
            )

    for auth_id, _ in event.auth_events:
        self._simple_insert_txn(
            txn,
            table="event_auth",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            },
            or_ignore=True,
        )

    (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
    self._store_event_reference_hash_txn(
        txn, event.event_id, ref_alg, ref_hash_bytes
    )

    if not outlier:
        self._update_min_depth_for_room_txn(txn, event.room_id, event.depth)
def _persist_event_txn(self, txn, event, context, backfilled,
                       stream_ordering=None, is_new_state=True,
                       current_state=None):
    """Persist a single event inside an already-open DB transaction.

    Allocates a stream ordering (re-entering itself once it has one),
    then writes the event to ``event_json``/``events`` plus the
    auxiliary state, hash, edge and auth tables. If the event was
    already persisted, only its outlier flag and internal metadata are
    refreshed.

    Args:
        txn: database transaction cursor.
        event: the event to store.
        context: event context (state groups, rejection status).
        backfilled (bool): whether the event arrived via backfill.
        stream_ordering (int|None): explicit stream position; when None
            one is taken from the stream id generator.
        is_new_state (bool): whether the event should become part of the
            room's current state.
        current_state (list|None): if given, wholesale replaces the
            room's ``current_state_events`` rows with these events.
    """
    # Remove any existing cache entries for the event_id
    txn.call_after(self._invalidate_get_event_cache, event.event_id)

    # Allocate a stream ordering if the caller didn't supply one, then
    # re-enter with it bound.
    if stream_ordering is None:
        with self._stream_id_gen.get_next_txn(txn) as stream_ordering:
            return self._persist_event_txn(
                txn, event, context, backfilled,
                stream_ordering=stream_ordering,
                is_new_state=is_new_state,
                current_state=current_state,
            )

    # We purposefully do this first since if we include a `current_state`
    # key, we *want* to update the `current_state_events` table
    if current_state:
        self._simple_delete_txn(
            txn,
            table="current_state_events",
            keyvalues={"room_id": event.room_id},
        )

        for s in current_state:
            # Membership changes invalidate the per-user/per-room caches.
            if s.type == EventTypes.Member:
                txn.call_after(
                    self.get_rooms_for_user.invalidate, s.state_key
                )
                txn.call_after(
                    self.get_joined_hosts_for_room.invalidate, s.room_id
                )
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": s.event_id,
                    "room_id": s.room_id,
                    "type": s.type,
                    "state_key": s.state_key,
                }
            )

    outlier = event.internal_metadata.is_outlier()

    if not outlier:
        self._store_state_groups_txn(txn, event, context)

        self._update_min_depth_for_room_txn(
            txn,
            event.room_id,
            event.depth
        )

    have_persisted = self._simple_select_one_onecol_txn(
        txn,
        table="event_json",
        keyvalues={"event_id": event.event_id},
        retcol="event_id",
        allow_none=True,
    )

    metadata_json = encode_canonical_json(
        event.internal_metadata.get_dict()
    ).decode("UTF-8")

    # If we have already persisted this event, we don't need to do any
    # more processing.
    # The processing above must be done on every call to persist event,
    # since they might not have happened on previous calls. For example,
    # if we are persisting an event that we had persisted as an outlier,
    # but is no longer one.
    if have_persisted:
        if not outlier:
            sql = (
                "UPDATE event_json SET internal_metadata = ?"
                " WHERE event_id = ?"
            )
            txn.execute(
                sql,
                (metadata_json, event.event_id,)
            )

            sql = (
                "UPDATE events SET outlier = ?"
                " WHERE event_id = ?"
            )
            txn.execute(
                sql,
                (False, event.event_id,)
            )
        return

    self._handle_prev_events(
        txn,
        outlier=outlier,
        event_id=event.event_id,
        prev_events=event.prev_events,
        room_id=event.room_id,
    )

    # Per-event-type side tables.
    if event.type == EventTypes.Member:
        self._store_room_member_txn(txn, event)
    elif event.type == EventTypes.Name:
        self._store_room_name_txn(txn, event)
    elif event.type == EventTypes.Topic:
        self._store_room_topic_txn(txn, event)
    elif event.type == EventTypes.Redaction:
        self._store_redaction(txn, event)

    event_dict = {
        k: v
        for k, v in event.get_dict().items()
        if k not in [
            "redacted",
            "redacted_because",
        ]
    }

    self._simple_insert_txn(
        txn,
        table="event_json",
        values={
            "event_id": event.event_id,
            "room_id": event.room_id,
            "internal_metadata": metadata_json,
            "json": encode_canonical_json(event_dict).decode("UTF-8"),
        },
    )

    content = encode_canonical_json(
        event.content
    ).decode("UTF-8")

    vals = {
        "topological_ordering": event.depth,
        "event_id": event.event_id,
        "type": event.type,
        "room_id": event.room_id,
        "content": content,
        "processed": True,
        "outlier": outlier,
        "depth": event.depth,
    }

    # Top-level keys without a dedicated column are preserved as
    # canonical JSON in `unrecognized_keys`.
    unrec = {
        k: v
        for k, v in event.get_dict().items()
        if k not in vals.keys() and k not in [
            "redacted",
            "redacted_because",
            "signatures",
            "hashes",
            "prev_events",
        ]
    }

    vals["unrecognized_keys"] = encode_canonical_json(
        unrec
    ).decode("UTF-8")

    sql = (
        "INSERT INTO events"
        " (stream_ordering, topological_ordering, event_id, type,"
        " room_id, content, processed, outlier, depth)"
        " VALUES (?,?,?,?,?,?,?,?,?)"
    )

    txn.execute(
        sql,
        (
            stream_ordering, event.depth, event.event_id, event.type,
            event.room_id, content, True, outlier, event.depth
        )
    )

    if context.rejected:
        self._store_rejections_txn(
            txn, event.event_id, context.rejected
        )

    # Content hashes declared by this event.
    for hash_alg, hash_base64 in event.hashes.items():
        hash_bytes = decode_base64(hash_base64)
        self._store_event_content_hash_txn(
            txn, event.event_id, hash_alg, hash_bytes,
        )

    # Hashes of each referenced prev event.
    for prev_event_id, prev_hashes in event.prev_events:
        for alg, hash_base64 in prev_hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_prev_event_hash_txn(
                txn, event.event_id, prev_event_id, alg, hash_bytes
            )

    self._simple_insert_many_txn(
        txn,
        table="event_auth",
        values=[
            {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            }
            for auth_id, _ in event.auth_events
        ],
    )

    (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
    self._store_event_reference_hash_txn(
        txn, event.event_id, ref_alg, ref_hash_bytes
    )

    if event.is_state():
        vals = {
            "event_id": event.event_id,
            "room_id": event.room_id,
            "type": event.type,
            "state_key": event.state_key,
        }

        # TODO: How does this work with backfilling?
        if hasattr(event, "replaces_state"):
            vals["prev_state"] = event.replaces_state

        self._simple_insert_txn(
            txn,
            "state_events",
            vals,
        )

        self._simple_insert_many_txn(
            txn,
            table="event_edges",
            values=[
                {
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": True,
                }
                for e_id, h in event.prev_state
            ],
        )

        if is_new_state and not context.rejected:
            self._simple_upsert_txn(
                txn,
                "current_state_events",
                keyvalues={
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                values={
                    "event_id": event.event_id,
                }
            )

    return
def reinsert_events(cursor, server_name, signing_key):
    """Re-sign and re-insert every event in the database (delta v10).

    Runs the v10 schema delta, loads all events, checks each event's
    existing signatures against the keys we know about, adds our own
    signature where a valid one from us is missing, then writes each
    event back into ``event_json``.

    Args:
        cursor: database cursor to run the migration with.
        server_name (str): our server's name, used for self-signing.
        signing_key: our signing key (has alg/version/verify_key).
    """
    print "Running delta: v10"

    cursor.executescript(delta_sql)

    cursor.execute("SELECT * FROM events ORDER BY rowid ASC")

    print "Getting events..."

    rows = store.cursor_to_dict(cursor)

    events = store._generate_event_json(cursor, rows)

    print "Got events from DB."

    algorithms = {
        "sha256": hashlib.sha256,
    }

    key_id = "%s:%s" % (signing_key.alg, signing_key.version)
    verify_key = signing_key.verify_key
    verify_key.alg = signing_key.alg
    verify_key.version = signing_key.version

    server_keys = {
        server_name: {
            key_id: verify_key
        }
    }

    i = 0
    N = len(events)

    for event in events:
        # Progress indicator every 100 events.
        if i % 100 == 0:
            print "Processed: %d/%d events" % (i, N, )
        i += 1

        # for alg_name in event.hashes:
        #     if check_event_content_hash(event, algorithms[alg_name]):
        #         pass
        #     else:
        #         pass
        #         print "FAIL content hash %s %s" % (alg_name, event.event_id, )

        # Verify every signature we hold a key for; note whether our own
        # server has a valid signature on this event.
        have_own_correctly_signed = False
        for host, sigs in event.signatures.items():
            pruned = prune_event(event)

            for key_id in sigs:
                if host not in server_keys:
                    server_keys[host] = {}  # get_key(host)
                if key_id in server_keys[host]:
                    try:
                        verify_signed_json(
                            pruned.get_pdu_json(),
                            host,
                            server_keys[host][key_id]
                        )

                        if host == server_name:
                            have_own_correctly_signed = True
                    except SignatureVerifyException:
                        print "FAIL signature check %s %s" % (
                            key_id, event.event_id
                        )

        # TODO: Re sign with our own server key
        if not have_own_correctly_signed:
            sigs = compute_event_signature(event, server_name, signing_key)
            event.signatures.update(sigs)

            # Sanity-check that the fresh signature verifies.
            pruned = prune_event(event)

            for key_id in event.signatures[server_name]:
                verify_signed_json(
                    pruned.get_pdu_json(),
                    server_name,
                    server_keys[server_name][key_id]
                )

        event_json = encode_canonical_json(
            event.get_dict()
        ).decode("UTF-8")

        metadata_json = encode_canonical_json(
            event.internal_metadata.get_dict()
        ).decode("UTF-8")

        store._simple_insert_txn(
            cursor,
            table="event_json",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "internal_metadata": metadata_json,
                "json": event_json,
            },
            or_replace=True,
        )
def _persist_event_txn(self, txn, event, context, backfilled,
                       stream_ordering=None, is_new_state=True,
                       current_state=None):
    """Persist a single event inside an already-open DB transaction.

    Writes the event into ``event_json``/``events`` plus the auxiliary
    state, hash, edge and auth tables. If the event was already
    persisted, only its outlier flag and internal metadata are
    refreshed and the rest is skipped.

    Args:
        txn: database transaction cursor.
        event: the event to store.
        context: event context (state groups, rejection status).
        backfilled (bool): whether the event arrived via backfill.
        stream_ordering (int|None): explicit stream position, if any.
        is_new_state (bool): whether the event should become part of the
            room's current state.
        current_state (list|None): if given, wholesale replaces the
            room's ``current_state_events`` rows with these events.
    """
    # Remove any existing cache entries for the event_id
    self._get_event_cache.pop(event.event_id)

    # We purposefully do this first since if we include a `current_state`
    # key, we *want* to update the `current_state_events` table
    if current_state:
        txn.execute(
            "DELETE FROM current_state_events WHERE room_id = ?",
            (event.room_id,)
        )

        for s in current_state:
            self._simple_insert_txn(
                txn, "current_state_events",
                {
                    "event_id": s.event_id,
                    "room_id": s.room_id,
                    "type": s.type,
                    "state_key": s.state_key,
                },
                or_replace=True,
            )

    if event.is_state() and is_new_state:
        if not backfilled and not context.rejected:
            # This event becomes a forward extremity of the room state...
            self._simple_insert_txn(
                txn,
                table="state_forward_extremities",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )
            # ...and the state events it supersedes stop being ones.
            for prev_state_id, _ in event.prev_state:
                self._simple_delete_txn(
                    txn,
                    table="state_forward_extremities",
                    keyvalues={
                        "event_id": prev_state_id,
                    }
                )

    outlier = event.internal_metadata.is_outlier()

    if not outlier:
        self._store_state_groups_txn(txn, event, context)

        self._update_min_depth_for_room_txn(
            txn,
            event.room_id,
            event.depth
        )

    self._handle_prev_events(
        txn,
        outlier=outlier,
        event_id=event.event_id,
        prev_events=event.prev_events,
        room_id=event.room_id,
    )

    have_persisted = self._simple_select_one_onecol_txn(
        txn,
        table="event_json",
        keyvalues={"event_id": event.event_id},
        retcol="event_id",
        allow_none=True,
    )

    metadata_json = encode_canonical_json(
        event.internal_metadata.get_dict()
    )

    # If we have already persisted this event, we don't need to do any
    # more processing.
    # The processing above must be done on every call to persist event,
    # since they might not have happened on previous calls. For example,
    # if we are persisting an event that we had persisted as an outlier,
    # but is no longer one.
    if have_persisted:
        if not outlier:
            sql = (
                "UPDATE event_json SET internal_metadata = ?"
                " WHERE event_id = ?"
            )
            txn.execute(
                sql,
                (metadata_json.decode("UTF-8"), event.event_id,)
            )

            sql = (
                "UPDATE events SET outlier = 0"
                " WHERE event_id = ?"
            )
            txn.execute(
                sql,
                (event.event_id,)
            )
        return

    # Per-event-type side tables.
    if event.type == EventTypes.Member:
        self._store_room_member_txn(txn, event)
    elif event.type == EventTypes.Feedback:
        self._store_feedback_txn(txn, event)
    elif event.type == EventTypes.Name:
        self._store_room_name_txn(txn, event)
    elif event.type == EventTypes.Topic:
        self._store_room_topic_txn(txn, event)
    elif event.type == EventTypes.Redaction:
        self._store_redaction(txn, event)

    event_dict = {
        k: v
        for k, v in event.get_dict().items()
        if k not in [
            "redacted",
            "redacted_because",
        ]
    }

    self._simple_insert_txn(
        txn,
        table="event_json",
        values={
            "event_id": event.event_id,
            "room_id": event.room_id,
            "internal_metadata": metadata_json.decode("UTF-8"),
            "json": encode_canonical_json(event_dict).decode("UTF-8"),
        },
        or_replace=True,
    )

    content = encode_canonical_json(
        event.content
    ).decode("UTF-8")

    vals = {
        "topological_ordering": event.depth,
        "event_id": event.event_id,
        "type": event.type,
        "room_id": event.room_id,
        "content": content,
        "processed": True,
        "outlier": outlier,
        "depth": event.depth,
    }

    if stream_ordering is not None:
        vals["stream_ordering"] = stream_ordering

    # Top-level keys without a dedicated column are preserved as
    # canonical JSON in `unrecognized_keys`.
    unrec = {
        k: v
        for k, v in event.get_dict().items()
        if k not in vals.keys() and k not in [
            "redacted",
            "redacted_because",
            "signatures",
            "hashes",
            "prev_events",
        ]
    }

    vals["unrecognized_keys"] = encode_canonical_json(
        unrec
    ).decode("UTF-8")

    try:
        self._simple_insert_txn(
            txn,
            "events",
            vals,
            or_replace=(not outlier),
            or_ignore=bool(outlier),
        )
    except:
        logger.warn(
            "Failed to persist, probably duplicate: %s",
            event.event_id,
            exc_info=True,
        )
        raise _RollbackButIsFineException("_persist_event")

    if context.rejected:
        self._store_rejections_txn(txn, event.event_id, context.rejected)

    if event.is_state():
        vals = {
            "event_id": event.event_id,
            "room_id": event.room_id,
            "type": event.type,
            "state_key": event.state_key,
        }

        # TODO: How does this work with backfilling?
        if hasattr(event, "replaces_state"):
            vals["prev_state"] = event.replaces_state

        self._simple_insert_txn(
            txn,
            "state_events",
            vals,
            or_replace=True,
        )

        if is_new_state and not context.rejected:
            self._simple_insert_txn(
                txn,
                "current_state_events",
                {
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                },
                or_replace=True,
            )

        for e_id, h in event.prev_state:
            self._simple_insert_txn(
                txn,
                table="event_edges",
                values={
                    "event_id": event.event_id,
                    "prev_event_id": e_id,
                    "room_id": event.room_id,
                    "is_state": 1,
                },
                or_ignore=True,
            )

    # Content hashes declared by this event.
    for hash_alg, hash_base64 in event.hashes.items():
        hash_bytes = decode_base64(hash_base64)
        self._store_event_content_hash_txn(
            txn, event.event_id, hash_alg, hash_bytes,
        )

    # Hashes of each referenced prev event.
    for prev_event_id, prev_hashes in event.prev_events:
        for alg, hash_base64 in prev_hashes.items():
            hash_bytes = decode_base64(hash_base64)
            self._store_prev_event_hash_txn(
                txn, event.event_id, prev_event_id, alg, hash_bytes
            )

    for auth_id, _ in event.auth_events:
        self._simple_insert_txn(
            txn,
            table="event_auth",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "auth_id": auth_id,
            },
            or_ignore=True,
        )

    (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
    self._store_event_reference_hash_txn(
        txn, event.event_id, ref_alg, ref_hash_bytes
    )
def __init__(self, key_server):
    """Pre-compute this resource's canonical-JSON response body for
    *key_server*, then initialise the underlying twisted Resource."""
    self.key_server = key_server
    response = self.response_json_object(key_server)
    self.response_body = encode_canonical_json(response)
    Resource.__init__(self)
def on_receive_pdu(self, origin, pdu, backfilled, state=None,
                   auth_chain=None):
    """ Called by the ReplicationLayer when we have a new pdu. We need to
    do auth checks and put it through the StateHandler.

    Verifies the origin's signature and the content hash (falling back
    to the redacted copy on hash mismatch), fetches state/auth chain if
    we are not in the room, persists the event, and fires off the
    relevant notifications.

    Args:
        origin (str): server we received the PDU from.
        pdu: the received event.
        backfilled (bool): whether the event arrived via backfill.
        state (list|None): room state accompanying the PDU, if any.
        auth_chain (list|None): auth events for the PDU, if any.
    """
    event = pdu

    logger.debug("Got event: %s", event.event_id)

    # If we are currently in the process of joining this room, then we
    # queue up events for later processing.
    if event.room_id in self.room_queues:
        self.room_queues[event.room_id].append((pdu, origin))
        return

    logger.debug("Processing event: %s", event.event_id)

    # Verify the origin's signature over the redacted (pruned) event.
    redacted_event = prune_event(event)

    redacted_pdu_json = redacted_event.get_pdu_json()
    try:
        yield self.keyring.verify_json_for_server(
            event.origin, redacted_pdu_json
        )
    except SynapseError as e:
        logger.warn(
            "Signature check failed for %s redacted to %s",
            encode_canonical_json(pdu.get_pdu_json()),
            encode_canonical_json(redacted_pdu_json),
        )
        raise FederationError(
            "ERROR",
            e.code,
            e.msg,
            affected=event.event_id,
        )

    # On a content-hash mismatch, continue with the redacted copy.
    if not check_event_content_hash(event):
        logger.warn(
            "Event content has been tampered, redacting %s, %s",
            event.event_id, encode_canonical_json(event.get_dict())
        )
        event = redacted_event

    logger.debug("Event: %s", event)

    # FIXME (erikj): Awful hack to make the case where we are not currently
    # in the room work
    current_state = None
    is_in_room = yield self.auth.check_host_in_room(
        event.room_id,
        self.server_name
    )
    if not is_in_room and not event.internal_metadata.outlier:
        logger.debug("Got event for room we're not in.")

        replication = self.replication_layer

        if not state:
            state, auth_chain = yield replication.get_state_for_context(
                origin,
                context=event.room_id,
                event_id=event.event_id,
            )

        if not auth_chain:
            auth_chain = yield replication.get_event_auth(
                origin,
                context=event.room_id,
                event_id=event.event_id,
            )

        # Persist the auth chain as outliers first so auth checks on the
        # real event can resolve.
        for e in auth_chain:
            e.internal_metadata.outlier = True
            try:
                yield self._handle_new_event(e, fetch_auth_from=origin)
            except:
                logger.exception(
                    "Failed to handle auth event %s",
                    e.event_id,
                )

        current_state = state

    if state:
        for e in state:
            logging.info("A :) %r", e)
            e.internal_metadata.outlier = True
            try:
                yield self._handle_new_event(e)
            except:
                logger.exception(
                    "Failed to handle state event %s",
                    e.event_id,
                )

    try:
        yield self._handle_new_event(
            event,
            state=state,
            backfilled=backfilled,
            current_state=current_state,
        )
    except AuthError as e:
        raise FederationError(
            "ERROR",
            e.code,
            e.msg,
            affected=event.event_id,
        )

    # if we're receiving valid events from an origin,
    # it's probably a good idea to mark it as not in retry-state
    # for sending (although this is a bit of a leap)
    retry_timings = yield self.store.get_destination_retry_timings(origin)
    if (retry_timings and retry_timings.retry_last_ts):
        self.store.set_destination_retry_timings(origin, 0, 0)

    room = yield self.store.get_room(event.room_id)

    if not room:
        # First event we've seen for this room: record it, best-effort.
        try:
            yield self.store.store_room(
                room_id=event.room_id,
                room_creator_user_id="",
                is_public=False,
            )
        except StoreError:
            logger.exception("Failed to store room.")

    if not backfilled:
        extra_users = []
        if event.type == EventTypes.Member:
            target_user_id = event.state_key
            target_user = self.hs.parse_userid(target_user_id)
            extra_users.append(target_user)

        yield self.notifier.on_new_room_event(
            event, extra_users=extra_users
        )

    if event.type == EventTypes.Member:
        if event.membership == Membership.JOIN:
            user = self.hs.parse_userid(event.state_key)
            yield self.distributor.fire(
                "user_joined_room", user=user, room_id=event.room_id
            )