def __init__(self, sydent: "Sydent") -> None:
    """Set up the local peer and work out where replication last left off."""
    super().__init__(sydent.config.general.server_name, {})
    self.sydent = sydent
    self.hashing_store = HashingMetadataStore(sydent)

    global_store = GlobalAssociationStore(self.sydent)
    last_seen = global_store.lastIdFromServer(self.servername)
    if last_seen is None:
        # Nothing has been copied to the global table yet: start before
        # the first association ID.
        self.lastId = -1
    else:
        self.lastId = last_seen
def __init__(self, sydent):
    """Set up the local peer and work out where replication last left off."""
    super(LocalPeer, self).__init__(sydent.server_name, {})
    self.sydent = sydent
    self.hashing_store = HashingMetadataStore(sydent)

    global_store = GlobalAssociationStore(self.sydent)
    last_seen = global_store.lastIdFromServer(self.servername)
    # -1 means nothing has been replicated to the global table yet.
    self.lastId = -1 if last_seen is None else last_seen
class LocalPeer(Peer[bool]):
    """
    The local peer (ourselves: essentially copying from the local associations
    table to the global one)
    """

    def __init__(self, sydent: "Sydent") -> None:
        super().__init__(sydent.config.general.server_name, {})
        self.sydent = sydent
        self.hashing_store = HashingMetadataStore(sydent)

        globalAssocStore = GlobalAssociationStore(self.sydent)
        # Resume from the last association ID already copied into the global
        # table; -1 means nothing has been replicated yet.
        lastId = globalAssocStore.lastIdFromServer(self.servername)
        self.lastId = lastId if lastId is not None else -1

    def pushUpdates(self, sgAssocs: SignedAssociations) -> "Deferred[bool]":
        """
        Saves the given associations in the global associations store. Only stores an
        association if its ID is greater than the last seen ID.

        :param sgAssocs: The associations to save.

        :return: A deferred that succeeds with the value `True`.
        """
        globalAssocStore = GlobalAssociationStore(self.sydent)
        for localId in sgAssocs:
            if localId > self.lastId:
                assocObj = threePidAssocFromDict(sgAssocs[localId])

                # ensure we are casefolding email addresses
                assocObj.address = normalise_address(assocObj.address, assocObj.medium)

                if assocObj.mxid is not None:
                    # Assign a lookup_hash to this association so it can be
                    # found via hashed lookups.
                    pepper = self.hashing_store.get_lookup_pepper()
                    if not pepper:
                        raise RuntimeError("No lookup_pepper in the database.")
                    str_to_hash = " ".join(
                        [
                            assocObj.address,
                            assocObj.medium,
                            pepper,
                        ],
                    )
                    assocObj.lookup_hash = sha256_and_url_safe_base64(str_to_hash)

                    # We can probably skip verification for the local peer (although it could
                    # be good as a sanity check)
                    globalAssocStore.addAssociation(
                        assocObj,
                        json.dumps(sgAssocs[localId]),
                        self.sydent.config.general.server_name,
                        localId,
                    )
                else:
                    # A null mxid means the 3PID binding has been deleted.
                    globalAssocStore.removeAssociation(assocObj.medium, assocObj.address)
        d = defer.succeed(True)
        return d
class LocalPeer(Peer):
    """
    The local peer (ourselves: essentially copying from the local associations
    table to the global one)
    """

    def __init__(self, sydent):
        super(LocalPeer, self).__init__(sydent.server_name, {})
        self.sydent = sydent
        self.hashing_store = HashingMetadataStore(sydent)

        globalAssocStore = GlobalAssociationStore(self.sydent)
        # Resume from the last association ID already copied into the global
        # table; -1 means nothing has been replicated yet.
        self.lastId = globalAssocStore.lastIdFromServer(self.servername)
        if self.lastId is None:
            self.lastId = -1

    def pushUpdates(self, sgAssocs):
        """
        Saves the given associations in the global associations store. Only stores an
        association if its ID is greater than the last seen ID.

        :param sgAssocs: The associations to save.
        :type sgAssocs: dict[int, dict[str, any]]

        :return: True
        :rtype: twisted.internet.defer.Deferred[bool]
        """
        globalAssocStore = GlobalAssociationStore(self.sydent)
        for localId in sgAssocs:
            if localId > self.lastId:
                assocObj = threePidAssocFromDict(sgAssocs[localId])
                if assocObj.mxid is not None:
                    # Assign a lookup_hash to this association so it can be
                    # found via hashed lookups.
                    # NOTE(review): get_lookup_pepper() may return None if no
                    # pepper is in the database, which would make join() raise
                    # TypeError — TODO confirm a pepper always exists by now.
                    str_to_hash = u' '.join([
                        assocObj.address,
                        assocObj.medium,
                        self.hashing_store.get_lookup_pepper()
                    ],
                    )
                    assocObj.lookup_hash = sha256_and_url_safe_base64(str_to_hash)

                    # We can probably skip verification for the local peer (although it could
                    # be good as a sanity check)
                    globalAssocStore.addAssociation(
                        assocObj, json.dumps(sgAssocs[localId]),
                        self.sydent.server_name, localId)
                else:
                    # A null mxid means the 3PID binding has been deleted.
                    globalAssocStore.removeAssociation(assocObj.medium, assocObj.address)

        d = defer.succeed(True)
        return d
class LocalPeer(Peer):
    """
    The local peer (ourselves: essentially copying from the local associations
    table to the global one)
    """

    def __init__(self, sydent):
        super(LocalPeer, self).__init__(sydent.server_name, {})
        self.sydent = sydent
        self.hashing_store = HashingMetadataStore(sydent)

        globalAssocStore = GlobalAssociationStore(self.sydent)
        # Resume from the last association ID already copied into the global
        # table; -1 means nothing has been replicated yet.
        self.lastId = globalAssocStore.lastIdFromServer(self.servername)
        if self.lastId is None:
            self.lastId = -1

    def pushUpdates(self, sgAssocs):
        """
        Saves the given associations in the global associations store. Only
        stores an association if its ID is greater than the last seen ID.

        :param sgAssocs: A mapping of local association ID to signed
            association dict.

        :return: A deferred that succeeds with the value `True`.
        """
        globalAssocStore = GlobalAssociationStore(self.sydent)
        for localId in sgAssocs:
            if localId > self.lastId:
                assocObj = threePidAssocFromDict(sgAssocs[localId])
                if assocObj.mxid is not None:
                    # Assign a lookup_hash to this association so it can be
                    # found via hashed lookups.
                    # NOTE(review): get_lookup_pepper() may return None if no
                    # pepper is in the database, which would make join() raise
                    # TypeError — TODO confirm a pepper always exists by now.
                    str_to_hash = ' '.join([
                        assocObj.address,
                        assocObj.medium,
                        self.hashing_store.get_lookup_pepper()
                    ],
                    )
                    assocObj.lookup_hash = sha256_and_url_safe_base64(str_to_hash)

                    # We can probably skip verification for the local peer (although it could
                    # be good as a sanity check)
                    globalAssocStore.addAssociation(
                        assocObj, json.dumps(sgAssocs[localId]),
                        self.sydent.server_name, localId)
                else:
                    # A null mxid means the 3PID binding has been deleted.
                    globalAssocStore.removeAssociation(assocObj.medium, assocObj.address)

        d = defer.succeed(True)
        return d
def __init__(self, sydent):
    """Keep a reference to the Sydent instance and its hashing metadata store."""
    self.hashing_store = HashingMetadataStore(sydent)
    self.sydent = sydent
class ThreepidBinder:
    # the lifetime of a 3pid association
    THREEPID_ASSOCIATION_LIFETIME_MS = 100 * 365 * 24 * 60 * 60 * 1000

    def __init__(self, sydent):
        self.sydent = sydent
        self.hashing_store = HashingMetadataStore(sydent)

    def addBinding(self, medium, address, mxid):
        """
        Binds the given 3pid to the given mxid.

        It's assumed that we have somehow validated that the given user owns
        the given 3pid

        :param medium: The medium of the 3PID to bind.
        :type medium: unicode
        :param address: The address of the 3PID to bind.
        :type address: unicode
        :param mxid: The MXID to bind the 3PID to.
        :type mxid: unicode

        :return: The signed association.
        :rtype: dict[str, any]
        """
        localAssocStore = LocalAssociationStore(self.sydent)

        # Fill out the association details
        createdAt = time_msec()
        expires = createdAt + ThreepidBinder.THREEPID_ASSOCIATION_LIFETIME_MS

        # Hash the medium + address and store that hash for the purposes of
        # later lookups
        # NOTE(review): get_lookup_pepper() may return None if no pepper is in
        # the database, which would make join() raise TypeError — TODO confirm
        # a pepper always exists by the time bindings are created.
        str_to_hash = u' '.join(
            [address, medium, self.hashing_store.get_lookup_pepper()],
        )
        lookup_hash = sha256_and_url_safe_base64(str_to_hash)

        assoc = ThreepidAssociation(
            medium, address, lookup_hash, mxid, createdAt, createdAt, expires,
        )

        localAssocStore.addOrUpdateAssociation(assoc)

        # Kick off replication of the new association to peers.
        self.sydent.pusher.doLocalPush()

        # Attach any pending room-invite tokens for this 3PID to the signed
        # association so the homeserver can act on them.
        joinTokenStore = JoinTokenStore(self.sydent)
        pendingJoinTokens = joinTokenStore.getTokens(medium, address)
        invites = []
        for token in pendingJoinTokens:
            token["mxid"] = mxid
            token["signed"] = {
                "mxid": mxid,
                "token": token["token"],
            }
            token["signed"] = signedjson.sign.sign_json(
                token["signed"], self.sydent.server_name, self.sydent.keyring.ed25519)
            invites.append(token)
        if invites:
            assoc.extra_fields["invites"] = invites
            joinTokenStore.markTokensAsSent(medium, address)

        signer = Signer(self.sydent)
        sgassoc = signer.signedThreePidAssociation(assoc)

        # Notify the MXID's homeserver about the new binding (attempt 0).
        self._notify(sgassoc, 0)

        return sgassoc

    def removeBinding(self, threepid, mxid):
        """
        Removes the binding between a given 3PID and a given MXID.

        :param threepid: The 3PID of the binding to remove.
        :type threepid: dict[unicode, unicode]
        :param mxid: The MXID of the binding to remove.
        :type mxid: unicode
        """
        localAssocStore = LocalAssociationStore(self.sydent)
        localAssocStore.removeAssociation(threepid, mxid)
        # Replicate the deletion to peers.
        self.sydent.pusher.doLocalPush()

    @defer.inlineCallbacks
    def _notify(self, assoc, attempt):
        """
        Sends data about a new association (and, if necessary, the associated
        invites) to the associated MXID's homeserver.

        :param assoc: The association to send down to the homeserver.
        :type assoc: dict[str, any]
        :param attempt: The number of previous attempts to send this association.
        :type attempt: int
        """
        mxid = assoc["mxid"]
        mxid_parts = mxid.split(":", 1)

        if len(mxid_parts) != 2:
            logger.error(
                "Can't notify on bind for unparseable mxid %s. Not retrying.",
                assoc["mxid"],
            )
            return

        # The homeserver name is everything after the first colon of the MXID.
        post_url = "matrix://%s/_matrix/federation/v1/3pid/onbind" % (
            mxid_parts[1],
        )

        logger.info("Making bind callback to: %s", post_url)

        # Make a POST to the chosen Synapse server
        http_client = FederationHttpClient(self.sydent)
        try:
            response = yield http_client.post_json_get_nothing(post_url, assoc, {})
        except Exception as e:
            self._notifyErrback(assoc, attempt, e)
            return

        # If the request failed, try again with exponential backoff
        if response.code != 200:
            self._notifyErrback(
                assoc, attempt, "Non-OK error code received (%d)" % response.code)
        else:
            logger.info("Successfully notified on bind for %s" % (mxid, ))

            # Skip the deletion step if instructed so by the config.
            if not self.sydent.delete_tokens_on_bind:
                return

            # Only remove sent tokens when they've been successfully sent.
            try:
                joinTokenStore = JoinTokenStore(self.sydent)
                joinTokenStore.deleteTokens(assoc["medium"], assoc["address"])
                logger.info(
                    "Successfully deleted invite for %s from the store",
                    assoc["address"],
                )
            except Exception as e:
                # Deletion is best-effort: log and carry on.
                logger.exception(
                    "Couldn't remove invite for %s from the store",
                    assoc["address"],
                )

    def _notifyErrback(self, assoc, attempt, error):
        """
        Handles errors when trying to send an association down to a homeserver
        by logging the error and scheduling a new attempt.

        :param assoc: The association to send down to the homeserver.
        :type assoc: dict[str, any]
        :param attempt: The number of previous attempts to send this association.
        :type attempt: int
        :param error: The error that was raised when trying to send the association.
        :type error: Exception
        """
        logger.warning("Error notifying on bind for %s: %s - rescheduling",
                       assoc["mxid"], error)
        # Exponential backoff: retry after 2**attempt seconds.
        self.sydent.reactor.callLater(math.pow(2, attempt), self._notify, assoc,
                                      attempt + 1)


# The below is lovingly ripped off of synapse/http/endpoint.py

_Server = collections.namedtuple("_Server", "priority weight host port")
def __init__(self, cfg, reactor=twisted.internet.reactor):
    """
    Builds and wires up the whole Sydent server from the parsed config.

    Order matters here: the lookup pepper MUST exist in the database before
    any servlet starts serving requests.

    :param cfg: The parsed configuration (ConfigParser-style).
    :param reactor: The Twisted reactor to run on (injectable for tests).
    """
    self.reactor = reactor
    self.config_file = get_config_file_path()

    self.cfg = cfg

    logger.info("Starting Sydent server")

    self.pidfile = self.cfg.get('general', "pidfile.path")

    self.db = SqliteDatabase(self).db

    self.server_name = self.cfg.get('general', 'server.name')
    if self.server_name == '':
        # No server name configured: fall back to the machine's hostname and
        # persist that guess back into the config file.
        self.server_name = os.uname()[1]
        logger.warn((
            "You had not specified a server name. I have guessed that this server is called '%s' "
            + "and saved this in the config file. If this is incorrect, you should edit server.name in "
            + "the config file.") % (self.server_name, ))
        self.cfg.set('general', 'server.name', self.server_name)
        self.save_config()

    if self.cfg.has_option("general", "sentry_dsn"):
        # Only import and start sentry SDK if configured.
        import sentry_sdk
        sentry_sdk.init(dsn=self.cfg.get("general", "sentry_dsn"), )
        with sentry_sdk.configure_scope() as scope:
            scope.set_tag("sydent_server_name", self.server_name)

    if self.cfg.has_option("general", "prometheus_port"):
        # Only start the metrics endpoint if configured.
        import prometheus_client
        prometheus_client.start_http_server(
            port=self.cfg.getint("general", "prometheus_port"),
            addr=self.cfg.get("general", "prometheus_addr"),
        )

    self.enable_v1_associations = parse_cfg_bool(
        self.cfg.get("general", "enable_v1_associations"))

    self.delete_tokens_on_bind = parse_cfg_bool(
        self.cfg.get("general", "delete_tokens_on_bind"))

    # See if a pepper already exists in the database
    # Note: This MUST be run before we start serving requests, otherwise lookups for
    # 3PID hashes may come in before we've completed generating them
    hashing_metadata_store = HashingMetadataStore(self)
    lookup_pepper = hashing_metadata_store.get_lookup_pepper()
    if not lookup_pepper:
        # No pepper defined in the database, generate one
        lookup_pepper = generateAlphanumericTokenOfLength(5)

        # Store it in the database and rehash 3PIDs
        hashing_metadata_store.store_lookup_pepper(
            sha256_and_url_safe_base64, lookup_pepper)

    self.validators = Validators()
    self.validators.email = EmailValidator(self)
    self.validators.msisdn = MsisdnValidator(self)

    self.keyring = Keyring()
    self.keyring.ed25519 = SydentEd25519(self).signing_key
    self.keyring.ed25519.alg = 'ed25519'

    self.sig_verifier = Verifier(self)

    # Instantiate every HTTP servlet the server exposes.
    self.servlets = Servlets()
    self.servlets.v1 = V1Servlet(self)
    self.servlets.v2 = V2Servlet(self)
    self.servlets.emailRequestCode = EmailRequestCodeServlet(self)
    self.servlets.emailValidate = EmailValidateCodeServlet(self)
    self.servlets.msisdnRequestCode = MsisdnRequestCodeServlet(self)
    self.servlets.msisdnValidate = MsisdnValidateCodeServlet(self)
    self.servlets.lookup = LookupServlet(self)
    self.servlets.bulk_lookup = BulkLookupServlet(self)
    self.servlets.hash_details = HashDetailsServlet(self, lookup_pepper)
    self.servlets.lookup_v2 = LookupV2Servlet(self, lookup_pepper)
    self.servlets.pubkey_ed25519 = Ed25519Servlet(self)
    self.servlets.pubkeyIsValid = PubkeyIsValidServlet(self)
    self.servlets.ephemeralPubkeyIsValid = EphemeralPubkeyIsValidServlet(self)
    self.servlets.threepidBind = ThreePidBindServlet(self)
    self.servlets.threepidUnbind = ThreePidUnbindServlet(self)
    self.servlets.replicationPush = ReplicationPushServlet(self)
    self.servlets.getValidated3pid = GetValidated3pidServlet(self)
    self.servlets.storeInviteServlet = StoreInviteServlet(self)
    self.servlets.blindlySignStuffServlet = BlindlySignStuffServlet(self)
    self.servlets.termsServlet = TermsServlet(self)
    self.servlets.accountServlet = AccountServlet(self)
    self.servlets.registerServlet = RegisterServlet(self)
    self.servlets.logoutServlet = LogoutServlet(self)

    self.threepidBinder = ThreepidBinder(self)

    self.sslComponents = SslComponents(self)

    self.clientApiHttpServer = ClientApiHttpServer(self)
    self.replicationHttpsServer = ReplicationHttpsServer(self)
    self.replicationHttpsClient = ReplicationHttpsClient(self)

    self.pusher = Pusher(self)

    # A dedicated validation session store just to clean up old sessions every N minutes
    self.cleanupValSession = ThreePidValSessionStore(self)
    cb = task.LoopingCall(self.cleanupValSession.deleteOldSessions)
    cb.clock = self.reactor
    cb.start(10 * 60.0)
class ReplicationPushServlet(Resource):
    def __init__(self, sydent):
        self.sydent = sydent
        self.hashing_store = HashingMetadataStore(sydent)

    @jsonwrap
    def render_POST(self, request):
        """
        Receives a batch of signed associations pushed by a replication peer,
        verifies each signature and stores all of them in one transaction
        (all-or-nothing: any verification failure rolls the whole batch back).

        The peer is identified by the common name of its TLS client
        certificate.

        :param request: The HTTP request.
        :return: {'success': True} on success, or an error dict with the
            failed origin IDs (and a 400 response code) on failure.
        """
        peerCert = request.transport.getPeerCertificate()
        peerCertCn = peerCert.get_subject().commonName

        peerStore = PeerStore(self.sydent)

        peer = peerStore.getPeerByName(peerCertCn)

        if not peer:
            logger.warn("Got connection from %s but no peer found by that name",
                        peerCertCn)
            raise MatrixRestError(403, 'M_UNKNOWN_PEER',
                                  'This peer is not known to this server')

        logger.info("Push connection made from peer %s", peer.servername)

        # Fetch the Content-Type header once: getRawHeaders() returns None when
        # the header is absent, so indexing it unconditionally in the log call
        # would raise TypeError instead of returning the intended 400.
        content_type_headers = request.requestHeaders.getRawHeaders('Content-Type')
        content_type = content_type_headers[0] if content_type_headers else None
        if content_type != 'application/json':
            logger.warn("Peer %s made push connection with non-JSON content (type: %s)",
                        peer.servername, content_type)
            raise MatrixRestError(400, 'M_NOT_JSON', 'This endpoint expects JSON')

        try:
            # json.loads doesn't allow bytes in Python 3.5
            inJson = json.loads(request.content.read().decode("UTF-8"))
        except ValueError:
            logger.warn("Peer %s made push connection with malformed JSON",
                        peer.servername)
            raise MatrixRestError(400, 'M_BAD_JSON', 'Malformed JSON')

        if 'sgAssocs' not in inJson:
            logger.warn("Peer %s made push connection with no 'sgAssocs' key in JSON",
                        peer.servername)
            raise MatrixRestError(400, 'M_BAD_JSON', 'No "sgAssocs" key in JSON')

        failedIds = []

        globalAssocsStore = GlobalAssociationStore(self.sydent)

        # Ensure items are pulled out of the dictionary in order of origin_id.
        sg_assocs = inJson.get('sgAssocs', {})
        sg_assocs = sorted(sg_assocs.items(), key=lambda k: int(k[0]))

        for originId, sgAssoc in sg_assocs:
            try:
                peer.verifySignedAssociation(sgAssoc)
                logger.debug("Signed association from %s with origin ID %s verified",
                             peer.servername, originId)

                # Don't bother adding if one has already failed: we add all of them or none so
                # we're only going to roll back the transaction anyway (but we continue to try
                # & verify the rest so we can give a complete list of the ones that don't
                # verify)
                if len(failedIds) > 0:
                    continue

                assocObj = threePidAssocFromDict(sgAssoc)

                if assocObj.mxid is not None:
                    # Calculate the lookup hash with our own pepper for this association
                    str_to_hash = u' '.join([
                        assocObj.address,
                        assocObj.medium,
                        self.hashing_store.get_lookup_pepper()
                    ],
                    )
                    assocObj.lookup_hash = sha256_and_url_safe_base64(str_to_hash)

                    # Add this association; commit only once the whole batch
                    # has been stored.
                    globalAssocsStore.addAssociation(
                        assocObj, json.dumps(sgAssoc), peer.servername, originId,
                        commit=False)
                else:
                    logger.info("Incoming deletion: removing associations for %s / %s",
                                assocObj.medium, assocObj.address)
                    globalAssocsStore.removeAssociation(assocObj.medium,
                                                        assocObj.address)
                logger.info("Stored association origin ID %s from %s",
                            originId, peer.servername)
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
                # are no longer swallowed as a "failed association".
                failedIds.append(originId)
                logger.warn("Failed to verify signed association from %s with origin ID %s",
                            peer.servername, originId)
                twisted.python.log.err()

        if len(failedIds) > 0:
            # At least one association failed verification: abandon the batch.
            self.sydent.db.rollback()
            request.setResponseCode(400)
            return {
                'errcode': 'M_VERIFICATION_FAILED',
                'error': 'Verification failed for one or more associations',
                'failed_ids': failedIds
            }
        else:
            self.sydent.db.commit()
            return {'success': True}
def __init__(self, sydent: "Sydent") -> None:
    """Keep a reference to the Sydent instance and its hashing metadata store."""
    self.hashing_store = HashingMetadataStore(sydent)
    self.sydent = sydent
class ReplicationPushServlet(Resource):
    def __init__(self, sydent: "Sydent") -> None:
        self.sydent = sydent
        self.hashing_store = HashingMetadataStore(sydent)

    @jsonwrap
    def render_POST(self, request: Request) -> JsonDict:
        """
        Receives a batch of signed associations pushed by a replication peer,
        verifies each signature and stores all of them in one transaction
        (all-or-nothing: any verification failure rolls the whole batch back).

        The peer is identified by the common name of its TLS client
        certificate.

        :param request: The HTTP request.
        :return: {'success': True} on success, or an error dict with the
            failed origin IDs (and a 400 response code) on failure.
        """
        # Cast safety: This request has an ISSLTransport because this servlet
        # is a resource under the ReplicationHttpsServer and nowhere else.
        request.transport = cast(ISSLTransport, request.transport)
        peerCert = cast(X509, request.transport.getPeerCertificate())
        peerCertCn = peerCert.get_subject().commonName

        peerStore = PeerStore(self.sydent)

        peer = peerStore.getPeerByName(peerCertCn)

        if not peer:
            logger.warning(
                "Got connection from %s but no peer found by that name", peerCertCn
            )
            raise MatrixRestError(
                403, "M_UNKNOWN_PEER", "This peer is not known to this server"
            )

        logger.info("Push connection made from peer %s", peer.servername)

        # Fetch the Content-Type header once: getRawHeaders() returns None when
        # the header is absent, so indexing it unconditionally in the log call
        # would raise TypeError instead of returning the intended 400.
        content_type_headers = request.requestHeaders.getRawHeaders("Content-Type")
        content_type = content_type_headers[0] if content_type_headers else None
        if content_type != "application/json":
            logger.warning(
                "Peer %s made push connection with non-JSON content (type: %s)",
                peer.servername,
                content_type,
            )
            raise MatrixRestError(400, "M_NOT_JSON", "This endpoint expects JSON")

        try:
            # json.loads doesn't allow bytes in Python 3.5
            inJson = json_decoder.decode(request.content.read().decode("UTF-8"))
        except ValueError:
            logger.warning(
                "Peer %s made push connection with malformed JSON", peer.servername
            )
            raise MatrixRestError(400, "M_BAD_JSON", "Malformed JSON")

        if "sgAssocs" not in inJson:
            logger.warning(
                "Peer %s made push connection with no 'sgAssocs' key in JSON",
                peer.servername,
            )
            raise MatrixRestError(400, "M_BAD_JSON", 'No "sgAssocs" key in JSON')

        failedIds: List[int] = []

        globalAssocsStore = GlobalAssociationStore(self.sydent)

        # Ensure items are pulled out of the dictionary in order of origin_id.
        sg_assocs_raw: SignedAssociations = inJson.get("sgAssocs", {})
        sg_assocs = sorted(sg_assocs_raw.items(), key=lambda k: int(k[0]))

        for originId, sgAssoc in sg_assocs:
            try:
                peer.verifySignedAssociation(sgAssoc)
                logger.debug(
                    "Signed association from %s with origin ID %s verified",
                    peer.servername,
                    originId,
                )

                # Don't bother adding if one has already failed: we add all of them or none so
                # we're only going to roll back the transaction anyway (but we continue to try
                # & verify the rest so we can give a complete list of the ones that don't
                # verify)
                if len(failedIds) > 0:
                    continue

                assocObj = threePidAssocFromDict(sgAssoc)

                # ensure we are casefolding email addresses before hashing/storing
                assocObj.address = normalise_address(assocObj.address, assocObj.medium)

                if assocObj.mxid is not None:
                    # Calculate the lookup hash with our own pepper for this association
                    pepper = self.hashing_store.get_lookup_pepper()
                    # Raise (rather than assert, which is stripped under -O and
                    # inconsistent with LocalPeer.pushUpdates); the error is
                    # caught below and the association is reported as failed.
                    if pepper is None:
                        raise RuntimeError("No lookup_pepper in the database.")
                    str_to_hash = " ".join(
                        [assocObj.address, assocObj.medium, pepper],
                    )
                    assocObj.lookup_hash = sha256_and_url_safe_base64(str_to_hash)

                    # Add this association; commit only once the whole batch
                    # has been stored.
                    globalAssocsStore.addAssociation(
                        assocObj,
                        json.dumps(sgAssoc),
                        peer.servername,
                        originId,
                        commit=False,
                    )
                else:
                    logger.info(
                        "Incoming deletion: removing associations for %s / %s",
                        assocObj.medium,
                        assocObj.address,
                    )
                    globalAssocsStore.removeAssociation(
                        assocObj.medium, assocObj.address
                    )
                logger.info(
                    "Stored association origin ID %s from %s", originId, peer.servername
                )
            except Exception:
                failedIds.append(originId)
                logger.warning(
                    "Failed to verify signed association from %s with origin ID %s",
                    peer.servername,
                    originId,
                )
                twisted.python.log.err()

        if len(failedIds) > 0:
            # At least one association failed verification: abandon the batch.
            self.sydent.db.rollback()
            request.setResponseCode(400)
            return {
                "errcode": "M_VERIFICATION_FAILED",
                "error": "Verification failed for one or more associations",
                "failed_ids": failedIds,
            }
        else:
            self.sydent.db.commit()
            return {"success": True}
def __init__(
    self,
    sydent_config: SydentConfig,
    reactor: SydentReactor = twisted.internet.reactor,  # type: ignore[assignment]
    use_tls_for_federation: bool = True,
):
    """
    Builds and wires up the whole Sydent server from the parsed config.

    Order matters here: the lookup pepper MUST exist in the database before
    the servlets start serving requests.

    :param sydent_config: The parsed Sydent configuration.
    :param reactor: The Twisted reactor to run on (injectable for tests).
    :param use_tls_for_federation: Whether to use TLS when talking to
        homeservers.
    """
    self.config = sydent_config

    self.reactor = reactor
    self.use_tls_for_federation = use_tls_for_federation

    logger.info("Starting Sydent server")

    self.db: sqlite3.Connection = SqliteDatabase(self).db

    if self.config.general.sentry_enabled:
        # Only import and start the sentry SDK if configured.
        import sentry_sdk

        sentry_sdk.init(
            dsn=self.config.general.sentry_dsn, release=get_version_string()
        )
        with sentry_sdk.configure_scope() as scope:
            scope.set_tag("sydent_server_name", self.config.general.server_name)

        # workaround for https://github.com/getsentry/sentry-python/issues/803: we
        # disable automatic GC and run it periodically instead.
        gc.disable()
        cb = task.LoopingCall(run_gc)
        cb.clock = self.reactor
        cb.start(1.0)

    # See if a pepper already exists in the database
    # Note: This MUST be run before we start serving requests, otherwise lookups for
    # 3PID hashes may come in before we've completed generating them
    hashing_metadata_store = HashingMetadataStore(self)
    lookup_pepper = hashing_metadata_store.get_lookup_pepper()
    if not lookup_pepper:
        # No pepper defined in the database, generate one
        lookup_pepper = generateAlphanumericTokenOfLength(5)

        # Store it in the database and rehash 3PIDs
        hashing_metadata_store.store_lookup_pepper(
            sha256_and_url_safe_base64, lookup_pepper
        )

    self.validators: Validators = Validators(
        EmailValidator(self), MsisdnValidator(self)
    )

    self.keyring: Keyring = Keyring(self.config.crypto.signing_key)
    self.keyring.ed25519.alg = "ed25519"

    self.sig_verifier: Verifier = Verifier(self)

    self.servlets: Servlets = Servlets(self, lookup_pepper)

    self.threepidBinder: ThreepidBinder = ThreepidBinder(self)

    self.sslComponents: SslComponents = SslComponents(self)

    self.clientApiHttpServer = ClientApiHttpServer(self)
    self.replicationHttpsServer = ReplicationHttpsServer(self)
    self.replicationHttpsClient: ReplicationHttpsClient = ReplicationHttpsClient(
        self
    )

    self.pusher: Pusher = Pusher(self)