def __init__(self, config: HomeServerConfig):
    super(MockHomeserver, self).__init__(
        hostname=config.server.server_name,
        config=config,
        reactor=reactor,
        version_string="Synapse/"
        + get_distribution_version_string("matrix-synapse"),
    )
def start(config_options: List[str]) -> None:
    try:
        config = HomeServerConfig.load_config("Synapse worker", config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    # For backwards compatibility, allow any of the old app names.
    assert config.worker.worker_app in (
        "synapse.app.appservice",
        "synapse.app.client_reader",
        "synapse.app.event_creator",
        "synapse.app.federation_reader",
        "synapse.app.federation_sender",
        "synapse.app.frontend_proxy",
        "synapse.app.generic_worker",
        "synapse.app.media_repository",
        "synapse.app.pusher",
        "synapse.app.synchrotron",
        "synapse.app.user_dir",
    )

    synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
    synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage

    if config.server.gc_seconds:
        synapse.metrics.MIN_TIME_BETWEEN_GCS = config.server.gc_seconds

    hs = GenericWorkerServer(
        config.server.server_name,
        config=config,
        version_string="Synapse/"
        + get_distribution_version_string("matrix-synapse"),
    )

    setup_logging(hs, config, use_worker_options=True)

    try:
        hs.setup()

        # Ensure the replication streamer is always started in case we write to any
        # streams. Will no-op if no streams can be written to by this worker.
        hs.get_replication_streamer()
    except Exception as e:
        handle_startup_exception(e)

    register_start(_base.start, hs)

    # redirect stdio to the logs, if configured.
    if not hs.config.logging.no_redirect_stdio:
        redirect_stdio_to_logs()

    _base.start_worker_reactor("synapse-generic-worker", config)
def setup_sentry(hs: "HomeServer") -> None:
    """Enable sentry integration, if enabled in configuration"""

    if not hs.config.metrics.sentry_enabled:
        return

    import sentry_sdk

    sentry_sdk.init(
        dsn=hs.config.metrics.sentry_dsn,
        release=get_distribution_version_string("matrix-synapse"),
    )

    # We set some default tags that give some context to this instance
    with sentry_sdk.configure_scope() as scope:
        scope.set_tag("matrix_server_name", hs.config.server.server_name)

        app = (
            hs.config.worker.worker_app
            if hs.config.worker.worker_app
            else "synapse.app.homeserver"
        )
        name = hs.get_instance_name()
        scope.set_tag("worker_app", app)
        scope.set_tag("worker_name", name)
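# Hedged sketch (not part of the source): the sentry_sdk scope-tagging pattern used
# by setup_sentry above, shown standalone. The DSN, release, and tag values here are
# illustrative placeholders. Tags set on the scope are attached to every subsequent
# Sentry event, which is how homeserver/worker context ends up on error reports.
import sentry_sdk

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    release="Synapse/x.y.z",  # would normally come from get_distribution_version_string
)

with sentry_sdk.configure_scope() as scope:
    scope.set_tag("matrix_server_name", "example.com")
    scope.set_tag("worker_app", "synapse.app.generic_worker")
    scope.set_tag("worker_name", "worker1")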
def setup_logging(
    hs: "HomeServer",
    config: "HomeServerConfig",
    use_worker_options: bool = False,
    logBeginner: LogBeginner = globalLogBeginner,
) -> None:
    """
    Set up the logging subsystem.

    Args:
        hs: The homeserver instance.
        config: The homeserver configuration.
        use_worker_options: True to use the 'worker_log_config' option
            instead of 'log_config'.
        logBeginner: The Twisted logBeginner to use.
    """
    log_config_path = (
        config.worker.worker_log_config
        if use_worker_options
        else config.logging.log_config
    )

    # Perform one-time logging configuration.
    _setup_stdlib_logging(config, log_config_path, logBeginner=logBeginner)

    # Add a SIGHUP handler to reload the logging configuration, if one is available.
    from synapse.app import _base as appbase

    appbase.register_sighup(_reload_logging_config, log_config_path)

    # Log immediately so we can grep backwards.
    logging.warning("***** STARTING SERVER *****")
    logging.warning(
        "Server %s version %s",
        sys.argv[0],
        get_distribution_version_string("matrix-synapse"),
    )
    logging.info("Server hostname: %s", config.server.server_name)
    logging.info("Instance name: %s", hs.get_instance_name())
# Twisted and canonicaljson will fail to import when this file is executed to
# get the __version__ during a fresh install. That's OK and subsequent calls to
# actually start Synapse will import these libraries fine.
try:
    from twisted.internet import protocol
    from twisted.internet.protocol import Factory
    from twisted.names.dns import DNSDatagramProtocol

    protocol.Factory.noisy = False
    Factory.noisy = False
    DNSDatagramProtocol.noisy = False
except ImportError:
    pass

# Use the standard library json implementation instead of simplejson.
try:
    from canonicaljson import set_json_library

    set_json_library(json)
except ImportError:
    pass

__version__ = get_distribution_version_string("matrix-synapse")

if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
    # We import here so that we don't have to install a bunch of deps when
    # running the packaging tox test.
    from synapse.util.patch_inline_callbacks import do_patch

    do_patch()
def setup(config_options: List[str]) -> SynapseHomeServer:
    """
    Args:
        config_options: The options passed to Synapse. Usually `sys.argv[1:]`.

    Returns:
        A homeserver instance.
    """
    try:
        config = HomeServerConfig.load_or_generate_config(
            "Synapse Homeserver", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n")
        for f in format_config_error(e):
            sys.stderr.write(f)
        sys.stderr.write("\n")
        sys.exit(1)

    if not config:
        # If a config isn't returned, and an exception isn't raised, we're just
        # generating config files and shouldn't try to continue.
        sys.exit(0)

    if config.worker.worker_app:
        raise ConfigError(
            "You have specified `worker_app` in the config but are attempting to start a non-worker "
            "instance. Please use `python -m synapse.app.generic_worker` instead (or remove the option if this is the main process)."
        )

    events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
    synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage

    if config.server.gc_seconds:
        synapse.metrics.MIN_TIME_BETWEEN_GCS = config.server.gc_seconds

    if (
        config.registration.enable_registration
        and not config.registration.enable_registration_without_verification
    ):
        if (
            not config.captcha.enable_registration_captcha
            and not config.registration.registrations_require_3pid
            and not config.registration.registration_requires_token
        ):
            raise ConfigError(
                "You have enabled open registration without any verification. This is a known vector for "
                "spam and abuse. If you would like to allow public registration, please consider adding email, "
                "captcha, or token-based verification. Otherwise this check can be removed by setting the "
                "`enable_registration_without_verification` config option to `true`."
            )

    hs = SynapseHomeServer(
        config.server.server_name,
        config=config,
        version_string="Synapse/"
        + get_distribution_version_string("matrix-synapse"),
    )

    synapse.config.logger.setup_logging(hs, config, use_worker_options=False)

    logger.info("Setting up server")

    try:
        hs.setup()
    except Exception as e:
        handle_startup_exception(e)

    async def start() -> None:
        # Load the OIDC provider metadata, if OIDC is enabled.
        if hs.config.oidc.oidc_enabled:
            oidc = hs.get_oidc_handler()
            # Loading the provider metadata also ensures the provider config is valid.
            await oidc.load_metadata()

        await _base.start(hs)

        hs.get_datastores().main.db_pool.updates.start_doing_background_updates()

    register_start(start)

    return hs
def log_failure(
    failure: Failure, msg: str, consumeErrors: bool = True
) -> Optional[Failure]:
    """An errback suitable for passing to `Deferred.addErrback` that logs any
    failures that occur.

    Args:
        failure: The Failure to log
        msg: Message to log
        consumeErrors: If true consumes the failure, otherwise passes on down
            the callback chain

    Returns:
        The Failure if consumeErrors is false. None, otherwise.
    """
    logger.error(
        msg, exc_info=(failure.type, failure.value, failure.getTracebackObject())  # type: ignore[arg-type]
    )

    if not consumeErrors:
        return failure
    return None


# Version string with git info. Computed here once so that we don't invoke git multiple
# times.
SYNAPSE_VERSION = get_distribution_version_string("matrix-synapse", __file__)
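# Hedged usage sketch (not part of the source): log_failure is designed to be attached
# via Deferred.addErrback, which passes the Failure as the first argument and forwards
# any extra positional arguments. `fire_and_forget_task` is a hypothetical stand-in.
from twisted.internet import defer


def fire_and_forget_task() -> None:
    raise RuntimeError("boom")


d = defer.maybeDeferred(fire_and_forget_task)
# The failure is logged (and consumed, since consumeErrors defaults to True)
# instead of surfacing as an unhandled error in the Deferred.
d.addErrback(log_failure, "fire_and_forget_task failed")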
def __init__(self, hs: "HomeServer"):
    self.res = {
        "server_version": get_distribution_version_string("matrix-synapse"),
        "python_version": platform.python_version(),
    }
def __init__(
    self,
    sydent_config: SydentConfig,
    reactor: SydentReactor = twisted.internet.reactor,  # type: ignore[assignment]
    use_tls_for_federation: bool = True,
):
    self.config = sydent_config
    self.reactor = reactor
    self.use_tls_for_federation = use_tls_for_federation

    logger.info("Starting Sydent server")

    self.db: sqlite3.Connection = SqliteDatabase(self).db

    if self.config.general.sentry_enabled:
        import sentry_sdk

        sentry_sdk.init(
            dsn=self.config.general.sentry_dsn,
            release=get_distribution_version_string("matrix-sydent"),
        )
        with sentry_sdk.configure_scope() as scope:
            scope.set_tag("sydent_server_name", self.config.general.server_name)

        # workaround for https://github.com/getsentry/sentry-python/issues/803: we
        # disable automatic GC and run it periodically instead.
        gc.disable()
        cb = task.LoopingCall(run_gc)
        cb.clock = self.reactor
        cb.start(1.0)

    # See if a pepper already exists in the database
    # Note: This MUST be run before we start serving requests, otherwise lookups for
    # 3PID hashes may come in before we've completed generating them
    hashing_metadata_store = HashingMetadataStore(self)
    lookup_pepper = hashing_metadata_store.get_lookup_pepper()
    if not lookup_pepper:
        # No pepper defined in the database, generate one
        lookup_pepper = generateAlphanumericTokenOfLength(5)

        # Store it in the database and rehash 3PIDs
        hashing_metadata_store.store_lookup_pepper(
            sha256_and_url_safe_base64, lookup_pepper
        )

    self.validators: Validators = Validators(
        EmailValidator(self), MsisdnValidator(self)
    )

    self.keyring: Keyring = Keyring(self.config.crypto.signing_key)
    self.keyring.ed25519.alg = "ed25519"

    self.sig_verifier: Verifier = Verifier(self)

    self.servlets: Servlets = Servlets(self, lookup_pepper)

    self.threepidBinder: ThreepidBinder = ThreepidBinder(self)

    self.sslComponents: SslComponents = SslComponents(self)

    self.clientApiHttpServer = ClientApiHttpServer(self)
    self.replicationHttpsServer = ReplicationHttpsServer(self)
    self.replicationHttpsClient: ReplicationHttpsClient = ReplicationHttpsClient(self)

    self.pusher: Pusher = Pusher(self)
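# Hedged sketch (not part of the source) of the periodic-GC workaround used in the
# Sentry branch above: automatic collection is disabled and a Twisted LoopingCall
# triggers collection on a fixed interval instead. `run_gc` here is a simplified
# stand-in for the helper referenced by the source.
import gc

from twisted.internet import reactor, task


def run_gc() -> None:
    gc.collect()


gc.disable()
loop = task.LoopingCall(run_gc)
loop.clock = reactor
loop.start(1.0)  # seconds between runs, matching the interval used above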
def __init__(self, config: HomeServerConfig):
    self.clock = Clock(reactor)
    self.config = config
    self.hostname = config.server.server_name
    self.version_string = "Synapse/" + get_distribution_version_string(
        "matrix-synapse"
    )
def start(config_options: List[str]) -> None:
    parser = argparse.ArgumentParser(description="Synapse Admin Command")
    HomeServerConfig.add_arguments_to_parser(parser)

    subparser = parser.add_subparsers(
        title="Admin Commands",
        required=True,
        dest="command",
        metavar="<admin_command>",
        help="The admin command to perform.",
    )
    export_data_parser = subparser.add_parser(
        "export-data", help="Export all data for a user"
    )
    export_data_parser.add_argument("user_id", help="User to extract data from")
    export_data_parser.add_argument(
        "--output-directory",
        action="store",
        metavar="DIRECTORY",
        required=False,
        help=(
            "The directory to store the exported data in. Must be empty. Defaults"
            " to creating a temp directory."
        ),
    )
    export_data_parser.set_defaults(func=export_data_command)

    try:
        config, args = HomeServerConfig.load_config_with_parser(parser, config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    if config.worker.worker_app is not None:
        assert config.worker.worker_app == "synapse.app.admin_cmd"

    # Update the config with some basic overrides so that we don't have to specify
    # a full worker config.
    config.worker.worker_app = "synapse.app.admin_cmd"

    if not config.worker.worker_daemonize and not config.worker.worker_log_config:
        # Since we're meant to be run as a "command" let's not redirect stdio
        # unless we've actually set log config.
        config.logging.no_redirect_stdio = True

    # Explicitly disable background processes
    config.worker.should_update_user_directory = False
    config.worker.run_background_tasks = False
    config.worker.start_pushers = False
    config.worker.pusher_shard_config.instances = []
    config.worker.send_federation = False
    config.worker.federation_shard_config.instances = []

    synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts

    ss = AdminCmdServer(
        config.server.server_name,
        config=config,
        version_string="Synapse/"
        + get_distribution_version_string("matrix-synapse"),
    )

    setup_logging(ss, config, use_worker_options=True)

    ss.setup()

    # We use task.react as the basic run command as it correctly handles tearing
    # down the reactor when the deferreds resolve and setting the return value.
    # We also make sure that `_base.start` gets run before we actually run the
    # command.

    async def run() -> None:
        with LoggingContext("command"):
            await _base.start(ss)
            await args.func(ss, args)

    _base.start_worker_reactor(
        "synapse-admin-cmd",
        config,
        run_command=lambda: task.react(
            lambda _reactor: defer.ensureDeferred(run())
        ),
    )
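# Hedged sketch (not part of the source): the task.react + defer.ensureDeferred
# pattern used for run_command above, in isolation. react() starts the reactor,
# waits for the returned Deferred to fire, then stops the reactor and sets the
# process exit code accordingly. The async body here is a placeholder.
from twisted.internet import defer, task


async def run() -> None:
    print("admin command body would run here")


task.react(lambda _reactor: defer.ensureDeferred(run()))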