Example #1
    def read_config(self, config: JsonDict, **kwargs: Any) -> None:
        self.spam_checkers: List[Tuple[Any, Dict]] = []

        spam_checkers = config.get("spam_checker") or []
        if isinstance(spam_checkers, dict):
            # The spam_checker config option used to only support one
            # spam checker, and thus was simply a dictionary with module
            # and config keys. Support this old behaviour by checking
            # to see if the option resolves to a dictionary
            self.spam_checkers.append(
                load_module(spam_checkers, ("spam_checker", )))
        elif isinstance(spam_checkers, list):
            for i, spam_checker in enumerate(spam_checkers):
                config_path = ("spam_checker", "<item %i>" % i)
                if not isinstance(spam_checker, dict):
                    raise ConfigError("expected a mapping", config_path)

                self.spam_checkers.append(
                    load_module(spam_checker, config_path))
        else:
            raise ConfigError("spam_checker syntax is incorrect")

        # If this configuration is being used in any way, warn the admin that it is going
        # away soon.
        if self.spam_checkers:
            logger.warning(LEGACY_SPAM_CHECKER_WARNING)
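
For reference, a minimal sketch of the two config shapes this reader accepts, written as the already-parsed Python dicts that read_config receives; the module path and options are hypothetical, not taken from the examples.

# Legacy form: a single mapping with "module" and "config" keys.
legacy_config = {
    "spam_checker": {
        "module": "my_project.spam.ExampleSpamChecker",  # hypothetical module path
        "config": {"blocked_domains": ["spam.example.com"]},  # hypothetical options
    }
}

# Current form: a list of such mappings, one per checker.
list_config = {
    "spam_checker": [
        {
            "module": "my_project.spam.ExampleSpamChecker",  # hypothetical
            "config": {"blocked_domains": ["spam.example.com"]},
        },
    ]
}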
Example #2
    def make_homeserver(self, reactor, clock):

        self.storage_path = self.mktemp()
        os.mkdir(self.storage_path)

        config = self.default_config()
        config.url_preview_enabled = True
        config.max_spider_size = 9999999
        config.url_preview_url_blacklist = []
        config.media_store_path = self.storage_path

        provider_config = {
            "module": "synapse.rest.media.v1.storage_provider.FileStorageProviderBackend",
            "store_local": True,
            "store_synchronous": False,
            "store_remote": True,
            "config": {"directory": self.storage_path},
        }

        loaded = list(load_module(provider_config)) + [
            MediaStorageProviderConfig(False, False, False)
        ]

        config.media_storage_providers = [loaded]

        hs = self.setup_test_homeserver(config=config)

        return hs
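
Assuming load_module returns a (provider_class, parsed_config) pair, as the other examples suggest, the extra MediaStorageProviderConfig above turns each media_storage_providers entry into a three-element sequence: provider class, provider config, wrapper config. A self-contained sketch of that shape, using stand-in classes rather than the real Synapse ones:

from collections import namedtuple

# Stand-ins (assumptions) for the real Synapse classes, used only to show the
# shape of one media_storage_providers entry.
MediaStorageProviderConfig = namedtuple(
    "MediaStorageProviderConfig", ["store_local", "store_synchronous", "store_remote"]
)

class DummyBackend:  # stands in for FileStorageProviderBackend
    pass

provider_class, parsed_config = DummyBackend, {"directory": "/tmp/media_store"}
entry = [provider_class, parsed_config, MediaStorageProviderConfig(False, False, False)]
media_storage_providers = [entry]  # a single provider entry, hence the extra brackets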
Example #3
    def make_homeserver(self, reactor, clock):

        self.storage_path = self.mktemp()
        os.mkdir(self.storage_path)

        config = self.default_config()
        config.url_preview_enabled = True
        config.max_spider_size = 9999999
        config.url_preview_url_blacklist = []
        config.media_store_path = self.storage_path

        provider_config = {
            "module":
            "synapse.rest.media.v1.storage_provider.FileStorageProviderBackend",
            "store_local": True,
            "store_synchronous": False,
            "store_remote": True,
            "config": {
                "directory": self.storage_path
            },
        }

        loaded = list(load_module(provider_config)) + [
            MediaStorageProviderConfig(False, False, False)
        ]

        config.media_storage_providers = [loaded]

        hs = self.setup_test_homeserver(config=config)

        return hs
Example #4
    def read_config(self, config, **kwargs):
        self.third_party_event_rules = None

        provider = config.get("third_party_event_rules", None)
        if provider is not None:
            self.third_party_event_rules = load_module(
                provider, ("third_party_event_rules", ))
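
A hypothetical homeserver config fragment (already parsed from YAML) that this reader would accept; the module path and options are illustrative only.

config = {
    "third_party_event_rules": {
        "module": "my_project.rules.ExampleRulesModule",  # hypothetical path
        "config": {"max_event_size": 65536},              # hypothetical option
    }
}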
Example #5
    def _listener_http(self, config, listener_config):
        port = listener_config["port"]
        bind_addresses = listener_config["bind_addresses"]
        tls = listener_config.get("tls", False)
        site_tag = listener_config.get("tag", port)

        if tls and config.no_tls:
            return

        resources = {}
        for res in listener_config["resources"]:
            for name in res["names"]:
                resources.update(self._configure_named_resource(
                    name, res.get("compress", False),
                ))

        additional_resources = listener_config.get("additional_resources", {})
        logger.debug("Configuring additional resources: %r",
                     additional_resources)
        module_api = ModuleApi(self, self.get_auth_handler())
        for path, resmodule in additional_resources.items():
            handler_cls, config = load_module(resmodule)
            handler = handler_cls(config, module_api)
            resources[path] = AdditionalResource(self, handler.handle_request)

        #if WEB_CLIENT_PREFIX in resources:
        #    root_resource = RootRedirect(WEB_CLIENT_PREFIX)
        #else:
        #    root_resource = NoResource()
        root_resource = NoResource()

        root_resource = create_resource_tree(resources, root_resource)

        if tls:
            listen_ssl(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.https.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                ),
                self.tls_server_context_factory,
            )

        else:
            listen_tcp(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.http.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                )
            )
        logger.info("Synapse now listening on port %d", port)
Example #6
    def read_config(self, config, **kwargs):
        self.oidc_enabled = False

        oidc_config = config.get("oidc_config")

        if not oidc_config or not oidc_config.get("enabled", False):
            return

        try:
            check_requirements("oidc")
        except DependencyException as e:
            raise ConfigError(e.message)

        public_baseurl = self.public_baseurl
        if public_baseurl is None:
            raise ConfigError("oidc_config requires a public_baseurl to be set")
        self.oidc_callback_url = public_baseurl + "_synapse/oidc/callback"

        self.oidc_enabled = True
        self.oidc_discover = oidc_config.get("discover", True)
        self.oidc_issuer = oidc_config["issuer"]
        self.oidc_client_id = oidc_config["client_id"]
        self.oidc_client_secret = oidc_config["client_secret"]
        self.oidc_client_auth_method = oidc_config.get(
            "client_auth_method", "client_secret_basic"
        )
        self.oidc_scopes = oidc_config.get("scopes", ["openid"])
        self.oidc_authorization_endpoint = oidc_config.get("authorization_endpoint")
        self.oidc_token_endpoint = oidc_config.get("token_endpoint")
        self.oidc_userinfo_endpoint = oidc_config.get("userinfo_endpoint")
        self.oidc_jwks_uri = oidc_config.get("jwks_uri")
        self.oidc_skip_verification = oidc_config.get("skip_verification", False)
        self.oidc_user_profile_method = oidc_config.get("user_profile_method", "auto")
        self.oidc_allow_existing_users = oidc_config.get("allow_existing_users", False)

        ump_config = oidc_config.get("user_mapping_provider", {})
        ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
        ump_config.setdefault("config", {})

        (
            self.oidc_user_mapping_provider_class,
            self.oidc_user_mapping_provider_config,
        ) = load_module(ump_config)

        # Ensure loaded user mapping module has defined all necessary methods
        required_methods = [
            "get_remote_user_id",
            "map_user_attributes",
        ]
        missing_methods = [
            method
            for method in required_methods
            if not hasattr(self.oidc_user_mapping_provider_class, method)
        ]
        if missing_methods:
            raise ConfigError(
                "Class specified by oidc_config."
                "user_mapping_provider.module is missing required "
                "methods: %s" % (", ".join(missing_methods),)
            )
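
A minimal sketch of an oidc_config mapping that satisfies the required keys read above; the issuer and credentials are placeholders, and every other option falls back to the defaults fetched via .get().

oidc_config = {
    "enabled": True,
    "issuer": "https://id.example.com/",   # placeholder issuer
    "client_id": "synapse",                # placeholder client id
    "client_secret": "not-a-real-secret",  # placeholder secret
    # discover, scopes, client_auth_method, user_mapping_provider, etc. all
    # take their defaults, including DEFAULT_USER_MAPPING_PROVIDER.
}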
Example #7
    def read_config(self, config, **kwargs):
        self.password_providers = []  # type: List[Any]
        providers = []

        # We want to be backwards compatible with the old `ldap_config`
        # param.
        ldap_config = config.get("ldap_config", {})
        if ldap_config.get("enabled", False):
            providers.append({"module": LDAP_PROVIDER, "config": ldap_config})

        providers.extend(config.get("password_providers", []))
        for provider in providers:
            mod_name = provider["module"]

            # This is for backwards compat when the ldap auth provider resided
            # in this package.
            if mod_name == "synapse.util.ldap_auth_provider.LdapAuthProvider":
                mod_name = LDAP_PROVIDER

            (provider_class, provider_config) = load_module({
                "module": mod_name,
                "config": provider["config"],
            })

            self.password_providers.append((provider_class, provider_config))
Example #8
    def read_config(self, config):
        self.password_providers = []

        provider_config = None

        # We want to be backwards compatible with the old `ldap_config`
        # param.
        ldap_config = config.get("ldap_config", {})
        self.ldap_enabled = ldap_config.get("enabled", False)
        if self.ldap_enabled:
            from ldap_auth_provider import LdapAuthProvider
            parsed_config = LdapAuthProvider.parse_config(ldap_config)
            self.password_providers.append((LdapAuthProvider, parsed_config))

        providers = config.get("password_providers", [])
        for provider in providers:
            # This is for backwards compat when the ldap auth provider resided
            # in this package.
            if provider['module'] == "synapse.util.ldap_auth_provider.LdapAuthProvider":
                from ldap_auth_provider import LdapAuthProvider
                provider_class = LdapAuthProvider
                try:
                    provider_config = provider_class.parse_config(
                        provider["config"])
                except Exception as e:
                    raise ConfigError("Failed to parse config for %r: %r" %
                                      (provider['module'], e))
            else:
                (provider_class, provider_config) = load_module(provider)

            self.password_providers.append((provider_class, provider_config))
Example #9
    def read_config(self, config):
        self.password_providers = []
        providers = []

        # We want to be backwards compatible with the old `ldap_config`
        # param.
        ldap_config = config.get("ldap_config", {})
        if ldap_config.get("enabled", False):
            providers.append({
                'module': LDAP_PROVIDER,
                'config': ldap_config,
            })

        providers.extend(config.get("password_providers", []))
        for provider in providers:
            mod_name = provider['module']

            # This is for backwards compat when the ldap auth provider resided
            # in this package.
            if mod_name == "synapse.util.ldap_auth_provider.LdapAuthProvider":
                mod_name = LDAP_PROVIDER

            (provider_class, provider_config) = load_module({
                "module": mod_name,
                "config": provider['config'],
            })

            self.password_providers.append((provider_class, provider_config))
Example #10
    def _listener_http(self, config, listener_config):
        port = listener_config["port"]
        bind_addresses = listener_config["bind_addresses"]
        tls = listener_config.get("tls", False)
        site_tag = listener_config.get("tag", port)

        if tls and config.no_tls:
            return

        resources = {}
        for res in listener_config["resources"]:
            for name in res["names"]:
                resources.update(self._configure_named_resource(
                    name, res.get("compress", False),
                ))

        additional_resources = listener_config.get("additional_resources", {})
        logger.debug("Configuring additional resources: %r",
                     additional_resources)
        module_api = ModuleApi(self, self.get_auth_handler())
        for path, resmodule in additional_resources.items():
            handler_cls, config = load_module(resmodule)
            handler = handler_cls(config, module_api)
            resources[path] = AdditionalResource(self, handler.handle_request)

        if WEB_CLIENT_PREFIX in resources:
            root_resource = RootRedirect(WEB_CLIENT_PREFIX)
        else:
            root_resource = NoResource()

        root_resource = create_resource_tree(resources, root_resource)

        if tls:
            listen_ssl(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.https.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                ),
                self.tls_server_context_factory,
            )

        else:
            listen_tcp(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.http.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                )
            )
        logger.info("Synapse now listening on port %d", port)
Example #11
    def read_config(self, config: dict, **kwargs):
        self.loaded_modules: List[Tuple[Any, Dict]] = []

        configured_modules = config.get("modules") or []
        for i, module in enumerate(configured_modules):
            config_path = ("modules", "<item %i>" % i)
            if not isinstance(module, dict):
                raise ConfigError("expected a mapping", config_path)

            self.loaded_modules.append(load_module(module, config_path))
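
For comparison, an illustrative parsed config for the generic "modules" option consumed by this loop: a list of mappings, each with a dotted module path and an optional per-module config (both hypothetical here).

config = {
    "modules": [
        {
            "module": "my_project.modules.ExampleModule",  # hypothetical path
            "config": {"greeting": "hello"},               # hypothetical options
        },
    ]
}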
Example #12
    def make_homeserver(self, reactor, clock):

        self.fetches = []

        def get_file(destination,
                     path,
                     output_stream,
                     args=None,
                     max_size=None):
            """
            Returns tuple[int,dict,str,int] of file length, response headers,
            absolute URI, and response code.
            """
            def write_to(r):
                data, response = r
                output_stream.write(data)
                return response

            d = Deferred()
            d.addCallback(write_to)
            self.fetches.append((d, destination, path, args))
            return make_deferred_yieldable(d)

        client = Mock()
        client.get_file = get_file

        self.storage_path = self.mktemp()
        os.mkdir(self.storage_path)

        config = self.default_config()
        config.media_store_path = self.storage_path
        config.thumbnail_requirements = {}
        config.max_image_pixels = 2000000

        provider_config = {
            "module":
            "synapse.rest.media.v1.storage_provider.FileStorageProviderBackend",
            "store_local": True,
            "store_synchronous": False,
            "store_remote": True,
            "config": {
                "directory": self.storage_path
            },
        }

        loaded = list(load_module(provider_config)) + [
            MediaStorageProviderConfig(False, False, False)
        ]

        config.media_storage_providers = [loaded]

        hs = self.setup_test_homeserver(config=config, http_client=client)

        return hs
Example #13
    def read_config(self, config, **kwargs):
        self.spam_checkers = []  # type: List[Tuple[Any, Dict]]

        spam_checkers = config.get("spam_checker") or []
        if isinstance(spam_checkers, dict):
            # The spam_checker config option used to only support one
            # spam checker, and thus was simply a dictionary with module
            # and config keys. Support this old behaviour by checking
            # to see if the option resolves to a dictionary
            self.spam_checkers.append(
                load_module(spam_checkers, ("spam_checker", )))
        elif isinstance(spam_checkers, list):
            for i, spam_checker in enumerate(spam_checkers):
                config_path = ("spam_checker", "<item %i>" % i)
                if not isinstance(spam_checker, dict):
                    raise ConfigError("expected a mapping", config_path)

                self.spam_checkers.append(
                    load_module(spam_checker, config_path))
        else:
            raise ConfigError("spam_checker syntax is incorrect")
Example #14
    def read_config(self, config: JsonDict, **kwargs: Any) -> None:
        """Parses the old password auth providers config. The config format looks like this:

        password_providers:
           # Example config for an LDAP auth provider
           - module: "ldap_auth_provider.LdapAuthProvider"
             config:
               enabled: true
               uri: "ldap://ldap.example.com:389"
               start_tls: true
               base: "ou=users,dc=example,dc=com"
               attributes:
                  uid: "cn"
                  mail: "email"
                  name: "givenName"
               #bind_dn:
               #bind_password:
               #filter: "(objectClass=posixAccount)"

        We expect admins to use modules for this feature (which is why it doesn't appear
        in the sample config file), but we want to keep support for it around for a bit
        for backwards compatibility.
        """

        self.password_providers: List[Tuple[Type, Any]] = []
        providers = []

        # We want to be backwards compatible with the old `ldap_config`
        # param.
        ldap_config = config.get("ldap_config", {})
        if ldap_config.get("enabled", False):
            providers.append({"module": LDAP_PROVIDER, "config": ldap_config})

        providers.extend(config.get("password_providers") or [])
        for i, provider in enumerate(providers):
            mod_name = provider["module"]

            # This is for backwards compat when the ldap auth provider resided
            # in this package.
            if mod_name == "synapse.util.ldap_auth_provider.LdapAuthProvider":
                mod_name = LDAP_PROVIDER

            (provider_class, provider_config) = load_module(
                {
                    "module": mod_name,
                    "config": provider["config"]
                },
                ("password_providers", "<item %i>" % i),
            )

            self.password_providers.append((provider_class, provider_config))
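
A sketch of the legacy top-level ldap_config form that the backwards-compat branch above rewrites into a password_providers-style entry; the values echo the docstring example and are placeholders.

config = {
    "ldap_config": {
        "enabled": True,
        "uri": "ldap://ldap.example.com:389",
        "base": "ou=users,dc=example,dc=com",
        "attributes": {"uid": "cn", "mail": "email", "name": "givenName"},
    }
}
# read_config() then behaves as if the admin had written:
# providers == [{"module": LDAP_PROVIDER, "config": config["ldap_config"]}]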
Example #15
    def make_homeserver(self, reactor, clock):

        self.fetches = []

        def get_file(destination, path, output_stream, args=None, max_size=None):
            """
            Returns tuple[int,dict,str,int] of file length, response headers,
            absolute URI, and response code.
            """

            def write_to(r):
                data, response = r
                output_stream.write(data)
                return response

            d = Deferred()
            d.addCallback(write_to)
            self.fetches.append((d, destination, path, args))
            return make_deferred_yieldable(d)

        client = Mock()
        client.get_file = get_file

        self.storage_path = self.mktemp()
        os.mkdir(self.storage_path)

        config = self.default_config()
        config.media_store_path = self.storage_path
        config.thumbnail_requirements = {}
        config.max_image_pixels = 2000000

        provider_config = {
            "module": "synapse.rest.media.v1.storage_provider.FileStorageProviderBackend",
            "store_local": True,
            "store_synchronous": False,
            "store_remote": True,
            "config": {"directory": self.storage_path},
        }

        loaded = list(load_module(provider_config)) + [
            MediaStorageProviderConfig(False, False, False)
        ]

        config.media_storage_providers = [loaded]

        hs = self.setup_test_homeserver(config=config, http_client=client)

        return hs
Example #16
    def _listener_http(self, config, listener_config):
        port = listener_config["port"]
        bind_addresses = listener_config["bind_addresses"]
        tls = listener_config.get("tls", False)
        site_tag = listener_config.get("tag", port)

        resources = {}
        for res in listener_config["resources"]:
            for name in res["names"]:
                if name == "openid" and "federation" in res["names"]:
                    # Skip loading openid resource if federation is defined
                    # since federation resource will include openid
                    continue
                resources.update(self._configure_named_resource(
                    name, res.get("compress", False),
                ))

        additional_resources = listener_config.get("additional_resources", {})
        logger.debug("Configuring additional resources: %r",
                     additional_resources)
        module_api = ModuleApi(self, self.get_auth_handler())
        for path, resmodule in additional_resources.items():
            handler_cls, config = load_module(resmodule)
            handler = handler_cls(config, module_api)
            resources[path] = AdditionalResource(self, handler.handle_request)

        # try to find something useful to redirect '/' to
        if WEB_CLIENT_PREFIX in resources:
            root_resource = RootRedirect(WEB_CLIENT_PREFIX)
        elif STATIC_PREFIX in resources:
            root_resource = RootRedirect(STATIC_PREFIX)
        else:
            root_resource = NoResource()

        root_resource = create_resource_tree(resources, root_resource)

        if tls:
            ports = listen_ssl(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.https.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                ),
                self.tls_server_context_factory,
                reactor=self.get_reactor(),
            )
            logger.info("Synapse now listening on TCP port %d (TLS)", port)

        else:
            ports = listen_tcp(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.http.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                ),
                reactor=self.get_reactor(),
            )
            logger.info("Synapse now listening on TCP port %d", port)

        return ports
Example #17
    def read_config(self, config, **kwargs):
        self.saml2_enabled = False

        saml2_config = config.get("saml2_config")

        if not saml2_config or not saml2_config.get("enabled", True):
            return

        if not saml2_config.get("sp_config") and not saml2_config.get(
                "config_path"):
            return

        try:
            check_requirements("saml2")
        except DependencyException as e:
            raise ConfigError(e.message)

        self.saml2_enabled = True

        self.saml2_grandfathered_mxid_source_attribute = saml2_config.get(
            "grandfathered_mxid_source_attribute", "uid")

        # user_mapping_provider may be None if the key is present but has no value
        ump_dict = saml2_config.get("user_mapping_provider") or {}

        # Use the default user mapping provider if not set
        ump_dict.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)

        # Ensure a config is present
        ump_dict["config"] = ump_dict.get("config") or {}

        if ump_dict["module"] == DEFAULT_USER_MAPPING_PROVIDER:
            # Load deprecated options for use by the default module
            old_mxid_source_attribute = saml2_config.get(
                "mxid_source_attribute")
            if old_mxid_source_attribute:
                logger.warning(
                    "The config option saml2_config.mxid_source_attribute is deprecated. "
                    "Please use saml2_config.user_mapping_provider.config"
                    ".mxid_source_attribute instead.")
                ump_dict["config"][
                    "mxid_source_attribute"] = old_mxid_source_attribute

            old_mxid_mapping = saml2_config.get("mxid_mapping")
            if old_mxid_mapping:
                logger.warning(
                    "The config option saml2_config.mxid_mapping is deprecated. Please "
                    "use saml2_config.user_mapping_provider.config.mxid_mapping instead."
                )
                ump_dict["config"]["mxid_mapping"] = old_mxid_mapping

        # Retrieve an instance of the module's class
        # Pass the config dictionary to the module for processing
        (
            self.saml2_user_mapping_provider_class,
            self.saml2_user_mapping_provider_config,
        ) = load_module(ump_dict)

        # Ensure loaded user mapping module has defined all necessary methods
        # Note parse_config() is already checked during the call to load_module
        required_methods = [
            "get_saml_attributes",
            "saml_response_to_user_attributes",
            "get_remote_user_id",
        ]
        missing_methods = [
            method for method in required_methods
            if not hasattr(self.saml2_user_mapping_provider_class, method)
        ]
        if missing_methods:
            raise ConfigError(
                "Class specified by saml2_config."
                "user_mapping_provider.module is missing required "
                "methods: %s" % (", ".join(missing_methods), ))

        # Get the desired saml auth response attributes from the module
        saml2_config_dict = self._default_saml_config_dict(
            *self.saml2_user_mapping_provider_class.get_saml_attributes(
                self.saml2_user_mapping_provider_config))
        _dict_merge(merge_dict=saml2_config.get("sp_config", {}),
                    into_dict=saml2_config_dict)

        config_path = saml2_config.get("config_path", None)
        if config_path is not None:
            mod = load_python_module(config_path)
            _dict_merge(merge_dict=mod.CONFIG, into_dict=saml2_config_dict)

        import saml2.config

        self.saml2_sp_config = saml2.config.SPConfig()
        self.saml2_sp_config.load(saml2_config_dict)

        # session lifetime: in milliseconds
        self.saml2_session_lifetime = self.parse_duration(
            saml2_config.get("saml_session_lifetime", "15m"))

        template_dir = saml2_config.get("template_dir")
        if not template_dir:
            template_dir = pkg_resources.resource_filename(
                "synapse",
                "res/templates",
            )

        loader = jinja2.FileSystemLoader(template_dir)
        # enable auto-escape here, to avoid having to remember to escape manually in
        # the template
        env = jinja2.Environment(loader=loader, autoescape=True)
        self.saml2_error_html_template = env.get_template("saml_error.html")
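
A sketch (with assumed values) of a legacy saml2_config that still uses the deprecated top-level options; the code above folds them into the default user_mapping_provider's config and logs deprecation warnings.

saml2_config = {
    "enabled": True,
    "sp_config": {},
    "mxid_source_attribute": "uid",  # deprecated location
    "mxid_mapping": "hexencode",     # deprecated location (value is illustrative)
    # equivalent modern form:
    # "user_mapping_provider": {
    #     "config": {"mxid_source_attribute": "uid", "mxid_mapping": "hexencode"},
    # },
}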
Example #18
    def read_config(self, config, **kwargs):
        self.saml2_enabled = False

        saml2_config = config.get("saml2_config")

        if not saml2_config or not saml2_config.get("enabled", True):
            return

        if not saml2_config.get("sp_config") and not saml2_config.get(
                "config_path"):
            return

        try:
            check_requirements("saml2")
        except DependencyException as e:
            raise ConfigError(
                e.message  # noqa: B306, DependencyException.message is a property
            )

        self.saml2_enabled = True

        attribute_requirements = saml2_config.get(
            "attribute_requirements") or []
        self.attribute_requirements = _parse_attribute_requirements_def(
            attribute_requirements)

        self.saml2_grandfathered_mxid_source_attribute = saml2_config.get(
            "grandfathered_mxid_source_attribute", "uid")

        self.saml2_idp_entityid = saml2_config.get("idp_entityid", None)

        # user_mapping_provider may be None if the key is present but has no value
        ump_dict = saml2_config.get("user_mapping_provider") or {}

        # Use the default user mapping provider if not set
        ump_dict.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
        if ump_dict.get("module") == LEGACY_USER_MAPPING_PROVIDER:
            ump_dict["module"] = DEFAULT_USER_MAPPING_PROVIDER

        # Ensure a config is present
        ump_dict["config"] = ump_dict.get("config") or {}

        if ump_dict["module"] == DEFAULT_USER_MAPPING_PROVIDER:
            # Load deprecated options for use by the default module
            old_mxid_source_attribute = saml2_config.get(
                "mxid_source_attribute")
            if old_mxid_source_attribute:
                logger.warning(
                    "The config option saml2_config.mxid_source_attribute is deprecated. "
                    "Please use saml2_config.user_mapping_provider.config"
                    ".mxid_source_attribute instead.")
                ump_dict["config"][
                    "mxid_source_attribute"] = old_mxid_source_attribute

            old_mxid_mapping = saml2_config.get("mxid_mapping")
            if old_mxid_mapping:
                logger.warning(
                    "The config option saml2_config.mxid_mapping is deprecated. Please "
                    "use saml2_config.user_mapping_provider.config.mxid_mapping instead."
                )
                ump_dict["config"]["mxid_mapping"] = old_mxid_mapping

        # Retrieve an instance of the module's class
        # Pass the config dictionary to the module for processing
        (
            self.saml2_user_mapping_provider_class,
            self.saml2_user_mapping_provider_config,
        ) = load_module(ump_dict, ("saml2_config", "user_mapping_provider"))

        # Ensure loaded user mapping module has defined all necessary methods
        # Note parse_config() is already checked during the call to load_module
        required_methods = [
            "get_saml_attributes",
            "saml_response_to_user_attributes",
            "get_remote_user_id",
        ]
        missing_methods = [
            method for method in required_methods
            if not hasattr(self.saml2_user_mapping_provider_class, method)
        ]
        if missing_methods:
            raise ConfigError(
                "Class specified by saml2_config."
                "user_mapping_provider.module is missing required "
                "methods: %s" % (", ".join(missing_methods), ))

        # Get the desired saml auth response attributes from the module
        saml2_config_dict = self._default_saml_config_dict(
            *self.saml2_user_mapping_provider_class.get_saml_attributes(
                self.saml2_user_mapping_provider_config))
        _dict_merge(merge_dict=saml2_config.get("sp_config", {}),
                    into_dict=saml2_config_dict)

        config_path = saml2_config.get("config_path", None)
        if config_path is not None:
            mod = load_python_module(config_path)
            config = getattr(mod, "CONFIG", None)
            if config is None:
                raise ConfigError(
                    "Config path specified by saml2_config.config_path does not "
                    "have a CONFIG property.")
            _dict_merge(merge_dict=config, into_dict=saml2_config_dict)

        import saml2.config

        self.saml2_sp_config = saml2.config.SPConfig()
        self.saml2_sp_config.load(saml2_config_dict)

        # session lifetime: in milliseconds
        self.saml2_session_lifetime = self.parse_duration(
            saml2_config.get("saml_session_lifetime", "15m"))
Example #19
    def read_config(self, config: JsonDict, **kwargs: Any) -> None:
        self.server_name = config["server_name"]
        self.server_context = config.get("server_context", None)

        try:
            parse_and_validate_server_name(self.server_name)
        except ValueError as e:
            raise ConfigError(str(e))

        self.pid_file = self.abspath(config.get("pid_file"))
        self.soft_file_limit = config.get("soft_file_limit", 0)
        self.daemonize = bool(config.get("daemonize"))
        self.print_pidfile = bool(config.get("print_pidfile"))
        self.user_agent_suffix = config.get("user_agent_suffix")
        self.use_frozen_dicts = config.get("use_frozen_dicts", False)
        self.serve_server_wellknown = config.get("serve_server_wellknown", False)

        # Whether we should serve a "client well-known":
        #  (a) at .well-known/matrix/client on our client HTTP listener
        #  (b) in the response to /login
        #
        # ... which together help ensure that clients use our public_baseurl instead of
        # whatever they were told by the user.
        #
        # For the sake of backwards compatibility with existing installations, this is
        # True if public_baseurl is specified explicitly, and otherwise False. (The
        # reasoning here is that we have no way of knowing that the default
        # public_baseurl is actually correct for existing installations - many things
        # will not work correctly, but that's (probably?) better than sending clients
        # to a completely broken URL.)
        self.serve_client_wellknown = False

        public_baseurl = config.get("public_baseurl")
        if public_baseurl is None:
            public_baseurl = f"https://{self.server_name}/"
            logger.info("Using default public_baseurl %s", public_baseurl)
        else:
            self.serve_client_wellknown = True
            if public_baseurl[-1] != "/":
                public_baseurl += "/"
        self.public_baseurl = public_baseurl

        # check that public_baseurl is valid
        try:
            splits = urllib.parse.urlsplit(self.public_baseurl)
        except Exception as e:
            raise ConfigError(f"Unable to parse URL: {e}", ("public_baseurl",))
        if splits.scheme not in ("https", "http"):
            raise ConfigError(
                f"Invalid scheme '{splits.scheme}': only https and http are supported"
            )
        if splits.query or splits.fragment:
            raise ConfigError(
                "public_baseurl cannot contain query parameters or a #-fragment"
            )

        self.extra_well_known_client_content = config.get(
            "extra_well_known_client_content", {}
        )

        if not isinstance(self.extra_well_known_client_content, dict):
            raise ConfigError(
                "extra_well_known_content must be a dictionary of key-value pairs"
            )

        if "m.homeserver" in self.extra_well_known_client_content:
            raise ConfigError(
                "m.homeserver is not supported in extra_well_known_content, "
                "use public_baseurl in base config instead."
            )
        if "m.identity_server" in self.extra_well_known_client_content:
            raise ConfigError(
                "m.identity_server is not supported in extra_well_known_content, "
                "use default_identity_server in base config instead."
            )

        # Whether to enable user presence.
        presence_config = config.get("presence") or {}
        self.use_presence = presence_config.get("enabled")
        if self.use_presence is None:
            self.use_presence = config.get("use_presence", True)

        # Custom presence router module
        # This is the legacy way of configuring it (the config should now be put in the modules section)
        self.presence_router_module_class = None
        self.presence_router_config = None
        presence_router_config = presence_config.get("presence_router")
        if presence_router_config:
            (
                self.presence_router_module_class,
                self.presence_router_config,
            ) = load_module(presence_router_config, ("presence", "presence_router"))

        # whether to enable the media repository endpoints. This should be set
        # to false if the media repository is running as a separate endpoint;
        # doing so ensures that we will not run cache cleanup jobs on the
        # master, potentially causing inconsistency.
        self.enable_media_repo = config.get("enable_media_repo", True)

        # Whether to require authentication to retrieve profile data (avatars,
        # display names) of other users through the client API.
        self.require_auth_for_profile_requests = config.get(
            "require_auth_for_profile_requests", False
        )

        # Whether to require sharing a room with a user to retrieve their
        # profile data
        self.limit_profile_requests_to_users_who_share_rooms = config.get(
            "limit_profile_requests_to_users_who_share_rooms",
            False,
        )

        # Whether to retrieve and display profile data for a user when they
        # are invited to a room
        self.include_profile_data_on_invite = config.get(
            "include_profile_data_on_invite", True
        )

        if "restrict_public_rooms_to_local_users" in config and (
            "allow_public_rooms_without_auth" in config
            or "allow_public_rooms_over_federation" in config
        ):
            raise ConfigError(
                "Can't use 'restrict_public_rooms_to_local_users' if"
                " 'allow_public_rooms_without_auth' and/or"
                " 'allow_public_rooms_over_federation' is set."
            )

        # Check if the legacy "restrict_public_rooms_to_local_users" flag is set. This
        # flag is now obsolete but we need to check it for backward-compatibility.
        if config.get("restrict_public_rooms_to_local_users", False):
            self.allow_public_rooms_without_auth = False
            self.allow_public_rooms_over_federation = False
        else:
            # If set to 'true', removes the need for authentication to access the server's
            # public rooms directory through the client API, meaning that anyone can
            # query the room directory. Defaults to 'false'.
            self.allow_public_rooms_without_auth = config.get(
                "allow_public_rooms_without_auth", False
            )
            # If set to 'true', allows any other homeserver to fetch the server's public
            # rooms directory via federation. Defaults to 'false'.
            self.allow_public_rooms_over_federation = config.get(
                "allow_public_rooms_over_federation", False
            )

        default_room_version = config.get("default_room_version", DEFAULT_ROOM_VERSION)

        # Ensure room version is a str
        default_room_version = str(default_room_version)

        if default_room_version not in KNOWN_ROOM_VERSIONS:
            raise ConfigError(
                "Unknown default_room_version: %s, known room versions: %s"
                % (default_room_version, list(KNOWN_ROOM_VERSIONS.keys()))
            )

        # Get the actual room version object rather than just the identifier
        self.default_room_version = KNOWN_ROOM_VERSIONS[default_room_version]

        # whether to enable search. If disabled, new entries will not be inserted
        # into the search tables and they will not be indexed. Users will receive
        # errors when attempting to search for messages.
        self.enable_search = config.get("enable_search", True)

        self.filter_timeline_limit = config.get("filter_timeline_limit", 100)

        # Whether we should block invites sent to users on this server
        # (other than those sent by local server admins)
        self.block_non_admin_invites = config.get("block_non_admin_invites", False)

        # Options to control access by tracking MAU
        self.limit_usage_by_mau = config.get("limit_usage_by_mau", False)
        self.max_mau_value = 0
        if self.limit_usage_by_mau:
            self.max_mau_value = config.get("max_mau_value", 0)
        self.mau_stats_only = config.get("mau_stats_only", False)

        self.mau_limits_reserved_threepids = config.get(
            "mau_limit_reserved_threepids", []
        )

        self.mau_trial_days = config.get("mau_trial_days", 0)
        self.mau_appservice_trial_days = config.get("mau_appservice_trial_days", {})
        self.mau_limit_alerting = config.get("mau_limit_alerting", True)

        # How long to keep redacted events in the database in unredacted form
        # before redacting them.
        redaction_retention_period = config.get("redaction_retention_period", "7d")
        if redaction_retention_period is not None:
            self.redaction_retention_period: Optional[int] = self.parse_duration(
                redaction_retention_period
            )
        else:
            self.redaction_retention_period = None

        # How long to keep entries in the `users_ips` table.
        user_ips_max_age = config.get("user_ips_max_age", "28d")
        if user_ips_max_age is not None:
            self.user_ips_max_age: Optional[int] = self.parse_duration(user_ips_max_age)
        else:
            self.user_ips_max_age = None

        # Options to disable HS
        self.hs_disabled = config.get("hs_disabled", False)
        self.hs_disabled_message = config.get("hs_disabled_message", "")

        # Admin uri to direct users at should their instance become blocked
        # due to resource constraints
        self.admin_contact = config.get("admin_contact", None)

        ip_range_blacklist = config.get(
            "ip_range_blacklist", DEFAULT_IP_RANGE_BLACKLIST
        )

        # Attempt to create an IPSet from the given ranges

        # Always blacklist 0.0.0.0, ::
        self.ip_range_blacklist = generate_ip_set(
            ip_range_blacklist, ["0.0.0.0", "::"], config_path=("ip_range_blacklist",)
        )

        self.ip_range_whitelist = generate_ip_set(
            config.get("ip_range_whitelist", ()), config_path=("ip_range_whitelist",)
        )
        # The federation_ip_range_blacklist is used for backwards-compatibility
        # and only applies to federation and identity servers.
        if "federation_ip_range_blacklist" in config:
            # Always blacklist 0.0.0.0, ::
            self.federation_ip_range_blacklist = generate_ip_set(
                config["federation_ip_range_blacklist"],
                ["0.0.0.0", "::"],
                config_path=("federation_ip_range_blacklist",),
            )
            # 'federation_ip_range_whitelist' was never a supported configuration option.
            self.federation_ip_range_whitelist = None
        else:
            # No backwards-compatibility required, as federation_ip_range_blacklist
            # is not given. Default to ip_range_blacklist and ip_range_whitelist.
            self.federation_ip_range_blacklist = self.ip_range_blacklist
            self.federation_ip_range_whitelist = self.ip_range_whitelist

        # (undocumented) option for torturing the worker-mode replication a bit,
        # for testing. The value defines the number of milliseconds to pause before
        # sending out any replication updates.
        self.replication_torture_level = config.get("replication_torture_level")

        # Whether to require a user to be in the room to add an alias to it.
        # Defaults to True.
        self.require_membership_for_aliases = config.get(
            "require_membership_for_aliases", True
        )

        # Whether to allow per-room membership profiles through the send of membership
        # events with profile information that differ from the target's global profile.
        self.allow_per_room_profiles = config.get("allow_per_room_profiles", True)

        # The maximum size an avatar can have, in bytes.
        self.max_avatar_size = config.get("max_avatar_size")
        if self.max_avatar_size is not None:
            self.max_avatar_size = self.parse_size(self.max_avatar_size)

        # The MIME types allowed for an avatar.
        self.allowed_avatar_mimetypes = config.get("allowed_avatar_mimetypes")
        if self.allowed_avatar_mimetypes and not isinstance(
            self.allowed_avatar_mimetypes,
            list,
        ):
            raise ConfigError("allowed_avatar_mimetypes must be a list")

        self.listeners = [parse_listener_def(x) for x in config.get("listeners", [])]

        # no_tls is not really supported any more, but let's grandfather it in
        # here.
        if config.get("no_tls", False):
            l2 = []
            for listener in self.listeners:
                if listener.tls:
                    logger.info(
                        "Ignoring TLS-enabled listener on port %i due to no_tls",
                        listener.port,
                    )
                else:
                    l2.append(listener)
            self.listeners = l2

        self.web_client_location = config.get("web_client_location", None)
        # Non-HTTP(S) web client location is not supported.
        if self.web_client_location and not (
            self.web_client_location.startswith("http://")
            or self.web_client_location.startswith("https://")
        ):
            raise ConfigError("web_client_location must point to a HTTP(S) URL.")

        self.gc_thresholds = read_gc_thresholds(config.get("gc_thresholds", None))
        self.gc_seconds = self.read_gc_intervals(config.get("gc_min_interval", None))

        self.limit_remote_rooms = LimitRemoteRoomsConfig(
            **(config.get("limit_remote_rooms") or {})
        )

        bind_port = config.get("bind_port")
        if bind_port:
            if config.get("no_tls", False):
                raise ConfigError("no_tls is incompatible with bind_port")

            self.listeners = []
            bind_host = config.get("bind_host", "")
            gzip_responses = config.get("gzip_responses", True)

            http_options = HttpListenerConfig(
                resources=[
                    HttpResourceConfig(names=["client"], compress=gzip_responses),
                    HttpResourceConfig(names=["federation"]),
                ],
            )

            self.listeners.append(
                ListenerConfig(
                    port=bind_port,
                    bind_addresses=[bind_host],
                    tls=True,
                    type="http",
                    http_options=http_options,
                )
            )

            unsecure_port = config.get("unsecure_port", bind_port - 400)
            if unsecure_port:
                self.listeners.append(
                    ListenerConfig(
                        port=unsecure_port,
                        bind_addresses=[bind_host],
                        tls=False,
                        type="http",
                        http_options=http_options,
                    )
                )

        manhole = config.get("manhole")
        if manhole:
            self.listeners.append(
                ListenerConfig(
                    port=manhole,
                    bind_addresses=["127.0.0.1"],
                    type="manhole",
                )
            )

        manhole_settings = config.get("manhole_settings") or {}
        validate_config(
            _MANHOLE_SETTINGS_SCHEMA, manhole_settings, ("manhole_settings",)
        )

        manhole_username = manhole_settings.get("username", "matrix")
        manhole_password = manhole_settings.get("password", "rabbithole")
        manhole_priv_key_path = manhole_settings.get("ssh_priv_key_path")
        manhole_pub_key_path = manhole_settings.get("ssh_pub_key_path")

        manhole_priv_key = None
        if manhole_priv_key_path is not None:
            try:
                manhole_priv_key = Key.fromFile(manhole_priv_key_path)
            except Exception as e:
                raise ConfigError(
                    f"Failed to read manhole private key file {manhole_priv_key_path}"
                ) from e

        manhole_pub_key = None
        if manhole_pub_key_path is not None:
            try:
                manhole_pub_key = Key.fromFile(manhole_pub_key_path)
            except Exception as e:
                raise ConfigError(
                    f"Failed to read manhole public key file {manhole_pub_key_path}"
                ) from e

        self.manhole_settings = ManholeConfig(
            username=manhole_username,
            password=manhole_password,
            priv_key=manhole_priv_key,
            pub_key=manhole_pub_key,
        )

        metrics_port = config.get("metrics_port")
        if metrics_port:
            logger.warning(METRICS_PORT_WARNING)

            self.listeners.append(
                ListenerConfig(
                    port=metrics_port,
                    bind_addresses=[config.get("metrics_bind_host", "127.0.0.1")],
                    type="http",
                    http_options=HttpListenerConfig(
                        resources=[HttpResourceConfig(names=["metrics"])]
                    ),
                )
            )

        self.cleanup_extremities_with_dummy_events = config.get(
            "cleanup_extremities_with_dummy_events", True
        )

        # The number of forward extremities in a room needed to send a dummy event.
        self.dummy_events_threshold = config.get("dummy_events_threshold", 10)

        self.enable_ephemeral_messages = config.get("enable_ephemeral_messages", False)

        # Inhibits the /requestToken endpoints from returning an error that might leak
        # information about whether an e-mail address is in use or not on this
        # homeserver, and instead return a 200 with a fake sid if this kind of error is
        # met, without sending anything.
        # This is a compromise between sending an email, which could be a spam vector,
        # and letting the client know which email address is bound to an account and
        # which one isn't.
        self.request_token_inhibit_3pid_errors = config.get(
            "request_token_inhibit_3pid_errors",
            False,
        )

        # Whitelist of domain names that given next_link parameters must have
        next_link_domain_whitelist: Optional[List[str]] = config.get(
            "next_link_domain_whitelist"
        )

        self.next_link_domain_whitelist: Optional[Set[str]] = None
        if next_link_domain_whitelist is not None:
            if not isinstance(next_link_domain_whitelist, list):
                raise ConfigError("'next_link_domain_whitelist' must be a list")

            # Turn the list into a set to improve lookup speed.
            self.next_link_domain_whitelist = set(next_link_domain_whitelist)

        templates_config = config.get("templates") or {}
        if not isinstance(templates_config, dict):
            raise ConfigError("The 'templates' section must be a dictionary")

        self.custom_template_directory: Optional[str] = templates_config.get(
            "custom_template_directory"
        )
        if self.custom_template_directory is not None and not isinstance(
            self.custom_template_directory, str
        ):
            raise ConfigError("'custom_template_directory' must be a string")

        self.use_account_validity_in_account_status: bool = (
            config.get("use_account_validity_in_account_status") or False
        )

        self.rooms_to_exclude_from_sync: List[str] = (
            config.get("exclude_rooms_from_sync") or []
        )

        delete_stale_devices_after: Optional[str] = (
            config.get("delete_stale_devices_after") or None
        )

        if delete_stale_devices_after is not None:
            self.delete_stale_devices_after: Optional[int] = self.parse_duration(
                delete_stale_devices_after
            )
        else:
            self.delete_stale_devices_after = None
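
One small piece of the above worth illustrating is the legacy presence router option, which is the only load_module() call in this reader. A hypothetical fragment (module path and options are illustrative; newer configs use the top-level "modules" list instead):

config = {
    "presence": {
        "enabled": True,
        "presence_router": {
            "module": "my_project.presence.ExampleRouter",        # hypothetical path
            "config": {"always_notify": ["@admin:example.com"]},  # hypothetical option
        },
    }
}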
Example #20
    def _listener_http(self, config: HomeServerConfig,
                       listener_config: ListenerConfig):
        port = listener_config.port
        bind_addresses = listener_config.bind_addresses
        tls = listener_config.tls
        site_tag = listener_config.http_options.tag
        if site_tag is None:
            site_tag = str(port)

        # We always include a health resource.
        resources = {"/health": HealthResource()}

        for res in listener_config.http_options.resources:
            for name in res.names:
                if name == "openid" and "federation" in res.names:
                    # Skip loading openid resource if federation is defined
                    # since federation resource will include openid
                    continue
                resources.update(
                    self._configure_named_resource(name, res.compress))

        additional_resources = listener_config.http_options.additional_resources
        logger.debug("Configuring additional resources: %r",
                     additional_resources)
        module_api = self.get_module_api()
        for path, resmodule in additional_resources.items():
            handler_cls, config = load_module(
                resmodule,
                ("listeners", site_tag, "additional_resources", "<%s>" %
                 (path, )),
            )
            handler = handler_cls(config, module_api)
            if IResource.providedBy(handler):
                resource = handler
            elif hasattr(handler, "handle_request"):
                resource = AdditionalResource(self, handler.handle_request)
            else:
                raise ConfigError(
                    "additional_resource %s does not implement a known interface"
                    % (resmodule["module"], ))
            resources[path] = resource

        # try to find something useful to redirect '/' to
        if WEB_CLIENT_PREFIX in resources:
            root_resource = RootOptionsRedirectResource(WEB_CLIENT_PREFIX)
        elif STATIC_PREFIX in resources:
            root_resource = RootOptionsRedirectResource(STATIC_PREFIX)
        else:
            root_resource = OptionsResource()

        root_resource = create_resource_tree(resources, root_resource)

        if tls:
            ports = listen_ssl(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.https.%s" % (site_tag, ),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                ),
                self.tls_server_context_factory,
                reactor=self.get_reactor(),
            )
            logger.info("Synapse now listening on TCP port %d (TLS)", port)

        else:
            ports = listen_tcp(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.http.%s" % (site_tag, ),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                ),
                reactor=self.get_reactor(),
            )
            logger.info("Synapse now listening on TCP port %d", port)

        return ports
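
The additional_resources loop above accepts either an IResource or an object exposing handle_request. A self-contained sketch of both pieces, with an assumed module path and a parse_config that simply passes the raw dict through:

listener_fragment = {
    "additional_resources": {
        "/_matrix/custom/hello": {
            "module": "my_project.web.HelloHandler",  # hypothetical module path
            "config": {"message": "hello"},
        },
    }
}

class HelloHandler:
    """Hypothetical handler wrapped by AdditionalResource above."""

    def __init__(self, config, module_api):
        self._message = config.get("message", "hi")
        self._api = module_api

    @staticmethod
    def parse_config(config):
        # Keep the raw dict; a real module would validate it here.
        return config or {}

    async def handle_request(self, request):
        # Called for each request to the configured path; writes a plain-text
        # body and closes the response itself.
        request.setHeader(b"Content-Type", b"text/plain; charset=utf-8")
        request.write(self._message.encode("utf-8"))
        request.finish()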
Example #21
def _parse_oidc_config_dict(
        oidc_config: JsonDict,
        config_path: Tuple[str, ...]) -> "OidcProviderConfig":
    """Take the configuration dict and parse it into an OidcProviderConfig

    Raises:
        ConfigError if the configuration is malformed.
    """
    ump_config = oidc_config.get("user_mapping_provider", {})
    ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
    if ump_config.get("module") == LEGACY_USER_MAPPING_PROVIDER:
        ump_config["module"] = DEFAULT_USER_MAPPING_PROVIDER
    ump_config.setdefault("config", {})

    (
        user_mapping_provider_class,
        user_mapping_provider_config,
    ) = load_module(ump_config, config_path + ("user_mapping_provider", ))

    # Ensure loaded user mapping module has defined all necessary methods
    required_methods = [
        "get_remote_user_id",
        "map_user_attributes",
    ]
    missing_methods = [
        method for method in required_methods
        if not hasattr(user_mapping_provider_class, method)
    ]
    if missing_methods:
        raise ConfigError(
            "Class %s is missing required "
            "methods: %s" % (
                user_mapping_provider_class,
                ", ".join(missing_methods),
            ),
            config_path + ("user_mapping_provider", "module"),
        )

    idp_id = oidc_config.get("idp_id", "oidc")

    # prefix the given IDP with a prefix specific to the SSO mechanism, to avoid
    # clashes with other mechs (such as SAML, CAS).
    #
    # We allow "oidc" as an exception so that people migrating from old-style
    # "oidc_config" format (which has long used "oidc" as its idp_id) can migrate to
    # a new-style "oidc_providers" entry without changing the idp_id for their provider
    # (and thereby invalidating their user_external_ids data).

    if idp_id != "oidc":
        idp_id = "oidc-" + idp_id

    # MSC2858 also specifies that the idp_icon must be a valid MXC uri
    idp_icon = oidc_config.get("idp_icon")
    if idp_icon is not None:
        try:
            parse_and_validate_mxc_uri(idp_icon)
        except ValueError as e:
            raise ConfigError("idp_icon must be a valid MXC URI",
                              config_path + ("idp_icon", )) from e

    client_secret_jwt_key_config = oidc_config.get("client_secret_jwt_key")
    client_secret_jwt_key: Optional[OidcProviderClientSecretJwtKey] = None
    if client_secret_jwt_key_config is not None:
        keyfile = client_secret_jwt_key_config.get("key_file")
        if keyfile:
            key = read_file(keyfile, config_path + ("client_secret_jwt_key", ))
        else:
            key = client_secret_jwt_key_config["key"]
        client_secret_jwt_key = OidcProviderClientSecretJwtKey(
            key=key,
            jwt_header=client_secret_jwt_key_config["jwt_header"],
            jwt_payload=client_secret_jwt_key_config.get("jwt_payload", {}),
        )
    # parse attribute_requirements from config (list of dicts) into a list of SsoAttributeRequirement
    attribute_requirements = [
        SsoAttributeRequirement(**x)
        for x in oidc_config.get("attribute_requirements", [])
    ]

    return OidcProviderConfig(
        idp_id=idp_id,
        idp_name=oidc_config.get("idp_name", "OIDC"),
        idp_icon=idp_icon,
        idp_brand=oidc_config.get("idp_brand"),
        discover=oidc_config.get("discover", True),
        issuer=oidc_config["issuer"],
        client_id=oidc_config["client_id"],
        client_secret=oidc_config.get("client_secret"),
        client_secret_jwt_key=client_secret_jwt_key,
        client_auth_method=oidc_config.get("client_auth_method",
                                           "client_secret_basic"),
        scopes=oidc_config.get("scopes", ["openid"]),
        authorization_endpoint=oidc_config.get("authorization_endpoint"),
        token_endpoint=oidc_config.get("token_endpoint"),
        userinfo_endpoint=oidc_config.get("userinfo_endpoint"),
        jwks_uri=oidc_config.get("jwks_uri"),
        skip_verification=oidc_config.get("skip_verification", False),
        user_profile_method=oidc_config.get("user_profile_method", "auto"),
        allow_existing_users=oidc_config.get("allow_existing_users", False),
        user_mapping_provider_class=user_mapping_provider_class,
        user_mapping_provider_config=user_mapping_provider_config,
        attribute_requirements=attribute_requirements,
    )
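To illustrate the input this parser expects, here is a hypothetical minimal provider entry and the resulting call; the issuer and client values are placeholders, and the surrounding imports are assumed to be in scope:

# Hypothetical minimal input for _parse_oidc_config_dict (placeholder values).
example_oidc_config = {
    "idp_id": "example",
    "idp_name": "Example IdP",
    "issuer": "https://idp.example.com/",
    "client_id": "synapse",
    "client_secret": "not-a-real-secret",
    "scopes": ["openid", "profile"],
}

provider = _parse_oidc_config_dict(
    example_oidc_config, ("oidc_providers", "<item 0>"))
# provider.idp_id becomes "oidc-example" because of the prefixing logic above.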
Пример #22
0
    def _listener_http(self, config, listener_config):
        port = listener_config["port"]
        bind_addresses = listener_config["bind_addresses"]
        tls = listener_config.get("tls", False)
        site_tag = listener_config.get("tag", port)

        resources = {}
        for res in listener_config["resources"]:
            for name in res["names"]:
                if name == "openid" and "federation" in res["names"]:
                    # Skip loading openid resource if federation is defined
                    # since federation resource will include openid
                    continue
                resources.update(
                    self._configure_named_resource(
                        name,
                        res.get("compress", False),
                    ))

        additional_resources = listener_config.get("additional_resources", {})
        logger.debug("Configuring additional resources: %r",
                     additional_resources)
        module_api = ModuleApi(self, self.get_auth_handler())
        for path, resmodule in additional_resources.items():
            handler_cls, config = load_module(resmodule)
            handler = handler_cls(config, module_api)
            resources[path] = AdditionalResource(self, handler.handle_request)

        # try to find something useful to redirect '/' to
        if WEB_CLIENT_PREFIX in resources:
            root_resource = RootRedirect(WEB_CLIENT_PREFIX)
        elif STATIC_PREFIX in resources:
            root_resource = RootRedirect(STATIC_PREFIX)
        else:
            root_resource = NoResource()

        root_resource = create_resource_tree(resources, root_resource)

        if tls:
            ports = listen_ssl(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.https.%s" % (site_tag, ),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                ),
                self.tls_server_context_factory,
                reactor=self.get_reactor(),
            )
            logger.info("Synapse now listening on TCP port %d (TLS)", port)

        else:
            ports = listen_tcp(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.http.%s" % (site_tag, ),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                ),
                reactor=self.get_reactor(),
            )
            logger.info("Synapse now listening on TCP port %d", port)

        return ports
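In this older variant every loaded handler is wrapped in AdditionalResource, so the class only needs to accept (config, module_api) and expose a handle_request callable. A rough, hypothetical sketch of such a handler (the exact response contract depends on AdditionalResource's wrapping, which is not shown here):

# Hypothetical handler module for the older additional_resources API above.
class ExampleHandler:
    def __init__(self, config, module_api):
        self.config = config
        self.module_api = module_api

    async def handle_request(self, request):
        # Called with the twisted Request for each hit on the configured path.
        # A bare-bones response, written directly on the request for illustration.
        request.setResponseCode(200)
        request.setHeader(b"Content-Type", b"application/json")
        request.write(b"{}")
        request.finish()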
Пример #23
0
    def read_config(self, config):
        self.spam_checker = None

        provider = config.get("spam_checker", None)
        if provider is not None:
            self.spam_checker = load_module(provider)
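The provider dict handed to load_module here follows the same module/config layout used elsewhere in these examples; a hypothetical parsed value of config["spam_checker"] (module path invented):

# Hypothetical spam_checker entry; the module path is made up for illustration.
spam_checker_config = {
    "module": "my_project.spam.ExampleSpamChecker",
    "config": {"block_all_invites": False},
}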
Пример #24
0
    def read_config(self, config):
        self.max_upload_size = self.parse_size(config["max_upload_size"])
        self.max_image_pixels = self.parse_size(config["max_image_pixels"])
        self.max_spider_size = self.parse_size(config["max_spider_size"])

        self.media_store_path = self.ensure_directory(
            config["media_store_path"])

        backup_media_store_path = config.get("backup_media_store_path")

        synchronous_backup_media_store = config.get(
            "synchronous_backup_media_store", False)

        storage_providers = config.get("media_storage_providers", [])

        if backup_media_store_path:
            if storage_providers:
                raise ConfigError(
                    "Cannot use both 'backup_media_store_path' and 'storage_providers'"
                )

            storage_providers = [{
                "module": "file_system",
                "store_local": True,
                "store_synchronous": synchronous_backup_media_store,
                "store_remote": True,
                "config": {
                    "directory": backup_media_store_path,
                }
            }]

        # This is a list of config that can be used to create the storage
        # providers. The entries are tuples of (Class, class_config,
        # MediaStorageProviderConfig), where Class is the class of the provider,
        # the class_config the config to pass to it, and
        # MediaStorageProviderConfig are options for StorageProviderWrapper.
        #
        # We don't create the storage providers here as not all workers need
        # them to be started.
        self.media_storage_providers = []

        for provider_config in storage_providers:
            # We special case the module "file_system" so as not to need to
            # expose FileStorageProviderBackend
            if provider_config["module"] == "file_system":
                provider_config["module"] = (
                    "synapse.rest.media.v1.storage_provider"
                    ".FileStorageProviderBackend")

            provider_class, parsed_config = load_module(provider_config)

            wrapper_config = MediaStorageProviderConfig(
                provider_config.get("store_local", False),
                provider_config.get("store_remote", False),
                provider_config.get("store_synchronous", False),
            )

            self.media_storage_providers.append((
                provider_class,
                parsed_config,
                wrapper_config,
            ))

        self.uploads_path = self.ensure_directory(config["uploads_path"])
        self.dynamic_thumbnails = config["dynamic_thumbnails"]
        self.thumbnail_requirements = parse_thumbnail_requirements(
            config["thumbnail_sizes"])
        self.url_preview_enabled = config.get("url_preview_enabled", False)
        if self.url_preview_enabled:
            try:
                import lxml
                lxml  # To stop unused lint.
            except ImportError:
                raise ConfigError(MISSING_LXML)

            try:
                from netaddr import IPSet
            except ImportError:
                raise ConfigError(MISSING_NETADDR)

            if "url_preview_ip_range_blacklist" in config:
                self.url_preview_ip_range_blacklist = IPSet(
                    config["url_preview_ip_range_blacklist"])
            else:
                raise ConfigError(
                    "For security, you must specify an explicit target IP address "
                    "blacklist in url_preview_ip_range_blacklist for url previewing "
                    "to work")

            self.url_preview_ip_range_whitelist = IPSet(
                config.get("url_preview_ip_range_whitelist", ()))

            self.url_preview_url_blacklist = config.get(
                "url_preview_url_blacklist", ())
Пример #25
0
    def read_config(self, config, **kwargs):

        # Only enable the media repo if either the media repo is enabled or the
        # current worker app is the media repo.
        if (self.enable_media_repo is False and
                config.get("worker_app") != "synapse.app.media_repository"):
            self.can_load_media_repo = False
            return
        else:
            self.can_load_media_repo = True

        # Whether this instance should be the one to run the background jobs to
        # e.g clean up old URL previews.
        self.media_instance_running_background_jobs = config.get(
            "media_instance_running_background_jobs", )

        self.max_upload_size = self.parse_size(
            config.get("max_upload_size", "50M"))
        self.max_image_pixels = self.parse_size(
            config.get("max_image_pixels", "32M"))
        self.max_spider_size = self.parse_size(
            config.get("max_spider_size", "10M"))

        self.media_store_path = self.ensure_directory(
            config.get("media_store_path", "media_store"))

        backup_media_store_path = config.get("backup_media_store_path")

        synchronous_backup_media_store = config.get(
            "synchronous_backup_media_store", False)

        storage_providers = config.get("media_storage_providers", [])

        if backup_media_store_path:
            if storage_providers:
                raise ConfigError(
                    "Cannot use both 'backup_media_store_path' and 'storage_providers'"
                )

            storage_providers = [{
                "module": "file_system",
                "store_local": True,
                "store_synchronous": synchronous_backup_media_store,
                "store_remote": True,
                "config": {
                    "directory": backup_media_store_path
                },
            }]

        # This is a list of config that can be used to create the storage
        # providers. The entries are tuples of (Class, class_config,
        # MediaStorageProviderConfig), where Class is the class of the provider,
        # the class_config the config to pass to it, and
        # MediaStorageProviderConfig are options for StorageProviderWrapper.
        #
        # We don't create the storage providers here as not all workers need
        # them to be started.
        self.media_storage_providers = []  # type: List[tuple]

        for i, provider_config in enumerate(storage_providers):
            # We special case the module "file_system" so as not to need to
            # expose FileStorageProviderBackend
            if provider_config["module"] == "file_system":
                provider_config["module"] = (
                    "synapse.rest.media.v1.storage_provider"
                    ".FileStorageProviderBackend")

            provider_class, parsed_config = load_module(
                provider_config, ("media_storage_providers", "<item %i>" % i))

            wrapper_config = MediaStorageProviderConfig(
                provider_config.get("store_local", False),
                provider_config.get("store_remote", False),
                provider_config.get("store_synchronous", False),
            )

            self.media_storage_providers.append(
                (provider_class, parsed_config, wrapper_config))

        self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
        self.thumbnail_requirements = parse_thumbnail_requirements(
            config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES))
        self.url_preview_enabled = config.get("url_preview_enabled", False)
        if self.url_preview_enabled:
            try:
                check_requirements("url_preview")

            except DependencyException as e:
                raise ConfigError(e.message)

            if "url_preview_ip_range_blacklist" not in config:
                raise ConfigError(
                    "For security, you must specify an explicit target IP address "
                    "blacklist in url_preview_ip_range_blacklist for url previewing "
                    "to work")

            # we always blacklist '0.0.0.0' and '::', which are supposed to be
            # unroutable addresses.
            self.url_preview_ip_range_blacklist = generate_ip_set(
                config["url_preview_ip_range_blacklist"],
                ["0.0.0.0", "::"],
                config_path=("url_preview_ip_range_blacklist", ),
            )

            self.url_preview_ip_range_whitelist = generate_ip_set(
                config.get("url_preview_ip_range_whitelist", ()),
                config_path=("url_preview_ip_range_whitelist", ),
            )

            self.url_preview_url_blacklist = config.get(
                "url_preview_url_blacklist", ())

            self.url_preview_accept_language = config.get(
                "url_preview_accept_language") or ["en"]
Пример #26
0
    def read_config(self, config):
        self.max_upload_size = self.parse_size(config.get("max_upload_size", "10M"))
        self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M"))
        self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M"))

        self.media_store_path = self.ensure_directory(config["media_store_path"])

        backup_media_store_path = config.get("backup_media_store_path")

        synchronous_backup_media_store = config.get(
            "synchronous_backup_media_store", False
        )

        storage_providers = config.get("media_storage_providers", [])

        if backup_media_store_path:
            if storage_providers:
                raise ConfigError(
                    "Cannot use both 'backup_media_store_path' and 'storage_providers'"
                )

            storage_providers = [{
                "module": "file_system",
                "store_local": True,
                "store_synchronous": synchronous_backup_media_store,
                "store_remote": True,
                "config": {
                    "directory": backup_media_store_path,
                }
            }]

        # This is a list of config that can be used to create the storage
        # providers. The entries are tuples of (Class, class_config,
        # MediaStorageProviderConfig), where Class is the class of the provider,
        # the class_config the config to pass to it, and
        # MediaStorageProviderConfig are options for StorageProviderWrapper.
        #
        # We don't create the storage providers here as not all workers need
        # them to be started.
        self.media_storage_providers = []

        for provider_config in storage_providers:
            # We special case the module "file_system" so as not to need to
            # expose FileStorageProviderBackend
            if provider_config["module"] == "file_system":
                provider_config["module"] = (
                    "synapse.rest.media.v1.storage_provider"
                    ".FileStorageProviderBackend"
                )

            provider_class, parsed_config = load_module(provider_config)

            wrapper_config = MediaStorageProviderConfig(
                provider_config.get("store_local", False),
                provider_config.get("store_remote", False),
                provider_config.get("store_synchronous", False),
            )

            self.media_storage_providers.append(
                (provider_class, parsed_config, wrapper_config,)
            )

        self.uploads_path = self.ensure_directory(config["uploads_path"])
        self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
        self.thumbnail_requirements = parse_thumbnail_requirements(
            config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES),
        )
        self.url_preview_enabled = config.get("url_preview_enabled", False)
        if self.url_preview_enabled:
            try:
                import lxml
                lxml  # To stop unused lint.
            except ImportError:
                raise ConfigError(MISSING_LXML)

            try:
                from netaddr import IPSet
            except ImportError:
                raise ConfigError(MISSING_NETADDR)

            if "url_preview_ip_range_blacklist" not in config:
                raise ConfigError(
                    "For security, you must specify an explicit target IP address "
                    "blacklist in url_preview_ip_range_blacklist for url previewing "
                    "to work"
                )

            self.url_preview_ip_range_blacklist = IPSet(
                config["url_preview_ip_range_blacklist"]
            )

            # we always blacklist '0.0.0.0' and '::', which are supposed to be
            # unroutable addresses.
            self.url_preview_ip_range_blacklist.update(['0.0.0.0', '::'])

            self.url_preview_ip_range_whitelist = IPSet(
                config.get("url_preview_ip_range_whitelist", ())
            )

            self.url_preview_url_blacklist = config.get(
                "url_preview_url_blacklist", ()
            )
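The blacklist handling above leans on netaddr's IPSet; here is the same pattern in isolation, with example ranges only:

# Minimal illustration of the IPSet usage above (example ranges only).
from netaddr import IPAddress, IPSet

blacklist = IPSet(["10.0.0.0/8", "127.0.0.0/8"])
blacklist.update(["0.0.0.0", "::"])  # the always-blocked unroutable addresses

assert IPAddress("10.1.2.3") in blacklist
assert IPAddress("8.8.8.8") not in blacklist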
Пример #27
0
    def _listener_http(self, config: HomeServerConfig,
                       listener_config: ListenerConfig) -> Iterable[Port]:
        port = listener_config.port
        bind_addresses = listener_config.bind_addresses
        tls = listener_config.tls
        # Must exist since this is an HTTP listener.
        assert listener_config.http_options is not None
        site_tag = listener_config.http_options.tag
        if site_tag is None:
            site_tag = str(port)

        # We always include a health resource.
        resources: Dict[str, Resource] = {"/health": HealthResource()}

        for res in listener_config.http_options.resources:
            for name in res.names:
                if name == "openid" and "federation" in res.names:
                    # Skip loading openid resource if federation is defined
                    # since federation resource will include openid
                    continue
                resources.update(
                    self._configure_named_resource(name, res.compress))

        additional_resources = listener_config.http_options.additional_resources
        logger.debug("Configuring additional resources: %r",
                     additional_resources)
        module_api = self.get_module_api()
        for path, resmodule in additional_resources.items():
            handler_cls, config = load_module(
                resmodule,
                ("listeners", site_tag, "additional_resources", "<%s>" %
                 (path, )),
            )
            handler = handler_cls(config, module_api)
            if isinstance(handler, Resource):
                resource = handler
            elif hasattr(handler, "handle_request"):
                resource = AdditionalResource(self, handler.handle_request)
            else:
                raise ConfigError(
                    "additional_resource %s does not implement a known interface"
                    % (resmodule["module"], ))
            resources[path] = resource

        # Attach additional resources registered by modules.
        resources.update(self._module_web_resources)
        self._module_web_resources_consumed = True

        # Try to find something useful to serve at '/':
        #
        # 1. Redirect to the web client if it is an HTTP(S) URL.
        # 2. Redirect to the static "Synapse is running" page.
        # 3. Do not redirect and use a blank resource.
        if self.config.server.web_client_location:
            root_resource: Resource = RootOptionsRedirectResource(
                self.config.server.web_client_location)
        elif STATIC_PREFIX in resources:
            root_resource = RootOptionsRedirectResource(STATIC_PREFIX)
        else:
            root_resource = OptionsResource()

        site = SynapseSite(
            "synapse.access.%s.%s" % ("https" if tls else "http", site_tag),
            site_tag,
            listener_config,
            create_resource_tree(resources, root_resource),
            self.version_string,
            max_request_body_size=max_request_body_size(self.config),
            reactor=self.get_reactor(),
        )

        if tls:
            # refresh_certificate should have been called before this.
            assert self.tls_server_context_factory is not None
            ports = listen_ssl(
                bind_addresses,
                port,
                site,
                self.tls_server_context_factory,
                reactor=self.get_reactor(),
            )
            logger.info("Synapse now listening on TCP port %d (TLS)", port)

        else:
            ports = listen_tcp(
                bind_addresses,
                port,
                site,
                reactor=self.get_reactor(),
            )
            logger.info("Synapse now listening on TCP port %d", port)

        return ports
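Besides the handle_request style, this newer variant also accepts a handler that is itself a twisted Resource and mounts it at the configured path as-is. A hypothetical module taking that branch:

# Hypothetical handler for the Resource branch above; names are invented.
from twisted.web.resource import Resource


class ExampleStaticResource(Resource):
    isLeaf = True

    def __init__(self, config, module_api):
        super().__init__()
        self._greeting = config.get("greeting", "hello")

    def render_GET(self, request):
        # Standard twisted rendering: return the response body as bytes.
        request.setHeader(b"Content-Type", b"text/plain; charset=utf-8")
        return self._greeting.encode("utf-8")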
Пример #28
0
def _parse_oidc_config_dict(
        oidc_config: JsonDict,
        config_path: Tuple[str, ...]) -> "OidcProviderConfig":
    """Take the configuration dict and parse it into an OidcProviderConfig

    Raises:
        ConfigError if the configuration is malformed.
    """
    ump_config = oidc_config.get("user_mapping_provider", {})
    ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
    ump_config.setdefault("config", {})

    (
        user_mapping_provider_class,
        user_mapping_provider_config,
    ) = load_module(ump_config, config_path + ("user_mapping_provider", ))

    # Ensure loaded user mapping module has defined all necessary methods
    required_methods = [
        "get_remote_user_id",
        "map_user_attributes",
    ]
    missing_methods = [
        method for method in required_methods
        if not hasattr(user_mapping_provider_class, method)
    ]
    if missing_methods:
        raise ConfigError(
            "Class %s is missing required "
            "methods: %s" % (
                user_mapping_provider_class,
                ", ".join(missing_methods),
            ),
            config_path + ("user_mapping_provider", "module"),
        )

    # MSC2858 will apply certain limits in what can be used as an IdP id, so let's
    # enforce those limits now.
    # TODO: factor out this stuff to a generic function
    idp_id = oidc_config.get("idp_id", "oidc")

    # TODO: update this validity check based on what MSC2858 decides.
    valid_idp_chars = set(string.ascii_lowercase + string.digits + "-._")

    if any(c not in valid_idp_chars for c in idp_id):
        raise ConfigError(
            'idp_id may only contain a-z, 0-9, "-", ".", "_"',
            config_path + ("idp_id", ),
        )

    if idp_id[0] not in string.ascii_lowercase:
        raise ConfigError(
            "idp_id must start with a-z",
            config_path + ("idp_id", ),
        )

    # prefix the given IDP with a prefix specific to the SSO mechanism, to avoid
    # clashes with other mechs (such as SAML, CAS).
    #
    # We allow "oidc" as an exception so that people migrating from old-style
    # "oidc_config" format (which has long used "oidc" as its idp_id) can migrate to
    # a new-style "oidc_providers" entry without changing the idp_id for their provider
    # (and thereby invalidating their user_external_ids data).

    if idp_id != "oidc":
        idp_id = "oidc-" + idp_id

    # MSC2858 also specifies that the idp_icon must be a valid MXC uri
    idp_icon = oidc_config.get("idp_icon")
    if idp_icon is not None:
        try:
            parse_and_validate_mxc_uri(idp_icon)
        except ValueError as e:
            raise ConfigError("idp_icon must be a valid MXC URI",
                              config_path + ("idp_icon", )) from e

    return OidcProviderConfig(
        idp_id=idp_id,
        idp_name=oidc_config.get("idp_name", "OIDC"),
        idp_icon=idp_icon,
        discover=oidc_config.get("discover", True),
        issuer=oidc_config["issuer"],
        client_id=oidc_config["client_id"],
        client_secret=oidc_config["client_secret"],
        client_auth_method=oidc_config.get("client_auth_method",
                                           "client_secret_basic"),
        scopes=oidc_config.get("scopes", ["openid"]),
        authorization_endpoint=oidc_config.get("authorization_endpoint"),
        token_endpoint=oidc_config.get("token_endpoint"),
        userinfo_endpoint=oidc_config.get("userinfo_endpoint"),
        jwks_uri=oidc_config.get("jwks_uri"),
        skip_verification=oidc_config.get("skip_verification", False),
        user_profile_method=oidc_config.get("user_profile_method", "auto"),
        allow_existing_users=oidc_config.get("allow_existing_users", False),
        user_mapping_provider_class=user_mapping_provider_class,
        user_mapping_provider_config=user_mapping_provider_config,
    )
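To make the idp_id rules concrete: "github" passes the checks and is stored as "oidc-github", "oidc" is kept as-is for backwards compatibility, while values such as "2fa" (leading digit) or "My-IdP" (uppercase) are rejected. A standalone paraphrase of the same normalisation (not a Synapse API):

# Standalone paraphrase of the idp_id validation and prefixing above.
import string


def normalise_idp_id(idp_id: str) -> str:
    valid_chars = set(string.ascii_lowercase + string.digits + "-._")
    if any(c not in valid_chars for c in idp_id):
        raise ValueError('idp_id may only contain a-z, 0-9, "-", ".", "_"')
    if idp_id[0] not in string.ascii_lowercase:
        raise ValueError("idp_id must start with a-z")
    return idp_id if idp_id == "oidc" else "oidc-" + idp_id


assert normalise_idp_id("github") == "oidc-github"
assert normalise_idp_id("oidc") == "oidc"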
Пример #29
0
    def _listener_http(self, config, listener_config):
        port = listener_config["port"]
        bind_addresses = listener_config["bind_addresses"]
        tls = listener_config.get("tls", False)
        site_tag = listener_config.get("tag", port)

        if tls and config.no_tls:
            raise ConfigError(
                "Listener on port %i has TLS enabled, but no_tls is set" % (port,),
            )

        resources = {}
        for res in listener_config["resources"]:
            for name in res["names"]:
                resources.update(self._configure_named_resource(
                    name, res.get("compress", False),
                ))

        additional_resources = listener_config.get("additional_resources", {})
        logger.debug("Configuring additional resources: %r",
                     additional_resources)
        module_api = ModuleApi(self, self.get_auth_handler())
        for path, resmodule in additional_resources.items():
            handler_cls, config = load_module(resmodule)
            handler = handler_cls(config, module_api)
            resources[path] = AdditionalResource(self, handler.handle_request)

        # try to find something useful to redirect '/' to
        if WEB_CLIENT_PREFIX in resources:
            root_resource = RootRedirect(WEB_CLIENT_PREFIX)
        elif STATIC_PREFIX in resources:
            root_resource = RootRedirect(STATIC_PREFIX)
        else:
            root_resource = NoResource()

        root_resource = create_resource_tree(resources, root_resource)

        if tls:
            return listen_ssl(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.https.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                ),
                self.tls_server_context_factory,
            )

        else:
            return listen_tcp(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.http.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                )
            )
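For completeness, the dict-style listener_config consumed by this early variant might look roughly like the following (port, paths and module name are placeholders):

# Hypothetical old-style listener configuration for _listener_http above.
listener_config = {
    "port": 8008,
    "bind_addresses": ["::1", "127.0.0.1"],
    "tls": False,
    "tag": "client",
    "resources": [
        {"names": ["client", "federation"], "compress": True},
    ],
    "additional_resources": {
        "/_matrix/custom/hello": {
            "module": "my_project.web.ExampleHandler",
            "config": {},
        },
    },
}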