Example #1
def _check_resource_config(listeners):
    resource_names = set(
        res_name
        for listener in listeners
        for res in listener.get("resources", [])
        for res_name in res.get("names", [])
    )

    for resource in resource_names:
        if resource not in KNOWN_RESOURCES:
            raise ConfigError(
                "Unknown listener resource '%s'" % (resource, )
            )
        if resource == "consent":
            try:
                check_requirements('resources.consent')
            except DependencyException as e:
                raise ConfigError(e.message)
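Every example on this page wraps check_requirements() in a try/except and turns a DependencyException into a ConfigError. As orientation only, here is a minimal sketch of the contract the newer callers assume (an optional "extra" name, and a DependencyException whose message is a property); it is not Synapse's actual implementation, the older examples below pass a whole config object instead, and the extras-to-modules mapping is a placeholder.

from typing import Optional


class DependencyException(Exception):
    @property
    def message(self) -> str:
        # Callers read e.message and wrap it in a ConfigError.
        return ", ".join(str(arg) for arg in self.args)


def check_requirements(extra: Optional[str] = None) -> None:
    # Placeholder mapping from an "extra" name to the modules it needs.
    required = {"resources.consent": ["jinja2"]}.get(extra, [])
    missing = []
    for module in required:
        try:
            __import__(module)
        except ImportError:
            missing.append(module)
    if missing:
        raise DependencyException(
            "missing modules: %s" % (", ".join(missing),)
        )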
Example #2
def main():
    with LoggingContext("main"):
        # check base requirements
        check_requirements()
        hs = setup(sys.argv[1:])
        run(hs)
Example #3
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

from synapse import python_dependencies  # noqa: E402

sys.dont_write_bytecode = True

try:
    python_dependencies.check_requirements()
except python_dependencies.DependencyException as e:
    sys.stderr.writelines(e.message)
    sys.exit(1)
Example #4
    def read_config(self, config, **kwargs):

        # Only enable the media repo if either the media repo is enabled or the
        # current worker app is the media repo.
        if (self.enable_media_repo is False and
                config.get("worker_app") != "synapse.app.media_repository"):
            self.can_load_media_repo = False
            return
        else:
            self.can_load_media_repo = True

        # Whether this instance should be the one to run the background jobs,
        # e.g. cleaning up old URL previews.
        self.media_instance_running_background_jobs = config.get(
            "media_instance_running_background_jobs")

        self.max_upload_size = self.parse_size(
            config.get("max_upload_size", "50M"))
        self.max_image_pixels = self.parse_size(
            config.get("max_image_pixels", "32M"))
        self.max_spider_size = self.parse_size(
            config.get("max_spider_size", "10M"))

        self.media_store_path = self.ensure_directory(
            config.get("media_store_path", "media_store"))

        backup_media_store_path = config.get("backup_media_store_path")

        synchronous_backup_media_store = config.get(
            "synchronous_backup_media_store", False)

        storage_providers = config.get("media_storage_providers", [])

        if backup_media_store_path:
            if storage_providers:
                raise ConfigError(
                    "Cannot use both 'backup_media_store_path' and 'storage_providers'"
                )

            storage_providers = [{
                "module": "file_system",
                "store_local": True,
                "store_synchronous": synchronous_backup_media_store,
                "store_remote": True,
                "config": {
                    "directory": backup_media_store_path
                },
            }]

        # This is a list of configs that can be used to create the storage
        # providers. The entries are tuples of (Class, class_config,
        # MediaStorageProviderConfig), where Class is the provider class,
        # class_config is the config to pass to it, and
        # MediaStorageProviderConfig holds the options for StorageProviderWrapper.
        #
        # We don't create the storage providers here as not all workers need
        # them to be started.
        self.media_storage_providers = []  # type: List[tuple]

        for i, provider_config in enumerate(storage_providers):
            # We special case the module "file_system" so as not to need to
            # expose FileStorageProviderBackend
            if provider_config["module"] == "file_system":
                provider_config["module"] = (
                    "synapse.rest.media.v1.storage_provider"
                    ".FileStorageProviderBackend")

            provider_class, parsed_config = load_module(
                provider_config, ("media_storage_providers", "<item %i>" % i))

            wrapper_config = MediaStorageProviderConfig(
                provider_config.get("store_local", False),
                provider_config.get("store_remote", False),
                provider_config.get("store_synchronous", False),
            )

            self.media_storage_providers.append(
                (provider_class, parsed_config, wrapper_config))

        self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
        self.thumbnail_requirements = parse_thumbnail_requirements(
            config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES))
        self.url_preview_enabled = config.get("url_preview_enabled", False)
        if self.url_preview_enabled:
            try:
                check_requirements("url_preview")

            except DependencyException as e:
                raise ConfigError(e.message)

            if "url_preview_ip_range_blacklist" not in config:
                raise ConfigError(
                    "For security, you must specify an explicit target IP address "
                    "blacklist in url_preview_ip_range_blacklist for url previewing "
                    "to work")

            # we always blacklist '0.0.0.0' and '::', which are supposed to be
            # unroutable addresses.
            self.url_preview_ip_range_blacklist = generate_ip_set(
                config["url_preview_ip_range_blacklist"],
                ["0.0.0.0", "::"],
                config_path=("url_preview_ip_range_blacklist", ),
            )

            self.url_preview_ip_range_whitelist = generate_ip_set(
                config.get("url_preview_ip_range_whitelist", ()),
                config_path=("url_preview_ip_range_whitelist", ),
            )

            self.url_preview_url_blacklist = config.get(
                "url_preview_url_blacklist", ())

            self.url_preview_accept_language = config.get(
                "url_preview_accept_language") or ["en"]
Example #5
def main():
    with LoggingContext("main"):
        check_requirements()
        setup()
Example #6
def setup():
    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        sys.argv[1:],
        generate_section="Homeserver"
    )

    config.setup_logging()

    check_requirements()

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    if re.search(":[0-9]+$", config.server_name):
        domain_with_port = config.server_name
    else:
        domain_with_port = "%s:%s" % (config.server_name, config.bind_port)

    tls_context_factory = context_factory.ServerContextFactory(config)

    hs = SynapseHomeServer(
        config.server_name,
        domain_with_port=domain_with_port,
        upload_dir=os.path.abspath("uploads"),
        db_name=config.database_path,
        tls_context_factory=tls_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
    )

    hs.create_resource_tree(
        web_client=config.webclient,
        redirect_root_to_web_client=True,
    )

    db_name = hs.get_db_name()

    logger.info("Preparing database: %s...", db_name)

    try:
        with sqlite3.connect(db_name) as db_conn:
            prepare_database(db_conn)
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %s.", db_name)

    if config.manhole:
        f = twisted.manhole.telnet.ShellFactory()
        f.username = "******"
        f.password = "******"
        f.namespace['hs'] = hs
        reactor.listenTCP(config.manhole, f, interface='127.0.0.1')

    bind_port = config.bind_port
    if config.no_tls:
        bind_port = None

    hs.start_listening(bind_port, config.unsecure_port)

    hs.get_pusherpool().start()
    hs.get_state_handler().start_caching()
    hs.get_datastore().start_profiling()
    hs.get_replication_layer().start_get_pdu_cache()

    if config.daemonize:
        print(config.pid_file)
        daemon = Daemonize(
            app="synapse-homeserver",
            pid=config.pid_file,
            action=run,
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )

        daemon.start()
    else:
        reactor.run()
Example #7
def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        config_options,
        generate_section="Homeserver"
    )

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config["name"])
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        tls_context_factory=tls_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    logger.info("Preparing database: %r...", config.database_config)

    try:
        db_conn = database_engine.module.connect(
            **{
                k: v for k, v in config.database_config.get("args", {}).items()
                if not k.startswith("cp_")
            }
        )

        database_engine.prepare_database(db_conn)
        hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %r.", config.database_config)

    hs.start_listening()

    hs.get_pusherpool().start()
    hs.get_state_handler().start_caching()
    hs.get_datastore().start_profiling()
    hs.get_replication_layer().start_get_pdu_cache()

    return hs
Example #8
def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    try:
        config = HomeServerConfig.load_or_generate_config(
            "Synapse Homeserver",
            config_options,
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    if not config:
        # If a config isn't returned, and an exception isn't raised, we're just
        # generating config files and shouldn't try to continue.
        sys.exit(0)

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)

    version_string = get_version_string("Synapse", synapse)

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config)
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    logger.info("Preparing database: %s...", config.database_config['name'])

    try:
        db_conn = hs.get_db_conn(run_new_connection=False)
        prepare_database(db_conn, database_engine, config=config)
        database_engine.on_new_connection(db_conn)

        hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %s.", config.database_config['name'])

    hs.setup()
    hs.start_listening()

    def start():
        hs.get_pusherpool().start()
        hs.get_state_handler().start_caching()
        hs.get_datastore().start_profiling()
        hs.get_datastore().start_doing_background_updates()
        hs.get_replication_layer().start_get_pdu_cache()

    reactor.callWhenRunning(start)

    return hs
Example #9
    def read_config(self, config, **kwargs):
        self.event_cache_size = self.parse_size(
            config.get("event_cache_size", _DEFAULT_EVENT_CACHE_SIZE))
        self.cache_factors: Dict[str, float] = {}

        cache_config = config.get("caches") or {}
        self.global_factor = cache_config.get("global_factor",
                                              properties.default_factor_size)
        if not isinstance(self.global_factor, (int, float)):
            raise ConfigError("caches.global_factor must be a number.")

        # Set the global one so that it's reflected in new caches
        properties.default_factor_size = self.global_factor

        # Load cache factors from the config
        individual_factors = cache_config.get("per_cache_factors") or {}
        if not isinstance(individual_factors, dict):
            raise ConfigError("caches.per_cache_factors must be a dictionary")

        # Canonicalise the cache names *before* updating with the environment
        # variables.
        individual_factors = {
            _canonicalise_cache_name(key): val
            for key, val in individual_factors.items()
        }

        # Override factors from environment if necessary
        individual_factors.update({
            _canonicalise_cache_name(key[len(_CACHE_PREFIX) + 1:]): float(val)
            for key, val in self._environ.items()
            if key.startswith(_CACHE_PREFIX + "_")
        })

        for cache, factor in individual_factors.items():
            if not isinstance(factor, (int, float)):
                raise ConfigError(
                    "caches.per_cache_factors.%s must be a number" % (cache, ))
            self.cache_factors[cache] = factor

        self.track_memory_usage = cache_config.get("track_memory_usage", False)
        if self.track_memory_usage:
            try:
                check_requirements("cache_memory")
            except DependencyException as e:
                raise ConfigError(
                    e.message  # noqa: B306, DependencyException.message is a property
                )

        expiry_time = cache_config.get("expiry_time")
        if expiry_time:
            self.expiry_time_msec = self.parse_duration(expiry_time)
        else:
            self.expiry_time_msec = None

        self.sync_response_cache_duration = self.parse_duration(
            cache_config.get("sync_response_cache_duration", 0))

        # Resize all caches (if necessary) with the new factors we've loaded
        self.resize_all_caches()

        # Store this function so that it can be called from other classes without
        # needing an instance of Config
        properties.resize_all_caches_func = self.resize_all_caches
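The environment-variable override above lets a single variable bump one cache's factor without editing the config file. A self-contained sketch of that behaviour, assuming _CACHE_PREFIX is "SYNAPSE_CACHE_FACTOR" and that _canonicalise_cache_name simply lower-cases the name (both are assumptions made for illustration):

_CACHE_PREFIX = "SYNAPSE_CACHE_FACTOR"


def _canonicalise_cache_name(name: str) -> str:
    return name.lower()


environ = {"SYNAPSE_CACHE_FACTOR_GET_USERS": "2.0", "PATH": "/usr/bin"}
individual_factors = {"get_users": 0.5}

# Same dict comprehension as in read_config() above: strip the prefix and the
# underscore, canonicalise the remainder, and coerce the value to float.
individual_factors.update({
    _canonicalise_cache_name(key[len(_CACHE_PREFIX) + 1:]): float(val)
    for key, val in environ.items()
    if key.startswith(_CACHE_PREFIX + "_")
})

print(individual_factors)  # {'get_users': 2.0} -- the environment wins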
Example #10
    def read_config(self, config, **kwargs):
        self.saml2_enabled = False

        saml2_config = config.get("saml2_config")

        if not saml2_config or not saml2_config.get("enabled", True):
            return

        if not saml2_config.get("sp_config") and not saml2_config.get(
                "config_path"):
            return

        try:
            check_requirements("saml2")
        except DependencyException as e:
            raise ConfigError(
                e.message  # noqa: B306, DependencyException.message is a property
            )

        self.saml2_enabled = True

        attribute_requirements = saml2_config.get(
            "attribute_requirements") or []
        self.attribute_requirements = _parse_attribute_requirements_def(
            attribute_requirements)

        self.saml2_grandfathered_mxid_source_attribute = saml2_config.get(
            "grandfathered_mxid_source_attribute", "uid")

        self.saml2_idp_entityid = saml2_config.get("idp_entityid", None)

        # user_mapping_provider may be None if the key is present but has no value
        ump_dict = saml2_config.get("user_mapping_provider") or {}

        # Use the default user mapping provider if not set
        ump_dict.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
        if ump_dict.get("module") == LEGACY_USER_MAPPING_PROVIDER:
            ump_dict["module"] = DEFAULT_USER_MAPPING_PROVIDER

        # Ensure a config is present
        ump_dict["config"] = ump_dict.get("config") or {}

        if ump_dict["module"] == DEFAULT_USER_MAPPING_PROVIDER:
            # Load deprecated options for use by the default module
            old_mxid_source_attribute = saml2_config.get(
                "mxid_source_attribute")
            if old_mxid_source_attribute:
                logger.warning(
                    "The config option saml2_config.mxid_source_attribute is deprecated. "
                    "Please use saml2_config.user_mapping_provider.config"
                    ".mxid_source_attribute instead.")
                ump_dict["config"][
                    "mxid_source_attribute"] = old_mxid_source_attribute

            old_mxid_mapping = saml2_config.get("mxid_mapping")
            if old_mxid_mapping:
                logger.warning(
                    "The config option saml2_config.mxid_mapping is deprecated. Please "
                    "use saml2_config.user_mapping_provider.config.mxid_mapping instead."
                )
                ump_dict["config"]["mxid_mapping"] = old_mxid_mapping

        # Retrieve an instance of the module's class
        # Pass the config dictionary to the module for processing
        (
            self.saml2_user_mapping_provider_class,
            self.saml2_user_mapping_provider_config,
        ) = load_module(ump_dict, ("saml2_config", "user_mapping_provider"))

        # Ensure loaded user mapping module has defined all necessary methods
        # Note parse_config() is already checked during the call to load_module
        required_methods = [
            "get_saml_attributes",
            "saml_response_to_user_attributes",
            "get_remote_user_id",
        ]
        missing_methods = [
            method for method in required_methods
            if not hasattr(self.saml2_user_mapping_provider_class, method)
        ]
        if missing_methods:
            raise ConfigError(
                "Class specified by saml2_config."
                "user_mapping_provider.module is missing required "
                "methods: %s" % (", ".join(missing_methods), ))

        # Get the desired saml auth response attributes from the module
        saml2_config_dict = self._default_saml_config_dict(
            *self.saml2_user_mapping_provider_class.get_saml_attributes(
                self.saml2_user_mapping_provider_config))
        _dict_merge(merge_dict=saml2_config.get("sp_config", {}),
                    into_dict=saml2_config_dict)

        config_path = saml2_config.get("config_path", None)
        if config_path is not None:
            mod = load_python_module(config_path)
            config = getattr(mod, "CONFIG", None)
            if config is None:
                raise ConfigError(
                    "Config path specified by saml2_config.config_path does not "
                    "have a CONFIG property.")
            _dict_merge(merge_dict=config, into_dict=saml2_config_dict)

        import saml2.config

        self.saml2_sp_config = saml2.config.SPConfig()
        self.saml2_sp_config.load(saml2_config_dict)

        # session lifetime: in milliseconds
        self.saml2_session_lifetime = self.parse_duration(
            saml2_config.get("saml_session_lifetime", "15m"))
Example #11
    def read_config(self, config, **kwargs):
        self.oidc_enabled = False

        oidc_config = config.get("oidc_config")

        if not oidc_config or not oidc_config.get("enabled", False):
            return

        try:
            check_requirements("oidc")
        except DependencyException as e:
            raise ConfigError(e.message)

        public_baseurl = self.public_baseurl
        if public_baseurl is None:
            raise ConfigError(
                "oidc_config requires a public_baseurl to be set")
        self.oidc_callback_url = public_baseurl + "_synapse/oidc/callback"

        self.oidc_enabled = True
        self.oidc_discover = oidc_config.get("discover", True)
        self.oidc_issuer = oidc_config["issuer"]
        self.oidc_client_id = oidc_config["client_id"]
        self.oidc_client_secret = oidc_config["client_secret"]
        self.oidc_client_auth_method = oidc_config.get("client_auth_method",
                                                       "client_secret_basic")
        self.oidc_scopes = oidc_config.get("scopes", ["openid"])
        self.oidc_authorization_endpoint = oidc_config.get(
            "authorization_endpoint")
        self.oidc_token_endpoint = oidc_config.get("token_endpoint")
        self.oidc_userinfo_endpoint = oidc_config.get("userinfo_endpoint")
        self.oidc_jwks_uri = oidc_config.get("jwks_uri")
        self.oidc_skip_verification = oidc_config.get("skip_verification",
                                                      False)
        self.oidc_user_profile_method = oidc_config.get(
            "user_profile_method", "auto")
        self.oidc_allow_existing_users = oidc_config.get(
            "allow_existing_users", False)

        ump_config = oidc_config.get("user_mapping_provider", {})
        ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
        ump_config.setdefault("config", {})

        (
            self.oidc_user_mapping_provider_class,
            self.oidc_user_mapping_provider_config,
        ) = load_module(ump_config)

        # Ensure loaded user mapping module has defined all necessary methods
        required_methods = [
            "get_remote_user_id",
            "map_user_attributes",
        ]
        missing_methods = [
            method for method in required_methods
            if not hasattr(self.oidc_user_mapping_provider_class, method)
        ]
        if missing_methods:
            raise ConfigError(
                "Class specified by oidc_config."
                "user_mapping_provider.module is missing required "
                "methods: %s" % (", ".join(missing_methods), ))
Example #12
def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        config_options,
        generate_section="Homeserver"
    )

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    if re.search(":[0-9]+$", config.server_name):
        domain_with_port = config.server_name
    else:
        domain_with_port = "%s:%s" % (config.server_name, config.bind_port)

    tls_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config["name"])
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        domain_with_port=domain_with_port,
        upload_dir=os.path.abspath("uploads"),
        db_config=config.database_config,
        tls_context_factory=tls_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    hs.create_resource_tree(
        redirect_root_to_web_client=True,
    )

    logger.info("Preparing database: %r...", config.database_config)

    try:
        db_conn = database_engine.module.connect(
            **{
                k: v for k, v in config.database_config.get("args", {}).items()
                if not k.startswith("cp_")
            }
        )

        database_engine.prepare_database(db_conn)
        hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %r.", config.database_config)

    if config.manhole:
        f = twisted.manhole.telnet.ShellFactory()
        f.username = "******"
        f.password = "******"
        f.namespace['hs'] = hs
        reactor.listenTCP(config.manhole, f, interface='127.0.0.1')

    hs.start_listening()

    hs.get_pusherpool().start()
    hs.get_state_handler().start_caching()
    hs.get_datastore().start_profiling()
    hs.get_replication_layer().start_get_pdu_cache()

    return hs
Example #13
    def read_config(self, config, **kwargs):
        self.saml2_enabled = False

        saml2_config = config.get("saml2_config")

        if not saml2_config or not saml2_config.get("enabled", True):
            return

        if not saml2_config.get("sp_config") and not saml2_config.get(
                "config_path"):
            return

        try:
            check_requirements("saml2")
        except DependencyException as e:
            raise ConfigError(e.message)

        self.saml2_enabled = True

        self.saml2_grandfathered_mxid_source_attribute = saml2_config.get(
            "grandfathered_mxid_source_attribute", "uid")

        # user_mapping_provider may be None if the key is present but has no value
        ump_dict = saml2_config.get("user_mapping_provider") or {}

        # Use the default user mapping provider if not set
        ump_dict.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)

        # Ensure a config is present
        ump_dict["config"] = ump_dict.get("config") or {}

        if ump_dict["module"] == DEFAULT_USER_MAPPING_PROVIDER:
            # Load deprecated options for use by the default module
            old_mxid_source_attribute = saml2_config.get(
                "mxid_source_attribute")
            if old_mxid_source_attribute:
                logger.warning(
                    "The config option saml2_config.mxid_source_attribute is deprecated. "
                    "Please use saml2_config.user_mapping_provider.config"
                    ".mxid_source_attribute instead.")
                ump_dict["config"][
                    "mxid_source_attribute"] = old_mxid_source_attribute

            old_mxid_mapping = saml2_config.get("mxid_mapping")
            if old_mxid_mapping:
                logger.warning(
                    "The config option saml2_config.mxid_mapping is deprecated. Please "
                    "use saml2_config.user_mapping_provider.config.mxid_mapping instead."
                )
                ump_dict["config"]["mxid_mapping"] = old_mxid_mapping

        # Retrieve an instance of the module's class
        # Pass the config dictionary to the module for processing
        (
            self.saml2_user_mapping_provider_class,
            self.saml2_user_mapping_provider_config,
        ) = load_module(ump_dict)

        # Ensure loaded user mapping module has defined all necessary methods
        # Note parse_config() is already checked during the call to load_module
        required_methods = [
            "get_saml_attributes",
            "saml_response_to_user_attributes",
            "get_remote_user_id",
        ]
        missing_methods = [
            method for method in required_methods
            if not hasattr(self.saml2_user_mapping_provider_class, method)
        ]
        if missing_methods:
            raise ConfigError(
                "Class specified by saml2_config."
                "user_mapping_provider.module is missing required "
                "methods: %s" % (", ".join(missing_methods), ))

        # Get the desired saml auth response attributes from the module
        saml2_config_dict = self._default_saml_config_dict(
            *self.saml2_user_mapping_provider_class.get_saml_attributes(
                self.saml2_user_mapping_provider_config))
        _dict_merge(merge_dict=saml2_config.get("sp_config", {}),
                    into_dict=saml2_config_dict)

        config_path = saml2_config.get("config_path", None)
        if config_path is not None:
            mod = load_python_module(config_path)
            _dict_merge(merge_dict=mod.CONFIG, into_dict=saml2_config_dict)

        import saml2.config

        self.saml2_sp_config = saml2.config.SPConfig()
        self.saml2_sp_config.load(saml2_config_dict)

        # session lifetime: in milliseconds
        self.saml2_session_lifetime = self.parse_duration(
            saml2_config.get("saml_session_lifetime", "15m"))

        template_dir = saml2_config.get("template_dir")
        if not template_dir:
            template_dir = pkg_resources.resource_filename(
                "synapse",
                "res/templates",
            )

        loader = jinja2.FileSystemLoader(template_dir)
        # enable auto-escape here, to avoid having to remember to escape
        # manually in the template
        env = jinja2.Environment(loader=loader, autoescape=True)
        self.saml2_error_html_template = env.get_template("saml_error.html")
Example #14
def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    try:
        config = HomeServerConfig.load_config("Synapse Homeserver",
                                              config_options,
                                              generate_section="Homeserver")
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    if not config:
        # If a config isn't returned, and an exception isn't raised, we're just
        # generating config files and shouldn't try to continue.
        sys.exit(0)

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)

    version_string = get_version_string("Synapse", synapse)

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config)
    config.database_config["args"][
        "cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    logger.info("Preparing database: %s...", config.database_config['name'])

    try:
        db_conn = hs.get_db_conn(run_new_connection=False)
        prepare_database(db_conn, database_engine, config=config)
        database_engine.on_new_connection(db_conn)

        hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n")
        sys.exit(1)

    logger.info("Database prepared in %s.", config.database_config['name'])

    hs.setup()
    hs.start_listening()

    def start():
        hs.get_pusherpool().start()
        hs.get_state_handler().start_caching()
        hs.get_datastore().start_profiling()
        hs.get_datastore().start_doing_background_updates()
        hs.get_replication_layer().start_get_pdu_cache()

    reactor.callWhenRunning(start)

    return hs
Example #15
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
sys.dont_write_bytecode = True
from synapse.python_dependencies import check_requirements

if __name__ == '__main__':
    check_requirements()

from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
from synapse.storage import (
    are_all_users_on_domain, UpgradeDatabaseException,
)

from synapse.server import HomeServer


from twisted.internet import reactor
from twisted.application import service
from twisted.enterprise import adbapi
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.static import File
from twisted.web.server import Site, GzipEncoderFactory, Request
Example #16
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
sys.dont_write_bytecode = True

from synapse import python_dependencies  # noqa: E402

try:
    python_dependencies.check_requirements()
except python_dependencies.MissingRequirementError as e:
    message = "\n".join([
        "Missing Requirement: %s" % (e.message,),
        "To install run:",
        "    pip install --upgrade --force \"%s\"" % (e.dependency,),
        "",
    ])
    sys.stderr.writelines(message)
    sys.exit(1)
Example #17
def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        config_options,
        generate_section="Homeserver"
    )

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config["name"])
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        tls_context_factory=tls_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    logger.info("Preparing database: %r...", config.database_config)

    try:
        db_conn = database_engine.module.connect(
            **{
                k: v for k, v in config.database_config.get("args", {}).items()
                if not k.startswith("cp_")
            }
        )

        database_engine.prepare_database(db_conn)
        hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %r.", config.database_config)

    hs.start_listening()

    hs.get_pusherpool().start()
    hs.get_state_handler().start_caching()
    hs.get_datastore().start_profiling()
    hs.get_replication_layer().start_get_pdu_cache()

    return hs
Example #18
def main():
    with LoggingContext("main"):
        # check base requirements
        check_requirements()
        hs = setup(sys.argv[1:])
        run(hs)
Example #19
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
sys.dont_write_bytecode = True
from synapse.python_dependencies import check_requirements, DEPENDENCY_LINKS

if __name__ == '__main__':
    check_requirements()

from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
from synapse.storage import (
    are_all_users_on_domain, UpgradeDatabaseException,
)

from synapse.server import HomeServer


from twisted.internet import reactor
from twisted.application import service
from twisted.enterprise import adbapi
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.static import File
from twisted.web.server import Site, GzipEncoderFactory, Request