Code example #1
def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
    """Setup a homeserver suitable for running tests against. Keyword arguments
    are passed to the Homeserver constructor. If no datastore is supplied a
    datastore backed by an in-memory sqlite db will be given to the HS.
    """
    if config is None:
        config = Mock()
        config.signing_key = [MockKey()]
        config.event_cache_size = 1
        config.disable_registration = False

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if datastore is None:
        db_pool = SQLiteMemoryDbPool()
        yield db_pool.prepare()
        hs = HomeServer(name,
                        db_pool=db_pool,
                        config=config,
                        version_string="Synapse/tests",
                        database_engine=create_engine("sqlite3"),
                        **kargs)
    else:
        hs = HomeServer(name,
                        db_pool=None,
                        datastore=datastore,
                        config=config,
                        version_string="Synapse/tests",
                        database_engine=create_engine("sqlite3"),
                        **kargs)

    defer.returnValue(hs)
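The helper above is a generator (note the `yield` and `defer.returnValue`), so it has to be driven by Twisted's inlineCallbacks machinery rather than called directly. A minimal usage sketch, assuming the helper is importable from the test utils module and is itself wrapped with `@defer.inlineCallbacks` (the decorator is not visible in this extract):

from twisted.internet import defer
from twisted.trial import unittest

from tests.utils import setup_test_homeserver  # assumed import path


class ExampleStoreTestCase(unittest.TestCase):
    @defer.inlineCallbacks
    def setUp(self):
        # Build a HomeServer backed by an in-memory sqlite datastore.
        self.hs = yield setup_test_homeserver()
        self.store = self.hs.get_datastore()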
Code example #2
File: utils.py  Project: rrix/synapse
def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
    """Setup a homeserver suitable for running tests against. Keyword arguments
    are passed to the Homeserver constructor. If no datastore is supplied a
    datastore backed by an in-memory sqlite db will be given to the HS.
    """
    if config is None:
        config = Mock()
        config.signing_key = [MockKey()]
        config.event_cache_size = 1
        config.disable_registration = False

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if datastore is None:
        db_pool = SQLiteMemoryDbPool()
        yield db_pool.prepare()
        hs = HomeServer(
            name, db_pool=db_pool, config=config,
            version_string="Synapse/tests",
            database_engine=create_engine("sqlite3"),
            **kargs
        )
    else:
        hs = HomeServer(
            name, db_pool=None, datastore=datastore, config=config,
            version_string="Synapse/tests",
            database_engine=create_engine("sqlite3"),
            **kargs
        )

    defer.returnValue(hs)
Code example #3
def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
    """Setup a homeserver suitable for running tests against. Keyword arguments
    are passed to the Homeserver constructor. If no datastore is supplied a
    datastore backed by an in-memory sqlite db will be given to the HS.
    """
    if config is None:
        config = Mock()
        config.signing_key = [MockKey()]
        config.event_cache_size = 1
        config.disable_registration = False
        config.macaroon_secret_key = "not even a little secret"
        config.server_name = "server.under.test"

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if datastore is None:
        db_pool = SQLiteMemoryDbPool()
        yield db_pool.prepare()
        hs = HomeServer(name,
                        db_pool=db_pool,
                        config=config,
                        version_string="Synapse/tests",
                        database_engine=create_engine("sqlite3"),
                        **kargs)
    else:
        hs = HomeServer(name,
                        db_pool=None,
                        datastore=datastore,
                        config=config,
                        version_string="Synapse/tests",
                        database_engine=create_engine("sqlite3"),
                        **kargs)

    # bcrypt is far too slow to be doing in unit tests
    def swap_out_hash_for_testing(old_build_handlers):
        def build_handlers():
            handlers = old_build_handlers()
            auth_handler = handlers.auth_handler
            auth_handler.hash = lambda p: hashlib.md5(p).hexdigest()
            auth_handler.validate_hash = lambda p, h: hashlib.md5(p).hexdigest() == h
            return handlers

        return build_handlers

    hs.build_handlers = swap_out_hash_for_testing(hs.build_handlers)

    defer.returnValue(hs)
Code example #4
File: synchrotron.py  Project: DoubleMalt/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse synchrotron", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.synchrotron"

    setup_logging(config, use_worker_options=True)

    synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    ss = SynchrotronServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
        application_service_handler=SynchrotronApplicationService(),
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def start():
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-synchrotron", config)
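For context, worker entry points like this `start(config_options)` are normally invoked from the module's `__main__` guard with the command-line arguments; a sketch of that conventional wrapper (assumes `sys` and `LoggingContext` are imported in the module):

if __name__ == "__main__":
    with LoggingContext("main"):
        start(sys.argv[1:])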
Code example #5
File: test_base.py  Project: skbaum/synapse
    def setUp(self):
        self.db_pool = Mock(spec=["runInteraction"])
        self.mock_txn = Mock()
        self.mock_conn = Mock(spec_set=["cursor", "rollback", "commit"])
        self.mock_conn.cursor.return_value = self.mock_txn
        self.mock_conn.rollback.return_value = None

        # Our fake runInteraction just runs synchronously inline

        def runInteraction(func, *args, **kwargs):
            return defer.succeed(func(self.mock_txn, *args, **kwargs))

        self.db_pool.runInteraction = runInteraction

        def runWithConnection(func, *args, **kwargs):
            return defer.succeed(func(self.mock_conn, *args, **kwargs))

        self.db_pool.runWithConnection = runWithConnection

        config = Mock()
        config.event_cache_size = 1
        config.database_config = {"name": "sqlite3"}
        hs = HomeServer(
            "test",
            db_pool=self.db_pool,
            config=config,
            database_engine=create_engine(config),
        )

        self.datastore = SQLBaseStore(hs)
Code example #6
File: frontend_proxy.py  Project: matrix-org/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse frontend proxy", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.frontend_proxy"

    assert config.worker_main_http_uri is not None

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    ss = FrontendProxyServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    reactor.callWhenRunning(_base.start, ss, config.worker_listeners)

    _base.start_worker_reactor("synapse-frontend-proxy", config)
Code example #7
File: event_creator.py  Project: syamgk/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse event creator",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.event_creator"

    assert config.worker_replication_http_port is not None

    # This should only be done on the user directory worker or the master
    config.update_user_directory = False

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    ss = EventCreatorServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    setup_logging(ss, config, use_worker_options=True)

    ss.setup()
    reactor.addSystemEventTrigger("before", "startup", _base.start, ss,
                                  config.worker_listeners)

    _base.start_worker_reactor("synapse-event-creator", config)
Code example #8
File: frontend_proxy.py  Project: werner291/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse frontend proxy",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.frontend_proxy"

    assert config.worker_main_http_uri is not None

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    ss = FrontendProxyServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    reactor.addSystemEventTrigger("before", "startup", _base.start, ss,
                                  config.worker_listeners)

    _base.start_worker_reactor("synapse-frontend-proxy", config)
Code example #9
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse client reader",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.client_reader"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    ss = ClientReaderServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    reactor.callWhenRunning(_base.start, ss, config.worker_listeners)

    _base.start_worker_reactor("synapse-client-reader", config)
Code example #10
    def setUp(self):
        self.db_pool = Mock(spec=["runInteraction"])
        self.mock_txn = Mock()
        self.mock_conn = Mock(spec_set=["cursor", "rollback", "commit"])
        self.mock_conn.cursor.return_value = self.mock_txn
        self.mock_conn.rollback.return_value = None
        # Our fake runInteraction just runs synchronously inline

        def runInteraction(func, *args, **kwargs):
            return defer.succeed(func(self.mock_txn, *args, **kwargs))

        self.db_pool.runInteraction = runInteraction

        def runWithConnection(func, *args, **kwargs):
            return defer.succeed(func(self.mock_conn, *args, **kwargs))

        self.db_pool.runWithConnection = runWithConnection

        config = default_config(name="test", parse=True)
        hs = TestHomeServer("test", config=config)

        sqlite_config = {"name": "sqlite3"}
        engine = create_engine(sqlite_config)
        fake_engine = Mock(wraps=engine)
        fake_engine.can_native_upsert = False
        fake_engine.in_transaction.return_value = False

        db = DatabasePool(Mock(), Mock(config=sqlite_config), fake_engine)
        db._db_pool = self.db_pool

        self.datastore = SQLBaseStore(db, None, hs)
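A store mocked out like this is usually exercised by asserting on the SQL handed to `mock_txn`. A hypothetical companion test for the same class (table and column names are illustrative; the `db_pool.simple_insert` call assumes the DatabasePool API of this Synapse version):

    @defer.inlineCallbacks
    def test_insert_one_column(self):
        # Pretend one row was written.
        self.mock_txn.rowcount = 1

        # The insert runs through the fake, synchronous db_pool set up above.
        yield defer.ensureDeferred(
            self.datastore.db_pool.simple_insert(
                table="tablename", values={"columname": "Value"}
            )
        )

        # The fake pool hands back our mock connection, whose cursor() is
        # mock_txn, so the generated SQL can be asserted on directly.
        self.mock_txn.execute.assert_called_with(
            "INSERT INTO tablename (columname) VALUES(?)", ("Value",)
        )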
Code example #11
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse synchrotron",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.synchrotron"

    setup_logging(config, use_worker_options=True)

    synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    ss = SynchrotronServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
        application_service_handler=SynchrotronApplicationService(),
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def start():
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-synchrotron", config)
Code example #12
File: client_reader.py  Project: matrix-org/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse client reader", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.client_reader"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    ss = ClientReaderServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    reactor.callWhenRunning(_base.start, ss, config.worker_listeners)

    _base.start_worker_reactor("synapse-client-reader", config)
Code example #13
File: test_base.py  Project: unfaulty/synapse
    def setUp(self):
        self.db_pool = Mock(spec=["runInteraction"])
        self.mock_txn = Mock()
        self.mock_conn = Mock(spec_set=["cursor"])
        self.mock_conn.cursor.return_value = self.mock_txn
        # Our fake runInteraction just runs synchronously inline

        def runInteraction(func, *args, **kwargs):
            return defer.succeed(func(self.mock_txn, *args, **kwargs))
        self.db_pool.runInteraction = runInteraction

        def runWithConnection(func, *args, **kwargs):
            return defer.succeed(func(self.mock_conn, *args, **kwargs))
        self.db_pool.runWithConnection = runWithConnection

        config = Mock()
        config.event_cache_size = 1
        hs = HomeServer(
            "test",
            db_pool=self.db_pool,
            config=config,
            database_engine=create_engine("sqlite3"),
        )

        self.datastore = SQLBaseStore(hs)
Code example #14
File: test_base.py  Project: matrix-org/synapse
    def setUp(self):
        self.db_pool = Mock(spec=["runInteraction"])
        self.mock_txn = Mock()
        self.mock_conn = Mock(spec_set=["cursor", "rollback", "commit"])
        self.mock_conn.cursor.return_value = self.mock_txn
        self.mock_conn.rollback.return_value = None
        # Our fake runInteraction just runs synchronously inline

        def runInteraction(func, *args, **kwargs):
            return defer.succeed(func(self.mock_txn, *args, **kwargs))

        self.db_pool.runInteraction = runInteraction

        def runWithConnection(func, *args, **kwargs):
            return defer.succeed(func(self.mock_conn, *args, **kwargs))

        self.db_pool.runWithConnection = runWithConnection

        config = Mock()
        config._disable_native_upserts = True
        config.event_cache_size = 1
        config.database_config = {"name": "sqlite3"}
        engine = create_engine(config.database_config)
        fake_engine = Mock(wraps=engine)
        fake_engine.can_native_upsert = False
        hs = TestHomeServer(
            "test", db_pool=self.db_pool, config=config, database_engine=fake_engine
        )

        self.datastore = SQLBaseStore(None, hs)
Code example #15
    def build_db_store(
        self,
        db_config: DatabaseConnectionConfig,
        allow_outdated_version: bool = False,
    ) -> Store:
        """Builds and returns a database store using the provided configuration.

        Args:
            db_config: The database configuration
            allow_outdated_version: True to suppress errors about the database server
                version being too old to run a complete synapse

        Returns:
            The built Store object.
        """
        self.progress.set_state("Preparing %s" % db_config.config["name"])

        engine = create_engine(db_config.config)

        hs = MockHomeserver(self.hs_config)

        with make_conn(db_config, engine, "portdb") as db_conn:
            engine.check_database(
                db_conn, allow_outdated_version=allow_outdated_version)
            prepare_database(db_conn, engine, config=self.hs_config)
            # Type safety: ignore that we're using Mock homeservers here.
            store = Store(
                DatabasePool(hs, db_config, engine), db_conn, hs
            )  # type: ignore[arg-type]
            db_conn.commit()

        return store
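A hedged sketch of how a port script might call this builder; `DatabaseConnectionConfig` is assumed to take a connection name plus the usual database config dict, and the sqlite path is purely illustrative:

        # Hypothetical caller, e.g. elsewhere in the same porter class:
        sqlite_config = DatabaseConnectionConfig(
            "master",  # connection name (assumed)
            {"name": "sqlite3", "args": {"database": "/path/to/homeserver.db"}},
        )
        sqlite_store = self.build_db_store(
            sqlite_config,
            allow_outdated_version=True,  # the source database may be old
        )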
Code example #16
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse media repository",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.media_repository"

    if config.enable_media_repo:
        _base.quit_with_error(
            "enable_media_repo must be disabled in the main synapse process\n"
            "before the media repo can be run in a separate worker.\n"
            "Please add ``enable_media_repo: false`` to the main config\n")

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    ss = MediaRepositoryServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    reactor.callWhenRunning(_base.start, ss, config.worker_listeners)

    _base.start_worker_reactor("synapse-media-repository", config)
Code example #17
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse synchrotron",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.synchrotron"

    setup_logging(config, use_worker_options=True)

    synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    ss = SynchrotronServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
        application_service_handler=SynchrotronApplicationService(),
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def run():
        # make sure that we run the reactor with the sentinel log context,
        # otherwise other PreserveLoggingContext instances will get confused
        # and complain when they see the logcontext arbitrarily swapping
        # between the sentinel and `run` logcontexts.
        with PreserveLoggingContext():
            logger.info("Running")
            change_resource_limit(config.soft_file_limit)
            if config.gc_thresholds:
                gc.set_threshold(*config.gc_thresholds)
            reactor.run()

    def start():
        ss.get_datastore().start_profiling()
        ss.replicate()
        ss.get_state_handler().start_caching()

    reactor.callWhenRunning(start)

    if config.worker_daemonize:
        daemon = Daemonize(
            app="synapse-synchrotron",
            pid=config.worker_pid_file,
            action=run,
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )
        daemon.start()
    else:
        run()
Code example #18
File: utils.py  Project: Xe/synapse
def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
    """Setup a homeserver suitable for running tests against. Keyword arguments
    are passed to the Homeserver constructor. If no datastore is supplied a
    datastore backed by an in-memory sqlite db will be given to the HS.
    """
    if config is None:
        config = Mock()
        config.signing_key = [MockKey()]
        config.event_cache_size = 1
        config.disable_registration = False
        config.macaroon_secret_key = "not even a little secret"
        config.server_name = "server.under.test"

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if datastore is None:
        db_pool = SQLiteMemoryDbPool()
        yield db_pool.prepare()
        hs = HomeServer(
            name, db_pool=db_pool, config=config,
            version_string="Synapse/tests",
            database_engine=create_engine("sqlite3"),
            **kargs
        )
    else:
        hs = HomeServer(
            name, db_pool=None, datastore=datastore, config=config,
            version_string="Synapse/tests",
            database_engine=create_engine("sqlite3"),
            **kargs
        )

    # bcrypt is far too slow to be doing in unit tests
    def swap_out_hash_for_testing(old_build_handlers):
        def build_handlers():
            handlers = old_build_handlers()
            auth_handler = handlers.auth_handler
            auth_handler.hash = lambda p: hashlib.md5(p).hexdigest()
            auth_handler.validate_hash = lambda p, h: hashlib.md5(p).hexdigest() == h
            return handlers
        return build_handlers

    hs.build_handlers = swap_out_hash_for_testing(hs.build_handlers)

    defer.returnValue(hs)
Code example #19
    def __init__(self, main_store_class, hs):
        # Note we pass in the main store class here as workers use a different main
        # store.

        self.databases = []
        self.main = None
        self.state = None

        for database_config in hs.config.database.databases:
            db_name = database_config.name
            engine = create_engine(database_config.config)

            with make_conn(database_config, engine) as db_conn:
                logger.info("Preparing database %r...", db_name)

                engine.check_database(db_conn.cursor())
                prepare_database(
                    db_conn,
                    engine,
                    hs.config,
                    data_stores=database_config.data_stores,
                )

                database = Database(hs, database_config, engine)

                if "main" in database_config.data_stores:
                    logger.info("Starting 'main' data store")

                    # Sanity check we don't try and configure the main store on
                    # multiple databases.
                    if self.main:
                        raise Exception("'main' data store already configured")

                    self.main = main_store_class(database, db_conn, hs)

                if "state" in database_config.data_stores:
                    logger.info("Starting 'state' data store")

                    # Sanity check we don't try and configure the state store on
                    # multiple databases.
                    if self.state:
                        raise Exception(
                            "'state' data store already configured")

                    self.state = StateGroupDataStore(database, db_conn, hs)

                db_conn.commit()

                self.databases.append(database)

                logger.info("Database %r prepared", db_name)

        # Sanity check that we have actually configured all the required stores.
        if not self.main:
            raise Exception("No 'main' data store configured")

        if not self.state:
            raise Exception("No 'state' data store configured")
Code example #20
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse client reader",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.client_reader"

    setup_logging(config.worker_log_config, config.worker_log_file)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    ss = ClientReaderServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    ss.get_handlers()
    ss.start_listening(config.worker_listeners)

    def run():
        with LoggingContext("run"):
            logger.info("Running")
            change_resource_limit(config.soft_file_limit)
            if config.gc_thresholds:
                gc.set_threshold(*config.gc_thresholds)
            reactor.run()

    def start():
        ss.get_state_handler().start_caching()
        ss.get_datastore().start_profiling()
        ss.replicate()

    reactor.callWhenRunning(start)

    if config.worker_daemonize:
        daemon = Daemonize(
            app="synapse-client-reader",
            pid=config.worker_pid_file,
            action=run,
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )
        daemon.start()
    else:
        run()
Code example #21
File: federation_reader.py  Project: mebjas/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse federation reader", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.federation_reader"

    setup_logging(config.worker_log_config, config.worker_log_file)

    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    ss = FederationReaderServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    ss.get_handlers()
    ss.start_listening(config.worker_listeners)

    def run():
        with LoggingContext("run"):
            logger.info("Running")
            change_resource_limit(config.soft_file_limit)
            if config.gc_thresholds:
                gc.set_threshold(*config.gc_thresholds)
            reactor.run()

    def start():
        ss.get_state_handler().start_caching()
        ss.get_datastore().start_profiling()
        ss.replicate()

    reactor.callWhenRunning(start)

    if config.worker_daemonize:
        daemon = Daemonize(
            app="synapse-federation-reader",
            pid=config.worker_pid_file,
            action=run,
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )
        daemon.start()
    else:
        run()
Code example #22
File: synchrotron.py  Project: n3h3m/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse synchrotron",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.synchrotron"

    setup_logging(config.worker_log_config, config.worker_log_file)

    database_engine = create_engine(config.database_config)

    ss = SynchrotronServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
        application_service_handler=SynchrotronApplicationService(),
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def run():
        with LoggingContext("run"):
            logger.info("Running")
            change_resource_limit(config.soft_file_limit)
            if config.gc_thresholds:
                gc.set_threshold(*config.gc_thresholds)
            reactor.run()

    def start():
        ss.get_datastore().start_profiling()
        ss.replicate()
        ss.get_state_handler().start_caching()

    reactor.callWhenRunning(start)

    if config.worker_daemonize:
        daemon = Daemonize(
            app="synapse-synchrotron",
            pid=config.worker_pid_file,
            action=run,
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )
        daemon.start()
    else:
        run()
Code example #23
File: user_dir.py  Project: timoschwarzer/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse user directory", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.user_dir"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    if config.update_user_directory:
        sys.stderr.write(
            "\nThe update_user_directory must be disabled in the main synapse process"
            "\nbefore they can be run in a separate worker."
            "\nPlease add ``update_user_directory: false`` to the main config"
            "\n"
        )
        sys.exit(1)

    # Force the pushers to start since they will be disabled in the main config
    config.update_user_directory = True

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

    ps = UserDirectoryServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ps.setup()
    ps.start_listening(config.worker_listeners)

    def start():
        ps.get_datastore().start_profiling()
        ps.get_state_handler().start_caching()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-user-dir", config)
Code example #24
File: user_dir.py  Project: DoubleMalt/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse user directory", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.user_dir"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    if config.update_user_directory:
        sys.stderr.write(
            "\nThe update_user_directory must be disabled in the main synapse process"
            "\nbefore they can be run in a separate worker."
            "\nPlease add ``update_user_directory: false`` to the main config"
            "\n"
        )
        sys.exit(1)

    # Force the pushers to start since they will be disabled in the main config
    config.update_user_directory = True

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

    ps = UserDirectoryServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ps.setup()
    ps.start_listening(config.worker_listeners)

    def start():
        ps.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-user-dir", config)
Code example #25
File: utils.py  Project: samuelyi/synapse
def setupdb():
    # If we're using PostgreSQL, set up the db once
    if USE_POSTGRES_FOR_TESTS:
        # create a PostgresEngine
        db_engine = create_engine({"name": "psycopg2", "args": {}})

        # connect to postgres to create the base database.
        db_conn = db_engine.module.connect(
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            port=POSTGRES_PORT,
            password=POSTGRES_PASSWORD,
            dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB, ))
        cur.execute(
            "CREATE DATABASE %s ENCODING 'UTF8' LC_COLLATE='C' LC_CTYPE='C' "
            "template=template0;" % (POSTGRES_BASE_DB, ))
        cur.close()
        db_conn.close()

        # Set up in the db
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            port=POSTGRES_PORT,
            password=POSTGRES_PASSWORD,
        )
        db_conn = LoggingDatabaseConnection(db_conn, db_engine, "tests")
        prepare_database(db_conn, db_engine, None)
        db_conn.close()

        def _cleanup():
            db_conn = db_engine.module.connect(
                user=POSTGRES_USER,
                host=POSTGRES_HOST,
                port=POSTGRES_PORT,
                password=POSTGRES_PASSWORD,
                dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
            )
            db_conn.autocommit = True
            cur = db_conn.cursor()
            cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB, ))
            cur.close()
            db_conn.close()

        atexit.register(_cleanup)
Code example #26
File: federation_sender.py  Project: rubo77/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse federation sender", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.federation_sender"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    if config.send_federation:
        sys.stderr.write(
            "\nThe send_federation must be disabled in the main synapse process"
            "\nbefore they can be run in a separate worker."
            "\nPlease add ``send_federation: false`` to the main config"
            "\n"
        )
        sys.exit(1)

    # Force the pushers to start since they will be disabled in the main config
    config.send_federation = True

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    ps = FederationSenderServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ps.setup()
    ps.start_listening(config.worker_listeners)

    def start():
        ps.get_datastore().start_profiling()
        ps.get_state_handler().start_caching()

    reactor.callWhenRunning(start)
    _base.start_worker_reactor("synapse-federation-sender", config)
Code example #27
File: utils.py  Project: matrix-org/synapse
def setupdb():
    # If we're using PostgreSQL, set up the db once
    if USE_POSTGRES_FOR_TESTS:
        # create a PostgresEngine
        db_engine = create_engine({"name": "psycopg2", "args": {}})

        # connect to postgres to create the base database.
        db_conn = db_engine.module.connect(
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
            dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
        cur.execute("CREATE DATABASE %s;" % (POSTGRES_BASE_DB,))
        cur.close()
        db_conn.close()

        # Set up in the db
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
        )
        cur = db_conn.cursor()
        _get_or_create_schema_state(cur, db_engine)
        _setup_new_database(cur, db_engine)
        db_conn.commit()
        cur.close()
        db_conn.close()

        def _cleanup():
            db_conn = db_engine.module.connect(
                user=POSTGRES_USER,
                host=POSTGRES_HOST,
                password=POSTGRES_PASSWORD,
                dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
            )
            db_conn.autocommit = True
            cur = db_conn.cursor()
            cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
            cur.close()
            db_conn.close()

        atexit.register(_cleanup)
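`USE_POSTGRES_FOR_TESTS` and the connection constants used above are module-level settings; a minimal sketch of how they might be derived from the environment (the variable names and defaults here are assumptions, not verified against the project):

import os

# Assumed environment-driven defaults for the constants used by setupdb().
USE_POSTGRES_FOR_TESTS = bool(os.environ.get("SYNAPSE_POSTGRES"))
POSTGRES_USER = os.environ.get("SYNAPSE_POSTGRES_USER", "postgres")
POSTGRES_HOST = os.environ.get("SYNAPSE_POSTGRES_HOST")
POSTGRES_PASSWORD = os.environ.get("SYNAPSE_POSTGRES_PASSWORD")
POSTGRES_BASE_DB = "synapse_tests"  # scratch database created and dropped by setupdb()
POSTGRES_DBNAME_FOR_INITIAL_CREATE = "postgres"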
Code example #28
def setupdb():
    # If we're using PostgreSQL, set up the db once
    if USE_POSTGRES_FOR_TESTS:
        # create a PostgresEngine
        db_engine = create_engine({"name": "psycopg2", "args": {}})

        # connect to postgres to create the base database.
        db_conn = db_engine.module.connect(
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
            dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB, ))
        cur.execute("CREATE DATABASE %s;" % (POSTGRES_BASE_DB, ))
        cur.close()
        db_conn.close()

        # Set up in the db
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
        )
        cur = db_conn.cursor()
        _get_or_create_schema_state(cur, db_engine)
        _setup_new_database(cur, db_engine)
        db_conn.commit()
        cur.close()
        db_conn.close()

        def _cleanup():
            db_conn = db_engine.module.connect(
                user=POSTGRES_USER,
                host=POSTGRES_HOST,
                password=POSTGRES_PASSWORD,
                dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
            )
            db_conn.autocommit = True
            cur = db_conn.cursor()
            cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB, ))
            cur.close()
            db_conn.close()

        atexit.register(_cleanup)
Code example #29
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse pusher", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.pusher"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    if config.start_pushers:
        sys.stderr.write(
            "\nThe pushers must be disabled in the main synapse process"
            "\nbefore they can be run in a separate worker."
            "\nPlease add ``start_pushers: false`` to the main config"
            "\n"
        )
        sys.exit(1)

    # Force the pushers to start since they will be disabled in the main config
    config.start_pushers = True

    database_engine = create_engine(config.database_config)

    ps = PusherServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ps.setup()
    ps.start_listening(config.worker_listeners)

    def start():
        ps.get_pusherpool().start()
        ps.get_datastore().start_profiling()
        ps.get_state_handler().start_caching()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-pusher", config)
Code example #30
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse federation sender",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.federation_sender"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    if config.send_federation:
        sys.stderr.write(
            "\nThe send_federation must be disabled in the main synapse process"
            "\nbefore they can be run in a separate worker."
            "\nPlease add ``send_federation: false`` to the main config"
            "\n")
        sys.exit(1)

    # Force the pushers to start since they will be disabled in the main config
    config.send_federation = True

    ss = FederationSenderServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()

    def start():
        ss.config.read_certificate_from_disk()
        ss.tls_server_context_factory = context_factory.ServerContextFactory(
            config)
        ss.tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
            config)
        ss.start_listening(config.worker_listeners)
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)
    _base.start_worker_reactor("synapse-federation-sender", config)
Code example #31
File: pusher.py  Project: DoubleMalt/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse pusher", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.pusher"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    if config.start_pushers:
        sys.stderr.write(
            "\nThe pushers must be disabled in the main synapse process"
            "\nbefore they can be run in a separate worker."
            "\nPlease add ``start_pushers: false`` to the main config"
            "\n"
        )
        sys.exit(1)

    # Force the pushers to start since they will be disabled in the main config
    config.start_pushers = True

    database_engine = create_engine(config.database_config)

    ps = PusherServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ps.setup()
    ps.start_listening(config.worker_listeners)

    def start():
        ps.get_pusherpool().start()
        ps.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-pusher", config)
Code example #32
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse media repository", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.media_repository"

    if config.enable_media_repo:
        _base.quit_with_error(
            "enable_media_repo must be disabled in the main synapse process\n"
            "before the media repo can be run in a separate worker.\n"
            "Please add ``enable_media_repo: false`` to the main config\n"
        )

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

    ss = MediaRepositoryServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def start():
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-media-repository", config)
Code example #33
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse media repository",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.media_repository"

    if config.enable_media_repo:
        _base.quit_with_error(
            "enable_media_repo must be disabled in the main synapse process\n"
            "before the media repo can be run in a separate worker.\n"
            "Please add ``enable_media_repo: false`` to the main config\n")

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
        config)

    ss = MediaRepositoryServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def start():
        ss.get_state_handler().start_caching()
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-media-repository", config)
Code example #34
File: utils.py  Project: gergelypolonkai/synapse
def setupdb():

    # If we're using PostgreSQL, set up the db once
    if USE_POSTGRES_FOR_TESTS:
        pgconfig = {
            "name": "psycopg2",
            "args": {
                "database": POSTGRES_BASE_DB,
                "user": POSTGRES_USER,
                "cp_min": 1,
                "cp_max": 5,
            },
        }
        config = Mock()
        config.password_providers = []
        config.database_config = pgconfig
        db_engine = create_engine(pgconfig)
        db_conn = db_engine.module.connect(user=POSTGRES_USER)
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
        cur.execute("CREATE DATABASE %s;" % (POSTGRES_BASE_DB,))
        cur.close()
        db_conn.close()

        # Set up in the db
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB, user=POSTGRES_USER
        )
        cur = db_conn.cursor()
        _get_or_create_schema_state(cur, db_engine)
        _setup_new_database(cur, db_engine)
        db_conn.commit()
        cur.close()
        db_conn.close()

        def _cleanup():
            db_conn = db_engine.module.connect(user=POSTGRES_USER)
            db_conn.autocommit = True
            cur = db_conn.cursor()
            cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
            cur.close()
            db_conn.close()

        atexit.register(_cleanup)
Code example #35
File: utils.py  Project: t2bot/synapse
def setupdb():

    # If we're using PostgreSQL, set up the db once
    if USE_POSTGRES_FOR_TESTS:
        pgconfig = {
            "name": "psycopg2",
            "args": {
                "database": POSTGRES_BASE_DB,
                "user": POSTGRES_USER,
                "cp_min": 1,
                "cp_max": 5,
            },
        }
        config = Mock()
        config.password_providers = []
        config.database_config = pgconfig
        db_engine = create_engine(pgconfig)
        db_conn = db_engine.module.connect(user=POSTGRES_USER)
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
        cur.execute("CREATE DATABASE %s;" % (POSTGRES_BASE_DB,))
        cur.close()
        db_conn.close()

        # Set up in the db
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB, user=POSTGRES_USER
        )
        cur = db_conn.cursor()
        _get_or_create_schema_state(cur, db_engine)
        _setup_new_database(cur, db_engine)
        db_conn.commit()
        cur.close()
        db_conn.close()

        def _cleanup():
            db_conn = db_engine.module.connect(user=POSTGRES_USER)
            db_conn.autocommit = True
            cur = db_conn.cursor()
            cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
            cur.close()
            db_conn.close()

        atexit.register(_cleanup)
Code example #36
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse event creator", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.event_creator"

    assert config.worker_replication_http_port is not None

    setup_logging(config, use_worker_options=True)

    # This should only be done on the user directory worker or the master
    config.update_user_directory = False

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

    ss = EventCreatorServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def start():
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-event-creator", config)
Code example #37
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse federation sender", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.federation_sender"

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    if config.send_federation:
        sys.stderr.write(
            "\nThe send_federation must be disabled in the main synapse process"
            "\nbefore they can be run in a separate worker."
            "\nPlease add ``send_federation: false`` to the main config"
            "\n"
        )
        sys.exit(1)

    # Force the pushers to start since they will be disabled in the main config
    config.send_federation = True

    ss = FederationSenderServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    setup_logging(ss, config, use_worker_options=True)

    ss.setup()
    reactor.addSystemEventTrigger(
        "before", "startup", _base.start, ss, config.worker_listeners
    )

    _base.start_worker_reactor("synapse-federation-sender", config)
Code example #38
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse frontend proxy",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.frontend_proxy"

    assert config.worker_main_http_uri is not None

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
        config)

    ss = FrontendProxyServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def start():
        ss.get_state_handler().start_caching()
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-frontend-proxy", config)
Code example #39
File: frontend_proxy.py  Project: rubo77/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse frontend proxy", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.frontend_proxy"

    assert config.worker_main_http_uri is not None

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    ss = FrontendProxyServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def start():
        ss.get_state_handler().start_caching()
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-frontend-proxy", config)
Code example #40
File: pusher.py  Project: 0-T-0/synapse
def setup(config_options):
    try:
        config = PusherSlaveConfig.load_config(
            "Synapse pusher", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    if not config:
        sys.exit(0)

    config.setup_logging()

    database_engine = create_engine(config.database_config)

    ps = PusherServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string=get_version_string("Synapse", synapse),
        database_engine=database_engine,
    )

    ps.setup()
    ps.start_listening()

    change_resource_limit(ps.config.soft_file_limit)
    if ps.config.gc_thresholds:
        gc.set_threshold(*ps.config.gc_thresholds)

    def start():
        ps.replicate()
        ps.get_pusherpool().start()
        ps.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    return ps
Code example #41
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse federation reader",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.federation_reader"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
        config)

    ss = FederationReaderServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def start():
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-federation-reader", config)
Code example #42
0
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse federation reader", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.federation_reader"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

    ss = FederationReaderServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    ss.start_listening(config.worker_listeners)

    def start():
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-federation-reader", config)
Code example #43
0
File: synchrotron.py Project: 0-T-0/synapse
def setup(config_options):
    try:
        config = SynchrotronConfig.load_config(
            "Synapse synchrotron", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    if not config:
        sys.exit(0)

    config.setup_logging()

    database_engine = create_engine(config.database_config)

    ss = SynchrotronServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string=get_version_string("Synapse", synapse),
        database_engine=database_engine,
        application_service_handler=SynchrotronApplicationService(),
    )

    ss.setup()
    ss.start_listening()

    change_resource_limit(ss.config.soft_file_limit)
    if ss.config.gc_thresholds:
        gc.set_threshold(*ss.config.gc_thresholds)

    def start():
        ss.get_datastore().start_profiling()
        ss.replicate()

    reactor.callWhenRunning(start)

    return ss
Code example #44
0
File: user_dir.py Project: zymptomLabs/synapse
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse user directory",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.user_dir"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    if config.update_user_directory:
        sys.stderr.write(
            "\nThe update_user_directory must be disabled in the main synapse process"
            "\nbefore they can be run in a separate worker."
            "\nPlease add ``update_user_directory: false`` to the main config"
            "\n")
        sys.exit(1)

    # Force the user directory updates to start since they will be disabled in the main config
    config.update_user_directory = True

    ss = UserDirectoryServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    reactor.callWhenRunning(_base.start, ss, config.worker_listeners)

    _base.start_worker_reactor("synapse-user-dir", config)
Code example #45
0
File: media_repository.py Project: meharimg/swarm-im
def start(config_options):
    try:
        config = HomeServerConfig.load_config("Synapse media repository",
                                              config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.media_repository"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    ss = MediaRepositoryServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ss.setup()
    ss.get_handlers()
    ss.start_listening(config.worker_listeners)

    def start():
        ss.get_state_handler().start_caching()
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-media-repository", config)
Code example #46
0
def setup(config_options):
    try:
        config = PusherSlaveConfig.load_config("Synapse pusher",
                                               config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    if not config:
        sys.exit(0)

    config.setup_logging()

    database_engine = create_engine(config.database_config)

    ps = PusherServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string=get_version_string("Synapse", synapse),
        database_engine=database_engine,
    )

    ps.setup()
    ps.start_listening()

    change_resource_limit(ps.config.soft_file_limit)
    if ps.config.gc_thresholds:
        gc.set_threshold(*ps.config.gc_thresholds)

    def start():
        ps.replicate()
        ps.get_pusherpool().start()
        ps.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    return ps
Code example #47
0
File: homeserver.py Project: DoubleMalt/synapse
def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    try:
        config = HomeServerConfig.load_or_generate_config(
            "Synapse Homeserver",
            config_options,
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    if not config:
        # If a config isn't returned, and an exception isn't raised, we're just
        # generating config files and shouldn't try to continue.
        sys.exit(0)

    synapse.config.logger.setup_logging(config, use_worker_options=False)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

    database_engine = create_engine(config.database_config)
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    logger.info("Preparing database: %s...", config.database_config['name'])

    try:
        with hs.get_db_conn(run_new_connection=False) as db_conn:
            prepare_database(db_conn, database_engine, config=config)
            database_engine.on_new_connection(db_conn)

            hs.run_startup_checks(db_conn, database_engine)

            db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %s.", config.database_config['name'])

    hs.setup()
    hs.start_listening()

    def start():
        hs.get_pusherpool().start()
        hs.get_datastore().start_profiling()
        hs.get_datastore().start_doing_background_updates()

    reactor.callWhenRunning(start)

    return hs
Code example #48
0
File: homeserver.py Project: howethomas/synapse
def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        config_options,
        generate_section="Homeserver"
    )

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config["name"])
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        tls_context_factory=tls_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    logger.info("Preparing database: %r...", config.database_config)

    try:
        db_conn = database_engine.module.connect(
            **{
                k: v for k, v in config.database_config.get("args", {}).items()
                if not k.startswith("cp_")
            }
        )

        database_engine.prepare_database(db_conn)
        hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %r.", config.database_config)

    hs.start_listening()

    hs.get_pusherpool().start()
    hs.get_state_handler().start_caching()
    hs.get_datastore().start_profiling()
    hs.get_replication_layer().start_get_pdu_cache()

    return hs
Code example #49
0
    def run(self):
        try:
            sqlite_db_pool = adbapi.ConnectionPool(
                self.sqlite_config["name"],
                **self.sqlite_config["args"]
            )

            postgres_db_pool = adbapi.ConnectionPool(
                self.postgres_config["name"],
                **self.postgres_config["args"]
            )

            sqlite_engine = create_engine("sqlite3")
            postgres_engine = create_engine("psycopg2")

            self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
            self.postgres_store = Store(postgres_db_pool, postgres_engine)

            yield self.postgres_store.execute(
                postgres_engine.check_database
            )

            # Step 1. Set up databases.
            self.progress.set_state("Preparing SQLite3")
            self.setup_db(self.sqlite_config, sqlite_engine)

            self.progress.set_state("Preparing PostgreSQL")
            self.setup_db(self.postgres_config, postgres_engine)

            # Step 2. Get tables.
            self.progress.set_state("Fetching tables")
            sqlite_tables = yield self.sqlite_store._simple_select_onecol(
                table="sqlite_master",
                keyvalues={
                    "type": "table",
                },
                retcol="name",
            )

            postgres_tables = yield self.postgres_store._simple_select_onecol(
                table="information_schema.tables",
                keyvalues={
                    "table_schema": "public",
                },
                retcol="distinct table_name",
            )

            tables = set(sqlite_tables) & set(postgres_tables)

            self.progress.set_state("Creating tables")

            logger.info("Found %d tables", len(tables))

            def create_port_table(txn):
                txn.execute(
                    "CREATE TABLE port_from_sqlite3 ("
                    " table_name varchar(100) NOT NULL UNIQUE,"
                    " rowid bigint NOT NULL"
                    ")"
                )

            try:
                yield self.postgres_store.runInteraction(
                    "create_port_table", create_port_table
                )
            except Exception as e:
                logger.info("Failed to create port table: %s", e)

            self.progress.set_state("Setting up")

            # Set up tables.
            setup_res = yield defer.gatherResults(
                [
                    self.setup_table(table)
                    for table in tables
                    if table not in ["schema_version", "applied_schema_deltas"]
                    and not table.startswith("sqlite_")
                ],
                consumeErrors=True,
            )

            # Process tables.
            yield defer.gatherResults(
                [
                    self.handle_table(*res)
                    for res in setup_res
                ],
                consumeErrors=True,
            )

            self.progress.done()
        except:
            global end_error_exec_info
            end_error_exec_info = sys.exc_info()
            logger.exception("")
        finally:
            reactor.stop()
Code example #50
0
def setup_test_homeserver(cleanup_func,
                          name="test",
                          datastore=None,
                          config=None,
                          reactor=None,
                          homeserverToUse=TestHomeServer,
                          **kargs):
    """
    Setup a homeserver suitable for running tests against.  Keyword arguments
    are passed to the Homeserver constructor.

    If no datastore is supplied, one is created and given to the homeserver.

    Args:
        cleanup_func : The function used to register a cleanup routine for
                       after the test.

    Calling this method directly is deprecated: you should instead derive from
    HomeserverTestCase.
    """
    if reactor is None:
        from twisted.internet import reactor

    if config is None:
        config = default_config(name)

    config.ldap_enabled = False

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if USE_POSTGRES_FOR_TESTS:
        test_db = "synapse_test_%s" % uuid.uuid4().hex

        config.database_config = {
            "name": "psycopg2",
            "args": {
                "database": test_db,
                "host": POSTGRES_HOST,
                "password": POSTGRES_PASSWORD,
                "user": POSTGRES_USER,
                "cp_min": 1,
                "cp_max": 5,
            },
        }
    else:
        config.database_config = {
            "name": "sqlite3",
            "args": {
                "database": ":memory:",
                "cp_min": 1,
                "cp_max": 1
            },
        }

    db_engine = create_engine(config.database_config)

    # Create the database before we actually try and connect to it, based off
    # the template database we generate in setupdb()
    if datastore is None and isinstance(db_engine, PostgresEngine):
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db, ))
        cur.execute("CREATE DATABASE %s WITH TEMPLATE %s;" %
                    (test_db, POSTGRES_BASE_DB))
        cur.close()
        db_conn.close()

    # we need to configure the connection pool to run the on_new_connection
    # function, so that we can test code that uses custom sqlite functions
    # (like rank).
    config.database_config["args"]["cp_openfun"] = db_engine.on_new_connection

    if datastore is None:
        hs = homeserverToUse(name,
                             config=config,
                             db_config=config.database_config,
                             version_string="Synapse/tests",
                             database_engine=db_engine,
                             tls_server_context_factory=Mock(),
                             tls_client_options_factory=Mock(),
                             reactor=reactor,
                             **kargs)

        # Prepare the DB on SQLite -- PostgreSQL is a copy of an already up to
        # date db
        if not isinstance(db_engine, PostgresEngine):
            db_conn = hs.get_db_conn()
            yield prepare_database(db_conn, db_engine, config)
            db_conn.commit()
            db_conn.close()

        else:
            # We need to do cleanup on PostgreSQL
            def cleanup():
                import psycopg2

                # Close all the db pools
                hs.get_db_pool().close()

                dropped = False

                # Drop the test database
                db_conn = db_engine.module.connect(
                    database=POSTGRES_BASE_DB,
                    user=POSTGRES_USER,
                    host=POSTGRES_HOST,
                    password=POSTGRES_PASSWORD,
                )
                db_conn.autocommit = True
                cur = db_conn.cursor()

                # Try a few times to drop the DB. Some things may hold on to the
                # database for a few more seconds due to flakiness, preventing
                # us from dropping it when the test is over. If we can't drop
                # it, warn and move on.
                for x in range(5):
                    try:
                        cur.execute("DROP DATABASE IF EXISTS %s;" %
                                    (test_db, ))
                        db_conn.commit()
                        dropped = True
                    except psycopg2.OperationalError as e:
                        warnings.warn("Couldn't drop old db: " + str(e),
                                      category=UserWarning)
                        time.sleep(0.5)

                cur.close()
                db_conn.close()

                if not dropped:
                    warnings.warn("Failed to drop old DB.",
                                  category=UserWarning)

            if not LEAVE_DB:
                # Register the cleanup hook
                cleanup_func(cleanup)

        hs.setup()
        if homeserverToUse.__name__ == "TestHomeServer":
            hs.setup_master()
    else:
        hs = homeserverToUse(name,
                             db_pool=None,
                             datastore=datastore,
                             config=config,
                             version_string="Synapse/tests",
                             database_engine=db_engine,
                             tls_server_context_factory=Mock(),
                             tls_client_options_factory=Mock(),
                             reactor=reactor,
                             **kargs)

    # bcrypt is far too slow to be doing in unit tests
    # Need to let the HS build an auth handler and then mess with it
    # because AuthHandler's constructor requires the HS, so we can't make one
    # beforehand and pass it in to the HS's constructor (chicken / egg)
    hs.get_auth_handler().hash = lambda p: hashlib.md5(p.encode('utf8')).hexdigest()
    hs.get_auth_handler().validate_hash = (
        lambda p, h: hashlib.md5(p.encode('utf8')).hexdigest() == h
    )

    fed = kargs.get("resource_for_federation", None)
    if fed:
        register_federation_servlets(hs, fed)

    defer.returnValue(hs)
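The helper above is written in Twisted's inlineCallbacks style (it yields and finishes with defer.returnValue), so a test has to drive it as a Deferred. Below is a minimal, hedged usage sketch, not taken from the Synapse test suite: it assumes the @defer.inlineCallbacks decorator (not visible in the snippet) is applied to setup_test_homeserver, passes the test case's own addCleanup as cleanup_func, and only exercises get_clock(), which the snippets above show the homeserver exposes. As the docstring says, new tests should derive from HomeserverTestCase rather than call this helper directly.

# Hedged usage sketch; assumes setup_test_homeserver (above) is in scope
# and decorated with @defer.inlineCallbacks.
from twisted.internet import defer
from twisted.trial import unittest


class ExampleHomeserverTestCase(unittest.TestCase):
    @defer.inlineCallbacks
    def setUp(self):
        # addCleanup registers the PostgreSQL teardown hook when that backend
        # is in use; with the SQLite config it is simply never invoked.
        self.hs = yield setup_test_homeserver(self.addCleanup, name="test")

    def test_homeserver_provides_a_clock(self):
        self.assertIsNotNone(self.hs.get_clock())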
Code example #51
0
File: homeserver.py Project: payingattention/synapse
def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        config_options,
        generate_section="Homeserver"
    )

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    if re.search(":[0-9]+$", config.server_name):
        domain_with_port = config.server_name
    else:
        domain_with_port = "%s:%s" % (config.server_name, config.bind_port)

    tls_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config["name"])
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        domain_with_port=domain_with_port,
        upload_dir=os.path.abspath("uploads"),
        db_config=config.database_config,
        tls_context_factory=tls_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    hs.create_resource_tree(
        redirect_root_to_web_client=True,
    )

    logger.info("Preparing database: %r...", config.database_config)

    try:
        db_conn = database_engine.module.connect(
            **{
                k: v for k, v in config.database_config.get("args", {}).items()
                if not k.startswith("cp_")
            }
        )

        database_engine.prepare_database(db_conn)
        hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %r.", config.database_config)

    if config.manhole:
        f = twisted.manhole.telnet.ShellFactory()
        f.username = "******"
        f.password = "******"
        f.namespace['hs'] = hs
        reactor.listenTCP(config.manhole, f, interface='127.0.0.1')

    hs.start_listening()

    hs.get_pusherpool().start()
    hs.get_state_handler().start_caching()
    hs.get_datastore().start_profiling()
    hs.get_replication_layer().start_get_pdu_cache()

    return hs
Code example #52
0
File: utils.py Project: Ralith/synapse
 def create_engine(self):
     return create_engine(self.config.database_config)
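This one-line wrapper simply forwards the stored database_config to create_engine. Across the snippets above, older code passes a bare module name ("sqlite3" or "psycopg2") while newer code passes the whole config dict, and the returned engine always exposes a module attribute and an on_new_connection hook. Below is a minimal sketch of that kind of name-based dispatch, under the explicit assumption that the engine class shown is a stand-in invented for this example, not Synapse's actual implementation.

# Illustrative sketch only -- not Synapse's real create_engine.
import importlib


class _StubEngine(object):
    """Stand-in exposing the two attributes the snippets above rely on:
    ``module`` (the DB-API module) and ``on_new_connection``."""

    def __init__(self, module):
        self.module = module

    def on_new_connection(self, db_conn):
        # real engines use this hook to tune every new DB-API connection
        pass


_SUPPORTED_NAMES = ("sqlite3", "psycopg2")


def create_engine(database_config):
    # accept either the full config dict (newer snippets) or a bare module
    # name such as "sqlite3" (older snippets)
    if isinstance(database_config, dict):
        name = database_config["name"]
    else:
        name = database_config

    if name not in _SUPPORTED_NAMES:
        raise RuntimeError("Unsupported database engine %r" % (name,))

    return _StubEngine(importlib.import_module(name))

With this sketch, create_engine({"name": "sqlite3", "args": {"database": ":memory:"}}).module is the sqlite3 module itself, which matches how the homeserver setup and test helpers above call database_engine.module.connect(...).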
Code example #53
0
File: pusher.py Project: pombredanne/synapse-2
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse pusher", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.pusher"

    setup_logging(config.worker_log_config, config.worker_log_file)

    if config.start_pushers:
        sys.stderr.write(
            "\nThe pushers must be disabled in the main synapse process"
            "\nbefore they can be run in a separate worker."
            "\nPlease add ``start_pushers: false`` to the main config"
            "\n"
        )
        sys.exit(1)

    # Force the pushers to start since they will be disabled in the main config
    config.start_pushers = True

    database_engine = create_engine(config.database_config)

    ps = PusherServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ps.setup()
    ps.start_listening(config.worker_listeners)

    def run():
        with LoggingContext("run"):
            logger.info("Running")
            change_resource_limit(config.soft_file_limit)
            if config.gc_thresholds:
                gc.set_threshold(*config.gc_thresholds)
            reactor.run()

    def start():
        ps.replicate()
        ps.get_pusherpool().start()
        ps.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

    if config.worker_daemonize:
        daemon = Daemonize(
            app="synapse-pusher",
            pid=config.worker_pid_file,
            action=run,
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )
        daemon.start()
    else:
        run()
Code example #54
0
File: utils.py Project: Vutsuak16/synapse
def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
    """Setup a homeserver suitable for running tests against. Keyword arguments
    are passed to the Homeserver constructor. If no datastore is supplied a
    datastore backed by an in-memory sqlite db will be given to the HS.
    """
    if config is None:
        config = Mock()
        config.signing_key = [MockKey()]
        config.event_cache_size = 1
        config.enable_registration = True
        config.macaroon_secret_key = "not even a little secret"
        config.server_name = "server.under.test"
        config.trusted_third_party_id_servers = []

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if datastore is None:
        db_pool = SQLiteMemoryDbPool()
        yield db_pool.prepare()
        hs = HomeServer(
            name, db_pool=db_pool, config=config,
            version_string="Synapse/tests",
            database_engine=create_engine("sqlite3"),
            get_db_conn=db_pool.get_db_conn,
            **kargs
        )
        hs.setup()
    else:
        hs = HomeServer(
            name, db_pool=None, datastore=datastore, config=config,
            version_string="Synapse/tests",
            database_engine=create_engine("sqlite3"),
            **kargs
        )

    # bcrypt is far too slow to be doing in unit tests
    def swap_out_hash_for_testing(old_build_handlers):
        def build_handlers():
            handlers = old_build_handlers()
            auth_handler = handlers.auth_handler
            auth_handler.hash = lambda p: hashlib.md5(p).hexdigest()
            auth_handler.validate_hash = lambda p, h: hashlib.md5(p).hexdigest() == h
            return handlers
        return build_handlers

    hs.build_handlers = swap_out_hash_for_testing(hs.build_handlers)

    fed = kargs.get("resource_for_federation", None)
    if fed:
        server.register_servlets(
            hs,
            resource=fed,
            authenticator=server.Authenticator(hs),
            ratelimiter=FederationRateLimiter(
                hs.get_clock(),
                window_size=hs.config.federation_rc_window_size,
                sleep_limit=hs.config.federation_rc_sleep_limit,
                sleep_msec=hs.config.federation_rc_sleep_delay,
                reject_limit=hs.config.federation_rc_reject_limit,
                concurrent_requests=hs.config.federation_rc_concurrent
            ),
        )

    defer.returnValue(hs)
Code example #55
0
File: homeserver.py Project: JigmeDatse/synapse
def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    try:
        config = HomeServerConfig.load_or_generate_config(
            "Synapse Homeserver",
            config_options,
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    if not config:
        # If a config isn't returned, and an exception isn't raised, we're just
        # generating config files and shouldn't try to continue.
        sys.exit(0)

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)

    version_string = get_version_string("Synapse", synapse)

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config)
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    logger.info("Preparing database: %s...", config.database_config['name'])

    try:
        db_conn = hs.get_db_conn(run_new_connection=False)
        prepare_database(db_conn, database_engine, config=config)
        database_engine.on_new_connection(db_conn)

        hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %s.", config.database_config['name'])

    hs.setup()
    hs.start_listening()

    def start():
        hs.get_pusherpool().start()
        hs.get_state_handler().start_caching()
        hs.get_datastore().start_profiling()
        hs.get_datastore().start_doing_background_updates()
        hs.get_replication_layer().start_get_pdu_cache()

    reactor.callWhenRunning(start)

    return hs
Code example #56
0
 def prepare(self):
     engine = create_engine("sqlite3")
     return self.runWithConnection(
         lambda conn: prepare_database(conn, engine))
Code example #57
0
File: utils.py Project: rrix/synapse
 def prepare(self):
     engine = create_engine("sqlite3")
     return self.runWithConnection(
         lambda conn: prepare_database(conn, engine)
     )
Code example #58
0
File: homeserver.py Project: matrix-org/synapse
def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    try:
        config = HomeServerConfig.load_or_generate_config(
            "Synapse Homeserver",
            config_options,
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    if not config:
        # If a config isn't returned, and an exception isn't raised, we're just
        # generating config files and shouldn't try to continue.
        sys.exit(0)

    synapse.config.logger.setup_logging(
        config,
        use_worker_options=False
    )

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    logger.info("Preparing database: %s...", config.database_config['name'])

    try:
        with hs.get_db_conn(run_new_connection=False) as db_conn:
            prepare_database(db_conn, database_engine, config=config)
            database_engine.on_new_connection(db_conn)

            hs.run_startup_checks(db_conn, database_engine)

            db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %s.", config.database_config['name'])

    hs.setup()
    hs.setup_master()

    @defer.inlineCallbacks
    def do_acme():
        """
        Reprovision an ACME certificate, if it's required.

        Returns:
            Deferred[bool]: Whether the cert has been updated.
        """
        acme = hs.get_acme_handler()

        # Check how long the certificate is active for.
        cert_days_remaining = hs.config.is_disk_cert_valid(
            allow_self_signed=False
        )

        # We want to reprovision if cert_days_remaining is None (meaning no
        # certificate exists), or the days remaining number it returns
        # is less than our re-registration threshold.
        provision = False

        if (
            cert_days_remaining is None or
            cert_days_remaining < hs.config.acme_reprovision_threshold
        ):
            provision = True

        if provision:
            yield acme.provision_certificate()

        defer.returnValue(provision)

    @defer.inlineCallbacks
    def reprovision_acme():
        """
        Provision a certificate from ACME, if required, and reload the TLS
        certificate if it's renewed.
        """
        reprovisioned = yield do_acme()
        if reprovisioned:
            _base.refresh_certificate(hs)

    @defer.inlineCallbacks
    def start():
        try:
            # Run the ACME provisioning code, if it's enabled.
            if hs.config.acme_enabled:
                acme = hs.get_acme_handler()
                # Start up the webservices which we will respond to ACME
                # challenges with, and then provision.
                yield acme.start_listening()
                yield do_acme()

                # Check if it needs to be reprovisioned every day.
                hs.get_clock().looping_call(
                    reprovision_acme,
                    24 * 60 * 60 * 1000
                )

            _base.start(hs, config.listeners)

            hs.get_pusherpool().start()
            hs.get_datastore().start_doing_background_updates()
        except Exception:
            # Print the exception and bail out.
            print("Error during startup:", file=sys.stderr)

            # this gives better tracebacks than traceback.print_exc()
            Failure().printTraceback(file=sys.stderr)

            if reactor.running:
                reactor.stop()
            sys.exit(1)

    reactor.callWhenRunning(start)

    return hs
Code example #59
0
File: utils.py Project: Ralith/synapse
def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
    """Setup a homeserver suitable for running tests against. Keyword arguments
    are passed to the Homeserver constructor. If no datastore is supplied a
    datastore backed by an in-memory sqlite db will be given to the HS.
    """
    if config is None:
        config = Mock()
        config.signing_key = [MockKey()]
        config.event_cache_size = 1
        config.enable_registration = True
        config.macaroon_secret_key = "not even a little secret"
        config.expire_access_token = False
        config.server_name = name
        config.trusted_third_party_id_servers = []
        config.room_invite_state_types = []

    config.use_frozen_dicts = True
    config.database_config = {"name": "sqlite3"}

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if datastore is None:
        db_pool = SQLiteMemoryDbPool()
        yield db_pool.prepare()
        hs = HomeServer(
            name,
            db_pool=db_pool,
            config=config,
            version_string="Synapse/tests",
            database_engine=create_engine(config.database_config),
            get_db_conn=db_pool.get_db_conn,
            room_list_handler=object(),
            **kargs
        )
        hs.setup()
    else:
        hs = HomeServer(
            name,
            db_pool=None,
            datastore=datastore,
            config=config,
            version_string="Synapse/tests",
            database_engine=create_engine(config.database_config),
            room_list_handler=object(),
            **kargs
        )

    # bcrypt is far too slow to be doing in unit tests
    # Need to let the HS build an auth handler and then mess with it
    # because AuthHandler's constructor requires the HS, so we can't make one
    # beforehand and pass it in to the HS's constructor (chicken / egg)
    hs.get_auth_handler().hash = lambda p: hashlib.md5(p).hexdigest()
    hs.get_auth_handler().validate_hash = lambda p, h: hashlib.md5(p).hexdigest() == h

    fed = kargs.get("resource_for_federation", None)
    if fed:
        server.register_servlets(
            hs,
            resource=fed,
            authenticator=server.Authenticator(hs),
            ratelimiter=FederationRateLimiter(
                hs.get_clock(),
                window_size=hs.config.federation_rc_window_size,
                sleep_limit=hs.config.federation_rc_sleep_limit,
                sleep_msec=hs.config.federation_rc_sleep_delay,
                reject_limit=hs.config.federation_rc_reject_limit,
                concurrent_requests=hs.config.federation_rc_concurrent,
            ),
        )

    defer.returnValue(hs)
Code example #60
0
File: utils.py Project: gergelypolonkai/synapse
def setup_test_homeserver(
    cleanup_func,
    name="test",
    datastore=None,
    config=None,
    reactor=None,
    homeserverToUse=TestHomeServer,
    **kargs
):
    """
    Setup a homeserver suitable for running tests against.  Keyword arguments
    are passed to the Homeserver constructor.

    If no datastore is supplied, one is created and given to the homeserver.

    Args:
        cleanup_func : The function used to register a cleanup routine for
                       after the test.
    """
    if reactor is None:
        from twisted.internet import reactor

    if config is None:
        config = default_config(name)

    config.ldap_enabled = False

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if USE_POSTGRES_FOR_TESTS:
        test_db = "synapse_test_%s" % uuid.uuid4().hex

        config.database_config = {
            "name": "psycopg2",
            "args": {"database": test_db, "cp_min": 1, "cp_max": 5},
        }
    else:
        config.database_config = {
            "name": "sqlite3",
            "args": {"database": ":memory:", "cp_min": 1, "cp_max": 1},
        }

    db_engine = create_engine(config.database_config)

    # Create the database before we actually try and connect to it, based off
    # the template database we generate in setupdb()
    if datastore is None and isinstance(db_engine, PostgresEngine):
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB, user=POSTGRES_USER
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
        cur.execute(
            "CREATE DATABASE %s WITH TEMPLATE %s;" % (test_db, POSTGRES_BASE_DB)
        )
        cur.close()
        db_conn.close()

    # we need to configure the connection pool to run the on_new_connection
    # function, so that we can test code that uses custom sqlite functions
    # (like rank).
    config.database_config["args"]["cp_openfun"] = db_engine.on_new_connection

    if datastore is None:
        hs = homeserverToUse(
            name,
            config=config,
            db_config=config.database_config,
            version_string="Synapse/tests",
            database_engine=db_engine,
            room_list_handler=object(),
            tls_server_context_factory=Mock(),
            tls_client_options_factory=Mock(),
            reactor=reactor,
            **kargs
        )

        # Prepare the DB on SQLite -- PostgreSQL is a copy of an already up to
        # date db
        if not isinstance(db_engine, PostgresEngine):
            db_conn = hs.get_db_conn()
            yield prepare_database(db_conn, db_engine, config)
            db_conn.commit()
            db_conn.close()

        else:
            # We need to do cleanup on PostgreSQL
            def cleanup():
                import psycopg2

                # Close all the db pools
                hs.get_db_pool().close()

                dropped = False

                # Drop the test database
                db_conn = db_engine.module.connect(
                    database=POSTGRES_BASE_DB, user=POSTGRES_USER
                )
                db_conn.autocommit = True
                cur = db_conn.cursor()

                # Try a few times to drop the DB. Some things may hold on to the
                # database for a few more seconds due to flakiness, preventing
                # us from dropping it when the test is over. If we can't drop
                # it, warn and move on.
                for x in range(5):
                    try:
                        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
                        db_conn.commit()
                        dropped = True
                    except psycopg2.OperationalError as e:
                        warnings.warn(
                            "Couldn't drop old db: " + str(e), category=UserWarning
                        )
                        time.sleep(0.5)

                cur.close()
                db_conn.close()

                if not dropped:
                    warnings.warn("Failed to drop old DB.", category=UserWarning)

            if not LEAVE_DB:
                # Register the cleanup hook
                cleanup_func(cleanup)

        hs.setup()
    else:
        hs = homeserverToUse(
            name,
            db_pool=None,
            datastore=datastore,
            config=config,
            version_string="Synapse/tests",
            database_engine=db_engine,
            room_list_handler=object(),
            tls_server_context_factory=Mock(),
            tls_client_options_factory=Mock(),
            reactor=reactor,
            **kargs
        )

    # bcrypt is far too slow to be doing in unit tests
    # Need to let the HS build an auth handler and then mess with it
    # because AuthHandler's constructor requires the HS, so we can't make one
    # beforehand and pass it in to the HS's constructor (chicken / egg)
    hs.get_auth_handler().hash = lambda p: hashlib.md5(p.encode('utf8')).hexdigest()
    hs.get_auth_handler().validate_hash = (
        lambda p, h: hashlib.md5(p.encode('utf8')).hexdigest() == h
    )

    fed = kargs.get("resource_for_federation", None)
    if fed:
        server.register_servlets(
            hs,
            resource=fed,
            authenticator=server.Authenticator(hs),
            ratelimiter=FederationRateLimiter(
                hs.get_clock(),
                window_size=hs.config.federation_rc_window_size,
                sleep_limit=hs.config.federation_rc_sleep_limit,
                sleep_msec=hs.config.federation_rc_sleep_delay,
                reject_limit=hs.config.federation_rc_reject_limit,
                concurrent_requests=hs.config.federation_rc_concurrent,
            ),
        )

    defer.returnValue(hs)
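One detail worth calling out from the two test helpers above: the cp_openfun entry they add to database_config["args"] is a Twisted adbapi.ConnectionPool option, and the pool calls that function on every freshly opened DB-API connection. That is the point at which the engine's on_new_connection hook can register custom SQLite functions (the comments mention rank). The standalone sketch below shows the same mechanism without any Synapse code; the exclaim function is a toy stand-in for whatever the real engine registers.

# Standalone sketch of the cp_openfun hook; only Twisted and the standard
# sqlite3 module are assumed, nothing here is Synapse code.
from twisted.enterprise import adbapi
from twisted.internet import defer, reactor


def on_new_connection(conn):
    # called by the pool for every new sqlite3 connection, mirroring what
    # engine.on_new_connection does for the test homeserver
    conn.create_function("exclaim", 1, lambda x: "%s!" % (x,))


pool = adbapi.ConnectionPool(
    "sqlite3", ":memory:",
    check_same_thread=False,
    cp_min=1, cp_max=1,
    cp_openfun=on_new_connection,
)


@defer.inlineCallbacks
def main():
    rows = yield pool.runQuery("SELECT exclaim('hello')")
    print(rows)  # [('hello!',)]
    reactor.stop()


reactor.callWhenRunning(main)
reactor.run()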