def setUp(self):
    """Create a homeserver with four registered appservices and build the
    transaction store under test against its (single) database."""
    self.as_yaml_files = []
    hs = yield setup_test_homeserver(
        self.addCleanup, federation_sender=Mock(), federation_client=Mock()
    )
    hs.config.app_service_config_files = self.as_yaml_files
    hs.config.event_cache_size = 1
    hs.config.password_providers = []

    entries = [
        ("token1", "https://matrix-as.org", "id_1"),
        ("alpha_tok", "https://alpha.com", "id_alpha"),
        ("beta_tok", "https://beta.com", "id_beta"),
        ("gamma_tok", "https://gamma.com", "id_gamma"),
    ]
    self.as_list = [
        {"token": token, "url": url, "id": as_id} for token, url, as_id in entries
    ]
    for service in self.as_list:
        yield self._add_service(service["url"], service["token"], service["id"])

    self.as_yaml_files = []

    # We assume there is only one database in these tests
    database = hs.get_datastores().databases[0]
    self.db_pool = database._db_pool
    self.engine = database.engine

    db_config = hs.config.get_single_database()
    self.store = TestTransactionStore(
        database, make_conn(db_config, self.engine), hs
    )
def build_db_store(
    self,
    db_config: DatabaseConnectionConfig,
    allow_outdated_version: bool = False,
) -> Store:
    """Builds and returns a database store using the provided configuration.

    Args:
        db_config: The database configuration
        allow_outdated_version: True to suppress errors about the database server
            version being too old to run a complete synapse

    Returns:
        The built Store object.
    """
    self.progress.set_state("Preparing %s" % db_config.config["name"])

    db_engine = create_engine(db_config.config)
    mock_hs = MockHomeserver(self.hs_config)

    with make_conn(db_config, db_engine, "portdb") as db_conn:
        db_engine.check_database(
            db_conn, allow_outdated_version=allow_outdated_version
        )
        prepare_database(db_conn, db_engine, config=self.hs_config)
        # Type safety: ignore that we're using Mock homeservers here.
        built_store = Store(DatabasePool(mock_hs, db_config, db_engine), db_conn, mock_hs)  # type: ignore[arg-type]
        db_conn.commit()

    return built_store
def prepare(self, reactor, clock, hs):
    """Build a slaved store and connect it to the master via an in-memory
    (fake-transport) replication channel."""
    db_config = hs.config.database.get_single_database()
    self.master_store = self.hs.get_datastore()
    self.storage = hs.get_storage()

    database = hs.get_datastores().databases[0]
    self.slaved_store = self.STORE_TYPE(
        database, make_conn(db_config, database.engine), self.hs
    )
    self.event_id = 0

    stream_factory = ReplicationStreamProtocolFactory(self.hs)
    self.streamer = hs.get_replication_streamer()

    # We now do some gut wrenching so that we have a client that is based
    # off of the slave store rather than the main store.
    self.replication_handler = ReplicationCommandHandler(self.hs)
    self.replication_handler._instance_name = "worker"
    self.replication_handler._replication_data_handler = ReplicationDataHandler(
        self.slaved_store
    )

    client_factory = DirectTcpReplicationClientFactory(
        self.hs, "client_name", self.replication_handler
    )
    client_factory.handler = self.replication_handler

    server_proto = stream_factory.buildProtocol(None)
    client_proto = client_factory.buildProtocol(None)

    client_proto.makeConnection(FakeTransport(server_proto, reactor))

    self.server_to_client_transport = FakeTransport(client_proto, reactor)
    server_proto.makeConnection(self.server_to_client_transport)
def __init__(self, main_store_class, hs):
    """Create the configured databases and instantiate the data stores on them.

    Args:
        main_store_class: The class for the 'main' data store; passed in
            because workers use a different main store class.
        hs: The homeserver instance.

    Raises:
        Exception: if a store is configured on more than one database, or if
            a required store ('main' or 'state') was not configured at all.
    """
    # Note we pass in the main store class here as workers use a different main
    # store.
    self.databases = []
    self.main = None
    self.state = None

    for database_config in hs.config.database.databases:
        db_name = database_config.name
        engine = create_engine(database_config.config)

        with make_conn(database_config, engine) as db_conn:
            logger.info("Preparing database %r...", db_name)

            engine.check_database(db_conn.cursor())
            prepare_database(
                db_conn,
                engine,
                hs.config,
                data_stores=database_config.data_stores,
            )

            database = Database(hs, database_config, engine)

            if "main" in database_config.data_stores:
                logger.info("Starting 'main' data store")

                # Sanity check we don't try and configure the main store on
                # multiple databases.
                if self.main:
                    raise Exception("'main' data store already configured")

                self.main = main_store_class(database, db_conn, hs)

            if "state" in database_config.data_stores:
                logger.info("Starting 'state' data store")

                # Sanity check we don't try and configure the state store on
                # multiple databases.
                if self.state:
                    raise Exception("'state' data store already configured")

                self.state = StateGroupDataStore(database, db_conn, hs)

            db_conn.commit()

        self.databases.append(database)

        logger.info("Database %r prepared", db_name)

    # Sanity check that we have actually configured all the required stores.
    if not self.main:
        raise Exception("No 'main' data store configured")

    if not self.state:
        # Fixed: this previously raised the 'main' message, masking a missing
        # 'state' store configuration.
        raise Exception("No 'state' data store configured")
def test_safe_locale(self) -> None:
    """The engine reports the expected ("C", "C") locale for the test database."""
    db = self.hs.get_datastores().databases[0]

    conn = make_conn(db._database_config, db.engine, "test_unsafe")
    with conn.cursor() as cursor:
        locale = db.engine.get_db_locale(cursor)
    self.assertEqual(locale, ("C", "C"))
    conn.close()
def test_unsafe_locale(self, mock_db_locale: MagicMock) -> None:
    """Both database checks reject a database whose locale is not C/C."""
    mock_db_locale.return_value = ("B", "B")
    db = self.hs.get_datastores().databases[0]

    conn = make_conn(db._database_config, db.engine, "test_unsafe")
    with self.assertRaises(IncorrectDatabaseSetup):
        db.engine.check_database(conn)
    with self.assertRaises(IncorrectDatabaseSetup):
        db.engine.check_new_database(conn)
    conn.close()
def test_unique_works(self) -> None:
    """Two config files with distinct AS tokens load without error."""
    config_files = [
        self._write_config(suffix="1"),
        self._write_config(suffix="2"),
    ]
    self.hs.config.appservice.app_service_config_files = config_files
    self.hs.config.caches.event_cache_size = 1

    db = self.hs.get_datastores().databases[0]
    ApplicationServiceStore(
        db,
        make_conn(db._database_config, db.engine, "test"),
        self.hs,
    )
def test_unique_works(self):
    """Two config files with distinct AS tokens load without error."""
    config_files = [
        self._write_config(suffix="1"),
        self._write_config(suffix="2"),
    ]

    hs = yield setup_test_homeserver(
        self.addCleanup, federation_sender=Mock(), federation_client=Mock()
    )
    hs.config.appservice.app_service_config_files = config_files
    hs.config.caches.event_cache_size = 1

    db = hs.get_datastores().databases[0]
    ApplicationServiceStore(
        db, make_conn(db._database_config, db.engine, "test"), hs
    )
def test_duplicate_as_tokens(self) -> None:
    """Re-using one AS token across two config files raises a ConfigError
    naming both files and the offending token."""
    f1 = self._write_config(as_token="as_token", suffix="1")
    f2 = self._write_config(as_token="as_token", suffix="2")
    self.hs.config.appservice.app_service_config_files = [f1, f2]
    self.hs.config.caches.event_cache_size = 1

    with self.assertRaises(ConfigError) as cm:
        db = self.hs.get_datastores().databases[0]
        ApplicationServiceStore(
            db,
            make_conn(db._database_config, db.engine, "test"),
            self.hs,
        )

    message = str(cm.exception)
    for expected in (f1, f2, "as_token"):
        self.assertIn(expected, message)
def setUp(self) -> None:
    """Register four appservices and build the transaction store under test
    against the homeserver's (single) database."""
    super(ApplicationServiceTransactionStoreTestCase, self).setUp()
    self.as_yaml_files: List[str] = []

    self.hs.config.appservice.app_service_config_files = self.as_yaml_files
    self.hs.config.caches.event_cache_size = 1

    entries = [
        ("token1", "https://matrix-as.org", "id_1"),
        ("alpha_tok", "https://alpha.com", "id_alpha"),
        ("beta_tok", "https://beta.com", "id_beta"),
        ("gamma_tok", "https://gamma.com", "id_gamma"),
    ]
    self.as_list = [
        {"token": token, "url": url, "id": as_id} for token, url, as_id in entries
    ]
    for service in self.as_list:
        self._add_service(service["url"], service["token"], service["id"])

    self.as_yaml_files = []

    # We assume there is only one database in these tests
    database = self.hs.get_datastores().databases[0]
    self.db_pool = database._db_pool
    self.engine = database.engine

    db_config = self.hs.config.database.get_single_database()
    self.store = TestTransactionStore(
        database, make_conn(db_config, self.engine, "test"), self.hs
    )
def test_duplicate_as_tokens(self):
    """Re-using one AS token across two config files raises a ConfigError
    naming both files and the offending token."""
    f1 = self._write_config(as_token="as_token", suffix="1")
    f2 = self._write_config(as_token="as_token", suffix="2")

    hs = yield setup_test_homeserver(
        self.addCleanup, federation_sender=Mock(), federation_client=Mock()
    )
    hs.config.app_service_config_files = [f1, f2]
    hs.config.caches.event_cache_size = 1
    hs.config.password_providers = []

    with self.assertRaises(ConfigError) as cm:
        db = hs.get_datastores().databases[0]
        ApplicationServiceStore(
            db, make_conn(db._database_config, db.engine), hs
        )

    message = str(cm.exception)
    self.assertIn(f1, message)
    self.assertIn(f2, message)
    self.assertIn("as_token", message)
def setUp(self):
    """Insert three appservices, then build the ApplicationServiceStore
    (which reads them back on construction)."""
    self.as_yaml_files = []
    hs = yield setup_test_homeserver(
        self.addCleanup, federation_sender=Mock(), federation_client=Mock()
    )

    hs.config.appservice.app_service_config_files = self.as_yaml_files
    hs.config.caches.event_cache_size = 1

    self.as_token = "token1"
    self.as_url = "some_url"
    self.as_id = "as1"
    self._add_appservice(
        self.as_token, self.as_id, self.as_url, "some_hs_token", "bob"
    )
    for token, as_id in (("token2", "as2"), ("token3", "as3")):
        self._add_appservice(token, as_id, "some_url", "some_hs_token", "bob")

    # must be done after inserts
    database = hs.get_datastores().databases[0]
    self.store = ApplicationServiceStore(
        database,
        make_conn(database._database_config, database.engine, "test"),
        hs,
    )
def setUp(self):
    """Insert three appservices, then build the ApplicationServiceStore
    (which reads them back on construction)."""
    super(ApplicationServiceStoreTestCase, self).setUp()

    self.as_yaml_files: List[str] = []
    self.hs.config.appservice.app_service_config_files = self.as_yaml_files
    self.hs.config.caches.event_cache_size = 1

    self.as_token = "token1"
    self.as_url = "some_url"
    self.as_id = "as1"
    self._add_appservice(
        self.as_token, self.as_id, self.as_url, "some_hs_token", "bob"
    )
    for token, as_id in (("token2", "as2"), ("token3", "as3")):
        self._add_appservice(token, as_id, "some_url", "some_hs_token", "bob")

    # must be done after inserts
    database = self.hs.get_datastores().databases[0]
    self.store = ApplicationServiceStore(
        database,
        make_conn(database._database_config, database.engine, "test"),
        self.hs,
    )
def main():
    """Entry point: parse CLI args, load the Synapse config, and print users
    registered since the requested cutoff, optionally with context lines.

    Raises:
        RuntimeError: if no configured database hosts the 'main' database.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c",
        "--config-path",
        action="append",
        metavar="CONFIG_FILE",
        help="The config files for Synapse.",
        required=True,
    )
    parser.add_argument(
        "-s",
        "--since",
        metavar="duration",
        help="Specify how far back to review user registrations for, defaults to 7d (i.e. 7 days).",
        default="7d",
    )
    parser.add_argument(
        "-e",
        "--exclude-emails",
        action="store_true",
        help="Exclude users that have validated email addresses",
    )
    parser.add_argument(
        "-u",
        "--only-users",
        action="store_true",
        help="Only print user IDs that match.",
    )

    config = ReviewConfig()

    config_args = parser.parse_args(sys.argv[1:])
    config_files = find_config_files(search_paths=config_args.config_path)
    config_dict = read_config_files(config_files)
    config.parse_config_dict(config_dict)

    # Cutoff in milliseconds since the epoch.
    since_ms = time.time() * 1000 - config.parse_duration(config_args.since)
    exclude_users_with_email = config_args.exclude_emails
    include_context = not config_args.only_users

    # Pick the database config that hosts the 'main' database.
    for database_config in config.database.databases:
        if "main" in database_config.databases:
            break
    else:
        # Fixed: previously we silently fell through and queried the *last*
        # database config even when none of them hosted 'main'.
        raise RuntimeError("No database config contains the 'main' database")

    engine = create_engine(database_config.config)

    with make_conn(database_config, engine, "review_recent_signups") as db_conn:
        user_infos = get_recent_users(db_conn.cursor(), since_ms)

    for user_info in user_infos:
        if exclude_users_with_email and user_info.emails:
            continue

        if include_context:
            # Show at most three public rooms, with an ellipsis for the rest.
            print_public_rooms = ""
            if user_info.public_rooms:
                print_public_rooms = "(" + ", ".join(user_info.public_rooms[:3])

                if len(user_info.public_rooms) > 3:
                    print_public_rooms += ", ..."

                print_public_rooms += ")"

            print("# Created:", datetime.fromtimestamp(user_info.creation_ts))
            print("# Email:", ", ".join(user_info.emails) or "None")
            print("# IPs:", ", ".join(user_info.ips))
            print(
                "# Number joined public rooms:",
                len(user_info.public_rooms),
                print_public_rooms,
            )
            print("# Number joined private rooms:", len(user_info.private_rooms))
            print("#")

        print(user_info.user_id)

        if include_context:
            print()
def __init__(self, main_store_class, hs):
    """Create the configured database pools and instantiate the data stores
    ('main', 'state', and optionally the events persister) on them.

    Args:
        main_store_class: The class for the 'main' data store; passed in
            because workers use a different main store class.
        hs: The homeserver instance.

    Raises:
        Exception: if a store is configured on more than one database, or if
            a required store ('main' or 'state') was not configured at all.
    """
    # Note we pass in the main store class here as workers use a different main
    # store.

    self.databases = []

    # Built up while looping over the configured databases; copied to
    # attributes at the end so the attributes are non-Optional.
    main = None
    state = None
    persist_events = None

    for database_config in hs.config.database.databases:
        db_name = database_config.name
        engine = create_engine(database_config.config)

        with make_conn(database_config, engine, "startup") as db_conn:
            logger.info(
                "[database config %r]: Checking database server", db_name
            )
            engine.check_database(db_conn)

            logger.info(
                "[database config %r]: Preparing for databases %r",
                db_name,
                database_config.databases,
            )

            # Run any required schema preparation/migrations for the
            # databases hosted on this connection.
            prepare_database(
                db_conn,
                engine,
                hs.config,
                databases=database_config.databases,
            )

            database = DatabasePool(hs, database_config, engine)

            if "main" in database_config.databases:
                logger.info(
                    "[database config %r]: Starting 'main' database", db_name
                )

                # Sanity check we don't try and configure the main store on
                # multiple databases.
                if main:
                    raise Exception("'main' data store already configured")

                main = main_store_class(database, db_conn, hs)

                # If we're on a process that can persist events also
                # instantiate a `PersistEventsStore`
                if hs.get_instance_name() in hs.config.worker.writers.events:
                    persist_events = PersistEventsStore(hs, database, main)

            if "state" in database_config.databases:
                logger.info(
                    "[database config %r]: Starting 'state' database", db_name
                )

                # Sanity check we don't try and configure the state store on
                # multiple databases.
                if state:
                    raise Exception("'state' data store already configured")

                state = StateGroupDataStore(database, db_conn, hs)

            db_conn.commit()

            self.databases.append(database)

            logger.info("[database config %r]: prepared", db_name)

        # Closing the context manager doesn't close the connection.
        # psycopg will close the connection when the object gets GCed, but *only*
        # if the PID is the same as when the connection was opened [1], and
        # it may not be if we fork in the meantime.
        #
        # [1]: https://github.com/psycopg/psycopg2/blob/2_8_5/psycopg/connection_type.c#L1378
        db_conn.close()

    # Sanity check that we have actually configured all the required stores.
    if not main:
        raise Exception("No 'main' database configured")

    if not state:
        raise Exception("No 'state' database configured")

    # We use local variables here to ensure that the databases do not have
    # optional types.
    self.main = main
    self.state = state
    self.persist_events = persist_events
def __init__(self, main_store_class, hs):
    """Create the configured database pools and instantiate the data stores
    ('main', 'state', and optionally the events persister) on them.

    Args:
        main_store_class: The class for the 'main' data store; passed in
            because workers use a different main store class.
        hs: The homeserver instance.

    Raises:
        Exception: if a store is configured on more than one database, or if
            a required store ('main' or 'state') was not configured at all.
    """
    # Note we pass in the main store class here as workers use a different main
    # store.
    self.databases = []
    main = None
    state = None
    persist_events = None

    for database_config in hs.config.database.databases:
        db_name = database_config.name
        engine = create_engine(database_config.config)

        with make_conn(database_config, engine) as db_conn:
            logger.info("Preparing database %r...", db_name)

            engine.check_database(db_conn)
            prepare_database(
                db_conn,
                engine,
                hs.config,
                databases=database_config.databases,
            )

            database = DatabasePool(hs, database_config, engine)

            if "main" in database_config.databases:
                logger.info("Starting 'main' data store")

                # Sanity check we don't try and configure the main store on
                # multiple databases.
                if main:
                    raise Exception("'main' data store already configured")

                main = main_store_class(database, db_conn, hs)

                # If we're on a process that can persist events also
                # instantiate a `PersistEventsStore`
                if hs.config.worker.writers.events == hs.get_instance_name():
                    persist_events = PersistEventsStore(hs, database, main)

            if "state" in database_config.databases:
                logger.info("Starting 'state' data store")

                # Sanity check we don't try and configure the state store on
                # multiple databases.
                if state:
                    raise Exception("'state' data store already configured")

                state = StateGroupDataStore(database, db_conn, hs)

            db_conn.commit()

        self.databases.append(database)

        logger.info("Database %r prepared", db_name)

    # Sanity check that we have actually configured all the required stores.
    if not main:
        raise Exception("No 'main' data store configured")

    if not state:
        # Fixed: this previously raised the 'main' message, masking a missing
        # 'state' store configuration.
        raise Exception("No 'state' data store configured")

    # We use local variables here to ensure that the databases do not have
    # optional types.
    self.main = main
    self.state = state
    self.persist_events = persist_events