def setUp(self):
    self.db_pool = Mock(spec=["runInteraction"])
    self.mock_txn = Mock()
    self.mock_conn = Mock(spec_set=["cursor", "rollback", "commit"])
    self.mock_conn.cursor.return_value = self.mock_txn
    self.mock_conn.rollback.return_value = None

    # Our fake runInteraction just runs synchronously inline
    def runInteraction(func, *args, **kwargs):
        return defer.succeed(func(self.mock_txn, *args, **kwargs))

    self.db_pool.runInteraction = runInteraction

    def runWithConnection(func, *args, **kwargs):
        return defer.succeed(func(self.mock_conn, *args, **kwargs))

    self.db_pool.runWithConnection = runWithConnection

    config = default_config(name="test", parse=True)
    hs = TestHomeServer("test", config=config)

    sqlite_config = {"name": "sqlite3"}
    engine = create_engine(sqlite_config)
    fake_engine = Mock(wraps=engine)
    fake_engine.can_native_upsert = False
    fake_engine.in_transaction.return_value = False

    db = DatabasePool(Mock(), Mock(config=sqlite_config), fake_engine)
    db._db_pool = self.db_pool

    self.datastore = SQLBaseStore(db, None, hs)
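# Hedged usage sketch (not part of the original tests): with the fake pool above,
# transaction functions run synchronously against self.mock_txn, so a test can
# call a DatabasePool helper and then assert on the mocked cursor. The exact SQL
# emitted by simple_insert is an implementation detail, so this sketch only
# checks that a query was executed at all.
@defer.inlineCallbacks
def test_insert_runs_against_mock_txn(self):
    self.mock_txn.rowcount = 1
    yield defer.ensureDeferred(
        self.datastore.db_pool.simple_insert(
            table="tablename", values={"columname": "Value"}
        )
    )
    self.assertTrue(self.mock_txn.execute.called)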
def set_renewal_mail_status_txn(txn: LoggingTransaction):
    DatabasePool.simple_update_one_txn(
        txn=txn,
        table="email_account_validity",
        keyvalues={"user_id": user_id},
        updatevalues={"email_sent": email_sent},
    )
def get_recent_users(
    txn: LoggingTransaction, since_ms: int, exclude_app_service: bool
) -> List[UserInfo]:
    """Fetches recently registered users and some info on them."""
    sql = """
        SELECT name, creation_ts FROM users
        WHERE
            ? <= creation_ts
            AND deactivated = 0
    """
    if exclude_app_service:
        sql += " AND appservice_id IS NULL"

    txn.execute(sql, (since_ms / 1000,))

    user_infos = [UserInfo(user_id, creation_ts) for user_id, creation_ts in txn]

    for user_info in user_infos:
        user_info.emails = DatabasePool.simple_select_onecol_txn(
            txn,
            table="user_threepids",
            keyvalues={"user_id": user_info.user_id, "medium": "email"},
            retcol="address",
        )

        sql = """
            SELECT room_id, canonical_alias, name, join_rules
            FROM local_current_membership
            INNER JOIN room_stats_state USING (room_id)
            WHERE user_id = ? AND membership = 'join'
        """
        txn.execute(sql, (user_info.user_id,))
        for room_id, canonical_alias, name, join_rules in txn:
            if join_rules == "public":
                user_info.public_rooms.append(canonical_alias or name or room_id)
            else:
                user_info.private_rooms.append(canonical_alias or name or room_id)

        user_info.ips = DatabasePool.simple_select_onecol_txn(
            txn,
            table="user_ips",
            keyvalues={"user_id": user_info.user_id},
            retcol="ip",
        )

    return user_infos
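# Hedged usage sketch: get_recent_users is a plain transaction function, so it
# would be driven through a store's transaction runner. The wrapper name, the
# `store` parameter and the argument values are assumptions for illustration.
async def list_recent_users(store, since_ms: int, exclude_app_service: bool):
    return await store.db_pool.runInteraction(
        "get_recent_users",
        get_recent_users,
        since_ms,
        exclude_app_service,
    )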
def get_renewal_token_txn(txn: LoggingTransaction):
    return DatabasePool.simple_select_one_onecol_txn(
        txn=txn,
        table="email_account_validity",
        keyvalues={"user_id": user_id},
        retcol="renewal_token",
    )
def get_user_from_renewal_token_txn(txn: LoggingTransaction):
    return DatabasePool.simple_select_one_txn(
        txn=txn,
        table="email_account_validity",
        keyvalues={"renewal_token": renewal_token},
        retcols=["user_id", "expiration_ts_ms", "token_used_ts_ms"],
    )
def build_db_store(
    self,
    db_config: DatabaseConnectionConfig,
    allow_outdated_version: bool = False,
) -> Store:
    """Builds and returns a database store using the provided configuration.

    Args:
        db_config: The database configuration
        allow_outdated_version: True to suppress errors about the database server
            version being too old to run a complete synapse

    Returns:
        The built Store object.
    """
    self.progress.set_state("Preparing %s" % db_config.config["name"])

    engine = create_engine(db_config.config)

    hs = MockHomeserver(self.hs_config)

    with make_conn(db_config, engine, "portdb") as db_conn:
        engine.check_database(db_conn, allow_outdated_version=allow_outdated_version)
        prepare_database(db_conn, engine, config=self.hs_config)
        # Type safety: ignore that we're using Mock homeservers here.
        store = Store(DatabasePool(hs, db_config, engine), db_conn, hs)  # type: ignore[arg-type]
        db_conn.commit()

    return store
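# Hedged usage sketch of how build_db_store might be invoked by the port script.
# The wrapper name, the "master-sqlite" label and self.sqlite_config are
# assumptions for illustration, not taken from the snippet above.
def build_sqlite_store(self) -> Store:
    return self.build_db_store(
        DatabaseConnectionConfig("master-sqlite", self.sqlite_config),
        allow_outdated_version=True,
    )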
def get_expiration_ts_for_user_txn(txn: LoggingTransaction):
    return DatabasePool.simple_select_one_onecol_txn(
        txn=txn,
        table="email_account_validity",
        keyvalues={"user_id": user_id},
        retcol="expiration_ts_ms",
        allow_none=True,
    )
def select_users_txn(txn, now_ms, renew_at):
    txn.execute(
        """
        SELECT user_id, expiration_ts_ms FROM email_account_validity
        WHERE email_sent = ? AND (expiration_ts_ms - ?) <= ?
        """,
        (False, now_ms, renew_at),
    )
    return DatabasePool.cursor_to_dict(txn)
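# Hedged sketch of how select_users_txn could be driven. The wrapper name and
# self._renew_at are assumptions; self._api.run_db_interaction and
# self._api.current_time_ms are assumed to be the same API object the other
# snippets in this code use.
async def get_users_to_remind(self):
    return await self._api.run_db_interaction(
        "get_users_to_remind",
        select_users_txn,
        self._api.current_time_ms(),
        self._renew_at,
    )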
def set_renewal_token_for_user_txn(txn: LoggingTransaction):
    if unique:
        ret = DatabasePool.simple_select_one_onecol_txn(
            txn=txn,
            table="email_account_validity",
            keyvalues={"renewal_token": renewal_token},
            retcol="user_id",
            allow_none=True,
        )

        if ret is not None:
            raise SynapseError(409, "Renewal token already in use")

    DatabasePool.simple_update_one_txn(
        txn=txn,
        table="email_account_validity",
        keyvalues={"user_id": user_id},
        updatevalues={
            "renewal_token": renewal_token,
            "token_used_ts_ms": None,
        },
    )
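# Hedged sketch of the retry-on-collision pattern that the `unique` check above
# enables: keep generating random tokens until the update succeeds. random_string,
# the wrapper name and self.set_renewal_token_for_user (assumed to wrap the txn
# function above via run_db_interaction) are illustrative assumptions.
async def generate_renewal_token(self, user_id: str) -> str:
    while True:
        renewal_token = random_string(32)
        try:
            await self.set_renewal_token_for_user(
                user_id, renewal_token, unique=True
            )
            return renewal_token
        except SynapseError:
            # 409: the token is already in use, try another one.
            pass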
def __init__(self, main_store_class, hs): # Note we pass in the main store class here as workers use a different main # store. self.databases = [] main = None state = None persist_events = None for database_config in hs.config.database.databases: db_name = database_config.name engine = create_engine(database_config.config) with make_conn(database_config, engine, "startup") as db_conn: logger.info("[database config %r]: Checking database server", db_name) engine.check_database(db_conn) logger.info( "[database config %r]: Preparing for databases %r", db_name, database_config.databases, ) prepare_database( db_conn, engine, hs.config, databases=database_config.databases, ) database = DatabasePool(hs, database_config, engine) if "main" in database_config.databases: logger.info( "[database config %r]: Starting 'main' database", db_name) # Sanity check we don't try and configure the main store on # multiple databases. if main: raise Exception("'main' data store already configured") main = main_store_class(database, db_conn, hs) # If we're on a process that can persist events also # instantiate a `PersistEventsStore` if hs.get_instance_name( ) in hs.config.worker.writers.events: persist_events = PersistEventsStore(hs, database, main) if "state" in database_config.databases: logger.info( "[database config %r]: Starting 'state' database", db_name) # Sanity check we don't try and configure the state store on # multiple databases. if state: raise Exception( "'state' data store already configured") state = StateGroupDataStore(database, db_conn, hs) db_conn.commit() self.databases.append(database) logger.info("[database config %r]: prepared", db_name) # Closing the context manager doesn't close the connection. # psycopg will close the connection when the object gets GCed, but *only* # if the PID is the same as when the connection was opened [1], and # it may not be if we fork in the meantime. # # [1]: https://github.com/psycopg/psycopg2/blob/2_8_5/psycopg/connection_type.c#L1378 db_conn.close() # Sanity check that we have actually configured all the required stores. if not main: raise Exception("No 'main' database configured") if not state: raise Exception("No 'state' database configured") # We use local variables here to ensure that the databases do not have # optional types. self.main = main self.state = state self.persist_events = persist_events
def populate_table_txn(txn: LoggingTransaction, batch_size: int) -> int:
    # Populate the database with the users that are in the users table but not in
    # the email_account_validity one.
    txn.execute(
        """
        SELECT users.name FROM users
        LEFT JOIN email_account_validity
            ON (users.name = email_account_validity.user_id)
        WHERE email_account_validity.user_id IS NULL
        LIMIT ?
        """,
        (batch_size,),
    )

    missing_users = DatabasePool.cursor_to_dict(txn)
    if not missing_users:
        return 0

    # Figure out the state of these users in the account_validity table.
    # Note that at some point we'll want to get rid of the account_validity table
    # and we'll need to get rid of this code as well.
    rows = DatabasePool.simple_select_many_txn(
        txn=txn,
        table="account_validity",
        column="user_id",
        iterable=tuple([user["name"] for user in missing_users]),
        keyvalues={},
        retcols=(
            "user_id",
            "expiration_ts_ms",
            "email_sent",
            "renewal_token",
            "token_used_ts_ms",
        ),
    )

    # Turn the results into a dictionary so we can later merge it with the list
    # of registered users on the homeserver.
    users_to_insert = {}
    for row in rows:
        users_to_insert[row["user_id"]] = row

    # Look for users that are registered but don't have a state in the
    # account_validity table, and set a default state for them. This default
    # state includes an expiration timestamp close to now + validity period, but
    # is slightly randomised to avoid sending huge bursts of renewal emails at
    # once.
    default_expiration_ts = self._api.current_time_ms() + self._period
    for user in missing_users:
        if users_to_insert.get(user["name"]) is None:
            users_to_insert[user["name"]] = {
                "user_id": user["name"],
                "expiration_ts_ms": self._rand.randrange(
                    default_expiration_ts - self._expiration_ts_max_delta,
                    default_expiration_ts,
                ),
                "email_sent": False,
                "renewal_token": None,
                "token_used_ts_ms": None,
            }

    # Insert the users in the table.
    DatabasePool.simple_insert_many_txn(
        txn=txn,
        table="email_account_validity",
        values=list(users_to_insert.values()),
    )

    return len(missing_users)
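# Hedged sketch of driving the batched populate_table_txn above until it has
# caught up. The wrapper name, loop shape and default batch size are assumptions,
# and populate_table_txn is assumed to be in scope (e.g. defined as a closure in
# this method, since it reads self._api and self._rand).
async def populate_table(self, batch_size: int = 100) -> None:
    processed_rows = batch_size
    while processed_rows == batch_size:
        # Keep going until a batch comes back smaller than the batch size.
        processed_rows = await self._api.run_db_interaction(
            "email_account_validity_populate_table",
            populate_table_txn,
            batch_size,
        )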
def __init__(self, main_store_class, hs):
    # Note we pass in the main store class here as workers use a different main
    # store.

    self.databases = []
    main = None
    state = None
    persist_events = None

    for database_config in hs.config.database.databases:
        db_name = database_config.name
        engine = create_engine(database_config.config)

        with make_conn(database_config, engine) as db_conn:
            logger.info("Preparing database %r...", db_name)

            engine.check_database(db_conn)
            prepare_database(
                db_conn,
                engine,
                hs.config,
                databases=database_config.databases,
            )

            database = DatabasePool(hs, database_config, engine)

            if "main" in database_config.databases:
                logger.info("Starting 'main' data store")

                # Sanity check we don't try and configure the main store on
                # multiple databases.
                if main:
                    raise Exception("'main' data store already configured")

                main = main_store_class(database, db_conn, hs)

                # If we're on a process that can persist events also
                # instantiate a `PersistEventsStore`
                if hs.config.worker.writers.events == hs.get_instance_name():
                    persist_events = PersistEventsStore(hs, database, main)

            if "state" in database_config.databases:
                logger.info("Starting 'state' data store")

                # Sanity check we don't try and configure the state store on
                # multiple databases.
                if state:
                    raise Exception("'state' data store already configured")

                state = StateGroupDataStore(database, db_conn, hs)

            db_conn.commit()

            self.databases.append(database)

            logger.info("Database %r prepared", db_name)

    # Sanity check that we have actually configured all the required stores.
    if not main:
        raise Exception("No 'main' data store configured")

    if not state:
        raise Exception("No 'state' data store configured")

    # We use local variables here to ensure that the databases do not have
    # optional types.
    self.main = main
    self.state = state
    self.persist_events = persist_events