def run_create(cur, database_engine, *args, **kwargs):
    """Drop the old indices, then drop the old constraint.

    The constraint-dropping SQL differs per engine, so the right variant is
    selected based on the database engine in use.
    """
    for stmt in get_statements(DROP_INDICES.splitlines()):
        cur.execute(stmt)

    # Pick the engine-specific constraint-drop script.
    drop_constraint = (
        POSTGRES_DROP_CONSTRAINT
        if isinstance(database_engine, PostgresEngine)
        else SQLITE_DROP_CONSTRAINT
    )
    for stmt in get_statements(drop_constraint.splitlines()):
        cur.execute(stmt)
def run_create(cur, database_engine, *args, **kwargs):
    """Create the tables shared by both engines, then the engine-specific one.

    Raises:
        Exception: if the database engine is neither Postgres nor SQLite.
    """
    # Engine-agnostic statements run first, matching the original ordering.
    for stmt in get_statements(BOTH_TABLES.splitlines()):
        cur.execute(stmt)

    if isinstance(database_engine, PostgresEngine):
        engine_sql = POSTGRES_TABLE
    elif isinstance(database_engine, Sqlite3Engine):
        engine_sql = SQLITE_TABLE
    else:
        raise Exception("Unrecognized database engine")

    for stmt in get_statements(engine_sql.splitlines()):
        cur.execute(stmt)
def run_create(cur, database_engine, *args, **kwargs):
    """Alter the table (Postgres only) and queue the background update.

    On non-Postgres engines this is a no-op.  Otherwise it schedules the
    "event_search_order" background update over the current range of
    stream orderings in the events table.
    """
    if not isinstance(database_engine, PostgresEngine):
        return

    for stmt in get_statements(ALTER_TABLE.splitlines()):
        cur.execute(stmt)

    # Determine the stream-ordering range the background job must cover.
    cur.execute("SELECT MIN(stream_ordering) FROM events")
    min_stream_id = cur.fetchall()[0][0]

    cur.execute("SELECT MAX(stream_ordering) FROM events")
    max_stream_id = cur.fetchall()[0][0]

    # Both are None when the events table is empty; nothing to reindex then.
    if min_stream_id is not None and max_stream_id is not None:
        progress_json = json.dumps(
            {
                "target_min_stream_id_inclusive": min_stream_id,
                "max_stream_id_exclusive": max_stream_id + 1,
                "rows_inserted": 0,
                "have_added_indexes": False,
            }
        )
        sql = (
            "INSERT into background_updates (update_name, progress_json)"
            " VALUES (?, ?)"
        )
        sql = database_engine.convert_param_style(sql)
        cur.execute(sql, ("event_search_order", progress_json))
def run_create(cur, database_engine, *args, **kwargs):
    """Alter the table and queue the "event_fields_sender_url" update.

    The background update is scheduled over the current range of stream
    orderings in the events table; if the table is empty there is nothing
    to backfill and no row is inserted.
    """
    for stmt in get_statements(ALTER_TABLE.splitlines()):
        cur.execute(stmt)

    # Find the inclusive/exclusive bounds for the backfill job.
    cur.execute("SELECT MIN(stream_ordering) FROM events")
    min_stream_id = cur.fetchall()[0][0]

    cur.execute("SELECT MAX(stream_ordering) FROM events")
    max_stream_id = cur.fetchall()[0][0]

    if min_stream_id is not None and max_stream_id is not None:
        progress_json = simplejson.dumps(
            {
                "target_min_stream_id_inclusive": min_stream_id,
                "max_stream_id_exclusive": max_stream_id + 1,
                "rows_inserted": 0,
            }
        )
        sql = (
            "INSERT into background_updates (update_name, progress_json)"
            " VALUES (?, ?)"
        )
        sql = database_engine.convert_param_style(sql)
        cur.execute(sql, ("event_fields_sender_url", progress_json))
def run_create(cur, database_engine, *args, **kwargs):
    """Create the search table and queue the "event_search" update.

    The table DDL differs per engine; the background update is then
    scheduled over the current range of stream orderings in events.

    Raises:
        Exception: if the database engine is neither Postgres nor SQLite.
    """
    if isinstance(database_engine, PostgresEngine):
        for stmt in get_statements(POSTGRES_TABLE.splitlines()):
            cur.execute(stmt)
    elif isinstance(database_engine, Sqlite3Engine):
        cur.execute(SQLITE_TABLE)
    else:
        raise Exception("Unrecognized database engine")

    # Work out the range of events the background job must index.
    cur.execute("SELECT MIN(stream_ordering) FROM events")
    min_stream_id = cur.fetchall()[0][0]

    cur.execute("SELECT MAX(stream_ordering) FROM events")
    max_stream_id = cur.fetchall()[0][0]

    # An empty events table means there is nothing to index.
    if min_stream_id is not None and max_stream_id is not None:
        progress_json = ujson.dumps(
            {
                "target_min_stream_id_inclusive": min_stream_id,
                "max_stream_id_exclusive": max_stream_id + 1,
                "rows_inserted": 0,
            }
        )
        sql = (
            "INSERT into background_updates (update_name, progress_json)"
            " VALUES (?, ?)"
        )
        sql = database_engine.convert_param_style(sql)
        cur.execute(sql, ("event_search", progress_json))
def run_upgrade(cur, database_engine, *args, **kwargs):
    """Alter the table and queue the "event_origin_server_ts" update.

    Schedules a background backfill over the current stream-ordering range
    of the events table; skipped entirely when the table is empty.
    """
    for stmt in get_statements(ALTER_TABLE.splitlines()):
        cur.execute(stmt)

    cur.execute("SELECT MIN(stream_ordering) FROM events")
    min_stream_id = cur.fetchall()[0][0]

    cur.execute("SELECT MAX(stream_ordering) FROM events")
    max_stream_id = cur.fetchall()[0][0]

    if min_stream_id is not None and max_stream_id is not None:
        progress_json = ujson.dumps(
            {
                "target_min_stream_id_inclusive": min_stream_id,
                "max_stream_id_exclusive": max_stream_id + 1,
                "rows_inserted": 0,
            }
        )
        sql = (
            "INSERT into background_updates (update_name, progress_json)"
            " VALUES (?, ?)"
        )
        sql = database_engine.convert_param_style(sql)
        cur.execute(sql, ("event_origin_server_ts", progress_json))
def run_create(cur, database_engine, *args, **kwargs):
    """Create the engine-specific search table and queue "event_search".

    Raises:
        Exception: if the database engine is neither Postgres nor SQLite.
    """
    if isinstance(database_engine, PostgresEngine):
        for stmt in get_statements(POSTGRES_TABLE.splitlines()):
            cur.execute(stmt)
    elif isinstance(database_engine, Sqlite3Engine):
        cur.execute(SQLITE_TABLE)
    else:
        raise Exception("Unrecognized database engine")

    # Bounds of the stream orderings the background job must cover.
    cur.execute("SELECT MIN(stream_ordering) FROM events")
    min_stream_id = cur.fetchall()[0][0]

    cur.execute("SELECT MAX(stream_ordering) FROM events")
    max_stream_id = cur.fetchall()[0][0]

    if min_stream_id is not None and max_stream_id is not None:
        progress_json = ujson.dumps(
            {
                "target_min_stream_id_inclusive": min_stream_id,
                "max_stream_id_exclusive": max_stream_id + 1,
                "rows_inserted": 0,
            }
        )
        sql = (
            "INSERT into background_updates (update_name, progress_json)"
            " VALUES (?, ?)"
        )
        sql = database_engine.convert_param_style(sql)
        cur.execute(sql, ("event_search", progress_json))
def run_create(cur, database_engine, *args, **kwargs):
    """Alter the table (Postgres only) and queue "event_search_order".

    Non-Postgres engines are skipped entirely.  The background update is
    scheduled over the current range of stream orderings in events.
    """
    if not isinstance(database_engine, PostgresEngine):
        return

    for stmt in get_statements(ALTER_TABLE.splitlines()):
        cur.execute(stmt)

    cur.execute("SELECT MIN(stream_ordering) FROM events")
    min_stream_id = cur.fetchall()[0][0]

    cur.execute("SELECT MAX(stream_ordering) FROM events")
    max_stream_id = cur.fetchall()[0][0]

    # Both bounds are None when events is empty — nothing to backfill.
    if min_stream_id is not None and max_stream_id is not None:
        progress_json = ujson.dumps(
            {
                "target_min_stream_id_inclusive": min_stream_id,
                "max_stream_id_exclusive": max_stream_id + 1,
                "rows_inserted": 0,
                "have_added_indexes": False,
            }
        )
        sql = (
            "INSERT into background_updates (update_name, progress_json)"
            " VALUES (?, ?)"
        )
        sql = database_engine.convert_param_style(sql)
        cur.execute(sql, ("event_search_order", progress_json))
def _make_staging_area(txn):
    """Build the temporary staging tables used to track rooms to process.

    Recreates the {TEMP_TABLE}_rooms table (with its indexes), ensures the
    {TEMP_TABLE}_position table exists, and seeds the rooms table with every
    room that does not yet have a row in room_stats_earliest_token.
    """
    # Create the temporary tables
    stmts = get_statements("""
        -- We just recreate the table, we'll be reinserting the
        -- correct entries again later anyway.
        DROP TABLE IF EXISTS {temp}_rooms;

        CREATE TABLE IF NOT EXISTS {temp}_rooms(
            room_id TEXT NOT NULL,
            events BIGINT NOT NULL
        );

        CREATE INDEX {temp}_rooms_events ON {temp}_rooms(events);
        CREATE INDEX {temp}_rooms_id ON {temp}_rooms(room_id);
    """.format(temp=TEMP_TABLE).splitlines())

    for statement in stmts:
        txn.execute(statement)

    # Position marker table; created separately since it carries no index.
    sql = (
        "CREATE TABLE IF NOT EXISTS "
        + TEMP_TABLE
        + "_position(position TEXT NOT NULL)"
    )
    txn.execute(sql)

    # Get rooms we want to process from the database, only adding
    # those that we haven't (i.e. those not in room_stats_earliest_token)
    sql = """
        INSERT INTO %s_rooms (room_id, events)
        SELECT c.room_id, count(*) FROM current_state_events AS c
        LEFT JOIN room_stats_earliest_token AS t USING (room_id)
        WHERE t.room_id IS NULL
        GROUP BY c.room_id
    """ % (TEMP_TABLE, )
    txn.execute(sql)
def run_create(cur, database_engine, *args, **kwargs):
    """Deduplicate group_users and group_invites, then apply index fixes.

    Duplicate (group_id, user_id) pairs are removed, keeping the row with
    the smallest physical row identifier, before running the statements in
    FIX_INDEXES.
    """
    # Postgres has no "rowid" pseudo-column; its physical address is "ctid".
    if isinstance(database_engine, PostgresEngine):
        rowid = "ctid"
    else:
        rowid = "rowid"

    # remove duplicates from group_users & group_invites tables
    for table in ("group_users", "group_invites"):
        cur.execute("""
        DELETE FROM %s WHERE %s NOT IN (
           SELECT min(%s) FROM %s GROUP BY group_id, user_id
        );
    """ % (table, rowid, rowid, table))

    for stmt in get_statements(FIX_INDEXES.splitlines()):
        cur.execute(stmt)
def run_create(cur, database_engine, *args, **kwargs):
    """Run the CREATE_TABLE statements, but only on Postgres.

    A no-op for every other database engine.
    """
    if not isinstance(database_engine, PostgresEngine):
        return

    for stmt in get_statements(CREATE_TABLE.splitlines()):
        cur.execute(stmt)