def build_topic_mute_checker(
    cursor: CursorWrapper, user_profile: UserProfile
) -> Callable[[int, str], bool]:
    """
    Build a predicate answering whether (recipient_id, topic) is muted for
    *user_profile*.

    Mirrors the helper of the same name in zerver/lib/topic_mutes.py, but
    reads zerver_usertopic with raw SQL instead of the ORM so it can be used
    inside migrations.
    """
    cursor.execute(
        SQL(
            """
        SELECT recipient_id, topic_name
        FROM zerver_usertopic
        WHERE user_profile_id = %s
    """
        ),
        [user_profile.id],
    )
    # Topic comparisons are case-insensitive, so store lowercase keys once.
    muted_pairs = {
        (recipient_id, topic_name.lower())
        for recipient_id, topic_name in cursor.fetchall()
    }

    def is_muted(recipient_id: int, topic: str) -> bool:
        return (recipient_id, topic.lower()) in muted_pairs

    return is_muted
def _quote_values(self, connection: Connection, cursor: Cursor, values: list[Any]) -> list[str]:
    """
    Escape each value server-side via the quote() SQL function and return the
    quoted literals, for inclusion in a different SQL statement.

    Fixes:
    - an empty *values* no longer builds the invalid statement ``"SELECT "``;
      it simply returns [].
    - the result is materialized as a list, matching the declared return type
      (``cursor.fetchone()`` yields a tuple).
    """
    if not values:
        return []
    # Build "SELECT quote(%s), quote(%s), ..." — one quote() per value.
    sql = "SELECT {}".format(", ".join(["quote(%s)"] * len(values)))
    cursor.execute(sql, values)
    return list(cursor.fetchone())
def migrate_data(cursor: CursorWrapper): cursor.execute(""" insert into video_progress registration_id, category_id select r.id, c.id from registrations as r inner join course co on co.id = r.course_id inner join categories c on c.id=co.category_id; """)
def update_unread_flags(cursor: CursorWrapper, user_message_ids: List[int]) -> None:
    """
    Set the low "read" bit (flags | 1) on the given zerver_usermessage rows.

    Guard: an empty id list would be adapted to ``IN ()``, which is a
    PostgreSQL syntax error — bail out early when there is nothing to do.
    """
    if not user_message_ids:
        return
    query = SQL("""
        UPDATE zerver_usermessage
        SET flags = flags | 1
        WHERE id IN %(user_message_ids)s
    """)
    # psycopg2 adapts a tuple to a parenthesized value list for IN.
    cursor.execute(query, {"user_message_ids": tuple(user_message_ids)})
def remove(self, using: CursorWrapper): chunk_size = self._find_ideal_chunk_size() using.execute( ''' DELETE FROM `performance_datum` WHERE push_timestamp <= %s LIMIT %s ''', [self._max_timestamp, chunk_size], )
def copy_from_temp_table(cursor: CursorWrapper): cursor.execute(''' INSERT INTO backend_datapoint(plant_id, datetime_generated, energy_expected, energy_observed, irradiation_expected, irradiation_observed) SELECT td.plant_id, td.datetime_generated, td.energy_expected, td.energy_observed, td.irradiation_expected, td.irradiation_observed FROM temp_datapoint td ON CONFLICT(plant_id, datetime_generated) DO UPDATE SET energy_expected = EXCLUDED.energy_expected, energy_observed = EXCLUDED.energy_observed, irradiation_expected = EXCLUDED.irradiation_observed ''')
def create_destroy_datapoint_table(cursor: CursorWrapper): """Context manager for creating and dropping temp tables""" cursor.execute(''' DROP TABLE IF EXISTS temp_datapoint; CREATE TEMPORARY TABLE temp_datapoint AS SELECT * FROM backend_datapoint LIMIT 0; ''') try: yield finally: cursor.execute(''' DROP TABLE IF EXISTS temp_datapoint; ''')
def remove(self, using: CursorWrapper): chunk_size = self._find_ideal_chunk_size() using.execute( ''' DELETE FROM `performance_datum` WHERE (repository_id NOT IN %s) AND push_timestamp <= %s LIMIT %s ''', [ tuple(self.relevant_repositories), self._max_timestamp, chunk_size, ], )
def remove(self, using: CursorWrapper): chunk_size = self._find_ideal_chunk_size() # Django's queryset API doesn't support MySQL's # DELETE statements with LIMIT constructs, # even though this database is capable of doing that. # # If ever this support is added in Django, replace # raw SQL bellow with equivalent queryset commands. using.execute( ''' DELETE FROM `performance_datum` WHERE push_timestamp <= %s LIMIT %s ''', [self._max_timestamp, chunk_size], )
def __attempt_remove(self, using: CursorWrapper):
    """
    Try to delete one chunk of expired performance_datum rows belonging to
    the target signature.
    """
    # Django's queryset API cannot express MySQL's DELETE ... LIMIT, even
    # though the database supports it; keep this as raw SQL until Django
    # grows that capability, then replace it with queryset commands.
    params = [
        self.target_signature.repository_id,
        self.target_signature.id,
        self._max_timestamp,
        self._chunk_size,
    ]
    using.execute(
        '''
        DELETE FROM `performance_datum`
        WHERE repository_id = %s
        AND signature_id = %s
        AND push_timestamp <= %s
        LIMIT %s
        ''',
        params,
    )
def _execute(
    self,
    connection: Connection,
    cursor: Cursor,
    sql: str,
    args: list[Any],
    force_manual_escape: bool = False,
) -> None:
    """
    Run *sql* with *args* on *cursor*.

    When force_manual_escape is set, each parameter is escaped server-side
    via the quote() SQL function and spliced directly into the statement
    text, so the final execute() receives no bound parameters at all.
    """
    if force_manual_escape:
        quoted = self._quote_values(connection, cursor, args)
        # Rebuild the statement by interleaving each text fragment with its
        # escaped value, then appending the trailing fragment.
        fragments = sql.split("%s")
        interleaved = [
            fragment + literal
            for fragment, literal in zip(fragments[:-1], quoted)
        ]
        sql = "".join(interleaved) + fragments[-1]
        # The values are now inlined, so nothing is left to bind.
        args = []
    # Execute using the vanilla cursor.
    return cursor.execute(sql, args)
def do_batch_update(
    cursor: CursorWrapper,
    table: str,
    assignments: List[Composable],
    batch_size: int = 10000,
    sleep: float = 0.1,
) -> None:  # nocoverage
    """
    Apply *assignments* to every row of *table*, batch_size ids at a time,
    sleeping between batches so a long-running UPDATE doesn't monopolize the
    database.  After each batch the maximum id is re-read, so rows inserted
    while the update is running are also covered.
    """
    # The string substitution below is complicated by our need to
    # support multiple PostgreSQL versions.
    stmt = SQL("""
        UPDATE {}
        SET {}
        WHERE id >= %s AND id < %s
    """).format(
        Identifier(table),
        SQL(", ").join(assignments),
    )

    cursor.execute(
        SQL("SELECT MIN(id), MAX(id) FROM {}").format(Identifier(table)))
    (min_id, max_id) = cursor.fetchone()
    if min_id is None:
        # Empty table: nothing to update.
        return

    print(f"\n Range of rows to update: [{min_id}, {max_id}]")
    while min_id <= max_id:
        # Half-open id range [lower, upper) for this batch.
        lower = min_id
        upper = min_id + batch_size
        print(f" Updating range [{lower},{upper})")
        cursor.execute(stmt, [lower, upper])

        min_id = upper
        time.sleep(sleep)

        # Once we've finished, check if any new rows were inserted to the table
        if min_id > max_id:
            cursor.execute(
                SQL("SELECT MAX(id) FROM {}").format(Identifier(table)))
            (max_id, ) = cursor.fetchone()

    print(" Finishing...", end="")