def setUpBeforeMigration(self, apps: StateApps) -> None:
    """Test fixture: send one stream message and attach one realm-emoji
    reaction per existing RealmEmoji, so the migration under test has
    data to operate on.
    """
    Reaction = apps.get_model('zerver', 'Reaction')
    RealmEmoji = apps.get_model('zerver', 'RealmEmoji')
    Message = apps.get_model('zerver', 'Message')
    Recipient = apps.get_model('zerver', 'Recipient')
    sender = self.example_user('iago')
    realm = sender.realm
    sending_client = make_client(name="test suite")
    stream_name = 'Denmark'
    stream = get_stream(stream_name, realm)
    subject = 'foo'

    def send_fake_message(message_content: str, stream: ModelBase) -> ModelBase:
        # type=2 is Recipient.STREAM (model constants aren't available
        # on historical models inside migrations/tests of migrations).
        recipient = Recipient.objects.get(type_id=stream.id, type=2)
        return Message.objects.create(sender=sender,
                                      recipient=recipient,
                                      subject=subject,
                                      content=message_content,
                                      pub_date=timezone_now(),
                                      sending_client=sending_client)

    message = send_fake_message('Test 1', stream)
    # Create reactions for all the realm emoji's on the message we faked.
    # Note the emoji_code is deliberately set to the emoji *name*: the
    # migration being tested converts names to ids.
    for realm_emoji in RealmEmoji.objects.all():
        reaction = Reaction(user_profile=sender, message=message,
                            emoji_name=realm_emoji.name,
                            emoji_code=realm_emoji.name,
                            reaction_type='realm_emoji')
        reaction.save()
    # Sanity-check the fixture: exactly one realm-emoji reaction exists.
    realm_emoji_reactions_count = Reaction.objects.filter(reaction_type='realm_emoji').count()
    self.assertEqual(realm_emoji_reactions_count, 1)
def set_initial_value_of_is_private_flag(
        apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Set the is_private flag on every UserMessage belonging to a
    private-message conversation, walking Message ids in 10000-id chunks
    to keep each UPDATE bounded.
    """
    UserMessage = apps.get_model("zerver", "UserMessage")
    Message = apps.get_model("zerver", "Message")
    if not Message.objects.exists():
        return

    i = 0
    # Total is only used for the progress bar
    total = Message.objects.filter(recipient__type__in=[1, 3]).count()
    processed = 0
    print("\nStart setting initial value for is_private flag...")
    sys.stdout.flush()
    while True:
        range_end = i + 10000
        # Can't use [Recipient.PERSONAL, Recipient.HUDDLE] in migration files
        message_ids = list(Message.objects.filter(recipient__type__in=[1, 3],
                                                  id__gt=i,
                                                  id__lte=range_end)
                           .values_list("id", flat=True).order_by("id"))
        count = UserMessage.objects.filter(message_id__in=message_ids).update(
            flags=F('flags').bitor(UserMessage.flags.is_private))
        # Stop only when this chunk updated nothing AND we've walked past
        # the highest message id (empty chunks mid-range just advance).
        if count == 0 and range_end >= Message.objects.last().id:
            break
        i = range_end
        processed += len(message_ids)
        if total != 0:
            percent = round((processed / total) * 100, 2)
        else:
            percent = 100.00
        print("Processed %s/%s %s%%" % (processed, total, percent))
        sys.stdout.flush()
def add_domain_to_realm_alias_if_needed(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Ensure every realm's own domain has a corresponding RealmAlias row."""
    Realm = apps.get_model('zerver', 'Realm')
    RealmAlias = apps.get_model('zerver', 'RealmAlias')
    for realm in Realm.objects.all():
        # If realm.domain already exists in RealmAlias, assume it is correct.
        alias_exists = RealmAlias.objects.filter(domain=realm.domain).exists()
        if not alias_exists:
            RealmAlias.objects.create(realm=realm, domain=realm.domain)
def convert_muted_topics(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Convert each user's JSON-encoded muted_topics list into rows of the
    new MutedTopic table, resolving (stream name, realm) pairs to stream
    and recipient ids via one raw SQL query.
    """
    # Raw SQL: fetch (name, realm_id, stream_id, recipient_id) for every
    # stream; type = 2 is Recipient.STREAM (constant unavailable here).
    stream_query = ''' SELECT zerver_stream.name, zerver_stream.realm_id, zerver_stream.id, zerver_recipient.id FROM zerver_stream INNER JOIN zerver_recipient ON ( zerver_recipient.type_id = zerver_stream.id AND zerver_recipient.type = 2 ) '''
    # Keyed by (lowercased stream name, realm_id) for case-insensitive lookup.
    stream_dict = {}
    with connection.cursor() as cursor:
        cursor.execute(stream_query)
        rows = cursor.fetchall()
    for (stream_name, realm_id, stream_id, recipient_id) in rows:
        stream_name = stream_name.lower()
        stream_dict[(stream_name, realm_id)] = (stream_id, recipient_id)

    UserProfile = apps.get_model("zerver", "UserProfile")
    MutedTopic = apps.get_model("zerver", "MutedTopic")
    new_objs = []
    user_query = UserProfile.objects.values('id', 'realm_id', 'muted_topics')
    for row in user_query:
        user_profile_id = row['id']
        realm_id = row['realm_id']
        muted_topics = row['muted_topics']
        # muted_topics is a JSON list of [stream_name, topic_name] pairs.
        tups = ujson.loads(muted_topics)
        for (stream_name, topic_name) in tups:
            stream_name = stream_name.lower()
            val = stream_dict.get((stream_name, realm_id))
            # Silently drop entries for streams that no longer exist.
            if val is not None:
                stream_id, recipient_id = val
                muted_topic = MutedTopic(
                    user_profile_id=user_profile_id,
                    stream_id=stream_id,
                    recipient_id=recipient_id,
                    topic_name=topic_name,
                )
                new_objs.append(muted_topic)
    # Idempotency: wipe any rows from a previous (partial) run before
    # bulk-inserting the converted data.
    with connection.cursor() as cursor:
        cursor.execute('DELETE from zerver_mutedtopic')
    MutedTopic.objects.bulk_create(new_objs)
def backfill_first_message_id(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Populate Stream.first_message_id from the earliest message in each stream."""
    Stream = apps.get_model('zerver', 'Stream')
    Message = apps.get_model('zerver', 'Message')
    for stream in Stream.objects.all():
        # type=2 is Recipient.STREAM (constants aren't available in migrations).
        earliest = Message.objects.filter(
            recipient__type_id=stream.id, recipient__type=2).first()
        # If there is no message, the default of None is already correct.
        if earliest is not None:
            stream.first_message_id = earliest.id
            stream.save()
def clear_message_sent_by_message_type_values(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Delete all analytics rows (and fill state) for the
    messages_sent:message_type:day stat across every count table."""
    stat_property = 'messages_sent:message_type:day'
    for model_name in ('UserCount', 'StreamCount', 'RealmCount',
                       'InstallationCount', 'FillState'):
        model = apps.get_model('analytics', model_name)
        model.objects.filter(property=stat_property).delete()
def delete_messages_sent_to_stream_stat(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Delete all analytics rows (and fill state) for the
    messages_sent_to_stream:is_bot stat across every count table."""
    stat_property = 'messages_sent_to_stream:is_bot'
    for model_name in ('UserCount', 'StreamCount', 'RealmCount',
                       'InstallationCount', 'FillState'):
        model = apps.get_model('analytics', model_name)
        model.objects.filter(property=stat_property).delete()
def backfill_user_activations_and_deactivations(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill RealmAuditLog entries recording user creation and
    deactivation events that predate audit logging."""
    migration_time = timezone_now()
    RealmAuditLog = apps.get_model('zerver', 'RealmAuditLog')
    UserProfile = apps.get_model('zerver', 'UserProfile')
    # One 'user_created' entry per user, dated to when they joined.
    for profile in UserProfile.objects.all():
        RealmAuditLog.objects.create(
            realm=profile.realm,
            modified_user=profile,
            event_type='user_created',
            event_time=profile.date_joined,
            backfilled=False,
        )
    # One 'user_deactivated' entry per currently-deactivated user; we don't
    # know when they were deactivated, so use the migration time and mark
    # the entry as backfilled.
    for profile in UserProfile.objects.filter(is_active=False):
        RealmAuditLog.objects.create(
            realm=profile.realm,
            modified_user=profile,
            event_type='user_deactivated',
            event_time=migration_time,
            backfilled=True,
        )
def populate_is_zephyr(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Flag every stream belonging to a realm with string_id 'zephyr' as
    being in a Zephyr-mirror realm."""
    Realm = apps.get_model("zerver", "Realm")
    Stream = apps.get_model("zerver", "Stream")
    for zephyr_realm in Realm.objects.filter(string_id='zephyr'):
        Stream.objects.filter(realm_id=zephyr_realm.id).update(is_in_zephyr_realm=True)
def set_realm_for_existing_scheduledemails(
        apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill ScheduledEmail.realm from the matching PreregistrationUser
    (for invitation reminders) or the recipient user's realm; drop any
    rows that could not be matched."""
    ScheduledEmail = apps.get_model("zerver", "ScheduledEmail")
    PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
    for email in ScheduledEmail.objects.all():
        if email.type == 3:  # ScheduledEmail.INVITATION_REMINDER
            # Don't think this can be None, but just be safe
            prereg = PreregistrationUser.objects.filter(email=email.address).first()
            if prereg is not None:
                email.realm = prereg.realm
        else:
            email.realm = email.user.realm
        email.save(update_fields=['realm'])
    # Shouldn't be needed, but just in case
    ScheduledEmail.objects.filter(realm=None).delete()
def render_all_stream_descriptions(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill Stream.rendered_description by rendering every nonempty
    stream description through bugdown."""
    Stream = apps.get_model('zerver', 'Stream')
    for stream in Stream.objects.exclude(description=''):
        rendered = bugdown_convert(stream.description, no_previews=True)
        stream.rendered_description = rendered
        stream.save(update_fields=["rendered_description"])
def set_initial_value_for_signup_notifications_stream(
        apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Initialize signup_notifications_stream to each realm's existing
    notifications stream, for realms that have one."""
    Realm = apps.get_model("zerver", "Realm")
    for realm in Realm.objects.exclude(notifications_stream__isnull=True):
        realm.signup_notifications_stream = realm.notifications_stream
        realm.save(update_fields=["signup_notifications_stream"])
def reset_is_private_flag(
        apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Clear the is_private flag (bit 2048) on every UserMessage, in
    1000-row batches per user, printing progress as it goes.
    """
    UserMessage = apps.get_model("zerver", "UserMessage")
    UserProfile = apps.get_model("zerver", "UserProfile")
    user_profile_ids = UserProfile.objects.all().order_by("id").values_list("id", flat=True)
    # We only need to do this because previous migration
    # zerver/migrations/0100_usermessage_remove_is_me_message.py
    # didn't clean the field after removing it.
    i = 0
    total = len(user_profile_ids)
    print("Setting default values for the new flag...")
    sys.stdout.flush()
    for user_id in user_profile_ids:
        while True:
            # Ideally, we'd just do a single database query per user.
            # Unfortunately, Django doesn't use the fancy new index on
            # is_private that we just generated if we do that,
            # resulting in a very slow migration that could take hours
            # on a large server.  We address this issue by doing a bit
            # of hackery to generate the SQL just right (with an
            # `ORDER BY` clause that forces using the new index).
            flag_set_objects = UserMessage.objects.filter(user_profile__id = user_id).extra(
                where=["flags & 2048 != 0"]).order_by("message_id")[0:1000]
            user_message_ids = flag_set_objects.values_list("id", flat=True)
            count = UserMessage.objects.filter(id__in=user_message_ids).update(
                flags=F('flags').bitand(~UserMessage.flags.is_private))
            # A short batch means this user has no more flagged rows.
            if count < 1000:
                break
        i += 1
        # Progress line every 50 users (and at the end).
        if (i % 50 == 0 or i == total):
            percent = round((i / total) * 100, 2)
            print("Processed %s/%s %s%%" % (i, total, percent))
            sys.stdout.flush()
def set_initial_value_for_signup_notifications_stream(
    apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Initialize signup_notifications_stream to each realm's existing
    notifications stream, for realms that have one."""
    Realm = apps.get_model("zerver", "Realm")
    for realm in Realm.objects.exclude(notifications_stream__isnull=True):
        realm.signup_notifications_stream = realm.notifications_stream
        realm.save(update_fields=["signup_notifications_stream"])
def reversal(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Restore correct emoji file names so the forward migration can be
    re-run in case of a failure."""
    RealmEmoji = apps.get_model("zerver", "RealmEmoji")
    for emoji in RealmEmoji.objects.all():
        fixed_name = get_emoji_file_name(emoji.file_name, emoji.name)
        emoji.file_name = fixed_name
        emoji.save(update_fields=["file_name"])
def backfill_remote_zulip_server_creation_log_events(
        apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Create a backfilled REMOTE_SERVER_CREATED audit-log row for every
    existing remote server, dated to its last_updated time."""
    RemoteZulipServer = apps.get_model("zilencer", "RemoteZulipServer")
    RemoteZulipServerAuditLog = apps.get_model("zilencer", "RemoteZulipServerAuditLog")
    # Constant mirrored here; model constants aren't available in migrations.
    RemoteZulipServerAuditLog.REMOTE_SERVER_CREATED = 10215
    log_entries = [
        RemoteZulipServerAuditLog(
            server=server,
            event_type=RemoteZulipServerAuditLog.REMOTE_SERVER_CREATED,
            event_time=server.last_updated,
            backfilled=True,
        )
        for server in RemoteZulipServer.objects.all()
    ]
    RemoteZulipServerAuditLog.objects.bulk_create(log_entries)
def set_initial_value_for_bot_creation_policy(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Translate the old create_generic_bot_by_admins_only flag into the
    new bot_creation_policy enum."""
    Realm = apps.get_model("zerver", "Realm")
    # 1 = BOT_CREATION_EVERYONE, 2 = BOT_CREATION_LIMIT_GENERIC_BOTS
    for realm in Realm.objects.all():
        realm.bot_creation_policy = 2 if realm.create_generic_bot_by_admins_only else 1
        realm.save(update_fields=["bot_creation_policy"])
def reverse_code(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Recompute the old create_generic_bot_by_admins_only flag from the
    bot_creation_policy enum."""
    Realm = apps.get_model("zerver", "Realm")
    for realm in Realm.objects.all():
        # 1 is BOT_CREATION_EVERYONE; any other policy restricted generic bots.
        realm.create_generic_bot_by_admins_only = realm.bot_creation_policy != 1
        realm.save(update_fields=["create_generic_bot_by_admins_only"])
def set_initial_value_for_bot_creation_policy(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Translate the old create_generic_bot_by_admins_only flag into the
    new bot_creation_policy enum."""
    Realm = apps.get_model("zerver", "Realm")
    # 1 = BOT_CREATION_EVERYONE, 2 = BOT_CREATION_LIMIT_GENERIC_BOTS
    for realm in Realm.objects.all():
        realm.bot_creation_policy = 2 if realm.create_generic_bot_by_admins_only else 1
        realm.save(update_fields=["bot_creation_policy"])
def reverse_code(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Recompute the old create_generic_bot_by_admins_only flag from the
    bot_creation_policy enum."""
    Realm = apps.get_model("zerver", "Realm")
    for realm in Realm.objects.all():
        # 1 is BOT_CREATION_EVERYONE; any other policy restricted generic bots.
        realm.create_generic_bot_by_admins_only = realm.bot_creation_policy != 1
        realm.save(update_fields=["create_generic_bot_by_admins_only"])
def reverse_make_zero_invalid_for_message_delete_limit(
        apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
    """Reverse migration: map the new 'no time limit' value (None) back
    to the old sentinel of 0 for realms that allow message deletion."""
    Realm = apps.get_model("zerver", "Realm")
    # Constant mirrored from the Realm model for use by migration machinery.
    Realm.DEFAULT_MESSAGE_CONTENT_DELETE_LIMIT_SECONDS = 600
    unlimited = Realm.objects.filter(
        allow_message_deleting=True,
        message_content_delete_limit_seconds=None,
    )
    unlimited.update(message_content_delete_limit_seconds=0)
def copy_user_phone_in_appointment(apps: StateApps, schema_editor: PostGISSchemaEditor):
    """Denormalize the client's phone number onto appointments that are
    linked to a client but have no phone recorded yet."""
    Appointment = apps.get_model('appointment', 'Appointment')
    for appointment in Appointment.objects.exclude(client_id=None):
        # Skip appointments that already have a phone, or whose linked
        # client record is missing.
        if appointment.client_phone or not appointment.client.client:
            continue
        appointment.client_phone = appointment.client.client.user.phone
        appointment.save()
def delete_old_scheduled_jobs(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Delete any old scheduled jobs, to handle changes in the format of
    that table.  Ideally, we'd translate the jobs, but it's not really
    worth the development effort to save a few invitation reminders and
    day2 followup emails.
    """
    apps.get_model('zerver', 'ScheduledJob').objects.all().delete()
def reversal(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Restore correct emoji file names so the forward migration can be
    re-run in case of a failure."""
    RealmEmoji = apps.get_model('zerver', 'RealmEmoji')
    for emoji in RealmEmoji.objects.all():
        fixed_name = get_emoji_file_name(emoji.file_name, emoji.name)
        emoji.file_name = fixed_name
        emoji.save(update_fields=['file_name'])
def create_default_allauth(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor):
    """Seed a default user together with a verified, primary allauth
    EmailAddress record for it."""
    User = apps.get_model('auth', 'User')  # noqa: N806
    EmailAddress = apps.get_model('account', 'EmailAddress')  # noqa: N806
    default_user = User.objects.create_user(
        username='******',
        email='*****@*****.**',
        password='******',
        first_name='John',
        last_name='Doe',
    )
    EmailAddress.objects.create(
        user=default_user,
        email=default_user.email,
        primary=True,
        verified=True,
    )
def set_initial_value_for_invited_as(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill the invited_as enum from the old invited_as_admin boolean."""
    PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
    # 1 = INVITE_AS['REALM_ADMIN'], 2 = INVITE_AS['MEMBER']
    for prereg in PreregistrationUser.objects.all():
        prereg.invited_as = 1 if prereg.invited_as_admin else 2
        prereg.save(update_fields=["invited_as"])
def change_default_tax_and_card_fee(apps: StateApps, schema_editor: PostGISSchemaEditor):
    """Reset every stylist's tax rate and card fee to the current defaults."""
    Stylist = apps.get_model('salon', 'Stylist')
    for stylist in Stylist.objects.all():
        stylist.card_fee = DEFAULT_CARD_FEE
        stylist.tax_rate = DEFAULT_TAX_RATE
        stylist.save()
def set_initial_value_for_is_muted(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Initialize is_muted as the negation of the legacy in_home_view flag,
    in a single CASE-expression UPDATE."""
    Subscription = apps.get_model("zerver", "Subscription")
    Subscription.objects.update(
        is_muted=Case(
            When(in_home_view=False, then=Value(True)),
            When(in_home_view=True, then=Value(False)),
        )
    )
def rollback_default_site(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor):
    """Restore the default Site object to the initial values the sites app
    populates it with."""
    Site = apps.get_model('sites', 'Site')  # noqa: N806
    # If the default site doesn't exist for some reason, there is nothing
    # to roll back (filter().update() is then a no-op).
    Site.objects.filter(pk=settings.SITE_ID).update(name='example.com', domain='example.com')
def remove_google_hangouts_provider(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Move realms off the Google Hangouts video chat integration.

    Google has removed the Hangouts brand, so the integration is being
    removed; affected realms are switched to the default provider, Jitsi.
    """
    Realm = apps.get_model('zerver', 'Realm')
    hangouts_id = VIDEO_CHAT_PROVIDERS['google_hangouts']['id']
    jitsi_id = VIDEO_CHAT_PROVIDERS['jitsi_meet']['id']
    Realm.objects.filter(video_chat_provider=hangouts_id).update(video_chat_provider=jitsi_id)
def update_existing_video_chat_provider_values(
        apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Convert the old name-based video_chat_provider field into the new
    id-based representation."""
    Realm = apps.get_model('zerver', 'Realm')
    for realm in Realm.objects.all():
        provider = get_video_chat_provider_detail(
            VIDEO_CHAT_PROVIDERS, p_name=realm.video_chat_provider_old)
        realm.video_chat_provider = provider['id']
        realm.save(update_fields=["video_chat_provider"])
def reverse_code(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Convert the id-based video_chat_provider field back into the old
    name-based representation."""
    Realm = apps.get_model("zerver", "Realm")
    for realm in Realm.objects.all():
        provider = get_video_chat_provider_detail(
            VIDEO_CHAT_PROVIDERS, p_id=realm.video_chat_provider)
        realm.video_chat_provider_old = provider['name']
        realm.save(update_fields=["video_chat_provider_old"])
def reverse_code(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Recompute the old invited_as_admin boolean from the invited_as enum."""
    PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
    for prereg in PreregistrationUser.objects.all():
        # 1 is PreregistrationUser.REALM_ADMIN; everything else was NORMAL_USER.
        prereg.invited_as_admin = prereg.invited_as == 1
        prereg.save(update_fields=["invited_as_admin"])
def move_to_seperate_table(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Move each user's JSON-encoded alert words into rows of the new
    AlertWord table."""
    UserProfile = apps.get_model('zerver', 'UserProfile')
    AlertWord = apps.get_model('zerver', 'AlertWord')
    for user_profile in UserProfile.objects.all():
        words = ujson.loads(user_profile.alert_words)
        # The AlertWord model is case-insensitive; keep the last spelling
        # seen for each lowercased form to drop duplicates.
        deduped = {}  # type: Dict[str, str]
        for word in words:
            deduped[word.lower()] = word
        AlertWord.objects.bulk_create(
            AlertWord(user_profile=user_profile, word=word, realm=user_profile.realm)
            for word in deduped.values()
        )
def set_realm_for_existing_scheduledemails(
        apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill ScheduledEmail.realm from the matching PreregistrationUser
    (for invitation reminders) or the recipient user's realm; drop any
    rows that could not be matched."""
    ScheduledEmail = apps.get_model("zerver", "ScheduledEmail")
    PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
    for email in ScheduledEmail.objects.all():
        if email.type == 3:  # ScheduledEmail.INVITATION_REMINDER
            # Don't think this can be None, but just be safe
            prereg = PreregistrationUser.objects.filter(email=email.address).first()
            if prereg is not None:
                email.realm = prereg.realm
        else:
            email.realm = email.user.realm
        email.save(update_fields=['realm'])
    # Shouldn't be needed, but just in case
    ScheduledEmail.objects.filter(realm=None).delete()
def set_initial_value_for_invited_as(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill the invited_as enum from the old invited_as_admin boolean."""
    PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
    # 1 = PreregistrationUser.REALM_ADMIN, 2 = PreregistrationUser.NORMAL_USER
    for prereg in PreregistrationUser.objects.all():
        prereg.invited_as = 1 if prereg.invited_as_admin else 2
        prereg.save(update_fields=["invited_as"])
def reverse_code(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Recompute the old invited_as_admin boolean from the invited_as enum."""
    PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
    for prereg in PreregistrationUser.objects.all():
        # 1 is INVITE_AS['REALM_ADMIN']; everything else maps to INVITE_AS['MEMBER'].
        prereg.invited_as_admin = prereg.invited_as == 1
        prereg.save(update_fields=["invited_as_admin"])
def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """This is a preparatory migration for our Analytics tables.

    The backstory is that Django's unique_together indexes do not properly
    handle the subgroup=None corner case (allowing duplicate rows that have a
    subgroup of None), which meant that in race conditions, rather than updating
    an existing row for the property/(realm, stream, user)/time with subgroup=None,
    Django would create a duplicate row.

    In the next migration, we'll add a proper constraint to fix this bug, but
    we need to fix any existing problematic rows before we can add that constraint.

    We fix this in an appropriate fashion for each type of CountStat object; mainly
    this means deleting the extra rows, but for LoggingCountStat objects, we need to
    additionally combine the sums.
    """
    count_tables = dict(realm=apps.get_model('analytics', 'RealmCount'),
                        user=apps.get_model('analytics', 'UserCount'),
                        stream=apps.get_model('analytics', 'StreamCount'),
                        installation=apps.get_model('analytics', 'InstallationCount'))

    for name, count_table in count_tables.items():
        # Group by the columns that *should* be unique for subgroup=None rows;
        # InstallationCount has no object column, only property/end_time.
        value = [name, 'property', 'end_time']
        if name == 'installation':
            value = ['property', 'end_time']
        # Find groups with more than one row: these are the duplicates.
        counts = count_table.objects.filter(subgroup=None).values(*value).annotate(
            Count('id'), Sum('value')).filter(id__count__gt=1)
        for count in counts:
            # Strip the aggregates so `count` is usable as a filter(**count).
            count.pop('id__count')
            total_value = count.pop('value__sum')
            duplicate_counts = list(count_table.objects.filter(**count))
            first_count = duplicate_counts[0]
            if count['property'] in ["invites_sent::day", "active_users_log:is_bot:day"]:
                # For LoggingCountStat objects, the right fix is to combine the totals;
                # for other CountStat objects, we expect the duplicates to have the same value.
                # And so all we need to do is delete them.
                first_count.value = total_value
                first_count.save()
            # Keep the first row, delete the rest.
            to_cleanup = duplicate_counts[1:]
            for duplicate_count in to_cleanup:
                duplicate_count.delete()
def render_all_stream_descriptions(
        apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill Stream.rendered_description for every stream that has a
    nonempty description."""
    Stream = apps.get_model('zerver', 'Stream')
    for stream in Stream.objects.exclude(description=''):
        stream.rendered_description = render_stream_description(stream.description)
        stream.save(update_fields=["rendered_description"])
def backfill_realm_creation_log_events(
        apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
    """Create a backfilled REALM_CREATED audit-log entry for every realm,
    dated to the realm's creation time."""
    RealmAuditLog = apps.get_model("zerver", "RealmAuditLog")
    # Constant mirrored here; model constants aren't available in migrations.
    RealmAuditLog.REALM_CREATED = 215
    Realm = apps.get_model("zerver", "Realm")
    entries = [
        RealmAuditLog(
            realm=realm,
            event_type=RealmAuditLog.REALM_CREATED,
            event_time=realm.date_created,
            backfilled=True,
        )
        for realm in Realm.objects.all()
    ]
    RealmAuditLog.objects.bulk_create(entries)
def reverse_code(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
    """Delete the backfilled REMOTE_SERVER_CREATED audit-log rows."""
    RemoteZulipServerAuditLog = apps.get_model("zilencer", "RemoteZulipServerAuditLog")
    # Constant mirrored here; model constants aren't available in migrations.
    RemoteZulipServerAuditLog.REMOTE_SERVER_CREATED = 10215
    backfilled_rows = RemoteZulipServerAuditLog.objects.filter(
        backfilled=True,
        event_type=RemoteZulipServerAuditLog.REMOTE_SERVER_CREATED,
    )
    backfilled_rows.delete()
def fix_stream_names(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
    """Replace non-printable characters in stream names with U+FFFD,
    handling names that become empty and collisions with existing names.
    """
    Stream = apps.get_model("zerver", "Stream")
    Realm = apps.get_model("zerver", "Realm")
    total_fixed_count = 0
    realm_ids = Realm.objects.values_list("id", flat=True)
    if len(realm_ids) == 0:
        return
    print("")
    for realm_id in realm_ids:
        print(f"Processing realm {realm_id}")
        realm_stream_dicts = Stream.objects.filter(realm_id=realm_id).values("id", "name")
        # Track all names in use in this realm (including fixes we make
        # below) so renames can't collide with each other.
        occupied_stream_names = {stream_dict["name"] for stream_dict in realm_stream_dicts}

        for stream_dict in realm_stream_dicts:
            stream_name = stream_dict["name"]
            fixed_stream_name = "".join(
                [
                    character if character_is_printable(character) else "\N{REPLACEMENT CHARACTER}"
                    for character in stream_name
                ]
            )

            if fixed_stream_name == stream_name:
                continue

            if fixed_stream_name == "":
                fixed_stream_name = "(no name)"

            # The process of stripping invalid characters can lead to collisions,
            # with the new stream name being the same as the name of another existing stream.
            # We append underscore until the name no longer conflicts.
            while fixed_stream_name in occupied_stream_names:
                fixed_stream_name += "_"

            occupied_stream_names.add(fixed_stream_name)
            total_fixed_count += 1
            # Raw SQL keeps this fast and avoids historical-model save logic.
            with connection.cursor() as cursor:
                cursor.execute(
                    "UPDATE zerver_stream SET name = %s WHERE id = %s",
                    [fixed_stream_name, stream_dict["id"]],
                )

    print(f"Fixed {total_fixed_count} stream names")
def set_realm_admins_as_realm_owners(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Promote every active realm administrator to the new realm-owner
    role, writing a USER_ROLE_CHANGED audit-log entry for each promotion.
    """
    UserProfile = apps.get_model('zerver', 'UserProfile')
    RealmAuditLog = apps.get_model('zerver', 'RealmAuditLog')
    # Constants mirrored from the real models; historical models in
    # migrations don't carry class attributes.
    UserProfile.ROLE_REALM_OWNER = 100
    UserProfile.ROLE_REALM_ADMINISTRATOR = 200
    UserProfile.ROLE_MEMBER = 400
    UserProfile.ROLE_GUEST = 600
    RealmAuditLog.USER_ROLE_CHANGED = 105
    RealmAuditLog.OLD_VALUE = '1'
    RealmAuditLog.NEW_VALUE = '2'
    RealmAuditLog.ROLE_COUNT = '10'
    RealmAuditLog.ROLE_COUNT_HUMANS = '11'
    RealmAuditLog.ROLE_COUNT_BOTS = '12'

    def realm_user_count_by_role(realm: Any) -> Dict[str, Any]:
        # Snapshot of the realm's active-user role distribution, stored in
        # the audit-log entry's extra_data.
        human_counts = {UserProfile.ROLE_REALM_ADMINISTRATOR: 0,
                        UserProfile.ROLE_REALM_OWNER: 0,
                        UserProfile.ROLE_MEMBER: 0,
                        UserProfile.ROLE_GUEST: 0}
        for value_dict in list(UserProfile.objects.filter(
                realm=realm, is_bot=False, is_active=True).values('role').annotate(Count('role'))):
            human_counts[value_dict['role']] = value_dict['role__count']
        bot_count = UserProfile.objects.filter(realm=realm, is_bot=True, is_active=True).count()
        return {
            RealmAuditLog.ROLE_COUNT_HUMANS: human_counts,
            RealmAuditLog.ROLE_COUNT_BOTS: bot_count,
        }

    objects_to_create = []
    for user in UserProfile.objects.filter(is_active=True, role=UserProfile.ROLE_REALM_ADMINISTRATOR):
        # Save the role change immediately; audit-log rows are batched.
        user.role = UserProfile.ROLE_REALM_OWNER
        user.save(update_fields=['role'])
        audit_log_entry = RealmAuditLog(
            realm=user.realm,
            modified_user=user,
            event_type=RealmAuditLog.USER_ROLE_CHANGED,
            event_time=timezone_now(),
            extra_data=ujson.dumps({
                RealmAuditLog.OLD_VALUE: UserProfile.ROLE_REALM_ADMINISTRATOR,
                RealmAuditLog.NEW_VALUE: UserProfile.ROLE_REALM_OWNER,
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user.realm),
            }))
        objects_to_create.append(audit_log_entry)
    RealmAuditLog.objects.bulk_create(objects_to_create)
def remove_name_illegal_chars(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Strip explicitly-disallowed characters and Unicode 'C'-category
    (control/format/etc.) characters from every user's full name."""
    UserProfile = apps.get_model("zerver", "UserProfile")
    for user in UserProfile.objects.all():
        cleaned = "".join(
            char for char in user.full_name
            if (char not in NAME_INVALID_CHARS) and (category(char)[0] != "C")
        )
        user.full_name = cleaned
        user.save(update_fields=["full_name"])
def migrate_existing_attachment_data(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill Attachment.realm and Attachment.is_realm_public from the
    attachment's owner and the messages it appears in."""
    Attachment = apps.get_model('zerver', 'Attachment')
    Recipient = apps.get_model('zerver', 'Recipient')
    Stream = apps.get_model('zerver', 'Stream')

    for attachment in Attachment.objects.all():
        attachment.realm = attachment.owner.realm
        for message in attachment.messages.all():
            # Only messages sent by the owner can make the attachment public.
            if message.sender != attachment.owner:
                continue
            if message.recipient.type != Recipient.STREAM:
                continue
            stream = Stream.objects.get(id=message.recipient.type_id)
            # Public iff posted to a non-invite-only stream outside a
            # Zephyr mirror realm.
            publicly_shared = not stream.realm.is_zephyr_mirror_realm and not stream.invite_only
            attachment.is_realm_public = attachment.is_realm_public or publicly_shared
        attachment.save()
def migrate_twenty_four_hour_time_to_realmuserdefault(
        apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Copy each realm's default_twenty_four_hour_time onto its
    RealmUserDefault row, touching only rows that don't already match."""
    RealmUserDefault = apps.get_model("zerver", "RealmUserDefault")
    mismatched = RealmUserDefault.objects.exclude(
        twenty_four_hour_time=F("realm__default_twenty_four_hour_time"))
    for user_default in mismatched:
        user_default.twenty_four_hour_time = user_default.realm.default_twenty_four_hour_time
        user_default.save(update_fields=["twenty_four_hour_time"])
def move_back_to_user_profile(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Reverse migration: collapse AlertWord rows back into the
    JSON-encoded UserProfile.alert_words field."""
    AlertWord = apps.get_model('zerver', 'AlertWord')
    UserProfile = apps.get_model('zerver', 'UserProfile')

    words_by_user = {}  # type: Dict[int, List[str]]
    for row in AlertWord.objects.all().values("user_profile_id", "word"):
        words_by_user.setdefault(row["user_profile_id"], []).append(row["word"])

    for user_id, words in words_by_user.items():
        user_profile = UserProfile.objects.get(id=user_id)
        user_profile.alert_words = ujson.dumps(words)
        user_profile.save(update_fields=['alert_words'])
def migrate_to_invite_to_realm_policy(
        apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
    """Convert the old invite_by_admins_only boolean into the new
    invite_to_realm_policy enum."""
    Realm = apps.get_model("zerver", "Realm")
    # Constants mirrored from the Realm model, unavailable in migrations.
    Realm.INVITE_TO_REALM_POLICY_MEMBERS_ONLY = 1
    Realm.INVITE_TO_REALM_POLICY_ADMINS_ONLY = 2
    policy_by_flag = {
        False: Realm.INVITE_TO_REALM_POLICY_MEMBERS_ONLY,
        True: Realm.INVITE_TO_REALM_POLICY_ADMINS_ONLY,
    }
    for admins_only, policy in policy_by_flag.items():
        Realm.objects.filter(invite_by_admins_only=admins_only).update(
            invite_to_realm_policy=policy)
def migrate_existing_attachment_data(
        apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill Attachment.realm and Attachment.is_realm_public from the
    attachment's owner and the messages it appears in."""
    Attachment = apps.get_model('zerver', 'Attachment')
    Recipient = apps.get_model('zerver', 'Recipient')
    Stream = apps.get_model('zerver', 'Stream')

    for attachment in Attachment.objects.all():
        attachment.realm = attachment.owner.realm
        for message in attachment.messages.all():
            # Only messages sent by the owner can make the attachment public.
            if message.sender != attachment.owner:
                continue
            if message.recipient.type != Recipient.STREAM:
                continue
            stream = Stream.objects.get(id=message.recipient.type_id)
            # Public iff posted to a non-invite-only stream outside a
            # Zephyr mirror realm.
            publicly_shared = not stream.realm.is_zephyr_mirror_realm and not stream.invite_only
            attachment.is_realm_public = attachment.is_realm_public or publicly_shared
        attachment.save()
def migrate_realm_emoji_image_files(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Rename stored realm-emoji image files to the new id-based naming
    scheme and update each row's file_name accordingly."""
    RealmEmoji = apps.get_model('zerver', 'RealmEmoji')
    uploader = get_uploader()
    for emoji in RealmEmoji.objects.all():
        old_name = emoji.file_name
        new_name = get_emoji_file_name(old_name, str(emoji.id))
        # Make sure the renamed image files exist in storage before
        # pointing the database row at the new name.
        uploader.ensure_emoji_images(emoji.realm_id, old_name, new_name)
        emoji.file_name = new_name
        emoji.save(update_fields=['file_name'])
def reverse_change_emojiset(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Reverse migration: users on the 'text' emojiset go back to
    emoji_alt_code=True with the default 'google' emojiset.

    Resetting `emojiset` to "google" (the default) doesn't make an exact
    round trip, but it's nearly indistinguishable -- the setting
    shouldn't really matter while `emoji_alt_code` is true.
    """
    UserProfile = apps.get_model("zerver", "UserProfile")
    text_emojiset_users = UserProfile.objects.filter(emojiset="text")
    for user in text_emojiset_users:
        user.emojiset = "google"
        user.emoji_alt_code = True
        user.save(update_fields=["emoji_alt_code", "emojiset"])
def upload_emoji_to_storage(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Download each realm emoji from its img_url and upload it into the
    storage backend, recording the resulting file name on the row.
    """
    realm_emoji_model = apps.get_model('zerver', 'RealmEmoji')
    uploader = get_uploader()  # type: Uploader
    for emoji in realm_emoji_model.objects.all():
        file_name = uploader.upload_emoji(emoji.realm_id, emoji.img_url, emoji.name)
        if file_name is None:
            logging.warning("ERROR: Could not download emoji %s; please reupload manually" % (emoji,))
        # NOTE(review): on failure, file_name is None and is still written to
        # the row -- presumably intentional so admins can find and reupload
        # broken emoji; confirm before "fixing".
        emoji.file_name = file_name
        emoji.save()
def emoji_to_lowercase(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Lowercase every realm emoji name.

    Technically, this could create a conflict, but it's exceedingly
    unlikely.  If that happens, the sysadmin can manually rename the
    conflicts with the manage.py shell and then rerun the
    migration/upgrade.
    """
    RealmEmoji = apps.get_model("zerver", "RealmEmoji")
    for realm_emoji in RealmEmoji.objects.all():
        realm_emoji.name = realm_emoji.name.lower()
        realm_emoji.save()
def set_subdomain_of_default_realm(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """In development environments only, give the zulip.com realm the
    'zulip' subdomain."""
    if not settings.DEVELOPMENT:
        return
    Realm = apps.get_model('zerver', 'Realm')
    try:
        default_realm = Realm.objects.get(domain="zulip.com")
    except ObjectDoesNotExist:
        # No default realm; nothing to do.
        return
    default_realm.subdomain = "zulip"
    default_realm.save()
def check_and_create_attachments(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill Attachment rows for messages flagged has_attachment that
    have no Attachment linked, by scanning message content for upload URLs."""
    # Can't import Recipient.STREAM in a migration; 2 is the stream type.
    STREAM = 2
    Message = apps.get_model('zerver', 'Message')
    Attachment = apps.get_model('zerver', 'Attachment')
    Stream = apps.get_model('zerver', 'Stream')
    for message in Message.objects.filter(has_attachment=True, attachment=None):
        for url in attachment_url_re.findall(message.content):
            path_id = attachment_url_to_path_id(url)
            if path_id is None:
                # URL didn't parse to an upload path; skip it.
                continue
            user_profile = message.sender
            # An attachment is realm-public when posted to a public
            # stream of a non-Zephyr-mirror realm.
            is_message_realm_public = False
            if message.recipient.type == STREAM:
                stream = Stream.objects.get(id=message.recipient.type_id)
                is_message_realm_public = (
                    not stream.invite_only
                    and not stream.realm.is_zephyr_mirror_realm
                )
            attachment = Attachment.objects.create(
                file_name=os.path.basename(path_id),
                path_id=path_id,
                owner=user_profile,
                realm=user_profile.realm,
                is_realm_public=is_message_realm_public,
            )
            attachment.messages.add(message)
def fix_realm_string_ids(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """If there are exactly two active realms ("zulip" plus one user realm),
    give the user realm the root-subdomain string_id ("")."""
    Realm = apps.get_model('zerver', 'Realm')
    if Realm.objects.filter(deactivated=False).count() != 2:
        return
    zulip_realm = Realm.objects.get(string_id="zulip")
    # BUG FIX: the old code did `filter(...)[0]` inside
    # `except Realm.DoesNotExist` -- but indexing an empty queryset
    # raises IndexError, not DoesNotExist, so the guard never fired.
    # Use .first() and an explicit None check instead.
    user_realm = Realm.objects.filter(deactivated=False).exclude(
        id=zulip_realm.id).first()
    if user_realm is None:
        return
    user_realm.string_id = ""
    user_realm.save()
def realm_emoji_name_to_id(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Convert realm-emoji reactions from storing the emoji name in
    emoji_code to storing the RealmEmoji id; delete reactions whose
    emoji no longer exists."""
    Reaction = apps.get_model('zerver', 'Reaction')
    RealmEmoji = apps.get_model('zerver', 'RealmEmoji')
    realm_emoji_by_realm_id = defaultdict(dict)  # type: Dict[int, Dict[str, Any]]
    for realm_emoji in RealmEmoji.objects.all():
        # BUG FIX: the table must be keyed realm_id -> emoji_name -> info.
        # The old code assigned the info dict directly at realm_id,
        # clobbering all but one emoji per realm and making the
        # `.get(emoji_name)` lookup below always return None -- which
        # deleted every realm-emoji reaction.
        realm_emoji_by_realm_id[realm_emoji.realm_id][realm_emoji.name] = {
            'id': str(realm_emoji.id),
            'name': realm_emoji.name,
            'deactivated': realm_emoji.deactivated,
        }
    for reaction in Reaction.objects.filter(reaction_type='realm_emoji'):
        realm_id = reaction.user_profile.realm_id
        emoji_name = reaction.emoji_name
        realm_emoji = realm_emoji_by_realm_id.get(realm_id, {}).get(emoji_name)
        if realm_emoji is None:
            # Realm emoji used in this reaction has been deleted so this
            # reaction should also be deleted. We don't need to reverse
            # this step in migration reversal code.
            reaction.delete()
        else:
            reaction.emoji_code = realm_emoji["id"]
            reaction.save()
def move_avatars_to_be_uid_based(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Move user-uploaded avatars (avatar_source == "U") from
    email-hash-based storage paths to user-id-based paths.

    Handles both storage backends: local disk (LOCAL_UPLOADS_DIR set)
    and S3.  Users whose source files cannot be found are switched to
    gravatar ("G") instead of failing the migration.
    """
    user_profile_model = apps.get_model('zerver', 'UserProfile')
    if settings.LOCAL_UPLOADS_DIR is not None:
        # Local-disk backend: rename all three variants of each avatar.
        for user_profile in user_profile_model.objects.filter(avatar_source="U"):
            src_file_name = user_avatar_hash(user_profile.email)
            dst_file_name = user_avatar_path(user_profile)
            try:
                move_local_file('avatars', src_file_name + '.original', dst_file_name + '.original')
                move_local_file('avatars', src_file_name + '-medium.png', dst_file_name + '-medium.png')
                move_local_file('avatars', src_file_name + '.png', dst_file_name + '.png')
            except MissingAvatarException:
                # If the user's avatar is missing, it's probably
                # because they previously changed their email address.
                # So set them to have a gravatar instead.
                user_profile.avatar_source = "G"
                user_profile.save(update_fields=["avatar_source"])
    else:
        # S3 backend: copy keys to the uid-based names, then delete the
        # old email-hash keys in a second pass.
        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
        bucket_name = settings.S3_AVATAR_BUCKET
        bucket = conn.get_bucket(bucket_name, validate=False)
        for user_profile in user_profile_model.objects.filter(avatar_source="U"):
            uid_hash_path = user_avatar_path(user_profile)
            email_hash_path = user_avatar_hash(user_profile.email)
            if bucket.get_key(uid_hash_path):
                # Already migrated (e.g. migration rerun); skip.
                continue
            if not bucket.get_key(email_hash_path):
                # This is likely caused by a user having previously changed their email
                # If the user's avatar is missing, it's probably
                # because they previously changed their email address.
                # So set them to have a gravatar instead.
                user_profile.avatar_source = "G"
                user_profile.save(update_fields=["avatar_source"])
                continue
            bucket.copy_key(uid_hash_path + ".original", bucket_name, email_hash_path + ".original")
            bucket.copy_key(uid_hash_path + "-medium.png", bucket_name, email_hash_path + "-medium.png")
            bucket.copy_key(uid_hash_path, bucket_name, email_hash_path)
        # From an error handling sanity perspective, it's best to
        # start deleting after everything is copied, so that recovery
        # from failures is easy (just rerun one loop or the other).
        for user_profile in user_profile_model.objects.filter(avatar_source="U"):
            bucket.delete_key(user_avatar_hash(user_profile.email) + ".original")
            bucket.delete_key(user_avatar_hash(user_profile.email) + "-medium.png")
            bucket.delete_key(user_avatar_hash(user_profile.email))
def backfill_subscription_log_events(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill RealmAuditLog entries for existing stream subscriptions:
    a 'subscription_created' row for every subscription, plus a
    'subscription_deactivated' row for the inactive ones.

    All rows are marked backfilled=True so they can be distinguished
    from events logged in real time.
    """
    migration_time = timezone_now()
    RealmAuditLog = apps.get_model('zerver', 'RealmAuditLog')
    Subscription = apps.get_model('zerver', 'Subscription')
    Message = apps.get_model('zerver', 'Message')

    def _bulk_backfill(subs: Any, event_type: str, event_last_message_id: int,
                       event_time: Any) -> None:
        # Create one backfilled audit-log row per subscription in bulk.
        RealmAuditLog.objects.bulk_create([
            RealmAuditLog(
                realm=sub.user_profile.realm,
                modified_user=sub.user_profile,
                modified_stream_id=sub.recipient.type_id,
                event_last_message_id=event_last_message_id,
                event_type=event_type,
                event_time=event_time,
                backfilled=True)
            for sub in subs])

    # Can't use Recipient.STREAM in a migration; 2 is the stream type.
    subs_query = Subscription.objects.select_related(
        "user_profile", "user_profile__realm", "recipient").filter(recipient__type=2)
    _bulk_backfill(subs_query, 'subscription_created', 0, migration_time)
    event_last_message_id = Message.objects.aggregate(Max('id'))['id__max']
    _bulk_backfill(subs_query.filter(active=False), 'subscription_deactivated',
                   event_last_message_id, timezone_now())