def backfill_visits(apps, schema_editor):
    """Ensure every saved Discover query and dashboard has non-null visit stats."""
    DiscoverSavedQuery = apps.get_model("sentry", "DiscoverSavedQuery")
    Dashboard = apps.get_model("sentry", "Dashboard")

    def _fill_defaults(obj):
        # Default missing counters/timestamps; save only when something changed.
        dirty = False
        if obj.visits is None:
            obj.visits = 1
            dirty = True
        if obj.last_visited is None:
            obj.last_visited = timezone.now()
            dirty = True
        if dirty:
            obj.save()

    for saved_query in RangeQuerySetWrapperWithProgressBar(DiscoverSavedQuery.objects.all()):
        _fill_defaults(saved_query)
    for dashboard in RangeQuerySetWrapperWithProgressBar(Dashboard.objects.all()):
        _fill_defaults(dashboard)
def handle(self, **options):
    """Repair denormalized org/team/project data and backfill project keys.

    - Generates slugs for organizations that lack one.
    - For projects missing a team, creates a fresh Organization and Team
      from the project's name and owner, then re-points the project.
    - Ensures a ProjectKey exists for every (project, team member) pair.
    """
    from django.db.models import Q
    from sentry.constants import RESERVED_ORGANIZATION_SLUGS
    from sentry.models import Organization, Project, Team, ProjectKey
    from sentry.db.models import update
    from sentry.db.models.utils import slugify_instance
    from sentry.utils.query import RangeQuerySetWrapperWithProgressBar

    print("Correcting data on organizations")
    queryset = Organization.objects.filter(slug__isnull=True)
    for org in RangeQuerySetWrapperWithProgressBar(queryset):
        slugify_instance(org, org.name, RESERVED_ORGANIZATION_SLUGS)
        org.save()

    # Create teams for any projects that are missing them
    print("Correcting data on projects")
    queryset = Project.objects.filter(
        Q(team__isnull=True) | Q(organization__isnull=True),
    ).select_related('owner')
    for project in RangeQuerySetWrapperWithProgressBar(queryset):
        if not project.team:
            organization = Organization(
                name=project.name,
                owner=project.owner,
            )
            slugify_instance(organization, organization.name, RESERVED_ORGANIZATION_SLUGS)
            organization.save()

            team = Team(
                name=project.name,
                owner=project.owner,
                # BUG FIX: was misspelled "oprganization", which Django rejects
                # as an unexpected keyword argument at Team() construction.
                organization=organization,
            )
            slugify_instance(team, team.name, RESERVED_ORGANIZATION_SLUGS)
            team.save()

            # Re-point the project at the newly created team/org.
            # NOTE(review): kept inside the `if not project.team` branch so a
            # project that has a team (but a null organization) cannot hit an
            # unbound `team` — confirm against the original indentation.
            update(project, organization=team.organization, team=team)

    # Create missing project keys
    print("Creating missing project keys")
    queryset = Team.objects.all()
    for team in RangeQuerySetWrapperWithProgressBar(queryset):
        for member in team.member_set.select_related('user'):
            for project in team.project_set.all():
                try:
                    # get_or_create is idempotent; we only need the key to exist.
                    ProjectKey.objects.get_or_create(
                        project=project,
                        user=member.user,
                    )
                except ProjectKey.MultipleObjectsReturned:
                    pass
def backfill_null_actors(apps, schema_editor):
    """Create an Actor row for every user (type=1) and team (type=0) missing one."""
    User = apps.get_model("sentry", "User")
    Team = apps.get_model("sentry", "Team")
    Actor = apps.get_model("sentry", "Actor")

    # Users map to actor type 1, teams to actor type 0.
    for model, actor_type in ((User, 1), (Team, 0)):
        for row in RangeQuerySetWrapperWithProgressBar(model.objects.all()):
            if row.actor_id is not None:
                continue
            row.actor_id = Actor.objects.create(type=actor_type).id
            row.save()
def repair_callsigns():
    """Backfill missing project callsigns and missing group short IDs."""
    from sentry.utils.query import RangeQuerySetWrapperWithProgressBar, \
        RangeQuerySetWrapper
    from sentry.models.counter import increment_project_counter
    from sentry.models import Organization, Group, Project

    click.echo('Repairing callsigns')
    orgs = Organization.objects.all()
    for org in RangeQuerySetWrapperWithProgressBar(orgs):
        org_projects = list(org.project_set.all())
        callsigns = get_callsigns(org_projects)
        for project in org_projects:
            if project.callsign is not None:
                continue
            # Filter on callsign=None so a concurrently assigned callsign is
            # never overwritten.
            Project.objects.filter(
                pk=project.id,
                callsign=None,
            ).update(callsign=callsigns[project.id])

            groups_missing_short_id = Group.objects.filter(
                project=project,
                short_id=None,
            )
            for group in RangeQuerySetWrapper(groups_missing_short_id):
                with catchable_atomic():
                    pending_short_id = increment_project_counter(project)
                    touched = Group.objects.filter(
                        pk=group.id,
                        short_id=None,
                    ).update(short_id=pending_short_id)
                    if touched == 0:
                        # short_id was assigned elsewhere; roll back the
                        # counter increment for this group.
                        raise RollbackLocally()
def backfill_rule_level_fields(apps, schema_editor):
    """Copy threshold_type/resolve_threshold from triggers up onto each AlertRule."""
    AlertRule = apps.get_model("sentry", "AlertRule")
    for alert_rule in RangeQuerySetWrapperWithProgressBar(AlertRule.objects_with_snapshots.all()):
        triggers = list(alert_rule.alertruletrigger_set.all())
        if not triggers:
            # Failsafe for malformed rules that have no triggers at all.
            alert_rule.threshold_type = 0
        else:
            # Threshold types are the same for all triggers on a rule, so
            # grab it from the first one.
            threshold_type = triggers[0].threshold_type
            thresholds = [
                trigger.resolve_threshold
                for trigger in triggers
                if trigger.resolve_threshold is not None
            ]
            # Above-threshold rules (type 0) resolve at the minimum value;
            # below-threshold rules at the maximum.
            pick = min if threshold_type == 0 else max
            alert_rule.resolve_threshold = pick(thresholds) if thresholds else None
            alert_rule.threshold_type = threshold_type
        alert_rule.save()
def forwards(self, orm):
    # Backfill Project.organization from each project's team.  A savepoint
    # guards every save so a uniqueness violation can be retried with a
    # freshly slugified project instead of aborting the whole transaction.
    from sentry.constants import RESERVED_ORGANIZATION_SLUGS
    from sentry.db.models.utils import slugify_instance
    from sentry.utils.query import RangeQuerySetWrapperWithProgressBar

    Project = orm['sentry.Project']

    queryset = Project.objects.filter(
        organization__isnull=True
    ).select_related('team', 'team__organization')
    for project in RangeQuerySetWrapperWithProgressBar(queryset):
        project.organization = project.team.organization
        sid = transaction.savepoint()
        try:
            project.save()
        except IntegrityError:
            # Save collided with a unique constraint; roll back to the
            # savepoint so the transaction stays usable.
            transaction.savepoint_rollback(sid)
            # we also need to update the slug here based on the new constraints
            slugify_instance(project, project.name, (
                models.Q(organization=project.organization) | models.Q(team=project.team),
            ))
            project.save()
        else:
            transaction.savepoint_commit(sid)
def delete_itunes_credentials(apps, schema_editor):
    """
    Strip obsolete iTunes credential fields from every App Store Connect
    symbol source stored in project options.
    """
    ITUNES_FIELDS = (
        "itunesCreated",
        "itunesSession",
        "orgPublicId",
        "orgName",
        "itunesUser",
        "itunesPassword",
    )
    ProjectOption = apps.get_model("sentry", "ProjectOption")
    options = ProjectOption.objects.filter(key="sentry:symbol_sources")
    for project_option in RangeQuerySetWrapperWithProgressBar(options):
        sources = json.loads(project_option.value or "[]")
        removed_any = False
        for config in sources:
            if config["type"] != "appStoreConnect":
                continue
            for field in ITUNES_FIELDS:
                if field in config:
                    del config[field]
                    removed_any = True
        if removed_any:
            # Only re-serialize and write back when something was dropped.
            project_option.value = json.dumps(sources)
            project_option.save()
def migrate_alert_query_model(apps, schema_editor):
    """Rewrite count_unique(user) aggregates to their tag-based form."""
    SnubaQuery = apps.get_model("sentry", "SnubaQuery")
    matching = SnubaQuery.objects.filter(aggregate="count_unique(user)")
    for snuba_query in RangeQuerySetWrapperWithProgressBar(matching):
        snuba_query.aggregate = "count_unique(tags[sentry:user])"
        snuba_query.save()
def migrate_alert_query_model(apps, schema_editor):
    """Attach a SnubaQuery to every alert rule (including snapshots) lacking one."""
    AlertRule = apps.get_model("sentry", "AlertRule")
    SnubaQuery = apps.get_model("sentry", "SnubaQuery")
    missing_query = AlertRule.objects_with_snapshots.filter(snuba_query__isnull=True)
    for alert_rule in RangeQuerySetWrapperWithProgressBar(missing_query):
        convert_alert_rule_to_snuba_query(alert_rule, SnubaQuery)
def update_legacy_discover_saved_query_timestamps(apps, schema_editor):
    """Convert legacy-format start/end timestamps in saved Discover queries
    to the current UTC datetime format."""
    DiscoverSavedQuery = apps.get_model("sentry", "DiscoverSavedQuery")
    for saved_query in RangeQuerySetWrapperWithProgressBar(DiscoverSavedQuery.objects.all()):
        query = saved_query.query
        changed = False
        for key in ("start", "end"):
            if key not in query:
                continue
            raw = query[key]
            try:
                parsed = datetime.strptime(raw, LEGACY_DATETIME_FORMAT).astimezone(pytz.utc)
            except ValueError:
                # Not in the legacy format; leave the value untouched.
                continue
            query[key] = datetime.strftime(parsed, DATETIME_FORMAT)
            changed = True
        if changed:
            saved_query.query = query
            saved_query.save()
def forwards(self, orm):
    """Backfill File.blob for legacy files and report the storage that
    deduplicating multi-path blobs would save."""
    from sentry.utils.query import RangeQuerySetWrapperWithProgressBar

    File = orm['sentry.File']

    queryset = File.objects.all()

    # Map each blob to the distinct set of paths referencing it.
    blob_paths = defaultdict(set)
    for file_obj in RangeQuerySetWrapperWithProgressBar(queryset):
        if not file_obj.blob_id:
            self._ensure_blob(orm, file_obj)
        if file_obj.path:
            blob_paths[file_obj.blob].add(file_obj.path)

    saved = 0
    count = 0
    total = 0
    # BUG FIX: dict.iteritems() does not exist on Python 3; use items().
    for blob, path_set in blob_paths.items():
        total += (blob.size or 0) * len(path_set)
        if len(path_set) == 1:
            continue
        # Every path beyond the first is a duplicate copy of this blob.
        saved += (blob.size or 0) * (len(path_set) - 1)
        count += len(path_set) - 1

    print("Saved %.2fGB (on %.2fGB) with removal of %d duplicate files" % (
        saved / 1024 / 1024 / 1024.0,
        total / 1024 / 1024 / 1024.0,
        count,
    ))
def migrate_project_transaction_thresholds(apps, schema_editor):
    """
    Copy each org's apdex threshold (when set) onto all of its projects as
    ProjectTransactionThreshold rows, without clobbering existing rows.
    """
    OrganizationOption = apps.get_model("sentry", "OrganizationOption")
    Project = apps.get_model("sentry", "Project")
    ProjectTransactionThreshold = apps.get_model("sentry", "ProjectTransactionThreshold")

    for option in RangeQuerySetWrapperWithProgressBar(OrganizationOption.objects.all()):
        if option.key != "sentry:apdex_threshold":
            continue
        org_projects = Project.objects.filter(organization_id=option.organization_id)
        for project in org_projects:
            try:
                # get_or_create so an organization that already set a
                # ProjectTransactionThreshold is never overwritten.
                ProjectTransactionThreshold.objects.get_or_create(
                    organization_id=option.organization_id,
                    project_id=project.id,
                    defaults={
                        "threshold": option.value,
                        "metric": TransactionMetric.DURATION.value,
                    },
                )
            except Exception:
                logging.exception(
                    f"Error migrating project {project.id} for organization {option.organization_id}"
                )
def migrate_subscriptions(apps, schema_editor):
    # Re-create every active Snuba query subscription under a fresh
    # subscription id, then remove the old subscription from Snuba.  Any
    # failure leaves the database row untouched for a later retry.
    QuerySubscription = apps.get_model("sentry", "QuerySubscription")
    for subscription in RangeQuerySetWrapperWithProgressBar(
            QuerySubscription.objects.select_related("snuba_query").all()):
        if subscription.subscription_id is not None:
            subscription_id = None
            try:
                subscription_id = _create_in_snuba(subscription)
            except Exception as e:
                # Creation failed: keep the existing subscription as-is.
                logging.exception(
                    f"failed to recreate {subscription.subscription_id}: {e}")
                continue
            try:
                _delete_from_snuba(
                    QueryDatasets(subscription.snuba_query.dataset),
                    subscription.subscription_id,
                )
            except Exception as e:
                try:
                    # Delete the subscription we just created to avoid orphans
                    _delete_from_snuba(
                        QueryDatasets(subscription.snuba_query.dataset),
                        subscription_id,
                    )
                except Exception as oe:
                    logging.exception(
                        f"failed to delete orphan {subscription_id}: {oe}")
                logging.exception(
                    f"failed to delete {subscription.subscription_id}: {e}")
                continue
            # Both Snuba operations succeeded; persist the new id.
            subscription.update(subscription_id=subscription_id)
def backfill_popularity(apps, schema_editor):
    """Give every SentryApp without a popularity score the default of 1."""
    SentryApp = apps.get_model("sentry", "SentryApp")
    for sentry_app in RangeQuerySetWrapperWithProgressBar(SentryApp.objects.all()):
        if sentry_app.popularity is not None:
            continue
        sentry_app.popularity = 1
        sentry_app.save()
def delete_vsts_repo_data(apps, schema_editor):
    """
    Delete the VSTS plugin rows in the Repository table.
    """
    Repository = apps.get_model("sentry", "Repository")
    for repo in RangeQuerySetWrapperWithProgressBar(Repository.objects.all()):
        if repo.provider != "visualstudio":
            continue
        repo.delete()
def backfill_me_or_none(apps, schema_editor):
    """Rewrite the legacy ":me_or_none" search token to ":[me, none]"."""
    SavedSearch = apps.get_model("sentry", "SavedSearch")
    for saved_search in RangeQuerySetWrapperWithProgressBar(SavedSearch.objects.all()):
        if ":me_or_none" not in saved_search.query:
            continue
        saved_search.query = saved_search.query.replace(":me_or_none", ":[me, none]")
        saved_search.save()
def delete_incidents_with_no_alert_rule(apps, schema_editor):
    """
    Remove incidents that have no associated alert rule.  These are only
    test incidents that we don't care about, so it is fine to drop them
    before requiring every Incident to reference an AlertRule going forward.
    """
    Incident = apps.get_model("sentry", "Incident")
    orphans = Incident.objects.filter(alert_rule__isnull=True)
    for incident in RangeQuerySetWrapperWithProgressBar(orphans):
        incident.delete()
def backfill_target_id(apps, schema_editor):
    """Point every IntegrationFeature at its sentry app via target_id/target_type."""
    IntegrationFeature = apps.get_model("sentry", "IntegrationFeature")
    for feature in RangeQuerySetWrapperWithProgressBar(IntegrationFeature.objects.all()):
        feature.target_id = feature.sentry_app.id
        feature.target_type = IntegrationTypes.SENTRY_APP.value
        feature.save()
def backfill_codeowners_auto_sync_column(apps, schema_editor):
    """Default codeowners_auto_sync to True wherever it is still null."""
    ProjectOwnership = apps.get_model("sentry", "ProjectOwnership")
    for ownership in RangeQuerySetWrapperWithProgressBar(ProjectOwnership.objects.all()):
        if ownership.codeowners_auto_sync is not None:
            continue
        ownership.codeowners_auto_sync = True
        ownership.save()
def delete_code_mappings_with_no_integration(apps, schema_editor):
    """
    Delete the rows in the RepositoryProjectPathConfig table that have null
    organization_integration_id.
    """
    RepositoryProjectPathConfig = apps.get_model("sentry", "RepositoryProjectPathConfig")
    dangling = RepositoryProjectPathConfig.objects.filter(organization_integration_id=None)
    for code_mapping in RangeQuerySetWrapperWithProgressBar(dangling):
        code_mapping.delete()
def forwards(self, orm):
    """Assign each org member a 1-based counter in member-set iteration order."""
    from sentry.utils.query import RangeQuerySetWrapperWithProgressBar

    Organization = orm['sentry.Organization']
    OrganizationMember = orm['sentry.OrganizationMember']

    for org in RangeQuerySetWrapperWithProgressBar(Organization.objects.all()):
        for position, member in enumerate(org.member_set.all(), start=1):
            OrganizationMember.objects.filter(id=member.id).update(counter=position)
def backfill_debug_file_checksum(apps, schema_editor):
    """
    Fill the ProjectDebugFile.checksum from related File.checksum.
    """
    ProjectDebugFile = apps.get_model("sentry", "ProjectDebugFile")
    missing = ProjectDebugFile.objects.filter(checksum__isnull=True).select_related("file")
    for debug_file in RangeQuerySetWrapperWithProgressBar(queryset=missing, step=1000):
        file_checksum = debug_file.file.checksum
        if file_checksum:
            ProjectDebugFile.objects.filter(id=debug_file.id).update(
                checksum=file_checksum
            )
def backfill_saved_search_sort(apps, schema_editor):
    """
    Set all saved searches without a sort to the current default, "Last Seen".
    """
    SavedSearch = apps.get_model("sentry", "SavedSearch")
    for search in RangeQuerySetWrapperWithProgressBar(SavedSearch.objects.all()):
        if search.sort is not None:
            continue
        search.sort = "date"
        search.save()
def handle(self, **options):
    """Ensure every project has at least one ProjectKey."""
    from sentry.models import Project, ProjectKey
    from sentry.utils.query import RangeQuerySetWrapperWithProgressBar

    # Create missing project keys
    print("Creating missing project keys")
    for project in RangeQuerySetWrapperWithProgressBar(Project.objects.all()):
        try:
            ProjectKey.objects.get_or_create(project=project)
        except ProjectKey.MultipleObjectsReturned:
            # More than one key already exists; nothing to create.
            pass
def forwards(self, orm):
    """Generate slugs for organizations missing one, then commit."""
    from sentry.constants import RESERVED_ORGANIZATION_SLUGS
    from sentry.db.models.utils import slugify_instance
    from sentry.utils.query import RangeQuerySetWrapperWithProgressBar

    Organization = orm['sentry.Organization']

    missing_slug = Organization.objects.filter(slug__isnull=True)
    for org in RangeQuerySetWrapperWithProgressBar(missing_slug):
        slugify_instance(org, org.name, RESERVED_ORGANIZATION_SLUGS)
        org.save()
    transaction.commit()
def clear_flag(Model, flag_name, flag_attr_name="flags"):
    """
    This function is used to clear an existing flag value for all items in
    a given model
    """
    # The flag's bit pattern is a class-level constant; compute it once.
    flag_bit = getattr(Model, flag_attr_name)[flag_name]
    for item in RangeQuerySetWrapperWithProgressBar(Model.objects.all()):
        if not getattr(item, flag_attr_name)[flag_name]:
            continue
        # Bitwise-AND with the complement mask (all 1s except this flag's
        # bit) turns off just this flag.
        Model.objects.filter(id=item.id).update(
            **{flag_attr_name: F(flag_attr_name).bitand(~flag_bit)}
        )
def backfill_platformexternalissue_project_id(apps, schema_editor):
    """
    Fill the PlatformExternalIssue.project_id from related Group.project_id.
    """
    PlatformExternalIssue = apps.get_model("sentry", "PlatformExternalIssue")
    Group = apps.get_model("sentry", "Group")

    missing = PlatformExternalIssue.objects.filter(
        project_id__isnull=True
    ).select_related("group")
    for external_issue in RangeQuerySetWrapperWithProgressBar(queryset=missing, step=1000):
        try:
            group_project_id = external_issue.group.project_id
        except Group.DoesNotExist:
            # The related group is gone; skip this issue.
            continue
        PlatformExternalIssue.objects.filter(id=external_issue.id).update(
            project_id=group_project_id
        )
def backfill_existing_orgs(apps, schema_editor):
    """
    Backfill the OrganizationOption alerts_member_write to be False for
    existing active orgs (status == 0).
    """
    Organization = apps.get_model("sentry", "Organization")
    OrganizationOption = apps.get_model("sentry", "OrganizationOption")

    for org in RangeQuerySetWrapperWithProgressBar(Organization.objects.all()):
        if org.status != 0:
            continue
        try:
            OrganizationOption.objects.create(
                organization=org,
                key="sentry:alerts_member_write",
                value=False,
            )
        except Exception:
            # Log and keep going so one bad org doesn't halt the backfill.
            logging.exception(f"Error backfilling organization {org.id}")
def migrate_subscriptions(apps, schema_editor):
    # Re-create every active Snuba query subscription (now carrying its event
    # types) under a fresh subscription id, then drop the old one from Snuba.
    # Any failure leaves the database row untouched for a later retry.
    QuerySubscription = apps.get_model("sentry", "QuerySubscription")
    AppSnubaQueryEventType = apps.get_model("sentry", "SnubaQueryEventType")
    for subscription in RangeQuerySetWrapperWithProgressBar(
            QuerySubscription.objects.select_related("snuba_query").all()):
        if subscription.subscription_id is not None:
            # The migration apps don't build this property, so manually set it.
            raw_event_types = AppSnubaQueryEventType.objects.filter(
                snuba_query=subscription.snuba_query).all()
            event_types = [
                SnubaQueryEventType.EventType(ev.type) for ev in raw_event_types
            ]
            setattr(subscription.snuba_query, "event_types", event_types)
            subscription_id = None
            try:
                subscription_id = _create_in_snuba(subscription)
            except Exception as e:
                # Creation failed: keep the existing subscription as-is.
                logging.exception(
                    f"failed to recreate {subscription.subscription_id}: {e}")
                continue
            try:
                _delete_from_snuba(
                    QueryDatasets(subscription.snuba_query.dataset),
                    subscription.subscription_id,
                )
            except Exception as e:
                try:
                    # Delete the subscription we just created to avoid orphans
                    _delete_from_snuba(
                        QueryDatasets(subscription.snuba_query.dataset),
                        subscription_id,
                    )
                except Exception as oe:
                    logging.exception(
                        f"failed to delete orphan {subscription_id}: {oe}")
                logging.exception(
                    f"failed to delete {subscription.subscription_id}: {e}")
                continue
            # Both Snuba operations succeeded; persist the new id.
            QuerySubscription.objects.filter(id=subscription.id).update(
                subscription_id=subscription_id)
def cleanup_audit_log_data(apps, schema_editor):
    """
    Fix `AuditLogEntry` rows that have pickled `Team` models in their `data`
    field.

    We originally had fixed this in [0], but we missed some types.  This is
    basically the same migration, but without the audit log entry type guard.

    [0]: https://github.com/getsentry/sentry/pull/17545
    """
    AuditLogEntry = apps.get_model("sentry", "AuditLogEntry")
    for entry in RangeQuerySetWrapperWithProgressBar(AuditLogEntry.objects.all()):
        teams = entry.data.get("teams")
        if not teams or not hasattr(teams[0], "id"):
            continue
        # We have a Team model in here rather than just the expected data.
        entry.data["teams"] = [team.id for team in teams]
        entry.data["teams_slugs"] = [team.slug for team in teams]
        entry.save()