def _disable_provider(self, request, organization, auth_provider):
    """Disable an organization's SSO auth provider.

    Records an audit entry, clears the ``sso:linked`` / ``sso:invalid``
    flags on every membership, releases managed users, queues unlink
    notification emails, and finally deletes the provider row.
    """
    self.create_audit_entry(
        request,
        organization=organization,
        target_object=auth_provider.id,
        event=AuditLogEntryEvent.SSO_DISABLE,
        data=auth_provider.get_audit_log_data(),
    )
    if db.is_sqlite():
        # SQLite cannot run the bitwise bulk UPDATE used below, so the
        # flags are cleared one membership row at a time.
        # NOTE(review): item-style access on the flags bitfield requires a
        # django-bitfield version whose BitHandler implements __setitem__;
        # a sibling implementation of this method uses setattr/getattr
        # instead — confirm the installed version supports this form.
        for om in OrganizationMember.objects.filter(organization=organization):
            om.flags['sso:linked'] = False
            om.flags['sso:invalid'] = False
            om.save()
    else:
        # Clear both flag bits for all members in a single bulk UPDATE by
        # AND-ing the flags column with the complement of each bit mask.
        OrganizationMember.objects.filter(
            organization=organization,
        ).update(
            flags=F('flags').bitand(
                ~OrganizationMember.flags['sso:linked'],
            ).bitand(
                ~OrganizationMember.flags['sso:invalid'],
            ),
        )
    # Members were marked as managed while SSO was enforced; release them
    # so they control their own credentials again.
    user_ids = OrganizationMember.objects.filter(organization=organization).values('user')
    User.objects.filter(id__in=user_ids).update(is_managed=False)
    # Async task: notify affected users that their accounts were unlinked.
    email_unlink_notifications.delay(organization.id, request.user.id, auth_provider.provider)
    auth_provider.delete()
def _disable_provider(self, request, organization, auth_provider):
    """Tear down SSO for *organization*.

    Writes an audit-log entry, clears the ``sso:linked`` and
    ``sso:invalid`` flag bits on every membership, then deletes the
    auth provider itself.
    """
    self.create_audit_entry(
        request,
        organization=organization,
        target_object=auth_provider.id,
        event=AuditLogEntryEvent.SSO_DISABLE,
        data=auth_provider.get_audit_log_data(),
    )

    if db.is_sqlite():
        # SQLite lacks support for the bitwise bulk UPDATE below; fall
        # back to clearing the flags row by row.  The flag names contain
        # ':' so attribute syntax is unavailable — use setattr.
        members = OrganizationMember.objects.filter(organization=organization)
        for member in members:
            setattr(member.flags, 'sso:linked', False)
            setattr(member.flags, 'sso:invalid', False)
            member.save()
    else:
        # One bulk UPDATE: AND the flags column with the complement of
        # each bit to switch both off for every member at once.
        linked_bit = getattr(OrganizationMember.flags, 'sso:linked')
        invalid_bit = getattr(OrganizationMember.flags, 'sso:invalid')
        cleared_flags = F('flags').bitand(~linked_bit).bitand(~invalid_bit)
        OrganizationMember.objects.filter(
            organization=organization,
        ).update(flags=cleared_flags)

    auth_provider.delete()
def increment_project_counter(project, delta=1):
    """Atomically increment the per-project counter and return its new value.

    This method primarily exists so that south code can use it.

    :param project: project whose ``sentry_projectcounter`` row to bump.
    :param delta: positive amount to add (defaults to 1).
    :returns: the counter value after the increment.
    :raises ValueError: if ``delta`` is not positive.
    :raises AssertionError: if run against an unsupported database engine.
    """
    if delta <= 0:
        raise ValueError('There is only one way, and that\'s up.')
    cur = connection.cursor()
    try:
        if is_postgres():
            # Postgres: a stored procedure performs the upsert and the
            # increment atomically and returns the new value.
            cur.execute(
                '''
                select sentry_increment_project_counter(%s, %s)
                ''', [project.id, delta]
            )
            return cur.fetchone()[0]
        elif is_sqlite():
            # Make sure the counter row exists, then read its current
            # value.  (The execute() result of the insert was previously
            # assigned to `value` and immediately overwritten — dead
            # assignment removed.)
            cur.execute(
                '''
                insert or ignore into sentry_projectcounter
                  (project_id, value) values (%s, 0);
                ''', [project.id]
            )
            # The sqlite cursor returns itself from execute(), so the
            # fetch can be chained directly.
            value = cur.execute(
                '''
                select value from sentry_projectcounter
                 where project_id = %s
                ''', [project.id]
            ).fetchone()[0]
            # Retry the UPDATE until changes() reports it hit the row.
            # NOTE(review): the return value is derived from the earlier
            # SELECT, so a concurrent increment between the SELECT and
            # the UPDATE yields a stale result — presumably acceptable
            # for sqlite-based dev setups; confirm.
            while True:
                cur.execute(
                    '''
                    update sentry_projectcounter
                       set value = value + %s
                     where project_id = %s;
                    ''', [delta, project.id]
                )
                changes = cur.execute(
                    '''
                    select changes();
                    '''
                ).fetchone()[0]
                if changes != 0:
                    return value + delta
        elif is_mysql():
            # MySQL: upsert while capturing the post-increment value in
            # the @new_val user variable, then read it back.
            cur.execute(
                '''
                insert into sentry_projectcounter
                            (project_id, value)
                     values (%s, @new_val := %s)
                   on duplicate key
                     update value = @new_val := value + %s
                ''', [project.id, delta, delta]
            )
            cur.execute('select @new_val')
            return cur.fetchone()[0]
        else:
            raise AssertionError("Not implemented database engine path")
    finally:
        cur.close()
def increment_project_counter(project, delta=1):
    """Bump the project's counter row by *delta* and return the new value.

    This method primarily exists so that south code can use it.
    """
    if delta <= 0:
        raise ValueError('There is only one way, and that\'s up.')

    cursor = connection.cursor()
    try:
        if is_postgres():
            # A stored procedure does the upsert + increment atomically.
            cursor.execute(
                '''
                select sentry_increment_project_counter(%s, %s)
                ''',
                [project.id, delta])
            (new_value,) = cursor.fetchone()
            return new_value

        if is_sqlite():
            # Guarantee a counter row exists for this project.
            cursor.execute(
                '''
                insert or ignore into sentry_projectcounter
                  (project_id, value) values (%s, 0);
                ''',
                [project.id])
            # Read the value before the update; sqlite's cursor returns
            # itself from execute(), allowing the chained fetch.
            (current,) = cursor.execute(
                '''
                select value from sentry_projectcounter
                 where project_id = %s
                ''',
                [project.id]).fetchone()
            # Keep applying the UPDATE until changes() confirms a row
            # was touched, then report the incremented value.
            while True:
                cursor.execute(
                    '''
                    update sentry_projectcounter
                       set value = value + %s
                     where project_id = %s;
                    ''',
                    [delta, project.id])
                (touched,) = cursor.execute(
                    '''
                    select changes();
                    ''').fetchone()
                if touched != 0:
                    return current + delta

        if is_mysql():
            # Upsert while stashing the resulting value in @new_val,
            # then read that variable back.
            cursor.execute(
                '''
                insert into sentry_projectcounter
                            (project_id, value)
                     values (%s, @new_val := %s)
                   on duplicate key
                     update value = @new_val := value + %s
                ''',
                [project.id, delta, delta])
            cursor.execute('select @new_val')
            (new_value,) = cursor.fetchone()
            return new_value

        raise AssertionError("Not implemented database engine path")
    finally:
        cursor.close()
def bulk_insert(self, items):
    """Insert ``(object_id, address, symbol)`` triples into
    ``sentry_dsymsymbol``, skipping rows that already exist.

    On engines other than SQLite a single multi-values INSERT is tried
    first; if any row collides (IntegrityError) the whole transaction is
    rolled back and we fall through to a per-row conditional insert.
    """
    db = router.db_for_write(DSymSymbol)
    items = list(items)
    if not items:
        return
    # On SQLite we don't do this. Two reasons: one, it does not
    # seem significantly faster and you're an idiot if you import
    # huge amounts of system symbols into sqlite anyways. secondly
    # because of the low parameter limit
    if not is_sqlite():
        try:
            # NOTE(review): the cursor comes from the default
            # `connection` while the transaction uses `using=db` from
            # the router — confirm both resolve to the same database.
            with transaction.atomic(using=db):
                cur = connection.cursor()
                # One statement with len(items) value groups; parameters
                # are the flattened triples in matching order.
                cur.execute(
                    '''
                    insert into sentry_dsymsymbol
                      (object_id, address, symbol)
                    values %s
                    ''' % ', '.join(['(%s, %s, %s)'] * len(items)),
                    list(chain(*items)))
                cur.close()
            return
        except IntegrityError:
            # At least one duplicate; the atomic block rolled everything
            # back, so retry below row by row.
            pass
    cur = connection.cursor()
    for item in items:
        # Conditional insert: only add the row when no row with the same
        # (object_id, address) exists yet.
        cur.execute(
            '''
            insert into sentry_dsymsymbol
              (object_id, address, symbol)
            select %(object_id)s, %(address)s, %(symbol)s
            where not exists (
              select 1 from sentry_dsymsymbol
              where object_id = %(object_id)s
              and address = %(address)s);
            ''', {
                'object_id': item[0],
                'address': item[1],
                'symbol': item[2],
            })
    cur.close()
def bulk_insert(self, items):
    """Bulk-load symbol rows, leaving already-present rows untouched."""
    db = router.db_for_write(DSymSymbol)
    items = list(items)
    if not items:
        return

    # Skip the single-statement fast path on SQLite: it is not
    # significantly faster there, and the engine's low parameter limit
    # makes large batches impossible anyway.
    if not is_sqlite():
        try:
            with transaction.atomic(using=db):
                value_groups = ", ".join(["(%s, %s, %s)"] * len(items))
                cursor = connection.cursor()
                cursor.execute(
                    """
                    insert into sentry_dsymsymbol
                      (object_id, address, symbol)
                    values %s
                    """
                    % value_groups,
                    list(chain(*items)),
                )
                cursor.close()
            return
        except IntegrityError:
            # A duplicate rolled the transaction back; fall through to
            # the row-by-row conditional insert.
            pass

    cursor = connection.cursor()
    for object_id, address, symbol in items:
        # Insert only when no row with this (object_id, address) exists.
        cursor.execute(
            """
            insert into sentry_dsymsymbol
              (object_id, address, symbol)
            select %(object_id)s, %(address)s, %(symbol)s
            where not exists (
              select 1 from sentry_dsymsymbol
              where object_id = %(object_id)s
              and address = %(address)s);
            """,
            {"object_id": object_id, "address": address, "symbol": symbol},
        )
    cursor.close()