Example no. 1
# Signal handler: scrubs the user's entry from the last_likes of posts they
# liked, deletes their threads and posts, then resynchronizes the affected
# threads and categories.
def delete_user_threads(sender, **kwargs):
    recount_categories = set()
    recount_threads = set()

    for post in chunk_queryset(sender.liked_post_set):
        cleaned_likes = list(
            filter(lambda i: i['id'] != sender.id, post.last_likes))
        if cleaned_likes != post.last_likes:
            post.last_likes = cleaned_likes
            post.save(update_fields=['last_likes'])

    for thread in chunk_queryset(sender.thread_set):
        recount_categories.add(thread.category_id)
        with transaction.atomic():
            thread.delete()

    for post in chunk_queryset(sender.post_set):
        recount_categories.add(post.category_id)
        recount_threads.add(post.thread_id)
        with transaction.atomic():
            post.delete()

    if recount_threads:
        changed_threads_qs = Thread.objects.filter(id__in=recount_threads)
        for thread in chunk_queryset(changed_threads_qs):
            thread.synchronize()
            thread.save()

    if recount_categories:
        for category in Category.objects.filter(id__in=recount_categories):
            category.synchronize()
            category.save()
Example no. 2
def archive_user_posts_edits(sender, archive=None, **kwargs):
    queryset = PostEdit.objects.filter(post__poster=sender).order_by('id')
    for post_edit in chunk_queryset(queryset):
        item_name = post_edit.edited_on.strftime('%H%M%S-post-edit')
        archive.add_text(item_name,
                         post_edit.edited_from,
                         date=post_edit.edited_on)
    queryset = sender.postedit_set.exclude(
        id__in=queryset.values('id')).order_by('id')
    for post_edit in chunk_queryset(queryset):
        item_name = post_edit.edited_on.strftime('%H%M%S-post-edit')
        archive.add_text(item_name,
                         post_edit.edited_from,
                         date=post_edit.edited_on)
Example no. 3
    def sync_users(self, users_to_sync):
        categories = Category.objects.root_category().get_descendants()

        self.stdout.write("Synchronizing {} users...\n".format(users_to_sync))

        synchronized_count = 0
        show_progress(self, synchronized_count, users_to_sync)
        start_time = time.time()

        for user in chunk_queryset(UserModel.objects.all()):
            user.threads = user.thread_set.filter(
                category__in=categories,
                is_hidden=False,
                is_unapproved=False,
            ).count()

            user.posts = user.post_set.filter(
                category__in=categories,
                is_event=False,
                is_unapproved=False,
            ).count()

            user.followers = user.followed_by.count()
            user.following = user.follows.count()

            user.save()

            synchronized_count += 1
            show_progress(self, synchronized_count, users_to_sync, start_time)

        self.stdout.write(
            "\n\nSynchronized {} users".format(synchronized_count))
Example no. 4
    def sync_threads(self, posts_to_sync):
        message = "Rebuilding %s posts...\n"
        self.stdout.write(message % posts_to_sync)

        message = "\n\nRebuild %s posts"

        synchronized_count = 0
        show_progress(self, synchronized_count, posts_to_sync)
        start_time = time.time()

        queryset = Post.objects.select_related('thread').filter(is_event=False)
        for post in chunk_queryset(queryset):
            if post.id == post.thread.first_post_id:
                post.set_search_document(post.thread.title)
            else:
                post.set_search_document()
            post.save(update_fields=['search_document'])

            post.update_search_vector()
            post.save(update_fields=['search_vector'])

            synchronized_count += 1
            show_progress(self, synchronized_count, posts_to_sync, start_time)

        self.stdout.write(message % synchronized_count)
Example no. 5
def archive_user_audit_trail(sender, archive=None, **kwargs):
    queryset = sender.audittrail_set.order_by('id')
    for audit_trail in chunk_queryset(queryset):
        item_name = audit_trail.created_on.strftime('%H%M%S-audit-trail')
        archive.add_text(item_name,
                         audit_trail.ip_address,
                         date=audit_trail.created_on)
Example no. 6
    # Deletes up to 50 of the user's newest posts per call, resynchronizes the
    # affected threads and categories, and reports whether deletion is complete.
    def execute_step(self, user):
        recount_categories = set()
        recount_threads = set()

        deleted_posts = 0
        is_completed = False

        for post in user.post_set.order_by('-id')[:50]:
            recount_categories.add(post.category_id)
            recount_threads.add(post.thread_id)
            with transaction.atomic():
                post.delete()
                deleted_posts += 1

        if recount_categories:
            changed_threads_qs = Thread.objects.filter(id__in=recount_threads)
            for thread in chunk_queryset(changed_threads_qs, 50):
                thread.synchronize()
                thread.save()

            for category in Category.objects.filter(id__in=recount_categories):
                category.synchronize()
                category.save()
        else:
            is_completed = True

        return {
            'deleted_count': deleted_posts,
            'is_completed': is_completed,
        }
Example no. 7
    def handle(self, *args, **options):
        working_dir = settings.MISAGO_USER_DATA_DOWNLOADS_WORKING_DIR
        if not working_dir:
            self.stdout.write(
                "MISAGO_USER_DATA_DOWNLOADS_WORKING_DIR has to be set in order for "
                "this feature to work.")
            return

        downloads_prepared = 0
        queryset = DataDownload.objects.select_related('user')
        queryset = queryset.filter(status=DataDownload.STATUS_PENDING)
        for data_download in chunk_queryset(queryset):
            if prepare_user_data_download(data_download, logger):
                user = data_download.user
                subject = gettext("%(user)s, your data download is ready") % {
                    'user': user
                }
                mail_user(
                    user,
                    subject,
                    'misago/emails/data_download',
                    context={
                        'data_download': data_download,
                        'expires_in': settings.MISAGO_USER_DATA_DOWNLOADS_EXPIRE_IN_HOURS,
                    })

                downloads_prepared += 1

        self.stdout.write("Data downloads prepared: %s" % downloads_prepared)
Example no. 8
    def handle(self, *args, **options):
        entries_created = 0
        queryset = UserModel.objects.filter(online_tracker__isnull=True)
        for user in chunk_queryset(queryset):
            Online.objects.create(user=user, last_click=user.last_login)
            entries_created += 1

        self.stdout.write("Tracker entries created: %s" % entries_created)
Example no. 9
    def test_chunk_shrinking_queryset(self):
        """chunk_queryset utility chunks queryset in delete action"""
        with self.assertNumQueries(121):
            queryset = CacheVersion.objects.all()
            for obj in chunk_queryset(queryset, chunk_size=5):
                obj.delete()

        self.assertEqual(CacheVersion.objects.count(), 0)
Example no. 10
def anonymize_user_in_events(sender, **kwargs):
    queryset = Post.objects.filter(
        is_event=True,
        event_type__in=ANONYMIZABLE_EVENTS,
        event_context__user__id=sender.id,
    )

    for event in chunk_queryset(queryset):
        anonymize_event(sender, event)
Example no. 11
    def test_chunk_queryset(self):
        """chunk_queryset utility chunks queryset but returns all items"""
        chunked_ids = []

        with self.assertNumQueries(21):
            queryset = CacheVersion.objects.all()
            for obj in chunk_queryset(queryset, chunk_size=5):
                chunked_ids.append(obj.id)

        self.assertEqual(chunked_ids, self.items_ids)
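
The two tests above pin down chunk_queryset's contract: it yields every item of the queryset while issuing one query per chunk, and it keeps working when rows are deleted between chunks. As an illustration only, and not necessarily Misago's actual implementation, a minimal sketch of such a utility for a Django queryset (assuming orderable primary keys; the default chunk size here is arbitrary) could look like this:

def chunk_queryset(queryset, chunk_size=20):
    # Walk the queryset from the highest primary key downwards, one slice at
    # a time. Anchoring each slice on the last pk seen (instead of using an
    # OFFSET) keeps iteration stable even if the caller deletes rows between
    # chunks, as in the delete-action test above.
    last_pk = None
    while True:
        chunk_qs = queryset.order_by('-pk')
        if last_pk is not None:
            chunk_qs = chunk_qs.filter(pk__lt=last_pk)
        chunk = list(chunk_qs[:chunk_size])  # one query per chunk
        if not chunk:
            return
        for obj in chunk:
            yield obj
        last_pk = chunk[-1].pk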
Example no. 12
    def handle(self, *args, **options):
        users_deleted = 0
        
        queryset = UserModel.objects.filter(is_deleting_account=True)

        for user in chunk_queryset(queryset):
            if can_delete_own_account(user, user):
                user.delete()
                users_deleted += 1

        self.stdout.write("Deleted users: {}".format(users_deleted))
Example no. 13
def archive_user_polls(sender, archive=None, **kwargs):
    queryset = sender.poll_set.order_by('id')
    for poll in chunk_queryset(queryset):
        item_name = poll.posted_on.strftime('%H%M%S-poll')
        archive.add_dict(
            item_name,
            OrderedDict([
                (_("Question"), poll.question),
                (_("Choices"), u', '.join([c['label'] for c in poll.choices])),
            ]),
            date=poll.posted_on,
        )
Example no. 14
# Signal handler: replaces IP addresses stored with the user's posts, post
# edits, likes, attachments, polls, and poll votes with the anonymous
# placeholder IP, recalculating post checksums where needed.
def anonymize_user(sender, **kwargs):
    for post in chunk_queryset(sender.post_set):
        post.poster_ip = ANONYMOUS_IP
        update_post_checksum(post)
        post.save(update_fields=['checksum', 'poster_ip'])

    PostEdit.objects.filter(editor=sender).update(editor_ip=ANONYMOUS_IP)
    PostLike.objects.filter(liker=sender).update(liker_ip=ANONYMOUS_IP)

    Attachment.objects.filter(uploader=sender).update(uploader_ip=ANONYMOUS_IP)

    Poll.objects.filter(poster=sender).update(poster_ip=ANONYMOUS_IP)
    PollVote.objects.filter(voter=sender).update(voter_ip=ANONYMOUS_IP)
Example no. 15
    def handle(self, *args, **options):
        downloads_expired = 0
        queryset = DataDownload.objects.select_related('user')
        queryset = queryset.filter(
            status=DataDownload.STATUS_READY,
            expires_on__lte=timezone.now(),
        )

        for data_download in chunk_queryset(queryset):
            expire_user_data_download(data_download)
            downloads_expired += 1

        self.stdout.write("Data downloads expired: %s" % downloads_expired)
Example no. 16
    def sync_attachments(self, queryset, attachments_to_sync):
        self.stdout.write("Clearing %s attachments...\n" % attachments_to_sync)

        cleared_count = 0
        show_progress(self, cleared_count, attachments_to_sync)
        start_time = time.time()
        
        for attachment in chunk_queryset(queryset):
            attachment.delete()

            cleared_count += 1
            show_progress(self, cleared_count, attachments_to_sync, start_time)

        self.stdout.write("\n\nCleared %s attachments" % cleared_count)
Example no. 17
    def handle(self, *args, **options):
        keys = {}

        for user in chunk_queryset(UserModel.objects.all()):
            for key in user.profile_fields.keys():
                keys.setdefault(key, 0)
                keys[key] += 1

        if keys:
            max_len = max([len(k) for k in keys.keys()])
            for key in sorted(keys.keys()):
                space = ' ' * (max_len + 1 - len(key))
                self.stdout.write("%s:%s%s" % (key, space, keys[key]))
        else:
            self.stdout.write("No profile fields are currently in use.")
Example no. 18
    def handle(self, *args, **options):
        if not settings.MISAGO_ENABLE_DELETE_OWN_ACCOUNT:
            self.stdout.write(
                "Delete own account option is currently disabled.")
            return

        deleted = 0

        queryset = UserModel.objects.filter(is_deleting_account=True)

        for user in chunk_queryset(queryset):
            if can_delete_own_account(user, user):
                user.delete()
                deleted += 1

        self.stdout.write("Deleted users: {}".format(deleted))
Example no. 19
    def sync_threads(self, threads_to_sync):
        self.stdout.write("Synchronizing {} threads...\n".format(threads_to_sync))

        synchronized_count = 0
        show_progress(self, synchronized_count, threads_to_sync)
        start_time = time.time()

        for thread in chunk_queryset(Thread.objects.all()):
            thread.synchronize()
            thread.save()

            synchronized_count += 1
            show_progress(self, synchronized_count, threads_to_sync, start_time)

        self.stdout.write("\n\nSynchronized {} threads".format(synchronized_count))
Example no. 20
    def update_posts_checksums(self, posts_to_update):
        self.stdout.write("Updating %s posts checksums...\n" % posts_to_update)

        updated_count = 0
        show_progress(self, updated_count, posts_to_update)
        start_time = time.time()

        queryset = Post.objects.filter(is_event=False)
        for post in chunk_queryset(queryset):
            update_post_checksum(post)
            post.save(update_fields=['checksum'])

            updated_count += 1
            show_progress(self, updated_count, posts_to_update, start_time)

        self.stdout.write("\n\nUpdated %s posts checksums" % updated_count)
Example no. 21
    def sync_attachments(self, queryset, attachments_to_sync):
        message = "Clearing %s attachments...\n"
        self.stdout.write(message % attachments_to_sync)

        message = "\n\nCleared %s attachments"

        synchronized_count = 0
        show_progress(self, synchronized_count, attachments_to_sync)
        start_time = time.time()
        for attachment in chunk_queryset(queryset):
            attachment.delete()

            synchronized_count += 1
            show_progress(self, synchronized_count, attachments_to_sync, start_time)

        self.stdout.write(message % synchronized_count)
Example no. 22
def archive_user_attachments(sender, archive=None, **kwargs):
    queryset = sender.attachment_set.order_by('id')
    for attachment in chunk_queryset(queryset):
        archive.add_model_file(
            attachment.file,
            prefix=attachment.uploaded_on.strftime('%H%M%S-file'),
            date=attachment.uploaded_on,
        )
        archive.add_model_file(
            attachment.image,
            prefix=attachment.uploaded_on.strftime('%H%M%S-image'),
            date=attachment.uploaded_on,
        )
        archive.add_model_file(
            attachment.thumbnail,
            prefix=attachment.uploaded_on.strftime('%H%M%S-thumbnail'),
            date=attachment.uploaded_on,
        )
Example no. 23
    def handle(self, *args, **options):
        fieldname = options['fieldname']
        if not fieldname:
            self.stderr.write("Specify fieldname to delete.")
            return

        fields_deleted = 0

        queryset = UserModel.objects.filter(
            profile_fields__has_keys=[fieldname])

        for user in chunk_queryset(queryset):
            if fieldname in user.profile_fields.keys():
                user.profile_fields.pop(fieldname)
                user.save(update_fields=['profile_fields'])
                fields_deleted += 1

        self.stdout.write(
            '"{}" profile field has been deleted from {} users.'.format(
                fieldname, fields_deleted))
Example no. 24
    def handle(self, *args, **options):
        if not settings.MISAGO_DELETE_NEW_INACTIVE_USERS_OLDER_THAN_DAYS:
            self.stdout.write(
                "Automatic deletion of inactive users is currently disabled.")
            return

        users_deleted = 0

        joined_on_cutoff = timezone.now() - timedelta(
            days=settings.MISAGO_DELETE_NEW_INACTIVE_USERS_OLDER_THAN_DAYS)

        queryset = UserModel.objects.filter(
            requires_activation__gt=UserModel.ACTIVATION_NONE,
            joined_on__lt=joined_on_cutoff,
        )

        for user in chunk_queryset(queryset):
            user.delete()
            users_deleted += 1

        self.stdout.write("Deleted users: {}".format(users_deleted))
Example no. 25
    def rebuild_posts_search(self, posts_to_reindex):
        self.stdout.write("Rebuilding search for {} posts...\n".format(posts_to_reindex))

        rebuild_count = 0
        show_progress(self, rebuild_count, posts_to_reindex)
        start_time = time.time()

        queryset = Post.objects.select_related('thread').filter(is_event=False)
        for post in chunk_queryset(queryset):
            if post.id == post.thread.first_post_id:
                post.set_search_document(post.thread.title)
            else:
                post.set_search_document()
            post.save(update_fields=['search_document'])

            post.update_search_vector()
            post.save(update_fields=['search_vector'])

            rebuild_count += 1
            show_progress(self, rebuild_count, posts_to_reindex, start_time)

        self.stdout.write("\n\nRebuild search for {} posts".format(rebuild_count))
Example no. 26
def archive_user_posts(sender, archive=None, **kwargs):
    queryset = sender.post_set.order_by('id')
    for post in chunk_queryset(queryset):
        item_name = post.posted_on.strftime('%H%M%S-post')
        archive.add_text(item_name, post.parsed, date=post.posted_on)
Example no. 27
def anonymize_user_in_likes(sender, **kwargs):
    for post in chunk_queryset(sender.liked_post_set):
        anonymize_post_last_likes(sender, post)
Example no. 28
def remove_unparticipated_private_threads(sender, **kwargs):
    threads_qs = kwargs['instance'].privatethread_set.all()
    for thread in chunk_queryset(threads_qs):
        if thread.participants.count() == 1:
            with transaction.atomic():
                thread.delete()