Example #1
def archives_not_present_in_filesystem(request: HttpRequest) -> HttpResponse:
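    """Show archives whose file is missing from the filesystem and delete the selected ones."""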
    p = request.POST
    get = request.GET

    title = get.get("title", '')
    tags = get.get("tags", '')

    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1

    if 'clear' in get:
        form = ArchiveSearchForm()
    else:
        form = ArchiveSearchForm(initial={'title': title, 'tags': tags})

    if p:
        pks = []
        for k, v in p.items():
            if k.startswith("del-"):
                # keys look like "del-<pk>"; the value holds the selected pk
                pks.append(v)
        results_archive = Archive.objects.filter(id__in=pks).order_by('-pk')

        for archive in results_archive:
            message = 'Removing archive missing in filesystem: {}, path: {}'.format(
                archive.title, archive.zipped.path
            )
            logger.info(message)
            messages.success(request, message)
            archive.delete()

    params = {
        'sort': 'create_date',
        'asc_desc': 'desc',
    }

    for k, v in get.items():
        params[k] = v

    for k in archive_filter_keys:
        if k not in params:
            params[k] = ''

    results = filter_archives_simple(params)

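    # Keep only archives whose zipped file no longer exists under MEDIA_ROOT.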
    results = results.filter_non_existent(  # type: ignore
        crawler_settings.MEDIA_ROOT
    )

    paginator = Paginator(results, 50)
    try:
        results_page = paginator.page(page)
    except (InvalidPage, EmptyPage):
        results_page = paginator.page(paginator.num_pages)

    d = {'results': results_page, 'form': form}
    return render(request, "viewer/archives_not_present.html", d)
Example #2
def repeated_archives_by_field(request: HttpRequest) -> HttpResponse:
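    """Group archives that share a filesize or CRC32 and allow bulk deletion of duplicates."""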
    p = request.POST
    get = request.GET

    title = get.get("title", '')
    tags = get.get("tags", '')

    if 'clear' in get:
        form = ArchiveSearchForm()
    else:
        form = ArchiveSearchForm(initial={'title': title, 'tags': tags})

    if p:
        pks = []
        for k, v in p.items():
            if k.startswith("del-"):
                # keys look like "del-<pk>"; the value holds the selected pk
                pks.append(v)
        archives = Archive.objects.filter(id__in=pks).order_by('-create_date')

        user_reason = p.get('reason', '')

        if 'delete_archives' in p:
            for archive in archives:
                message = 'Removing archive: {} and deleting file: {}'.format(
                    archive.title, archive.zipped.path)
                frontend_logger.info(message)
                messages.success(request, message)

                gallery = archive.gallery
                if gallery:
                    gallery.mark_as_deleted()
                archive.delete_all_files()
                archive.delete()

                event_log(request.user,
                          'DELETE_ARCHIVE',
                          reason=user_reason,
                          content_object=gallery,
                          result='deleted')

        elif 'delete_objects' in p:
            for archive in archives:
                message = 'Removing archive: {}, keeping file: {}'.format(
                    archive.title, archive.zipped.path)
                frontend_logger.info(message)
                messages.success(request, message)

                gallery = archive.gallery
                if gallery:
                    gallery.mark_as_deleted()
                archive.delete_files_but_archive()
                archive.delete()

                event_log(request.user,
                          'DELETE_ARCHIVE',
                          reason=user_reason,
                          content_object=gallery,
                          result='deleted')

    params = {
        'sort': 'create_date',
        'asc_desc': 'desc',
        'filename': title,
    }

    for k, v in get.items():
        params[k] = v

    for k in archive_filter_keys:
        if k not in params:
            params[k] = ''

    results = filter_archives_simple(params)

    if 'no-custom-tags' in get:
        results = results.annotate(
            num_custom_tags=Count('custom_tags')).filter(num_custom_tags=0)

    by_filesize = {}
    by_crc32 = {}

    for k, v in groupby(results.order_by('filesize'), lambda x: x.filesize):
        objects = list(v)
        if len(objects) > 1:
            by_filesize[k] = objects

    for k, v in groupby(results.order_by('crc32'), lambda x: x.crc32):
        objects = list(v)
        if len(objects) > 1:
            by_crc32[k] = objects

    # paginator = Paginator(results, 100)
    # try:
    #     results = paginator.page(page)
    # except (InvalidPage, EmptyPage):
    #     results = paginator.page(paginator.num_pages)

    d = {'by_filesize': by_filesize, 'by_crc32': by_crc32, 'form': form}
    return render(request, "viewer/archives_repeated_by_fields.html", d)
Example #3
def archives_not_matched_with_gallery(request: HttpRequest) -> HttpResponse:
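    """List archives not matched to a gallery; allow deletion or launching web/local match workers."""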
    p = request.POST
    get = request.GET

    title = get.get("title", '')
    tags = get.get("tags", '')

    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1

    if 'clear' in get:
        form = ArchiveSearchForm()
    else:
        form = ArchiveSearchForm(initial={'title': title, 'tags': tags})

    if p:
        pks = []
        for k, v in p.items():
            if k.startswith("sel-"):
                # keys look like "sel-<pk>"; the value holds the selected pk
                pks.append(v)
        archives = Archive.objects.filter(id__in=pks).order_by('-create_date')
        if 'delete_archives' in p:
            for archive in archives:
                message = 'Removing archive not matched: {} and deleting file: {}'.format(
                    archive.title, archive.zipped.path)
                frontend_logger.info(message)
                messages.success(request, message)
                archive.delete_all_files()
                archive.delete()
        elif 'delete_objects' in p:
            for archive in archives:
                message = 'Removing archive not matched: {}, keeping file: {}'.format(
                    archive.title, archive.zipped.path)
                frontend_logger.info(message)
                messages.success(request, message)
                archive.delete_files_but_archive()
                archive.delete()
        elif 'create_possible_matches' in p:
            if thread_exists('web_match_worker'):
                return render_error(request,
                                    'Web match worker is already running.')

            matcher_filter = p['create_possible_matches']
            try:
                cutoff = float(p.get('cutoff', '0.4'))
            except ValueError:
                cutoff = 0.4
            try:
                max_matches = int(p.get('max-matches', '10'))
            except ValueError:
                max_matches = 10

            web_match_thread = threading.Thread(
                name='web_match_worker',
                target=generate_possible_matches_for_archives,
                args=(archives, ),
                kwargs={
                    'logger': frontend_logger,
                    'cutoff': cutoff,
                    'max_matches': max_matches,
                    'filters': (matcher_filter, ),
                    'match_local': False,
                    'match_web': True
                })
            web_match_thread.daemon = True
            web_match_thread.start()
            messages.success(request, 'Starting web match worker.')
        elif 'create_possible_matches_internal' in p:
            if thread_exists('match_unmatched_worker'):
                return render_error(
                    request, "Local matching worker is already running.")
            provider = p['create_possible_matches_internal']
            try:
                cutoff = float(p.get('cutoff', '0.4'))
            except ValueError:
                cutoff = 0.4
            try:
                max_matches = int(p.get('max-matches', '10'))
            except ValueError:
                max_matches = 10

            frontend_logger.info(
                'Looking for possible matches in gallery database '
                'for non-matched archives (cutoff: {}, max matches: {}) '
                'using provider filter "{}"'.format(cutoff, max_matches,
                                                    provider))
            matching_thread = threading.Thread(
                name='match_unmatched_worker',
                target=generate_possible_matches_for_archives,
                args=(archives, ),
                kwargs={
                    'logger': frontend_logger,
                    'cutoff': cutoff,
                    'max_matches': max_matches,
                    'filters': (provider, ),
                    'match_local': True,
                    'match_web': False
                })
            matching_thread.daemon = True
            matching_thread.start()
            messages.success(request, 'Starting internal match worker.')

    params = {
        'sort': 'create_date',
        'asc_desc': 'desc',
        'filename': title,
    }

    for k, v in get.items():
        params[k] = v

    for k in archive_filter_keys:
        if k not in params:
            params[k] = ''

    results = filter_archives_simple(params)

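    # Prefetch each archive's possible gallery matches together with their artist tags.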
    results = results.filter(gallery__isnull=True).prefetch_related(
        Prefetch('archivematches_set',
                 queryset=ArchiveMatches.objects.select_related(
                     'gallery', 'archive').prefetch_related(
                         Prefetch('gallery__tags',
                                  queryset=Tag.objects.filter(
                                      scope__exact='artist'),
                                  to_attr='artist_tags')),
                 to_attr='possible_galleries'),
        'possible_galleries__gallery',
    )

    if 'no-custom-tags' in get:
        results = results.annotate(
            num_custom_tags=Count('custom_tags')).filter(num_custom_tags=0)
    if 'with-possible-matches' in get:
        results = results.annotate(
            n_possible_matches=Count('possible_matches')).filter(
                n_possible_matches__gt=0)

    paginator = Paginator(results, 100)
    try:
        results = paginator.page(page)
    except (InvalidPage, EmptyPage):
        results = paginator.page(paginator.num_pages)

    d = {
        'results': results,
        'providers': Gallery.objects.all().values_list(
            'provider', flat=True).distinct(),
        'matchers': crawler_settings.provider_context.get_matchers(
            crawler_settings, force=True),
        'api_key': crawler_settings.api_key,
        'form': form,
    }
    return render(request, "viewer/archives_not_matched.html", d)
Example #4
def archive_group_edit(request: HttpRequest, pk: Optional[int] = None, slug: Optional[str] = None) -> HttpResponse:
    """Edit an ArchiveGroup and add archives to it."""
    try:
        if pk is not None:
            archive_group_instance = ArchiveGroup.objects.get(pk=pk)
        elif slug is not None:
            archive_group_instance = ArchiveGroup.objects.get(title_slug=slug)
        else:
            raise Http404("Archive Group does not exist")
    except ArchiveGroup.DoesNotExist:
        raise Http404("Archive Group does not exist")
    if not archive_group_instance.public and not request.user.is_authenticated:
        raise Http404("Archive Group does not exist")

    get = request.GET
    p = request.POST

    user_reason = p.get('reason', '')

    d = {
        'archive_group': archive_group_instance,
    }

    if request.POST.get('submit-archive-group'):
        # create a form instance and populate it with data from the request:
        edit_form = ArchiveGroupCreateOrEditForm(request.POST, instance=archive_group_instance)
        # check whether it's valid:
        if edit_form.is_valid():
            new_archive_group = edit_form.save()
            message = 'Archive group successfully modified'
            messages.success(request, message)
            frontend_logger.info("User {}: {}".format(request.user.username, message))
            event_log(
                request.user,
                'CHANGE_ARCHIVE_GROUP',
                content_object=new_archive_group,
                result='changed'
            )
            # return HttpResponseRedirect(request.META["HTTP_REFERER"])
        else:
            messages.error(request, 'The provided data is not valid', extra_tags='danger')
            # return HttpResponseRedirect(request.META["HTTP_REFERER"])
    else:
        edit_form = ArchiveGroupCreateOrEditForm(instance=archive_group_instance)

    if 'add_to_group' in p:

        pks = []
        for k, v in p.items():
            if k.startswith("sel-"):
                # keys look like "sel-<pk>"; the value holds the selected pk
                pks.append(v)
        archives = Archive.objects.filter(id__in=pks).order_by('-create_date')

        for archive in archives:
            if not ArchiveGroupEntry.objects.filter(archive=archive, archive_group=archive_group_instance).exists():

                archive_group_entry = ArchiveGroupEntry(archive=archive, archive_group=archive_group_instance)
                archive_group_entry.save()

                message = 'Adding archive: {}, link: {}, to group: {}, link {}'.format(
                    archive.title, archive.get_absolute_url(),
                    archive_group_instance.title, archive_group_instance.get_absolute_url()
                )
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(request.user.username, message))
                messages.success(request, message)
                event_log(
                    request.user,
                    'ADD_ARCHIVE_TO_GROUP',
                    content_object=archive,
                    reason=user_reason,
                    result='added'
                )

    d.update(edit_form=edit_form)

    # Multi add search form

    title = get.get("title", '')
    tags = get.get("tags", '')

    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1

    if 'clear' in get:
        search_form = ArchiveSearchForm()
    else:
        search_form = ArchiveSearchForm(initial={'title': title, 'tags': tags})

    params = {
        'sort': 'create_date',
        'asc_desc': 'desc',
        'filename': title,
    }

    for k, v in get.items():
        params[k] = v

    for k in archive_filter_keys:
        if k not in params:
            params[k] = ''

    search_results = filter_archives_simple(params)

    search_results = search_results.exclude(archive_groups=archive_group_instance)

    search_results = search_results.prefetch_related('gallery')

    paginator = Paginator(search_results, 100)
    try:
        search_results = paginator.page(page)
    except (InvalidPage, EmptyPage):
        search_results = paginator.page(paginator.num_pages)

    d.update(search_form=search_form, search_results=search_results)

    return render(request, "viewer/archive_group_edit.html", d)
Example #5
def archives_not_matched_with_gallery(request: HttpRequest) -> HttpResponse:
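    """Collaborator view of unmatched archives; launch local matching or clear possible matches."""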
    p = request.POST
    get = request.GET

    title = get.get("title", '')
    tags = get.get("tags", '')

    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1

    if 'clear' in get:
        form = ArchiveSearchForm()
    else:
        form = ArchiveSearchForm(initial={'title': title, 'tags': tags})

    if p:
        pks = []
        for k, v in p.items():
            if k.startswith("sel-"):
                # keys look like "sel-<pk>"; the value holds the selected pk
                pks.append(v)

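        # Preserve the order in which the pks were submitted.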
        preserved = Case(
            *[When(pk=pk, then=pos) for pos, pk in enumerate(pks)])

        archives = Archive.objects.filter(id__in=pks).order_by(preserved)
        if 'create_possible_matches' in p:
            if thread_exists('match_unmatched_worker'):
                return render_error(
                    request, "Local matching worker is already running.")
            provider = p['create_possible_matches']
            try:
                cutoff = float(p.get('cutoff', '0.4'))
            except ValueError:
                cutoff = 0.4
            try:
                max_matches = int(p.get('max-matches', '10'))
            except ValueError:
                max_matches = 10

            frontend_logger.info(
                'User {}: Looking for possible matches in gallery database '
                'for non-matched archives (cutoff: {}, max matches: {}) '
                'using provider filter "{}"'.format(request.user.username,
                                                    cutoff, max_matches,
                                                    provider))
            matching_thread = threading.Thread(
                name='match_unmatched_worker',
                target=generate_possible_matches_for_archives,
                args=(archives, ),
                kwargs={
                    'logger': frontend_logger,
                    'cutoff': cutoff,
                    'max_matches': max_matches,
                    'filters': (provider, ),
                    'match_local': True,
                    'match_web': False
                })
            matching_thread.daemon = True
            matching_thread.start()
            messages.success(request, 'Starting internal match worker.')
        elif 'clear_possible_matches' in p:

            for archive in archives:
                archive.possible_matches.clear()

            frontend_logger.info(
                'User {}: Clearing possible matches for archives'.format(
                    request.user.username))
            messages.success(request, 'Clearing possible matches.')

    params = {
        'sort': 'create_date',
        'asc_desc': 'desc',
        'filename': title,
    }

    for k, v in get.items():
        params[k] = v

    for k in archive_filter_keys:
        if k not in params:
            params[k] = ''

    results = filter_archives_simple(params)

    if 'show-matched' not in get:
        results = results.filter(gallery__isnull=True)

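    # Prefetch each archive's possible gallery matches together with their artist tags.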
    results = results.prefetch_related(
        Prefetch('archivematches_set',
                 queryset=ArchiveMatches.objects.select_related(
                     'gallery', 'archive').prefetch_related(
                         Prefetch('gallery__tags',
                                  queryset=Tag.objects.filter(
                                      scope__exact='artist'),
                                  to_attr='artist_tags')),
                 to_attr='possible_galleries'),
        'possible_galleries__gallery',
    )

    if 'with-possible-matches' in get:
        results = results.annotate(
            n_possible_matches=Count('possible_matches')).filter(
                n_possible_matches__gt=0)

    paginator = Paginator(results, 50)
    try:
        results = paginator.page(page)
    except (InvalidPage, EmptyPage):
        results = paginator.page(paginator.num_pages)

    d = {
        'results': results,
        'providers': Gallery.objects.all().values_list(
            'provider', flat=True).distinct(),
        'form': form,
    }
    return render(request, "viewer/collaborators/unmatched_archives.html", d)
Example #6
def manage_archives(request: HttpRequest) -> HttpResponse:
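    """Bulk-manage archives: publish, unpublish, delete, update metadata, or add them to groups."""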
    p = request.POST
    get = request.GET

    title = get.get("title", '')
    tags = get.get("tags", '')

    user_reason = p.get('reason', '')

    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1

    if 'clear' in get:
        form = ArchiveSearchForm()
    else:
        form = ArchiveSearchForm(initial={'title': title, 'tags': tags})

    if p:
        pks = []
        for k, v in p.items():
            if k.startswith("sel-"):
                # keys look like "sel-<pk>"; the value holds the selected pk
                pks.append(v)

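        # Preserve the order in which the pks were submitted.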
        preserved = Case(
            *[When(pk=pk, then=pos) for pos, pk in enumerate(pks)])

        archives = Archive.objects.filter(id__in=pks).order_by(preserved)
        if 'publish_archives' in p and request.user.has_perm(
                'viewer.publish_archive'):
            for archive in archives:
                message = 'Publishing archive: {}, link: {}'.format(
                    archive.title, archive.get_absolute_url())
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(
                    request.user.username, message))
                messages.success(request, message)
                archive.set_public(reason=user_reason)
                event_log(request.user,
                          'PUBLISH_ARCHIVE',
                          reason=user_reason,
                          content_object=archive,
                          result='published')
        elif 'unpublish_archives' in p and request.user.has_perm(
                'viewer.publish_archive'):
            for archive in archives:
                message = 'Unpublishing archive: {}, link: {}'.format(
                    archive.title, archive.get_absolute_url())
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(
                    request.user.username, message))
                messages.success(request, message)
                archive.set_private(reason=user_reason)
                event_log(request.user,
                          'UNPUBLISH_ARCHIVE',
                          reason=user_reason,
                          content_object=archive,
                          result='unpublished')
        elif 'delete_archives' in p and request.user.has_perm(
                'viewer.delete_archive'):
            for archive in archives:
                message = 'Deleting archive: {}, link: {}, with its file: {} and associated gallery: {}'.format(
                    archive.title, archive.get_absolute_url(),
                    archive.zipped.path, archive.gallery)
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(
                    request.user.username, message))
                messages.success(request, message)
                gallery = archive.gallery
                if gallery:
                    gallery.mark_as_deleted()
                    archive.gallery = None
                archive.delete_all_files()
                archive.delete()
                event_log(request.user,
                          'DELETE_ARCHIVE',
                          content_object=gallery,
                          reason=user_reason,
                          result='deleted')
        elif 'update_metadata' in p and request.user.has_perm(
                'viewer.update_metadata'):
            for archive in archives:
                gallery = archive.gallery
                if not gallery:
                    continue

                message = 'Updating gallery API data for gallery: {} and related archives'.format(
                    gallery.get_absolute_url())
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(
                    request.user.username, message))
                messages.success(request, message)

                current_settings = Settings(
                    load_from_config=crawler_settings.config)

                if current_settings.workers.web_queue:
                    current_settings.set_update_metadata_options(
                        providers=(gallery.provider, ))

                    def gallery_callback(x: Optional['Gallery'],
                                         crawled_url: Optional[str],
                                         result: str) -> None:
                        event_log(request.user,
                                  'UPDATE_METADATA',
                                  reason=user_reason,
                                  content_object=x,
                                  result=result,
                                  data=crawled_url)

                    current_settings.workers.web_queue.enqueue_args_list(
                        (gallery.get_link(), ),
                        override_options=current_settings,
                        gallery_callback=gallery_callback)

                    frontend_logger.info(
                        'Updating gallery API data for gallery: {} and related archives'
                        .format(gallery.get_absolute_url()))
        elif 'add_to_group' in p and request.user.has_perm(
                'viewer.change_archivegroup'):

            if 'archive_group' in p:
                archive_group_ids = p.getlist('archive_group')

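                # Preserve the order in which the groups were selected.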
                preserved = Case(*[
                    When(pk=pk, then=pos)
                    for pos, pk in enumerate(archive_group_ids)
                ])

                archive_groups = ArchiveGroup.objects.filter(
                    pk__in=archive_group_ids).order_by(preserved)

                for archive in archives:
                    for archive_group in archive_groups:
                        if not ArchiveGroupEntry.objects.filter(
                                archive=archive,
                                archive_group=archive_group).exists():

                            archive_group_entry = ArchiveGroupEntry(
                                archive=archive, archive_group=archive_group)
                            archive_group_entry.save()

                            message = 'Adding archive: {}, link: {}, to group: {}, link {}'.format(
                                archive.title, archive.get_absolute_url(),
                                archive_group.title,
                                archive_group.get_absolute_url())
                            if 'reason' in p and p['reason'] != '':
                                message += ', reason: {}'.format(p['reason'])
                            frontend_logger.info("User {}: {}".format(
                                request.user.username, message))
                            messages.success(request, message)
                            event_log(request.user,
                                      'ADD_ARCHIVE_TO_GROUP',
                                      content_object=archive,
                                      reason=user_reason,
                                      result='added')

    params = {
        'sort': 'create_date',
        'asc_desc': 'desc',
        'filename': title,
    }

    for k, v in get.items():
        params[k] = v

    for k in archive_filter_keys:
        if k not in params:
            params[k] = ''

    results = filter_archives_simple(params)

    results = results.prefetch_related('gallery')

    paginator = Paginator(results, 100)
    try:
        results = paginator.page(page)
    except (InvalidPage, EmptyPage):
        results = paginator.page(paginator.num_pages)

    d = {'results': results, 'form': form}

    if request.user.has_perm('viewer.change_archivegroup'):
        group_form = ArchiveGroupSelectForm()
        d.update(group_form=group_form)

    return render(request, "viewer/collaborators/manage_archives.html", d)
Example #7
def archive_group_edit(request: HttpRequest,
                       pk: Optional[int] = None,
                       slug: Optional[str] = None) -> HttpResponse:
    """Edit an ArchiveGroup and its entries; optionally extract its archives or bulk-add new ones."""
    try:
        if pk is not None:
            archive_group_instance = ArchiveGroup.objects.prefetch_related(
                'archivegroupentry_set__archive').get(pk=pk)
        elif slug is not None:
            archive_group_instance = ArchiveGroup.objects.prefetch_related(
                'archivegroupentry_set__archive').get(title_slug=slug)
        else:
            raise Http404("Archive Group does not exist")
    except ArchiveGroup.DoesNotExist:
        raise Http404("Archive Group does not exist")
    if not archive_group_instance.public and not request.user.is_authenticated:
        raise Http404("Archive Group does not exist")

    get = request.GET
    p = request.POST

    user_reason = p.get('reason', '')

    d: dict[str, Any] = {
        'archive_group': archive_group_instance,
    }

    if 'extract_all' in get and request.user.has_perm('viewer.expand_archive'):
        archives_to_extract = archive_group_instance.archivegroupentry_set.select_for_update(
        ).filter(archive__extracted=False)
        with transaction.atomic():
            for archive_group_entry in archives_to_extract:
                archive_group_entry.archive.extract()
        return HttpResponseRedirect(request.META["HTTP_REFERER"])

    if request.POST.get('submit-archive-group'):
        # create a form instance and populate it with data from the request:
        edit_form = ArchiveGroupCreateOrEditForm(
            request.POST, instance=archive_group_instance)
        archive_group_entry_formset = ArchiveGroupEntryFormSet(
            request.POST, instance=archive_group_instance)

        # check whether it's valid:
        if edit_form.is_valid() and archive_group_entry_formset.is_valid():
            new_archive_group = edit_form.save()

            # archive_group_entry_formset.save()
            archive_group_entries = archive_group_entry_formset.save(
                commit=False)
            for archive_group_entry in archive_group_entries:
                archive_group_entry.save()
            for archive_group_entry in archive_group_entry_formset.deleted_objects:
                archive_group_entry.delete()

            message = 'Archive group successfully modified'
            messages.success(request, message)
            logger.info("User {}: {}".format(request.user.username, message))
            event_log(request.user,
                      'CHANGE_ARCHIVE_GROUP',
                      content_object=new_archive_group,
                      result='changed')
            return HttpResponseRedirect(
                reverse('viewer:archive-group-edit',
                        args=[archive_group_instance.title_slug]))
        else:
            messages.error(request,
                           'The provided data is not valid',
                           extra_tags='danger')

        # archive_group_entry_formset = ArchiveGroupEntryFormSet(
        #     request.POST,
        #     initial=[{'archive_group_id': archive_group_instance.id}] * 2,
        #     queryset=ArchiveGroupEntry.objects.filter(archive_group=archive_group_instance),
        #     prefix='archive_group_entries'
        # )

    else:
        edit_form = ArchiveGroupCreateOrEditForm(
            instance=archive_group_instance)
        archive_group_entry_formset = ArchiveGroupEntryFormSet(
            instance=archive_group_instance)

    if 'add_to_group' in p:

        pks = []
        for k, v in p.items():
            if k.startswith("sel-"):
                # keys look like "sel-<pk>"; the value holds the selected pk
                pks.append(v)

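        # Preserve the order in which the pks were submitted.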
        preserved = Case(
            *[When(pk=pk, then=pos) for pos, pk in enumerate(pks)])

        archives = Archive.objects.filter(id__in=pks).order_by(preserved)

        for archive in archives:
            if not ArchiveGroupEntry.objects.filter(
                    archive=archive,
                    archive_group=archive_group_instance).exists():

                archive_group_entry = ArchiveGroupEntry(
                    archive=archive, archive_group=archive_group_instance)
                archive_group_entry.save()

                message = 'Adding archive: {}, link: {}, to group: {}, link {}'.format(
                    archive.title, archive.get_absolute_url(),
                    archive_group_instance.title,
                    archive_group_instance.get_absolute_url())
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                logger.info("User {}: {}".format(request.user.username,
                                                 message))
                messages.success(request, message)
                event_log(request.user,
                          'ADD_ARCHIVE_TO_GROUP',
                          content_object=archive,
                          reason=user_reason,
                          result='added')

        return HttpResponseRedirect(
            reverse('viewer:archive-group-edit',
                    args=[archive_group_instance.title_slug]) + '?' +
            request.META['QUERY_STRING'])

    d.update(edit_form=edit_form)

    # Multi add search form

    title = get.get("title", '')
    tags = get.get("tags", '')

    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1

    if 'add_multiple' in get:
        if 'clear' in get:
            search_form = ArchiveSearchForm()
        else:
            search_form = ArchiveSearchForm(initial={
                'title': title,
                'tags': tags
            })

        params = {
            'sort': get.get("sort", 'create_date'),
            'asc_desc': get.get("asc_desc", 'desc'),
        }

        for k, v in get.items():
            params[k] = v

        for k in archive_filter_keys:
            if k not in params:
                params[k] = ''

        search_results = filter_archives_simple(params)

        search_results = search_results.exclude(
            archive_groups=archive_group_instance)

        if get.get('groupless'):
            search_results = search_results.filter(archive_groups__isnull=True)

        search_results = search_results.prefetch_related('gallery')

        paginator = Paginator(search_results, 100)
        try:
            search_results_page = paginator.page(page)
        except (InvalidPage, EmptyPage):
            search_results_page = paginator.page(paginator.num_pages)

        d.update(
            search_form=search_form,
            search_results=search_results_page,
        )

    d.update(archive_group_entry_formset=archive_group_entry_formset)

    return render(request, "viewer/archive_group_edit.html", d)