# Imports needed by these views (if not already present at the top of the
# module). Project-internal names (Gallery, Archive, GallerySearchForm,
# Settings, crawler_settings, frontend_logger, gallery_filter_keys,
# filter_galleries_simple, event_log) are assumed to come from the surrounding
# project; only the Django/stdlib imports are listed here.
from typing import Optional

from django.contrib import messages
from django.core.paginator import EmptyPage, InvalidPage, Paginator
from django.db.models import Case, Q, When
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render


def missing_archives_for_galleries(request: HttpRequest) -> HttpResponse:
    """List galleries with no associated archive; staff can bulk delete,
    queue downloads, or recall the provider API for the selected entries."""
    p = request.POST
    get = request.GET

    title = get.get("title", '')
    tags = get.get("tags", '')

    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1

    if 'clear' in get:
        form = GallerySearchForm()
    else:
        form = GallerySearchForm(initial={'title': title, 'tags': tags})

    if p and request.user.is_staff:
        # Selection checkboxes are posted as "sel-<pk>"; the value is the pk.
        pks = []
        for k, v in p.items():
            if k.startswith("sel-"):
                pks.append(v)
        results = Gallery.objects.filter(id__in=pks).order_by('-create_date')

        if 'delete_galleries' in p:
            for gallery in results:
                message = 'Removing gallery: {}, link: {}'.format(
                    gallery.title, gallery.get_link())
                frontend_logger.info(message)
                messages.success(request, message)
                gallery.mark_as_deleted()
        elif 'download_galleries' in p:
            for gallery in results:
                message = 'Queueing gallery: {}, link: {}'.format(
                    gallery.title, gallery.get_link())
                frontend_logger.info(message)
                messages.success(request, message)

                # Force replace_metadata when queueing from this list, since
                # it's mostly used to download non-used galleries.
                current_settings = Settings(
                    load_from_config=crawler_settings.config)

                if current_settings.workers.web_queue:
                    current_settings.replace_metadata = True
                    current_settings.retry_failed = True

                    if 'reason' in p and p['reason'] != '':
                        reason = p['reason']
                        # Force limit string length (reason field max_length).
                        current_settings.archive_reason = reason[:200]
                        current_settings.archive_details = gallery.reason
                        current_settings.gallery_reason = reason[:200]
                    elif gallery.reason:
                        current_settings.archive_reason = gallery.reason

                    current_settings.workers.web_queue.enqueue_args_list(
                        (gallery.get_link(), ),
                        override_options=current_settings)
        elif 'recall_api' in p:
            message = 'Recalling API for {} galleries'.format(results.count())
            frontend_logger.info(message)
            messages.success(request, message)

            gallery_links = [x.get_link() for x in results]
            gallery_providers = list(
                results.values_list('provider', flat=True).distinct())

            current_settings = Settings(
                load_from_config=crawler_settings.config)

            if current_settings.workers.web_queue:
                current_settings.set_update_metadata_options(
                    providers=gallery_providers)
                current_settings.workers.web_queue.enqueue_args_list(
                    gallery_links, override_options=current_settings)

    force_public = 'force_public' in request.GET

    if request.user.is_staff and not force_public:
        providers = Gallery.objects.all().values_list(
            'provider', flat=True).distinct()

        params = {}
        for k, v in get.items():
            params[k] = v
        for k in gallery_filter_keys:
            if k not in params:
                params[k] = ''

        results = filter_galleries_simple(params)
        results = results.non_used_galleries().prefetch_related(
            'foundgallery_set')

        paginator = Paginator(results, 50)
        try:
            results = paginator.page(page)
        except (InvalidPage, EmptyPage):
            results = paginator.page(paginator.num_pages)

        d = {
            'results': results,
            'providers': providers,
            'force_public': force_public,
            'form': form
        }
    else:
        params = {}
        for k, v in get.items():
            params[k] = v
        for k in gallery_filter_keys:
            if k not in params:
                params[k] = ''

        results = filter_galleries_simple(params)
        results = results.non_used_galleries(
            public=True, provider__in=['panda', 'fakku'])
        d = {'results': results}
    return render(request, "viewer/archives_missing_for_galleries.html", d)
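
# Both views in this section collect checkbox selections posted as "sel-<pk>"
# form fields. A minimal standalone sketch of that convention (the helper name
# is hypothetical; the views above and below inline this loop instead):
from typing import List

from django.http import QueryDict


def _selected_pks(post: QueryDict) -> List[str]:
    """Return the value of every "sel-<pk>" selection checkbox in a POST."""
    return [v for k, v in post.items() if k.startswith("sel-")]
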
def submit_queue(request: HttpRequest) -> HttpResponse:
    """Review queue for user-submitted galleries: deny the selected entries,
    or accept them and queue them for download."""
    p = request.POST
    get = request.GET

    title = get.get("title", '')
    tags = get.get("tags", '')
    user_reason = p.get('reason', '')

    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1

    if 'clear' in get:
        form = GallerySearchForm()
    else:
        form = GallerySearchForm(initial={'title': title, 'tags': tags})

    if p:
        # Selection checkboxes are posted as "sel-<pk>"; the value is the pk.
        pks = []
        for k, v in p.items():
            if k.startswith("sel-"):
                pks.append(v)
        # Preserve the user's selection order when fetching the galleries.
        preserved = Case(
            *[When(pk=pk, then=pos) for pos, pk in enumerate(pks)])

        if 'denied' in get:
            results = Gallery.objects.submitted_galleries(
                id__in=pks).order_by(preserved)
        else:
            results = Gallery.objects.submitted_galleries(
                ~Q(status=Gallery.DENIED), id__in=pks).order_by(preserved)

        if 'deny_galleries' in p:
            for gallery in results:
                message = 'Denying gallery: {}, link: {}, source link: {}'.format(
                    gallery.title, gallery.get_absolute_url(), gallery.get_link())
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(
                    request.user.username, message))
                messages.success(request, message)
                gallery.mark_as_denied()
                event_log(
                    request.user,
                    'DENY_GALLERY',
                    reason=user_reason,
                    content_object=gallery,
                    result='denied')
        elif 'download_galleries' in p:
            for gallery in results:
                message = 'Queueing gallery: {}, link: {}, source link: {}'.format(
                    gallery.title, gallery.get_absolute_url(), gallery.get_link())
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(
                    request.user.username, message))
                messages.success(request, message)
                event_log(
                    request.user,
                    'ACCEPT_GALLERY',
                    reason=user_reason,
                    content_object=gallery,
                    result='accepted')

                # Force replace_metadata when queueing from this list, since
                # it's mostly used to download non-used galleries.
                current_settings = Settings(
                    load_from_config=crawler_settings.config)

                if current_settings.workers.web_queue:
                    current_settings.replace_metadata = True
                    current_settings.retry_failed = True

                    if 'reason' in p and p['reason'] != '':
                        reason = p['reason']
                        # Force limit string length (reason field max_length).
                        current_settings.archive_reason = reason[:200]
                        current_settings.archive_details = gallery.reason
                        current_settings.gallery_reason = reason[:200]
                    elif gallery.reason:
                        current_settings.archive_reason = gallery.reason

                    def archive_callback(x: Optional['Archive'], crawled_url: Optional[str], result: str) -> None:
                        event_log(
                            request.user,
                            'ADD_ARCHIVE',
                            reason=user_reason,
                            content_object=x,
                            result=result,
                            data=crawled_url)

                    def gallery_callback(x: Optional['Gallery'], crawled_url: Optional[str], result: str) -> None:
                        event_log(
                            request.user,
                            'ADD_GALLERY',
                            reason=user_reason,
                            content_object=x,
                            result=result,
                            data=crawled_url)

                    current_settings.workers.web_queue.enqueue_args_list(
                        (gallery.get_link(), ),
                        override_options=current_settings,
                        archive_callback=archive_callback,
                        gallery_callback=gallery_callback,
                    )

    providers = Gallery.objects.all().values_list(
        'provider', flat=True).distinct()

    params = {}
    for k, v in get.items():
        params[k] = v
    for k in gallery_filter_keys:
        if k not in params:
            params[k] = ''

    results = filter_galleries_simple(params)

    if 'denied' in get:
        results = results.submitted_galleries().prefetch_related(
            'foundgallery_set')
    else:
        results = results.submitted_galleries(~Q(
            status=Gallery.DENIED)).prefetch_related('foundgallery_set')

    paginator = Paginator(results, 50)
    try:
        results = paginator.page(page)
    except (InvalidPage, EmptyPage):
        results = paginator.page(paginator.num_pages)

    d = {'results': results, 'providers': providers, 'form': form}

    return render(request, "viewer/collaborators/submit_queue.html", d)
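
# The Case/When annotation used in submit_queue is a general Django trick for
# ordering a queryset by an explicit list of primary keys. A standalone sketch
# of the same technique (hypothetical helper, not called by the views above):
def _order_by_pk_list(qs, pks):
    """Return qs filtered to pks and ordered to match the order of pks."""
    preserved = Case(*[When(pk=pk, then=pos) for pos, pk in enumerate(pks)])
    return qs.filter(pk__in=pks).order_by(preserved)

# Usage: _order_by_pk_list(Gallery.objects.all(), ['3', '1', '2']) keeps the
# user's selection order instead of the model's default ordering.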