def submit_queue(request: HttpRequest) -> HttpResponse:
    """Review user-submitted galleries: deny them or queue them for download."""
    p = request.POST
    get = request.GET
    title = get.get("title", '')
    tags = get.get("tags", '')
    user_reason = p.get('reason', '')
    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1

    if 'clear' in get:
        form = GallerySearchForm()
    else:
        form = GallerySearchForm(initial={'title': title, 'tags': tags})

    if p:
        pks = []
        for k, v in p.items():
            if k.startswith("sel-"):
                # k, pk = k.split('-')
                # results[pk][k] = v
                pks.append(v)
        preserved = Case(*[When(pk=pk, then=pos) for pos, pk in enumerate(pks)])

        if 'denied' in get:
            results = Gallery.objects.submitted_galleries(
                id__in=pks).order_by(preserved)
        else:
            results = Gallery.objects.submitted_galleries(
                ~Q(status=Gallery.DENIED), id__in=pks).order_by(preserved)

        if 'deny_galleries' in p:
            for gallery in results:
                message = 'Denying gallery: {}, link: {}, source link: {}'.format(
                    gallery.title, gallery.get_absolute_url(), gallery.get_link())
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(
                    request.user.username, message))
                messages.success(request, message)
                gallery.mark_as_denied()
                event_log(
                    request.user,
                    'DENY_GALLERY',
                    reason=user_reason,
                    content_object=gallery,
                    result='denied'
                )
        elif 'download_galleries' in p:
            for gallery in results:
                message = 'Queueing gallery: {}, link: {}, source link: {}'.format(
                    gallery.title, gallery.get_absolute_url(), gallery.get_link())
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(
                    request.user.username, message))
                messages.success(request, message)

                event_log(
                    request.user,
                    'ACCEPT_GALLERY',
                    reason=user_reason,
                    content_object=gallery,
                    result='accepted'
                )

                # Force replace_metadata when queueing from this list, since it's mostly
                # used to download non-used galleries.
                current_settings = Settings(
                    load_from_config=crawler_settings.config)

                if current_settings.workers.web_queue:
                    current_settings.replace_metadata = True
                    current_settings.retry_failed = True

                    if 'reason' in p and p['reason'] != '':
                        reason = p['reason']
                        # Force limit string length (reason field max_length)
                        current_settings.archive_reason = reason[:200]
                        current_settings.archive_details = gallery.reason
                        current_settings.gallery_reason = reason[:200]
                    elif gallery.reason:
                        current_settings.archive_reason = gallery.reason

                    def archive_callback(x: Optional['Archive'], crawled_url: Optional[str], result: str) -> None:
                        event_log(
                            request.user,
                            'ADD_ARCHIVE',
                            reason=user_reason,
                            content_object=x,
                            result=result,
                            data=crawled_url
                        )

                    def gallery_callback(x: Optional['Gallery'], crawled_url: Optional[str], result: str) -> None:
                        event_log(
                            request.user,
                            'ADD_GALLERY',
                            reason=user_reason,
                            content_object=x,
                            result=result,
                            data=crawled_url
                        )

                    current_settings.workers.web_queue.enqueue_args_list(
                        (gallery.get_link(), ),
                        override_options=current_settings,
                        archive_callback=archive_callback,
                        gallery_callback=gallery_callback,
                    )

    providers = Gallery.objects.all().values_list('provider', flat=True).distinct()

    params = {}

    for k, v in get.items():
        params[k] = v

    for k in gallery_filter_keys:
        if k not in params:
            params[k] = ''

    results = filter_galleries_simple(params)

    if 'denied' in get:
        results = results.submitted_galleries().prefetch_related(
            'foundgallery_set')
    else:
        results = results.submitted_galleries(~Q(
            status=Gallery.DENIED)).prefetch_related('foundgallery_set')

    paginator = Paginator(results, 50)
    try:
        results = paginator.page(page)
    except (InvalidPage, EmptyPage):
        results = paginator.page(paginator.num_pages)

    d = {'results': results, 'providers': providers, 'form': form}

    return render(request, "viewer/collaborators/submit_queue.html", d)
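
# The Case/When ordering used in submit_queue keeps the galleries in the same
# order the user selected them. A minimal, self-contained sketch of that pattern
# as a standalone helper (hypothetical name, not used elsewhere in this module;
# assumes Case and When are the django.db.models expressions already imported
# for the view above):
def _order_by_pk_list(queryset, pks):
    # CASE WHEN pk=<pk> THEN <position> ... lets the database return rows in
    # exactly the order the primary keys were supplied.
    preserved = Case(*[When(pk=pk, then=pos) for pos, pk in enumerate(pks)])
    return queryset.filter(pk__in=pks).order_by(preserved)
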
def missing_archives_for_galleries(request: HttpRequest) -> HttpResponse:
    """List galleries without a matching archive; staff can delete them, queue downloads or re-fetch metadata."""
    p = request.POST
    get = request.GET
    title = get.get("title", '')
    tags = get.get("tags", '')
    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1

    if 'clear' in get:
        form = GallerySearchForm()
    else:
        form = GallerySearchForm(initial={'title': title, 'tags': tags})

    if p and request.user.is_staff:
        pks = []
        for k, v in p.items():
            if k.startswith("sel-"):
                # k, pk = k.split('-')
                # results[pk][k] = v
                pks.append(v)
        results = Gallery.objects.filter(id__in=pks).order_by('-create_date')

        if 'delete_galleries' in p:
            for gallery in results:
                message = 'Removing gallery: {}, link: {}'.format(
                    gallery.title, gallery.get_link())
                frontend_logger.info(message)
                messages.success(request, message)
                gallery.mark_as_deleted()
        elif 'download_galleries' in p:
            for gallery in results:
                message = 'Queueing gallery: {}, link: {}'.format(
                    gallery.title, gallery.get_link())
                frontend_logger.info(message)
                messages.success(request, message)

                # Force replace_metadata when queueing from this list, since it's mostly
                # used to download non-used galleries.
                current_settings = Settings(
                    load_from_config=crawler_settings.config)

                if current_settings.workers.web_queue:
                    current_settings.replace_metadata = True
                    current_settings.retry_failed = True

                    if 'reason' in p and p['reason'] != '':
                        reason = p['reason']
                        # Force limit string length (reason field max_length)
                        current_settings.archive_reason = reason[:200]
                        current_settings.archive_details = gallery.reason
                        current_settings.gallery_reason = reason[:200]
                    elif gallery.reason:
                        current_settings.archive_reason = gallery.reason

                    current_settings.workers.web_queue.enqueue_args_list(
                        (gallery.get_link(), ),
                        override_options=current_settings)
        elif 'recall_api' in p:
            message = 'Recalling API for {} galleries'.format(results.count())
            frontend_logger.info(message)
            messages.success(request, message)

            gallery_links = [x.get_link() for x in results]
            gallery_providers = list(
                results.values_list('provider', flat=True).distinct())

            current_settings = Settings(
                load_from_config=crawler_settings.config)

            if current_settings.workers.web_queue:
                current_settings.set_update_metadata_options(
                    providers=gallery_providers)

                current_settings.workers.web_queue.enqueue_args_list(
                    gallery_links, override_options=current_settings)

    if 'force_public' in request.GET:
        force_public = True
    else:
        force_public = False

    if request.user.is_staff and not force_public:

        providers = Gallery.objects.all().values_list('provider', flat=True).distinct()

        params = {}

        for k, v in get.items():
            params[k] = v

        for k in gallery_filter_keys:
            if k not in params:
                params[k] = ''

        results = filter_galleries_simple(params)

        results = results.non_used_galleries().prefetch_related(
            'foundgallery_set')

        paginator = Paginator(results, 50)
        try:
            results = paginator.page(page)
        except (InvalidPage, EmptyPage):
            results = paginator.page(paginator.num_pages)

        d = {
            'results': results,
            'providers': providers,
            'force_public': force_public,
            'form': form
        }
    else:
        params = {}

        for k, v in get.items():
            params[k] = v

        for k in gallery_filter_keys:
            if k not in params:
                params[k] = ''

        results = filter_galleries_simple(params)

        results = results.non_used_galleries(
            public=True, provider__in=['panda', 'fakku'])

        d = {'results': results}

    return render(request, "viewer/archives_missing_for_galleries.html", d)
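
# Both views above repeat the same GET-parameters-to-filter-dict preparation
# before calling filter_galleries_simple. A minimal sketch of that logic as a
# standalone helper (hypothetical, shown for illustration only; the views keep
# their inline version):
def _build_filter_params(get_params, filter_keys):
    # Copy every query-string parameter, then backfill any missing filter key
    # with an empty string so the filter function always receives a full dict.
    params = {k: v for k, v in get_params.items()}
    for key in filter_keys:
        params.setdefault(key, '')
    return params
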
def json_parser(request: HttpRequest) -> HttpResponse:
    """JSON API endpoint used by internal pages, the userscript and the remotesite command."""
    response = {}

    if request.method == 'POST':
        if not request.body:
            response['error'] = 'Empty request'
            return HttpResponse(json.dumps(response), content_type="application/json; charset=utf-8")
        data = json.loads(request.body.decode("utf-8"))
        if 'api_key' not in data:
            response['error'] = 'Missing API key'
            return HttpResponse(json.dumps(response), content_type="application/json; charset=utf-8")
        elif data['api_key'] != crawler_settings.api_key:
            response['error'] = 'Incorrect API key'
            return HttpResponse(json.dumps(response), content_type="application/json; charset=utf-8")
        # send some 'ok' back
        else:
            if 'operation' not in data or 'args' not in data:
                response['error'] = 'Wrong format'
            else:
                args = data['args']
                response = {}
                # Used by internal pages and userscript
                if data['operation'] == 'webcrawler' and 'link' in args:
                    if not crawler_settings.workers.web_queue:
                        response['error'] = 'The webqueue is not running'
                    elif 'downloader' in args:
                        current_settings = Settings(load_from_config=crawler_settings.config)
                        if not current_settings.workers.web_queue:
                            response['error'] = 'The webqueue is not running'
                        else:
                            current_settings.allow_downloaders_only([args['downloader']], True, True, True)
                            archive = None
                            parsers = current_settings.provider_context.get_parsers(current_settings, crawler_logger)
                            for parser in parsers:
                                if parser.id_from_url_implemented():
                                    urls_filtered = parser.filter_accepted_urls((args['link'], ))
                                    for url_filtered in urls_filtered:
                                        gallery_gid = parser.id_from_url(url_filtered)
                                        if gallery_gid:
                                            archive = Archive.objects.filter(gallery__gid=gallery_gid).first()
                                    if urls_filtered:
                                        break
                            current_settings.workers.web_queue.enqueue_args_list((args['link'],), override_options=current_settings)
                            if archive:
                                response['message'] = "Archive exists, crawling to check for redownload: " + args['link']
                            else:
                                response['message'] = "Crawling: " + args['link']
                    else:
                        if 'parentLink' in args:
                            parent_archive = None
                            parsers = crawler_settings.provider_context.get_parsers(crawler_settings, crawler_logger)
                            for parser in parsers:
                                if parser.id_from_url_implemented():
                                    urls_filtered = parser.filter_accepted_urls((args['parentLink'],))
                                    for url_filtered in urls_filtered:
                                        gallery_gid = parser.id_from_url(url_filtered)
                                        if gallery_gid:
                                            parent_archive = Archive.objects.filter(gallery__gid=gallery_gid).first()
                                    if urls_filtered:
                                        break
                            if parent_archive:
                                link = parent_archive.gallery.get_link()
                                if 'action' in args and args['action'] == 'replaceFound':
                                    parent_archive.gallery.mark_as_deleted()
                                    parent_archive.gallery = None
                                    parent_archive.delete_all_files()
                                    parent_archive.delete_files_but_archive()
                                    parent_archive.delete()
                                    response['message'] = "Crawling: " + args['link'] + ", deleting parent: " + link
                                    crawler_settings.workers.web_queue.enqueue_args(args['link'])
                                elif 'action' in args and args['action'] == 'queueFound':
                                    response['message'] = "Crawling: " + args['link'] + ", keeping parent: " + link
                                    crawler_settings.workers.web_queue.enqueue_args(args['link'])
                                else:
                                    response['message'] = "Please confirm deletion of parent: " + link
                                    response['action'] = 'confirmDeletion'
                            else:
                                archive = None
                                parsers = crawler_settings.provider_context.get_parsers(crawler_settings, crawler_logger)
                                for parser in parsers:
                                    if parser.id_from_url_implemented():
                                        urls_filtered = parser.filter_accepted_urls((args['link'],))
                                        for url_filtered in urls_filtered:
                                            gallery_gid = parser.id_from_url(url_filtered)
                                            if gallery_gid:
                                                archive = Archive.objects.filter(gallery__gid=gallery_gid).first()
                                        if urls_filtered:
                                            break
                                if archive:
                                    response['message'] = "Archive exists, crawling to check for redownload: " + args['link']
                                else:
                                    response['message'] = "Crawling: " + args['link']
                                crawler_settings.workers.web_queue.enqueue_args(args['link'])
                        else:
                            archive = None
                            parsers = crawler_settings.provider_context.get_parsers(crawler_settings, crawler_logger)
                            for parser in parsers:
                                if parser.id_from_url_implemented():
                                    urls_filtered = parser.filter_accepted_urls((args['link'],))
                                    for url_filtered in urls_filtered:
                                        gallery_gid = parser.id_from_url(url_filtered)
                                        if gallery_gid:
                                            archive = Archive.objects.filter(gallery__gid=gallery_gid).first()
                                    if urls_filtered:
                                        break
                            if archive:
                                response['message'] = "Archive exists, crawling to check for redownload: " + args['link']
                            else:
                                response['message'] = "Crawling: " + args['link']
                            crawler_settings.workers.web_queue.enqueue_args(args['link'])
                    if not response:
                        response['error'] = 'Could not parse request'
                    return HttpResponse(json.dumps(response), content_type="application/json; charset=utf-8")
                # Used by remotesite command
                elif data['operation'] == 'archive_request':
                    archives_query = Archive.objects.filter_non_existent(
                        crawler_settings.MEDIA_ROOT, gallery__gid__in=args)
                    archives = [{'gid': archive.gallery.gid,
                                 'id': archive.id,
                                 'zipped': archive.zipped.name,
                                 'filesize': archive.filesize} for archive in archives_query]
                    response_text = json.dumps({'result': archives})
                    return HttpResponse(response_text, content_type="application/json; charset=utf-8")
                # Used by remotesite command
                elif data['operation'] in ('queue_archives', 'queue_galleries'):
                    urls = args
                    new_urls_set = set()
                    gids_set = set()
                    parsers = crawler_settings.provider_context.get_parsers(crawler_settings, crawler_logger)
                    for parser in parsers:
                        if parser.id_from_url_implemented():
                            urls_filtered = parser.filter_accepted_urls(urls)
                            for url in urls_filtered:
                                gid = parser.id_from_url(url)
                                gids_set.add(gid)
                    gids_list = list(gids_set)
                    existing_galleries = Gallery.objects.filter(gid__in=gids_list)
                    for gallery_object in existing_galleries:
                        if gallery_object.is_submitted():
                            gallery_object.delete()
                        # Delete queued galleries that failed and do not have archives.
                        elif data['operation'] == 'queue_archives' and "failed" in gallery_object.dl_type and not gallery_object.archive_set.all():
                            gallery_object.delete()
                        elif data['operation'] == 'queue_archives' and not gallery_object.archive_set.all():
                            gallery_object.delete()
                    already_present_gids = list(Gallery.objects.filter(gid__in=gids_list).values_list('gid', flat=True))
                    # new_gids = list(gids_set - set(already_present_gids))
                    for parser in parsers:
                        if parser.id_from_url_implemented():
                            urls_filtered = parser.filter_accepted_urls(urls)
                            for url in urls_filtered:
                                gid = parser.id_from_url(url)
                                if gid not in already_present_gids:
                                    new_urls_set.add(url)
                    pages_links = list(new_urls_set)
                    if len(pages_links) > 0:
                        current_settings = Settings(load_from_config=crawler_settings.config)
                        if data['operation'] == 'queue_galleries':
                            current_settings.allow_type_downloaders_only('info')
                        elif data['operation'] == 'queue_archives':
                            if 'archive_reason' in data:
                                current_settings.archive_reason = data['archive_reason']
                            if 'archive_details' in data:
                                current_settings.archive_details = data['archive_details']
                            current_settings.allow_type_downloaders_only('fake')
                        if current_settings.workers.web_queue:
                            current_settings.workers.web_queue.enqueue_args_list(pages_links, override_options=current_settings)
                    else:
                        pages_links = []
                    return HttpResponse(json.dumps({'result': str(len(pages_links))}), content_type="application/json; charset=utf-8")
                # Used by remotesite command
                elif data['operation'] == 'links':
                    links = args
                    if len(links) > 0:
                        crawler_settings.workers.web_queue.enqueue_args_list(links)
                    return HttpResponse(json.dumps({'result': str(len(links))}), content_type="application/json; charset=utf-8")
                # Used by archive page
                elif data['operation'] == 'match_archive':
                    archive = Archive.objects.filter(pk=args['archive'])
                    if archive:
                        generate_possible_matches_for_archives(
                            archive,
                            filters=(args['match_filter'],),
                            logger=crawler_logger,
                            match_local=False,
                            match_web=True,
                        )
                    return HttpResponse(json.dumps({'message': 'web matcher done, check the logs for results'}),
                                        content_type="application/json; charset=utf-8")
                elif data['operation'] == 'match_archive_internally':
                    archive = Archive.objects.get(pk=args['archive'])
                    if archive:
                        clear_title = True if 'clear' in args else False
                        provider_filter = args.get('provider', '')
                        try:
                            cutoff = float(request.GET.get('cutoff', '0.4'))
                        except ValueError:
                            cutoff = 0.4
                        try:
                            max_matches = int(request.GET.get('max-matches', '10'))
                        except ValueError:
                            max_matches = 10

                        archive.generate_possible_matches(
                            clear_title=clear_title, provider_filter=provider_filter,
                            cutoff=cutoff, max_matches=max_matches)
                        archive.save()
                    return HttpResponse(json.dumps({'message': 'internal matcher done, check the archive for results'}),
                                        content_type="application/json; charset=utf-8")
                else:
                    response['error'] = 'Unknown function'
    elif request.method == 'GET':
        data = request.GET
        if 'api_key' not in data:
            response['error'] = 'Missing API key'
            return HttpResponse(json.dumps(response), content_type="application/json; charset=utf-8")
        elif data['api_key'] != crawler_settings.api_key:
            response['error'] = 'Incorrect API key'
            return HttpResponse(json.dumps(response), content_type="application/json; charset=utf-8")
        # send some 'ok' back
        else:
            if 'gc' in data:
                args = data.copy()

                for k in gallery_filter_keys:
                    if k not in args:
                        args[k] = ''

                keys = ("sort", "asc_desc")

                for k in keys:
                    if k not in args:
                        args[k] = ''

                # args = data
                # Already authorized by api key.
                args['public'] = False

                results = filter_galleries_no_request(args)

                if not results:
                    return HttpResponse(json.dumps([]), content_type="application/json; charset=utf-8")
                response_text = json.dumps(
                    [{
                        'gid': gallery.gid,
                        'token': gallery.token,
                        'title': gallery.title,
                        'title_jpn': gallery.title_jpn,
                        'category': gallery.category,
                        'uploader': gallery.uploader,
                        'comment': gallery.comment,
                        'posted': int(timestamp_or_zero(gallery.posted)),
                        'filecount': gallery.filecount,
                        'filesize': gallery.filesize,
                        'expunged': gallery.expunged,
                        'rating': gallery.rating,
                        'hidden': gallery.hidden,
                        'fjord': gallery.fjord,
                        'public': gallery.public,
                        'provider': gallery.provider,
                        'dl_type': gallery.dl_type,
                        'tags': gallery.tag_list(),
                        'link': gallery.get_link(),
                        'thumbnail': request.build_absolute_uri(
                            reverse('viewer:gallery-thumb', args=(gallery.pk,))) if gallery.thumbnail else '',
                        'thumbnail_url': gallery.thumbnail_url
                    } for gallery in results
                    ],
                    # indent=2,
                    sort_keys=True,
                    ensure_ascii=False,
                )
                return HttpResponse(response_text, content_type="application/json; charset=utf-8")
            else:
                response['error'] = 'Unknown function'
    else:
        response['error'] = 'Unsupported method: {}'.format(request.method)

    return HttpResponse(json.dumps(response), content_type="application/json; charset=utf-8")
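
# Example client call for json_parser's 'webcrawler' operation (illustrative
# sketch only: the endpoint URL and API key below are placeholders, not values
# defined in this module):
#
#   import json
#   import urllib.request
#
#   payload = json.dumps({
#       'api_key': 'YOUR_API_KEY',
#       'operation': 'webcrawler',
#       'args': {'link': 'https://example.com/some-gallery'},
#   }).encode('utf-8')
#   req = urllib.request.Request(
#       'https://your-server.example/jsonapi/',  # hypothetical URL routed to this view
#       data=payload,
#       headers={'Content-Type': 'application/json; charset=utf-8'},
#   )
#   with urllib.request.urlopen(req) as resp:
#       print(json.loads(resp.read().decode('utf-8')))  # e.g. {'message': 'Crawling: ...'}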