def recall_api(request: HttpRequest, pk: int) -> HttpResponse:
    """Recall provider API, if possible.

    NOTE(review): this module defines ``recall_api`` twice; this earlier,
    staff-only version is shadowed by the later permission-based one.
    Consider deleting one of the two.

    Staff-only: re-enqueues the archive's associated gallery so its
    metadata is refreshed from the provider, then redirects back.
    """
    if not request.user.is_staff:
        return render_error(request, "You need to be an admin to recall the API.")
    try:
        archive = Archive.objects.get(pk=pk)
    except Archive.DoesNotExist:
        raise Http404("Archive does not exist")
    if not archive.gallery_id:
        return render_error(request, "No gallery associated with this archive.")
    gallery = Gallery.objects.get(pk=archive.gallery_id)
    current_settings = Settings(load_from_config=crawler_settings.config)
    # Fix: also require a known provider before enqueueing (mirrors the
    # guard in the later recall_api definition); an empty provider tuple
    # would make set_update_metadata_options pointless.
    if current_settings.workers.web_queue and gallery.provider:
        current_settings.set_update_metadata_options(providers=(gallery.provider, ))
        current_settings.workers.web_queue.enqueue_args_list(
            (gallery.get_link(), ), override_options=current_settings)
        frontend_logger.info(
            'Updating gallery API data for gallery: {} and related archives'.format(
                gallery.get_absolute_url()))
    # Fix: the Referer header is optional; falling back to the gallery page
    # avoids a KeyError (HTTP 500) when it is missing.
    return HttpResponseRedirect(request.META.get("HTTP_REFERER", gallery.get_absolute_url()))
def recall_api(request: HttpRequest, pk: int) -> HttpResponse:
    """Recall provider API, if possible.

    Requires the ``viewer.update_metadata`` permission. Re-enqueues the
    archive's associated gallery so its metadata is refreshed from the
    provider, logging the outcome per crawled URL, then redirects back.
    """
    if not request.user.has_perm('viewer.update_metadata'):
        return render_error(request, "You don't have the permission to refresh source metadata on an Archive.")
    try:
        archive = Archive.objects.get(pk=pk)
    except Archive.DoesNotExist:
        raise Http404("Archive does not exist")
    if not archive.gallery_id:
        return render_error(request, "No gallery associated with this archive.")
    gallery = Gallery.objects.get(pk=archive.gallery_id)
    current_settings = Settings(load_from_config=crawler_settings.config)
    # Only enqueue when a worker queue exists and the gallery has a known
    # provider to query.
    if current_settings.workers.web_queue and gallery.provider:
        current_settings.set_update_metadata_options(providers=(gallery.provider,))

        def gallery_callback(x: Optional['Gallery'], crawled_url: Optional[str], result: str) -> None:
            # Audit-log each crawled gallery once the queue processes it.
            event_log(
                request.user,
                'UPDATE_METADATA',
                content_object=x,
                result=result,
                data=crawled_url
            )

        current_settings.workers.web_queue.enqueue_args_list(
            (gallery.get_link(),),
            override_options=current_settings,
            gallery_callback=gallery_callback
        )

        logger.info(
            'Updating gallery API data for gallery: {} and related archives'.format(
                gallery.get_absolute_url()
            )
        )
    # Fix: the Referer header is optional; falling back to the gallery page
    # avoids a KeyError (HTTP 500) when it is missing.
    return HttpResponseRedirect(request.META.get("HTTP_REFERER", gallery.get_absolute_url()))
def missing_archives_for_galleries(request: HttpRequest) -> HttpResponse:
    """List galleries that have no associated archive.

    GET renders a filterable, paginated listing (staff see all providers;
    non-staff/forced-public see only public 'panda'/'fakku' galleries).
    Staff POST actions on selected galleries ("sel-*" keys):
    ``delete_galleries``, ``download_galleries`` or ``recall_api``.
    """
    p = request.POST
    get = request.GET
    title = get.get("title", '')
    tags = get.get("tags", '')
    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1
    if 'clear' in get:
        form = GallerySearchForm()
    else:
        form = GallerySearchForm(initial={'title': title, 'tags': tags})
    if p and request.user.is_staff:
        # Selected gallery pks arrive as values of "sel-<pk>" checkboxes.
        pks = [v for k, v in p.items() if k.startswith("sel-")]
        results = Gallery.objects.filter(id__in=pks).order_by('-create_date')
        if 'delete_galleries' in p:
            for gallery in results:
                message = 'Removing gallery: {}, link: {}'.format(
                    gallery.title, gallery.get_link())
                frontend_logger.info(message)
                messages.success(request, message)
                gallery.mark_as_deleted()
        elif 'download_galleries' in p:
            for gallery in results:
                message = 'Queueing gallery: {}, link: {}'.format(
                    gallery.title, gallery.get_link())
                frontend_logger.info(message)
                messages.success(request, message)
                # Force replace_metadata when queueing from this list, since it's mostly used to download non used.
                current_settings = Settings(load_from_config=crawler_settings.config)
                if current_settings.workers.web_queue:
                    current_settings.replace_metadata = True
                    current_settings.retry_failed = True
                    if 'reason' in p and p['reason'] != '':
                        reason = p['reason']
                        # Force limit string length (reason field max_length)
                        current_settings.archive_reason = reason[:200]
                        current_settings.archive_details = gallery.reason
                        current_settings.gallery_reason = reason[:200]
                    elif gallery.reason:
                        current_settings.archive_reason = gallery.reason
                    current_settings.workers.web_queue.enqueue_args_list(
                        (gallery.get_link(), ), override_options=current_settings)
        elif 'recall_api' in p:
            message = 'Recalling API for {} galleries'.format(results.count())
            frontend_logger.info(message)
            messages.success(request, message)
            gallery_links = [x.get_link() for x in results]
            gallery_providers = list(
                results.values_list('provider', flat=True).distinct())
            current_settings = Settings(load_from_config=crawler_settings.config)
            if current_settings.workers.web_queue:
                current_settings.set_update_metadata_options(
                    providers=gallery_providers)
                current_settings.workers.web_queue.enqueue_args_list(
                    gallery_links, override_options=current_settings)
    # Idiom fix: membership test instead of a 4-line if/else flag.
    force_public = 'force_public' in request.GET
    if request.user.is_staff and not force_public:
        providers = Gallery.objects.all().values_list('provider', flat=True).distinct()
        # Start from the query string, then blank-fill any missing filter keys.
        params = dict(get.items())
        for k in gallery_filter_keys:
            if k not in params:
                params[k] = ''
        results = filter_galleries_simple(params)
        results = results.non_used_galleries().prefetch_related('foundgallery_set')
        paginator = Paginator(results, 50)
        try:
            results = paginator.page(page)
        except (InvalidPage, EmptyPage):
            # Out-of-range page numbers clamp to the last page.
            results = paginator.page(paginator.num_pages)
        d = {
            'results': results,
            'providers': providers,
            'force_public': force_public,
            'form': form
        }
    else:
        params = dict(get.items())
        for k in gallery_filter_keys:
            if k not in params:
                params[k] = ''
        results = filter_galleries_simple(params)
        # Public view is restricted to these providers, unpaginated.
        results = results.non_used_galleries(public=True, provider__in=['panda', 'fakku'])
        d = {'results': results}
    return render(request, "viewer/archives_missing_for_galleries.html", d)
def manage_archives(request: HttpRequest) -> HttpResponse:
    """Collaborator view to manage archives in bulk.

    GET renders a filterable, paginated listing. POST actions on selected
    archives ("sel-*" keys), each gated by its own permission:
    ``publish_archives``, ``unpublish_archives``, ``delete_archives``,
    ``update_metadata`` and ``add_to_group``. All actions are audit-logged
    through ``event_log``.
    """
    p = request.POST
    get = request.GET
    title = get.get("title", '')
    tags = get.get("tags", '')
    user_reason = p.get('reason', '')
    try:
        page = int(get.get("page", '1'))
    except ValueError:
        page = 1
    if 'clear' in get:
        form = ArchiveSearchForm()
    else:
        form = ArchiveSearchForm(initial={'title': title, 'tags': tags})
    if p:
        # Selected archive pks arrive as values of "sel-<pk>" checkboxes;
        # the Case expression preserves their submission order.
        pks = [v for k, v in p.items() if k.startswith("sel-")]
        preserved = Case(*[When(pk=pk, then=pos) for pos, pk in enumerate(pks)])
        archives = Archive.objects.filter(id__in=pks).order_by(preserved)
        if 'publish_archives' in p and request.user.has_perm('viewer.publish_archive'):
            for archive in archives:
                message = 'Publishing archive: {}, link: {}'.format(
                    archive.title, archive.get_absolute_url())
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(request.user.username, message))
                messages.success(request, message)
                archive.set_public(reason=user_reason)
                event_log(request.user,
                          'PUBLISH_ARCHIVE',
                          reason=user_reason,
                          content_object=archive,
                          result='published')
        elif 'unpublish_archives' in p and request.user.has_perm('viewer.publish_archive'):
            for archive in archives:
                message = 'Unpublishing archive: {}, link: {}'.format(
                    archive.title, archive.get_absolute_url())
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(request.user.username, message))
                messages.success(request, message)
                archive.set_private(reason=user_reason)
                event_log(request.user,
                          'UNPUBLISH_ARCHIVE',
                          reason=user_reason,
                          content_object=archive,
                          result='unpublished')
        elif 'delete_archives' in p and request.user.has_perm('viewer.delete_archive'):
            for archive in archives:
                message = 'Deleting archive: {}, link: {}, with it\'s file: {} and associated gallery: {}'.format(
                    archive.title, archive.get_absolute_url(),
                    archive.zipped.path, archive.gallery)
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(request.user.username, message))
                messages.success(request, message)
                gallery = archive.gallery
                # Fix: an archive may have no associated gallery (other views
                # explicitly test archive.gallery_id); guard against
                # AttributeError on None.
                if gallery:
                    gallery.mark_as_deleted()
                    archive.gallery = None
                archive.delete_all_files()
                archive.delete()
                event_log(request.user,
                          'DELETE_ARCHIVE',
                          content_object=gallery,
                          reason=user_reason,
                          result='deleted')
        elif 'update_metadata' in p and request.user.has_perm('viewer.update_metadata'):
            for archive in archives:
                gallery = archive.gallery
                # Fix: skip archives without a gallery instead of crashing on
                # gallery.get_absolute_url().
                if not gallery:
                    continue
                message = 'Updating gallery API data for gallery: {} and related archives'.format(
                    gallery.get_absolute_url())
                if 'reason' in p and p['reason'] != '':
                    message += ', reason: {}'.format(p['reason'])
                frontend_logger.info("User {}: {}".format(request.user.username, message))
                messages.success(request, message)
                current_settings = Settings(load_from_config=crawler_settings.config)
                if current_settings.workers.web_queue:
                    current_settings.set_update_metadata_options(providers=(gallery.provider, ))

                    def gallery_callback(x: Optional['Gallery'], crawled_url: Optional[str], result: str) -> None:
                        # Audit-log each crawled gallery once the queue processes it.
                        event_log(request.user,
                                  'UPDATE_METADATA',
                                  reason=user_reason,
                                  content_object=x,
                                  result=result,
                                  data=crawled_url)

                    current_settings.workers.web_queue.enqueue_args_list(
                        (gallery.get_link(), ),
                        override_options=current_settings,
                        gallery_callback=gallery_callback)
                    # Fix: removed a second frontend_logger.info call that
                    # duplicated the message already logged above.
        elif 'add_to_group' in p and request.user.has_perm('viewer.change_archivegroup'):
            if 'archive_group' in p:
                archive_group_ids = p.getlist('archive_group')
                # Preserve the submitted group order, same trick as for pks.
                preserved = Case(*[
                    When(pk=pk, then=pos)
                    for pos, pk in enumerate(archive_group_ids)
                ])
                archive_groups = ArchiveGroup.objects.filter(
                    pk__in=archive_group_ids).order_by(preserved)
                for archive in archives:
                    for archive_group in archive_groups:
                        # Only create missing memberships; re-adding is a no-op.
                        if not ArchiveGroupEntry.objects.filter(
                                archive=archive, archive_group=archive_group).exists():
                            archive_group_entry = ArchiveGroupEntry(
                                archive=archive, archive_group=archive_group)
                            archive_group_entry.save()
                            message = 'Adding archive: {}, link: {}, to group: {}, link {}'.format(
                                archive.title, archive.get_absolute_url(),
                                archive_group.title, archive_group.get_absolute_url())
                            if 'reason' in p and p['reason'] != '':
                                message += ', reason: {}'.format(p['reason'])
                            frontend_logger.info("User {}: {}".format(
                                request.user.username, message))
                            messages.success(request, message)
                            event_log(request.user,
                                      'ADD_ARCHIVE_TO_GROUP',
                                      content_object=archive,
                                      reason=user_reason,
                                      result='added')
    params = {
        'sort': 'create_date',
        'asc_desc': 'desc',
        'filename': title,
    }
    # Query-string values override the defaults; blank-fill remaining keys.
    for k, v in get.items():
        params[k] = v
    for k in archive_filter_keys:
        if k not in params:
            params[k] = ''
    results = filter_archives_simple(params)
    results = results.prefetch_related('gallery')
    paginator = Paginator(results, 100)
    try:
        results = paginator.page(page)
    except (InvalidPage, EmptyPage):
        # Out-of-range page numbers clamp to the last page.
        results = paginator.page(paginator.num_pages)
    d = {'results': results, 'form': form}
    if request.user.has_perm('viewer.change_archivegroup'):
        group_form = ArchiveGroupSelectForm()
        d.update(group_form=group_form)
    return render(request, "viewer/collaborators/manage_archives.html", d)