def parse_and_calculate_links(self):
    """Calculate What Links Here data for links going out from this.

    Also returns a parsed version of the current html, because that
    is a byproduct of the process, and is useful.
    """
    revision = self.current_revision
    if not revision:
        return ""

    # Reverse "what links here" links may be stale, and re-rendering
    # re-adds them. The parser's parse() is often called multiple
    # times per document, so this cleanup cannot live there reliably.
    self.links_from().delete()
    # The DocumentImage rows for this document are likewise rebuilt
    # by the render, so drop the old ones first.
    DocumentImage.objects.filter(document=self).delete()

    from kitsune.wiki.parser import WhatLinksHereParser, wiki_to_html

    return wiki_to_html(
        revision.content,
        locale=self.locale,
        doc_id=self.id,
        parser_cls=WhatLinksHereParser,
    )
def save(self, *args, **kwargs):
    """Set slug on first save and parse information to html.

    The slug is derived from the related group's name and generated
    only once (when it is still empty); the rendered HTML copy of the
    wiki markup is refreshed on every save so it never drifts from
    the source text.
    """
    if not self.slug:
        self.slug = slugify(self.group.name)
    self.information_html = wiki_to_html(self.information)
    # Zero-argument super() — the file already uses Python 3-only
    # features (f-strings, walrus), so the legacy two-argument form
    # was just noise.
    super().save(*args, **kwargs)
def preview_revision(request):
    """Create an HTML fragment preview of the posted wiki syntax."""
    statsd.incr('wiki.preview')
    content = request.POST.get('content', '')
    # TODO: Get doc ID from JSON.
    context = {'content': wiki_to_html(content, request.LANGUAGE_CODE)}
    context.update(showfor_data())
    return render(request, 'wiki/preview.html', context)
def make_result(self, hit):
    """Build the search-result dict for a forum-thread hit."""
    # Anchor the URL to the specific post within the thread.
    post_url = (
        reverse(
            "forums.posts",
            kwargs={"forum_slug": hit.forum_slug, "thread_id": hit.thread_id},
        )
        + f"#post-{hit.meta.id}"
    )
    # Render the wiki markup, strip the tags, and cap the summary length.
    summary = strip_html(wiki_to_html(hit.content))[:1000]
    return {
        "type": "thread",
        "title": hit.thread_title,
        "search_summary": summary,
        "last_updated": parser.parse(hit.updated),
        "url": post_url,
    }
def preview_revision(request):
    """Create an HTML fragment preview of the posted wiki syntax."""
    statsd.incr("wiki.preview")
    wiki_content = request.POST.get("content", "")
    slug = request.POST.get("slug")
    locale = request.POST.get("locale")
    # A known document narrows the product list to its own products;
    # otherwise fall back to all products.
    if slug and locale:
        document = get_object_or_404(Document, slug=slug, locale=locale)
        products = document.get_products()
    else:
        products = Product.objects.all()
    context = {
        "content": wiki_to_html(wiki_content, request.LANGUAGE_CODE),
        "products": products,
    }
    return render(request, "wiki/preview.html", context)
def preview_revision(request):
    """Create an HTML fragment preview of the posted wiki syntax."""
    statsd.incr('wiki.preview')
    wiki_content = request.POST.get('content', '')
    slug = request.POST.get('slug')
    locale = request.POST.get('locale')
    # Scope products to the document when one is identified; otherwise
    # every product is shown.
    products = (
        get_object_or_404(Document, slug=slug, locale=locale).get_products()
        if slug and locale
        else Product.objects.all()
    )
    return render(
        request,
        'wiki/preview.html',
        {
            'content': wiki_to_html(wiki_content, request.LANGUAGE_CODE),
            'products': products,
        },
    )
def parse_and_calculate_links(self):
    """Calculate What Links Here data for links going out from this.

    Also returns a parsed version of the current html, because that
    is a byproduct of the process, and is useful.
    """
    revision = self.current_revision
    if not revision:
        return ''

    # Reverse "what links here" links may be stale, and re-rendering
    # re-adds them. The parser's parse() is often called multiple
    # times per document, so the cleanup has to happen here instead.
    self.links_from().delete()

    from kitsune.wiki.parser import WhatLinksHereParser, wiki_to_html

    return wiki_to_html(
        revision.content,
        locale=self.locale,
        doc_id=self.id,
        parser_cls=WhatLinksHereParser,
    )
def test_redirect_prefix(self):
    """Test accuracy of the prefix that helps us recognize redirects."""
    rendered = wiki_to_html(REDIRECT_CONTENT % 'foo')
    assert rendered.startswith(REDIRECT_HTML)
def content_parsed(self):
    """Return this revision's wiki content rendered to HTML."""
    from kitsune.wiki.parser import wiki_to_html

    document = self.document
    return wiki_to_html(self.content, locale=document.locale, doc_id=document.id)
def test_redirect_prefix(self):
    """Test accuracy of the prefix that helps us recognize redirects."""
    rendered = wiki_to_html(REDIRECT_CONTENT % "foo")
    assert rendered.startswith(REDIRECT_HTML)
# NOTE(review): the next three lines are the tail of a
# parser.add_argument(...) call whose opening line is outside this
# chunk — presumably it registers the "document_id" option read in
# handle() below; confirm against the full file. Left byte-identical.
default=None,
help="Diff just one document",
)

def handle(self, *args, **options):
    """Diff each document's stored HTML against a fresh re-render.

    For every document (or only the one selected via the document-id
    option), re-parses the current revision's wiki content and prints
    the changed lines, exposing drift between the cached ``html``
    field and the current parser output.
    """
    documents = Document.objects
    if doc_id := options["document_id"]:
        documents = documents.filter(pk=doc_id)
    else:
        documents = documents.all()
    for document in documents:
        # Nothing to render without an approved/current revision.
        if not document.current_revision:
            continue
        try:
            diff = difflib.ndiff(
                document.html.split("\n"),
                wiki_to_html(
                    document.current_revision.content,
                    locale=document.locale,
                ).split("\n"),
            )
            # ndiff marks unchanged lines with a leading space; keep
            # only the added/removed/annotation lines.
            diff_lines = "\n".join(filter(lambda x: not x.startswith(" "), diff))
            if diff_lines:
                print(f"DOCUMENT_ID: {document.id}")
                print(diff_lines)
        except Exception as e:
            # When a single document was requested, surface the failure;
            # in bulk mode, report it and keep going.
            if doc_id:
                raise e
            print(f"DOCUMENT_ID EXCEPTION: {document.id}")
            print(f"{type(e)}: {e}")