def reset_ref(request, tref):
    """
    Reset cached state for the ref ``tref``.

    For a book-level ref this refreshes the library's index record, the
    VersionState, and the TOC entry, then notifies the other servers (or
    Varnish) as configured.  For a section/segment ref only that ref's
    Varnish cache is purged.

    :param request: the HTTP request (unused beyond routing)
    :param tref: textual ref whose cached state should be reset
    :return: HttpResponseRedirect whose ``m=`` query param reports what was reset
    """
    oref = model.Ref(tref)

    if not oref.is_book_level():
        if USE_VARNISH:
            invalidate_ref(oref)
            return HttpResponseRedirect("/{}?m=Reset-Ref".format(oref.url()))
        return HttpResponseRedirect("/?m=Nothing-to-Reset")

    index = oref.index
    model.library.refresh_index_record_in_cache(index)
    version_state = model.VersionState(index=index)
    version_state.refresh()
    model.library.update_index_in_toc(index)
    if MULTISERVER_ENABLED:
        # Propagate the refresh to the other web servers.
        for event in ("refresh_index_record_in_cache", "update_index_in_toc"):
            server_coordinator.publish_event("library", event, [index.title])
    elif USE_VARNISH:
        invalidate_title(index.title)
    return HttpResponseRedirect("/{}?m=Reset-Index".format(oref.url()))
def reset_ref(request, tref):
    """
    resets cache, versionstate, toc, varnish, & book TOC template

    :param request: the HTTP request object
    :param tref: textual ref whose cached state should be reset
    :return: HttpResponseRedirect whose ``m=`` query param reports what was reset
    """
    oref = model.Ref(tref)
    if oref.is_book_level():
        # Book-level reset: rebuild the index record, version state, and TOC entry.
        model.library.refresh_index_record_in_cache(oref.index)
        vs = model.VersionState(index=oref.index)
        vs.refresh()
        model.library.update_index_in_toc(oref.index)
        if MULTISERVER_ENABLED:
            # Tell the other web servers to do the same cache refreshes.
            server_coordinator.publish_event("library", "refresh_index_record_in_cache", [oref.index.title])
            server_coordinator.publish_event("library", "update_index_in_toc", [oref.index.title])
        elif USE_VARNISH:
            invalidate_title(oref.index.title)
        return HttpResponseRedirect("/{}?m=Reset-Index".format(oref.url()))
    elif USE_VARNISH:
        # Section/segment-level ref: just purge that ref from Varnish.
        invalidate_ref(oref)
        return HttpResponseRedirect("/{}?m=Reset-Ref".format(oref.url()))
    else:
        # Not book-level and no Varnish configured: nothing to do.
        return HttpResponseRedirect("/?m=Nothing-to-Reset")
def reset_varnish(request, tref):
    """
    Purge Varnish's cached pages for ``tref``.

    Book-level refs additionally invalidate the cached index and counts.
    When Varnish is not enabled this is a no-op apart from the redirect.

    :param request: the HTTP request (unused beyond routing)
    :param tref: textual ref to purge
    :return: HttpResponseRedirect reporting what (if anything) was purged
    """
    if not USE_VARNISH:
        return HttpResponseRedirect("/?m=Varnish-Not-Enabled")

    oref = model.Ref(tref)
    if oref.is_book_level():
        invalidate_index(oref.index)
        invalidate_counts(oref.index)
    invalidate_ref(oref)
    return HttpResponseRedirect("/?m=Varnish-Reset-For-{}".format(oref.url()))
def delete_links(self, **kwargs):
    """
    Deletes all of the citation generated links from text 'title'

    Each deleted link's two refs are also purged from Varnish (best effort:
    refs that no longer parse are silently skipped).

    :return: None
    """
    links = self._load_links()
    for link in links:
        if USE_VARNISH:
            # Purge both sides of the link before deleting it.
            try:
                invalidate_ref(Ref(link.refs[0]))
            except InputError:
                pass
            try:
                invalidate_ref(Ref(link.refs[1]))
            except InputError:
                pass
        self._delete_link(link)
def delete_links(self, **kwargs):
    """
    Delete every citation-generated link belonging to this text.

    Before each deletion, both of the link's refs are purged from Varnish
    (best effort: refs that no longer parse are ignored).

    :return: None
    """
    for link in self._load_links():
        if USE_VARNISH:
            for tref in (link.refs[0], link.refs[1]):
                try:
                    invalidate_ref(Ref(tref))
                except InputError:
                    pass
        self._delete_link(link)
def delete_links_from_text(title, user):
    """
    Delete all auto-generated citation links whose citing ref falls within
    the text ``title``, recording each deletion against ``user``.

    Both refs of every deleted link are purged from Varnish (best effort:
    refs that no longer parse are ignored).

    :param title: title of the text whose generated links should go
    :param user: id of the user to credit with the deletions
    :return: None
    """
    title_regex = Ref(title).regex()
    matching_links = LinkSet({"refs.0": {"$regex": title_regex}, "generated_by": "add_links_from_text"})
    for link in matching_links:
        if USE_VARNISH:
            for tref in (link.refs[0], link.refs[1]):
                try:
                    invalidate_ref(Ref(tref))
                except InputError:
                    pass
        tracker.delete(user, Link, link._id)
def delete_links_from_text(title, user):
    """
    Deletes all of the citation generated links from text 'title'

    :param title: title of the text whose auto-generated links are deleted
    :param user: id of the user to record the deletions against
    :return: None
    """
    regex = Ref(title).regex()
    # Only links citing from within `title` (refs.0) that this process generated.
    links = LinkSet({"refs.0": {"$regex": regex}, "generated_by": "add_links_from_text"})
    for link in links:
        if USE_VARNISH:
            # Purge both sides of the link; skip refs that no longer parse.
            try:
                invalidate_ref(Ref(link.refs[0]))
            except InputError:
                pass
            try:
                invalidate_ref(Ref(link.refs[1]))
            except InputError:
                pass
        tracker.delete(user, Link, link._id)
def post_modify_text(user, action, oref, lang, vtitle, old_text, curr_text, version_id, **kwargs) -> None:
    """
    Post-save hook for a text modification: log the change, purge Varnish,
    refresh/auto-add links, then recount and reindex the section.

    :param user: id of the user who made the change
    :param action: history action name (e.g. "add" / "edit")
    :param oref: Ref of the modified text
    :param lang: language code of the modified version
    :param vtitle: version title of the modified version
    :param old_text: text content before the change
    :param curr_text: text content after the change
    :param version_id: _id of the version document (tags auto-generated links)
    :return: None
    """
    model.log_text(user, action, oref, lang, vtitle, old_text, curr_text, **kwargs)
    if USE_VARNISH:
        invalidate_ref(oref, lang=lang, version=vtitle, purge=True)
        # Hoisted: next/prev section refs were previously computed twice each.
        # Neighbors are purged too, since they render prev/next navigation.
        next_ref = oref.next_section_ref()
        if next_ref:
            invalidate_ref(next_ref, lang=lang, version=vtitle, purge=True)
        prev_ref = oref.prev_section_ref()
        if prev_ref:
            invalidate_ref(prev_ref, lang=lang, version=vtitle, purge=True)
    if not kwargs.get("skip_links", None):
        from sefaria.helper.link import add_links_from_text
        # Some commentaries can generate links to their base text automatically
        linker = oref.autolinker(user=user)
        if linker:
            linker.refresh_links(**kwargs)
        # scan text for links to auto add
        add_links_from_text(oref, lang, curr_text, version_id, user, **kwargs)
        if USE_VARNISH:
            invalidate_linked(oref)
    count_and_index(oref, lang, vtitle, to_count=kwargs.get("count_after", 1))
def refresh_links(self, **kwargs):
    """
    This function both adds links and deletes pre existing ones that are no
    longer valid, by virtue of the fact that they were not detected as
    commentary links while iterating over the text.

    Varnish is purged (best effort) for every base-text-side ref touched.

    :param kwargs: passed through to the link-building internals
    :return: None
    """
    existing_links = self._load_links()
    found_links = self._build_links_internal(self._requested_oref)
    for exLink in existing_links:
        for r in exLink.refs:
            if self._title not in r:  # current base ref
                # Skip the side of the link that is not within this base text.
                continue
            if USE_VARNISH:
                try:
                    invalidate_ref(Ref(r))
                except InputError:
                    pass
            if r not in found_links:
                # The rescan no longer produces this link; drop it.
                self._delete_link(exLink)
            # Only one ref per link lies within the base text; stop here.
            break
def refresh_links(self, **kwargs):
    """
    Re-scan the text and reconcile its auto links: build the set of links
    the scan finds now, then delete any previously existing link that the
    rescan no longer supports.  Varnish is purged (best effort) for each
    base-text-side ref touched.

    :param kwargs: passed through to the link-building internals
    :return: None
    """
    previous_links = self._load_links()
    current_links = self._build_links_internal(self._requested_oref)
    for old_link in previous_links:
        for tref in old_link.refs:
            # Only the side of the link within this base text is examined.
            if self._title not in tref:  # current base ref
                continue
            if USE_VARNISH:
                try:
                    invalidate_ref(Ref(tref))
                except InputError:
                    pass
            if tref not in current_links:
                self._delete_link(old_link)
            break
def modify_text(user, oref, vtitle, lang, text, vsource=None, **kwargs):
    """
    Updates a chunk of text, identified by oref, versionTitle, and lang, and records history.

    :param user: id of the editing user
    :param oref: Ref of the text being modified
    :param vtitle: version title
    :param lang: language code of the version
    :param text: the new text content
    :param vsource: optional new version source to record
    :return: the TextChunk (saved if the content changed)
    :raises InputError: if the version is locked and the user is not staff
    """
    chunk = model.TextChunk(oref, lang, vtitle)
    if getattr(chunk.version(), "status", "") == "locked" and not model.user_profile.is_user_staff(user):
        raise InputError("This text has been locked against further edits.")
    # Parentheses added for clarity; this is the precedence Python already applied.
    action = (kwargs.get("type") or "edit") if chunk.text else "add"
    old_text = chunk.text
    chunk.text = text
    if vsource:
        chunk.versionSource = vsource  # todo: log this change
    if chunk.save():
        model.log_text(user, action, oref, lang, vtitle, old_text, text, **kwargs)
        if USE_VARNISH:
            invalidate_ref(oref, lang=lang, version=vtitle, purge=True)
            # Hoisted: next/prev section refs were previously computed twice each.
            next_ref = oref.next_section_ref()
            if next_ref:
                invalidate_ref(next_ref, lang=lang, version=vtitle, purge=True)
            prev_ref = oref.prev_section_ref()
            if prev_ref:
                invalidate_ref(prev_ref, lang=lang, version=vtitle, purge=True)
        if not kwargs.get("skip_links", None):
            from sefaria.helper.link import add_links_from_text
            # Some commentaries can generate links to their base text automatically
            linker = oref.autolinker(user=user)
            if linker:
                linker.refresh_links(**kwargs)
            # scan text for links to auto add
            add_links_from_text(oref, lang, chunk.text, chunk.full_version._id, user, **kwargs)
            if USE_VARNISH:
                invalidate_linked(oref)
    return chunk
def add_links_from_text(oref, lang, text, text_id, user, **kwargs):
    """
    Scan a text for explicit references to other texts
    and automatically add new links between
    ref and the mentioned text.

    text["text"] may be a list of segments, an individual segment, or None.

    The set of no longer supported links (`existingLinks` - `found`) is deleted.
    If Varnish is used, all linked refs, old and new, are refreshed

    Returns `links` - the list of links added.
    """
    if not text:
        return []
    elif isinstance(text, list):
        # Recurse into each segment so links attach at segment granularity.
        subrefs = oref.subrefs(len(text))
        links = []
        for i in range(len(text)):
            single = add_links_from_text(subrefs[i], lang, text[i], text_id, user, **kwargs)
            links += single
        return links
    elif isinstance(text, str):
        """
        Keeps three lists:
        * existingLinks - The links that existed before the text was rescanned
        * found - The links found in this scan of the text
        * links - The new links added in this scan of the text

        The set of no longer supported links (`existingLinks` - `found`) is deleted.
        The set of all links (`existingLinks` + `Links`) is refreshed in Varnish.
        """
        existingLinks = LinkSet({
            "refs": oref.normal(),
            "auto": True,
            "generated_by": "add_links_from_text",
            "source_text_oid": text_id
        }).array()  # Added the array here to force population, so that new links don't end up in this set

        # Hoisted: oref.normal() was previously recomputed inside both loops.
        base_ref_normal = oref.normal()
        found = []  # The normal refs of the links found in this text
        links = []  # New link objects created by this process
        if kwargs.get('citing_only') is not None:
            citing_only = kwargs['citing_only']
        else:
            citing_only = True
        refs = library.get_refs_in_string(text, lang, citing_only=citing_only)

        for linked_oref in refs:
            link = {
                # Note -- ref of the citing text is in the first position
                "refs": [base_ref_normal, linked_oref.normal()],
                "type": "",
                "auto": True,
                "generated_by": "add_links_from_text",
                "source_text_oid": text_id
            }
            found += [linked_oref.normal()]  # Keep this here, since tracker.add will throw an error if the link exists
            try:
                tracker.add(user, Link, link, **kwargs)
                links += [link]
                if USE_VARNISH:
                    invalidate_ref(linked_oref)
            except InputError:  # was `as e` with e unused
                pass

        # Remove existing links that are no longer supported by the text
        for exLink in existingLinks:
            for r in exLink.refs:
                if r == base_ref_normal:  # current base ref
                    continue
                if USE_VARNISH:
                    try:
                        invalidate_ref(Ref(r))
                    except InputError:
                        pass
                if r not in found:
                    tracker.delete(user, Link, exLink._id)
                break
        return links
def add_links_from_text(oref, lang, text, text_id, user, **kwargs):
    """
    Scan a text for explicit references to other texts
    and automatically add new links between
    ref and the mentioned text.

    text["text"] may be a list of segments, an individual segment, or None.

    The set of no longer supported links (`existingLinks` - `found`) is deleted.
    If Varnish is used, all linked refs, old and new, are refreshed

    Returns `links` - the list of links added.
    """
    if not text:
        return []
    elif isinstance(text, list):
        # Recurse into each segment so links attach at segment granularity.
        subrefs = oref.subrefs(len(text))
        links = []
        for i in range(len(text)):
            single = add_links_from_text(subrefs[i], lang, text[i], text_id, user, **kwargs)
            links += single
        return links
    elif isinstance(text, str):  # was `basestring`, which does not exist in Python 3
        """
        Keeps three lists:
        * existingLinks - The links that existed before the text was rescanned
        * found - The links found in this scan of the text
        * links - The new links added in this scan of the text

        The set of no longer supported links (`existingLinks` - `found`) is deleted.
        The set of all links (`existingLinks` + `Links`) is refreshed in Varnish.
        """
        existingLinks = LinkSet({
            "refs": oref.normal(),
            "auto": True,
            "generated_by": "add_links_from_text",
            "source_text_oid": text_id
        }).array()  # Added the array here to force population, so that new links don't end up in this set

        # Hoisted: oref.normal() was previously recomputed inside both loops.
        base_ref_normal = oref.normal()
        found = []  # The normal refs of the links found in this text
        links = []  # New link objects created by this process
        if kwargs.get('citing_only') is not None:
            citing_only = kwargs['citing_only']
        else:
            citing_only = True
        refs = library.get_refs_in_string(text, lang, citing_only=citing_only)

        for linked_oref in refs:
            link = {
                # Note -- ref of the citing text is in the first position
                "refs": [base_ref_normal, linked_oref.normal()],
                "type": "",
                "auto": True,
                "generated_by": "add_links_from_text",
                "source_text_oid": text_id
            }
            found += [linked_oref.normal()]  # Keep this here, since tracker.add will throw an error if the link exists
            try:
                tracker.add(user, Link, link, **kwargs)
                links += [link]
                if USE_VARNISH:
                    invalidate_ref(linked_oref)
            except InputError:  # was `as e` with e unused
                pass

        # Remove existing links that are no longer supported by the text
        for exLink in existingLinks:
            for r in exLink.refs:
                if r == base_ref_normal:  # current base ref
                    continue
                if USE_VARNISH:
                    try:
                        invalidate_ref(Ref(r))
                    except InputError:
                        pass
                if r not in found:
                    tracker.delete(user, Link, exLink._id)
                break
        return links