def add_and_delete_invalid_commentary_links(oref, user, **kwargs):
    """
    This function both adds links and deletes pre-existing ones that are no longer
    valid, because they were not detected as commentary links while iterating over the text.
    :param oref:
    :param user:
    :param kwargs:
    :return:
    """
    assert oref.is_commentary()
    tref = oref.normal()
    commentary_book_name = oref.index.title

    ref_regex = oref.regex()
    existing_links = LinkSet({"refs": {"$regex": ref_regex}, "generated_by": "add_commentary_links"})
    found_links = add_commentary_links(oref, user, **kwargs)

    for exLink in existing_links:
        for r in exLink.refs:
            if commentary_book_name not in r:  # current base ref
                continue
            if USE_VARNISH:
                invalidate_ref(Ref(r))
            if r not in found_links:
                tracker.delete(user, Link, exLink._id)
            break
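# Hypothetical usage sketch, not part of the original module. It shows how the
# add-and-delete pass above might be invoked; "Rashi on Genesis" and `uid` are
# placeholder values, and any keyword arguments are forwarded to add_commentary_links().
def _example_resync_commentary_links(uid):
    commentary_ref = Ref("Rashi on Genesis")  # must satisfy commentary_ref.is_commentary()
    add_and_delete_invalid_commentary_links(commentary_ref, uid)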
def add_links_from_text(ref, lang, text, text_id, user, **kwargs):
    """
    Scan a text for explicit references to other texts
    and automatically add new links between
    ref and the mentioned text.

    text["text"] may be a list of segments, an individual segment, or None.

    Lev - added return on 13 July 2014
    """
    if not text:
        return []
    elif isinstance(text, list):
        links = []
        for i in range(len(text)):
            subtext = text[i]
            single = add_links_from_text("%s:%d" % (ref, i + 1), lang, subtext, text_id, user, **kwargs)
            links += single
        return links
    elif isinstance(text, basestring):
        existingLinks = LinkSet({
            "refs": ref,
            "auto": True,
            "generated_by": "add_links_from_text",
            "source_text_oid": text_id
        }).array()  # Added the array here to force population, so that new links don't end up in this set

        found = []  # The normal refs of the links found in this text
        links = []  # New link objects created by this process
        refs = library.get_refs_in_string(text, lang)
        for oref in refs:
            link = {
                "refs": [ref, oref.normal()],
                "type": "",
                "auto": True,
                "generated_by": "add_links_from_text",
                "source_text_oid": text_id
            }
            found += [oref.normal()]  # Keep this here, since tracker.add will throw an error if the link exists
            try:
                tracker.add(user, Link, link, **kwargs)
                links += [link]
            except InputError as e:
                pass

        # Remove existing links that are no longer supported by the text
        for exLink in existingLinks:
            for r in exLink.refs:
                if r == ref:  # current base ref
                    continue
                if r not in found:
                    tracker.delete(user, Link, exLink._id)
                break

        return links
def delete_links_from_text(title, user):
    """
    Deletes all of the citation generated links from text 'title'
    """
    regex = Ref(title).regex()
    links = LinkSet({"refs.0": {"$regex": regex}, "generated_by": "add_links_from_text"})
    for link in links:
        tracker.delete(user, Link, link._id)
def add_links_from_text(ref, lang, text, text_id, user, **kwargs):
    """
    Scan a text for explicit references to other texts
    and automatically add new links between
    ref and the mentioned text.

    text["text"] may be a list of segments, an individual segment, or None.

    Returns a list of links added.
    """
    if not text:
        return []
    elif isinstance(text, list):
        oref = Ref(ref)
        subrefs = oref.subrefs(len(text))
        links = []
        for i in range(len(text)):
            single = add_links_from_text(subrefs[i].normal(), lang, text[i], text_id, user, **kwargs)
            links += single
        return links
    elif isinstance(text, basestring):
        existingLinks = LinkSet({
            "refs": ref,
            "auto": True,
            "generated_by": "add_links_from_text",
            "source_text_oid": text_id
        }).array()  # Added the array here to force population, so that new links don't end up in this set

        found = []  # The normal refs of the links found in this text
        links = []  # New link objects created by this process
        refs = library.get_refs_in_string(text, lang)
        for oref in refs:
            link = {
                # Note -- ref of the citing text is in the first position
                "refs": [ref, oref.normal()],
                "type": "",
                "auto": True,
                "generated_by": "add_links_from_text",
                "source_text_oid": text_id
            }
            found += [oref.normal()]  # Keep this here, since tracker.add will throw an error if the link exists
            try:
                tracker.add(user, Link, link, **kwargs)
                links += [link]
            except InputError as e:
                pass

        # Remove existing links that are no longer supported by the text
        for exLink in existingLinks:
            for r in exLink.refs:
                if r == ref:  # current base ref
                    continue
                if r not in found:
                    tracker.delete(user, Link, exLink._id)
                break

        return links
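# Hypothetical usage sketch, not part of the original module. It feeds a list of
# segment strings through the scanner above; the title, language, segments, `uid`
# and `version_oid` are all placeholder values. Because the text is a list, each
# segment is scanned under its own subref (e.g. "Sefer Ploni 1:1", "Sefer Ploni 1:2").
def _example_scan_chapter_for_citations(uid, version_oid):
    segments = [
        u"As it is written in Genesis 1:1 ...",
        u"Compare this with Exodus 2:3 ...",
    ]
    return add_links_from_text("Sefer Ploni 1", "en", segments, version_oid, uid)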
def delete_commentary_links(title, user):
    """
    Deletes all of the automatically generated commentary links from text 'title'
    """
    regex = Ref(title).regex()
    links = LinkSet({"refs": {"$regex": regex}, "generated_by": "add_commentary_links"})
    for link in links:
        if USE_VARNISH:
            invalidate_ref(Ref(link.refs[0]))
            invalidate_ref(Ref(link.refs[1]))
        tracker.delete(user, Link, link._id)
def delete_links_from_text(title, user):
    """
    Deletes all of the citation generated links from text 'title'
    """
    regex = Ref(title).regex()
    links = LinkSet({"refs.0": {"$regex": regex}, "generated_by": "add_links_from_text"})
    for link in links:
        if USE_VARNISH:
            try:
                invalidate_ref(Ref(link.refs[0]))
            except InputError:
                pass
            try:
                invalidate_ref(Ref(link.refs[1]))
            except InputError:
                pass
        tracker.delete(user, Link, link._id)
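# Hypothetical usage sketch, not part of the original module. Because
# add_links_from_text stores the citing ref in refs[0], the "refs.0" query above
# only removes links generated *from* "Sefer Ploni" (a placeholder title), not
# links that merely point at it -- unlike delete_commentary_links, which matches
# the regex against either position of the refs array.
def _example_clear_citation_links(uid):
    delete_links_from_text("Sefer Ploni", uid)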
def add_links_from_text(oref, lang, text, text_id, user, **kwargs):
    """
    Scan a text for explicit references to other texts
    and automatically add new links between
    ref and the mentioned text.

    text["text"] may be a list of segments, an individual segment, or None.

    The set of no longer supported links (`existingLinks` - `found`) is deleted.
    If Varnish is used, all linked refs, old and new, are refreshed.

    Returns `links` - the list of links added.
    """
    if not text:
        return []
    elif isinstance(text, list):
        subrefs = oref.subrefs(len(text))
        links = []
        for i in range(len(text)):
            single = add_links_from_text(subrefs[i], lang, text[i], text_id, user, **kwargs)
            links += single
        return links
    elif isinstance(text, basestring):
        """
        Keeps three lists:
        * existingLinks - The links that existed before the text was rescanned
        * found - The links found in this scan of the text
        * links - The new links added in this scan of the text

        The set of no longer supported links (`existingLinks` - `found`) is deleted.
        The set of all links (`existingLinks` + `links`) is refreshed in Varnish.
        """
        existingLinks = LinkSet({
            "refs": oref.normal(),
            "auto": True,
            "generated_by": "add_links_from_text",
            "source_text_oid": text_id
        }).array()  # Added the array here to force population, so that new links don't end up in this set

        found = []  # The normal refs of the links found in this text
        links = []  # New link objects created by this process
        refs = library.get_refs_in_string(text, lang)
        for linked_oref in refs:
            link = {
                # Note -- ref of the citing text is in the first position
                "refs": [oref.normal(), linked_oref.normal()],
                "type": "",
                "auto": True,
                "generated_by": "add_links_from_text",
                "source_text_oid": text_id
            }
            found += [linked_oref.normal()]  # Keep this here, since tracker.add will throw an error if the link exists
            try:
                tracker.add(user, Link, link, **kwargs)
                links += [link]
                if USE_VARNISH:
                    invalidate_ref(linked_oref)
            except InputError as e:
                pass

        # Remove existing links that are no longer supported by the text
        for exLink in existingLinks:
            for r in exLink.refs:
                if r == oref.normal():  # current base ref
                    continue
                if USE_VARNISH:
                    invalidate_ref(Ref(r))
                if r not in found:
                    tracker.delete(user, Link, exLink._id)
                break

        return links
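# Worked example of the bookkeeping above (values are illustrative only):
#   existingLinks refs:  "Genesis 1:1", "Exodus 2:3"      # links from the previous scan
#   found:               "Genesis 1:1", "Leviticus 4:5"   # refs cited by the rescanned text
#   links:               the new "Leviticus 4:5" link     # tracker.add raises InputError on the duplicate
# The stale "Exodus 2:3" link is deleted in the cleanup loop, and when USE_VARNISH
# is set the touched refs are invalidated so cached pages pick up the change.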
def _delete_link(self, link):
    # Delete a single link: go through tracker (which attributes the change to the
    # acting user) when a user id is set, otherwise remove the link document directly.
    if not self._user:
        link.delete()
    else:
        tracker.delete(self._user, Link, link._id)
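# Hypothetical usage sketch, not part of the original module. _delete_link is a
# method of its owning class (not shown here), so the class and variable names
# below are placeholders: when the instance was built with a user id, deletions
# are logged through tracker; otherwise the link is removed directly.
#
#   linker = SomeAutoLinker(user=uid)      # placeholder class name
#   linker._delete_link(stale_link)        # tracked delete, attributed to `uid`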