Example #1
0
def add_and_delete_invalid_commentary_links(oref, user, **kwargs):
    """
    Add commentary links for `oref`, then delete pre-existing generated links
    that are no longer valid -- i.e. that were not re-detected while
    iterating over the text.
    :param oref: Ref of the commentary text being processed
    :param user: id of the user to attribute the link changes to
    :param kwargs: passed through to add_commentary_links / tracker
    :return: None
    """
    assert oref.is_commentary()
    commentary_book_name = oref.index.title

    ref_regex = oref.regex()
    existing_links = LinkSet({
        "refs": {
            "$regex": ref_regex
        },
        "generated_by": "add_commentary_links"
    })
    # Assumes add_commentary_links returns the commentary-side refs it
    # (re)created -- TODO confirm against its current return value.
    found_links = add_commentary_links(oref, user, **kwargs)
    for exLink in existing_links:
        for r in exLink.refs:
            if commentary_book_name not in r:  # only inspect the ref on this commentary's side
                continue
            if USE_VARNISH:
                # Ref(r) raises InputError on stored refs that no longer
                # parse; guard so one bad ref doesn't abort the cleanup
                # (consistent with refresh_links / delete_commentary_links).
                try:
                    invalidate_ref(Ref(r))
                except InputError:
                    pass
            if r not in found_links:
                tracker.delete(user, Link, exLink._id)
            break  # only the first matching ref of each link is considered
Example #2
0
def add_and_delete_invalid_commentary_links(oref, user, **kwargs):
    """
    Add commentary links for `oref`, then delete pre-existing generated links
    that are no longer valid -- i.e. that were not re-detected while
    iterating over the text.
    :param oref: Ref of the commentary text being processed
    :param user: id of the user to attribute the link changes to
    :param kwargs: passed through to add_commentary_links / tracker
    :return: None
    """
    assert oref.is_commentary()
    commentary_book_name = oref.index.title

    ref_regex = oref.regex()
    existing_links = LinkSet({"refs": {"$regex": ref_regex}, "generated_by": "add_commentary_links"})
    # Assumes add_commentary_links returns the commentary-side refs it
    # (re)created -- TODO confirm against its current return value.
    found_links = add_commentary_links(oref, user, **kwargs)
    for exLink in existing_links:
        for r in exLink.refs:
            if commentary_book_name not in r:  # only inspect the ref on this commentary's side
                continue
            if USE_VARNISH:
                # Ref(r) raises InputError on stored refs that no longer
                # parse; guard so one bad ref doesn't abort the cleanup
                # (consistent with refresh_links / delete_commentary_links).
                try:
                    invalidate_ref(Ref(r))
                except InputError:
                    pass
            if r not in found_links:
                tracker.delete(user, Link, exLink._id)
            break  # only the first matching ref of each link is considered
Example #3
0
def reset_varnish(request, tref):
    """
    Invalidate the Varnish cache entry for `tref` -- and, for book-level
    refs, its index and counts as well -- then redirect with a status
    message in the `m` query parameter.
    """
    if not USE_VARNISH:
        return HttpResponseRedirect("/?m=Varnish-Not-Enabled")
    oref = model.Ref(tref)
    if oref.is_book_level():
        invalidate_index(oref.index)
        invalidate_counts(oref.index)
    invalidate_ref(oref)
    return HttpResponseRedirect("/?m=Varnish-Reset-For-{}".format(oref.url()))
Example #4
0
def delete_links_from_text(title, user):
    """
    Delete all auto-generated citation links ("add_links_from_text") whose
    citing ref (refs[0]) falls within text `title`.
    :param title: title of the text whose generated links are removed
    :param user: id of the user to attribute the deletions to
    """
    regex    = Ref(title).regex()
    links    = LinkSet({"refs.0": {"$regex": regex}, "generated_by": "add_links_from_text"})
    for link in links:
        if USE_VARNISH:
            # Guard each invalidation: Ref() raises InputError on stored
            # refs that no longer parse (consistent with
            # delete_commentary_links), so one bad ref doesn't abort the run.
            for r in (link.refs[0], link.refs[1]):
                try:
                    invalidate_ref(Ref(r))
                except InputError:
                    pass
        tracker.delete(user, Link, link._id)
Example #5
0
def rebuild_commentary_links(tref, user, **kwargs):
    """
    Deletes any commentary links for which there is no content (in any ref),
    then adds all commentary links again.
    :param tref: textual ref of the commentary, or a bare commentator name
        (in which case every commentary version of that commentator is rebuilt)
    :param user: id of the user to attribute the changes to
    """
    try:
        oref = Ref(tref)
    except InputError:
        # Allow commentators alone, rebuild for each text we have
        i = get_index(tref)
        for c in library.get_commentary_version_titles(i.title):
            rebuild_commentary_links(c, user, **kwargs)
        return

    links = LinkSet(oref)
    for link in links:
        try:
            oref1, oref2 = Ref(link.refs[0]), Ref(link.refs[1])
        except InputError:
            link.delete()
            # BUG FIX: oref1/oref2 may be unbound here -- the Ref() call
            # that raised never assigned them (or they hold values from a
            # previous iteration).  Invalidate from the raw ref strings
            # instead, skipping any that fail to parse.
            if USE_VARNISH:
                for r in link.refs:
                    try:
                        invalidate_ref(Ref(r))
                    except InputError:
                        pass
            continue
        t1, t2 = TextFamily(oref1, commentary=0, context=0), TextFamily(oref2, commentary=0, context=0)
        if not (t1.text + t1.he) or not (t2.text + t2.he):
            # Delete any link that doesn't have some textual content on one side or the other
            link.delete()
            if USE_VARNISH:
                invalidate_ref(oref1)
                invalidate_ref(oref2)
    add_commentary_links(oref, user, **kwargs)
Example #6
0
 def delete_links(self, **kwargs):
     """
     Remove every link previously generated for this text.

     When Varnish is enabled, the cache entry for each side of a link is
     invalidated (best-effort) before the link itself is deleted.
     :param kwargs: accepted for interface compatibility; not used here
     """
     for link in self._load_links():
         if USE_VARNISH:
             for ref_string in (link.refs[0], link.refs[1]):
                 # Ref() raises InputError on refs that no longer parse;
                 # invalidation is best-effort.
                 try:
                     invalidate_ref(Ref(ref_string))
                 except InputError:
                     pass
         self._delete_link(link)
Example #7
0
 def delete_links(self, **kwargs):
     """
     Delete every link previously generated for this text, invalidating
     the Varnish cache entry for both sides of each link first (when
     Varnish is enabled).
     :param kwargs: accepted for interface compatibility; not used here
     """
     links = self._load_links()
     for link in links:
         if USE_VARNISH:
             # Ref() raises InputError on stored refs that no longer
             # parse; invalidation is best-effort.
             try:
                 invalidate_ref(Ref(link.refs[0]))
             except InputError:
                 pass
             try:
                 invalidate_ref(Ref(link.refs[1]))
             except InputError:
                 pass
         self._delete_link(link)
Example #8
0
def delete_commentary_links(title, user, **kwargs):
    """
    Delete all auto-generated commentary links ("add_commentary_links")
    whose refs fall within text `title`, invalidating Varnish for both
    sides of each link first (best-effort) when Varnish is enabled.
    """
    title_pattern = Ref(title).regex()
    query = {"refs": {"$regex": title_pattern}, "generated_by": "add_commentary_links"}
    for link in LinkSet(query):
        if USE_VARNISH:
            for ref_string in (link.refs[0], link.refs[1]):
                # Ref() raises InputError on refs that no longer parse.
                try:
                    invalidate_ref(Ref(ref_string))
                except InputError:
                    pass
        tracker.delete(user, Link, link._id)
Example #9
0
 def refresh_links(self, **kwargs):
     """
     Rebuild this text's auto links, then delete any previously generated
     link that was not re-detected while iterating over the text.
     :param kwargs: accepted for interface compatibility; not used here
     :return: None
     """
     existing_links = self._load_links()
     found_links = self._build_links_internal(self._requested_oref)
     for existing in existing_links:
         # Only the first of the link's refs that belongs to this text
         # is examined.
         matching = [r for r in existing.refs if self._title in r]
         if not matching:
             continue
         base_ref = matching[0]
         if USE_VARNISH:
             # Ref() raises InputError on refs that no longer parse;
             # invalidation is best-effort.
             try:
                 invalidate_ref(Ref(base_ref))
             except InputError:
                 pass
         if base_ref not in found_links:
             self._delete_link(existing)
Example #10
0
def reset_ref(request, tref):
    """
    Reset cache, VersionState, TOC, Varnish, and the book TOC template
    for the given ref, then redirect with a status message.
    :param tref: textual ref to reset
    :return: redirect whose `m` query parameter carries the outcome
    """
    oref = model.Ref(tref)
    if not oref.is_book_level():
        # Non-book refs only get a Varnish invalidation, if enabled.
        if USE_VARNISH:
            invalidate_ref(oref)
            return HttpResponseRedirect("/{}?m=Reset-Ref".format(oref.url()))
        return HttpResponseRedirect("/?m=Nothing-to-Reset")

    model.library.refresh_index_record_in_cache(oref.index)
    version_state = model.VersionState(index=oref.index)
    version_state.refresh()
    model.library.update_index_in_toc(oref.index)
    scache.delete_cache_elem(scache.generate_text_toc_cache_key(oref.index.title))
    if USE_VARNISH:
        invalidate_index(oref.index)
        invalidate_counts(oref.index)
        invalidate_ref(oref)
    return HttpResponseRedirect("/{}?m=Reset-Index".format(oref.url()))
Example #11
0
 def refresh_links(self, **kwargs):
     """
     Re-detect this text's auto links, then delete pre-existing generated
     links that are no longer valid -- i.e. that were not re-detected
     while iterating over the text.
     :param kwargs: accepted for interface compatibility; not used here
     :return: None
     """
     existing_links = self._load_links()
     found_links = self._build_links_internal(self._requested_oref)
     for exLink in existing_links:
         for r in exLink.refs:
             if self._title not in r:  # only inspect the ref that belongs to this text
                 continue
             if USE_VARNISH:
                 try:
                     invalidate_ref(Ref(r))
                 except InputError:
                     # stored refs may no longer parse; invalidation is best-effort
                     pass
             if r not in found_links:
                 self._delete_link(exLink)
             break  # only the first matching ref of each link is considered
Example #12
0
def modify_text(user, oref, vtitle, lang, text, vsource=None, **kwargs):
    """
    Updates a chunk of text, identified by oref, versionTitle, and lang, and records history.
    :param user: id of the user to attribute the change to
    :param oref: Ref of the text being modified
    :param vtitle: version title of the chunk
    :param lang: language of the chunk
    :param text: the new text content
    :param vsource: if given, recorded as the chunk's versionSource
    :param kwargs: may carry "type" (history action) and "skip_links"
    :return: the saved TextChunk
    """
    chunk = model.TextChunk(oref, lang, vtitle)
    # Locked versions may only be edited by staff.
    if getattr(chunk.version(), "status", "") == "locked" and not is_user_staff(user):
        raise InputError("This text has been locked against further edits.")
    # NOTE(review): precedence parses this as
    # ((kwargs.get("type") or "edit") if chunk.text else "add"),
    # so an explicit kwargs["type"] is ignored when there was no prior
    # text -- confirm intended.
    action = kwargs.get("type") or "edit" if chunk.text else "add"
    old_text = chunk.text  # captured before overwrite, for the history record
    chunk.text = text
    if vsource:
        chunk.versionSource = vsource  # todo: log this change
    if chunk.save():
        model.log_text(user, action, oref, lang, vtitle, old_text, text, **kwargs)
        if USE_VARNISH:
            # Purge this section plus its neighbors, whose cached pages
            # can embed this text.
            invalidate_ref(oref, lang=lang, version=vtitle, purge=True)
            if oref.next_section_ref():
                invalidate_ref(oref.next_section_ref(), lang=lang, version=vtitle, purge=True)
            if oref.prev_section_ref():
                invalidate_ref(oref.prev_section_ref(), lang=lang, version=vtitle, purge=True)
        if not kwargs.get("skip_links", None):
            from sefaria.helper.link import add_commentary_links, add_links_from_text, rebuild_commentary_links
            # Commentaries generate links to their base text automatically
            if oref.type == "Commentary":
                rebuild_commentary_links(oref.normal(), user, **kwargs)
            # scan text for links to auto add
            add_links_from_text(oref.normal(), lang, chunk.text, chunk.full_version._id, user, **kwargs)

            if USE_VARNISH:
                # Refresh everything this ref links to as well.
                for linkref in {r.section_ref() for r in oref.linkset().refs_from(oref)}:
                    invalidate_ref(linkref)

    return chunk
Example #13
0
def modify_text(user, oref, vtitle, lang, text, vsource=None, **kwargs):
    """
    Updates a chunk of text, identified by oref, versionTitle, and lang, and records history.
    :param user: id of the user to attribute the change to
    :param oref: Ref of the text being modified
    :param vtitle: version title of the chunk
    :param lang: language of the chunk
    :param text: the new text content
    :param vsource: if given, recorded as the chunk's versionSource
    :param kwargs: may carry "type" (history action) and "skip_links"
    :return: the saved TextChunk
    """
    chunk = model.TextChunk(oref, lang, vtitle)
    # Locked versions may only be edited by staff.
    if getattr(chunk.version(), "status",
               "") == "locked" and not model.user_profile.is_user_staff(user):
        raise InputError("This text has been locked against further edits.")
    # NOTE(review): precedence parses this as
    # ((kwargs.get("type") or "edit") if chunk.text else "add"),
    # so an explicit kwargs["type"] is ignored when there was no prior
    # text -- confirm intended.
    action = kwargs.get("type") or "edit" if chunk.text else "add"
    old_text = chunk.text  # captured before overwrite, for the history record
    chunk.text = text
    if vsource:
        chunk.versionSource = vsource  # todo: log this change
    if chunk.save():
        model.log_text(user, action, oref, lang, vtitle, old_text, text,
                       **kwargs)
        if USE_VARNISH:
            # Purge this section plus its neighbors, whose cached pages
            # can embed this text.
            invalidate_ref(oref, lang=lang, version=vtitle, purge=True)
            if oref.next_section_ref():
                invalidate_ref(oref.next_section_ref(),
                               lang=lang,
                               version=vtitle,
                               purge=True)
            if oref.prev_section_ref():
                invalidate_ref(oref.prev_section_ref(),
                               lang=lang,
                               version=vtitle,
                               purge=True)
        if not kwargs.get("skip_links", None):
            from sefaria.helper.link import add_links_from_text
            # Some commentaries can generate links to their base text automatically
            linker = oref.autolinker(user=user)
            if linker:
                linker.refresh_links(**kwargs)
            # scan text for links to auto add
            add_links_from_text(oref, lang, chunk.text, chunk.full_version._id,
                                user, **kwargs)

            if USE_VARNISH:
                invalidate_linked(oref)

    return chunk
Example #14
0
def add_links_from_text(oref, lang, text, text_id, user, **kwargs):
    """
    Scan a text for explicit references to other texts and automatically add new links between
    ref and the mentioned text.

    text["text"] may be a list of segments, an individual segment, or None.

    The set of no longer supported links (`existingLinks` - `found`) is deleted.
    If Varnish is used, all linked refs, old and new, are refreshed

    Returns `links` - the list of links added.
    """
    if not text:
        return []
    elif isinstance(text, list):
        subrefs = oref.subrefs(len(text))
        links = []
        for i in range(len(text)):
            single = add_links_from_text(subrefs[i], lang, text[i], text_id,
                                         user, **kwargs)
            links += single
        return links
    elif isinstance(text, basestring):
        """
            Keeps three lists:
            * existingLinks - The links that existed before the text was rescanned
            * found - The links found in this scan of the text
            * links - The new links added in this scan of the text

            The set of no longer supported links (`existingLinks` - `found`) is deleted.
            The set of all links (`existingLinks` + `Links`) is refreshed in Varnish.
        """
        existingLinks = LinkSet({
            "refs": oref.normal(),
            "auto": True,
            "generated_by": "add_links_from_text",
            "source_text_oid": text_id
        }).array(
        )  # Added the array here to force population, so that new links don't end up in this set

        found = []  # The normal refs of the links found in this text
        links = []  # New link objects created by this processes

        refs = library.get_refs_in_string(text, lang)

        for linked_oref in refs:
            link = {
                # Note -- ref of the citing text is in the first position
                "refs": [oref.normal(), linked_oref.normal()],
                "type": "",
                "auto": True,
                "generated_by": "add_links_from_text",
                "source_text_oid": text_id
            }
            found += [
                linked_oref.normal()
            ]  # Keep this here, since tracker.add will throw an error if the link exists
            try:
                tracker.add(user, Link, link, **kwargs)
                links += [link]
                if USE_VARNISH:
                    invalidate_ref(linked_oref)
            except InputError as e:
                pass

        # Remove existing links that are no longer supported by the text
        for exLink in existingLinks:
            for r in exLink.refs:
                if r == oref.normal():  # current base ref
                    continue
                if USE_VARNISH:
                    invalidate_ref(Ref(r))
                if r not in found:
                    tracker.delete(user, Link, exLink._id)
                break

        return links
Example #15
0
def add_commentary_links(oref, user, **kwargs):
    """
    Automatically add links for each comment in the commentary text denoted by 'tref'.
    E.g., for the ref 'Sforno on Kohelet 3:2', automatically set links for
    Kohelet 3:2 <-> Sforno on Kohelet 3:2:1, Kohelet 3:2 <-> Sforno on Kohelet 3:2:2, etc.
    for each segment of text (comment) that is in 'Sforno on Kohelet 3:2'.
    :param oref: Ref of the commentary text
    :param user: id of the user to attribute the links to
    :param kwargs: passed through to tracker.add
    """
    try:
        text = TextFamily(oref, commentary=0, context=0, pad=False).contents()
    except AssertionError:
        logger.warning(u"Structure node passed to add_commentary_links: {}".format(oref.normal()))
        return

    assert oref.is_commentary()

    tref = oref.normal()

    # Base text ref is everything after " on " in the commentary ref.
    base_tref = tref[tref.find(" on ") + 4:]

    if len(text["sections"]) == len(text["sectionNames"]):
        # this is a single comment, trim the last section number (comment) from ref
        base_tref = base_tref[0:base_tref.rfind(":")]
        link = {
            "refs": [base_tref, tref],
            "type": "commentary",
            "anchorText": "",
            "auto": True,
            "generated_by": "add_commentary_links"
        }
        try:
            tracker.add(user, Link, link, **kwargs)
        except DuplicateRecordError as e:
            # link already exists; nothing to do
            pass

    elif len(text["sections"]) == (len(text["sectionNames"]) - 1):
        # This means that the text (and it's corresponding ref) being posted has the amount of sections like the parent text
        # (the text being commented on) so this is single group of comments on the lowest unit of the parent text.
        # and we simply iterate and create a link for each existing one to point to the same unit of parent text
        length = max(len(text["text"]), len(text["he"]))
        for i in range(length):
                link = {
                    "refs": [base_tref, tref + ":" + str(i + 1)],
                    "type": "commentary",
                    "anchorText": "",
                    "auto": True,
                    "generated_by": "add_commentary_links"
                }
                try:
                    tracker.add(user, Link, link, **kwargs)
                except DuplicateRecordError as e:
                    # link already exists; nothing to do
                    pass

    elif len(text["sections"]) > 0:
        # any other case where the posted ref sections do not match the length of the parent texts sections
        # this is a larger group of comments meaning it needs to be further broken down
        # in order to be able to match the commentary to the basic parent text units,
        # recur on each section
        length = max(len(text["text"]), len(text["he"]))
        for r in oref.subrefs(length):
            add_commentary_links(r, user, **kwargs)

    else:
        #This is a special case of the above, where the sections length is 0 and that means this is
        # a whole text that has been posted. For  this we need a better way than get_text() to get the correct length of
        # highest order section counts.
        # We use the counts document for that.
        #text_counts = counts.count_texts(tref)
        #length = len(text_counts["counts"])

        sn = StateNode(tref)
        if not sn.versionState.is_new_state:
            sn.versionState.refresh()  # Needed when saving multiple nodes in a complex text.  This may be moderately inefficient.
            sn = StateNode(tref)
        length = sn.ja('all').length()
        for r in oref.subrefs(length):
            add_commentary_links(r, user, **kwargs)

        # NOTE(review): this invalidation only runs in the whole-text (else)
        # branch -- confirm it shouldn't also apply after the other branches.
        if USE_VARNISH:
            invalidate_ref(oref)
            invalidate_ref(Ref(base_tref))
Example #16
0
def add_links_from_text(ref, lang, text, text_id, user, **kwargs):
    """
    Scan a text for explicit references to other texts and automatically add new links between
    ref and the mentioned text.

    text["text"] may be a list of segments, an individual segment, or None.

    The set of no longer supported links (`existingLinks` - `found`) is deleted.
    If Varnish is used, all linked refs, old and new, are refreshed

    Returns `links` - the list of links added.

    :param ref: normal-form textual ref of the citing text
    :param lang: language of `text`
    :param text: list of segments, a single segment string, or None
    :param text_id: oid of the source version, recorded on each generated link
    :param user: id of the user to attribute the link changes to
    """
    if not text:
        return []
    elif isinstance(text, list):
        # Recurse segment by segment so each link is anchored at the
        # finest-grained ref available.
        oref    = Ref(ref)
        subrefs = oref.subrefs(len(text))
        links   = []
        for i in range(len(text)):
            single = add_links_from_text(subrefs[i].normal(), lang, text[i], text_id, user, **kwargs)
            links += single
        return links
    elif isinstance(text, basestring):
        """
            Keeps three lists:
            * existingLinks - The links that existed before the text was rescanned
            * found - The links found in this scan of the text
            * links - The new links added in this scan of the text

            The set of no longer supported links (`existingLinks` - `found`) is deleted.
            The set of all links (`existingLinks` + `Links`) is refreshed in Varnish.
        """
        existingLinks = LinkSet({
            "refs": ref,
            "auto": True,
            "generated_by": "add_links_from_text",
            "source_text_oid": text_id
        }).array()  # Added the array here to force population, so that new links don't end up in this set

        found = []  # The normal refs of the links found in this text
        links = []  # New link objects created by this processes

        refs = library.get_refs_in_string(text, lang)

        for oref in refs:
            link = {
                # Note -- ref of the citing text is in the first position
                "refs": [ref, oref.normal()],
                "type": "",
                "auto": True,
                "generated_by": "add_links_from_text",
                "source_text_oid": text_id
            }
            found += [oref.normal()]  # Keep this here, since tracker.add will throw an error if the link exists
            try:
                tracker.add(user, Link, link, **kwargs)
                links += [link]
                if USE_VARNISH:
                    invalidate_ref(oref)
            except InputError as e:
                # Best-effort add: typically the link already exists.
                pass

        # Remove existing links that are no longer supported by the text
        for exLink in existingLinks:
            for r in exLink.refs:
                if r == ref:  # current base ref
                    continue
                if USE_VARNISH:
                    invalidate_ref(Ref(r))
                if r not in found:
                    tracker.delete(user, Link, exLink._id)
                break  # only the first non-base ref of each link is considered

        return links