Example #1
def search_advising_notes(
    search_phrase,
    author_csid=None,
    student_csid=None,
    topic=None,
    datetime_from=None,
    datetime_to=None,
    offset=0,
    limit=20,
):

    benchmark = get_benchmarker('search_advising_notes')
    benchmark('begin')

    if search_phrase:
        # Tokenize the phrase into individual search terms.
        search_terms = [t.group(0) for t in re.finditer(NOTE_SEARCH_PATTERN, search_phrase)]
        search_phrase = ' & '.join(search_terms)
    else:
        search_terms = []

    # TODO: We currently retrieve all local results for the sake of the offset calculations below. As the number of notes
    # in BOA grows (possibly requiring some kind of staging table for search indexing), we'll need to revisit this.
    benchmark('begin local notes query')
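    # Note.search expects the author's UID, so resolve it from the CSID when one was given.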
    author_uid = get_uid_for_csid(app, author_csid) if author_csid else None
    local_results = Note.search(
        search_phrase=search_phrase,
        author_uid=author_uid,
        student_csid=student_csid,
        topic=topic,
        datetime_from=datetime_from,
        datetime_to=datetime_to,
    )
    benchmark('end local notes query')
    local_notes_count = len(local_results)
    cutoff = min(local_notes_count, offset + limit)

    benchmark('begin local notes parsing')
    notes_feed = _get_local_notes_search_results(local_results[offset:cutoff],
                                                 search_terms)
    benchmark('end local notes parsing')

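    # The local page already fills the requested limit; no need to query the loch.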
    if len(notes_feed) == limit:
        return notes_feed

    benchmark('begin loch notes query')
    loch_results = data_loch.search_advising_notes(
        search_phrase=search_phrase,
        author_uid=author_uid,
        author_csid=author_csid,
        student_csid=student_csid,
        topic=topic,
        datetime_from=datetime_from,
        datetime_to=datetime_to,
        offset=max(0, offset - local_notes_count),
        limit=(limit - len(notes_feed)),
    )
    benchmark('end loch notes query')

    benchmark('begin loch notes parsing')
    notes_feed += _get_loch_notes_search_results(loch_results, search_terms)
    benchmark('end loch notes parsing')

    return notes_feed
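The function above is lifted from a larger module, so names such as re, app, Note, data_loch, get_benchmarker, get_uid_for_csid, NOTE_SEARCH_PATTERN and the _get_local_notes_search_results / _get_loch_notes_search_results helpers are module-level imports and private helpers rather than locals. A minimal usage sketch with purely hypothetical argument values, assuming those dependencies are importable and an application context is active:

from datetime import datetime

# Hypothetical call: fetch the first page of notes matching a phrase for one student.
# The CSID and date range below are made up; datetime objects are assumed to be an
# acceptable type for datetime_from/datetime_to, since the function passes them through.
feed = search_advising_notes(
    search_phrase='academic probation',
    student_csid='11667051',
    datetime_from=datetime(2019, 1, 1),
    datetime_to=datetime(2019, 12, 31),
    offset=0,
    limit=20,
)
print(f'{len(feed)} notes in page one')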
Example #2
def search_advising_notes(
    search_phrase,
    author_csid=None,
    author_uid=None,
    student_csid=None,
    topic=None,
    datetime_from=None,
    datetime_to=None,
    offset=0,
    limit=20,
):
    benchmark = get_benchmarker('search_advising_notes')
    benchmark('begin')

    if search_phrase:
        # Deduplicate terms via a set comprehension before joining.
        search_terms = list({t.group(0) for t in re.finditer(TEXT_SEARCH_PATTERN, search_phrase)})
        search_phrase = ' & '.join(search_terms)
    else:
        search_terms = []

    # Fall back to resolving the author UID from the CSID when only the CSID was provided.
    if not author_uid and author_csid:
        author_uid = get_uid_for_csid(app, author_csid)

    # TODO: We currently retrieve all local results for the sake of the offset calculations below. As the number of notes
    # in BOA grows (possibly requiring some kind of staging table for search indexing), we'll need to revisit this.
    benchmark('begin local notes query')
    local_results = Note.search(
        search_phrase=search_phrase,
        author_uid=author_uid,
        student_csid=student_csid,
        topic=topic,
        datetime_from=datetime_from,
        datetime_to=datetime_to,
    )
    benchmark('end local notes query')

    benchmark('begin local notes parsing')
    # Our offset calculations are unfortunately fussy because note parsing might reveal notes associated with students no
    # longer in BOA, which we won't include in the feed; so we don't actually know the length of our result set until
    # parsing is complete.
    cutoff = min(len(local_results), offset + limit)
    notes_feed = _get_local_notes_search_results(local_results, cutoff,
                                                 search_terms)
    local_notes_count = len(notes_feed)
    notes_feed = notes_feed[offset:]

    benchmark('end local notes parsing')

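    # Local results alone satisfy the requested page size; skip the loch query.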
    if len(notes_feed) == limit:
        return notes_feed

    benchmark('begin loch notes query')
    loch_results = data_loch.search_advising_notes(
        search_phrase=search_phrase,
        author_uid=author_uid,
        author_csid=author_csid,
        student_csid=student_csid,
        topic=topic,
        datetime_from=datetime_from,
        datetime_to=datetime_to,
        offset=max(0, offset - local_notes_count),
        limit=(limit - len(notes_feed)),
    )
    benchmark('end loch notes query')

    benchmark('begin loch notes parsing')
    notes_feed += _get_loch_notes_search_results(loch_results, search_terms)
    benchmark('end loch notes parsing')

    return notes_feed
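In both versions the hand-off from the local query to the loch query is the same: the loch offset is the caller's offset minus the local notes already accounted for (floored at zero), and the loch limit is whatever remains of the requested page. A tiny self-contained sketch of that arithmetic, with made-up numbers and a hypothetical helper name:

def loch_window(offset, limit, local_notes_count, local_page_len):
    # Mirrors offset=max(0, offset - local_notes_count) and limit=limit - len(notes_feed) above.
    return max(0, offset - local_notes_count), limit - local_page_len

# 12 local matches in total, caller asks for offset=10, limit=20: the local page
# contributes 2 notes, so the loch query starts at its own offset 0 and is asked
# for the remaining 18.
print(loch_window(offset=10, limit=20, local_notes_count=12, local_page_len=2))  # (0, 18)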