Example 1
def site_index(request):
    "Site home page.  Currently includes browse letter links."

    return render_to_response('content/site_index.html', {
                'letters': title_letters(),
                # images for the rotating home page banner
                'banner': BANNER_IMAGES,
            }, context_instance=RequestContext(request))
Example 2
def site_index(request):
    "Site home page.  Currently includes browse letter links."

    return render(request, 'content/site_index.html', {
                'letters': title_letters(),
                # images for the rotating home page banner
                'banner': BANNER_IMAGES,
            })
Example 3
def titles_by_letter(request, letter):
    """Paginated list of finding aids by first letter in list title.
    Includes list of browse first-letters as in :meth:`browse_titles`.
    """

    # set last browse letter and page in session
    page = request.REQUEST.get('page', 1)
    last_search = "%s?page=%s" % (reverse("fa:titles-by-letter", kwargs={'letter': letter}), page)
    last_search = {"url": last_search, "txt": "Return to Browse Results"}
    request.session['last_search'] = last_search
    request.session.set_expiry(0)  # set to expire when browser closes

    # using ~ to do case-insensitive ordering
    fa = FindingAid.objects.filter(list_title__startswith=letter).order_by('~list_title').only(*fa_listfields)
    fa_subset, paginator = paginate_queryset(request, fa, per_page=10, orphans=5)
    page_labels = alpha_pagelabels(paginator, fa, label_attribute='list_title')
    # No longer restricting the number of page labels shown using pages_to_show (like we do for numeric pages).
    # That doesn't make sense here, since the alpha range labels should ideally allow anyone to jump directly
    # to the section they want based on the labels.

    response_context = {
        'findingaids': fa_subset,
        'querytime': [fa.queryTime()],
        'letters': title_letters(),
        'current_letter': letter,
        'show_pages': page_labels,
    }
    if page_labels:
        response_context['title_range'] = page_labels[fa_subset.number]
        # the current page's range label is shown in the browser title bar
        # to indicate which page you are on, e.g. (Cua - Cut)

    # no special first/last page label is required, since we are displaying all labels (not limiting to 9)

    return render_to_response('fa/titles_list.html',
                              response_context,
                              context_instance=RequestContext(request))
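
Example 3 depends on two project helpers that are not shown on this page: paginate_queryset, which wraps Django pagination, and alpha_pagelabels, which builds the alphabetical range labels such as (Cua - Cut) used for show_pages and title_range. Purely as an illustration of the idea (a simplified sketch under assumed behavior, not the project's actual implementation), such a label helper could look like this:

from django.core.paginator import Paginator

def alpha_pagelabels_sketch(paginator, objects, label_attribute, prefix_length=3):
    # hypothetical sketch: map each page number to an "Abc - Xyz" range label,
    # built from the first and last item on that page (objects must already be
    # sorted by the label attribute, as the queryset in the view above is)
    labels = {}
    for page_num in paginator.page_range:
        page = paginator.page(page_num)
        first = getattr(objects[page.start_index() - 1], label_attribute)
        last = getattr(objects[page.end_index() - 1], label_attribute)
        labels[page_num] = '%s - %s' % (first[:prefix_length], last[:prefix_length])
    return labels

# usage with a plain list standing in for the FindingAid queryset
class Title(object):
    def __init__(self, list_title):
        self.list_title = list_title

titles = [Title(t) for t in ['Cuaron papers', 'Cullen letters', 'Curtis files', 'Cutler diary']]
paginator = Paginator(titles, 2)
print(alpha_pagelabels_sketch(paginator, titles, 'list_title'))
# {1: 'Cua - Cul', 2: 'Cur - Cut'}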
Example 4
def browse_titles(request):
    "List all first letters in finding aid list title, with a link to browse by letter."
    return render(request, 'fa/browse_letters.html',
                  {'letters': title_letters()})
Example 5
def site_index(request):
    "Site home page.  Currently includes browse letter links."
    return render(request, 'fa/index.html', {'letters': title_letters()})
Example 6
    def handle(self, mode, *args, **options):
        verbosity = int(options['verbosity'])    # 1 = normal, 0 = minimal, 2 = all
        v_normal = 1
        v_all = 2

        # if cmd not in self._args:
        #     print "Command '%s' not recognized\n" % cmd
        #     print self.help
        #     return


        # BROWSE
        if mode == 'browse':
            first_letters = title_letters()

            if not options['pages_only']:
                # get eXist query times without page load
                if verbosity == v_all:
                    print 'Testing response times for browse xqueries'

                query_times = {}
                # eXist query times only (without page rendering / content returned)
                for letter in first_letters:
                    # same query used in browse view
                    fa = FindingAid.objects.filter(list_title__startswith=letter).order_by('list_title').only(*fa_listfields)
                    time, total = fa.queryTime(), fa.count()
                    query_times[letter] = time
                    if verbosity >= v_normal:
                        print '%s : %dms, %d records' % (letter, time, total)
                    if fa.queryTime() > self.threshold:
                        print "Warning: query for %s took %dms and returned %d records" % \
                            (letter, time, total)

                max_min_avg(query_times.values())

            if not options['xquery_only']:
                if verbosity == v_all:
                    print 'Testing response times for browse pages'

                client = Client()
                query_times = {}
                for letter in first_letters:
                    current_times = {}  # times for the current letter
                    uri = reverse('fa:titles-by-letter', kwargs={'letter': letter})
                    if verbosity == v_all:
                        print letter
                    for page in range(1, 11):
                        start_time = datetime.now()
                        response = client.get(uri, {'page': page})
                        end_time = datetime.now()
                        if response.status_code == 200:
                            duration = end_time - start_time
                            current_times['%s %d' % (letter, page)] = duration
                            if duration > self.timedelta_threshold:
                                print "Warning: page load for page %d of %s (%s) took %s" % \
                                    (page, letter, uri, duration)
                            if verbosity == v_all:
                                print "  page %d : %s" % (page, duration)

                    if verbosity >= v_normal and len(current_times) > 1:
                        # summarize times for current letter
                        print "\nMax/Min/Average for %s (all pages)" % letter
                        max_min_avg(current_times.values(), zero=timedelta())
                    # add times for current letter to all query times
                    query_times.update(current_times)

                print "\nMax/Min/Average - all letters, all pages"
                max_min_avg(query_times.values(), zero=timedelta())

        # SEARCH
        elif mode == 'search':
            client = Client()
            query_times = {}

            if not options['pages_only']:
                # get eXist query times without page load
                if verbosity == v_all:
                    print 'Testing response times for search xqueries'
                for search_terms in self.test_searches:
                    # NOTE: search syntax duplicated from search view
                    # copy the list so 'fulltext_score' is not appended to
                    # fa_listfields again on every loop iteration
                    search_fields = fa_listfields + ['fulltext_score']
                    fa = FindingAid.objects.filter(fulltext_terms=search_terms).or_filter(
                        fulltext_terms=search_terms,
                        boostfields__fulltext_terms=search_terms,
                    ).order_by('-fulltext_score').only(*search_fields)

                    time, total = fa.queryTime(), fa.count()
                    query_times[search_terms] = time
                    if verbosity >= v_normal:
                        print '%s : %dms, %d records' % (search_terms, time, total)
                    if fa.queryTime() > self.threshold:
                        print "Warning: query for %s took %dms and returned %d records" % \
                            (search_terms, time, total)

                print "\nMax/Min/Average - search queries, eXist response time"
                max_min_avg(query_times.values())

            if not options['xquery_only']:
                if verbosity == v_all:
                    print 'Testing response times for search pages'

                query_times = {}
                for search_terms in self.test_searches:
                    current_times = {}  # times for the current search
                    uri = reverse('fa:search')
                    if verbosity == v_all:
                        print search_terms
                    for page in range(1,11):
                        start_time = datetime.now()
                        response = client.get(uri, {'page': page, 'keywords': search_terms})
                        end_time = datetime.now()
                        if response.status_code == 200:
                            duration = end_time - start_time
                            current_times['%s %d' % (search_terms, page)] = duration
                            if duration > self.timedelta_threshold:
                                print "Warning: page load for page %d of %s (%s) took %s" % \
                                    (page, search_terms, uri, duration)
                            if verbosity == v_all:
                                print "  page %d : %s" % (page, duration)


                    # summarize times for current search
                    print "\nMax/Min/Average for %s (all pages)" % search_terms
                    max_min_avg(current_times.values(), zero=timedelta())
                    # add times for current search to all query times
                    query_times.update(current_times)

                print "\nMax/Min/Average - all letters, all pages"
                max_min_avg(query_times.values(), zero=timedelta())

        # PAGES
        elif mode == 'pages':
            client = Client()
            query_times = {}

            if verbosity == v_all:
                print 'Testing response times for pre-specified pages'

            query_times = {}
            for uri in self.test_pages:
                start_time = datetime.now()
                response = client.get(uri)
                end_time = datetime.now()
                if response.status_code == 200:
                    duration = end_time - start_time
                    query_times[uri] = duration
                    if duration > self.timedelta_threshold:
                        print "Warning: page load for %s took %s" % \
                            (uri, duration)
                    if verbosity == v_all:
                        print "%s : %s" % (uri, duration)

            print "\nMax/Min/Average - all letters, all pages"
            max_min_avg(query_times.values(), zero=timedelta())
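
All three modes of this management command report timing statistics through a max_min_avg helper that is not included above. As a stand-in only (the signature matches the calls above, but the body is an assumption rather than the project's code), it is expected to do something like:

from datetime import timedelta

def max_min_avg(values, zero=0):
    # hypothetical sketch: print max, min, and average of a collection of
    # values - either integers (eXist query times in ms) or timedelta
    # objects (page load times); `zero` seeds the sum so timedeltas add up
    values = list(values)
    if not values:
        return
    total = zero
    for value in values:
        total += value
    print('  max: %s' % max(values))
    print('  min: %s' % min(values))
    print('  avg: %s' % (total / len(values)))

# usage mirroring the calls in the command above
max_min_avg([120, 45, 300])                                        # xquery times in ms
max_min_avg([timedelta(seconds=1.5), timedelta(seconds=0.8)], zero=timedelta())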
Example 7
def browse_titles(request):
    "List all first letters in finding aid list title, with a link to browse by letter."
    return render_to_response('fa/browse_letters.html',
                              {'letters': title_letters()},
                              context_instance=RequestContext(request))
Example 8
def site_index(request):
    "Site home page.  Currently includes browse letter links."
    return render_to_response('fa/index.html', {'letters': title_letters()},
                              context_instance=RequestContext(request))
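
Every example on this page hands the result of title_letters() to a template as 'letters', the list of first letters used to build the browse-by-letter links. The real helper queries the eXist database through the FindingAid model; as a self-contained stand-in (an assumed simplification, not the project's implementation), the same idea over a plain list of titles is simply:

def title_letters_sketch(list_titles):
    # hypothetical sketch: distinct first letters of the given list titles,
    # in alphabetical order, for the browse-by-letter links
    return sorted(set(title[0].upper() for title in list_titles if title))

print(title_letters_sketch(['Abbey papers', 'Adams letters', 'Baker files']))
# ['A', 'B']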