Example #1
0
def ajax_search(request, taskid=-1):
    """
    David's crummy AJAX search implementation.
    Actually, it's not that crummy at all.

    Behaviour depends on the request:
      * taskid >= 0 -- marks the start of a new search task: the session is
        updated, the commencement event is logged, and the client is
        redirected to the plain AJAX search page.
      * POST        -- an AJAX query submission: returns a JSON payload with
        the result list (or {'no_results': True} for a blank query).
      * otherwise   -- renders the search box template.

    A timed-out experiment yields an HTTP 400 with {'timeout': True}.
    """
    # taskid arrives as a (unicode) string when captured from the URLconf.
    if isinstance(taskid, unicode):
        taskid = int(taskid)

    # If taskid is set, then it marks the start of a new search task
    # Update the session variable to reflect this
    if taskid >= 0:
        request.session['start_time'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        request.session['taskid'] = taskid

        if taskid == 0:
            log_event(event="PRACTICE_SEARCH_TASK_COMMENCED", request=request)
        else:
            log_event(event="SEARCH_TASK_COMMENCED", request=request)

        return HttpResponseRedirect('/treconomics/searcha/')

    # Has the experiment timed out? If so, indicate to the user.
    # Send a JSON object back which will be interpreted by the JavaScript.
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}), content_type='application/json')
    else:
        context = RequestContext(request)
        context_dict = {}

        context_dict['ajax_enabled'] = True
        context_dict['application_root'] = '/treconomics/'
        context_dict['ajax_search_url'] = 'searcha/'

        # Ensure that we set a queryurl.
        # This means that if a user clicks "View Saved" before posing a query, there will be something
        # to go back to!
        if not request.session.get('queryurl'):
            queryurl = context_dict['application_root'] + 'searcha/'
            print "Set queryurl to : " + queryurl
            request.session['queryurl'] = queryurl

        # Gather the usual suspects...
        ec = get_experiment_context(request)
        uname = ec["username"]
        condition = ec["condition"]
        taskid = ec["taskid"]
        topic_num = ec["topicnum"]
        interface = experiment_setups[condition].get_interface()
        page_len = experiment_setups[condition].rpp
        page = 1

        context_dict['participant'] = uname
        context_dict['task'] = taskid
        context_dict['condition'] = condition
        context_dict['interface'] = interface
        context_dict['autocomplete'] = experiment_setups[condition].autocomplete
        # delay_results == 0 means results are served without an artificial
        # delay, i.e. the "fast" condition.
        context_dict['is_fast'] = 'true' if experiment_setups[condition].delay_results == 0 else 'false'

        if request.method == 'POST':
            # AJAX POST request for a given query.
            # Returns a AJAX response with the document list to populate the container <DIV>.

            # do_delay is True when the client sent the 'noDelay' flag, which
            # happens when the user navigates back to the results page from
            # elsewhere; in that case the QUERY_ISSUED event is not logged and
            # the artificial results delay below is skipped.
            # NOTE(review): the name reads inverted relative to the flag it is
            # derived from -- confirm intent before any renaming.
            do_delay = bool(request.POST.get('noDelay'))

            if interface == 1:
                # Structured (fielded) query interface: remember the raw POST
                # so the form can be repopulated later.
                querystring = request.POST.copy()
                del querystring['csrfmiddlewaretoken']
                request.session['last_ajax_interface1_querystring'] = querystring

                user_query = constructStructuredQuery(request)
            else:
                user_query = request.POST.get('query').strip()

            if not do_delay:  # Do not log the query issued event if the user is returning to the results page.
                log_event(event="QUERY_ISSUED", request=request, query=user_query)

            page_request = request.POST.get('page')

            if page_request:
                page = int(page_request)

            if user_query == "":
                # Nothing to query, tell the client.
                return HttpResponse(json.dumps({'no_results': True}), content_type='application/json')
            else:
                # Get some results! Call this wrapper function which uses the Django cache backend.
                result_dict = get_results(request,
                                           page,
                                           page_len,
                                           condition,
                                           user_query,
                                           request.POST.get('noperf'),
                                           experiment_setups[ec['condition']].engine)

                #  Caching is now handled by WhooshTrecNews
                #if not request.POST.get('noperf'):
                #    # Now query for the next page of results so they are cached and ready when the user asks for them.
                #    print "Starting thread(s) to get cache next page of results..."
                #    for i in range(1, (CACHING_FORWARD_LOOK + 1)):
                #        if i == 1:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.POST.get('noperf'), experiment_setups[ec['condition']].engine, 0))
                #        else:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.POST.get('noperf'), experiment_setups[ec['condition']].engine, (i * CACHING_DELAY_FACTOR)))
                #        forward_thread.start()

                # The queryurl lets "View Saved" (and similar pages) link back
                # to this exact result page; noperf=true skips re-running the
                # performance calculations on the way back.
                queryurl = context_dict['application_root'] + context_dict['ajax_search_url'] + '#query=' + user_query.replace(' ', '+') + '&page=' + str(page) + '&noperf=true'
                print "Set queryurl to : " + queryurl
                request.session['queryurl'] = queryurl

                print "Delay time - query execution time: {0}".format(experiment_setups[condition].delay_results - result_dict['query_time'])

                # Pad the wait so the user-perceived time matches the
                # condition's configured delay (delay minus actual query time).
                if experiment_setups[condition].delay_results > 0 and (experiment_setups[condition].delay_results - result_dict['query_time'] > 0) and not do_delay:
                    log_event(event='DELAY_RESULTS_PAGE', request=request, page=page)
                    sleep(experiment_setups[condition].delay_results - result_dict['query_time'])  # Delay search results.

                # Truncate long queries for display purposes only; the full
                # query string is preserved in result_dict['query'].
                result_dict['display_query'] = result_dict['query']

                if len(result_dict['query']) > 50:
                    result_dict['display_query'] = result_dict['query'][0:50] + '...'

                if result_dict['trec_results']:
                    qrp = getQueryResultPerformance(result_dict['trec_results'], topic_num)
                    log_event(event='SEARCH_RESULTS_PAGE_QUALITY',
                              request=request,
                              whooshid=page,
                              rank=qrp[0],
                              judgement=qrp[1])

                set_results_session_var(request, result_dict)

                # Serialis(z?)e the data structure and send it back
                #if not do_delay:  # Only log the following if the user is not returning back to the results page.
                log_event(event='VIEW_SEARCH_RESULTS_PAGE', request=request, page=page)
                # Timestamp used elsewhere to measure time between requests.
                request.session['last_request_time'] = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')
                return HttpResponse(json.dumps(result_dict), content_type='application/json')
        else:
            # Render the search template as usual...
            log_event(event="VIEW_SEARCH_BOX", request=request, page=page)
            context_dict['delay_results'] = experiment_setups[condition].delay_results
            context_dict['delay_docview'] = experiment_setups[condition].delay_docview
            return render_to_response('trecdo/search.html', context_dict, context)
Example #2
0
def ajax_search(request, taskid=-1):
    """
    David's crummy AJAX search implementation.
    Actually, it's not that crummy at all.

    Behaviour depends on the request:
      * taskid >= 0 -- marks the start of a new search task: the session is
        updated, the commencement event is logged, and the client is
        redirected to the plain AJAX search page.
      * POST        -- an AJAX query submission: returns a JSON payload with
        the result list (or {'no_results': True} for a blank query).
      * otherwise   -- renders the search box template.

    A timed-out experiment yields an HTTP 400 with {'timeout': True}.
    """
    # taskid arrives as a (unicode) string when captured from the URLconf.
    if isinstance(taskid, unicode):
        taskid = int(taskid)

    # If taskid is set, then it marks the start of a new search task
    # Update the session variable to reflect this
    if taskid >= 0:
        request.session['start_time'] = datetime.datetime.now().strftime(
            "%Y-%m-%d %H:%M:%S")
        request.session['taskid'] = taskid

        if taskid == 0:
            log_event(event="PRACTICE_SEARCH_TASK_COMMENCED", request=request)
        else:
            log_event(event="SEARCH_TASK_COMMENCED", request=request)

        return HttpResponseRedirect('/treconomics/searcha/')

    # Has the experiment timed out? If so, indicate to the user.
    # Send a JSON object back which will be interpreted by the JavaScript.
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}),
                                      content_type='application/json')
    else:
        context = RequestContext(request)
        context_dict = {}

        context_dict['ajax_enabled'] = True
        context_dict['application_root'] = '/treconomics/'
        context_dict['ajax_search_url'] = 'searcha/'

        # Ensure that we set a queryurl.
        # This means that if a user clicks "View Saved" before posing a query, there will be something
        # to go back to!
        if not request.session.get('queryurl'):
            queryurl = context_dict['application_root'] + 'searcha/'
            print "Set queryurl to : " + queryurl
            request.session['queryurl'] = queryurl

        # Gather the usual suspects...
        ec = get_experiment_context(request)
        uname = ec["username"]
        condition = ec["condition"]
        taskid = ec["taskid"]
        topic_num = ec["topicnum"]
        interface = experiment_setups[condition].get_interface()
        page_len = experiment_setups[condition].rpp
        page = 1

        context_dict['participant'] = uname
        context_dict['task'] = taskid
        context_dict['condition'] = condition
        context_dict['interface'] = interface
        context_dict['autocomplete'] = experiment_setups[
            condition].autocomplete
        # delay_results == 0 means results are served without an artificial
        # delay, i.e. the "fast" condition.
        context_dict['is_fast'] = 'true' if experiment_setups[
            condition].delay_results == 0 else 'false'

        if request.method == 'POST':
            # AJAX POST request for a given query.
            # Returns a AJAX response with the document list to populate the container <DIV>.

            # do_delay is True when the client sent the 'noDelay' flag, which
            # happens when the user navigates back to the results page from
            # elsewhere; in that case the QUERY_ISSUED event is not logged and
            # the artificial results delay below is skipped.
            # NOTE(review): the name reads inverted relative to the flag it is
            # derived from -- confirm intent before any renaming.
            do_delay = bool(request.POST.get('noDelay'))

            if interface == 1:
                # Structured (fielded) query interface: remember the raw POST
                # so the form can be repopulated later.
                querystring = request.POST.copy()
                del querystring['csrfmiddlewaretoken']
                request.session[
                    'last_ajax_interface1_querystring'] = querystring

                user_query = constructStructuredQuery(request)
            else:
                user_query = request.POST.get('query').strip()

            if not do_delay:  # Do not log the query issued event if the user is returning to the results page.
                log_event(event="QUERY_ISSUED",
                          request=request,
                          query=user_query)

            page_request = request.POST.get('page')

            if page_request:
                page = int(page_request)

            if user_query == "":
                # Nothing to query, tell the client.
                return HttpResponse(json.dumps({'no_results': True}),
                                    content_type='application/json')
            else:
                # Get some results! Call this wrapper function which uses the Django cache backend.
                result_dict = get_results(
                    request, page, page_len, condition, user_query,
                    request.POST.get('noperf'),
                    experiment_setups[ec['condition']].engine)

                #  Caching is now handled by WhooshTrecNews
                #if not request.POST.get('noperf'):
                #    # Now query for the next page of results so they are cached and ready when the user asks for them.
                #    print "Starting thread(s) to get cache next page of results..."
                #    for i in range(1, (CACHING_FORWARD_LOOK + 1)):
                #        if i == 1:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.POST.get('noperf'), experiment_setups[ec['condition']].engine, 0))
                #        else:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.POST.get('noperf'), experiment_setups[ec['condition']].engine, (i * CACHING_DELAY_FACTOR)))
                #        forward_thread.start()

                # The queryurl lets "View Saved" (and similar pages) link back
                # to this exact result page; noperf=true skips re-running the
                # performance calculations on the way back.
                queryurl = context_dict['application_root'] + context_dict[
                    'ajax_search_url'] + '#query=' + user_query.replace(
                        ' ', '+') + '&page=' + str(page) + '&noperf=true'
                print "Set queryurl to : " + queryurl
                request.session['queryurl'] = queryurl

                print "Delay time - query execution time: {0}".format(
                    experiment_setups[condition].delay_results -
                    result_dict['query_time'])

                # Pad the wait so the user-perceived time matches the
                # condition's configured delay (delay minus actual query time).
                if experiment_setups[condition].delay_results > 0 and (
                        experiment_setups[condition].delay_results -
                        result_dict['query_time'] > 0) and not do_delay:
                    log_event(event='DELAY_RESULTS_PAGE',
                              request=request,
                              page=page)
                    sleep(experiment_setups[condition].delay_results -
                          result_dict['query_time'])  # Delay search results.

                # Truncate long queries for display purposes only; the full
                # query string is preserved in result_dict['query'].
                result_dict['display_query'] = result_dict['query']

                if len(result_dict['query']) > 50:
                    result_dict[
                        'display_query'] = result_dict['query'][0:50] + '...'

                if result_dict['trec_results']:
                    qrp = getQueryResultPerformance(
                        result_dict['trec_results'], topic_num)
                    log_event(event='SEARCH_RESULTS_PAGE_QUALITY',
                              request=request,
                              whooshid=page,
                              rank=qrp[0],
                              judgement=qrp[1])

                set_results_session_var(request, result_dict)

                # Serialis(z?)e the data structure and send it back
                #if not do_delay:  # Only log the following if the user is not returning back to the results page.
                log_event(event='VIEW_SEARCH_RESULTS_PAGE',
                          request=request,
                          page=page)
                # Timestamp used elsewhere to measure time between requests.
                request.session[
                    'last_request_time'] = datetime.datetime.utcnow().strftime(
                        '%Y-%m-%d %H:%M:%S.%f')
                return HttpResponse(json.dumps(result_dict),
                                    content_type='application/json')
        else:
            # Render the search template as usual...
            log_event(event="VIEW_SEARCH_BOX", request=request, page=page)
            context_dict['delay_results'] = experiment_setups[
                condition].delay_results
            context_dict['delay_docview'] = experiment_setups[
                condition].delay_docview
            return render_to_response('trecdo/search.html', context_dict,
                                      context)
Example #3
0
def search(request, taskid=-1):

    def is_from_search_request(new_page_no):
        """
        Returns True iif the URL of the referer is a standard search request.
        This is used to determine if we should delay results appearing.

        The new page number of required to check against the page number from the referer.
        If they match, we don't delay - if they don't, we do.
        """
        http_referer = request.META['HTTP_REFERER']
        http_referer = http_referer.strip().split('&')
        page = 1

        for item in http_referer:
            if 'page=' in item:
                item = item.split('=')
                page = int(item[1])

        if request.POST.get('newquery') == 'true':
            return '/treconomics/search/' in request.META['HTTP_REFERER']

        return '/treconomics/search/' in request.META['HTTP_REFERER'] and new_page_no == page

    if isinstance(taskid, unicode):
        taskid = int(taskid)

    # If taskid is set, then it marks the start of a new search task
    # Update the session variable to reflect this
    if taskid >= 0:
        request.session['start_time'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        request.session['taskid'] = taskid

        if taskid == 0:
            log_event(event="PRACTICE_SEARCH_TASK_COMMENCED", request=request)
        else:
            log_event(event="SEARCH_TASK_COMMENCED", request=request)

    #check for timeout
    if time_search_experiment_out(request):
        return HttpResponseRedirect('/treconomics/timeout/')
    else:
        """show base index view"""
        context = RequestContext(request)
        ec = get_experiment_context(request)
        uname = ec["username"]
        condition = ec["condition"]
        taskid = ec["taskid"]
        topic_num = ec["topicnum"]
        interface = experiment_setups[condition].get_interface()
        page_len = experiment_setups[condition].rpp
        page = 1

        result_dict = {}
        result_dict['participant'] = uname
        result_dict['task'] = taskid
        result_dict['condition'] = condition
        result_dict['interface'] = interface
        result_dict['application_root'] = '/treconomics/'
        result_dict['ajax_search_url'] = 'searcha/'
        result_dict['autocomplete'] = experiment_setups[condition].autocomplete
        result_dict['is_fast'] = 'true'

        if experiment_setups[condition].delay_results == 0:
            result_dict['is_fast'] = 'false'

        # Ensure that we set a queryurl.
        # This means that if a user clicks "View Saved" before posing a query, there will be something
        # to go back to!
        if not request.session.get('queryurl'):
            queryurl = result_dict['application_root'] + 'search/'
            print "Set queryurl to : " + queryurl
            request.session['queryurl'] = queryurl

        suggestions = False
        query_flag = False
        if request.method =='POST':
            # handle the searches from the different interfaces
            if interface == 1:
                user_query = constructStructuredQuery(request)
            else:
                user_query = request.POST['query'].strip()
            log_event(event="QUERY_ISSUED", request=request, query=user_query)
            query_flag = True
            result_dict['page'] = page
        elif request.method == 'GET':
            getdict = request.GET
            if 'query' in getdict:
                user_query = getdict['query']
                query_flag = True
            if 'suggestion' in getdict:
                suggestions = True
            if suggestions:
                log_event(event="QUERY_SUGGESTION_ISSUED", request=request, query=user_query)

            if 'page' in getdict:
                page = int(getdict['page'])
            else:
                page = 1

        if query_flag:
            # If the user poses a blank query, we just send back a results page saying so.
            if user_query == '':
                result_dict['blank_query'] = True
                return render_to_response('trecdo/results.html', result_dict, context)
            else:
                # Get some results! Call this wrapper function which uses the Django cache backend.
                result_dict = get_results(request,
                                          page,
                                          page_len,
                                          condition,
                                          user_query,
                                          request.GET.get('noperf'),
                                          experiment_setups[ec['condition']].engine)

                #  Caching is now handled in WhooshTrecNews
                #if not request.GET.get('noperf'):
                #    # Now query for the next page of results so they are cached and ready when the user asks for them.
                #    print "Starting thread(s) to get cache next page of results..."
                #    for i in range(1, (CACHING_FORWARD_LOOK + 1)):
                #        if i == 1:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.GET.get('noperf'), experiment_setups[ec['condition']].engine, 0))
                #        else:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.GET.get('noperf'), experiment_setups[ec['condition']].engine, (i * CACHING_DELAY_FACTOR)))
                #        forward_thread.start()

                result_dict['participant'] = uname
                result_dict['task'] = taskid
                result_dict['condition'] = condition
                result_dict['interface'] = interface
                result_dict['application_root'] = '/treconomics/'
                result_dict['ajax_search_url'] = 'searcha/'
                result_dict['autocomplete'] = experiment_setups[condition].autocomplete
                result_dict['page'] = page
                result_dict['is_fast'] = 'true'
                result_dict['focus_querybox'] = 'false'

                if result_dict['trec_results'] is None:
                    result_dict['focus_querybox'] = 'true'

                if experiment_setups[condition].delay_results == 0:
                    result_dict['is_fast'] = 'false'

                if interface == 3:
                        # getQuerySuggestions(topic_num)
                        suggestions = TopicQuerySuggestion.objects.filter(topic_num=topic_num)
                        if suggestions:
                            result_dict['query_suggest_search'] = True
                            entries = []
                            for s in suggestions:
                                entries.append({'title': s.title, 'link': s.link})
                            print entries
                            result_dict['query_suggest_results'] = entries
                        # addSuggestions to results dictionary

                if result_dict['trec_results']:
                    qrp = getQueryResultPerformance(result_dict['trec_results'], topic_num)
                    log_event(event='SEARCH_RESULTS_PAGE_QUALITY',
                              request=request,
                              whooshid=page,
                              rank=qrp[0],
                              judgement=qrp[1])

                result_dict['delay_results'] = experiment_setups[condition].delay_results
                result_dict['delay_docview'] = experiment_setups[condition].delay_docview

                queryurl = '/treconomics/search/?query=' + user_query.replace(' ', '+') + '&page=' + str(page) + '&noperf=true'
                print "Set queryurl to : " + queryurl
                request.session['queryurl'] = queryurl

                result_dict['display_query'] = result_dict['query']

                if len(result_dict['query']) > 50:
                    result_dict['display_query'] = result_dict['query'][0:50] + '...'

                print "Delay time - query execution time: {0}".format(experiment_setups[condition].delay_results - result_dict['query_time'])

                if experiment_setups[condition].delay_results > 0 and (experiment_setups[condition].delay_results - result_dict['query_time'] > 0) and is_from_search_request(page):
                    log_event(event='DELAY_RESULTS_PAGE', request=request, page=page)
                    sleep(experiment_setups[condition].delay_results - result_dict['query_time'])  # Delay search results.

                set_results_session_var(request, result_dict)

                log_event(event='VIEW_SEARCH_RESULTS_PAGE', request=request, page=page)
                request.session['last_request_time'] = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')
                return render_to_response('trecdo/results.html', result_dict, context)
        else:
            log_event(event='VIEW_SEARCH_BOX', request=request, page=page)
            result_dict['delay_results'] = experiment_setups[condition].delay_results
            result_dict['delay_docview'] = experiment_setups[condition].delay_docview
            return render_to_response('trecdo/search.html', result_dict, context)
Example #4
0
def search(request, taskid=-1):
    def is_from_search_request(new_page_no):
        """
        Returns True iif the URL of the referer is a standard search request.
        This is used to determine if we should delay results appearing.

        The new page number of required to check against the page number from the referer.
        If they match, we don't delay - if they don't, we do.
        """
        http_referer = request.META['HTTP_REFERER']
        http_referer = http_referer.strip().split('&')
        page = 1

        for item in http_referer:
            if 'page=' in item:
                item = item.split('=')
                page = int(item[1])

        if request.POST.get('newquery') == 'true':
            return '/treconomics/search/' in request.META['HTTP_REFERER']

        return '/treconomics/search/' in request.META[
            'HTTP_REFERER'] and new_page_no == page

    if isinstance(taskid, unicode):
        taskid = int(taskid)

    # If taskid is set, then it marks the start of a new search task
    # Update the session variable to reflect this
    if taskid >= 0:
        request.session['start_time'] = datetime.datetime.now().strftime(
            "%Y-%m-%d %H:%M:%S")
        request.session['taskid'] = taskid

        if taskid == 0:
            log_event(event="PRACTICE_SEARCH_TASK_COMMENCED", request=request)
        else:
            log_event(event="SEARCH_TASK_COMMENCED", request=request)

    #check for timeout
    if time_search_experiment_out(request):
        return HttpResponseRedirect('/treconomics/timeout/')
    else:
        """show base index view"""
        context = RequestContext(request)
        ec = get_experiment_context(request)
        uname = ec["username"]
        condition = ec["condition"]
        taskid = ec["taskid"]
        topic_num = ec["topicnum"]
        interface = experiment_setups[condition].get_interface()
        page_len = experiment_setups[condition].rpp
        page = 1

        result_dict = {}
        result_dict['participant'] = uname
        result_dict['task'] = taskid
        result_dict['condition'] = condition
        result_dict['interface'] = interface
        result_dict['application_root'] = '/treconomics/'
        result_dict['ajax_search_url'] = 'searcha/'
        result_dict['autocomplete'] = experiment_setups[condition].autocomplete
        result_dict['is_fast'] = 'true'

        if experiment_setups[condition].delay_results == 0:
            result_dict['is_fast'] = 'false'

        # Ensure that we set a queryurl.
        # This means that if a user clicks "View Saved" before posing a query, there will be something
        # to go back to!
        if not request.session.get('queryurl'):
            queryurl = result_dict['application_root'] + 'search/'
            print "Set queryurl to : " + queryurl
            request.session['queryurl'] = queryurl

        suggestions = False
        query_flag = False
        if request.method == 'POST':
            # handle the searches from the different interfaces
            if interface == 1:
                user_query = constructStructuredQuery(request)
            else:
                user_query = request.POST['query'].strip()
            log_event(event="QUERY_ISSUED", request=request, query=user_query)
            query_flag = True
            result_dict['page'] = page
        elif request.method == 'GET':
            getdict = request.GET
            if 'query' in getdict:
                user_query = getdict['query']
                query_flag = True
            if 'suggestion' in getdict:
                suggestions = True
            if suggestions:
                log_event(event="QUERY_SUGGESTION_ISSUED",
                          request=request,
                          query=user_query)

            if 'page' in getdict:
                page = int(getdict['page'])
            else:
                page = 1

        if query_flag:
            # If the user poses a blank query, we just send back a results page saying so.
            if user_query == '':
                result_dict['blank_query'] = True
                return render_to_response('trecdo/results.html', result_dict,
                                          context)
            else:
                # Get some results! Call this wrapper function which uses the Django cache backend.
                result_dict = get_results(
                    request, page, page_len, condition, user_query,
                    request.GET.get('noperf'),
                    experiment_setups[ec['condition']].engine)

                #  Caching is now handled in WhooshTrecNews
                #if not request.GET.get('noperf'):
                #    # Now query for the next page of results so they are cached and ready when the user asks for them.
                #    print "Starting thread(s) to get cache next page of results..."
                #    for i in range(1, (CACHING_FORWARD_LOOK + 1)):
                #        if i == 1:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.GET.get('noperf'), experiment_setups[ec['condition']].engine, 0))
                #        else:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.GET.get('noperf'), experiment_setups[ec['condition']].engine, (i * CACHING_DELAY_FACTOR)))
                #        forward_thread.start()

                result_dict['participant'] = uname
                result_dict['task'] = taskid
                result_dict['condition'] = condition
                result_dict['interface'] = interface
                result_dict['application_root'] = '/treconomics/'
                result_dict['ajax_search_url'] = 'searcha/'
                result_dict['autocomplete'] = experiment_setups[
                    condition].autocomplete
                result_dict['page'] = page
                result_dict['is_fast'] = 'true'
                result_dict['focus_querybox'] = 'false'

                if result_dict['trec_results'] is None:
                    result_dict['focus_querybox'] = 'true'

                if experiment_setups[condition].delay_results == 0:
                    result_dict['is_fast'] = 'false'

                if interface == 3:
                    # getQuerySuggestions(topic_num)
                    suggestions = TopicQuerySuggestion.objects.filter(
                        topic_num=topic_num)
                    if suggestions:
                        result_dict['query_suggest_search'] = True
                        entries = []
                        for s in suggestions:
                            entries.append({'title': s.title, 'link': s.link})
                        print entries
                        result_dict['query_suggest_results'] = entries
                    # addSuggestions to results dictionary

                if result_dict['trec_results']:
                    qrp = getQueryResultPerformance(
                        result_dict['trec_results'], topic_num)
                    log_event(event='SEARCH_RESULTS_PAGE_QUALITY',
                              request=request,
                              whooshid=page,
                              rank=qrp[0],
                              judgement=qrp[1])

                result_dict['delay_results'] = experiment_setups[
                    condition].delay_results
                result_dict['delay_docview'] = experiment_setups[
                    condition].delay_docview

                queryurl = '/treconomics/search/?query=' + user_query.replace(
                    ' ', '+') + '&page=' + str(page) + '&noperf=true'
                print "Set queryurl to : " + queryurl
                request.session['queryurl'] = queryurl

                result_dict['display_query'] = result_dict['query']

                if len(result_dict['query']) > 50:
                    result_dict[
                        'display_query'] = result_dict['query'][0:50] + '...'

                print "Delay time - query execution time: {0}".format(
                    experiment_setups[condition].delay_results -
                    result_dict['query_time'])

                if experiment_setups[condition].delay_results > 0 and (
                        experiment_setups[condition].delay_results -
                        result_dict['query_time'] >
                        0) and is_from_search_request(page):
                    log_event(event='DELAY_RESULTS_PAGE',
                              request=request,
                              page=page)
                    sleep(experiment_setups[condition].delay_results -
                          result_dict['query_time'])  # Delay search results.

                set_results_session_var(request, result_dict)

                log_event(event='VIEW_SEARCH_RESULTS_PAGE',
                          request=request,
                          page=page)
                request.session[
                    'last_request_time'] = datetime.datetime.utcnow().strftime(
                        '%Y-%m-%d %H:%M:%S.%f')
                return render_to_response('trecdo/results.html', result_dict,
                                          context)
        else:
            log_event(event='VIEW_SEARCH_BOX', request=request, page=page)
            result_dict['delay_results'] = experiment_setups[
                condition].delay_results
            result_dict['delay_docview'] = experiment_setups[
                condition].delay_docview
            return render_to_response('trecdo/search.html', result_dict,
                                      context)