Example #1
0
def view_log_query_focus(request):
    """
    Logs that the user has focused the query input box.

    Returns HTTP 400 with a JSON timeout flag if the search experiment
    has timed out; otherwise logs a QUERY_FOCUS event and returns 1.
    """
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}), content_type='application/json')

    # Removed an unused `context = RequestContext(request)` assignment;
    # the variable was never referenced in this view.
    log_event(event='QUERY_FOCUS', request=request)
    return HttpResponse(1)
Example #2
0
def view_log_query_focus(request):
    """
    Logs that the user has focused the query input box.

    Returns HTTP 400 with a JSON timeout flag if the search experiment
    has timed out; otherwise logs a QUERY_FOCUS event and returns 1.
    """
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}),
                                      content_type='application/json')

    # Removed an unused `context = RequestContext(request)` assignment;
    # the variable was never referenced in this view.
    log_event(event='QUERY_FOCUS', request=request)
    return HttpResponse(1)
Example #3
0
def suggestion_selected(request):
    """
    Called when a suggestion is selected from the suggestion interface.
    Logs the suggestion being selected.
    """
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        timeout_body = json.dumps({'timeout': True})
        return HttpResponseBadRequest(timeout_body, content_type='application/json')

    chosen_query = request.GET.get('new_query')
    log_event(event='AUTOCOMPLETE_QUERY_SELECTED', query=chosen_query, request=request)
    response_body = json.dumps({'logged': True})
    return HttpResponse(response_body, content_type='application/json')
Example #4
0
def suggestion_hover(request):
    """
    Called when a user hovers over a query suggestion.
    Logs the suggestion text together with its rank in the list.
    """
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}),
                                      content_type='application/json')

    hovered_text = request.GET.get('suggestion')
    hovered_rank = int(request.GET.get('rank'))

    log_event(event='AUTOCOMPLETE_QUERY_HOVER',
              query=hovered_text,
              rank=hovered_rank,
              request=request)
    return HttpResponse(json.dumps({'logged': True}),
                        content_type='application/json')
Example #5
0
def view_log_hover(request):
    """
    View which logs a user hovering over a search result.

    The 'status' GET parameter ('in' or 'out') selects which hover event
    is logged, together with the document's rank, page, current judgement
    and field length.
    """
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}),
                                      content_type='application/json')

    experiment = get_experiment_context(request)
    participant = User.objects.get(username=experiment['username'])
    task = experiment['taskid']

    params = request.GET
    status = params.get('status')
    rank = params.get('rank')
    page = params.get('page')
    trec_id = params.get('trecID')
    whoosh_id = params.get('whooshID')
    doc_length = ixr.doc_field_length(long(whoosh_id), 'content')

    # A judgement of -2 indicates the document has no recorded judgement.
    try:
        record = DocumentsExamined.objects.get(user=participant,
                                               task=task,
                                               doc_num=trec_id)
        judgement = record.judgement
    except ObjectDoesNotExist:
        judgement = -2

    # Dispatch on status; any other value logs nothing (as before).
    hover_events = {'in': "DOCUMENT_HOVER_IN", 'out': "DOCUMENT_HOVER_OUT"}
    if status in hover_events:
        log_event(event=hover_events[status],
                  request=request,
                  whooshid=whoosh_id,
                  trecid=trec_id,
                  rank=rank,
                  page=page,
                  judgement=judgement,
                  doc_length=doc_length)

    return HttpResponse(json.dumps({'logged': True}),
                        content_type='application/json')
Example #6
0
def show_saved_documents(request):
    context = RequestContext(request)

    # Timed out?
    if time_search_experiment_out(request):
        return HttpResponseRedirect('/treconomics/timeout/')

    ec = get_experiment_context(request)
    taskid = ec['taskid']
    condition = ec['condition']
    uname = ec['username']
    current_search = request.session['queryurl']

    user_judgement = -2
    if request.method == 'GET':
        getdict = request.GET

        if 'judge' not in getdict and 'docid' not in getdict:
            # Log only if user is entering the page, not after clicking a relevant button
            print "LOG_VIEW_SAVED_DOCS"
            log_event(event="VIEW_SAVED_DOCS", request=request)

        if 'judge' in getdict:
            user_judgement = int(getdict['judge'])
        if 'docid' in getdict:
            docid = int(getdict['docid'])
        if (user_judgement > -2) and (docid > -1):
            #updates the judgement for this document
            doc_length = ixr.doc_field_length(docid, 'content')
            trecid = ixr.stored_fields(docid)['docid']

            user_judgement = mark_document(request=request,
                                           whooshid=docid,
                                           trecid=trecid,
                                           judgement=user_judgement,
                                           doc_length=doc_length)

    # Get documents that are for this task, and for this user
    u = User.objects.get(username=uname)
    docs = DocumentsExamined.objects.filter(user=u).filter(task=taskid)
    return render_to_response(
        'trecdo/saved_documents.html', {
            'participant': uname,
            'task': taskid,
            'condition': condition,
            'current_search': current_search,
            'docs': docs
        }, context)
Example #7
0
def suggestion_selected(request):
    """
    Called when a suggestion is selected from the suggestion interface.
    Logs the suggestion being selected.
    """
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}),
                                      content_type='application/json')

    selected = request.GET.get('new_query')
    log_event(event='AUTOCOMPLETE_QUERY_SELECTED', query=selected, request=request)

    payload = json.dumps({'logged': True})
    return HttpResponse(payload, content_type='application/json')
Example #8
0
def view_log_hover(request):
    """
    View which logs a user hovering over a search result.

    Depending on the 'status' GET parameter ('in' or 'out'), a hover-in or
    hover-out event is logged with the document's rank, page, judgement
    and field length.
    """
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}), content_type='application/json')

    context_data = get_experiment_context(request)
    participant = User.objects.get(username=context_data['username'])

    query_params = request.GET
    hover_status = query_params.get('status')
    result_rank = query_params.get('rank')
    result_page = query_params.get('page')
    trec_doc = query_params.get('trecID')
    whoosh_doc = query_params.get('whooshID')
    length = ixr.doc_field_length(long(whoosh_doc), 'content')

    try:
        record = DocumentsExamined.objects.get(user=participant,
                                               task=context_data['taskid'],
                                               doc_num=trec_doc)
        doc_judgement = record.judgement
    except ObjectDoesNotExist:
        doc_judgement = -2  # No judgement recorded for this document.

    # Map status onto the event name; other statuses log nothing (as before).
    hover_events = {'in': "DOCUMENT_HOVER_IN", 'out': "DOCUMENT_HOVER_OUT"}
    if hover_status in hover_events:
        log_event(event=hover_events[hover_status],
                  request=request,
                  whooshid=whoosh_doc,
                  trecid=trec_doc,
                  rank=result_rank,
                  page=result_page,
                  judgement=doc_judgement,
                  doc_length=length)

    return HttpResponse(json.dumps({'logged': True}), content_type='application/json')
Example #9
0
def suggestion_hover(request):
    """
    Called when a user hovers over a query suggestion.
    Logs the suggestion and its position in the suggestion list.
    """
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        timeout_payload = json.dumps({'timeout': True})
        return HttpResponseBadRequest(timeout_payload,
                                      content_type='application/json')

    suggestion_text = request.GET.get('suggestion')
    suggestion_rank = int(request.GET.get('rank'))

    log_event(event='AUTOCOMPLETE_QUERY_HOVER', query=suggestion_text,
              rank=suggestion_rank, request=request)

    return HttpResponse(json.dumps({'logged': True}),
                        content_type='application/json')
Example #10
0
def autocomplete_suggestion(request):
    """
    Handles the autocomplete suggestion service.

    Expects a 'suggest' GET parameter with the characters typed so far.
    Returns JSON {'count': ..., 'results': [...]} on success, HTTP 400
    with {'timeout': True} if the experiment has timed out, and HTTP 400
    with {'error': True} when no 'suggest' parameter is supplied.
    """
    # Get the condition from the user's experiment context.
    # This will yield us access to the autocomplete trie!
    ec = get_experiment_context(request)
    condition = ec['condition']

    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}),
                                      content_type='application/json')

    if request.GET.get('suggest'):
        results = []

        # Suggestions are only generated when this condition has
        # autocomplete enabled; otherwise an empty result set goes back.
        if experiment_setups[condition].autocomplete:
            chars = unicode(request.GET.get('suggest'))

            # See if the cache has what we are looking for.
            # If it does, pull it out and use that.
            # If it doesn't, query the trie and store the results in the cache before returning.
            autocomplete_cache = cache.get_cache('autocomplete')
            results = autocomplete_cache.get(chars)

            # NOTE(review): an empty cached result list is falsy, so prefixes
            # with no suggestions are re-queried against the trie every time.
            if not results:
                suggestion_trie = experiment_setups[condition].get_trie()
                results = suggestion_trie.suggest(chars)
                cache_time = 300  # Cache the suggestions for five minutes.

                autocomplete_cache.set(chars, results, cache_time)

        response_data = {
            'count': len(results),
            'results': results,
        }

        return HttpResponse(json.dumps(response_data),
                            content_type='application/json')

    return HttpResponseBadRequest(json.dumps({'error': True}),
                                  content_type='application/json')
Example #11
0
def docview_delay(request):
    """
    Logs when a user clicks on a document, but is delayed from viewing the document.
    """
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}),
                                      content_type='application/json')

    experiment = get_experiment_context(request)
    viewer = User.objects.get(username=experiment['username'])

    params = request.GET
    result_rank = params.get('rank')
    result_page = params.get('page')
    trec_doc = params.get('trecID')
    whoosh_doc = params.get('whooshID')
    length = ixr.doc_field_length(long(whoosh_doc), 'content')

    try:
        record = DocumentsExamined.objects.get(user=viewer,
                                               task=experiment['taskid'],
                                               doc_num=trec_doc)
        judgement = record.judgement
    except ObjectDoesNotExist:
        judgement = -2  # No judgement recorded yet for this document.

    log_event(event="DOCUMENT_DELAY_VIEW",
              request=request,
              whooshid=whoosh_doc,
              trecid=trec_doc,
              rank=result_rank,
              page=result_page,
              judgement=judgement,
              doc_length=length)

    return HttpResponse(json.dumps({'logged': True}),
                        content_type='application/json')
Example #12
0
def autocomplete_suggestion(request):
    """
    Handles the autocomplete suggestion service.

    Expects a 'suggest' GET parameter with the characters typed so far.
    Returns JSON {'count': ..., 'results': [...]} on success, HTTP 400
    with {'timeout': True} if the experiment has timed out, and HTTP 400
    with {'error': True} when no 'suggest' parameter is supplied.
    """
    # Get the condition from the user's experiment context.
    # This will yield us access to the autocomplete trie!
    ec = get_experiment_context(request)
    condition = ec['condition']

    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}), content_type='application/json')

    if request.GET.get('suggest'):
        results = []

        # Suggestions are only generated when this condition has
        # autocomplete enabled; otherwise an empty result set goes back.
        if experiment_setups[condition].autocomplete:
            chars = unicode(request.GET.get('suggest'))

            # See if the cache has what we are looking for.
            # If it does, pull it out and use that.
            # If it doesn't, query the trie and store the results in the cache before returning.
            autocomplete_cache = cache.get_cache('autocomplete')
            results = autocomplete_cache.get(chars)

            # NOTE(review): an empty cached result list is falsy, so prefixes
            # with no suggestions are re-queried against the trie every time.
            if not results:
                suggestion_trie = experiment_setups[condition].get_trie()
                results = suggestion_trie.suggest(chars)
                cache_time = 300  # Cache the suggestions for five minutes.

                autocomplete_cache.set(chars, results, cache_time)

        response_data = {
            'count': len(results),
            'results': results,
        }

        return HttpResponse(json.dumps(response_data), content_type='application/json')

    return HttpResponseBadRequest(json.dumps({'error': True}), content_type='application/json')
Example #13
0
def show_saved_documents(request):
    context = RequestContext(request)

    # Timed out?
    if time_search_experiment_out(request):
        return HttpResponseRedirect('/treconomics/timeout/')

    ec = get_experiment_context(request)
    taskid = ec['taskid']
    condition = ec['condition']
    uname = ec['username']
    current_search = request.session['queryurl']

    user_judgement = -2
    if request.method == 'GET':
        getdict = request.GET

        if 'judge' not in getdict and 'docid' not in getdict:
            # Log only if user is entering the page, not after clicking a relevant button
            print "LOG_VIEW_SAVED_DOCS"
            log_event(event="VIEW_SAVED_DOCS", request=request)

        if 'judge' in getdict:
            user_judgement = int(getdict['judge'])
        if 'docid' in getdict:
            docid = int(getdict['docid'])
        if (user_judgement > -2) and (docid > -1):
            #updates the judgement for this document
            doc_length = ixr.doc_field_length(docid, 'content')
            trecid = ixr.stored_fields(docid)['docid']

            user_judgement = mark_document(request=request, whooshid=docid, trecid=trecid, judgement=user_judgement, doc_length=doc_length)

    # Get documents that are for this task, and for this user
    u = User.objects.get(username=uname)
    docs = DocumentsExamined.objects.filter(user=u).filter(task=taskid)
    return render_to_response('trecdo/saved_documents.html', {'participant': uname, 'task': taskid, 'condition': condition, 'current_search': current_search, 'docs': docs}, context)
Example #14
0
def docview_delay(request):
    """
    Logs when a user clicks on a document, but is delayed from viewing the document.
    """
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}), content_type='application/json')

    context_data = get_experiment_context(request)
    participant = User.objects.get(username=context_data['username'])

    query_params = request.GET
    clicked_rank = query_params.get('rank')
    clicked_page = query_params.get('page')
    trec_doc = query_params.get('trecID')
    whoosh_doc = query_params.get('whooshID')
    length = ixr.doc_field_length(long(whoosh_doc), 'content')

    try:
        examined_doc = DocumentsExamined.objects.get(user=participant,
                                                     task=context_data['taskid'],
                                                     doc_num=trec_doc)
        judgement = examined_doc.judgement
    except ObjectDoesNotExist:
        judgement = -2  # Document has not been judged yet.

    log_event(event="DOCUMENT_DELAY_VIEW",
              request=request,
              whooshid=whoosh_doc,
              trecid=trec_doc,
              rank=clicked_rank,
              page=clicked_page,
              judgement=judgement,
              doc_length=length)

    return HttpResponse(json.dumps({'logged': True}), content_type='application/json')
Example #15
0
def show_document(request, whoosh_docid):
    """
    Displays the document with the given Whoosh document ID.

    For AJAX requests, records a relevance judgement taken from the
    'judge' GET parameter (via mark_document) and returns the resulting
    judgement as JSON. For regular requests, marks the document as viewed
    and renders the document page. Redirects when the search experiment
    has timed out.
    """
    #check for timeout
    if time_search_experiment_out(request):
        return HttpResponseRedirect('/treconomics/timeout/')

    context = RequestContext(request)
    ec = get_experiment_context(request)
    uname = ec["username"]
    taskid = ec["taskid"]

    condition = ec["condition"]
    current_search = request.session['queryurl']

    # get document from index
    fields = ixr.stored_fields(int(whoosh_docid))
    title = fields["title"]
    content = fields["content"]
    docnum = fields["docid"]
    doc_date = fields["timedate"]
    doc_source = fields["source"]
    docid = whoosh_docid
    topicnum = ec["topicnum"]

    def get_document_rank():
        """
        Returns the rank (integer) for the given document ID.
        -1 is returned if the document is not found in the session ranked list.
        """
        the_docid = int(whoosh_docid)
        ranked_results = request.session.get('results_ranked', [])

        # Some list comprehension - returns a list of one integer with the rank of a given document
        # if it exists in ranked_results; returns a blank list if the document is not present.
        at_rank = [item[1] for item in ranked_results if item[0] == the_docid]

        if len(at_rank) > 0:
            return at_rank[0]
        else:
            return -1

    # check if there are any get parameters.
    # -2 appears to be the sentinel for "no judgement" elsewhere in this file.
    user_judgement = -2
    rank = 0
    if request.is_ajax():
        getdict = request.GET

        if 'judge' in getdict:
            user_judgement = int(getdict['judge'])
            rank = get_document_rank()

            #marks that the document has been marked rel or nonrel
            # NOTE(review): the length lookup uses the 'docid' GET parameter
            # (defaulting to 0) rather than the whoosh_docid argument —
            # confirm this is intentional.
            doc_length = ixr.doc_field_length(
                long(request.GET.get('docid', 0)), 'content')
            user_judgement = mark_document(request, docid, user_judgement,
                                           title, docnum, rank, doc_length)
            #mark_document handles logging of this event
        return HttpResponse(simplejson.dumps(user_judgement),
                            mimetype='application/javascript')
    else:
        if time_search_experiment_out(request):
            return HttpResponseRedirect('/treconomics/next/')
        else:
            #marks that the document has been viewed
            rank = get_document_rank()

            doc_length = ixr.doc_field_length(long(docid), 'content')
            user_judgement = mark_document(request, docid, user_judgement,
                                           title, docnum, rank, doc_length)

            context_dict = {
                'participant': uname,
                'task': taskid,
                'condition': condition,
                'current_search': current_search,
                'docid': docid,
                'docnum': docnum,
                'title': title,
                'doc_date': doc_date,
                'doc_source': doc_source,
                'content': content,
                'user_judgement': user_judgement,
                'rank': rank
            }

            # Allow the template to show a "back to assessment" link when requested.
            if request.GET.get('backtoassessment', False):
                context_dict['backtoassessment'] = True

            return render_to_response('trecdo/document.html', context_dict,
                                      context)
Example #16
0
def ajax_search(request, taskid=-1):
    """
    AJAX search view.

    With a non-negative taskid, records the start of a new search task in
    the session and redirects to the AJAX search page. Otherwise:
    a POST runs the user's query, optionally delays the response per the
    experimental condition, and returns the results as JSON; a GET renders
    the search page template. A timed-out experiment yields HTTP 400 with
    a JSON timeout flag.
    """
    if isinstance(taskid, unicode):
        taskid = int(taskid)

    # If taskid is set, then it marks the start of a new search task
    # Update the session variable to reflect this
    if taskid >= 0:
        request.session['start_time'] = datetime.datetime.now().strftime(
            "%Y-%m-%d %H:%M:%S")
        request.session['taskid'] = taskid

        # Task 0 is treated as the practice task.
        if taskid == 0:
            log_event(event="PRACTICE_SEARCH_TASK_COMMENCED", request=request)
        else:
            log_event(event="SEARCH_TASK_COMMENCED", request=request)

        return HttpResponseRedirect('/treconomics/searcha/')

    # Has the experiment timed out? If so, indicate to the user.
    # Send a JSON object back which will be interpreted by the JavaScript.
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}),
                                      content_type='application/json')
    else:
        context = RequestContext(request)
        context_dict = {}

        context_dict['ajax_enabled'] = True
        context_dict['application_root'] = '/treconomics/'
        context_dict['ajax_search_url'] = 'searcha/'

        # Ensure that we set a queryurl.
        # This means that if a user clicks "View Saved" before posing a query, there will be something
        # to go back to!
        if not request.session.get('queryurl'):
            queryurl = context_dict['application_root'] + 'searcha/'
            print "Set queryurl to : " + queryurl
            request.session['queryurl'] = queryurl

        # Gather the usual suspects...
        ec = get_experiment_context(request)
        uname = ec["username"]
        condition = ec["condition"]
        taskid = ec["taskid"]
        topic_num = ec["topicnum"]
        interface = experiment_setups[condition].get_interface()
        page_len = experiment_setups[condition].rpp
        page = 1

        context_dict['participant'] = uname
        context_dict['task'] = taskid
        context_dict['condition'] = condition
        context_dict['interface'] = interface
        context_dict['autocomplete'] = experiment_setups[
            condition].autocomplete
        # NOTE(review): 'true' is set when delay_results == 0 looks inverted
        # relative to the name is_fast in search() below — confirm intended.
        context_dict['is_fast'] = 'true' if experiment_setups[
            condition].delay_results == 0 else 'false'

        if request.method == 'POST':
            # AJAX POST request for a given query.
            # Returns a AJAX response with the document list to populate the container <DIV>.

            # Should we do a delay? This is true when a user navigates back to the results page from elsewhere.
            do_delay = bool(request.POST.get('noDelay'))

            # Interface 1 builds a structured query from the POSTed form fields.
            if interface == 1:
                querystring = request.POST.copy()
                del querystring['csrfmiddlewaretoken']
                request.session[
                    'last_ajax_interface1_querystring'] = querystring

                user_query = constructStructuredQuery(request)
            else:
                user_query = request.POST.get('query').strip()

            if not do_delay:  # Do not log the query issued event if the user is returning to the results page.
                log_event(event="QUERY_ISSUED",
                          request=request,
                          query=user_query)

            page_request = request.POST.get('page')

            if page_request:
                page = int(page_request)

            if user_query == "":
                # Nothing to query, tell the client.
                return HttpResponse(json.dumps({'no_results': True}),
                                    content_type='application/json')
            else:
                # Get some results! Call this wrapper function which uses the Django cache backend.
                result_dict = get_results(
                    request, page, page_len, condition, user_query,
                    request.POST.get('noperf'),
                    experiment_setups[ec['condition']].engine)

                #  Caching is now handled by WhooshTrecNews
                #if not request.POST.get('noperf'):
                #    # Now query for the next page of results so they are cached and ready when the user asks for them.
                #    print "Starting thread(s) to get cache next page of results..."
                #    for i in range(1, (CACHING_FORWARD_LOOK + 1)):
                #        if i == 1:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.POST.get('noperf'), experiment_setups[ec['condition']].engine, 0))
                #        else:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.POST.get('noperf'), experiment_setups[ec['condition']].engine, (i * CACHING_DELAY_FACTOR)))
                #        forward_thread.start()

                # Remember a URL that reproduces these results so "View Saved"
                # can navigate back to them.
                queryurl = context_dict['application_root'] + context_dict[
                    'ajax_search_url'] + '#query=' + user_query.replace(
                        ' ', '+') + '&page=' + str(page) + '&noperf=true'
                print "Set queryurl to : " + queryurl
                request.session['queryurl'] = queryurl

                print "Delay time - query execution time: {0}".format(
                    experiment_setups[condition].delay_results -
                    result_dict['query_time'])

                # Artificially delay the response so the total wait matches the
                # condition's configured delay (minus time already spent querying).
                if experiment_setups[condition].delay_results > 0 and (
                        experiment_setups[condition].delay_results -
                        result_dict['query_time'] > 0) and not do_delay:
                    log_event(event='DELAY_RESULTS_PAGE',
                              request=request,
                              page=page)
                    sleep(experiment_setups[condition].delay_results -
                          result_dict['query_time'])  # Delay search results.

                result_dict['display_query'] = result_dict['query']

                # Truncate long queries for display purposes only.
                if len(result_dict['query']) > 50:
                    result_dict[
                        'display_query'] = result_dict['query'][0:50] + '...'

                if result_dict['trec_results']:
                    qrp = getQueryResultPerformance(
                        result_dict['trec_results'], topic_num)
                    log_event(event='SEARCH_RESULTS_PAGE_QUALITY',
                              request=request,
                              whooshid=page,
                              rank=qrp[0],
                              judgement=qrp[1])

                set_results_session_var(request, result_dict)

                # Serialis(z?)e the data structure and send it back
                #if not do_delay:  # Only log the following if the user is not returning back to the results page.
                log_event(event='VIEW_SEARCH_RESULTS_PAGE',
                          request=request,
                          page=page)
                request.session[
                    'last_request_time'] = datetime.datetime.utcnow().strftime(
                        '%Y-%m-%d %H:%M:%S.%f')
                return HttpResponse(json.dumps(result_dict),
                                    content_type='application/json')
        else:
            # Render the search template as usual...
            log_event(event="VIEW_SEARCH_BOX", request=request, page=page)
            context_dict['delay_results'] = experiment_setups[
                condition].delay_results
            context_dict['delay_docview'] = experiment_setups[
                condition].delay_docview
            return render_to_response('trecdo/search.html', context_dict,
                                      context)
Example #17
0
def search(request, taskid=-1):
    def is_from_search_request(new_page_no):
        """
        Returns True iif the URL of the referer is a standard search request.
        This is used to determine if we should delay results appearing.

        The new page number of required to check against the page number from the referer.
        If they match, we don't delay - if they don't, we do.
        """
        http_referer = request.META['HTTP_REFERER']
        http_referer = http_referer.strip().split('&')
        page = 1

        for item in http_referer:
            if 'page=' in item:
                item = item.split('=')
                page = int(item[1])

        if request.POST.get('newquery') == 'true':
            return '/treconomics/search/' in request.META['HTTP_REFERER']

        return '/treconomics/search/' in request.META[
            'HTTP_REFERER'] and new_page_no == page

    if isinstance(taskid, unicode):
        taskid = int(taskid)

    # If taskid is set, then it marks the start of a new search task
    # Update the session variable to reflect this
    if taskid >= 0:
        request.session['start_time'] = datetime.datetime.now().strftime(
            "%Y-%m-%d %H:%M:%S")
        request.session['taskid'] = taskid

        if taskid == 0:
            log_event(event="PRACTICE_SEARCH_TASK_COMMENCED", request=request)
        else:
            log_event(event="SEARCH_TASK_COMMENCED", request=request)

    #check for timeout
    if time_search_experiment_out(request):
        return HttpResponseRedirect('/treconomics/timeout/')
    else:
        """show base index view"""
        context = RequestContext(request)
        ec = get_experiment_context(request)
        uname = ec["username"]
        condition = ec["condition"]
        taskid = ec["taskid"]
        topic_num = ec["topicnum"]
        interface = experiment_setups[condition].get_interface()
        page_len = experiment_setups[condition].rpp
        page = 1

        result_dict = {}
        result_dict['participant'] = uname
        result_dict['task'] = taskid
        result_dict['condition'] = condition
        result_dict['interface'] = interface
        result_dict['application_root'] = '/treconomics/'
        result_dict['ajax_search_url'] = 'searcha/'
        result_dict['autocomplete'] = experiment_setups[condition].autocomplete
        result_dict['is_fast'] = 'true'

        if experiment_setups[condition].delay_results == 0:
            result_dict['is_fast'] = 'false'

        # Ensure that we set a queryurl.
        # This means that if a user clicks "View Saved" before posing a query, there will be something
        # to go back to!
        if not request.session.get('queryurl'):
            queryurl = result_dict['application_root'] + 'search/'
            print "Set queryurl to : " + queryurl
            request.session['queryurl'] = queryurl

        suggestions = False
        query_flag = False
        if request.method == 'POST':
            # handle the searches from the different interfaces
            if interface == 1:
                user_query = constructStructuredQuery(request)
            else:
                user_query = request.POST['query'].strip()
            log_event(event="QUERY_ISSUED", request=request, query=user_query)
            query_flag = True
            result_dict['page'] = page
        elif request.method == 'GET':
            getdict = request.GET
            if 'query' in getdict:
                user_query = getdict['query']
                query_flag = True
            if 'suggestion' in getdict:
                suggestions = True
            if suggestions:
                log_event(event="QUERY_SUGGESTION_ISSUED",
                          request=request,
                          query=user_query)

            if 'page' in getdict:
                page = int(getdict['page'])
            else:
                page = 1

        if query_flag:
            # If the user poses a blank query, we just send back a results page saying so.
            if user_query == '':
                result_dict['blank_query'] = True
                return render_to_response('trecdo/results.html', result_dict,
                                          context)
            else:
                # Get some results! Call this wrapper function which uses the Django cache backend.
                result_dict = get_results(
                    request, page, page_len, condition, user_query,
                    request.GET.get('noperf'),
                    experiment_setups[ec['condition']].engine)

                #  Caching is now handled in WhooshTrecNews
                #if not request.GET.get('noperf'):
                #    # Now query for the next page of results so they are cached and ready when the user asks for them.
                #    print "Starting thread(s) to get cache next page of results..."
                #    for i in range(1, (CACHING_FORWARD_LOOK + 1)):
                #        if i == 1:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.GET.get('noperf'), experiment_setups[ec['condition']].engine, 0))
                #        else:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.GET.get('noperf'), experiment_setups[ec['condition']].engine, (i * CACHING_DELAY_FACTOR)))
                #        forward_thread.start()

                result_dict['participant'] = uname
                result_dict['task'] = taskid
                result_dict['condition'] = condition
                result_dict['interface'] = interface
                result_dict['application_root'] = '/treconomics/'
                result_dict['ajax_search_url'] = 'searcha/'
                result_dict['autocomplete'] = experiment_setups[
                    condition].autocomplete
                result_dict['page'] = page
                result_dict['is_fast'] = 'true'
                result_dict['focus_querybox'] = 'false'

                if result_dict['trec_results'] is None:
                    result_dict['focus_querybox'] = 'true'

                if experiment_setups[condition].delay_results == 0:
                    result_dict['is_fast'] = 'false'

                if interface == 3:
                    # getQuerySuggestions(topic_num)
                    suggestions = TopicQuerySuggestion.objects.filter(
                        topic_num=topic_num)
                    if suggestions:
                        result_dict['query_suggest_search'] = True
                        entries = []
                        for s in suggestions:
                            entries.append({'title': s.title, 'link': s.link})
                        print entries
                        result_dict['query_suggest_results'] = entries
                    # addSuggestions to results dictionary

                if result_dict['trec_results']:
                    qrp = getQueryResultPerformance(
                        result_dict['trec_results'], topic_num)
                    log_event(event='SEARCH_RESULTS_PAGE_QUALITY',
                              request=request,
                              whooshid=page,
                              rank=qrp[0],
                              judgement=qrp[1])

                result_dict['delay_results'] = experiment_setups[
                    condition].delay_results
                result_dict['delay_docview'] = experiment_setups[
                    condition].delay_docview

                queryurl = '/treconomics/search/?query=' + user_query.replace(
                    ' ', '+') + '&page=' + str(page) + '&noperf=true'
                print "Set queryurl to : " + queryurl
                request.session['queryurl'] = queryurl

                result_dict['display_query'] = result_dict['query']

                if len(result_dict['query']) > 50:
                    result_dict[
                        'display_query'] = result_dict['query'][0:50] + '...'

                print "Delay time - query execution time: {0}".format(
                    experiment_setups[condition].delay_results -
                    result_dict['query_time'])

                if experiment_setups[condition].delay_results > 0 and (
                        experiment_setups[condition].delay_results -
                        result_dict['query_time'] >
                        0) and is_from_search_request(page):
                    log_event(event='DELAY_RESULTS_PAGE',
                              request=request,
                              page=page)
                    sleep(experiment_setups[condition].delay_results -
                          result_dict['query_time'])  # Delay search results.

                set_results_session_var(request, result_dict)

                log_event(event='VIEW_SEARCH_RESULTS_PAGE',
                          request=request,
                          page=page)
                request.session[
                    'last_request_time'] = datetime.datetime.utcnow().strftime(
                        '%Y-%m-%d %H:%M:%S.%f')
                return render_to_response('trecdo/results.html', result_dict,
                                          context)
        else:
            log_event(event='VIEW_SEARCH_BOX', request=request, page=page)
            result_dict['delay_results'] = experiment_setups[
                condition].delay_results
            result_dict['delay_docview'] = experiment_setups[
                condition].delay_docview
            return render_to_response('trecdo/search.html', result_dict,
                                      context)
# Example #18
# 0
def search(request, taskid=-1):
    """
    Main (full page reload) search view for the treconomics experiment.

    If ``taskid`` >= 0 the session is updated to mark the start of a new
    search task before anything else happens.  A POST (or a GET carrying a
    ``query`` parameter) runs the search and renders the results template;
    a plain GET with no query renders the empty search box.

    NOTE(review): this is Python 2 code (print statements, ``unicode``).
    """

    def is_from_search_request(new_page_no):
        """
        Returns True iff the URL of the referer is a standard search request.
        This is used to determine if we should delay results appearing.

        The new page number is required to check against the page number from
        the referer.  If they match, we don't delay - if they don't, we do.
        """
        # NOTE(review): raises KeyError when the client sends no Referer
        # header - presumably the experiment UI always supplies one; verify.
        http_referer = request.META['HTTP_REFERER']
        http_referer = http_referer.strip().split('&')
        page = 1

        # Pull the page number out of the referer's querystring, if present.
        for item in http_referer:
            if 'page=' in item:
                item = item.split('=')
                page = int(item[1])

        # A freshly issued query counts as a search request regardless of
        # which page the user was previously on.
        if request.POST.get('newquery') == 'true':
            return '/treconomics/search/' in request.META['HTTP_REFERER']

        return '/treconomics/search/' in request.META['HTTP_REFERER'] and new_page_no == page

    # URL-captured arguments arrive as unicode strings; normalise to int.
    if isinstance(taskid, unicode):
        taskid = int(taskid)

    # If taskid is set, then it marks the start of a new search task.
    # Update the session variables to reflect this.
    if taskid >= 0:
        request.session['start_time'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        request.session['taskid'] = taskid

        # Task 0 is the practice task; anything else is a real task.
        if taskid == 0:
            log_event(event="PRACTICE_SEARCH_TASK_COMMENCED", request=request)
        else:
            log_event(event="SEARCH_TASK_COMMENCED", request=request)

    # Check for an experiment timeout before doing any work.
    if time_search_experiment_out(request):
        return HttpResponseRedirect('/treconomics/timeout/')
    else:
        """show base index view"""
        context = RequestContext(request)
        # Pull the per-participant experiment settings out of the session.
        ec = get_experiment_context(request)
        uname = ec["username"]
        condition = ec["condition"]
        taskid = ec["taskid"]
        topic_num = ec["topicnum"]
        interface = experiment_setups[condition].get_interface()
        page_len = experiment_setups[condition].rpp  # results per page
        page = 1

        # Base template context, common to both the search box and results.
        result_dict = {}
        result_dict['participant'] = uname
        result_dict['task'] = taskid
        result_dict['condition'] = condition
        result_dict['interface'] = interface
        result_dict['application_root'] = '/treconomics/'
        result_dict['ajax_search_url'] = 'searcha/'
        result_dict['autocomplete'] = experiment_setups[condition].autocomplete
        result_dict['is_fast'] = 'true'

        # NOTE(review): this looks inverted - a configured delay of 0 sets
        # is_fast to 'false'.  Confirm against the template's use of is_fast.
        if experiment_setups[condition].delay_results == 0:
            result_dict['is_fast'] = 'false'

        # Ensure that we set a queryurl.
        # This means that if a user clicks "View Saved" before posing a query, there will be something
        # to go back to!
        if not request.session.get('queryurl'):
            queryurl = result_dict['application_root'] + 'search/'
            print "Set queryurl to : " + queryurl
            request.session['queryurl'] = queryurl

        suggestions = False
        query_flag = False
        if request.method =='POST':
            # handle the searches from the different interfaces
            if interface == 1:
                # Interface 1 builds the query from structured form fields.
                user_query = constructStructuredQuery(request)
            else:
                user_query = request.POST['query'].strip()
            log_event(event="QUERY_ISSUED", request=request, query=user_query)
            query_flag = True
            result_dict['page'] = page
        elif request.method == 'GET':
            getdict = request.GET
            if 'query' in getdict:
                user_query = getdict['query']
                query_flag = True
            if 'suggestion' in getdict:
                suggestions = True
            # NOTE(review): if 'suggestion' is present without 'query',
            # user_query is unbound here and this raises NameError - confirm
            # the client always sends both parameters together.
            if suggestions:
                log_event(event="QUERY_SUGGESTION_ISSUED", request=request, query=user_query)

            if 'page' in getdict:
                page = int(getdict['page'])
            else:
                page = 1

        if query_flag:
            # If the user poses a blank query, we just send back a results page saying so.
            if user_query == '':
                result_dict['blank_query'] = True
                return render_to_response('trecdo/results.html', result_dict, context)
            else:
                # Get some results! Call this wrapper function which uses the Django cache backend.
                result_dict = get_results(request,
                                          page,
                                          page_len,
                                          condition,
                                          user_query,
                                          request.GET.get('noperf'),
                                          experiment_setups[ec['condition']].engine)

                #  Caching is now handled in WhooshTrecNews
                #if not request.GET.get('noperf'):
                #    # Now query for the next page of results so they are cached and ready when the user asks for them.
                #    print "Starting thread(s) to get cache next page of results..."
                #    for i in range(1, (CACHING_FORWARD_LOOK + 1)):
                #        if i == 1:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.GET.get('noperf'), experiment_setups[ec['condition']].engine, 0))
                #        else:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.GET.get('noperf'), experiment_setups[ec['condition']].engine, (i * CACHING_DELAY_FACTOR)))
                #        forward_thread.start()

                # get_results() replaced result_dict wholesale above, so the
                # presentation fields the template needs are re-populated here.
                result_dict['participant'] = uname
                result_dict['task'] = taskid
                result_dict['condition'] = condition
                result_dict['interface'] = interface
                result_dict['application_root'] = '/treconomics/'
                result_dict['ajax_search_url'] = 'searcha/'
                result_dict['autocomplete'] = experiment_setups[condition].autocomplete
                result_dict['page'] = page
                result_dict['is_fast'] = 'true'
                result_dict['focus_querybox'] = 'false'

                # With no results, put the cursor back in the query box.
                if result_dict['trec_results'] is None:
                    result_dict['focus_querybox'] = 'true'

                # NOTE(review): same apparently-inverted is_fast logic as above.
                if experiment_setups[condition].delay_results == 0:
                    result_dict['is_fast'] = 'false'

                # Interface 3 shows curated query suggestions for the topic.
                if interface == 3:
                        # getQuerySuggestions(topic_num)
                        suggestions = TopicQuerySuggestion.objects.filter(topic_num=topic_num)
                        if suggestions:
                            result_dict['query_suggest_search'] = True
                            entries = []
                            for s in suggestions:
                                entries.append({'title': s.title, 'link': s.link})
                            print entries
                            result_dict['query_suggest_results'] = entries
                        # addSuggestions to results dictionary

                # Log the quality of this results page (rank and judgement
                # come from getQueryResultPerformance).
                if result_dict['trec_results']:
                    qrp = getQueryResultPerformance(result_dict['trec_results'], topic_num)
                    log_event(event='SEARCH_RESULTS_PAGE_QUALITY',
                              request=request,
                              whooshid=page,
                              rank=qrp[0],
                              judgement=qrp[1])

                result_dict['delay_results'] = experiment_setups[condition].delay_results
                result_dict['delay_docview'] = experiment_setups[condition].delay_docview

                # Remember a URL that reproduces this results page (noperf so
                # that revisiting it does not re-log/re-delay as a new query).
                queryurl = '/treconomics/search/?query=' + user_query.replace(' ', '+') + '&page=' + str(page) + '&noperf=true'
                print "Set queryurl to : " + queryurl
                request.session['queryurl'] = queryurl

                # Truncate long queries for display purposes only.
                result_dict['display_query'] = result_dict['query']

                if len(result_dict['query']) > 50:
                    result_dict['display_query'] = result_dict['query'][0:50] + '...'

                print "Delay time - query execution time: {0}".format(experiment_setups[condition].delay_results - result_dict['query_time'])

                # Artificially pad the response time so the total delay seen
                # by the user matches the condition's configured delay.
                if experiment_setups[condition].delay_results > 0 and (experiment_setups[condition].delay_results - result_dict['query_time'] > 0) and is_from_search_request(page):
                    log_event(event='DELAY_RESULTS_PAGE', request=request, page=page)
                    sleep(experiment_setups[condition].delay_results - result_dict['query_time'])  # Delay search results.

                set_results_session_var(request, result_dict)

                log_event(event='VIEW_SEARCH_RESULTS_PAGE', request=request, page=page)
                request.session['last_request_time'] = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')
                return render_to_response('trecdo/results.html', result_dict, context)
        else:
            # No query posed: render the plain search box.
            log_event(event='VIEW_SEARCH_BOX', request=request, page=page)
            result_dict['delay_results'] = experiment_setups[condition].delay_results
            result_dict['delay_docview'] = experiment_setups[condition].delay_docview
            return render_to_response('trecdo/search.html', result_dict, context)
# Example #19
# 0
def ajax_search(request, taskid=-1):
    """
    David's crummy AJAX search implementation.
    Actually, it's not that crummy at all.

    AJAX counterpart of search(): a POST with a query returns the results
    as JSON; a GET renders the search page template.  If ``taskid`` >= 0,
    the session is updated to mark the start of a new task and the client
    is redirected back to the bare AJAX search URL.
    """
    # URL-captured arguments arrive as unicode strings; normalise to int.
    if isinstance(taskid, unicode):
        taskid = int(taskid)

    # If taskid is set, then it marks the start of a new search task.
    # Update the session variables to reflect this.
    if taskid >= 0:
        request.session['start_time'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        request.session['taskid'] = taskid

        # Task 0 is the practice task; anything else is a real task.
        if taskid == 0:
            log_event(event="PRACTICE_SEARCH_TASK_COMMENCED", request=request)
        else:
            log_event(event="SEARCH_TASK_COMMENCED", request=request)

        return HttpResponseRedirect('/treconomics/searcha/')

    # Has the experiment timed out? If so, indicate to the user.
    # Send a JSON object back which will be interpreted by the JavaScript.
    if time_search_experiment_out(request):
        log_event(event="EXPERIMENT_TIMEOUT", request=request)
        return HttpResponseBadRequest(json.dumps({'timeout': True}), content_type='application/json')
    else:
        context = RequestContext(request)
        context_dict = {}

        context_dict['ajax_enabled'] = True
        context_dict['application_root'] = '/treconomics/'
        context_dict['ajax_search_url'] = 'searcha/'

        # Ensure that we set a queryurl.
        # This means that if a user clicks "View Saved" before posing a query, there will be something
        # to go back to!
        if not request.session.get('queryurl'):
            queryurl = context_dict['application_root'] + 'searcha/'
            print "Set queryurl to : " + queryurl
            request.session['queryurl'] = queryurl

        # Gather the usual suspects...
        ec = get_experiment_context(request)
        uname = ec["username"]
        condition = ec["condition"]
        taskid = ec["taskid"]
        topic_num = ec["topicnum"]
        interface = experiment_setups[condition].get_interface()
        page_len = experiment_setups[condition].rpp  # results per page
        page = 1

        context_dict['participant'] = uname
        context_dict['task'] = taskid
        context_dict['condition'] = condition
        context_dict['interface'] = interface
        context_dict['autocomplete'] = experiment_setups[condition].autocomplete
        # NOTE(review): as in search(), a delay of 0 maps to 'false' - looks
        # inverted; confirm against the template.
        context_dict['is_fast'] = 'true' if experiment_setups[condition].delay_results == 0 else 'false'

        if request.method == 'POST':
            # AJAX POST request for a given query.
            # Returns a AJAX response with the document list to populate the container <DIV>.

            # Should we do a delay? This is true when a user navigates back to the results page from elsewhere.
            # NOTE(review): the name is confusing - do_delay is True when the
            # client sends 'noDelay', and the sleep below runs when do_delay
            # is False.  Verify against the client-side JavaScript.
            do_delay = bool(request.POST.get('noDelay'))

            if interface == 1:
                # Interface 1: remember the raw structured querystring so the
                # form can be repopulated later, then build the query from it.
                querystring = request.POST.copy()
                del querystring['csrfmiddlewaretoken']
                request.session['last_ajax_interface1_querystring'] = querystring

                user_query = constructStructuredQuery(request)
            else:
                user_query = request.POST.get('query').strip()

            if not do_delay:  # Do not log the query issued event if the user is returning to the results page.
                log_event(event="QUERY_ISSUED", request=request, query=user_query)

            page_request = request.POST.get('page')

            if page_request:
                page = int(page_request)

            if user_query == "":
                # Nothing to query, tell the client.
                return HttpResponse(json.dumps({'no_results': True}), content_type='application/json')
            else:
                # Get some results! Call this wrapper function which uses the Django cache backend.
                result_dict = get_results(request,
                                           page,
                                           page_len,
                                           condition,
                                           user_query,
                                           request.POST.get('noperf'),
                                           experiment_setups[ec['condition']].engine)

                #  Caching is now handled by WhooshTrecNews
                #if not request.POST.get('noperf'):
                #    # Now query for the next page of results so they are cached and ready when the user asks for them.
                #    print "Starting thread(s) to get cache next page of results..."
                #    for i in range(1, (CACHING_FORWARD_LOOK + 1)):
                #        if i == 1:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.POST.get('noperf'), experiment_setups[ec['condition']].engine, 0))
                #        else:
                #            forward_thread = Thread(target=get_results, args=(request, (page + i), page_len, condition, user_query, request.POST.get('noperf'), experiment_setups[ec['condition']].engine, (i * CACHING_DELAY_FACTOR)))
                #        forward_thread.start()

                # Remember a fragment URL that reproduces this results page
                # (noperf so that revisiting does not re-log/re-delay).
                queryurl = context_dict['application_root'] + context_dict['ajax_search_url'] + '#query=' + user_query.replace(' ', '+') + '&page=' + str(page) + '&noperf=true'
                print "Set queryurl to : " + queryurl
                request.session['queryurl'] = queryurl

                print "Delay time - query execution time: {0}".format(experiment_setups[condition].delay_results - result_dict['query_time'])

                # Artificially pad the response time so the total delay seen
                # by the user matches the condition's configured delay.
                if experiment_setups[condition].delay_results > 0 and (experiment_setups[condition].delay_results - result_dict['query_time'] > 0) and not do_delay:
                    log_event(event='DELAY_RESULTS_PAGE', request=request, page=page)
                    sleep(experiment_setups[condition].delay_results - result_dict['query_time'])  # Delay search results.

                # Truncate long queries for display purposes only.
                result_dict['display_query'] = result_dict['query']

                if len(result_dict['query']) > 50:
                    result_dict['display_query'] = result_dict['query'][0:50] + '...'

                # Log the quality of this results page (rank and judgement
                # come from getQueryResultPerformance).
                if result_dict['trec_results']:
                    qrp = getQueryResultPerformance(result_dict['trec_results'], topic_num)
                    log_event(event='SEARCH_RESULTS_PAGE_QUALITY',
                              request=request,
                              whooshid=page,
                              rank=qrp[0],
                              judgement=qrp[1])

                set_results_session_var(request, result_dict)

                # Serialis(z?)e the data structure and send it back
                #if not do_delay:  # Only log the following if the user is not returning back to the results page.
                log_event(event='VIEW_SEARCH_RESULTS_PAGE', request=request, page=page)
                request.session['last_request_time'] = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')
                return HttpResponse(json.dumps(result_dict), content_type='application/json')
        else:
            # Render the search template as usual...
            log_event(event="VIEW_SEARCH_BOX", request=request, page=page)
            context_dict['delay_results'] = experiment_setups[condition].delay_results
            context_dict['delay_docview'] = experiment_setups[condition].delay_docview
            return render_to_response('trecdo/search.html', context_dict, context)
# Example #20
# 0
def show_document(request, whoosh_docid):
    """
    Displays (and, via AJAX, judges) a single document from the Whoosh index.

    An AJAX GET with a 'judge' parameter records a relevance judgement and
    returns it as JSON; a normal GET marks the document as viewed and
    renders the document template.
    """
    # Redirect to the timeout page if the experiment time is up.
    if time_search_experiment_out(request):
        return HttpResponseRedirect('/treconomics/timeout/')

    context = RequestContext(request)
    # Pull the per-participant experiment settings out of the session.
    ec = get_experiment_context(request)
    uname = ec["username"]
    taskid = ec["taskid"]

    condition = ec["condition"]
    # URL to return to the results page the user came from.
    current_search = request.session['queryurl']

    # get document from index
    # NOTE(review): ixr is presumably a module-level Whoosh IndexReader;
    # stored_fields() fetches the stored document by its internal doc number.
    fields = ixr.stored_fields(int(whoosh_docid))
    title = fields["title"]
    content = fields["content"]
    docnum = fields["docid"]
    doc_date = fields["timedate"]
    doc_source = fields["source"]
    docid = whoosh_docid
    topicnum = ec["topicnum"]

    def get_document_rank():
        """
        Returns the rank (integer) for the given document ID.
        -1 is returned if the document is not found in the session ranked list.
        """
        the_docid = int(whoosh_docid)
        ranked_results = request.session.get('results_ranked', [])

        # Some list comprehension - returns a list of one integer with the rank of a given document
        # if it exists in ranked_results; returns a blank list if the document is not present.
        at_rank = [item[1] for item in ranked_results if item[0] == the_docid]

        if len(at_rank) > 0:
            return at_rank[0]
        else:
            return -1

    # check if there are any get parameters.
    # -2 is the "no judgement made" sentinel passed to mark_document().
    user_judgement = -2
    rank = 0
    if request.is_ajax():
        getdict = request.GET

        if 'judge' in getdict:
            user_judgement = int(getdict['judge'])
            rank = get_document_rank()

            #marks that the document has been marked rel or nonrel
            doc_length = ixr.doc_field_length(long(request.GET.get('docid', 0)), 'content')
            user_judgement = mark_document(request, docid, user_judgement, title, docnum, rank, doc_length)
            #mark_document handles logging of this event
        return HttpResponse(simplejson.dumps(user_judgement), mimetype='application/javascript')
    else:
        if time_search_experiment_out( request ):
            return HttpResponseRedirect('/treconomics/next/')
        else:
            #marks that the document has been viewed
            rank = get_document_rank()

            doc_length = ixr.doc_field_length(long(docid), 'content')
            user_judgement = mark_document(request, docid, user_judgement, title, docnum, rank, doc_length)

            context_dict = {'participant': uname,
                            'task': taskid,
                            'condition': condition,
                            'current_search': current_search,
                            'docid': docid,
                            'docnum': docnum,
                            'title': title,
                            'doc_date': doc_date,
                            'doc_source': doc_source,
                            'content': content,
                            'user_judgement': user_judgement,
                            'rank': rank}

            # Optional flag so the template can offer a "back to assessment" link.
            if request.GET.get('backtoassessment', False):
                context_dict['backtoassessment'] = True

            return render_to_response('trecdo/document.html', context_dict, context)