Example 1
def search_by_name():

    query = request.args.get('query')

    results_people = []

    if query is None:
        return error_reply("No query given")
    elif query.strip() == '':
        return json.dumps({})

    new_limit = DEFAULT_LIMIT

    try:
        new_limit = int(request.values['limit_results'])
    except (KeyError, ValueError):
        # fall back to DEFAULT_LIMIT if the parameter is missing or not an integer
        pass

    if validProofQuery(query):
        return search_proofs_index(query)

    elif test_alphanumeric(query) is False:
        # non-alphanumeric queries fall through and return an empty result set
        pass

    else:

        threads = []

        t1 = QueryThread(query, 'username_search', new_limit)
        t2 = QueryThread(query, 'twitter_search', new_limit)
        t3 = QueryThread(query, 'people_search', new_limit)

        threads.append(t1)
        threads.append(t2)
        threads.append(t3)

        # start all threads
        for t in threads:
            t.start()

        # wait for all of them to finish
        for t in threads:
            t.join()

        # at this point all threads have finished and all queries have been performed

        results_username = t1.results
        results_twitter = t2.results
        results_people = t3.results

        results_people += results_username + results_twitter

        # dedup all results before sending out
        from substring_search import dedup_search_results
        results_people = dedup_search_results(results_people)

    results = {}
    results['results'] = results_people[:new_limit]

    return jsonify(results)
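
All four variants fan the query out to several search backends on parallel QueryThread workers, then join them and merge their .results lists. The class itself is not shown on this page; a minimal sketch of what it could look like, assuming each backend is a plain function looked up by name (the SEARCH_BACKENDS registry below is an assumption, not part of the original module):

import threading

# Hypothetical registry mapping search types to backend functions; the real
# module wires its backends up differently.
SEARCH_BACKENDS = {
    'username_search': lambda query, limit: [],
    'twitter_search': lambda query, limit: [],
    'people_search': lambda query, limit: [],
}

class QueryThread(threading.Thread):
    """Runs one search backend on its own thread; read .results after join()."""

    def __init__(self, query, search_type, limit):
        threading.Thread.__init__(self)
        self.query = query
        self.search_type = search_type
        self.limit = limit
        self.results = []

    def run(self):
        backend = SEARCH_BACKENDS[self.search_type]
        self.results = backend(self.query, self.limit)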
Example 2
def search_by_name():

    query = request.args.get('query')

    results_people = []

    if query is None:
        return error_reply("No query given")
    elif query.strip() == '':
        return json.dumps({})

    if MEMCACHED_ENABLED:

        cache_key = str('search_cache_' + query.lower())
        cache_reply = mc.get(cache_key)

        # if a cache hit, respond straight away
        if cache_reply is not None:
            return jsonify(cache_reply)

    new_limit = DEFAULT_LIMIT

    try:
        new_limit = int(request.values['limit_results'])
    except (KeyError, ValueError):
        # fall back to DEFAULT_LIMIT if the parameter is missing or not an integer
        pass

    if validProofQuery(query):
        return search_proofs_index(query)

    elif test_alphanumeric(query) is False:
        # non-alphanumeric queries fall through and return an empty result set
        pass

    else:

        threads = []

        t1 = QueryThread(query, 'username_search', new_limit)
        t2 = QueryThread(query, 'twitter_search', new_limit)
        t3 = QueryThread(query, 'people_search', new_limit)

        if LUCENE_ENABLED:
            t4 = QueryThread(query, 'lucene_search', new_limit)

        threads.append(t1)
        threads.append(t2)
        threads.append(t3)

        if LUCENE_ENABLED:
            threads.append(t4)

        # start all threads
        for t in threads:
            t.start()

        # wait for all of them to finish
        for t in threads:
            t.join()

        # at this point all threads have finished and all queries have been performed

        results_username = t1.results
        results_twitter = t2.results
        results_people = t3.results

        if LUCENE_ENABLED:
            results_bio = t4.results

        results_people += results_username + results_twitter
        if LUCENE_ENABLED:
            results_people += results_bio

        # dedup all results before sending out
        from substring_search import dedup_search_results
        results_people = dedup_search_results(results_people)

    results = {}
    results['results'] = results_people[:new_limit]

    if MEMCACHED_ENABLED:
        mc.set(cache_key, results, int(time() + MEMCACHED_TIMEOUT))

    return jsonify(results)
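
Example 2 wraps the same search in a memcached read-through cache: it answers straight from the cache on a hit and stores fresh results on a miss. A minimal sketch of the client setup it assumes (the python-memcache library and the timeout value here are assumptions, not taken from the snippet):

import memcache
from time import time

MEMCACHED_ENABLED = True
MEMCACHED_TIMEOUT = 60 * 60 * 12  # hypothetical: keep entries for 12 hours

mc = memcache.Client(['127.0.0.1:11211'])

Note that memcached interprets expiration values larger than 30 days (2592000 seconds) as absolute unix timestamps, which is why the handler passes int(time() + MEMCACHED_TIMEOUT) instead of a relative TTL.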
Example 3
def search_by_name():

    query = request.args.get('query')

    results_people = []

    if query is None:
        return error_reply("No query given")
    elif query.strip() == '':
        return json.dumps({})

    if MEMCACHED_ENABLED:

        cache_key = str('search_cache_' + query.lower())
        cache_reply = mc.get(cache_key)

        # if a cache hit, respond straight away
        if cache_reply is not None:
            return jsonify(cache_reply)

    new_limit = DEFAULT_LIMIT

    try:
        new_limit = int(request.values['limit_results'])
    except (KeyError, ValueError):
        # fall back to DEFAULT_LIMIT if the parameter is missing or not an integer
        pass

    if test_alphanumeric(query) is False:
        # non-alphanumeric queries fall through and return an empty result set
        pass
    else:

        threads = []

        t1 = QueryThread(query, 'username_search', new_limit)
        t2 = QueryThread(query, 'twitter_search', new_limit)
        t3 = QueryThread(query, 'people_search', new_limit)

        if LUCENE_ENABLED:
            t4 = QueryThread(query, 'lucene_search', new_limit)

        threads.append(t1)
        threads.append(t2)
        threads.append(t3)

        if LUCENE_ENABLED:
            threads.append(t4)

        # start all threads
        for t in threads:
            t.start()

        # wait for all of them to finish
        for t in threads:
            t.join()

        # at this point all threads have finished and all queries have been performed

        results_username = t1.results
        results_twitter = t2.results
        results_people = t3.results

        if LUCENE_ENABLED:
            results_bio = t4.results

        results_people += results_username + results_twitter
        if LUCENE_ENABLED:
            results_people += results_bio

        # dedup all results before sending out
        from substring_search import dedup_search_results
        results_people = dedup_search_results(results_people)

    results = {}
    results['results'] = results_people[:new_limit]

    if MEMCACHED_ENABLED:
        mc.set(cache_key, results, int(time() + MEMCACHED_TIMEOUT))

    return jsonify(results)
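
Every variant deduplicates the merged lists with dedup_search_results from substring_search before truncating to the limit. The implementation is not shown here; a plausible sketch that keeps the first occurrence of each profile, assuming results are dicts and 'username' is the identifying field (both assumptions):

def dedup_search_results(results):
    # keep the first occurrence of each assumed 'username' key
    seen = set()
    deduped = []
    for result in results:
        key = result.get('username')
        if key not in seen:
            seen.add(key)
            deduped.append(result)
    return deduped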
Example 4
def search_by_name():

    query = request.args.get('query')

    results_people = []

    if query is None:
        return error_reply("No query given")
    elif query.strip() == '':
        return json.dumps({})

    new_limit = DEFAULT_LIMIT

    try:
        new_limit = int(request.values['limit_results'])
    except (KeyError, ValueError):
        # fall back to DEFAULT_LIMIT if the parameter is missing or not an integer
        pass

    if test_alphanumeric(query) is False:
        # non-alphanumeric queries fall through and return an empty result set
        pass

    else:

        threads = []

        t1 = QueryThread(query, 'username_search', new_limit)
        t2 = QueryThread(query, 'twitter_search', new_limit)
        t3 = QueryThread(query, 'people_search', new_limit)

        threads.append(t1)
        threads.append(t2)
        threads.append(t3)

        # start all threads
        for t in threads:
            t.start()

        # wait for all of them to finish
        for t in threads:
            t.join()

        # at this point all threads have finished and all queries have been performed

        results_username = t1.results
        results_twitter = t2.results
        results_people = t3.results

        results_people += results_username + results_twitter

        # dedup all results before sending out
        from substring_search import dedup_search_results
        results_people = dedup_search_results(results_people)

    results = {}
    results['results'] = results_people[:new_limit]

    resp = make_response(jsonify(results))
    if len(results['results']) > 0:
        cache_timeout = DEFAULT_CACHE_TIMEOUT
    else:
        cache_timeout = EMPTY_CACHE_TIMEOUT

    resp.headers['Cache-Control'] = 'public, max-age={:d}'.format(
        cache_timeout)

    return resp
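
Example 4 drops the server-side memcached cache in favor of HTTP caching: a Cache-Control header lets clients and proxies cache the response, with a shorter max-age for empty result sets. One way to see the header, assuming the handler is registered on a Flask app under a hypothetical /search route:

# Sketch only: `app` and the '/search' route are assumptions about how the
# surrounding module registers search_by_name.
with app.test_client() as client:
    resp = client.get('/search?query=alice')
    print(resp.headers['Cache-Control'])  # e.g. 'public, max-age=3600'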