Example #1
def search_share_atom(**kwargs):
    q = request.args.get('q', '*')
    sort = request.args.get('sort', 'dateUpdated')

    # we want the results per page to be constant between pages
    # TODO - move this functionality into build_query in util
    start = util.compute_start(request.args.get('page', 1), RESULTS_PER_PAGE)

    query = build_query(q, size=RESULTS_PER_PAGE, start=start, sort=sort)

    try:
        search_results = search.search_share(query)
    except MalformedQueryError:
        raise HTTPError(http.BAD_REQUEST)
    except IndexNotFoundError:
        search_results = {
            'count': 0,
            'results': []
        }

    atom_url = api_url_for('search_share_atom', _xml=True, _absolute=True)

    return util.create_atom_feed(
        name='SHARE',
        data=search_results['results'],
        query=q,
        size=RESULTS_PER_PAGE,
        start=start,
        url=atom_url,
        to_atom=share_search.to_atom
    )
Example #2
def search_share_atom(**kwargs):
    q = request.args.get('q', '*')
    sort = request.args.get('sort', 'dateUpdated')

    # we want the results per page to be constant between pages
    # TODO - move this functionality into build_query in util
    start = util.compute_start(request.args.get('page', 1), RESULTS_PER_PAGE)

    query = build_query(q, size=RESULTS_PER_PAGE, start=start, sort=sort)

    try:
        search_results = search.search_share(query, index='share_v1')
    except MalformedQueryError:
        raise HTTPError(http.BAD_REQUEST)
    except IndexNotFoundError:
        search_results = {'count': 0, 'results': []}

    atom_url = api_url_for('search_share_atom', _xml=True, _absolute=True)

    return util.create_atom_feed(name='SHARE',
                                 data=search_results['results'],
                                 query=q,
                                 size=RESULTS_PER_PAGE,
                                 start=start,
                                 url=atom_url,
                                 to_atom=share_search.to_atom)
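In both variants above, build_query(q, size=..., start=..., sort=...) is what turns the plain query string and paging values into the search body handed to search.search_share. The project's own helper is not shown in these excerpts; as a rough sketch, assuming it produces an Elasticsearch-style request body, it might look like:

def build_query(q='*', size=10, start=0, sort=None):
    # Hypothetical sketch, not the project's actual implementation:
    # wrap the user's query string in a query_string clause and attach
    # the paging values computed by util.compute_start.
    query = {
        'query': {
            'query_string': {
                'query': q,
            },
        },
        'from': start,
        'size': size,
    }
    if sort:
        # Sort descending on the requested field (e.g. 'dateUpdated').
        query['sort'] = [{sort: {'order': 'desc'}}]
    return query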
Example #3
def search_share_atom(**kwargs):
    json_query = request.args.get('jsonQuery')
    start = util.compute_start(request.args.get('page', 1), RESULTS_PER_PAGE)

    if not json_query:
        q = request.args.get('q', '*')
        sort = request.args.get('sort')

        # we want the results per page to be constant between pages
        # TODO - move this functionality into build_query in util

        query = build_query(q, size=RESULTS_PER_PAGE, start=start, sort=sort)
    else:
        query = json.loads(unquote(json_query))
        query['from'] = start
        query['size'] = RESULTS_PER_PAGE

        # Aggregations are expensive, and we really don't want to
        # execute them if they won't be used
        for field in ['aggs', 'aggregations']:
            if query.get(field):
                del query[field]
        q = query  # Do we really want to display this?

    try:
        search_results = search.search_share(query)
    except MalformedQueryError:
        raise HTTPError(http.BAD_REQUEST)
    except IndexNotFoundError:
        search_results = {
            'count': 0,
            'results': []
        }

    atom_url = api_url_for('search_share_atom', _xml=True, _absolute=True)

    return util.create_atom_feed(
        name='SHARE',
        data=search_results['results'],
        query=q,
        size=RESULTS_PER_PAGE,
        start=start,
        url=atom_url,
        to_atom=share_search.to_atom
    )
Example #4
def search_share_atom(**kwargs):
    json_query = request.args.get('jsonQuery')
    start = util.compute_start(request.args.get('page', 1), RESULTS_PER_PAGE)

    if not json_query:
        q = request.args.get('q', '*')
        sort = request.args.get('sort')

        # we want the results per page to be constant between pages
        # TODO - move this functionality into build_query in util

        query = build_query(q, size=RESULTS_PER_PAGE, start=start, sort=sort)
    else:
        query = json.loads(unquote(json_query))
        query['from'] = start
        query['size'] = RESULTS_PER_PAGE

        # Aggregations are expensive, and we really don't want to
        # execute them if they won't be used
        for field in ['aggs', 'aggregations']:
            if query.get(field):
                del query[field]
        q = query  # Do we really want to display this?

    try:
        search_results = search.search_share(query)
    except MalformedQueryError:
        raise HTTPError(http.BAD_REQUEST)
    except IndexNotFoundError:
        search_results = {'count': 0, 'results': []}

    atom_url = api_url_for('search_share_atom', _xml=True, _absolute=True)

    return util.create_atom_feed(name='SHARE',
                                 data=search_results['results'],
                                 query=q,
                                 size=RESULTS_PER_PAGE,
                                 start=start,
                                 url=atom_url,
                                 to_atom=share_search.to_atom)
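The jsonQuery branch above expects a URL-quoted JSON document: the handler reverses it with json.loads(unquote(json_query)), then forces its own 'from'/'size' values and strips any 'aggs'/'aggregations' keys. A minimal sketch of how a caller might build that parameter (the feed path below is a placeholder, not taken from these examples):

import json
from urllib.parse import quote, urlencode

# Hypothetical raw query; the server overrides 'from'/'size' and drops
# any 'aggs'/'aggregations' keys before executing it.
raw_query = {
    'query': {'query_string': {'query': 'neuroscience'}},
    'aggs': {'by_source': {'terms': {'field': 'source'}}},
}

# quote() once for the handler's unquote(); urlencode() adds the normal
# query-string encoding that Flask undoes when parsing request.args.
params = urlencode({'jsonQuery': quote(json.dumps(raw_query)), 'page': '2'})
feed_url = '/share/search/atom/?' + params  # placeholder path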
Example #5
def test_compute_start_normal(self):
    page = 50
    size = 10
    result = util.compute_start(page, size)
    assert_equal(result, 490)
Example #6
def test_compute_start_negative(self):
    page = -10
    size = 250
    result = util.compute_start(page, size)
    assert_equal(result, 0)
Example #7
def test_compute_start_non_number(self):
    page = 'cow'
    size = 250
    result = util.compute_start(page, size)
    assert_equal(result, 0)
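Taken together, these tests pin down the behaviour of util.compute_start: a valid page becomes a zero-based offset of (page - 1) * size, while negative or non-numeric pages fall back to an offset of 0. A minimal implementation consistent with those tests (a sketch, not necessarily the project's actual code) would be:

def compute_start(page, size):
    # Coerce the page argument; anything non-numeric falls back to page 1.
    try:
        page = int(page)
    except (TypeError, ValueError):
        page = 1
    # Pages are 1-based; clamp anything below 1 so the offset is never negative.
    if page < 1:
        page = 1
    # Zero-based offset into the result set: page 50 with size 10 -> 490.
    return (page - 1) * size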