def test_page2(self):
    paginator = Paginator({}, self.repr_set, limit=2, offset=2)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 2)
    self.assertEqual(meta['offset'], 2)
    self.assertEqual(meta['previous'], '/api/v1/notes/?limit=2&offset=0')
    self.assertEqual(meta['next'], '/api/v1/notes/?limit=2&offset=4')
    self.assertEqual(meta['total_count'], 6)

def get_refresh(self, request, **kwargs):
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)

    if 'marked_at__gt' not in request.GET:
        results = UserStars.objects.filter(user_id=request.user.id)\
            .order_by('-marked_at').exclude(star=1)
    else:
        results = UserStars.objects.filter(user_id=request.user.id,
                                           marked_at__gt=request.GET['marked_at__gt'])\
            .order_by('-marked_at').exclude(star=1)

    paginator = Paginator(request.GET, results, resource_uri='/api/v1/account/star/refresh/')

    bundles = []
    for result in paginator.page()['objects']:
        bundle = self.build_bundle(obj=result, request=request)
        bundle.data['refresh'] = True
        bundle.data['id'] = bundle.obj.article_id
        bundles.append(self.full_dehydrate(bundle))

    object_list = {
        'meta': paginator.page()['meta'],
        'objects': bundles
    }

    self.log_throttled_access(request)
    return self.create_response(request, object_list)

def get_search(self, request, **kwargs):
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)

    query = request.GET.get('q', None)
    if not query:
        raise BadRequest('Please supply the search parameter (e.g. "/api/v1/feeds/search/?q=query")')

    results = SearchQuerySet().filter(content__contains=query)\
        .filter_or(keywords__contains=query).models(Feed)

    paginator = Paginator(request.GET, results, resource_uri='/api/v1/feeds/search/')

    bundles = []
    for result in paginator.page()['objects']:
        bundle = self.build_bundle(obj=result.object, request=request)
        bundles.append(self.full_dehydrate(bundle))

    object_list = {
        'meta': paginator.page()['meta'],
        'objects': bundles
    }
    object_list['meta']['search_query'] = query

    self.log_throttled_access(request)
    return self.create_response(request, object_list)

def search(self, request, **kwargs):
    """
    An endpoint for performing full-text searches.

    TKTK -- implement field searches
    TKTK -- implement wildcard + boolean searches
    """
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)

    s = SolrSearch(self._solr()).query(full_text=request.GET.get('q'))
    paginator = Paginator(request.GET, s, resource_uri=request.path_info)
    page = paginator.page()

    objects = []
    for result in page['objects']:
        obj = SolrObject(result)
        bundle = self.build_bundle(obj=obj, request=request)
        bundle = self.full_dehydrate(bundle)
        objects.append(bundle)

    page['objects'] = objects

    self.log_throttled_access(request)
    return self.create_response(request, page)

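# A hedged sketch for the first TKTK above (field searches), reusing only the
# query(**kwargs) pattern this endpoint already uses; ALLOWED_FIELDS and the
# helper name are hypothetical, not part of the original resource:
#
#     ALLOWED_FIELDS = ('title', 'author')  # assumed indexed Solr fields
#
#     def _field_query(self, GET):
#         # Use whitelisted field=value params when given, else plain full text.
#         kwargs = {f: GET[f] for f in ALLOWED_FIELDS if GET.get(f)}
#         return kwargs or {'full_text': GET.get('q')}
#
#     # s = SolrSearch(self._solr()).query(**self._field_query(request.GET))
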
def get_search(self, request, **kwargs):
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)

    query = request.GET.get('q', None)
    if not query:
        raise BadRequest('Please supply the search parameter (e.g. "/api/v1/articles/search/?q=query")')

    results = SearchQuerySet().models(Article)\
        .filter(feed_id__in=[x['id'] for x in request.user.feeds.values('id')])\
        .filter(content=query).filter_or(title=query).order_by('-date_parsed')
    if not results:
        results = EmptySearchQuerySet()

    paginator = Paginator(request.GET, results, resource_uri='/api/v1/articles/search/')

    bundles = []
    for result in paginator.page()['objects']:
        bundle = self.build_bundle(obj=result.object, request=request)
        bundles.append(self.full_dehydrate(bundle))

    object_list = {
        'meta': paginator.page()['meta'],
        'objects': bundles
    }
    object_list['meta']['search_query'] = query

    self.log_throttled_access(request)
    return self.create_response(request, object_list)

def get_populars(self, request, **kwargs):
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)

    # Articles read in the last 24 hours, most-read first.
    time_frame = datetime.utcnow() - timedelta(days=1)
    most_read = UserReads.objects.filter(marked_at__gte=time_frame)\
        .annotate(occ=Count('article')).order_by('-occ')

    if not most_read or len(most_read) < 1000:
        # Too few popular reads: broaden the set with articles from category feeds.
        results = Article.objects.filter(
            Q(id__in=most_read.values('article')) |
            Q(feed_id__in=CategoryFeeds.objects.all().values('feed_id')))\
            .order_by('-date_parsed')
    else:
        results = Article.objects.filter(id__in=most_read.values('article')).order_by('-date_parsed')

    paginator = Paginator(request.GET, results, resource_uri='/api/v1/articles/populars/')

    bundles = []
    for result in paginator.page()['objects']:
        bundle = self.build_bundle(obj=result, request=request)
        bundles.append(self.full_dehydrate(bundle))

    object_list = {
        'meta': paginator.page()['meta'],
        'objects': bundles
    }

    self.log_throttled_access(request)
    return self.create_response(request, object_list)

def test_regression_nonqueryset(self):
    request = QueryDict('', mutable=True)
    paginator = Paginator(request, ['foo', 'bar', 'baz'], limit=2, offset=0)
    # This would fail due to ``count`` being present on ``list`` but called
    # differently.
    page = paginator.page()
    self.assertEqual(page['objects'], ['foo', 'bar'])

def get_search(self, request, **kwargs):
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)

    query = request.GET.get('q', None)
    if not query:
        raise BadRequest('Please supply the search parameter (e.g. "/api/v1/notes/search/?q=css")')

    # results = SearchQuerySet().models(Note).filter(user=request.user).auto_query(query)
    results = SearchQuerySet().models(Donate).auto_query(query)
    if not results:
        results = EmptySearchQuerySet()

    paginator = Paginator(request.GET, results, resource_uri='/api/v1/notes/search/')

    bundles = []
    for result in paginator.page()['objects']:
        bundle = self.build_bundle(obj=result.object, request=request)
        bundles.append(self.full_dehydrate(bundle))

    object_list = {
        'meta': paginator.page()['meta'],
        'objects': bundles
    }
    object_list['meta']['search_query'] = query

    self.log_throttled_access(request)
    return self.create_response(request, object_list)

def get_search(self, request, **kwargs):
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)

    # Do the query: run the autocomplete term against every field the
    # model's haystack index declares, collecting all matches.
    ret = []
    for field in self._meta.object_class.haystack_index:
        kwargs = {field: request.GET.get('q', '')}
        sqs = SearchQuerySet().models(self._meta.object_class).autocomplete(**kwargs)
        ret.extend(sqs)

    paginator = Paginator(request.GET, ret)

    try:
        page = paginator.page()
    except InvalidPage:
        raise Http404("Sorry, no results on that page.")

    objects = []
    for result in page['objects']:
        bundle = self.build_bundle(obj=result.object, request=request)
        bundle = self.full_dehydrate(bundle)
        objects.append(bundle)

    object_list = {
        'objects': objects,
    }

    self.log_throttled_access(request)
    return self.create_response(request, object_list)

def get_custom_list(self, request, queryset):
    '''
    Generic function to paginate a queryset with a set of items per page.
    '''
    self.method_check(request, allowed=['get'])
    self.throttle_check(request)

    # Do the query.
    offset = int(request.GET.get('offset', 0))
    limit = min(int(request.GET.get('limit', 20)), 1000)
    paginator = Paginator(request.GET, queryset)

    try:
        object_list = paginator.get_slice(limit, offset)
    except InvalidPage:
        raise Http404("Sorry, no results on that page.")

    objects = []
    for result in object_list:
        bundle = self.build_bundle(obj=result, request=request)
        bundle = self.full_dehydrate(bundle)
        objects.append(bundle)

    page = {
        "meta": {
            "limit": limit,
            "offset": offset,
            "total_count": queryset.count()
        },
        'objects': objects,
    }

    self.log_throttled_access(request)
    return self.create_response(request, page)

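# A hedged usage sketch for the helper above (all names hypothetical, not part
# of the original code): a Resource would expose it through prepend_urls and
# hand it whatever queryset the endpoint should page over.
#
#     def prepend_urls(self):
#         return [url(r'^(?P<resource_name>%s)/mine/$' % self._meta.resource_name,
#                     self.wrap_view('get_mine'), name='api_get_mine')]
#
#     def get_mine(self, request, **kwargs):
#         return self.get_custom_list(request, Note.objects.filter(owner=request.user))
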
def get_search(self, request, **kwargs):
    query = kwargs.pop('query')
    source = 'flickr'

    relevant_docs = request.GET.getlist('relevant')
    irrelevant_docs = request.GET.getlist('irrelevant')
    feature = request.GET.get('feature')
    page = request.GET.get('page', 1)
    per_page = request.GET.get('limit', 20)

    results = flickr.photos_search(tags=query, page=str(page), per_page=str(per_page))[0]

    # Extract (and cache) feature vectors for every photo in the result set.
    for photo in results:
        url = self._get_url(photo)
        cache_key = '%s-%s' % (source, photo.get('id'))
        feature_vectors = cache.get(cache_key)
        if not feature_vectors:
            features.extract_features(cache_key, url)

    # Filter the results using the client-supplied relevance feedback.
    results = features.filter(results, source, relevant_docs, irrelevant_docs, feature)

    paginator = Paginator(request.GET, results, resource_uri='/api/v1/flickr/search/')

    bundles = []
    for result in paginator.page()['objects']:
        bundle = self.build_bundle(obj=self._output_adapter(result), request=request)
        bundles.append(self.full_dehydrate(bundle))

    object_list = {
        'meta': paginator.page()['meta'],
        'objects': bundles,
    }
    object_list['meta']['query'] = query

    return self.create_response(request, object_list)

def gather_steamies(self, request, **kwargs):
    self.method_check(request, allowed=['get'])
    self.throttle_check(request)

    qs = Steamies.objects.filter(top_level_id=kwargs['top_level_id'])

    paginator = Paginator(request.GET, qs)

    try:
        page = paginator.page()
    except InvalidPage:
        raise Http404("Sorry, no results on that page.")

    objects = []
    for obj in page['objects']:
        bundle = self.build_bundle(obj=obj, request=request)
        bundle = self.full_dehydrate(bundle)
        objects.append(bundle)

    page['objects'] = objects

    self.log_throttled_access(request)
    return self.create_response(request, page)

def test_unicode_request(self):
    request = {
        'slug__startswith': u'☃',
        'format': 'json',
    }
    paginator = Paginator(request, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=2)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 2)
    self.assertEqual(meta['offset'], 2)
    self.assertTrue('limit=2' in meta['previous'])
    self.assertTrue('offset=0' in meta['previous'])
    self.assertTrue('slug__startswith=%E2%98%83' in meta['previous'])
    self.assertTrue('format=json' in meta['previous'])
    self.assertTrue('limit=2' in meta['next'])
    self.assertTrue('offset=4' in meta['next'])
    self.assertTrue('slug__startswith=%E2%98%83' in meta['next'])
    self.assertTrue('format=json' in meta['next'])
    self.assertEqual(meta['total_count'], 6)

    request = QueryDict('slug__startswith=☃&format=json')
    paginator = Paginator(request, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=2)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 2)
    self.assertEqual(meta['offset'], 2)
    self.assertTrue('limit=2' in meta['previous'])
    self.assertTrue('offset=0' in meta['previous'])
    self.assertTrue('slug__startswith=%E2%98%83' in meta['previous'])
    self.assertTrue('format=json' in meta['previous'])
    self.assertTrue('limit=2' in meta['next'])
    self.assertTrue('offset=4' in meta['next'])
    self.assertTrue('slug__startswith=%E2%98%83' in meta['next'])
    self.assertTrue('format=json' in meta['next'])
    self.assertEqual(meta['total_count'], 6)

def get_search(self, request, **kwargs):
    '''
    Custom endpoint for search
    '''
    self.method_check(request, allowed=['get'])

    # NOTE: with the '*:*' (match-all) default, ``query`` is always truthy,
    # so the BadRequest guard below can never fire.
    query = request.GET.get('q', "*:*")
    if not query:
        raise BadRequest(
            'Please supply the search parameter (e.g. '
            '"/api/v1/clips/search/?q=css")')

    results = SearchQuerySet().raw_search(query)
    if not results:
        results = EmptySearchQuerySet()

    paginator = Paginator(request.GET, results, resource_uri='/api/v1/sample/search/')

    bundles = []
    for result in paginator.page()['objects']:
        bundle = self.build_bundle(obj=result.object, request=request)
        bundles.append(self.full_dehydrate(bundle))

    object_list = {
        'meta': paginator.page()['meta'],
        'objects': bundles
    }
    object_list['meta']['search_query'] = query

    self.log_throttled_access(request)
    return self.create_response(request, object_list)

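# Usage note (example query hypothetical): ``raw_search`` hands the string to
# the Haystack backend unparsed, so backend-native syntax such as
#     GET /api/v1/sample/search/?q=title:css%20AND%20author:jane
# is accepted, and omitting ``q`` falls back to the match-all '*:*' query.
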
def test_multiple(self):
    request = QueryDict('a=1&a=2')
    paginator = Paginator(request, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=2)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 2)
    self.assertEqual(meta['offset'], 2)
    self.assertEqual(meta['previous'], '/api/v1/notes/?a=1&a=2&limit=2&offset=0')
    self.assertEqual(meta['next'], '/api/v1/notes/?a=1&a=2&limit=2&offset=4')

def test_large_limit(self):
    paginator = Paginator({}, self.repr_set, limit=20, offset=0)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 20)
    self.assertEqual(meta['offset'], 0)
    self.assertEqual(meta['previous'], None)
    self.assertEqual(meta['next'], None)
    self.assertEqual(meta['total_count'], 6)

def test_large_limit(self):
    paginator = Paginator({}, self.data_set, resource_uri='/api/v1/notes/', limit=20, offset=0)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 20)
    self.assertEqual(meta['offset'], 0)
    self.assertEqual(meta['previous'], None)
    self.assertEqual(meta['next'], None)
    self.assertEqual(meta['total_count'], 6)

def test_page3(self):
    paginator = Paginator({}, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=4)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 2)
    self.assertEqual(meta['offset'], 4)
    self.assertEqual(meta['previous'], '/api/v1/notes/?limit=2&offset=2')
    self.assertEqual(meta['next'], None)
    self.assertEqual(meta['total_count'], 6)

def test_page3(self):
    paginator = Paginator({}, self.data_set, resource_uri="/api/v1/notes/", limit=2, offset=4)
    meta = paginator.page()["meta"]
    self.assertEqual(meta["limit"], 2)
    self.assertEqual(meta["offset"], 4)
    self.assertEqual(meta["previous"], "/api/v1/notes/?limit=2&offset=2")
    self.assertEqual(meta["next"], None)
    self.assertEqual(meta["total_count"], 6)

def test_large_limit(self):
    paginator = Paginator({}, self.data_set, resource_uri="/api/v1/notes/", limit=20, offset=0)
    meta = paginator.page()["meta"]
    self.assertEqual(meta["limit"], 20)
    self.assertEqual(meta["offset"], 0)
    self.assertEqual(meta["previous"], None)
    self.assertEqual(meta["next"], None)
    self.assertEqual(meta["total_count"], 6)

def test_all(self):
    paginator = Paginator({'limit': 0}, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=0)
    page = paginator.page()
    meta = page['meta']
    self.assertEqual(meta['limit'], 1000)
    self.assertEqual(meta['offset'], 0)
    self.assertEqual(meta['total_count'], 6)
    self.assertEqual(len(page['objects']), 6)

def test_multiple(self):
    request = QueryDict("a=1&a=2")
    paginator = Paginator(request, self.data_set, resource_uri="/api/v1/notes/", limit=2, offset=2)
    meta = paginator.page()["meta"]
    self.assertEqual(meta["limit"], 2)
    self.assertEqual(meta["offset"], 2)
    self.assertEqual(meta["previous"], "/api/v1/notes/?a=1&a=2&limit=2&offset=0")
    self.assertEqual(meta["next"], "/api/v1/notes/?a=1&a=2&limit=2&offset=4")

def projectsheet_search(self, request, **kwargs):
    """
    Search project sheets.
    """
    self.method_check(request, allowed=['get'])
    self.throttle_check(request)
    self.is_authenticated(request)

    # Query params
    query = request.GET.get('q', '')
    autocomplete = request.GET.get('auto', None)
    selected_facets = request.GET.getlist('facet', None)
    order = request.GET.getlist('order', None)

    sqs = SearchQuerySet().models(self.Meta.object_class).facet('tags')

    # narrow down QS with facets
    if selected_facets:
        for facet in selected_facets:
            sqs = sqs.narrow('tags:%s' % (facet))

    # A: if autocomplete, we return only a list of tags
    # starting with "auto" along with their count.
    if autocomplete is not None:
        tags = sqs.facet_counts()
        tags = tags['fields']['tags']
        if len(autocomplete) > 0:
            tags = [t for t in tags if t[0].startswith(autocomplete)]
        tags = [{'name': t[0], 'count': t[1]} for t in tags]
        object_list = {
            'objects': tags,
        }
    # B: else, we return a list of projectsheets
    else:
        # launch query
        if query != "":
            sqs = sqs.auto_query(query)

        uri = reverse('api_projectsheet_search',
                      kwargs={'api_name': self.api_name,
                              'resource_name': self._meta.resource_name})
        paginator = Paginator(request.GET, sqs, resource_uri=uri)

        objects = []
        for result in paginator.page()['objects']:
            if result:
                bundle = self.build_bundle(obj=result.object, request=request)
                bundle = self.full_dehydrate(bundle, for_list=True)
                objects.append(bundle)

        object_list = {
            'meta': paginator.page()['meta'],
            'objects': objects,
        }

    self.log_throttled_access(request)
    return self.create_response(request, object_list)

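# Usage sketch (URLs hypothetical): the ``auto`` parameter switches between
# the two modes above —
#     GET /api/v1/projectsheet/search/?q=water&facet=energy  -> paginated sheets
#     GET /api/v1/projectsheet/search/?auto=wa               -> matching tags + counts
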
def test_complex_get(self):
    request = {"slug__startswith": "food", "format": "json"}
    paginator = Paginator(request, self.data_set, resource_uri="/api/v1/notes/", limit=2, offset=2)
    meta = paginator.page()["meta"]
    self.assertEqual(meta["limit"], 2)
    self.assertEqual(meta["offset"], 2)
    self.assertEqual(meta["previous"], "/api/v1/notes/?slug__startswith=food&offset=0&limit=2&format=json")
    self.assertEqual(meta["next"], "/api/v1/notes/?slug__startswith=food&offset=4&limit=2&format=json")
    self.assertEqual(meta["total_count"], 6)

def test_page2(self):
    request = QueryDict('', mutable=True)
    paginator = Paginator(request, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=2)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 2)
    self.assertEqual(meta['offset'], 2)
    self.assertEqual(meta['previous'], '/api/v1/notes/?limit=2&offset=0')
    self.assertEqual(meta['next'], '/api/v1/notes/?limit=2&offset=4')
    self.assertEqual(meta['total_count'], 6)

def test_large_limit(self):
    request = QueryDict('', mutable=True)
    paginator = Paginator(request, self.data_set, resource_uri='/api/v1/notes/', limit=20, offset=0)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 20)
    self.assertEqual(meta['offset'], 0)
    self.assertEqual(meta['previous'], None)
    self.assertEqual(meta['next'], None)
    self.assertEqual(meta['total_count'], 6)

def test_custom_collection_name(self):
    paginator = Paginator({}, self.data_set, resource_uri='/api/v1/notes/', limit=20, offset=0,
                          collection_name='notes')
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 20)
    self.assertEqual(meta['offset'], 0)
    self.assertEqual(meta['previous'], None)
    self.assertEqual(meta['next'], None)
    self.assertEqual(meta['total_count'], 6)
    self.assertEqual(len(paginator.page()['notes']), 6)

def test_page3_with_request(self):
    for req in [{'offset': '4', 'limit': '2'}, QueryDict('offset=4&limit=2')]:
        paginator = Paginator(req, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=4)
        meta = paginator.page()['meta']
        self.assertEqual(meta['limit'], 2)
        self.assertEqual(meta['offset'], 4)
        self.assertEqual(meta['previous'], '/api/v1/notes/?limit=2&offset=2')
        self.assertEqual(meta['next'], None)
        self.assertEqual(meta['total_count'], 6)

def test_page2(self):
    paginator = Paginator({}, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=2)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 2)
    self.assertEqual(meta['offset'], 2)
    self.assertTrue('limit=2' in meta['previous'])
    self.assertTrue('offset=0' in meta['previous'])
    self.assertTrue('limit=2' in meta['next'])
    self.assertTrue('offset=4' in meta['next'])
    self.assertEqual(meta['total_count'], 6)

def test_all(self):
    paginator = Paginator({'limit': 0}, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=0)
    page = paginator.page()
    meta = page['meta']
    self.assertEqual(meta['limit'], 0)
    self.assertEqual(meta['offset'], 0)
    self.assertEqual(meta['total_count'], 6)
    self.assertEqual(len(page['objects']), 6)
    self.assertFalse('previous' in meta)
    self.assertFalse('next' in meta)

def test_all(self):
    paginator = Paginator({"limit": 0}, self.data_set, resource_uri="/api/v1/notes/", limit=2, offset=0)
    page = paginator.page()
    meta = page["meta"]
    self.assertEqual(meta["limit"], 0)
    self.assertEqual(meta["offset"], 0)
    self.assertEqual(meta["total_count"], 6)
    self.assertEqual(len(page["objects"]), 6)
    self.assertFalse("previous" in meta)
    self.assertFalse("next" in meta)

def get_search(self, request, **kwargs):
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.dispatch_list(request)
    self.throttle_check(request)

    # Do the query.
    sqs = SearchQuerySet()
    query = request.GET.getlist("q")
    OR = request.GET.getlist("or")
    if query:
        for q in query:
            query_dict = {}
            try:
                # ``q`` is expected as "filter:value"; a bare term (no colon)
                # raises IndexError and falls through to a plain content search.
                encoded_q = encode_filter(q.split(':')[0], q.split(':')[1])
                key = "{}Filter_exact".format(encoded_q.get('filter'))
                value = encoded_q.get('value')
                if ' ' in value:
                    key = key.replace('_', '__')
                query_dict[key] = value
                if OR:
                    sqs = sqs.filter_or(**query_dict)
                else:
                    sqs = sqs.filter(**query_dict)
            except IndexError:
                sqs = sqs.filter(content=q)

    # Apply tastypie filters if any whatsoever
    sqs_objects = [sq.object for sq in sqs]
    filtered = self.apply_filters(request, applicable_filters={})
    final_list = list(set(sqs_objects) & set(filtered))
    ids = [fl.id for fl in final_list]
    final_list = lr.resourceInfoType_model.objects.filter(id__in=ids)

    if 'latest' in request.GET.get('sort', ''):
        final_list = self.apply_sorting(final_list, options={'sort': [u'latest']})
    elif 'earliest' in request.GET.get('sort', ''):
        final_list = self.apply_sorting(final_list, options={'sort': [u'earliest']})

    paginator = Paginator(request.GET, final_list, resource_uri='/api/v1/lr/search/')
    to_be_serialized = paginator.page()

    bundles = [
        self.build_bundle(obj=result, request=request)
        for result in to_be_serialized['objects']
    ]
    to_be_serialized['objects'] = [
        self.full_dehydrate(bundle) for bundle in bundles
    ]
    to_be_serialized = self.alter_list_data_to_serialize(request, to_be_serialized)
    return self.create_response(request, to_be_serialized)

def test_complex_get(self):
    request = {
        'slug__startswith': 'food',
        'format': 'json',
    }
    paginator = Paginator(request, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=2)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 2)
    self.assertEqual(meta['offset'], 2)
    self.assertEqual(meta['previous'], '/api/v1/notes/?slug__startswith=food&offset=0&limit=2&format=json')
    self.assertEqual(meta['next'], '/api/v1/notes/?slug__startswith=food&offset=4&limit=2&format=json')
    self.assertEqual(meta['total_count'], 6)

def test_unicode_request(self):
    request = QueryDict('slug__startswith=☃&format=json')
    paginator = Paginator(request, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=2)
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 2)
    self.assertEqual(meta['offset'], 2)
    self.assertEqual(meta['previous'], '/api/v1/notes/?slug__startswith=%E2%98%83&offset=0&limit=2&format=json')
    self.assertEqual(meta['next'], u'/api/v1/notes/?slug__startswith=%E2%98%83&offset=4&limit=2&format=json')
    self.assertEqual(meta['total_count'], 6)

def test_offset(self):
    paginator = Paginator({}, self.data_set, limit=20, offset=0)
    paginator.offset = '10'
    self.assertEqual(paginator.get_offset(), 10)

    paginator.offset = 0
    self.assertEqual(paginator.get_offset(), 0)

    paginator.offset = 10
    self.assertEqual(paginator.get_offset(), 10)

    paginator.offset = -10
    self.assertRaises(BadRequest, paginator.get_offset)

    paginator.offset = 'hAI!'
    self.assertRaises(BadRequest, paginator.get_offset)

def test_page1(self):
    reset_queries()
    self.assertEqual(len(self._get_query_count()), 0)

    paginator = Paginator({}, self.data_set, resource_uri='/api/v1/notes/', limit=2, offset=0)

    # REGRESSION: Check to make sure only part of the cache is full.
    # We used to run ``len()`` on the ``QuerySet``, which would populate
    # the entire result set. Owwie.
    paginator.get_count()
    self.assertEqual(len(self._get_query_count()), 1)
    # Should be nothing in the cache.
    self.assertEqual(paginator.objects._result_cache, None)

    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 2)
    self.assertEqual(meta['offset'], 0)
    self.assertEqual(meta['previous'], None)
    self.assertEqual(meta['next'], '/api/v1/notes/?limit=2&offset=2')
    self.assertEqual(meta['total_count'], 6)

def get_custom_list(self, request, queryset):
    '''
    Generic function to paginate a queryset with a set of items per page.
    '''
    self.method_check(request, allowed=['get'])
    self.throttle_check(request)

    # Do the query, clamping offset to >= 0 and limit to 1..1000.
    try:
        offset = max(0, int(request.GET.get('offset', 0)))
        limit = max(1, min(int(request.GET.get('limit', 20)), 1000))
    except (TypeError, ValueError):
        return HttpResponseBadRequest("Sorry, you did not provide valid input data")

    paginator = Paginator(request.GET, queryset)

    try:
        object_list = paginator.get_slice(limit, offset)
    except InvalidPage:
        raise Http404("Sorry, no results on that page.")

    objects = []
    for result in object_list:
        bundle = self.build_bundle(obj=result, request=request)
        bundle = self.full_dehydrate(bundle)
        objects.append(bundle)

    page = {
        "meta": {
            "limit": limit,
            "offset": offset,
            "total_count": queryset.count()
        },
        'objects': objects,
    }

    self.log_throttled_access(request)
    return self.create_response(request, page)

def prepare_result(self, request, sqs, limit):
    uri = reverse('api_ms_search', kwargs={
        'api_name': self.api_name,
        'resource_name': self._meta.resource_name
    })
    paginator = Paginator(request.GET, sqs, resource_uri=uri, limit=limit)

    objects = []
    for result in paginator.page()['objects']:
        if result:
            try:
                bundle = self.build_bundle(obj=result.object, request=request)
                bundle = self.full_dehydrate(bundle)
                objects.append(bundle)
            except Exception:
                # Skip results that cannot be dehydrated (e.g. stale index
                # entries whose underlying object no longer exists).
                pass

    return {
        'meta': paginator.page()['meta'],
        'objects': objects,
    }

def basic_search(self, request, results, method, **kwargs):
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)

    paginator = Paginator(
        request.GET,
        results,
        resource_uri=self._build_reverse_url(method, kwargs=kwargs)
    )

    objects = []
    for result in paginator.page().get('objects'):
        bundle = self.build_bundle(obj=result, request=request)
        objects.append(self.full_dehydrate(bundle))

    object_list = {
        'objects': objects,
        'meta': paginator.page().get('meta'),
    }

    self.log_throttled_access(request)
    return self.create_response(request, object_list)

def test_limit(self):
    paginator = Paginator({}, self.data_set, limit=20, offset=0)
    paginator.limit = '10'
    self.assertEqual(paginator.get_limit(), 10)

    paginator.limit = None
    self.assertEqual(paginator.get_limit(), 20)

    paginator.limit = 10
    self.assertEqual(paginator.get_limit(), 10)

    paginator.limit = -10
    raised = False
    try:
        paginator.get_limit()
    except BadRequest as e:
        raised = e
    self.assertTrue(raised)
    self.assertEqual(
        str(raised),
        "Invalid limit '-10' provided. Please provide a positive integer >= 0."
    )

    paginator.limit = 'hAI!'
    raised = False
    try:
        paginator.get_limit()
    except BadRequest as e:
        raised = e
    self.assertTrue(raised)
    self.assertEqual(
        str(raised),
        "Invalid limit 'hAI!' provided. Please provide a positive integer."
    )

    # Test the max_limit.
    paginator.limit = 1000
    self.assertEqual(paginator.get_limit(), 1000)

    paginator.limit = 1001
    self.assertEqual(paginator.get_limit(), 1000)

    paginator = Paginator({}, self.data_set, limit=20, offset=0, max_limit=10)
    self.assertEqual(paginator.get_limit(), 10)

def test_offset(self):
    paginator = Paginator({}, self.data_set, limit=20, offset=0)
    paginator.offset = '10'
    self.assertEqual(paginator.get_offset(), 10)

    paginator.offset = 0
    self.assertEqual(paginator.get_offset(), 0)

    paginator.offset = 10
    self.assertEqual(paginator.get_offset(), 10)

    paginator.offset = -10
    raised = False
    try:
        paginator.get_offset()
    except BadRequest as e:
        raised = e
    self.assertTrue(raised)
    self.assertEqual(
        str(raised),
        "Invalid offset '-10' provided. Please provide a positive integer >= 0."
    )

    paginator.offset = 'hAI!'
    raised = False
    try:
        paginator.get_offset()
    except BadRequest as e:
        raised = e
    self.assertTrue(raised)
    self.assertEqual(
        str(raised),
        "Invalid offset 'hAI!' provided. Please provide an integer.")

def test_regression_nonqueryset(self):
    paginator = Paginator({}, ['foo', 'bar', 'baz'], limit=2, offset=0)
    # This would fail due to ``count`` being present on ``list`` but called
    # differently.
    page = paginator.page()
    self.assertEqual(page['objects'], ['foo', 'bar'])

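# A minimal sketch of the behaviour the regression test above pins down
# (data hypothetical): Paginator only needs slicing plus len()/count(),
# so plain Python lists paginate like querysets.
#
#     from tastypie.paginator import Paginator
#     page = Paginator({}, ['a', 'b', 'c', 'd'],
#                      resource_uri='/api/v1/letters/', limit=2, offset=2).page()
#     assert page['objects'] == ['c', 'd']
#     assert page['meta']['total_count'] == 4
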
def test_max_limit_none(self):
    paginator = Paginator({'limit': 0}, self.data_set, max_limit=None, resource_uri='/api/v1/notes/')
    meta = paginator.page()['meta']
    self.assertEqual(meta['limit'], 0)

def test_offset(self):
    paginator = Paginator({}, self.data_set, limit=20, offset=0)
    paginator.offset = '10'
    self.assertEqual(paginator.get_offset(), 10)

    paginator.offset = 0
    self.assertEqual(paginator.get_offset(), 0)

    paginator.offset = 10
    self.assertEqual(paginator.get_offset(), 10)

    paginator.offset = -10
    raised = False
    try:
        paginator.get_offset()
    except BadRequest as e:
        raised = e
    self.assertTrue(raised)

def get_search(self, request, **kwargs):
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)

    #####################################################################
    # get query for multiple time used key, like:
    # ?q=author::peter&q=fulltext::blabla&tags=foo,bar,etc
    #####################################################################
    results = SearchQuerySet().models(Entry).all()

    query = None
    for x in request.GET.lists():
        if x[0] == 'q':
            query = x[1]

    order_by = request.GET.get('order_by', 'date')
    date_range = request.GET.get('date__range', None)
    tags = request.GET.get('tags', [])
    cat = request.GET.get('category', None)

    if cat:
        if cat == 'tone_systems':
            # for some f*****g reason, i must exclude and cannot use filter for 'TO' category.. :S
            results = SearchQuerySet().models(Entry).exclude(category='CO')
        elif cat == 'colour_systems':
            category = 'CO'
            results = SearchQuerySet().models(Entry).filter(category=category)
        else:
            results = EmptySearchQuerySet()

    match = request.GET.get('match', 'OR')
    operator = SQ.OR if (match == 'OR') else SQ.AND

    search_items = []
    if query:
        '''
        search_items = [
            { 'scope': 'author', 'term': 'Zarlino' },
            { 'scope': 'fulltext', 'term': 'blabla' },
        ]
        '''
        for item in query:
            search_item = item.split('::')
            search_items.append({'scope': search_item[0], 'term': search_item[1]})

        # filter search masks
        sq = SQ()
        for item in search_items:
            kwargs = {
                # ie: author=AutoQuery
                item['scope']: get_query_class_for_item(item),
            }
            sq.add(SQ(**kwargs), operator)
        results = results.filter(sq)

    if not results:
        results = EmptySearchQuerySet()

    selected_tags = []
    if tags:
        selected_tags = [t.strip() for t in tags.split(',')]
        for tag in selected_tags:
            results = results.filter(SQ(tags=tag))

    # if we filter tags OR have a search query, get the possible tags, otherwise return all tags
    if tags or query:
        possible_tags = []
        for r in results.all():
            possible_tags += [t.pk for t in r.object.tags.all()]
        possible_tags = set(possible_tags)  # convert to set to remove duplicates
        new_tags = Keyword.objects.filter(pk__in=possible_tags).order_by('name')
    else:
        new_tags = Keyword.objects.all()

    tag_objects = []  # what we will return
    for t in new_tags:
        tag_objects.append(
            {"name": t.name, "slug": t.slug, "selected": True if t.slug in selected_tags else False})

    # make sure we return at least the selected tag, if no results were found
    if len(tag_objects) == 0:
        user_tags = Keyword.objects.filter(slug__in=selected_tags).order_by('name')
        for t in user_tags:
            tag_objects.append(
                {"name": t.name, "slug": t.slug, "selected": True})

    if date_range:
        start = date_range.split(',')[0]
        end = date_range.split(',')[1]
        results = results.filter(date__range=(start, end))

    # apply ordering
    results = results.order_by(order_by)

    # paginate
    paginator = Paginator(request.GET, results,
                          resource_uri='/api/' + settings.API_VERSION + '/entry/search/')

    bundles = []
    for result in paginator.page()['objects']:
        bundle = self.build_bundle(obj=result.object, request=request)
        bundles.append(self.full_dehydrate(bundle))

    object_list = {
        'meta': paginator.page()['meta'],
        'objects': bundles
    }
    # object_list['meta']['search_scope'] = SEARCH_SCOPES
    object_list['meta']['search_query'] = search_items
    object_list['meta']['tags'] = tag_objects
    object_list['meta']['order_by'] = order_by
    object_list['meta']['match'] = match

    self.log_throttled_access(request)
    return self.create_response(request, object_list)