def test_reverse_order(self):
    """Descending ordering pages from newest to oldest; the cursor taken
    from the first page's last item resumes at the next (older) pair."""
    paginator = CursorPaginator(Post.objects.all(), ('-created', '-name'))
    first_page = paginator.page(first=2)
    self.assertSequenceEqual(first_page, [self.items[3], self.items[2]])
    boundary = paginator.cursor(first_page[-1])
    second_page = paginator.page(first=2, after=boundary)
    self.assertSequenceEqual(second_page, [self.items[1], self.items[0]])
def test_with_items(self):
    """With no page size requested, a single page holds every record."""
    for index in range(20):
        Post.objects.create(name='Name %s' % index)
    paginator = CursorPaginator(Post.objects.all(), ('id', ))
    page = paginator.page()
    self.assertEqual(20, len(page))
    self.assertFalse(page.has_next)
    self.assertFalse(page.has_previous)
def chunked_queryset_iterator(queryset, size, *, ordering=('id', )):
    """
    Iterate *queryset* in chunks of *size* items using cursor pagination.

    Can be used instead of ``queryset.iterator()`` so that
    ``.prefetch_related()`` keeps working (see the documented sibling
    ``_chunked_queryset_iterator`` in this codebase).

    Note:: *ordering* must uniquely identify each object and use a single
    direction (all ASC or all DESC).
    """
    pager = CursorPaginator(queryset, ordering)
    after = None
    while True:
        page = pager.page(after=after, first=size)
        if page:
            yield from page.items
        else:
            # Empty page: nothing left to yield.
            return
        if not page.has_next:
            break
        # The next page starts right after the last item of this one.
        after = pager.cursor(instance=page[-1])
def setUpTestData(cls):
    """Create 20 posts, each one hour older than the previous one, and a
    paginator ordered newest-first on ``created``."""
    base_time = timezone.now()
    cls.items = []
    for offset in range(20):
        created_at = base_time - datetime.timedelta(hours=offset)
        post = Post.objects.create(name='Name %s' % offset, created=created_at)
        cls.items.append(post)
    cls.paginator = CursorPaginator(Post.objects.all(), ('-created',))
def setUpTestData(cls):
    """Create 20 posts alternating between two authors, and a paginator
    over a values() queryset ordered on the joined author name."""
    cls.items = []
    ana = Author.objects.create(name='Ana')
    bob = Author.objects.create(name='Bob')
    for index in range(20):
        # Odd indexes belong to Ana, even ones to Bob.
        owner = ana if index % 2 else bob
        cls.items.append(Post.objects.create(name='Name %02d' % index, author=owner))
    queryset = Post.objects.all().values('author__name', 'name')
    cls.paginator = CursorPaginator(queryset, ('author__name', 'name'))
def show_settings_menu(request):
    """View: List settings menus.

    Renders users, settings and a cursor-paginated event log. The event
    cursor is derived from the requested page number (``p_events``) so the
    template can keep classic previous/next page numbers.
    """
    from cursor_pagination import CursorPaginator
    users = User.objects.all().annotate(apitoken=F('auth_token'))
    settings = Setting.objects.all().order_by("key")
    events_list = Event.objects.all()

    # BUG FIX: page-size and page-number parsing. The original compared
    # ``type(page_events) == 'unicode'`` — a type object against a string,
    # which is always False — so any non-numeric ``p_events`` (or ``n``)
    # crashed in ``int()``. Validate defensively instead.
    try:
        nb_rows = int(request.GET.get('n', 16))
    except (TypeError, ValueError):
        nb_rows = 16
    try:
        page_events = int(request.GET.get('p_events', 1))
    except (TypeError, ValueError):
        page_events = 1

    events_paginator = CursorPaginator(events_list, ordering=['-id'])

    # The cursor encodes the row offset of the previous page boundary.
    if page_events > 1:
        before = base64.b64encode(
            str((page_events - 1) * nb_rows).encode()).decode("UTF-8")
    else:
        before = base64.b64encode(b"0").decode("UTF-8")
    events = events_paginator.page(first=nb_rows, before=before)

    # ``before`` is always set above; a decoded value above b"0" means we
    # are past the first page. (Bytes comparison, as in the original.)
    has_previous = base64.b64decode(before) > b"0"
    previous_decoded_cursor = page_events - 1 if has_previous else "1"
    next_decoded_cursor = page_events + 1 if events.has_next else "1"
    return render(
        request, 'menu-settings.html', {
            'users': users,
            'settings': settings,
            'events': events,
            'events_page_info': {
                'end_cursor': events_list.count() // nb_rows,
                'has_previous': has_previous,
                'has_next': events.has_next,
                'next_page_number': next_decoded_cursor,
                'previous_page_number': previous_decoded_cursor
            }
        })
def search(user_id, search_term, before=None, after=None):
    """Offload the expensive part of search to avoid blocking the web interface"""
    if not search_term:
        return {"results": [], "has_next": False}
    if before and after:
        raise ValueError("You can't do this.")

    email_subquery = models.Email.objects.viewable(user_id)
    inbox_subquery = models.Inbox.objects.viewable(user_id)
    search_qs = watson_search.search(search_term, models=(email_subquery, inbox_subquery))

    # Page backwards when a "before" cursor was supplied, forwards otherwise.
    direction = "last" if before else "first"
    page_kwargs = {"after": after, "before": before, direction: SEARCH_PAGE_SIZE}

    paginator = CursorPaginator(search_qs, ordering=('-watson_rank', '-id'))
    page = paginator.page(**page_kwargs)
    results = {
        "results": [hit.id for hit in page],
        "has_next": page.has_next,
        "has_previous": page.has_previous,
    }
    if results["results"]:
        results["last"] = paginator.cursor(page[-1])
        results["first"] = paginator.cursor(page[0])

    key = create_search_cache_key(user_id, search_term, before, after)
    cache.set(key, results, SEARCH_TIMEOUT)
    return results
def _chunked_queryset_iterator(queryset, size, *, ordering=('id', )):
    """
    Split a queryset into chunks.

    This can be used instead of `queryset.iterator()`, so
    `.prefetch_related()` also works

    Note:: The ordering must uniquely identify the object,
    and be in the same order (ASC/DESC).

    See https://github.com/photocrowd/django-cursor-pagination
    """
    paginator = CursorPaginator(queryset, ordering)
    cursor = None
    while True:
        current = paginator.page(after=cursor, first=size)
        if not current:
            # Nothing left to emit.
            return
        yield from current.items
        if not current.has_next:
            return
        # The next chunk starts after the last item of this one.
        cursor = paginator.cursor(instance=current[-1])
def connection_from_cursor_paginated(queryset, connection_type, edge_type, pageinfo_type, page_query=None):
    """Create a Connection object from a queryset, using CursorPaginator.

    The queryset's own ``order_by`` clause is used as the pagination
    ordering; ``page_query`` (an attrs instance) supplies the
    first/last/before/after page arguments, defaulting to ``PageQuery()``.
    """
    paginator = CursorPaginator(queryset, queryset.query.order_by)
    if page_query is None:
        page_query = PageQuery()
    page = paginator.page(**attr.asdict(page_query))
    # Build the edges in one pass; no need to materialize `list(page)` first.
    edges = [edge_type(node=item, cursor=paginator.cursor(item)) for item in page]
    if edges:
        # Reuse the cursors already computed on the boundary edges instead
        # of recomputing them (the original called paginator.cursor again).
        page_info = pageinfo_type(
            start_cursor=edges[0].cursor,
            end_cursor=edges[-1].cursor,
            has_previous_page=page.has_previous,
            has_next_page=page.has_next,
        )
    else:
        page_info = pageinfo_type(
            start_cursor=None,
            end_cursor=None,
            has_previous_page=False,
            has_next_page=False,
        )
    return connection_type(
        edges=edges,
        page_info=page_info,
    )
def search(user_id, search_term, before=None, after=None):
    """Offload the expensive part of search to avoid blocking the web interface"""
    if not search_term:
        return {"results": [], "has_next": False}
    if before and after:
        raise ValueError("You can't do this.")

    searchable_models = (
        models.Email.objects.viewable(user_id),
        models.Inbox.objects.viewable(user_id),
    )
    search_qs = watson_search.search(search_term, models=searchable_models)

    page_kwargs = {"after": after, "before": before}
    if before:
        page_kwargs["last"] = SEARCH_PAGE_SIZE
    else:
        page_kwargs["first"] = SEARCH_PAGE_SIZE

    paginator = CursorPaginator(search_qs, ordering=('-watson_rank', '-id'))
    page = paginator.page(**page_kwargs)

    ids = [result.id for result in page]
    results = {"results": ids, "has_next": page.has_next, "has_previous": page.has_previous}
    if ids:
        # Boundary cursors let the caller page in either direction.
        results["first"] = paginator.cursor(page[0])
        results["last"] = paginator.cursor(page[-1])

    cache.set(
        create_search_cache_key(user_id, search_term, before, after),
        results,
        SEARCH_TIMEOUT,
    )
    return results
def ApiListView(request):
    """JSON list endpoint for videos, cursor-paginated 10 per page.

    Query params ``before`` / ``after`` are opaque cursors returned by a
    previous response's ``first_cursor`` / ``last_cursor``.
    """
    params = request.GET
    queryset = Video.objects.all().order_by('-published_at')
    page_size = 10
    paginator = CursorPaginator(queryset, ordering=['-id'])
    if 'before' in params:
        page = paginator.page(first=page_size, before=params['before'])
    elif 'after' in params:
        page = paginator.page(first=page_size, after=params['after'])
    else:
        page = paginator.page(first=page_size, after=None)
    objects = [VideoSerializer(p).data for p in page]
    # BUG FIX: the original indexed page[-1] / page[0] unconditionally,
    # which raises IndexError when the page is empty; return None cursors
    # instead.
    data = {
        'objects': objects,
        'has_next_page': page.has_next,
        'has_previous_page': page.has_previous,
        'last_cursor': paginator.cursor(page[-1]) if objects else None,
        'first_cursor': paginator.cursor(page[0]) if objects else None,
    }
    return JsonResponse(data)
def test_mixed_order(self):
    """Mixing ascending and descending ordering keys is rejected."""
    self.assertRaises(
        InvalidCursor,
        CursorPaginator,
        Post.objects.all(),
        ('created', '-name'),
    )
def test_empty(self):
    """An empty queryset yields an empty page with no neighbours."""
    page = CursorPaginator(Post.objects.all(), ('id', )).page()
    self.assertEqual(0, len(page))
    self.assertFalse(page.has_previous)
    self.assertFalse(page.has_next)