def check_indexes(self, params, page_num, indexes):
    """
    Build a Paginator from ``params`` and verify that the page at
    ``page_num`` reports the start/end indexes given in ``indexes``.

    ``params`` is ``(object_list, per_page, orphans)``; ``indexes`` is a
    ``(start, end)`` 2-tuple.  ``page_num`` may also be the literal
    strings ``'first'`` or ``'last'`` instead of a number.
    """
    object_list, per_page, orphans = params
    settings = get_settings()
    settings['DEFAULT_PAGINATION'] = per_page
    settings['DEFAULT_ORPHANS'] = orphans
    # allow_empty_first_page does not exist in this version
    paginator = Paginator('mypaginator', object_list, settings)
    if page_num == 'first':
        page_num = 1
    elif page_num == 'last':
        page_num = paginator.num_pages
    page = paginator.page(page_num)
    start, end = indexes
    msg = ("For %s of page %s, expected %s but got %s."
           " Paginator parameters were: %s")
    self.assertEqual(
        start, page.start_index(),
        msg % ('start index', page_num, start, page.start_index(), params))
    self.assertEqual(
        end, page.end_index(),
        msg % ('end index', page_num, end, page.end_index(), params))
def test_has_other_pages(self):
    """``has_other_pages`` is true exactly when a previous or next page exists."""
    settings = get_settings()
    settings['DEFAULT_PAGINATION'] = 5
    settings['DEFAULT_ORPHANS'] = 1
    five = 'abcde'
    # Five items at five per page: a single page, so no other pages.
    first = Paginator('mypaginator', five, settings).page(1)
    self.assertFalse(first.has_other_pages())
    # Three per page splits the five items across two pages.
    settings['DEFAULT_PAGINATION'] = 3
    first = Paginator('mypaginator', five, settings).page(1)
    self.assertTrue(first.has_other_pages())
def test_invalid_page_number(self):
    """
    Out-of-range page numbers are handled consistently: requesting a
    page past the end yields an empty object list rather than raising,
    for both populated and empty paginators.
    """
    settings = get_settings()
    settings['DEFAULT_PAGINATION'] = 2
    for objects in ([1, 2, 3], []):
        paginator = Paginator('mypaginator', objects, settings)
        self.assertEqual(paginator.page(3).object_list, [])
def test_get_page_hook(self):
    """
    Tests page navigation (``has_previous``/``has_next`` and the
    previous/next page numbers) across a two-page paginator.

    NOTE(review): the original docstring claimed this exercised a
    ``_get_page`` hook returning an alternative Page class, but no
    Paginator subclass is used anywhere in the body — only navigation
    is checked.  Docstring corrected to match the actual behavior.
    """
    settings = get_settings()
    settings['DEFAULT_PAGINATION'] = 5
    settings['DEFAULT_ORPHANS'] = 1
    # Eleven items, five per page, one orphan allowed: the lone
    # eleventh item is folded into page 2, giving exactly two pages.
    eleven = 'abcdefghijk'
    paginator = Paginator('mypaginator', eleven, settings)
    page1 = paginator.page(1)
    page2 = paginator.page(2)
    self.assertFalse(page1.has_previous())
    self.assertEqual(page1.next_page_number(), 2)
    self.assertEqual(page2.previous_page_number(), 1)
    self.assertFalse(page2.has_next())
def test_save_as_preservation(self):
    """The save_as name handed to the Paginator is carried onto its pages."""
    settings = get_settings()
    # Normalise the pagination patterns into PaginationRule objects,
    # sorted by their first field, as the paginator expects.
    from buccaneer.paginator import PaginationRule
    raw_patterns = settings.get('PAGINATION_PATTERNS',
                                DEFAULT_CONFIG['PAGINATION_PATTERNS'])
    rules = [PaginationRule(*pattern) for pattern in raw_patterns]
    settings['PAGINATION_PATTERNS'] = sorted(rules, key=lambda rule: rule[0])
    articles = [Article(**self.page_kwargs), Article(**self.page_kwargs)]
    page = Paginator('foobar.foo', articles, settings).page(1)
    self.assertEqual(page.save_as, 'foobar.foo')
def generate_output(self, writer=None):
    """
    Write the site's sitemap file (XML or TXT) to the output path.

    Collects all pages and articles (plus article translations when not
    multisite), expands the paginated category, tag, and index listings,
    and writes one sitemap entry per resulting page to
    ``sitemap.<format>``.

    :param writer: optional writer; defaults to the one in the context.
        NOTE(review): ``writer`` is resolved but never used below —
        kept only for interface compatibility; confirm before removing.
    """
    if writer is None:  # was `writer == None`; identity test is the idiom
        writer = self.context['writer']
    path = os.path.join(self.output_path, 'sitemap.{0}'.format(self.format))
    pages = self.context['pages'] + self.context['articles']
    # NOTE(review): earlier commented-out code also considered adding
    # category/tag/author index pages and a fixed list of standard pages
    # here ("not decided if this is right for buccaneer").
    self.set_url_wrappers_modification_date(self.context['tags'])
    self.set_url_wrappers_modification_date(self.context['authors'])
    if not self.multisite:
        # Single-site builds also list every article translation.
        for article in self.context['articles']:
            pages += article.translations
    logger.info('writing {0}'.format(path))
    with open(path, 'w', encoding='utf-8') as fd:
        if self.format == 'xml':
            fd.write(XML_HEADER)
        else:
            fd.write(TXT_HEADER.format(self.siteurl))
        PageInfo = collections.namedtuple(
            'PageInfo', ['status', 'date', 'url', 'save_as'])
        # write category sitemap (TODO same is necessary for authors!)
        for category, articles in self.context['categories']:
            if category.save_as:
                # we have a paginator implementation so lets use it!
                self._write_paginated(fd, PageInfo, category.save_as,
                                      articles, category.settings, category)
        # write tag sitemap
        for tag, articles in self.context['tags']:
            if tag.save_as:
                self._write_paginated(fd, PageInfo, tag.save_as,
                                      articles, tag.settings, tag)
        # write index sitemap
        if self.index_page:
            self._write_paginated(fd, PageInfo, 'index.html',
                                  self.context['articles'], self.context,
                                  self.index_page)
        for page in pages:
            self.write_url(page, fd)
        if self.format == 'xml':
            fd.write(XML_FOOTER)

def _write_paginated(self, fd, page_info_cls, name, object_list, settings,
                     date_source):
    """
    Write one sitemap entry per page of a paginated listing.

    ``date_source`` is the wrapper (category, tag, or index page) whose
    modification date stamps every generated entry.  Factored out of
    ``generate_output``, where this loop appeared three times verbatim.
    """
    pag = Paginator(name, object_list, settings)
    for page_num in range(pag.num_pages):
        # Paginator pages are 1-indexed.
        page = pag.page(page_num + 1)
        info = page_info_cls(
            status='published',
            date=self.get_date_modified(date_source, self.now),
            url=page.url,
            save_as=page.save_as)
        self.write_url(info, fd)