def test_removing_page_unpublishes_all_sub_pages(self):
    """Deleting a page via the admin, with the ``recycle_delete`` hook
    registered, should unpublish the page and every descendant instead of
    actually removing them.

    Fix: dropped the unused local ``sub_page_id`` (leftover from the
    sibling test that does use it).
    """
    # Build a small tree: top (live) -> [draft child, live child -> draft grandchild].
    root_page = Page.objects.get(url_path="/")
    top = Page(title="1p", has_unpublished_changes=False, live=True)
    root_page.add_child(instance=top)
    sub_page = Page(title="1p 1u", has_unpublished_changes=True, live=False)
    top.add_child(instance=sub_page)
    sub_page = Page(title="1p 2p", has_unpublished_changes=False, live=True)
    top.add_child(instance=sub_page)
    sub_sub_page = Page(title="1p 2p 3u", has_unpublished_changes=True, live=False)
    sub_page.add_child(instance=sub_sub_page)

    # Sanity check: two live pages ("1p", "1p 2p") and two unpublished ones.
    self.assertEqual(top.get_descendants(inclusive=True).live().count(), 2)
    self.assertEqual(top.get_descendants(inclusive=True).not_live().count(), 2)

    # "Delete" the top page through the admin view with the hook active.
    with self.register_hook("before_delete_page", recycle_delete):
        delete_url = reverse("wagtailadmin_pages:delete", args=(top.id,))
        self.client.post(delete_url)

    top.refresh_from_db()
    # Nothing in the subtree remains live; all four pages still exist.
    self.assertEqual(top.get_descendants(inclusive=True).live().count(), 0)
    self.assertEqual(top.get_descendants(inclusive=True).not_live().count(), 4)
def create_redirects(page: Page, page_old: Page, sites: Iterable[Site]) -> None:
    """Bulk-create redirects from a page's (and its descendants') old URLs
    to their new ones after the page was updated or moved.

    Args:
        page: The page in its new state.
        page_old: The same page in its previous state (old slug/url_path).
        sites: Sites to generate redirects for; nothing happens if empty.

    Fixes: lazy %-style args for ``logger.info`` (instead of an f-string
    evaluated even when the log level is disabled) and comment typos.
    """
    url_path_length = len(page.url_path)
    sites = tuple(sites)
    if not sites:
        return None

    logger.info("Creating redirects for page: '%s' id=%s", page, page.id)

    # For bulk-creating redirects in batches
    batch = BatchRedirectCreator(max_size=2000, ignore_conflicts=True)

    # Treat the page that was updated / moved separately to its descendants,
    # because there may be changes to fields other than `slug` or `url_path`
    # that impact the URL.
    old_urls = _page_urls_for_sites(page_old, sites, cache_target=page)
    new_urls = _page_urls_for_sites(page, sites, cache_target=page)

    # Add redirects for urls that have changed
    changed_urls = old_urls - new_urls
    for site, old_path, route_path in changed_urls:
        batch.add(
            old_path=old_path,
            site=site,
            redirect_page=page,
            redirect_page_route_path=route_path,
            automatically_created=True,
        )

    # Now, repeat the process for each descendant page.
    # Only the `url_path` value of descendants should have been affected by the
    # change, so we can use in-memory manipulation of `url_path` to figure out
    # what the old URLs were.
    for descendant in (
        page.get_descendants().live().specific(defer=True).iterator()
    ):
        new_urls = _page_urls_for_sites(descendant, sites, cache_target=page)

        # Restore old 'url_path' value on in-memory instance
        descendant.url_path = (
            page_old.url_path + descendant.url_path[url_path_length:]
        )
        old_urls = _page_urls_for_sites(descendant, sites, cache_target=page)

        # Add redirects for urls that have changed
        changed_urls = old_urls - new_urls
        for site, old_path, route_path in changed_urls:
            batch.add(
                old_path=old_path,
                site=site,
                redirect_page=descendant,
                redirect_page_route_path=route_path,
                automatically_created=True,
            )

    # Process the final batch
    batch.process()
    logger.info(batch.get_summary())
def test_generate_page_data(self):
    """``generate_page_data(top)`` should return JSON whose "published"
    list contains the page's own id plus the ids of its live descendants
    only (drafts are excluded)."""
    # Tree: top (live) -> [draft child, live child -> draft grandchild].
    home = Page.objects.get(url_path="/")
    top = Page(title="1p", has_unpublished_changes=False, live=True)
    home.add_child(instance=top)

    draft_child = Page(title="1p 1u", has_unpublished_changes=True, live=False)
    top.add_child(instance=draft_child)

    live_child = Page(title="1p 2p", has_unpublished_changes=False, live=True)
    top.add_child(instance=live_child)
    live_child_id = live_child.id

    draft_grandchild = Page(
        title="1p 2p 3u", has_unpublished_changes=True, live=False
    )
    live_child.add_child(instance=draft_grandchild)

    # One live descendant, two drafts.
    self.assertEqual(top.get_descendants().live().count(), 1)
    self.assertEqual(top.get_descendants().not_live().count(), 2)

    expected = {"published": [top.id, live_child_id]}
    self.assertEqual(json.loads(generate_page_data(top)), expected)
def test_restoring_page_re_publishes(self):
    """Restoring a recycled page tree should re-publish exactly the pages
    that were live before deletion, empty the recycle bin, and detach the
    tree from the recycle-bin page.

    Fix: dropped the unused local ``sub_page_id`` (leftover from the
    sibling test that does use it).
    """
    from wagtail_recycle_bin.wagtail_hooks import urlconf_time

    # Build a small tree: top (live) -> [draft child, live child -> draft grandchild].
    root_page = Page.objects.get(url_path="/")
    top = Page(title="1p", has_unpublished_changes=False, live=True)
    root_page.add_child(instance=top)
    sub_page = Page(title="1p 1u", has_unpublished_changes=True, live=False)
    top.add_child(instance=sub_page)
    sub_page = Page(title="1p 2p", has_unpublished_changes=False, live=True)
    top.add_child(instance=sub_page)
    sub_sub_page = Page(title="1p 2p 3u", has_unpublished_changes=True, live=False)
    sub_page.add_child(instance=sub_sub_page)

    # Sanity check before deletion: two live, two unpublished.
    self.assertEqual(top.get_descendants(inclusive=True).live().count(), 2)
    self.assertEqual(top.get_descendants(inclusive=True).not_live().count(), 2)

    # "Delete" the tree via the admin with the recycle hook in place.
    with self.register_hook("before_delete_page", recycle_delete):
        delete_url = reverse("wagtailadmin_pages:delete", args=(top.id,))
        self.client.post(delete_url)

    top.refresh_from_db()
    # Everything is unpublished while in the bin.
    self.assertEqual(top.get_descendants(inclusive=True).live().count(), 0)
    self.assertEqual(top.get_descendants(inclusive=True).not_live().count(), 4)

    # Restore through the plugin's admin URL.
    with self.register_hook("register_admin_urls", urlconf_time):
        restore_url = reverse("wagtail_recycle_bin_restore", args=(top.id,))
        self.client.get(restore_url)

    top.refresh_from_db()
    # The original live/draft split is back, and the bin is empty.
    self.assertEqual(top.get_descendants(inclusive=True).live().count(), 2)
    self.assertEqual(top.get_descendants(inclusive=True).not_live().count(), 2)
    self.assertEqual(RecycleBin.objects.count(), 0)
    self.assertEqual(RecycleBinPage.objects.first().get_children().count(), 0)