def test_page_copy_alias_post(self):
    """Copying a page with ``alias=True`` should create a live alias of the original."""
    copy_url = reverse('wagtailadmin_pages:copy', args=(self.test_page.id, ))
    form_data = {
        'new_title': "Hello world 2",
        'new_slug': 'hello-world-2',
        'new_parent_page': str(self.root_page.id),
        'copy_subpages': False,
        'publish_copies': False,
        'alias': True,
    }
    response = self.client.post(copy_url, form_data)

    # The user should land on the parent's explorer page afterwards
    self.assertRedirects(
        response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

    # Fetch the newly created alias
    alias_page = self.root_page.get_children().get(slug='hello-world-2')

    # It must point back at the original page
    self.assertEqual(alias_page.alias_of, self.test_page.page_ptr)

    # publish_copies is ignored for aliases: they always mirror the original's
    # state, so the alias is live with no unpublished changes
    self.assertTrue(alias_page.live)
    self.assertFalse(alias_page.has_unpublished_changes)

    # Ownership is assigned to the requesting user
    self.assertEqual(alias_page.owner, self.user)

    # copy_subpages was False, so no children should have been created
    self.assertEqual(alias_page.get_children().count(), 0)

    # The page tree must still be internally consistent
    self.assertFalse(any(Page.find_problems()),
                     'treebeard found consistency problems')
def test_page_copy_post_new_parent(self):
    """Copying a page into a different parent should place the copy under that parent."""
    post_data = {
        "new_title": "Hello world 2",
        "new_slug": "hello-world-2",
        "new_parent_page": str(self.test_child_page.id),
        "copy_subpages": False,
        "publish_copies": False,
        "alias": False,
    }
    response = self.client.post(
        reverse("wagtailadmin_pages:copy", args=(self.test_page.id, )),
        post_data)

    # Check that the user was redirected to the new parent's explore page
    self.assertRedirects(
        response,
        reverse("wagtailadmin_explore", args=(self.test_child_page.id, )))

    # Check that the page was copied to the correct place.
    # BUG FIX: the original used assertTrue(a, b), which treats the second
    # argument as a failure *message* and passes for any truthy first
    # argument; assertEqual actually compares the parent to the expected page.
    self.assertEqual(
        Page.objects.filter(slug="hello-world-2").first().get_parent(),
        self.test_child_page,
    )

    # treebeard should report no consistency problems with the tree
    self.assertFalse(any(Page.find_problems()),
                     "treebeard found consistency problems")
def test_page_delete_notlive_post(self):
    """Deleting a page that is already unpublished must not fire page_unpublished."""
    # Take the page offline first so the delete happens on a non-live page
    self.child_page.live = False
    self.child_page.save()

    # Record any page_unpublished signals sent during the delete
    signal_handler = mock.MagicMock()
    page_unpublished.connect(signal_handler)

    delete_url = reverse('wagtailadmin_pages:delete',
                         args=(self.child_page.id, ))
    response = self.client.post(delete_url)

    # A successful delete redirects back to the parent's explorer view
    self.assertRedirects(
        response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

    # The page tree must remain consistent after the delete
    self.assertFalse(any(Page.find_problems()),
                     'treebeard found consistency problems')

    # The page itself should no longer exist under the root
    remaining = Page.objects.filter(
        path__startswith=self.root_page.path, slug='hello-world')
    self.assertEqual(remaining.count(), 0)

    # No unpublish signal should have been emitted for a non-live page
    self.assertEqual(signal_handler.call_count, 0)
def test_create_simplepage_post(self):
    """Posting the add-page form without publishing creates a draft SimplePage."""
    form_data = {
        'title': "New page!",
        'content': "Some content",
        'slug': 'hello-world',
    }
    add_url = reverse('wagtailadmin_pages:add',
                      args=('tests', 'simplepage', self.root_page.id))
    response = self.client.post(add_url, form_data)

    # Look the new page up by its slug under the root
    page = Page.objects.get(path__startswith=self.root_page.path,
                            slug='hello-world').specific

    # Creating without publishing should land the user on the edit view
    self.assertRedirects(
        response, reverse('wagtailadmin_pages:edit', args=(page.id, )))

    # The submitted data should be reflected on the new page
    self.assertEqual(page.title, form_data['title'])
    self.assertEqual(page.draft_title, form_data['title'])
    self.assertIsInstance(page, SimplePage)

    # It is a draft: not live and never published
    self.assertFalse(page.live)
    self.assertFalse(page.first_published_at)

    # The page tree must remain consistent
    self.assertFalse(any(Page.find_problems()),
                     'treebeard found consistency problems')
def test_page_delete_post(self):
    """Deleting a live page fires page_unpublished exactly once for that page."""
    # Capture page_unpublished signals sent during the delete
    signal_handler = mock.MagicMock()
    page_unpublished.connect(signal_handler)

    response = self.client.post(
        reverse('wagtailadmin_pages:delete', args=(self.child_page.id, )))

    # A successful delete redirects to the parent's explorer view
    self.assertRedirects(
        response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

    # The page tree must remain consistent
    self.assertFalse(any(Page.find_problems()),
                     'treebeard found consistency problems')

    # The page should no longer exist under the root
    self.assertEqual(
        Page.objects.filter(path__startswith=self.root_page.path,
                            slug='hello-world').count(), 0)

    # Exactly one unpublish signal was sent, carrying the deleted page
    self.assertEqual(signal_handler.call_count, 1)
    call_kwargs = signal_handler.mock_calls[0][2]
    self.assertEqual(call_kwargs['sender'], self.child_page.specific_class)
    self.assertEqual(call_kwargs['instance'], self.child_page)
    self.assertIsInstance(call_kwargs['instance'],
                          self.child_page.specific_class)
def test_page_copy_post_new_parent(self):
    """Copying a page under a different parent places the copy there."""
    post_data = {
        'new_title': "Hello world 2",
        'new_slug': 'hello-world-2',
        'new_parent_page': str(self.test_child_page.id),
        'copy_subpages': False,
        'publish_copies': False,
    }
    response = self.client.post(
        reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )),
        post_data)

    # Check that the user was redirected to the new parent's explore page
    self.assertRedirects(
        response,
        reverse('wagtailadmin_explore', args=(self.test_child_page.id, )))

    # Check that the page was copied to the correct place.
    # BUG FIX: assertTrue(a, b) treats `b` as a failure message, so the
    # original assertion passed whenever get_parent() was truthy; use
    # assertEqual to actually compare against the expected parent.
    self.assertEqual(
        Page.objects.filter(slug='hello-world-2').first().get_parent(),
        self.test_child_page)

    # treebeard should report no consistency problems with the tree
    self.assertFalse(any(Page.find_problems()),
                     'treebeard found consistency problems')
def test_bulk_delete_notlive_post(self):
    # Same as the live-page test above, but verifies that the page_unpublished
    # signal is NOT fired for a page that was already unpublished before the
    # bulk delete happened.
    # Unpublish the first child page up front
    page_to_be_unpublished = self.pages_to_be_deleted[0]
    page_to_be_unpublished.unpublish(user=self.user)

    # Connect a mock signal handler to page_unpublished signal
    mock_handler = mock.MagicMock()
    page_unpublished.connect(mock_handler)

    # Post to the bulk delete view
    response = self.client.post(self.url)

    # Should be redirected to explorer page
    self.assertEqual(response.status_code, 302)

    # treebeard should report no consistency problems with the tree
    self.assertFalse(any(Page.find_problems()),
                     "treebeard found consistency problems")

    # Check that the child pages to be deleted are gone
    for child_page in self.pages_to_be_deleted:
        self.assertFalse(
            SimplePage.objects.filter(id=child_page.id).exists())

    # Check that the child pages not to be deleted remain
    for child_page in self.pages_not_to_be_deleted:
        self.assertTrue(
            SimplePage.objects.filter(id=child_page.id).exists())

    # One signal per deleted page and descendant, minus one for the page
    # that was already unpublished before the delete
    num_descendants = sum(
        len(v) for v in self.grandchildren_pages.values())
    self.assertEqual(mock_handler.call_count,
                     len(self.pages_to_be_deleted) + num_descendants - 1)

    # check that only signals for other pages are fired
    # (`i` walks mock_handler.mock_calls in the order the signals were sent;
    # mock_calls[i][2] is the kwargs dict of the i-th call)
    i = 0
    for child_page in self.pages_to_be_deleted:
        if child_page.id != page_to_be_unpublished.id:
            mock_call = mock_handler.mock_calls[i][2]
            i += 1
            self.assertEqual(mock_call["sender"], child_page.specific_class)
            self.assertEqual(mock_call["instance"], child_page)
            self.assertIsInstance(mock_call["instance"],
                                  child_page.specific_class)
        # NOTE(review): grandchildren signals are checked even for the
        # pre-unpublished parent — consistent with the call-count arithmetic
        # above (only the parent itself is excluded); confirm against fixtures
        for grandchildren_page in self.grandchildren_pages.get(
                child_page, []):
            mock_call = mock_handler.mock_calls[i][2]
            i += 1
            self.assertEqual(mock_call["sender"],
                             grandchildren_page.specific_class)
            self.assertEqual(mock_call["instance"], grandchildren_page)
            self.assertIsInstance(mock_call["instance"],
                                  grandchildren_page.specific_class)
def test_subpage_deletion(self):
    """Deleting an index page also removes its child, and fires
    page_unpublished, pre_delete and post_delete for both pages."""
    # Record (sender, instance.id) tuples per signal of interest
    received = {
        'unpublished': [],
        'pre_delete': [],
        'post_delete': [],
    }

    def make_recorder(key):
        # Build a handler that appends into the bucket for `key`
        def recorder(sender, instance, **kwargs):
            received[key].append((sender, instance.id))
        return recorder

    # Keep strong local references to the handlers: Django signal connections
    # are weak by default, so the closures must stay alive for the whole test
    unpublished_recorder = make_recorder('unpublished')
    pre_delete_recorder = make_recorder('pre_delete')
    post_delete_recorder = make_recorder('post_delete')
    page_unpublished.connect(unpublished_recorder)
    pre_delete.connect(pre_delete_recorder)
    post_delete.connect(post_delete_recorder)

    # Delete the index page through the admin view
    response = self.client.post(
        reverse('wagtailadmin_pages:delete', args=(self.child_index.id, )))

    # Should be redirected to the parent's explorer page
    self.assertRedirects(
        response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

    # treebeard should report no consistency problems with the tree
    self.assertFalse(any(Page.find_problems()),
                     'treebeard found consistency problems')

    # The index page itself is gone...
    self.assertFalse(
        StandardIndex.objects.filter(id=self.child_index.id).exists())
    self.assertFalse(Page.objects.filter(id=self.child_index.id).exists())

    # ...and so is its subpage
    self.assertFalse(
        StandardChild.objects.filter(id=self.grandchild_page.id).exists())
    self.assertFalse(
        Page.objects.filter(id=self.grandchild_page.id).exists())

    # Every signal fired for both the index and its child
    expected_pages = [
        (StandardIndex, self.child_index.id),
        (StandardChild, self.grandchild_page.id),
    ]
    for signal_key in ('unpublished', 'pre_delete', 'post_delete'):
        for expected in expected_pages:
            self.assertIn(expected, received[signal_key])
def test_page_copy_no_publish_permission_post_copy_subpages_publish_copies(
        self):
    """Editors without publish permission cannot force copies live, even by
    submitting publish_copies=True directly (browser tampering)."""
    # Demote the user from superuser to a plain editor (can add, not publish)
    self.user.is_superuser = False
    self.user.groups.add(Group.objects.get(name="Editors"), )
    self.user.save()

    # Submit the copy form with publish_copies forced on
    response = self.client.post(
        reverse("wagtailadmin_pages:copy", args=(self.test_page.id, )),
        {
            "new_title": "Hello world 2",
            "new_slug": "hello-world-2",
            "new_parent_page": str(self.root_page.id),
            "copy_subpages": True,
            "publish_copies": True,
            "alias": False,
        })

    # The user should still be redirected to the parent's explore page
    self.assertRedirects(
        response, reverse("wagtailadmin_explore", args=(self.root_page.id, )))

    # The copy exists but must NOT be live despite publish_copies=True
    page_copy = self.root_page.get_children().filter(
        slug="hello-world-2").first()
    self.assertIsNotNone(page_copy)
    self.assertFalse(page_copy.live)

    # The copy is owned by the requesting user
    self.assertEqual(page_copy.owner, self.user)

    # Both subpages were copied, and neither may be live
    self.assertEqual(page_copy.get_children().count(), 2)
    for child_slug in ("child-page", "unpublished-child-page"):
        copied_child = page_copy.get_children().filter(
            slug=child_slug).first()
        self.assertIsNotNone(copied_child)
        self.assertFalse(copied_child.live)

    # treebeard should report no consistency problems with the tree
    self.assertFalse(any(Page.find_problems()),
                     "treebeard found consistency problems")
def test_page_copy_alias_post_copy_subpages(self):
    """Copying as an alias with subpages creates aliases for the whole subtree,
    each mirroring its original's publication state."""
    response = self.client.post(
        reverse("wagtailadmin_pages:copy", args=(self.test_page.id, )),
        {
            "new_title": "Hello world 2",
            "new_slug": "hello-world-2",
            "new_parent_page": str(self.root_page.id),
            "copy_subpages": True,
            "publish_copies": False,
            "alias": True,
        })

    # The user should be redirected to the parent's explore page
    self.assertRedirects(
        response, reverse("wagtailadmin_explore", args=(self.root_page.id, )))

    # The top-level alias points at the original page
    page_copy = self.root_page.get_children().get(slug="hello-world-2")
    self.assertEqual(page_copy.alias_of, self.test_page.page_ptr)

    # publish_copies is ignored for aliases: state always mirrors the original
    self.assertTrue(page_copy.live)
    self.assertFalse(page_copy.has_unpublished_changes)

    # Ownership goes to the requesting user
    self.assertEqual(page_copy.owner, self.user)

    # Both children were aliased as well
    self.assertEqual(page_copy.get_children().count(), 2)

    # Each child alias mirrors its own original's publication state
    expectations = [
        ("child-page", self.test_child_page.page_ptr, True),
        ("unpublished-child-page",
         self.test_unpublished_child_page.page_ptr, False),
    ]
    for child_slug, original_ptr, is_live in expectations:
        child_alias = page_copy.get_children().filter(
            slug=child_slug).first()
        self.assertIsNotNone(child_alias)
        self.assertEqual(child_alias.alias_of, original_ptr)
        if is_live:
            self.assertTrue(child_alias.live)
            self.assertFalse(child_alias.has_unpublished_changes)
        else:
            self.assertFalse(child_alias.live)
            self.assertTrue(child_alias.has_unpublished_changes)

    # treebeard should report no consistency problems with the tree
    self.assertFalse(any(Page.find_problems()),
                     "treebeard found consistency problems")
def test_page_copy_post_copy_subpages_publish_copies(self):
    """Copying with copy_subpages and publish_copies publishes the copy, while
    each subpage keeps its own publication state."""
    post_data = {
        'new_title': "Hello world 2",
        'new_slug': 'hello-world-2',
        'new_parent_page': str(self.root_page.id),
        'copy_subpages': True,
        'publish_copies': True,
        'alias': False,
    }
    response = self.client.post(
        reverse('wagtailadmin_pages:copy', args=(self.test_page.id, )),
        post_data)

    # Check that the user was redirected to the parent's explore page
    self.assertRedirects(
        response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

    # Get copy
    page_copy = self.root_page.get_children().filter(
        slug='hello-world-2').first()

    # Check that the copy exists
    # (assertIsNotNone replaces the weaker assertNotEqual(..., None) idiom)
    self.assertIsNotNone(page_copy)

    # Check that the copy is live
    self.assertTrue(page_copy.live)
    self.assertFalse(page_copy.has_unpublished_changes)

    # Check that the owner of the page is set correctly
    self.assertEqual(page_copy.owner, self.user)

    # Check that the children were copied
    self.assertEqual(page_copy.get_children().count(), 2)

    # Check the child pages
    # The child_copy should be live but the unpublished_child_copy shouldn't
    child_copy = page_copy.get_children().filter(slug='child-page').first()
    self.assertIsNotNone(child_copy)
    self.assertTrue(child_copy.live)
    self.assertTrue(child_copy.has_unpublished_changes)

    unpublished_child_copy = page_copy.get_children().filter(
        slug='unpublished-child-page').first()
    self.assertIsNotNone(unpublished_child_copy)
    self.assertFalse(unpublished_child_copy.live)
    self.assertTrue(unpublished_child_copy.has_unpublished_changes)

    # treebeard should report no consistency problems with the tree
    self.assertFalse(any(Page.find_problems()),
                     'treebeard found consistency problems')
def test_bulk_delete_post(self):
    """Bulk-deleting pages fires page_unpublished once for every deleted page
    and every deleted descendant, in deletion order."""
    # Capture all page_unpublished signals sent during the bulk delete
    mock_handler = mock.MagicMock()
    page_unpublished.connect(mock_handler)

    response = self.client.post(self.url)

    # A successful bulk delete redirects back to the explorer
    self.assertEqual(response.status_code, 302)

    # The page tree must stay consistent
    self.assertFalse(any(Page.find_problems()),
                     "treebeard found consistency problems")

    # Every targeted child page is gone...
    for child_page in self.pages_to_be_deleted:
        self.assertFalse(
            SimplePage.objects.filter(id=child_page.id).exists())

    # ...while untargeted siblings survive
    for child_page in self.pages_not_to_be_deleted:
        self.assertTrue(
            SimplePage.objects.filter(id=child_page.id).exists())

    # One signal per deleted page plus one per deleted descendant
    num_descendants = sum(
        len(pages) for pages in self.grandchildren_pages.values())
    self.assertEqual(mock_handler.call_count,
                     len(self.pages_to_be_deleted) + num_descendants)

    # Walk the recorded calls in order: each child page first, then each of
    # its grandchildren (call[2] is the kwargs dict the signal was sent with)
    calls = iter(mock_handler.mock_calls)

    def check_next_call(expected_page):
        call_kwargs = next(calls)[2]
        self.assertEqual(call_kwargs["sender"], expected_page.specific_class)
        self.assertEqual(call_kwargs["instance"], expected_page)
        self.assertIsInstance(call_kwargs["instance"],
                              expected_page.specific_class)

    for child_page in self.pages_to_be_deleted:
        check_next_call(child_page)
        for grandchild_page in self.grandchildren_pages.get(child_page, []):
            check_next_call(grandchild_page)
def test_create_simplepage_post_publish(self):
    """Publishing on create makes the page live and fires page_published once."""
    # Capture page_published signals
    signal_handler = mock.MagicMock()
    page_published.connect(signal_handler)

    form_data = {
        'title': "New page!",
        'content': "Some content",
        'slug': 'hello-world',
        'action-publish': "Publish",
    }
    response = self.client.post(
        reverse('wagtailadmin_pages:add',
                args=('tests', 'simplepage', self.root_page.id)),
        form_data)

    # Look up the freshly created page
    page = Page.objects.get(path__startswith=self.root_page.path,
                            slug='hello-world').specific

    # Publishing on create redirects to the explorer, not the edit view
    self.assertRedirects(
        response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

    # Submitted data should be reflected on the page
    self.assertEqual(page.title, form_data['title'])
    self.assertEqual(page.draft_title, form_data['title'])
    self.assertIsInstance(page, SimplePage)

    # The page is live and stamped with a first publication time
    self.assertTrue(page.live)
    self.assertTrue(page.first_published_at)

    # Exactly one page_published signal was sent, carrying the new page
    self.assertEqual(signal_handler.call_count, 1)
    call_kwargs = signal_handler.mock_calls[0][2]
    self.assertEqual(call_kwargs['sender'], page.specific_class)
    self.assertEqual(call_kwargs['instance'], page)
    self.assertIsInstance(call_kwargs['instance'], page.specific_class)

    # The page tree must remain consistent
    self.assertFalse(any(Page.find_problems()),
                     'treebeard found consistency problems')
def test_page_copy_post(self):
    """A plain copy (no subpages, no publish) creates an unpublished draft copy."""
    copy_url = reverse("wagtailadmin_pages:copy", args=(self.test_page.id, ))
    response = self.client.post(copy_url, {
        "new_title": "Hello world 2",
        "new_slug": "hello-world-2",
        "new_parent_page": str(self.root_page.id),
        "copy_subpages": False,
        "publish_copies": False,
        "alias": False,
    })

    # The user should be redirected to the parent's explore page
    self.assertRedirects(
        response, reverse("wagtailadmin_explore", args=(self.root_page.id, )))

    # The copy should exist under the root page
    page_copy = self.root_page.get_children().filter(
        slug="hello-world-2").first()
    self.assertIsNotNone(page_copy)

    # It is a draft: not live, with unpublished changes pending
    self.assertFalse(page_copy.live)
    self.assertTrue(page_copy.has_unpublished_changes)

    # Ownership goes to the requesting user
    self.assertEqual(page_copy.owner, self.user)

    # copy_subpages was off, so no children were copied
    self.assertEqual(page_copy.get_children().count(), 0)

    # treebeard should report no consistency problems with the tree
    self.assertFalse(any(Page.find_problems()),
                     "treebeard found consistency problems")
def handle(self, **options):
    """Check the page tree for consistency problems and repair what it can.

    Fixes applied: pages with a missing specific-subclass record are deleted;
    bad depth/numchild values are repaired via treebeard's fix_tree; orphaned
    pages (and their descendants) are deleted after confirmation. Any
    remaining, unfixable problems are reported on stdout.
    """
    any_problems_fixed = False

    # A Page whose subclass row is gone is unusable — delete the base row too
    for page in Page.objects.all():
        try:
            page.specific
        except page.specific_class.DoesNotExist:
            self.stdout.write(
                "Page %d (%s) is missing a subclass record; deleting." %
                (page.id, page.title))
            any_problems_fixed = True
            page.delete()

    (bad_alpha, bad_path, orphans, bad_depth,
     bad_numchild) = Page.find_problems()

    if bad_depth:
        self.stdout.write("Incorrect depth value found for pages: %s" %
                          self.numberlist_to_string(bad_depth))
    if bad_numchild:
        self.stdout.write("Incorrect numchild value found for pages: %s" %
                          self.numberlist_to_string(bad_numchild))

    # depth/numchild problems are repairable in place by treebeard
    if bad_depth or bad_numchild:
        Page.fix_tree(destructive=False)
        any_problems_fixed = True

    if orphans:
        # The 'orphans' list as returned by treebeard only includes pages that are
        # missing an immediate parent; descendants of orphans are not included.
        # Deleting only the *actual* orphans is a bit silly (since it'll just create
        # more orphans), so generate a queryset that contains descendants as well.
        orphan_paths = Page.objects.filter(id__in=orphans).values_list(
            'path', flat=True)
        filter_conditions = []
        for path in orphan_paths:
            filter_conditions.append(Q(path__startswith=path))

        # combine filter_conditions into a single ORed condition
        final_filter = functools.reduce(operator.or_, filter_conditions)

        # build a queryset of all pages to be removed; this must be a vanilla Django
        # queryset rather than a treebeard MP_NodeQuerySet, so that we bypass treebeard's
        # custom delete() logic that would trip up on the very same corruption that we're
        # trying to fix here.
        pages_to_delete = models.query.QuerySet(Page).filter(final_filter)

        self.stdout.write("Orphaned pages found:")
        for page in pages_to_delete:
            self.stdout.write("ID %d: %s" % (page.id, page.title))
        self.stdout.write('')

        if options.get('interactive', True):
            yes_or_no = input("Delete these pages? [y/N] ")
            delete_orphans = yes_or_no.lower().startswith('y')
            self.stdout.write('')
        else:
            # Running tests, check for the "delete_orphans" option
            delete_orphans = options.get('delete_orphans', False)

        if delete_orphans:
            # len() evaluates the queryset before the rows disappear
            deletion_count = len(pages_to_delete)
            pages_to_delete.delete()
            self.stdout.write(
                "%d orphaned page%s deleted." %
                (deletion_count, "s" if deletion_count != 1 else ""))
            any_problems_fixed = True

    if any_problems_fixed:
        # re-run find_problems to see if any new ones have surfaced
        (bad_alpha, bad_path, orphans, bad_depth,
         bad_numchild) = Page.find_problems()

    if any((bad_alpha, bad_path, orphans, bad_depth, bad_numchild)):
        self.stdout.write("Remaining problems (cannot fix automatically):")
        if bad_alpha:
            self.stdout.write(
                "Invalid characters found in path for pages: %s" %
                self.numberlist_to_string(bad_alpha))
        if bad_path:
            self.stdout.write("Invalid path length found for pages: %s" %
                              self.numberlist_to_string(bad_path))
        if orphans:
            self.stdout.write("Orphaned pages found: %s" %
                              self.numberlist_to_string(orphans))
        if bad_depth:
            self.stdout.write("Incorrect depth value found for pages: %s" %
                              self.numberlist_to_string(bad_depth))
        if bad_numchild:
            self.stdout.write(
                "Incorrect numchild value found for pages: %s" %
                self.numberlist_to_string(bad_numchild))
    elif any_problems_fixed:
        self.stdout.write("All problems fixed.")
    else:
        self.stdout.write("No problems found.")
def test_subpage_deletion(self):
    """Bulk deletion removes targeted pages and their descendants, firing
    page_unpublished, pre_delete and post_delete for each of them."""
    # Connect handlers that record (sender, instance.id) for each signal
    unpublish_signals_received = []
    pre_delete_signals_received = []
    post_delete_signals_received = []

    def page_unpublished_handler(sender, instance, **kwargs):
        unpublish_signals_received.append((sender, instance.id))

    def pre_delete_handler(sender, instance, **kwargs):
        pre_delete_signals_received.append((sender, instance.id))

    def post_delete_handler(sender, instance, **kwargs):
        post_delete_signals_received.append((sender, instance.id))

    page_unpublished.connect(page_unpublished_handler)
    pre_delete.connect(pre_delete_handler)
    post_delete.connect(post_delete_handler)

    # Post to the bulk delete view
    response = self.client.post(self.url)

    # Should be redirected to explorer page
    self.assertEqual(response.status_code, 302)

    # treebeard should report no consistency problems with the tree
    self.assertFalse(any(Page.find_problems()),
                     "treebeard found consistency problems")

    # Check that the child pages to be deleted are gone
    for child_page in self.pages_to_be_deleted:
        self.assertFalse(
            SimplePage.objects.filter(id=child_page.id).exists())

    # Check that the child pages not to be deleted remain
    for child_page in self.pages_not_to_be_deleted:
        self.assertTrue(
            SimplePage.objects.filter(id=child_page.id).exists())

    # Check that the subpages are also gone
    for grandchild_pages in self.grandchildren_pages.values():
        for grandchild_page in grandchild_pages:
            self.assertFalse(
                SimplePage.objects.filter(id=grandchild_page.id).exists())

    # Check that the signals were fired for all child and grandchild pages
    for child_page, grandchild_pages in self.grandchildren_pages.items():
        self.assertIn((SimplePage, child_page.id),
                      unpublish_signals_received)
        self.assertIn((SimplePage, child_page.id),
                      pre_delete_signals_received)
        self.assertIn((SimplePage, child_page.id),
                      post_delete_signals_received)
        for grandchild_page in grandchild_pages:
            self.assertIn((SimplePage, grandchild_page.id),
                          unpublish_signals_received)
            self.assertIn((SimplePage, grandchild_page.id),
                          pre_delete_signals_received)
            self.assertIn((SimplePage, grandchild_page.id),
                          post_delete_signals_received)
    # FIX: removed the stray duplicate `assertEqual(response.status_code, 302)`
    # that trailed the loop — the redirect is already asserted above.
def handle(self, **options):
    """Audit the page tree and automatically repair the problems it can.

    Deletes pages whose specific-subclass record is missing, lets treebeard
    repair bad depth/numchild values, and (after confirmation) removes
    orphaned pages together with their descendants. Anything left over is
    reported on stdout as unfixable.
    """
    any_problems_fixed = False

    # A Page whose subclass row is gone is unusable — delete the base row too
    for page in Page.objects.all():
        try:
            page.specific
        except page.specific_class.DoesNotExist:
            self.stdout.write("Page %d (%s) is missing a subclass record; deleting." % (page.id, page.title))
            any_problems_fixed = True
            page.delete()

    (bad_alpha, bad_path, orphans, bad_depth, bad_numchild) = Page.find_problems()

    if bad_depth:
        self.stdout.write("Incorrect depth value found for pages: %s" % self.numberlist_to_string(bad_depth))
    if bad_numchild:
        self.stdout.write("Incorrect numchild value found for pages: %s" % self.numberlist_to_string(bad_numchild))

    # depth/numchild problems are repairable in place by treebeard
    if bad_depth or bad_numchild:
        Page.fix_tree(destructive=False)
        any_problems_fixed = True

    if orphans:
        # The 'orphans' list as returned by treebeard only includes pages that are
        # missing an immediate parent; descendants of orphans are not included.
        # Deleting only the *actual* orphans is a bit silly (since it'll just create
        # more orphans), so generate a queryset that contains descendants as well.
        orphan_paths = Page.objects.filter(id__in=orphans).values_list('path', flat=True)
        filter_conditions = []
        for path in orphan_paths:
            filter_conditions.append(Q(path__startswith=path))

        # combine filter_conditions into a single ORed condition
        final_filter = functools.reduce(operator.or_, filter_conditions)

        # build a queryset of all pages to be removed; this must be a vanilla Django
        # queryset rather than a treebeard MP_NodeQuerySet, so that we bypass treebeard's
        # custom delete() logic that would trip up on the very same corruption that we're
        # trying to fix here.
        pages_to_delete = models.query.QuerySet(Page).filter(final_filter)

        self.stdout.write("Orphaned pages found:")
        for page in pages_to_delete:
            self.stdout.write("ID %d: %s" % (page.id, page.title))
        self.stdout.write('')

        if options.get('interactive', True):
            yes_or_no = input("Delete these pages? [y/N] ")
            delete_orphans = yes_or_no.lower().startswith('y')
            self.stdout.write('')
        else:
            # Running tests, check for the "delete_orphans" option
            delete_orphans = options.get('delete_orphans', False)

        if delete_orphans:
            # len() evaluates the queryset before the rows disappear
            deletion_count = len(pages_to_delete)
            pages_to_delete.delete()
            self.stdout.write(
                "%d orphaned page%s deleted." % (deletion_count, "s" if deletion_count != 1 else "")
            )
            any_problems_fixed = True

    if any_problems_fixed:
        # re-run find_problems to see if any new ones have surfaced
        (bad_alpha, bad_path, orphans, bad_depth, bad_numchild) = Page.find_problems()

    if any((bad_alpha, bad_path, orphans, bad_depth, bad_numchild)):
        self.stdout.write("Remaining problems (cannot fix automatically):")
        if bad_alpha:
            self.stdout.write(
                "Invalid characters found in path for pages: %s" % self.numberlist_to_string(bad_alpha)
            )
        if bad_path:
            self.stdout.write("Invalid path length found for pages: %s" % self.numberlist_to_string(bad_path))
        if orphans:
            self.stdout.write("Orphaned pages found: %s" % self.numberlist_to_string(orphans))
        if bad_depth:
            self.stdout.write("Incorrect depth value found for pages: %s" % self.numberlist_to_string(bad_depth))
        if bad_numchild:
            self.stdout.write(
                "Incorrect numchild value found for pages: %s" % self.numberlist_to_string(bad_numchild)
            )
    elif any_problems_fixed:
        self.stdout.write("All problems fixed.")
    else:
        self.stdout.write("No problems found.")