    def test_page_delete_post(self):
        # Connect a mock signal handler to page_unpublished signal
        mock_handler = mock.MagicMock()
        page_unpublished.connect(mock_handler)

        # Post
        response = self.client.post(
            reverse('wagtailadmin_pages:delete', args=(self.child_page.id, )))

        # Should be redirected to explorer page
        self.assertRedirects(
            response,
            reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        # treebeard should report no consistency problems with the tree
        self.assertFalse(any(Page.find_problems()),
                         'treebeard found consistency problems')

        # Check that the page is gone
        self.assertEqual(
            Page.objects.filter(path__startswith=self.root_page.path,
                                slug='hello-world').count(), 0)

        # Check that the page_unpublished signal was fired
        self.assertEqual(mock_handler.call_count, 1)
        mock_call = mock_handler.mock_calls[0][2]

        self.assertEqual(mock_call['sender'], self.child_page.specific_class)
        self.assertEqual(mock_call['instance'], self.child_page)
        self.assertIsInstance(mock_call['instance'],
                              self.child_page.specific_class)

    def test_page_delete_notlive_post(self):
        # Same as above, but this makes sure the page_unpublished signal is not fired
        # if the page is not live when it is deleted

        # Unpublish the page
        self.child_page.live = False
        self.child_page.save()

        # Connect a mock signal handler to page_unpublished signal
        mock_handler = mock.MagicMock()
        page_unpublished.connect(mock_handler)

        # Post
        response = self.client.post(
            reverse('wagtailadmin_pages:delete', args=(self.child_page.id, )))

        # Should be redirected to explorer page
        self.assertRedirects(
            response,
            reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        # treebeard should report no consistency problems with the tree
        self.assertFalse(any(Page.find_problems()),
                         'treebeard found consistency problems')

        # Check that the page is gone
        self.assertEqual(
            Page.objects.filter(path__startswith=self.root_page.path,
                                slug='hello-world').count(), 0)

        # Check that the page_unpublished signal was not fired
        self.assertEqual(mock_handler.call_count, 0)

    def test_expired_page_will_be_unpublished(self):
        # Connect a mock signal handler to page_unpublished signal
        signal_fired = [False]
        signal_page = [None]

        def page_unpublished_handler(sender, instance, **kwargs):
            signal_fired[0] = True
            signal_page[0] = instance

        page_unpublished.connect(page_unpublished_handler)

        page = SimplePage(
            title="Hello world!",
            slug="hello-world",
            content="hello",
            live=True,
            has_unpublished_changes=False,
            expire_at=timezone.now() - timedelta(days=1),
        )
        self.root_page.add_child(instance=page)

        p = Page.objects.get(slug='hello-world')
        self.assertTrue(p.live)

        management.call_command('publish_scheduled_pages')

        p = Page.objects.get(slug='hello-world')
        self.assertFalse(p.live)
        self.assertTrue(p.has_unpublished_changes)
        self.assertTrue(p.expired)

        # Check that the page_unpublished signal was fired
        self.assertTrue(signal_fired[0])
        self.assertEqual(signal_page[0], page)
        self.assertEqual(signal_page[0], signal_page[0].specific)
Example 4
    def test_unpublish_view_post(self):
        """
        This posts to the unpublish view and checks that the page was unpublished
        """
        # Connect a mock signal handler to page_unpublished signal
        mock_handler = mock.MagicMock()
        page_unpublished.connect(mock_handler)

        # Post to the unpublish page
        response = self.client.post(
            reverse("wagtailadmin_pages:unpublish", args=(self.page.id,))
        )

        # Should be redirected to explorer page
        self.assertRedirects(
            response, reverse("wagtailadmin_explore", args=(self.root_page.id,))
        )

        # Check that the page was unpublished
        self.assertFalse(SimplePage.objects.get(id=self.page.id).live)

        # Check that the page_unpublished signal was fired
        self.assertEqual(mock_handler.call_count, 1)
        mock_call = mock_handler.mock_calls[0][2]

        self.assertEqual(mock_call["sender"], self.page.specific_class)
        self.assertEqual(mock_call["instance"], self.page)
        self.assertIsInstance(mock_call["instance"], self.page.specific_class)
Example 5
    def test_unpublish(self):
        # set up a listener for the unpublish signal
        unpublish_signals_fired = []

        def page_unpublished_handler(sender, instance, **kwargs):
            unpublish_signals_fired.append((sender, instance))

        page_unpublished.connect(page_unpublished_handler)

        events_index = Page.objects.get(url_path='/home/events/')
        events_index.get_children().unpublish()

        # Previously-live children of event index should now be non-live
        christmas = EventPage.objects.get(url_path='/home/events/christmas/')
        saint_patrick = SingleEventPage.objects.get(
            url_path='/home/events/saint-patrick/')
        unpublished_event = EventPage.objects.get(
            url_path='/home/events/tentative-unpublished-event/')

        self.assertFalse(christmas.live)
        self.assertFalse(saint_patrick.live)

        # Check that a signal was fired for each unpublished page
        self.assertIn((EventPage, christmas), unpublish_signals_fired)
        self.assertIn((SingleEventPage, saint_patrick),
                      unpublish_signals_fired)

        # a signal should not be fired for pages that were in the queryset
        # but already unpublished
        self.assertNotIn((EventPage, unpublished_event),
                         unpublish_signals_fired)
Example 8
    def test_unpublish_view_post(self):
        """
        This posts to the unpublish view and checks that the page was unpublished
        """
        # Connect a mock signal handler to page_unpublished signal
        mock_handler = mock.MagicMock()
        page_unpublished.connect(mock_handler)

        # Post to the unpublish page
        response = self.client.post(self.url)

        # Should be redirected to explorer page
        self.assertEqual(response.status_code, 302)

        # Check that the child pages were unpublished
        for child_page in self.pages_to_be_unpublished:
            self.assertFalse(SimplePage.objects.get(id=child_page.id).live)

        # Check that the child pages not to be unpublished remain
        for child_page in self.pages_not_to_be_unpublished:
            self.assertTrue(SimplePage.objects.get(id=child_page.id).live)

        # Check that the page_unpublished signal was fired
        self.assertEqual(mock_handler.call_count,
                         len(self.pages_to_be_unpublished))

        for i, child_page in enumerate(self.pages_to_be_unpublished):
            mock_call = mock_handler.mock_calls[i][2]
            self.assertEqual(mock_call["sender"], child_page.specific_class)
            self.assertEqual(mock_call["instance"], child_page)
            self.assertIsInstance(mock_call["instance"],
                                  child_page.specific_class)

    def test_subpage_deletion(self):
        # Connect mock signal handlers to page_unpublished, pre_delete and post_delete signals
        unpublish_signals_received = []
        pre_delete_signals_received = []
        post_delete_signals_received = []

        def page_unpublished_handler(sender, instance, **kwargs):
            unpublish_signals_received.append((sender, instance.id))

        def pre_delete_handler(sender, instance, **kwargs):
            pre_delete_signals_received.append((sender, instance.id))

        def post_delete_handler(sender, instance, **kwargs):
            post_delete_signals_received.append((sender, instance.id))

        page_unpublished.connect(page_unpublished_handler)
        pre_delete.connect(pre_delete_handler)
        post_delete.connect(post_delete_handler)

        # Post
        response = self.client.post(
            reverse('wagtailadmin_pages:delete', args=(self.child_index.id, )))

        # Should be redirected to explorer page
        self.assertRedirects(
            response,
            reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        # treebeard should report no consistency problems with the tree
        self.assertFalse(any(Page.find_problems()),
                         'treebeard found consistency problems')

        # Check that the page is gone
        self.assertFalse(
            StandardIndex.objects.filter(id=self.child_index.id).exists())
        self.assertFalse(Page.objects.filter(id=self.child_index.id).exists())

        # Check that the subpage is also gone
        self.assertFalse(
            StandardChild.objects.filter(id=self.grandchild_page.id).exists())
        self.assertFalse(
            Page.objects.filter(id=self.grandchild_page.id).exists())

        # Check that the signals were fired for both pages
        self.assertIn((StandardIndex, self.child_index.id),
                      unpublish_signals_received)
        self.assertIn((StandardChild, self.grandchild_page.id),
                      unpublish_signals_received)

        self.assertIn((StandardIndex, self.child_index.id),
                      pre_delete_signals_received)
        self.assertIn((StandardChild, self.grandchild_page.id),
                      pre_delete_signals_received)

        self.assertIn((StandardIndex, self.child_index.id),
                      post_delete_signals_received)
        self.assertIn((StandardChild, self.grandchild_page.id),
                      post_delete_signals_received)
Example 10
    def test_bulk_delete_notlive_post(self):
        # Same as above, but this makes sure the page_unpublished signal is not fired
        # for the page that is not live when it is deleted

        # Unpublish the first child page
        page_to_be_unpublished = self.pages_to_be_deleted[0]
        page_to_be_unpublished.unpublish(user=self.user)

        # Connect a mock signal handler to page_unpublished signal
        mock_handler = mock.MagicMock()
        page_unpublished.connect(mock_handler)

        # Post
        response = self.client.post(self.url)

        # Should be redirected to explorer page
        self.assertEqual(response.status_code, 302)

        # treebeard should report no consistency problems with the tree
        self.assertFalse(any(Page.find_problems()),
                         "treebeard found consistency problems")

        # Check that the child pages to be deleted are gone
        for child_page in self.pages_to_be_deleted:
            self.assertFalse(
                SimplePage.objects.filter(id=child_page.id).exists())

        # Check that the child pages not to be deleted remain
        for child_page in self.pages_not_to_be_deleted:
            self.assertTrue(
                SimplePage.objects.filter(id=child_page.id).exists())

        # Check that the page_unpublished signal was not fired for the already-unpublished page
        num_descendants = sum(
            len(v) for v in self.grandchildren_pages.values())
        self.assertEqual(mock_handler.call_count,
                         len(self.pages_to_be_deleted) + num_descendants - 1)

        # Check that signals were fired only for the other pages
        i = 0
        for child_page in self.pages_to_be_deleted:
            if child_page.id != page_to_be_unpublished.id:
                mock_call = mock_handler.mock_calls[i][2]
                i += 1
                self.assertEqual(mock_call["sender"],
                                 child_page.specific_class)
                self.assertEqual(mock_call["instance"], child_page)
                self.assertIsInstance(mock_call["instance"],
                                      child_page.specific_class)
            for grandchildren_page in self.grandchildren_pages.get(
                    child_page, []):
                mock_call = mock_handler.mock_calls[i][2]
                i += 1
                self.assertEqual(mock_call["sender"],
                                 grandchildren_page.specific_class)
                self.assertEqual(mock_call["instance"], grandchildren_page)
                self.assertIsInstance(mock_call["instance"],
                                      grandchildren_page.specific_class)

def register_signal_handlers():
    # Get list of models that are page types
    Page = apps.get_model('wagtailcore', 'Page')
    indexed_models = [model for model in apps.get_models() if issubclass(model, Page)]

    # Loop through list and register signal handlers for each one
    for model in indexed_models:
        page_published.connect(page_published_signal_handler, sender=model)
        page_unpublished.connect(page_unpublished_signal_handler, sender=model)
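
The two receiver functions registered above are not included in this snippet. A minimal sketch of what they might look like, assuming only Django's receiver calling convention (receivers are called with keyword arguments, so **kwargs is required) and the instance argument Wagtail sends with both signals (page_published additionally sends the publishing revision):

def page_published_signal_handler(sender, instance, revision=None, **kwargs):
    # Hypothetical handler: e.g. add or refresh the page in a search index.
    # page_published passes the page as instance and the published revision.
    pass


def page_unpublished_signal_handler(sender, instance, **kwargs):
    # Hypothetical handler: e.g. remove the page from the search index.
    # page_unpublished passes only the page as instance.
    pass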
Example 12
    def subscribe(cls):
        for model_class in cls.model_classes:
            page_published.connect(
                receiver=cls.populate,
                sender=model_class,
                dispatch_uid=model_class.__name__,
            )
            page_unpublished.connect(
                receiver=cls.delete,
                sender=model_class,
                dispatch_uid=model_class.__name__,
            )
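
The populate and delete receivers connected here are not part of the snippet. A hypothetical sketch, assuming they are classmethods on the same class that build and tear down whatever derived data the class keeps for a page:

    @classmethod
    def populate(cls, sender, instance, **kwargs):
        # Assumed behaviour: rebuild the derived data for the published page.
        pass

    @classmethod
    def delete(cls, sender, instance, **kwargs):
        # Assumed behaviour: drop the derived data when the page is unpublished.
        pass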
Example 13
def register_signal_handlers():
    Image = get_image_model()
    Document = get_document_model()

    for model in get_page_models():
        page_published.connect(purge_page_from_cache, sender=model)
        page_unpublished.connect(purge_page_from_cache, sender=model)

    post_save.connect(purge_image_from_cache, sender=Image)
    post_delete.connect(purge_image_from_cache, sender=Image)
    post_save.connect(purge_document_from_cache, sender=Document)
    post_delete.connect(purge_document_from_cache, sender=Document)
Example 14
    def test_bulk_delete_post(self):
        # Connect a mock signal handler to page_unpublished signal
        mock_handler = mock.MagicMock()
        page_unpublished.connect(mock_handler)

        # Post
        response = self.client.post(self.url)

        # Should be redirected to explorer page
        self.assertEqual(response.status_code, 302)

        # treebeard should report no consistency problems with the tree
        self.assertFalse(any(Page.find_problems()),
                         "treebeard found consistency problems")

        # Check that the child pages to be deleted are gone
        for child_page in self.pages_to_be_deleted:
            self.assertFalse(
                SimplePage.objects.filter(id=child_page.id).exists())

        # Check that the child pages not to be deleted remain
        for child_page in self.pages_not_to_be_deleted:
            self.assertTrue(
                SimplePage.objects.filter(id=child_page.id).exists())

        # Check that the page_unpublished signal was fired for all pages
        num_descendants = sum(
            len(i) for i in self.grandchildren_pages.values())
        self.assertEqual(mock_handler.call_count,
                         len(self.pages_to_be_deleted) + num_descendants)

        i = 0
        for child_page in self.pages_to_be_deleted:
            mock_call = mock_handler.mock_calls[i][2]
            i += 1
            self.assertEqual(mock_call["sender"], child_page.specific_class)
            self.assertEqual(mock_call["instance"], child_page)
            self.assertIsInstance(mock_call["instance"],
                                  child_page.specific_class)
            for grandchildren_page in self.grandchildren_pages.get(
                    child_page, []):
                mock_call = mock_handler.mock_calls[i][2]
                i += 1
                self.assertEqual(mock_call["sender"],
                                 grandchildren_page.specific_class)
                self.assertEqual(mock_call["instance"], grandchildren_page)
                self.assertIsInstance(mock_call["instance"],
                                      grandchildren_page.specific_class)
Example 15
    def ready(self):
        user_model = get_user_model()
        post_save.connect(user_save_callback, sender=user_model)
        # Interesting situation: we use this pattern to account for
        # scrolling bugs in IE:
        # http://snipplr.com/view/518/
        # yet, url(null) trips up the ManifestStaticFilesStorage, so we
        # monkeypatch the regex so that url(null) is ignored

        storage.HashedFilesMixin.patterns = (("*.css", (
            r"""(url\((?!null)['"]{0,1}\s*(.*?)["']{0,1}\))""",
            (r"""(@import\s*["']\s*(.*?)["'])""", """@import url("%s")"""),
        )), )

        from v1.models.enforcement_action_page import EnforcementActionPage
        page_published.connect(break_enforcement_cache,
                               sender=EnforcementActionPage)
        page_unpublished.connect(break_enforcement_cache,
                                 sender=EnforcementActionPage)
        post_page_move.connect(break_enforcement_cache,
                               sender=EnforcementActionPage)
Example 16
    def subscribe(cls):
        page_published.connect(
            receiver=cls.populate,
            sender=cls.model,
            dispatch_uid=cls.model.__name__,
        )
        page_unpublished.connect(
            receiver=cls.delete,
            sender=cls.model,
            dispatch_uid=cls.model.__name__,
        )
        for model in cls.subscriptions:
            post_save.connect(
                receiver=cls.populate_many,
                sender=model,
                dispatch_uid=f'{cls.model.__name__}-{model.__name__}',
            )
            page_unpublished.connect(
                receiver=cls.populate_many,
                sender=model,
                dispatch_uid=f'{cls.model.__name__}-{model.__name__}',
            )
Example 17
    def user_can_delete_obj(self, user, obj):
        return False


class NetlifyDeploymentAdmin(ModelAdmin):
    model = Deployment
    permission_helper_class = NetlifyPermissions
    menu_label = 'Netlify Deployments'
    menu_icon = 'collapse-up'
    menu_order = 1000
    list_display = ('deployment_created', 'deployment_time', 'deployment_created_by')
    form_fields_exclude = ('deployment_created_by',)


modeladmin_register(NetlifyDeploymentAdmin)


def trigger_deployment(**kwargs):
    if settings.NETLIFY_AUTO_DEPLOY:
        user = ''
        if kwargs.get('revision') is not None:
            user = kwargs.get('revision').user

        deployment = Deployment(deployment_created_by=user)
        deployment.save()


page_published.connect(trigger_deployment)
page_unpublished.connect(trigger_deployment)
Example 18
from wagtail.core.signals import page_published, page_unpublished

from .tasks import invalidate_entire_cdn


def purge_cdn_on_publish(signal, **kwargs):
    # TODO: refine this to be more selective and only invalidate the
    # keys we know we need to. At the moment we're erring on the side
    # of caution.
    invalidate_entire_cdn.delay()


page_published.connect(purge_cdn_on_publish)
page_unpublished.connect(purge_cdn_on_publish)
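
The invalidate_entire_cdn task imported from .tasks is not shown here. A minimal sketch of the kind of Celery task the .delay() call implies; the task body is an assumption, only the shared-task shape follows from the snippet:

from celery import shared_task


@shared_task
def invalidate_entire_cdn():
    # Assumed body: ask the CDN provider to invalidate all cached paths.
    # The real implementation lives in .tasks and is not part of this example.
    pass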
Example 19
from wagtail.core.signals import page_published, page_unpublished
from wagtail.contrib.frontend_cache.utils import purge_page_from_cache

from blog.models import BlogPost


def invalidate_frontend_cache_for_blog_index_page(sender, instance, **kwargs):
    """Invalidate the frontend cache for the parent BlogIndexPage of a
    BlogPost."""
    blog_post = instance
    # Recommended way to get parent page from
    # https://github.com/wagtail/wagtail/issues/2779#issuecomment-228472829
    blog_index_page = blog_post.get_parent()
    if blog_index_page:
        purge_page_from_cache(blog_index_page)


page_published.connect(invalidate_frontend_cache_for_blog_index_page,
                       sender=BlogPost)
page_unpublished.connect(invalidate_frontend_cache_for_blog_index_page,
                         sender=BlogPost)

from django.core.management import call_command
from wagtail.core.signals import page_published, page_unpublished


def bake_site(sender, **kwargs):
    '''
    This helper function will trigger a re-build of the site automatically when
    a page is published or unpublished so that the static HTML being served
    gets synced with the changes published to the dynamic app.
    '''
    # TODO: either extend this function or call a different command so that we
    #       re-build and also re-deploy here.
    call_command('build')


page_published.connect(bake_site)
page_unpublished.connect(bake_site)
Example 21
    def user_can_edit_obj(self, user, obj):
        return False

    def user_can_delete_obj(self, user, obj):
        return False


class NetlifyDeploymentAdmin(ModelAdmin):
    model = Deployment
    permission_helper_class = NetlifyPermissions
    menu_label = 'Netlify Deployments'
    menu_icon = 'collapse-up'
    menu_order = 1000
    list_display = ('deployment_created', 'deployment_time',
                    'deployment_created_by')
    form_fields_exclude = ('deployment_created_by', )


modeladmin_register(NetlifyDeploymentAdmin)


def trigger_deployment(**kwargs):
    if settings.NETLIFY_AUTO_DEPLOY is not False:
        # page_unpublished does not provide a revision, so fall back to an empty user
        revision = kwargs.get('revision')
        user = revision.user if revision is not None else ''
        deployment = Deployment(deployment_created_by=user)
        deployment.save()


page_published.connect(trigger_deployment)
page_unpublished.connect(trigger_deployment)
Example 22
def build_news_feed(sender, instance, **kwargs):
    """
    Build a static version of the news feed when a LibNewsPage is
    published or unpublished. Query the Django Rest Framework
    (Wagtail v2 API) and save the results to a static JSON file
    in the static files directory.

    Args:
        sender: LibNewsPage class

        instance: LibNewsPage instance

    Returns:
        None but writes a file to the static directory
    """
    clear_cache()
    cache.delete('news_cats')
    drf_url = instance.get_site().root_url + DRF_NEWS_FEED
    try:
        serialized_data = urlopen(drf_url).read()
        data = json.loads(serialized_data)
        with open(STATIC_NEWS_FEED, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=None)
    except URLError:
        # The feed URL is unreachable (e.g. when running unit tests); skip the static build
        return None


page_published.connect(build_news_feed, sender=LibNewsPage)
page_unpublished.connect(build_news_feed, sender=LibNewsPage)
Example 23
    menu_icon = 'doc-full'
    add_to_settings_menu = True


modeladmin_register(ArticleAdmin)


def _static_build_async(force=False,
                        pipeline=settings.STATIC_BUILD_PIPELINE,
                        **kwargs):
    """Calls each command in the static build pipeline in turn."""
    log_prefix = 'Static build task'
    for name, command in pipeline:
        if settings.DEBUG and not force:
            logger.info(f'{log_prefix} ‘{name}’ skipped.')
        else:
            logger.info(f'{log_prefix} ‘{name}’ started.')
            call_command(command)
            logger.info(f'{log_prefix} ‘{name}’ finished.')


def static_build(**kwargs):
    """Callback for Wagtail publish and unpublish signals."""
    # Spawn a process to do the actual build.
    process = Process(target=_static_build_async, kwargs=kwargs)
    process.start()


page_published.connect(static_build)
page_unpublished.connect(static_build)
Example 24
    content_panels = AbstractEmailForm.content_panels + [
        ImageChooserPanel('image'),
        StreamFieldPanel('body'),
        InlinePanel('form_fields', label="Form fields"),
        FieldPanel('thank_you_text', classname="full"),
        MultiFieldPanel([
            FieldRowPanel([
                FieldPanel('from_address', classname="col6"),
                FieldPanel('to_address', classname="col6"),
            ]),
            FieldPanel('subject'),
        ], "Email"),
    ]


def deploy_to_netlify_on_change(**kwargs):
    import requests
    from django.conf import settings
    try:
        netlify_deploy_hook_url = getattr(settings, 'NETLIFY_DEPLOY_HOOK_URL')
    except AttributeError:
        return
    if not netlify_deploy_hook_url:
        return
    r = requests.post(netlify_deploy_hook_url)
    r.raise_for_status()


page_published.connect(deploy_to_netlify_on_change)
page_unpublished.connect(deploy_to_netlify_on_change)
Example 25
from wagtail.core.signals import page_published, page_unpublished

from search.utils.wagtail import delete_wagtail_page, index_wagtail_page


def index_wagtail_page_(sender, instance, **kwargs):
    return index_wagtail_page(instance)


def delete_wagtail_page_(sender, instance, **kwargs):
    return delete_wagtail_page(instance)


page_published.connect(index_wagtail_page_)
page_unpublished.connect(delete_wagtail_page_)
Example 26
    def test_subpage_deletion(self):
        # Connect mock signal handlers to page_unpublished, pre_delete and post_delete signals
        unpublish_signals_received = []
        pre_delete_signals_received = []
        post_delete_signals_received = []

        def page_unpublished_handler(sender, instance, **kwargs):
            unpublish_signals_received.append((sender, instance.id))

        def pre_delete_handler(sender, instance, **kwargs):
            pre_delete_signals_received.append((sender, instance.id))

        def post_delete_handler(sender, instance, **kwargs):
            post_delete_signals_received.append((sender, instance.id))

        page_unpublished.connect(page_unpublished_handler)
        pre_delete.connect(pre_delete_handler)
        post_delete.connect(post_delete_handler)

        # Post
        response = self.client.post(self.url)

        # Should be redirected to explorer page
        self.assertEqual(response.status_code, 302)

        # treebeard should report no consistency problems with the tree
        self.assertFalse(any(Page.find_problems()),
                         "treebeard found consistency problems")

        # Check that the child pages to be deleted are gone
        for child_page in self.pages_to_be_deleted:
            self.assertFalse(
                SimplePage.objects.filter(id=child_page.id).exists())

        # Check that the child pages not to be deleted remain
        for child_page in self.pages_not_to_be_deleted:
            self.assertTrue(
                SimplePage.objects.filter(id=child_page.id).exists())

        # Check that the subpages are also gone
        for grandchild_pages in self.grandchildren_pages.values():
            for grandchild_page in grandchild_pages:
                self.assertFalse(
                    SimplePage.objects.filter(id=grandchild_page.id).exists())

        # Check that the signals were fired for all child and grandchild pages
        for child_page, grandchild_pages in self.grandchildren_pages.items():
            self.assertIn((SimplePage, child_page.id),
                          unpublish_signals_received)
            self.assertIn((SimplePage, child_page.id),
                          pre_delete_signals_received)
            self.assertIn((SimplePage, child_page.id),
                          post_delete_signals_received)
            for grandchild_page in grandchild_pages:
                self.assertIn((SimplePage, grandchild_page.id),
                              unpublish_signals_received)
                self.assertIn((SimplePage, grandchild_page.id),
                              pre_delete_signals_received)
                self.assertIn((SimplePage, grandchild_page.id),
                              post_delete_signals_received)

        self.assertEqual(response.status_code, 302)
Example 27
    def setup(self):
        # Wagtail Specific Events
        page_published.connect(self.handle_save)
        page_unpublished.connect(self.handle_delete)
        pre_page_move.connect(self.handle_delete)
        post_page_move.connect(self.handle_save)

def register_signal_handlers():
    page_published.connect(handle_publish,
                           dispatch_uid='wagtailbakery_page_published')
    page_unpublished.connect(handle_unpublish,
                             dispatch_uid='wagtailbakery_page_unpublished')
Example 30
        try:
            page_files = {
                s3_obj["Key"]
                for s3_obj in page["Contents"]
                if splitext(s3_obj["Key"])[1] == '.html'
            }
            bucket_html_keys.update(page_files)
        except KeyError:
            continue

    # get diff of bucket keys vs uploaded s3 keys
    bucket_html_keys_2_remove = bucket_html_keys.difference(
        s3_html_keys_2_upload)

    # Upload the rendered HTML files for this deployment
    for f in html_files_2_upload:
        s3_client.upload_file(
            Filename=f["Filename"],
            Bucket=settings.AWS_STORAGE_BUCKET_NAME_DEPLOYMENT,
            Key=f["Key"],
            ExtraArgs={'ContentType': 'text/html'})

    # Delete the stale HTML keys that are no longer part of the deployment
    for key in bucket_html_keys_2_remove:
        s3_client.delete_object(
            Bucket=settings.AWS_STORAGE_BUCKET_NAME_DEPLOYMENT, Key=key)


page_published.connect(prerender_pages)
page_unpublished.connect(prerender_pages)