def test_go_live_page_will_be_published(self):
    """A page whose approved ``go_live_at`` has passed is published by the
    ``publish_scheduled_pages`` management command, and ``page_published``
    fires with the specific page instance."""
    # Connect a mock signal handler to page_published signal.
    # One-element lists are used so the closure can mutate them.
    signal_fired = [False]
    signal_page = [None]

    def page_published_handler(sender, instance, **kwargs):
        signal_fired[0] = True
        signal_page[0] = instance
    page_published.connect(page_published_handler)

    # Create an unpublished page whose go-live time is already in the past.
    page = SimplePage(
        title="Hello world!",
        slug="hello-world",
        live=False,
        go_live_at=timezone.now() - timedelta(days=1),
    )
    self.root_page.add_child(instance=page)

    # Approve the revision for scheduled publishing (also in the past).
    page.save_revision(approved_go_live_at=timezone.now() - timedelta(days=1))

    # Precondition: the page is not live and a scheduled revision exists.
    p = Page.objects.get(slug='hello-world')
    self.assertFalse(p.live)
    self.assertTrue(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())

    management.call_command('publish_scheduled_pages')

    # The command must publish the page and consume the scheduled revision.
    p = Page.objects.get(slug='hello-world')
    self.assertTrue(p.live)
    self.assertFalse(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())

    # Check that the page_published signal was fired with the specific
    # (most-derived) page instance, not a generic Page.
    self.assertTrue(signal_fired[0])
    self.assertEqual(signal_page[0], page)
    self.assertEqual(signal_page[0], signal_page[0].specific)
def ready(self):
    """Wire venue-related signal receivers once the app registry is ready."""
    # Imports are deferred into ready() so models are not touched before
    # Django's app loading has finished.
    from home.models import VenuePage
    from venues.models import VenueImage
    from venues.signals import populate_venue, remove_image

    # Re-populate venue data whenever a VenuePage is published...
    page_published.connect(populate_venue, sender=VenuePage)
    # ...and clean up the stored image when a VenueImage is deleted.
    pre_delete.connect(remove_image, sender=VenueImage)
def register_signal_handlers():
    """Attach publish/unpublish index handlers to every Page subclass."""
    # Collect every registered model that is a page type.
    page_models = []
    for candidate in models.get_models():
        if issubclass(candidate, Page):
            page_models.append(candidate)

    # Register both lifecycle handlers for each page model found.
    for page_model in page_models:
        page_published.connect(page_published_signal_handler, sender=page_model)
        page_unpublished.connect(page_unpublished_signal_handler, sender=page_model)
def register_signal_handlers():
    """
    Connect handlers to page publish signals.

    dispatch_uid makes registration idempotent if this runs more than once.
    """
    signal_table = (
        (page_published, handle_publish, 'wagtailbakery_page_published'),
        (page_unpublished, handle_unpublish, 'wagtailbakery_page_unpublished'),
    )
    for signal, handler, uid in signal_table:
        signal.connect(handler, dispatch_uid=uid)
def register_signal_handlers():
    """Register search-index signal handlers for every page model."""
    # Resolve the Page base lazily through the app registry.
    Page = apps.get_model('wagtailcore', 'Page')

    # Filter the full model registry down to page types only.
    page_models = [m for m in apps.get_models() if issubclass(m, Page)]

    for page_model in page_models:
        page_published.connect(page_published_signal_handler, sender=page_model)
        page_unpublished.connect(page_unpublished_signal_handler, sender=page_model)
def register_signal_handlers():
    """Connect publish and delete handlers for every Page subclass."""
    for candidate in models.get_models():
        # Skip anything that is not a page type.
        if not issubclass(candidate, Page):
            continue
        page_published.connect(page_published_signal_handler, sender=candidate)
        post_delete.connect(post_delete_signal_handler, sender=candidate)
def register_signal_handlers():
    """Wire cache-purge handlers for pages, images and documents."""
    image_model = get_image_model()

    # Pages: purge the cached page on both publish and unpublish.
    for page_model in PAGE_MODEL_CLASSES:
        page_published.connect(purge_page_from_cache, sender=page_model)
        page_unpublished.connect(purge_page_from_cache, sender=page_model)

    # Images: purge on save and on delete.
    post_save.connect(purge_image_from_cache, sender=image_model)
    post_delete.connect(purge_image_from_cache, sender=image_model)

    # Documents: purge on save and on delete.
    post_save.connect(purge_document_from_cache, sender=Document)
    post_delete.connect(purge_document_from_cache, sender=Document)
} ] } if getattr(instance, 'summary', None): payload['attachments'][0]['text'] = BeautifulSoup( instance.summary, 'html5lib' ).getText() if getattr(instance, 'cover_image', None): payload['attachments'][0]['image_url'] = instance \ .cover_image \ .get_rendition('fill-100x100') \ .url timeout_seconds = 3 try: r = requests.post( webhook_url, data=json.dumps(payload), timeout=timeout_seconds ) logger.info('%s published to Slack, status %s', instance.title, r.status_code) # noqa except RequestException as e: logger.error(e) sentry.captureException() page_published.connect(send_to_slack)
expires_at=expires_at ) password_history.save() user.temporarylockout_set.all().delete() def user_save_callback(sender, **kwargs): user = kwargs['instance'] if kwargs['created']: if user.is_superuser: # If a superuser was created, don't expire its password. new_phi(user, locked_days=0) else: # If a regular user was just created, force a new password to be # set right away by expiring the password and unlocking it. new_phi(user, locked_days=0, expiration_days=0) else: current_password_history = user.passwordhistoryitem_set.latest() if user.password != current_password_history.encrypted_password: new_phi(user) def invalidate_post_preview(sender, **kwargs): instance = kwargs['instance'] caches['post_preview'].delete(instance.post_preview_cache_key) page_published.connect(invalidate_post_preview)
instance = kwargs['instance'] # Only post new post. No updates. if instance.first_published_at != instance.last_published_at: return if settings.DEBUG: discord_url = 'http://example.com/' # change this for local test else: with open('/etc/discord_hook_url.txt') as f: discord_url = f.read().strip() excerpt = render_to_string('home/includes/excerpt.html', {'entry': instance}) # I tryed to convert excerpt to markdown using tomd without success values = { "content": "Breaking news on OSR website !", "embeds": [{ "title": instance.title, "url": "https://openstudyroom.org" + instance.url, "description": excerpt, }] } r = requests.post(discord_url, json=values) r.raise_for_status() # Register two receivers page_published.connect(send_to_discord, sender=EntryPage) page_published.connect(send_to_discord, sender=StreamFieldEntryPage)
"avatar": "https://s.gravatar.com/avatar/%s" % iconMd5, }, "title": "published page %s. " % instance.title, "content": "test", "external_thread_id": "wagtail", "thread": { "title": "Page published", "fields": [{"label": "Dustiness", "value": "5 - severe"}], "body": "The page was published", "external_url": instance.full_url, "status": { "color": "purple", "value": "Published" } } } try: r = requests.post( curl, headers={"content-type": "application/json"}, data=json.dumps(values) ) logger.info('%s published to Flowdock, status %s', instance.title, r.status_code) # noqa except RequestException as e: logger.error(e) # Register a receiver page_published.connect(send_to_flowdock)
url = models.URLField(null=True) deployment_url = models.URLField(null=True) datetime_started = models.DateTimeField(auto_now_add=True, help_text='deployment started') datetime_finished = models.DateTimeField('deployment completed', null=True) def postpone(function): """ cheap aysnc, see https://stackoverflow.com/a/28913218 """ def decorator(*args, **kwargs): t = Thread(target=function, args=args, kwargs=kwargs) t.daemon = True t.start() return decorator @postpone def deploy(sender, **kwargs): """ build static pages, then send incremental changes to netlify """ call_command('build') call_command('netlify') connection.close() if hasattr(settings, 'NETLIFY_AUTO_DEPLOY') and settings.NETLIFY_AUTO_DEPLOY == False: pass else: page_published.connect(deploy)
# Sets all the revisions for a page's attribute to False when it's called
def update_all_revisions(instance, attr):
    """Force *attr* to False in the serialized content of every revision."""
    for rev in instance.revisions.all():
        data = json.loads(rev.content_json)
        if data[attr]:
            data[attr] = False
            # ``unicode`` indicates this module targets Python 2.
            rev.content_json = unicode(json.dumps(data), 'utf-8')
            rev.save()


def unshare_all_revisions(sender, **kwargs):
    """Mark every revision of the page as not shared."""
    update_all_revisions(kwargs['instance'], 'shared')


def unpublish_all_revisions(sender, **kwargs):
    """Mark every revision of the page as not live."""
    update_all_revisions(kwargs['instance'], 'live')


def configure_page_and_revision(sender, **kwargs):
    """On publish: update share state, configure the revision, flush Akamai."""
    # Imported lazily to avoid a circular import with wagtail_hooks.
    from .wagtail_hooks import share, configure_page_revision, flush_akamai
    page = kwargs['instance']
    share(page=page, is_sharing=False, is_live=True)
    configure_page_revision(page=page, is_sharing=False, is_live=True)
    flush_akamai(page=page, is_live=True)


page_unshared.connect(unshare_all_revisions)
page_unpublished.connect(unpublish_all_revisions)
page_published.connect(configure_page_and_revision)
# Sets all the revisions for a page's attribute to False when it's called def update_all_revisions(instance, attr): for revision in instance.revisions.all(): content = json.loads(revision.content_json) if content[attr]: content[attr] = False revision.content_json = unicode(json.dumps(content), 'utf-8') revision.save() def unshare_all_revisions(sender, **kwargs): update_all_revisions(kwargs['instance'], 'shared') def unpublish_all_revisions(sender, **kwargs): update_all_revisions(kwargs['instance'], 'live') def configure_page_and_revision(sender, **kwargs): from .wagtail_hooks import share, configure_page_revision, flush_akamai share(page=kwargs['instance'], is_sharing=False, is_live=True) configure_page_revision(page=kwargs['instance'], is_sharing=False, is_live=True) flush_akamai(page=kwargs['instance']) page_unshared.connect(unshare_all_revisions) page_unpublished.connect(unpublish_all_revisions) page_published.connect(configure_page_and_revision)
def register_signal_handlers():
    """Attach wagtail-bakery's build hooks to the page lifecycle signals."""
    # dispatch_uid guards against duplicate registration on repeated calls.
    page_published.connect(
        handle_publish, dispatch_uid='wagtailbakery_page_published'
    )
    page_unpublished.connect(
        handle_unpublish, dispatch_uid='wagtailbakery_page_unpublished'
    )
def register_signal_handlers():
    """Request a sitemap recrawl whenever a job listing page is published."""
    page_published.connect(request_site_recrawl, sender=JobListingPage)
import logging

import requests
from flags.state import flag_enabled
from wagtail.wagtailcore.signals import page_published

from jobmanager.models.pages import JobListingPage


logger = logging.getLogger(__name__)

SITEMAP_URL = 'https://www.consumerfinance.gov/sitemap.xml'
# Ping over HTTPS so the request cannot be observed/tampered with in
# transit; the previous plain-HTTP URL relied on Google's redirect.
GOOGLE_URL = 'https://www.google.com/ping'


def request_site_recrawl(sender, **kwargs):
    """Ping Google's sitemap endpoint after a job page is published.

    Gated behind the PING_GOOGLE_ON_PUBLISH feature flag. Best-effort:
    any failure is logged and swallowed so publishing never breaks
    because of the ping.
    """
    try:
        if flag_enabled('PING_GOOGLE_ON_PUBLISH'):
            response = requests.get(GOOGLE_URL, {'sitemap': SITEMAP_URL})
            response.raise_for_status()
            logger.info('Pinged Google after job page publication.')
    except Exception:
        logger.exception('Pinging Google after job page publication failed.')


page_published.connect(request_site_recrawl, sender=JobListingPage)
__author__ = 'nikoladang'

from wagtail.wagtailcore.signals import page_published

from .models import PostPage


def receiver(sender, **kwargs):
    """Placeholder page_published handler for PostPage; does nothing yet."""
    pass


page_published.connect(receiver, sender=PostPage)
# NOTE(review): these look like methods of a screenshot model keyed by a
# ``page_revision`` FK — the enclosing class is not visible here; confirm
# indentation/ownership against the original file.
def get_current(self):
    # Resolve the page's latest revision and, if available, its screenshot.
    # Returns (is_newer, screenshot_or_revision): the flag is True when the
    # latest revision differs from this object's revision.
    current = PageRevision.objects.get(pk=self.page_revision.id).page.get_latest_revision()
    try:
        current_screenshot = PageRevisionScreenshot.objects.get(page_revision=current.pk)
        if current.id != self.page_revision.id:
            return True, current_screenshot
        else:
            return False, current_screenshot
    except ObjectDoesNotExist:
        # NOTE(review): this branch returns the *revision*, not a screenshot,
        # so the second element's type differs from the happy path — confirm
        # callers handle both.
        return False, current

def get_previous(self):
    # Find the screenshot of the revision immediately older than this one.
    # Returns (found, screenshot) or (False, self) when there is no previous
    # revision or it has no screenshot.
    page = PageRevision.objects.get(pk=self.page_revision.id).page
    revisions = page.revisions.order_by('-created_at')
    aslist = list(revisions.values_list('id', flat=True))
    index = aslist.index(self.page_revision.id)
    if index < len(aslist)-1:
        previous = aslist[index+1]
        try:
            return True, PageRevisionScreenshot.objects.get(page_revision=previous)
        except ObjectDoesNotExist:
            return False, self
    else:
        return False, self

# Trigger notification every time page is published
page_published.connect(screenshot.process_page_published_async)
page_unpublished.connect(screenshot.process_page_unpublished_async)
# Fix: the module referenced ``logging.getLogger`` without importing
# ``logging``, which raises NameError as soon as the module is imported.
import logging

import requests
from flags.state import flag_enabled
from wagtail.wagtailcore.signals import page_published

from jobmanager.models.pages import JobListingPage


logger = logging.getLogger(__name__)

SITEMAP_URL = 'https://www.consumerfinance.gov/sitemap.xml'
GOOGLE_URL = 'https://www.google.com/ping'


def request_site_recrawl(sender, **kwargs):
    """Ping Google to recrawl the sitemap after a job page is published.

    Gated behind the PING_GOOGLE_ON_PUBLISH feature flag. Best-effort:
    failures are logged, never raised, so publishing is never blocked.
    """
    try:
        if flag_enabled('PING_GOOGLE_ON_PUBLISH'):
            response = requests.get(GOOGLE_URL, {'sitemap': SITEMAP_URL})
            response.raise_for_status()
            logger.info(
                'Pinged Google after job page publication.'
            )
    except Exception:
        logger.exception(
            'Pinging Google after job page publication failed.'
        )


page_published.connect(request_site_recrawl, sender=JobListingPage)
def connect_page_signals_handlers():
    """Register publish/unpublish receivers for the journal page types."""
    handler_table = (
        (JournalPage, page_pub_receiver, page_unpub_receiver),
        (JournalAboutPage, about_page_pub_receiver, about_page_unpub_receiver),
    )
    for page_type, on_publish, on_unpublish in handler_table:
        page_published.connect(on_publish, sender=page_type)
        page_unpublished.connect(on_unpublish, sender=page_type)
from wagtail.wagtailcore.signals import page_published

from .models import FeedItem, FEED_TYPE_NEW_EPISODE
from apps.shows.models import ShowAudioSeriesEpisodePage


def create_new_episode_feed_item(sender, **kwargs):
    """Create a feed entry announcing a newly published episode page."""
    episode_page = kwargs['instance']
    feed_item = FeedItem(
        type=FEED_TYPE_NEW_EPISODE,
        prop=episode_page,
        actor=episode_page.get_show(),
    )
    feed_item.save()


page_published.connect(
    create_new_episode_feed_item,
    sender=ShowAudioSeriesEpisodePage,
    dispatch_uid='create_new_episode_feed_item',
)
def register_signal_handlers():
    """Connect webhook handlers only for the events enabled in settings."""
    event_table = (
        ('WEBHOOKS_PAGE_PUBLISHED', page_published, page_published_signal_handler),
        ('WEBHOOKS_PAGE_UNPUBLISHED', page_unpublished, page_unpublished_signal_handler),
    )
    for setting_name, signal, handler in event_table:
        # A missing or falsy setting leaves that event unhooked.
        if getattr(settings, setting_name, None):
            signal.connect(handler)
Cheap aysnc, see https://stackoverflow.com/a/28913218 """ def decorator(*args, **kwargs): t = Thread(target=function, args=args, kwargs=kwargs) t.daemon = True t.start() return decorator @postpone def deploy(sender, **kwargs): """ Trigger a build on Netlify, if NETLIFY_BUILD_HOOK is supplied, or build static pages, then upload incremental changes to Netlify. """ netlify_build_hook = getattr(settings, "NETLIFY_BUILD_HOOK", None) if netlify_build_hook: call_command("netlify", "--trigger-build") else: call_command("build") call_command("netlify") connection.close() if getattr(settings, "NETLIFY_AUTO_DEPLOY", False) == True: function_path = getattr(settings, "NETLIFY_DEPLOY_FUNCTION", "wagtailnetlify.models.deploy") function = import_string(function_path) page_published.connect(function)
from wagtail.wagtailcore.signals import page_published, page_unpublished
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache

from blog.models import BlogIndexPage, BlogPost


def invalidate_frontend_cache_for_blog_index_page(sender, instance, **kwargs):
    """Invalidate the frontend cache for the parent BlogIndexPage of a BlogPost.

    Parent lookup per the recommendation in
    https://github.com/wagtail/wagtail/issues/2779#issuecomment-228472829
    """
    parent_index = instance.get_parent()
    if parent_index:
        purge_page_from_cache(parent_index)


# The parent listing goes stale on both publish and unpublish of a post.
for _signal in (page_published, page_unpublished):
    _signal.connect(invalidate_frontend_cache_for_blog_index_page, sender=BlogPost)