def purge(url=None):
    akamai_config = settings.WAGTAILFRONTENDCACHE.get('akamai', {})
    cloudfront_config = settings.WAGTAILFRONTENDCACHE.get('files', {})

    if url:
        # Use the Wagtail frontendcache PurgeBatch to perform the purge
        batch = PurgeBatch()
        batch.add_url(url)

        # If the URL matches any of our CloudFront distributions, invalidate
        # with that backend
        if any(k for k in cloudfront_config.get('DISTRIBUTION_ID', {})
               if k in url):
            logger.info('Purging {} from "files" cache'.format(url))
            batch.purge(backends=['files'])

        # Otherwise invalidate with our default backend
        else:
            logger.info('Purging {} from "akamai" cache'.format(url))
            batch.purge(backends=['akamai'])

        return "Submitted invalidation for %s" % url

    else:
        # purge_all only exists on our AkamaiBackend
        backend = AkamaiBackend(akamai_config)
        logger.info('Purging entire site from "akamai" cache')
        backend.purge_all()
        return "Submitted invalidation for the entire site."

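# The function above assumes a WAGTAILFRONTENDCACHE setting with an 'akamai'
# entry (consumed by a custom AkamaiBackend) and a 'files' CloudFront entry
# whose DISTRIBUTION_ID maps hostnames to distribution IDs, which is what
# makes the `k in url` hostname check work. A minimal sketch of that shape;
# the backend import path, credential key names, hostname, and distribution
# ID below are placeholder assumptions, not the original project's values.
WAGTAILFRONTENDCACHE = {
    'akamai': {
        # Hypothetical path to the project's custom Akamai backend.
        'BACKEND': 'myproject.caching.backends.AkamaiBackend',
        # Placeholder credential keys; the real backend may expect others.
        'CLIENT_TOKEN': 'changeme',
        'CLIENT_SECRET': 'changeme',
        'ACCESS_TOKEN': 'changeme',
    },
    'files': {
        'BACKEND': 'wagtail.contrib.frontend_cache.backends.CloudfrontBackend',
        # Hostname-to-distribution mapping; each key is tested against the
        # URL being purged.
        'DISTRIBUTION_ID': {
            'files.example.com': 'EXAMPLEDISTID',
        },
    },
}
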
def blog_page_changed(thepage):
    batch = PurgeBatch()
    for blog_index in BlogIndexPage.objects.live():
        if thepage in blog_index.get_blog_items().object_list:
            batch.add_page(blog_index)
    batch.purge()

def break_enforcement_cache(sender, instance, **kwargs):
    base = instance.get_site().root_url
    batch = PurgeBatch()
    enf_api_url = base + reverse('enforcement_action_api')
    enf_charts_url = base + '/enforcement/payments-harmed-consumers/enforcement-database/'  # noqa: E501
    batch.add_urls([enf_api_url, enf_charts_url])
    batch.purge()

def section_saved(sender, instance, **kwargs):
    if not instance.subpart.version.draft:
        batch = PurgeBatch()
        for page in instance.subpart.version.part.page.all():
            urls = page.get_urls_for_version(
                instance.subpart.version, section=instance)
            batch.add_urls(urls)
        batch.purge()

def effective_version_saved(sender, instance, **kwargs):
    """Invalidate the cache if the effective_version is not a draft."""
    if not instance.draft:
        batch = PurgeBatch()
        for page in instance.part.page.all():
            urls = page.get_urls_for_version(instance)
            batch.add_urls(urls)
        batch.purge()

def clear_all_cache(request):
    pages = request.site.root_page.get_descendants(inclusive=True)
    batch = PurgeBatch()
    batch.add_pages(pages)
    batch.purge()
    return render(request, 'cache_management/base.html', {
        'pages': len(pages),
    })

def purge_documents_when_collection_saved_with_restrictions(
        sender, instance, **kwargs):
    if not instance.get_view_restrictions():
        logger.debug(
            'Collection "%s" saved, don\'t purge from cache because it has '
            'no view restriction', instance.name)
        return
    logger.debug(
        'Collection "%s" saved, has restrictions, purge its documents from '
        'the cache', instance.name)
    batch = PurgeBatch()
    for document in Document.objects.filter(collection=instance):
        batch.add_url(document.url)
        batch.add_url(document.file.url)
    batch.purge(backend_settings=WAGTAIL_STORAGES_DOCUMENTS_FRONTENDCACHE)

def cloudfront_cache_invalidation(sender, instance, **kwargs):
    if not settings.ENABLE_CLOUDFRONT_CACHE_PURGE:
        return
    if not instance.file:
        return
    url = instance.file.url
    logger.info('Purging {} from "files" cache'.format(url))
    batch = PurgeBatch()
    batch.add_url(url)
    batch.purge(backends=['files'])

def event_page_changed(event_page):
    logger.info('Checking for index pages containing {}'.format(event_page))

    # Find all the live EventIndexPages that contain this event_page
    batch = PurgeBatch()
    for event_index in EventIndexPage.objects.live():
        logger.info('Checking if event_page is in {}'.format(event_index))
        # The paginator returns a list of generic Page objects, which won't
        # compare equal to our EventPage instance, so we convert each one to
        # its specific type first
        pages = event_index.get_event_items().object_list
        event_items = [page.specific for page in pages]
        if event_page in event_items:
            logger.info(f'Adding {event_index} to purge list')
            batch.add_page(event_index)

    # Purge all the event indexes we found in a single request
    logger.info('Purging!')
    batch.purge()

def general_page_changed(general_page):
    logger.info('Checking for index pages containing {}'.format(general_page))

    # Find all the live GeneralIndexPages that contain this general_page
    batch = PurgeBatch()
    for general_index in GeneralIndexPage.objects.live():
        logger.info('Checking if general_page is in {}'.format(general_index))
        # The paginator returns a list of generic Page objects, which won't
        # compare equal to our GeneralPage or TwoColumnGeneralPage instance,
        # so we convert each one to its specific type first
        pages = general_index.get_general_items().object_list
        general_items = [page.specific for page in pages]
        if general_page in general_items:
            logger.info('Adding general_index to purge list')
            batch.add_page(general_index)

    # Purge all the general indexes we found in a single request
    logger.info('Purging!')
    batch.purge()

def product_page_changed(product_page):
    logger.info(
        'Checking for shop index pages containing {}'.format(product_page))

    # Find all the live ShopIndexPages that contain this product_page
    batch = PurgeBatch()
    for shop_index in ShopIndexPage.objects.live():
        logger.info('Checking if product_page is in {}'.format(shop_index))
        # The paginator returns a list of generic Page objects, which won't
        # compare equal to our Product instance, so we convert each one to
        # its specific type first
        pages = shop_index.get_product_items().object_list
        product_items = [page.specific for page in pages]
        if product_page in product_items:
            logger.info('Adding shop_index to purge list')
            batch.add_page(shop_index)

    # Purge all the shop indexes we found in a single request
    logger.info('Purging!')
    batch.purge()

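# Change handlers like event_page_changed, general_page_changed, and
# product_page_changed are typically wired up to Wagtail's page_published
# and page_unpublished signals, as in the frontend-cache docs. A minimal
# sketch of that wiring for the event handler, assuming a Wagtail 2.x import
# path (wagtail.core.signals; newer releases expose wagtail.signals):
from wagtail.core.signals import page_published, page_unpublished


def event_page_signal_handler(instance, **kwargs):
    # Delegate to the purge logic above whenever an EventPage changes.
    event_page_changed(instance)


page_published.connect(event_page_signal_handler, sender=EventPage)
page_unpublished.connect(event_page_signal_handler, sender=EventPage)
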
def blog_page_changed(blog_page):
    batch = PurgeBatch()
    for index in BlogIndex.objects.live().ancestor_of(blog_page):
        batch.add_page(index)
    batch.purge()

def purge_collection_documents_from_cache(collection):
    # Do not purge documents if they are in a public collection. The
    # documents themselves have not changed, so there is no need to make
    # redundant calls for big collections.
    if not collection.get_view_restrictions():
        return

    logger.debug(
        'Purge documents of collection "%s" from the front-end cache',
        collection.name,
    )

    # Purge download URLs and actual files if they possibly used to be public.
    wagtail_batch = PurgeBatch()
    s3_batch = PurgeBatch()
    for document in get_document_model().objects.filter(collection=collection):
        wagtail_batch.add_urls(
            build_absolute_urls_for_all_sites_for_path(document.url))
        s3_batch.add_url(document.file.url)
    wagtail_batch.purge()

    frontend_cache_configuration = get_frontend_cache_configuration()
    if frontend_cache_configuration:
        s3_batch.purge(backend_settings=frontend_cache_configuration)

def purge_document_from_cache_when_saved(sender, instance, **kwargs):
    logger.debug('Document "%s" saved, purge from cache', instance.file.name)
    batch = PurgeBatch()
    batch.add_url(instance.url)
    batch.add_url(instance.file.url)
    batch.purge(backend_settings=WAGTAIL_STORAGES_DOCUMENTS_FRONTENDCACHE)

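# WAGTAIL_STORAGES_DOCUMENTS_FRONTENDCACHE above is passed straight to
# PurgeBatch.purge() as backend_settings, so it follows the same shape as
# WAGTAILFRONTENDCACHE. A minimal sketch, assuming a CloudFront distribution
# sits in front of the document storage; the distribution ID is a
# placeholder, not a real value.
WAGTAIL_STORAGES_DOCUMENTS_FRONTENDCACHE = {
    'files': {
        'BACKEND': 'wagtail.contrib.frontend_cache.backends.CloudfrontBackend',
        'DISTRIBUTION_ID': 'EXAMPLEDISTID',
    },
}
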
def purge_cache_for_index_pages(**kwargs):
    batch = PurgeBatch()
    batch.add_pages(IndexPages().set())
    batch.add_url(settings.BASE_URL + "/en/events/json/")
    batch.add_url(settings.BASE_URL + "/sk/events/json/")
    batch.purge()

def photo_page_changed(photo_page):
    batch = PurgeBatch()
    for index in PhotoIndex.objects.live().ancestor_of(photo_page):
        batch.add_page(index)
    batch.purge()

def purge_cache_for_api(**kwargs):
    batch = PurgeBatch()
    batch.add_url(settings.BASE_URL + reverse("api-stream"))
    batch.add_url(settings.BASE_URL + "/en/stream/")
    batch.add_url(settings.BASE_URL + "/sk/stream/")
    batch.purge()

def purge_document_from_cache(document):
    # No need to check whether the document is public or private: if it has
    # changed, it should be out of the cache either way.
    logger.debug(
        'Purge document "%s" from the front-end cache', document.file.name)

    frontend_cache_configuration = get_frontend_cache_configuration()
    if frontend_cache_configuration:
        s3_batch = PurgeBatch()
        s3_batch.add_url(document.file.url)
        s3_batch.purge(backend_settings=frontend_cache_configuration)

    # Purge Wagtail document view URLs using the normal site's cache.
    wagtail_batch = PurgeBatch()
    wagtail_batch.add_urls(
        build_absolute_urls_for_all_sites_for_path(document.url))
    wagtail_batch.purge()

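# build_absolute_urls_for_all_sites_for_path is used above to expand a
# site-relative document path into one absolute URL per Wagtail site, so
# every host's cached copy of the document view gets purged. A minimal
# sketch of such a helper under that assumption; it is not necessarily the
# library's exact implementation, and the import path may differ by Wagtail
# version.
from wagtail.core.models import Site


def build_absolute_urls_for_all_sites_for_path(path):
    # Prefix the path with each configured site's root URL.
    return [site.root_url + path for site in Site.objects.all()]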