def purge(url=None):
    """Invalidate ``url`` in the appropriate front-end cache, or the whole site.

    If ``url`` matches one of the configured CloudFront distribution keys it
    is purged via the "files" backend; otherwise via the default "akamai"
    backend. With no ``url``, the entire site is purged from Akamai.

    Args:
        url: Absolute URL to invalidate, or None to purge everything.

    Returns:
        A human-readable confirmation string.
    """
    akamai_config = settings.WAGTAILFRONTENDCACHE.get('akamai', {})
    cloudfront_config = settings.WAGTAILFRONTENDCACHE.get('files', {})

    if url:
        # Use the Wagtail frontendcache PurgeBatch to perform the purge
        batch = PurgeBatch()
        batch.add_url(url)

        # If the URL matches any of our CloudFront distributions, invalidate
        # with that backend.
        # FIX: the previous `any(k for k in ... if k in url)` yielded the key
        # itself, so a falsy key (e.g. '') that matched would be ignored;
        # `any(k in url for k in ...)` tests the membership predicate directly.
        if any(k in url for k in cloudfront_config.get('DISTRIBUTION_ID', {})):
            logger.info('Purging {} from "files" cache'.format(url))
            batch.purge(backends=['files'])
        # Otherwise invalidate with our default backend
        else:
            logger.info('Purging {} from "akamai" cache'.format(url))
            batch.purge(backends=['akamai'])

        return "Submitted invalidation for %s" % url
    else:
        # purge_all only exists on our AkamaiBackend
        backend = AkamaiBackend(akamai_config)
        logger.info('Purging entire site from "akamai" cache')
        backend.purge_all()
        return "Submitted invalidation for the entire site."
def cloudfront_cache_invalidation(sender, instance, **kwargs):
    """Signal handler: purge a saved instance's file URL from the
    CloudFront-backed "files" cache.

    Does nothing when the purge feature flag is off or when the instance
    has no file attached.
    """
    # Bail out early: feature disabled, or nothing to purge.
    if not settings.ENABLE_CLOUDFRONT_CACHE_PURGE or not instance.file:
        return

    file_url = instance.file.url
    logger.info('Purging {} from "files" cache'.format(file_url))

    purge_batch = PurgeBatch()
    purge_batch.add_url(file_url)
    purge_batch.purge(backends=['files'])
def purge_documents_when_collection_saved_with_restrictions(
        sender, instance, **kwargs):
    """Purge every document of a collection from the front-end cache when
    the collection is saved and carries view restrictions.

    Unrestricted collections are left alone — their documents are public
    and need not be evicted.
    """
    restrictions = instance.get_view_restrictions()
    if not restrictions:
        logger.debug(
            'Collection "%s" saved, don\'t purge from cache because it has '
            'no view restriction', instance.name)
        return

    logger.debug(
        'Collection "%s" saved, has restrictions, purge its documents from '
        'the cache', instance.name)

    batch = PurgeBatch()
    # Each document has two cacheable URLs: the serve view and the raw file.
    for doc in Document.objects.filter(collection=instance):
        batch.add_url(doc.url)
        batch.add_url(doc.file.url)
    batch.purge(backend_settings=WAGTAIL_STORAGES_DOCUMENTS_FRONTENDCACHE)
def purge_document_from_cache(document):
    """Purge a single document's file URL and serve-view URLs from the caches.

    No need for check if they are public or private - if they've changed,
    they should be out of cache.
    """
    logger.debug('Purge document "%s" from the front-end cache',
                 document.file.name)

    cache_config = get_frontend_cache_configuration()
    if cache_config:
        # The raw file is served from storage; purge it via the dedicated
        # front-end cache configuration.
        file_batch = PurgeBatch()
        file_batch.add_url(document.file.url)
        file_batch.purge(backend_settings=cache_config)

    # Purge Wagtail document view URLs using normal site's cache.
    site_batch = PurgeBatch()
    site_batch.add_urls(
        build_absolute_urls_for_all_sites_for_path(document.url))
    site_batch.purge()
def purge_collection_documents_from_cache(collection):
    """Purge every document of a restricted collection from the caches.

    Do not purge documents if they are in a public collection. Documents
    themselves have not changed so no need to make redundant calls for big
    collections.
    """
    if not collection.get_view_restrictions():
        return

    logger.debug(
        'Purge documents of collection "%s" from the front-end cache',
        collection.name,
    )

    # Purge download URLs and actual files if they possibly used to be public.
    site_batch = PurgeBatch()
    file_batch = PurgeBatch()
    documents = get_document_model().objects.filter(collection=collection)
    for document in documents:
        site_batch.add_urls(
            build_absolute_urls_for_all_sites_for_path(document.url))
        file_batch.add_url(document.file.url)

    site_batch.purge()

    cache_config = get_frontend_cache_configuration()
    if cache_config:
        file_batch.purge(backend_settings=cache_config)
def purge_cache_for_api(**kwargs):
    """Invalidate the cached stream API endpoint and both localized
    stream pages."""
    batch = PurgeBatch()
    batch.add_urls([
        settings.BASE_URL + reverse("api-stream"),
        settings.BASE_URL + "/en/stream/",
        settings.BASE_URL + "/sk/stream/",
    ])
    batch.purge()
def purge_cache_for_index_pages(**kwargs):
    """Invalidate all cached index pages plus the localized events JSON
    endpoints."""
    batch = PurgeBatch()
    batch.add_pages(IndexPages().set())
    batch.add_urls([
        settings.BASE_URL + "/en/events/json/",
        settings.BASE_URL + "/sk/events/json/",
    ])
    batch.purge()
def purge_document_from_cache_when_saved(sender, instance, **kwargs):
    """Signal handler: purge a saved document's URLs from the front-end
    cache."""
    logger.debug('Document "%s" saved, purge from cache', instance.file.name)

    batch = PurgeBatch()
    # Both the serve-view URL and the underlying file URL go stale on save.
    for stale_url in (instance.url, instance.file.url):
        batch.add_url(stale_url)
    batch.purge(backend_settings=WAGTAIL_STORAGES_DOCUMENTS_FRONTENDCACHE)