def test_remember_stats_all_urls_looong_url(self):
    """Hit/miss stats are tracked correctly even for a very long URL."""
    long_path = (
        '/something/really/long/to/start/with/right/here/since/this/will/'
        'test/that/things/work/with/long/urls/too'
    )
    query = {
        'line1': 'Bad luck, wind been blowing at my back',
        'line2': "I was born to bring trouble to wherever I'm at",
        'line3': "Got the number thirteen, tattooed on my neck",
        'line4': "When the ink starts to itch, ",
        'line5': "then the black will turn to red",
    }
    request = self.factory.get(long_path, query)
    response = views.home6(request)
    eq_(response.status_code, 200)
    # first render: recorded as one miss, zero hits
    [match] = find_urls()
    ok_(match[0].startswith('/something/really'))
    eq_(match[2]['hits'], 0)
    eq_(match[2]['misses'], 1)
    # second render is served from cache, so the hit counter moves
    response = views.home6(request)
    eq_(response.status_code, 200)
    [match] = find_urls([])
    ok_(match[0].startswith('/something/really'))
    eq_(match[2]['hits'], 1)
    eq_(match[2]['misses'], 1)
def test_remember_stats_all_urls_looong_url(self):
    """Long URLs must not break the remembered-URL hit/miss bookkeeping."""
    request = self.factory.get(
        "/something/really/long/to/start/with/right/here/since/this/will/"
        "test/that/things/work/with/long/urls/too",
        {
            "line1": "Bad luck, wind been blowing at my back",
            "line2": "I was born to bring trouble to wherever I'm at",
            "line3": "Got the number thirteen, tattooed on my neck",
            "line4": "When the ink starts to itch, ",
            "line5": "then the black will turn to red",
        },
    )
    eq_(views.home6(request).status_code, 200)
    # after the first request there is exactly one remembered entry: a miss
    match, = find_urls()
    ok_(match[0].startswith("/something/really"))
    eq_(match[2]["hits"], 0)
    eq_(match[2]["misses"], 1)
    # a repeat request should register as a cache hit
    eq_(views.home6(request).status_code, 200)
    match, = find_urls([])
    ok_(match[0].startswith("/something/really"))
    eq_(match[2]["hits"], 1)
    eq_(match[2]["misses"], 1)
def test_remember_stats_all_urls_looong_url(self):
    """Stats collection works for URLs far longer than a cache-key limit."""
    url = ('/something/really/long/to/start/with/right/here/since/this/will/'
           'test/that/things/work/with/long/urls/too')
    params = {
        'line1': 'Bad luck, wind been blowing at my back',
        'line2': "I was born to bring trouble to wherever I'm at",
        'line3': "Got the number thirteen, tattooed on my neck",
        'line4': "When the ink starts to itch, ",
        'line5': "then the black will turn to red",
    }
    request = self.factory.get(url, params)
    first = views.home6(request)
    eq_(first.status_code, 200)
    # miss recorded on the first pass
    (match,) = find_urls()
    ok_(match[0].startswith('/something/really'))
    eq_(match[2]['hits'], 0)
    eq_(match[2]['misses'], 1)
    # hit recorded on the second pass
    second = views.home6(request)
    eq_(second.status_code, 200)
    (match,) = find_urls([])
    ok_(match[0].startswith('/something/really'))
    eq_(match[2]['hits'], 1)
    eq_(match[2]['misses'], 1)
def test_find_and_purge_all_urls(self):
    """Purging with an empty pattern list wipes every remembered URL."""
    purged = list(find_urls([], purge=True))
    eq_(len(purged), 4)
    # every remembered (url, cache_key) pair is reported, with no stats
    for url, cache_key in self.urls.items():
        ok_((url, cache_key, None) in purged)
    # nothing remains remembered after the purge
    leftovers = list(find_urls([]))
    eq_(len(leftovers), 0)
def test_find_and_purge_all_urls(self):
    """find_urls([], purge=True) returns and removes all remembered URLs."""
    found = list(find_urls([], purge=True))
    eq_(len(found), 4)
    for key, value in self.urls.items():
        # each entry comes back as (url, cache_key, stats=None)
        ok_((key, value, None) in found)
    # a follow-up scan proves the registry is now empty
    eq_(len(list(find_urls([]))), 0)
def test_purge_one_url(self):
    """Purging a single URL evicts it but leaves the rest cached."""
    ok_(cache.get('key1'))
    ok_('/page1.html' in cache.get(REMEMBERED_URLS_KEY))
    purged = list(find_urls(['/page1.html'], purge=True))
    eq_(len(purged), 1)
    ok_(('/page1.html', 'key1', None) in purged)
    # both the cached response and its bookkeeping entry are gone
    ok_(not cache.get('key1'))
    ok_('/page1.html' not in cache.get(REMEMBERED_URLS_KEY))
    # the other three remembered URLs are untouched
    remaining = list(find_urls([]))
    eq_(len(remaining), 3)
    ok_(('/page1.html', 'key1', None) not in remaining)
def test_purge_one_url(self):
    """A targeted purge removes exactly one URL and its cache entry."""
    # sanity: entry exists both in the cache and in the URL registry
    ok_(cache.get("key1"))
    ok_("/page1.html" in cache.get(REMEMBERED_URLS_KEY))
    found = list(find_urls(["/page1.html"], purge=True))
    eq_(len(found), 1)
    ok_(("/page1.html", "key1", None) in found)
    ok_(not cache.get("key1"))
    ok_("/page1.html" not in cache.get(REMEMBERED_URLS_KEY))
    # find all the rest in there
    rest = list(find_urls([]))
    eq_(len(rest), 3)
    ok_(("/page1.html", "key1", None) not in rest)
def test_purge_one_url(self):
    """Only the purged URL disappears; the registry keeps the others."""
    target = '/page1.html'
    ok_(cache.get('key1'))
    ok_(target in cache.get(REMEMBERED_URLS_KEY))
    results = list(find_urls([target], purge=True))
    eq_(len(results), 1)
    ok_((target, 'key1', None) in results)
    # purge must clear the cached page and its registry record
    ok_(not cache.get('key1'))
    ok_(target not in cache.get(REMEMBERED_URLS_KEY))
    survivors = list(find_urls([]))
    eq_(len(survivors), 3)
    ok_((target, 'key1', None) not in survivors)
def home(request):
    """Render the home page listing every URL fancy-cache remembers."""
    context = {'remembered_urls': find_urls([])}
    return render(request, 'home.html', context)
def invalidate_by_BoatImage(sender, instance, **kwargs):
    """Drop every cache entry tied to the image's boat when it changes."""
    boat_id = instance.boat_id
    # plain cache keys for the detail view and the PDF fragment
    cache.delete_many(("boat_detail_view" + str(boat_id), "Pdf+%s" % boat_id))
    # the generated PDF page is cached by URL; purge it if the route resolves
    try:
        pdf_url = reverse('boats:pdf_to_file', args=(boat_id,))
    except NoReverseMatch:
        pass
    else:
        list(find_urls([pdf_url], purge=True))
def test_remember_stats_all_urls(self):
    """Hit and miss counters accumulate across repeated requests."""
    request = self.factory.get('/anything')
    eq_(views.home6(request).status_code, 200)
    # first request: one miss, no hits yet
    [match] = find_urls(urls=['/anything'])
    eq_(match[0], '/anything')
    eq_(match[2]['hits'], 0)
    eq_(match[2]['misses'], 1)
    # second request is served from cache, so hits goes to one
    eq_(views.home6(request).status_code, 200)
    [match] = find_urls(urls=['/anything'])
    eq_(match[0], '/anything')
    eq_(match[2]['hits'], 1)
    eq_(match[2]['misses'], 1)
def test_remember_stats_all_urls(self):
    """The stats dict for a remembered URL tracks misses then hits."""
    request = self.factory.get("/anything")
    response = views.home6(request)
    eq_(response.status_code, 200)
    # after the first request the single match records a miss
    match, = find_urls(urls=["/anything"])
    eq_(match[0], "/anything")
    eq_(match[2]["hits"], 0)
    eq_(match[2]["misses"], 1)
    # repeating the request turns into a cache hit
    response = views.home6(request)
    eq_(response.status_code, 200)
    match, = find_urls(urls=["/anything"])
    eq_(match[0], "/anything")
    eq_(match[2]["hits"], 1)
    eq_(match[2]["misses"], 1)
def test_remember_stats_all_urls(self):
    """A cached view's URL shows 0 hits/1 miss, then 1 hit/1 miss."""
    request = self.factory.get('/anything')
    first = views.home6(request)
    eq_(first.status_code, 200)
    (match,) = find_urls(urls=['/anything'])
    eq_(match[0], '/anything')
    # fresh entry: served by the view, not the cache
    eq_(match[2]['hits'], 0)
    eq_(match[2]['misses'], 1)
    second = views.home6(request)
    eq_(second.status_code, 200)
    (match,) = find_urls(urls=['/anything'])
    eq_(match[0], '/anything')
    # now the cached copy answered once
    eq_(match[2]['hits'], 1)
    eq_(match[2]['misses'], 1)
def home(request):
    """Debug page: remembered URLs plus the relevant fancy-cache settings."""
    context = {
        "found": find_urls([]),
        "remember_all_urls_setting": getattr(
            settings, "FANCY_REMEMBER_ALL_URLS", False
        ),
        "remember_stats_all_urls_setting": getattr(
            settings, "FANCY_REMEMBER_STATS_ALL_URLS", False
        ),
    }
    return render(request, "fancy-cache/home.html", context)
def home(request):
    """Show every remembered URL and whether the remember settings are on."""
    found = find_urls([])
    remember_all = getattr(settings, 'FANCY_REMEMBER_ALL_URLS', False)
    remember_stats = getattr(settings, 'FANCY_REMEMBER_STATS_ALL_URLS', False)
    return render(request, 'fancy-cache/home.html', {
        'found': found,
        'remember_all_urls_setting': remember_all,
        'remember_stats_all_urls_setting': remember_stats,
    })
def invalidate_by_Comment(sender, instance, **kwargs):
    """Purge the cached article detail page when one of its comments changes."""
    stale_urls = []
    try:
        stale_urls.append(reverse(
            "articles:detail",
            args=(instance.foreignkey_to_article.foreignkey_to_subheading_id,
                  instance.foreignkey_to_article_id),
        ))
    except (NoReverseMatch, AttributeError, ObjectDoesNotExist):
        # comment is orphaned or the URL cannot be built; nothing to purge
        pass
    list(find_urls(stale_urls, purge=True))
def accessrequest_post_save(sender, **kwargs):
    """On access-request save, drop per-user caches so permissions re-apply."""
    access_request = kwargs["instance"]
    # Clear cached accessrequests for user
    cache.delete(f'access_request_shipments_{access_request.requester_id}')
    # Invalidate cached tracking data view to force permissions check
    tracking_url = reverse(
        'shipment-tracking',
        kwargs={'version': 'v1', 'pk': access_request.shipment.id},
    )
    list(find_urls([tracking_url + "*"], purge=True))
def routetrackingdata_post_save(sender, **kwargs):
    """Push new route tracking data: purge stale caches and notify websockets."""
    instance = kwargs["instance"]
    LOG.debug(f'New tracking_data committed to db and will be pushed to the UI. Tracking_data: {instance.id}.')
    in_transit_legs = instance.route.routeleg_set.filter(
        shipment__state=TransitState.IN_TRANSIT.value)
    for leg in in_transit_legs:
        # Invalidate cached tracking data view for each shipment in Route
        tracking_url = reverse(
            'shipment-tracking',
            kwargs={'version': 'v1', 'pk': leg.shipment.id})
        list(find_urls([tracking_url + "*"], purge=True))
        # Notify websocket channel
        async_to_sync(channel_layer.group_send)(
            leg.shipment.owner_id,
            {"type": "tracking_data.save", "tracking_data_id": instance.id})
def delete(self, using=None, keep_parents=False):
    """Delete the boat, cleaning up related articles, headings, images and maps.

    Cleanup rules for empty subheadings under the "Articles on boats" category:
    a subheading is removed when it (1) lives in that category, (2) has no
    boat linked to it, and (3) has no non-deleted articles left.
    """
    import articles.models  # local import to avoid circular import with articles
    try:
        subheadings_query_set = articles.models.SubHeading.objects.filter(
            foreignkey_id=articles.models.UpperHeading.objects.get(
                name__exact="Articles on boats").pk,
            one_to_one_to_boat_id__isnull=True)
        for subheading in subheadings_query_set:
            if not subheading.article_set(manager='reverse').exists():
                # drop soft-deleted articles before removing the subheading
                for article in subheading.article_set.filter(show=False):
                    article.true_delete()
                subheading.delete()
    except EmptyResultSet:
        pass
    try:
        # remove this boat's own subheading if it holds no visible articles
        current_subheading = articles.models.SubHeading.objects.get(
            one_to_one_to_boat=self).article_set
        if not current_subheading.filter(show=True).exists():
            # purge its soft-deleted articles, then the heading itself
            for article in current_subheading.filter(show=False):
                article.true_delete()
            self.heading.delete()
            # invalidate the cached articles main page
            main_page_url = reverse('articles:articles_main')
            list(find_urls([main_page_url], purge=True))
    # BUG FIX: the original `except A or B:` only ever caught A because
    # `or` picks the first truthy class; a tuple catches both.
    except (articles.models.SubHeading.DoesNotExist, ObjectDoesNotExist):
        pass
    # Soft-delete associated images through BoatImage.delete() so that the
    # per-image deletion logic runs (on_delete=SET_NULL would bypass it).
    for image in self.boatimage_set.all():
        image.delete()
    # remove the rendered map file, if any
    clean_map(pk=self.id)
    try:
        # remove the Heroku map template, if any
        self.maptemplatemodel.delete()
    except ObjectDoesNotExist:
        pass
    # BUG FIX: forward the caller's arguments instead of hard-coding
    # using=None, keep_parents=False (defaults are unchanged).
    models.Model.delete(self, using=using, keep_parents=keep_parents)
def on_post_save(sender, instance, created, raw, using, update_fields, **kwargs):
    """Reindex a live indexable page and purge its cached URLs on save."""
    if getattr(instance, 'is_indexable', False) and instance.live:
        search_signals.update_document.send(sender, instance)
    url_path = getattr(instance, 'url_path', None)
    if url_path:
        # purge both the bare path and any query-string variants of it
        removed = list(
            find_urls([f'*{url_path}', f'*{url_path}?*'], purge=True))
        for url, key, count in removed:
            mcod_logger.debug(
                'URL "%s" removed from cache on post_save "%s" page signal.'
                % (url, instance))
def invalidate_by_BoatModel(sender, instance, **kwargs):
    """Invalidate every cached view that renders this boat's data."""
    # the boat list view is cached under a single fixed key
    cache.delete("BoatListView")
    # per-boat versioned entry used by the detail-view template cache
    cache.delete("boat_detail_view", version=instance.pk)
    # BoatListView_2 is cached at the URL level; purge by URL if it resolves
    try:
        url = reverse('boats_version_2')
    except NoReverseMatch:
        pass
    else:
        list(find_urls([url], purge=True))
def handle(self, *urls, **options):
    """List remembered fancy-cache URLs (optionally purging them).

    For entries with stats, prints URL, hit and miss counts; with
    verbosity > 1 a trailing total is printed.
    """
    verbose = int(options['verbosity']) > 1
    _count = 0
    for url, cache_key, stats in find_urls(urls, purge=options['purge']):
        _count += 1
        if stats:
            # BUG FIX: self.stdout.write() takes a single message argument;
            # the second positional is a style function, so the original
            # write("HITS", str(...)) raised TypeError. Also dropped the
            # stray trailing commas that built useless tuples.
            self.stdout.write(
                "%s HITS %s MISSES %s" % (
                    url[:70].ljust(65),
                    str(stats['hits']).ljust(5),
                    str(stats['misses']).ljust(5),
                )
            )
        else:
            self.stdout.write(url)
    if verbose:
        self.stdout.write("-- %s URLs cached --" % _count)
def home(request):
    """Render the fancy-cache debug page with remembered URLs and settings."""
    remember_all = getattr(settings, 'FANCY_REMEMBER_ALL_URLS', False)
    remember_stats = getattr(settings, 'FANCY_REMEMBER_STATS_ALL_URLS', False)
    data = {
        'found': find_urls([]),
        'remember_all_urls_setting': remember_all,
        'remember_stats_all_urls_setting': remember_stats,
    }
    return render(request, 'fancy-cache/home.html', data)
def handle(self, *urls, **options):
    """Print every remembered URL (and stats), optionally purging them."""
    verbose = int(options['verbosity']) > 1
    count = 0
    for url, cache_key, stats in find_urls(urls, purge=options['purge']):
        count += 1
        if not stats:
            self.stdout.write(url)
            continue
        # BUG FIX: OutputWrapper.write() accepts one message string —
        # its second positional parameter is a style function, so
        # write("HITS", "...") raised TypeError at runtime. Emit the
        # whole line in a single call instead.
        line = "%s HITS %s MISSES %s" % (
            url[:70].ljust(65),
            str(stats['hits']).ljust(5),
            str(stats['misses']).ljust(5),
        )
        self.stdout.write(line)
    if verbose:
        self.stdout.write("-- %s URLs cached --" % count)
def handle(self, *urls, **options):
    """List remembered fancy-cache URLs with their hit/miss stats.

    With --verbosity > 1 a trailing count summary is printed.
    """
    verbose = int(options["verbosity"]) > 1
    _count = 0
    for url, cache_key, stats in find_urls(urls, purge=options["purge"]):
        _count += 1
        if stats:
            # BUG FIX: the original used Python 2 `print` statements,
            # which are syntax errors on Python 3; management commands
            # should write through self.stdout anyway.
            self.stdout.write(
                "%s HITS %s MISSES %s" % (
                    url[:70].ljust(65),
                    str(stats["hits"]).ljust(5),
                    str(stats["misses"]).ljust(5),
                )
            )
        else:
            self.stdout.write(url)
    if verbose:
        self.stdout.write("-- %s URLs cached --" % _count)
def handle(self, *urls, **options):
    """Show (and optionally purge) every URL remembered by fancy-cache."""
    verbose = int(options['verbosity']) > 1
    _count = 0
    for url, cache_key, stats in find_urls(urls, purge=options['purge']):
        _count += 1
        if not stats:
            self.stdout.write(url)
            continue
        # BUG FIX: Python 2 `print url, ...` statements do not parse on
        # Python 3; rewritten as a single self.stdout.write() per line,
        # the standard output channel for Django management commands.
        self.stdout.write(
            "%s HITS %s MISSES %s" % (
                url[:70].ljust(65),
                str(stats['hits']).ljust(5),
                str(stats['misses']).ljust(5),
            )
        )
    if verbose:
        self.stdout.write("-- %s URLs cached --" % _count)
def invalidate_by_Article_articles_app(sender, instance, **kwargs):
    """Purge cached pages affected by saving (or soft-deleting) an article."""
    subheading_id = instance.foreignkey_to_subheading_id
    show_by_heading_page_url = reverse(
        'articles:show_by_heading', args=(subheading_id, ))
    article_content_page_url = reverse(
        "articles:detail", args=(subheading_id, instance.id))
    article_resurrection_url = reverse("articles:resurrection")
    main_page_url = reverse('articles:articles_main')
    urls = [show_by_heading_page_url, article_content_page_url]
    if not instance.show:
        # soft-deleting an article changes the counters shown on the
        # resurrection page and on the main page, so purge those too
        urls += [article_resurrection_url, main_page_url]
    if kwargs.get("created"):
        # a brand-new article changes the per-subheading article counts
        # shown on the main page
        urls.append(main_page_url)
    list(find_urls(urls, purge=True))
def telemetrydata_post_save(sender, **kwargs):
    """Purge the cached telemetry list and notify the shipment's channel."""
    instance = kwargs["instance"]
    LOG.debug(
        f'New telemetry_data committed to db and will be pushed to the UI. Telemetry_data: {instance.id}.'
    )
    # Invalidate cached telemetry data view
    list_url = reverse(
        'shipment-telemetry-list',
        kwargs={'version': 'v1', 'shipment_pk': instance.shipment.id},
    )
    list(find_urls([list_url + "*"], purge=True))
    # Notify websocket channel
    payload = {
        "type": "telemetry_data.save",
        "telemetry_data_id": instance.id,
    }
    async_to_sync(channel_layer.group_send)(instance.shipment.owner_id, payload)
def clean(self):
    """Restore soft-deleted articles by flipping ``show`` back to True.

    Also stamps ``change_date``, invalidates the resurrection/main-page
    caches and the detail-view cache of each boat the articles belong to.
    """
    forms.ModelForm.clean(self)
    pk_list = self.cleaned_data["pk"]
    Article.default.filter(id__in=pk_list).update(show=True)
    articles_to_resurrect = Article.default.filter(id__in=pk_list)
    mark2 = []
    foreignkey_id = []
    # BUG FIX: the original assigned `datetime.datetime.now` (the function
    # object, missing the call) to change_date instead of a timestamp.
    now = datetime.datetime.now()
    for article in articles_to_resurrect:
        article.show = True
        article.change_date = now
        mark2.append(article.change_date)
        if article.foreignkey_to_boat:
            foreignkey_id.append(article.foreignkey_to_boat_id)
        article.save(update_fields=["show", "change_date"])
    # invalidate the article-resurrection page and the articles main page
    article_resurrection_url = reverse("articles:resurrection")
    main_page_url = reverse('articles:articles_main')
    list(find_urls([article_resurrection_url, main_page_url], purge=True))
    cache.delete_many(
        ["boat_detail_view" + str(fk_id) for fk_id in foreignkey_id])
    # short-lived marker consumed elsewhere (2 second TTL)
    cache.set("mark2", mark2, 2)
def home(request):
    """Home page: list all URLs currently remembered by the cache."""
    return render(request, "home.html", {"remembered_urls": find_urls([])})
def test_some_urls_double_star(self):
    """A pattern with two wildcards matches path and query-string variants."""
    matches = list(find_urls(['/page*.html?*']))
    eq_(len(matches), 2)
    ok_(('/page3.html?foo=bar', 'key3', None) in matches)
    ok_(('/page3.html?foo=else', 'key4', None) in matches)
def test_some_urls(self):
    """A trailing wildcard pattern still matches the bare URL itself."""
    matches = list(find_urls(['/page2.html*']))
    eq_(len(matches), 1)
    ok_(('/page2.html', 'key2', None) in matches)
def test_some_urls_double_star(self):
    """Two wildcards in one pattern find every query-string variant."""
    found = list(find_urls(["/page*.html?*"]))
    eq_(len(found), 2)
    # both cached variants of /page3.html are matched
    ok_(("/page3.html?foo=bar", "key3", None) in found)
    ok_(("/page3.html?foo=else", "key4", None) in found)
def test_find_one_url(self):
    """An exact URL pattern finds exactly that remembered entry."""
    matches = list(find_urls(['/page1.html']))
    eq_(len(matches), 1)
    ok_(('/page1.html', 'key1', None) in matches)
def test_some_urls_double_star(self):
    """Wildcards before the extension and after '?' combine correctly."""
    pattern = '/page*.html?*'
    results = list(find_urls([pattern]))
    eq_(len(results), 2)
    ok_(('/page3.html?foo=bar', 'key3', None) in results)
    ok_(('/page3.html?foo=else', 'key4', None) in results)
def test_some_urls(self):
    """A wildcard suffix matches the URL with nothing after it too."""
    results = list(find_urls(['/page2.html*']))
    eq_(len(results), 1)
    # only /page2.html itself is remembered under this pattern
    ok_(('/page2.html', 'key2', None) in results)
def test_find_one_url(self):
    """Looking up a single exact URL yields its one (url, key, stats) row."""
    results = list(find_urls(['/page1.html']))
    eq_(len(results), 1)
    # stats are None because stat collection is off in this fixture
    ok_(('/page1.html', 'key1', None) in results)
def home(request):
    """Render the home template with every remembered cache URL."""
    remembered = find_urls([])
    context = {'remembered_urls': remembered}
    return render(request, 'home.html', context)
def test_find_one_url(self):
    """find_urls with one exact pattern returns only that entry."""
    found = list(find_urls(["/page1.html"]))
    eq_(len(found), 1)
    ok_(("/page1.html", "key1", None) in found)
def test_some_urls(self):
    """A '/page2.html*' pattern matches exactly the one cached page."""
    found = list(find_urls(["/page2.html*"]))
    eq_(len(found), 1)
    ok_(("/page2.html", "key2", None) in found)
def invalidate_by_Subheading(sender, instance, **kwargs):
    """Purge the articles main page and this subheading's listing page."""
    stale_urls = [
        reverse('articles:articles_main'),
        reverse('articles:show_by_heading', args=(instance.id, )),
    ]
    list(find_urls(stale_urls, purge=True))