def del_cached_content(location):
    # Delete cached content for the given location, as well as for content
    # cached with run=None: it's possible that the content could have been
    # cached without knowing the course_key - and so without having the run.
    # Keys are UTF-8 encoded bytes (Python 2 `unicode` objects).
    cache.delete_many(
        [unicode(loc).encode("utf-8") for loc in [location, location.replace(run=None)]]
    )
def test_cached_list(self):
    """Exercise TestUserCachedList through pickling, caching and item rebuilds."""
    # Set up the test data.
    users = User.objects.all()[:10]
    user_cache = TestUserCachedList(users)
    self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])
    # Force it through the pickle cycle.
    user_cache = pickle.loads(pickle.dumps(user_cache))
    self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])
    # The pickle size is greatly reduced. While making this test, it went
    # from 6377 bytes to 201 bytes. To avoid a brittle test, just check
    # that it's less than half the size.
    normal_pickle_size = len(pickle.dumps(users))
    improved_pickle_size = len(pickle.dumps(user_cache))
    self.assertTrue(improved_pickle_size < normal_pickle_size / 2.0)
    # Force it through the cache cycle.
    cache_key = 'apncore.cache.tests.test_cached_list'
    user_cache.cache(cache_key)
    user_cache = cache.get(cache_key)
    self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])
    # Delete the cached items, forcing the class to rebuild them.
    # The main list must be retrieved again to test unpacking its items.
    item_cache_keys = list(user_cache.make_cache_keys([user.pk for user in users]))
    cache.delete_many(item_cache_keys)
    user_cache = cache.get(cache_key)
    self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])
def clear_cache(sender, **kwargs):
    """Drop every cached opinions summary, then the key index itself."""
    # FIXME: oh my
    summary_keys = cache.get('opinions_summary_keys')
    if summary_keys:
        cache.delete_many(summary_keys)
        cache.delete('opinions_summary_keys')
def clear_top_level_cache_m2m(sender, instance, action, reverse, model, *args, **kwargs):
    """Purge the top-level cache for both affected models and the (possibly
    custom) m2m 'through' model."""
    affected = (instance, model, sender)
    cache.delete_many(tuple(model_top_level_cache_key(obj) for obj in affected))
def flush_all_caches_for_path(cls, pootle_path):
    """Remove the cache for all the goals in the given path and upper
    directories.

    The cache is deleted for the given path, for the directories between
    the given path and the translation project, and for the translation
    project itself.

    :param pootle_path: A string with a valid pootle path.
    """
    # Fetch all affected objects once up front instead of re-querying the
    # database while building each key.
    trail = cls.get_trail_for_path(pootle_path)
    if not trail:
        return
    goals = cls.get_goals_for_path(pootle_path)
    keys = [
        iri_to_uri(goal.pootle_path + ":" + path_obj.pootle_path + ":" + name)
        for goal in goals
        for path_obj in trail
        for name in cls.CACHED_FUNCTIONS
    ]
    cache.delete_many(keys)
def invalidate_cached_object(instance, is_test=False):
    """
    Removes cached objects for all users based on the class name of the
    instance passed.

    Ex: Given a DataSet instance, all possible cached objects holding
    DataSets will be deleted to represent the saving, updating, deletion,
    or perms change that was performed upon it.

    If the is_test flag is set, a new instance of a mockcache Client will
    be returned.
    """
    if not is_test:
        try:
            cache.delete_many(['{}-{}'.format(user.id, instance.__class__.__name__)
                               for user in User.objects.all()])
        except Exception as e:
            # Bug fix: the exception was previously passed as a positional
            # lazy-formatting argument to an already-%-formatted message,
            # which fails when the record is rendered. Use lazy %-args.
            logger.debug("Could not delete %s from cache: %s",
                         instance.__class__.__name__, e)
    else:
        from mockcache import Client
        mc = Client()
        return mc
def bust_cache(type, user_pk):
    """ Bust our cache for a given type, can bust multiple caches """
    cache.delete_many([CACHE_TYPES[name] % user_pk for name in BUST_CACHES[type]])
def clear(self):
    """Reset the ring buffer: drop every cached element and zero the state."""
    pattern = RingBuffer.BUFFER_ELEMENT_CACHE_KEY_PATTERN
    element_keys = [pattern % {'key': self.key, 'index': position}
                    for position in range(self.size)]
    cache.delete_many(element_keys)
    self.index = -1
    self.size = 0
def clear_blocklist(*args, **kw):
    # Something in the blocklist changed; invalidate all responses.
    redis = redisutils.connections['master']
    # The redis set 'blocklist:keys' tracks every cache key created for
    # blocklist responses; delete them all, then drop the tracking set.
    keys = redis.smembers('blocklist:keys')
    cache.delete_many(keys)
    redis.delete('blocklist:keys')
    # Ask the front-end cache to purge all /blocklist/ URLs asynchronously.
    flush_front_end_cache_urls.delay(['/blocklist/*'])
def uncache_flag(**kwargs):
    """Drop every cache entry derived from the signalled flag's name."""
    flag_name = kwargs['instance'].name
    cache.delete_many([
        keyfmt(key_template, flag_name)
        for key_template in (FLAG_CACHE_KEY, FLAG_USERS_CACHE_KEY, FLAG_GROUPS_CACHE_KEY)
    ])
def contest_update(sender, instance, **kwargs):
    """Invalidate contest metadata and the per-engine rendered HTML fragments."""
    if hasattr(instance, '_updating_stats_only'):
        return
    stale = ['generated-meta-contest:%d' % instance.id]
    for engine in EFFECTIVE_MATH_ENGINES:
        stale.append(make_template_fragment_key('contest_html', (instance.id, engine)))
    cache.delete_many(stale)
def delete(self, *args, **kwargs):
    """Delete the distinction type, then drop the progress-table caches."""
    super(DistinctionType, self).delete(*args, **kwargs)
    stale_keys = (
        'PROGRESS_TABLE_ACTIVE_DIST',
        'PROGRESS_TABLE_UGRADEL_DIST',
        'PROGRESS_TABLE_GRADEL_DIST',
    )
    cache.delete_many(stale_keys)
def invalidate_cache(model_class, objects, **extra_keys):
    """
    Flushes the cache of any cached objects associated with this instance.

    Explicitly set a None value instead of just deleting so we don't have
    any race conditions where:
        Thread 1 -> Cache miss, get object from DB
        Thread 2 -> Object saved, deleted from cache
        Thread 1 -> Store (stale) object fetched from DB in cache
    Five second should be more than enough time to prevent this from
    happening for a web app.
    """
    invalidation_dict = {}
    # Build the invalidation key for every accessor registered for this model.
    accessor_set = cache_signals.get_global_signals(model_class)
    for obj in objects:
        for (accessor_path, lookup_type, negate) in accessor_set:
            for value in get_values(obj, accessor_path):
                invalidation_key = get_invalidation_key(
                    model_class._meta.db_table,
                    accessor_path=accessor_path,
                    negate=negate,
                    value=value,
                    save=False
                )
                invalidation_dict[invalidation_key] = None
    # Each invalidation key maps to a comma-separated string of cache keys.
    invalidation_dict.update(cache.get_many(invalidation_dict.keys()))
    cache_keys = set()
    for obj_key, cache_key_list in invalidation_dict.iteritems():  # Python 2
        if cache_key_list:
            cache_keys.update(cache_key_list.split(","))
    # Overwrite everything with a short-lived None (see docstring), then delete.
    keys_to_invalidate = dict([(key, None) for key in chain(cache_keys, invalidation_dict.keys())])
    keys_to_invalidate.update(extra_keys)
    cache.set_many(keys_to_invalidate, 5)
    cache.delete_many(keys_to_invalidate.keys())
def post_update(sender, instance, **kwargs):
    """Evict every cache entry tied to the updated blog post."""
    post_id = instance.id
    fragment_keys = [make_template_fragment_key(fragment, (post_id,))
                     for fragment in ('post_summary', 'post_content')]
    cache.delete_many(fragment_keys + ['blog_slug:%d' % post_id,
                                       'blog_feed:%d' % post_id])
def delete(self, *args, **kwargs):
    """Delete the requirement, then drop the progress-table caches."""
    super(Requirement, self).delete(*args, **kwargs)
    stale_keys = (
        'PROGRESS_TABLE_ACTIVE_REQS',
        'PROGRESS_TABLE_UGRADEL_REQS',
        'PROGRESS_TABLE_GRADEL_REQS',
    )
    cache.delete_many(stale_keys)
def clear_permission_cache(salt, keys=None, superuser=False, user_obj=None):
    """
    Clear permissions cache. Either keys, superuser or user_obj should be
    passed.

    @salt: a string that distinguishes users from groups.
    @keys: a list of keys for objects to delete.
    @superuser: add superuser keys here?
    @user_obj: user object to clear.
    """
    if keys is None:
        keys = []
    else:
        keys = list(keys)
    if user_obj:
        # A given user_obj takes precedence: it replaces any passed-in keys.
        keys = [user_obj.pk]
        try:
            delattr(user_obj, '_%s_perm_cache' % salt)
        except AttributeError:
            # No local cache, not a problem.
            pass
    if superuser:
        keys.append('Superuser')
    # Keys look like 'Perm|<Salt>|<pk>'; de-dupe before deleting.
    to_clear = (
        '|'.join(('Perm', salt.title(), str(key)))
        for key in set(keys))
    cache.delete_many(to_clear)
def profile_update(sender, instance, **kwargs):
    """Invalidate the user's about fragment plus member counts of their orgs."""
    if hasattr(instance, '_updating_stats_only'):
        return
    stale = [make_template_fragment_key('user_about', (instance.id,))]
    for org_id in instance.organizations.values_list('id', flat=True):
        stale.append(make_template_fragment_key('org_member_count', (org_id,)))
    cache.delete_many(stale)
def receive_gift(self, request, pk=None):
    """Mark that the member has received their gift."""
    member = self.get_member(request, pk)
    # Validation messages are user-facing and intentionally kept in Russian.
    if member.season.is_closed():
        raise ValidationError('Сезон перенесен в архив.')
    if not member.santa:
        raise ValidationError('У вас нет АДМ.')
    if member.is_gift_received():
        raise ValidationError('Подарок уже получен.')
    member.gift_received = timezone.now()
    # Notify the santa that their giftee confirmed receipt.
    member.santa.send_notification(
        'Ваш подарок получен',
        'Ваш АПП отметил в профиле, что подарок получен.')
    member.save()
    # Drop cached member entries for both sides of the exchange.
    cache.delete_many([
        'member_%d_%s' % (member.user.id, pk),
        'member_%d_%s' % (member.santa.user.id, pk),
    ])
    serializer = MemberSerializer(member)
    return Response(serializer.data)
def delete(self, *args, **kwargs): directory = self.directory # Just doing a plain delete will collect all related objects in memory # before deleting: translation projects, stores, units, quality checks, # pootle_store suggestions, pootle_app suggestions and submissions. # This can easily take down a process. If we do a translation project # at a time and force garbage collection, things stay much more # managable. import gc gc.collect() for tp in self.translationproject_set.iterator(): tp.delete() gc.collect() super(Project, self).delete(*args, **kwargs) directory.delete() # FIXME: far from ideal, should cache at the manager level instead cache.delete(CACHE_KEY) User = get_user_model() users_list = User.objects.values_list('username', flat=True) cache.delete_many(map(lambda x: 'projects:accessible:%s' % x, users_list))
def save(self, *args, **kwargs):
    # Remember whether this is the first save: the guid is assigned once,
    # and EXIF data is only extracted on initial save.
    initial_save = not self.id
    if not self.id:
        self.guid = str(uuid.uuid1())
    super(Image, self).save(*args, **kwargs)
    # # clear the cache
    # caching.instance_cache_clear(self, self.pk)
    # caching.cache_clear(PHOTOS_KEYWORDS_CACHE, key=self.pk)
    # # re-add instance to the cache
    # caching.instance_cache_add(self, self.pk)
    # Non-public photos stored on S3 must not be world-readable.
    if not self.is_public_photo() or not self.is_public_photoset():
        if hasattr(settings, "USE_S3_STORAGE") and settings.USE_S3_STORAGE and hasattr(self.image, "file"):
            set_s3_file_permission(self.image.file, public=False)
    # Purge the cached entries tracked for this photo, then the tracker key.
    cache_set = cache.get("photos_cache_set.%s" % self.pk)
    if cache_set is not None:
        # TODO remove cached images
        cache.delete_many(cache.get("photos_cache_set.%s" % self.pk))
        cache.delete("photos_cache_set.%s" % self.pk)
    if initial_save:
        try:
            # Re-save once so EXIF-derived fields get persisted.
            # (Safe from recursion: initial_save is False on the second pass.)
            exif_exists = self.get_exif_data()
            if exif_exists:
                self.save()
        except AttributeError:
            pass
def send_gift(self, request, pk=None):
    """Mark that the member has sent their gift."""
    member = self.get_member(request, pk)
    # Validation messages are user-facing and intentionally kept in Russian.
    if member.season.is_closed():
        raise ValidationError('Сезон перенесен в архив.')
    if not member.giftee:
        raise ValidationError('У вас нет АПП.')
    if member.is_gift_sent():
        raise ValidationError('Подарок уже отправлен.')
    member.gift_sent = timezone.now()
    # Notify the giftee that a gift is on its way.
    member.giftee.send_notification(
        'Вам отправлен подарок',
        'Пожалуйста, не забудьте отметить его получение.')
    member.save()
    # Drop cached member entries for both sides of the exchange.
    cache.delete_many([
        'member_%d_%s' % (member.user.id, pk),
        'member_%d_%s' % (member.giftee.user.id, pk),
    ])
    serializer = MemberSerializer(member)
    return Response(serializer.data)
def new_notecard(request, section_id): vars = {} # Look up the game or raise 404 try: section = Section.objects.get(id__iexact=section_id) except Section.DoesNotExist: raise Http404 if section.semester.user != request.user: raise Http404 if request.method == 'POST': #if the form has been submitted form = NotecardForm(request.POST) # a form bound to the POST data if form.is_valid(): #all validation rules pass # process the data in form.cleaned_data notecard_name = form.cleaned_data['notecard_name'] notecard_body = form.cleaned_data['notecard_body'] notecard = Notecard(section=section, notecard_name=notecard_name, notecard_body=notecard_body) notecard.save() # set and clear cache for the cache_key = str(section_id) + 'users_notecard_list_cache_key' cache.delete_many([cache_key, 'unknown_list_cache_key']) url = reverse('notecard_list', kwargs={'section_id': section_id}) return HttpResponseRedirect(url) else: form = NotecardForm() # an unbound form #Package up some variables to return vars['section'] = section vars['form'] = form context = RequestContext(request) return render_to_response('notecards/new_notecard.html', vars, context_instance=context)
def edit_semester(request, semester_id):
    """Rename a semester (owner only), keeping its cache entries fresh."""
    cache_key = str(semester_id) + 'single_semester_cache_key'
    semester = cache.get(cache_key)
    if not semester:
        semester = get_object_or_404(Semester, pk=semester_id)
        cache.set(cache_key, semester, cache_time)
    vars = {}
    if semester.user != request.user:
        raise Http404
    if request.method == 'POST':  # if the form has been submitted
        form = SemesterForm(request.POST, instance=semester)  # a form bound to the POST data
        if form.is_valid():  # all validation rules pass
            # process the data in form.cleaned_data
            semester.semester_name = form.cleaned_data['semester_name']
            semester.save()
            # Invalidate both the single-semester entry and the owner's list.
            cache_key_2 = str(request.user) + 'users_semester_list_cache_key'
            cache.delete_many([cache_key, cache_key_2])
            return HttpResponseRedirect('/')
    else:
        form = SemesterForm(initial={'semester_name': semester.semester_name})
    # Package up some variables to return
    vars['semester'] = semester
    vars['form'] = form
    context = RequestContext(request)
    return render_to_response('notecards/edit_semester.html', vars, context_instance=context)
def reset(cls):
    """Mark settings as unloaded and purge every cached Setting entry."""
    cls._loaded = False
    all_setting_keys = Setting.objects.all().values_list('key', flat=True)
    cache.delete_many([cls._get_cache_key(key) for key in all_setting_keys])
def on_post_delete(sender, **kwargs):
    """Expire ultracache cache keys affected by this object
    """
    if not invalidate:
        return
    if issubclass(sender, Model):
        obj = kwargs["instance"]
        if isinstance(obj, Model):
            # get_for_model itself is cached
            try:
                ct = ContentType.objects.get_for_model(sender)
            except RuntimeError:
                # This happens when ultracache is being used by another product
                # during a test run.
                return
            # Expire cache keys. 'ucache-<ct>-<pk>' holds the list of cache
            # keys that rendered this object.
            key = "ucache-%s-%s" % (ct.id, obj.pk)
            to_delete = cache.get(key, [])
            if to_delete:
                try:
                    cache.delete_many(to_delete)
                except NotImplementedError:
                    # Backend lacks delete_many; fall back to one-by-one.
                    for k in to_delete:
                        cache.delete(k)
            cache.delete(key)
            # Invalidate paths in reverse caching proxy.
            # 'ucache-pth-<ct>-<pk>' holds the URL paths to purge.
            key = "ucache-pth-%s-%s" % (ct.id, obj.pk)
            if purger is not None:
                for path in cache.get(key, []):
                    purger(path)
            cache.delete(key)
def invalidate_cache(self):
    """Remove this block's cached entries and drop it from the registry."""
    registry_key = self._get_registry_cache_key()
    registry = self._get_registry_cache()
    block_id = self.reg_item.id
    cache.delete_many(registry[block_id])
    del registry[block_id]
    cache.set(registry_key, registry)
def finished_submission(sub):
    """Invalidate completion/attempt caches for the submitting user and,
    for contest submissions, the contest participation.
    """
    # Consistency fix: 'user_attempted' used %s where every sibling key
    # uses %d for the same integer id. Output is identical for ints, but
    # the format now matches the rest of the key scheme.
    keys = ['user_complete:%d' % sub.user_id, 'user_attempted:%d' % sub.user_id]
    if hasattr(sub, 'contest'):
        participation = sub.contest.participation
        keys += ['contest_complete:%d' % participation.id]
        keys += ['contest_attempted:%d' % participation.id]
    cache.delete_many(keys)
def edit_section(request, section_id):
    """Rename a section (owner only), keeping its cache entries fresh."""
    vars = {}
    cache_key = str(section_id) + 'single_section_cache_key'
    section = cache.get(cache_key)
    if not section:
        section = get_object_or_404(Section, pk=section_id)
        cache.set(cache_key, section, cache_time)
    # check if section's user is logged in user
    if section.semester.user != request.user:
        raise Http404
    if request.method == 'POST':  # if the form has been submitted
        form = SectionForm(request.POST, instance=section)  # a form bound to the POST data
        if form.is_valid():  # all validation rules pass
            # process the data in form.cleaned_data
            section.section_name = form.cleaned_data['section_name']
            section.save()
            # Invalidate both the single-section entry and the semester's
            # section list.
            semester_id = section.semester.id
            cache_key_2 = str(semester_id) + 'users_section_list_cache_key'
            cache.delete_many([cache_key, cache_key_2])
            url = reverse('section_list', kwargs={'semester_id': section.semester.id})
            return HttpResponseRedirect(url)
    else:
        form = SectionForm(initial={'section_name': section.section_name})  # an unbound form
    # Package up some variables to return
    vars['section'] = section
    vars['form'] = form
    context = RequestContext(request)
    return render_to_response('notecards/edit_section.html', vars, context_instance=context)
def submit(request):
    """AJAX endpoint: build an Order with Content rows from posted JSON.

    Expects a JSON list of {'name': ..., 'value': ...} items in the raw
    POST body. The order is created lazily on the first positive quantity.
    """
    message = 'Something went wrong :('
    if request.is_ajax():
        if request.method == 'POST':
            # Here we can access the POST data
            itemlist = simplejson.loads(request.raw_post_data)
            order = None
            for item in itemlist:
                # Idiom fix: was `if o == None`; also collapse the nested
                # truthiness/positivity checks into one condition.
                if item['value'] and int(item['value']) > 0:
                    if order is None:
                        order = Order(tendered="1000", table="1")
                        order.save()
                    content = Content(order=order, quantity=item['value'])
                    # First character of the name selects the drink type
                    # ("d" -> takeaway=False, "t" -> takeaway=True); the
                    # beverage pk follows from position 2 onwards.
                    name = str(item['name'])
                    if name[:1] == "d":
                        content.takeaway = False
                    elif name[:1] == "t":
                        content.takeaway = True
                    content.beverage = Beverage.objects.get(pk=name[2:])
                    content.save()
            cache.delete_many(['orders_current_orders', 'orders_current_contents'])
            message = "XHR Complete"
    return HttpResponse(message)
def add(self, node):
    """ Add a new record into OpenLabor db """
    openlabor_record = self.to_external(node)
    ## openlabor sync: POST the converted record to the remote API
    response = requests.post(self.post_url, openlabor_record)
    if response.status_code != 200:
        message = 'ERROR while creating "%s". Response: %s' % (node.name, response.content)
        logger.error('== %s ==' % message)
        print message
        return False
    try:
        data = response.json()
    except json.JSONDecodeError as e:
        logger.error('== ERROR: JSONDecodeError %s ==' % e)
        print 'JSONDecodeError: %s\n\nresponse: %s' % (e, response.content)
        return False
    # Link the local node to the remote record id returned by the API.
    external = NodeExternal.objects.create(node=node, external_id=int(data['AddedJobId']))
    message = 'New record "%s" saved in CitySDK through the HTTP API"' % node.name
    self.verbose(message)
    print message
    logger.info('== %s ==' % message)
    # clear cache: both serialized node-list representations for this layer
    cache_key1 = 'layer_%s_nodes.json' % self.layer.id
    cache_key2 = 'layer_%s_nodes.geojson' % self.layer.id
    cache.delete_many([cache_key1, cache_key2])
    return True
def invalidate_data_mart_before_delete(sender, instance, **kwargs):
    """Purge all active data-mart term keys, then run the after-save invalidation."""
    cache.delete_many(get_data_mart_all_active_terms_keys())
    invalidate_data_mart_after_save(sender, instance, **kwargs)
def _clean_many(prefix):
    """Delete the cached value of ``prefix`` for every configured language."""
    cache.delete_many([_get_key(prefix, lang[0]) for lang in settings.LANGUAGES])
def test_delete_many_empty_generator(self):
    """Test the delete_many cache operation with an empty generator"""
    outcome = cache.delete_many(key for key in [])
    self.assertFalse(bool(outcome))
def update_live_state(
    self,
    request,
    pk=None,
):
    """View handling AWS POST request to update the video live state.

    Parameters
    ----------
    request : Type[django.http.request.HttpRequest]
        The request on the API endpoint, it should contain a payload with
        the following fields:
            - state: state of the live, should be "live" or "stopped",
    pk: string
        The primary key of the video

    Returns
    -------
    Type[rest_framework.response.Response]
        HttpResponse acknowledging the success or failure of the live
        state update operation.
    """
    now = timezone.now()
    stamp = to_timestamp(now)
    msg = request.body
    serializer = serializers.UpdateLiveStateSerializer(data=request.data)
    if serializer.is_valid() is not True:
        return Response(serializer.errors, status=400)
    # Check if the provided signature is valid against any secret in our list
    if not validate_signature(request.headers.get("X-Marsha-Signature"), msg):
        return Response("Forbidden", status=403)
    with transaction.atomic():
        try:
            # Lock the row so concurrent AWS callbacks serialize their updates.
            video = Video.objects.select_for_update(nowait=True).get(pk=pk)
        except Video.DoesNotExist as video_does_not_exists:
            raise Http404 from video_does_not_exists
        except OperationalError:
            # Another callback already holds the lock; acknowledge and let it win.
            return Response({"success": True})
        # Deduplicate callbacks: each AWS request id is processed only once.
        request_ids = video.live_info.get("medialive", {}).get("request_ids", [])
        if serializer.validated_data["requestId"] in request_ids:
            return Response({"success": True})
        request_ids.append(serializer.validated_data["requestId"])
        video.live_info["medialive"].update({"request_ids": request_ids})
        video.save()
        live_info = video.live_info
        live_info.update(
            {"cloudwatch": {"logGroupName": serializer.validated_data["logGroupName"]}}
        )
        if serializer.validated_data["state"] == defaults.RUNNING:
            video.live_state = defaults.RUNNING
            live_info.update({"started_at": stamp})
            live_info.pop("stopped_at", None)
            # if keys with no timeout were cached for this video, it needs to be reinitialized
            key_cache_video = f"{defaults.VIDEO_ATTENDANCE_KEY_CACHE}{video.id}"
            if list_keys := cache.get(key_cache_video, None):
                cache.delete_many(list_keys)
                cache.delete(key_cache_video)
def clean_cache(cls, id):
    """Drop every cache entry registered for the given id."""
    stale_keys = cls.get_cache_keys_for_id(id)
    cache.delete_many(stale_keys)
def delete_cache(self):
    """Remove this object's rate-limit and languages cache entries."""
    cache.delete_many([
        self.rate_limit_cache,
        self.languages_cache,
    ])
def clear_cache(cls, assessment_id):
    """Drop the cached tag list and tag tree for an assessment."""
    templates = (cls.cache_template_taglist, cls.cache_template_tagtree)
    keys = tuple(template.format(assessment_id) for template in templates)
    logging.info('removing cache: {0}'.format(', '.join(keys)))
    cache.delete_many(keys)
def save(self, *args, **kwargs):
    """Persist the announcement after evicting every user's header fragment."""
    stale = [
        make_template_fragment_key('header', (sleeper.username,))
        for sleeper in Sleeper.objects.all()
    ]
    cache.delete_many(stale)
    super(Announcement, self).save(*args, **kwargs)
def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    # Drop every cache-backed session so all users are logged out.
    from django.contrib.sessions.backends.cache import KEY_PREFIX
    from django.core.cache import cache

    # NOTE(review): cache.keys() is not part of the standard Django cache
    # API; this presumably relies on a backend (e.g. django-redis) that
    # supports pattern lookups — verify against the configured backend.
    session_keys = cache.keys(KEY_PREFIX + "*")
    cache.delete_many(session_keys)
def delete_instance(model, *instance_or_pk):
    """
    Purges the cache keys for the instances of this model.
    """
    keys = [instance_key(model, item) for item in instance_or_pk]
    cache.delete_many(keys)
def clear_blocklist(*args, **kw):
    # Something in the blocklist changed; invalidate all responses.
    master = redisutils.connections['master']
    cache.delete_many(master.smembers('blocklist:keys'))
def send(self):
    # Fetch the payload and its counter in one cache round trip.
    data = cache.get_many([self.data_key, self.counter_key])
    if not data:
        return
    # Delete before mailing so a concurrent send can't pick up the same data.
    cache.delete_many([self.data_key, self.counter_key])
    # NOTE(review): get_many may return a partial dict (only one of the two
    # keys present), in which case the lookups below raise KeyError —
    # confirm whether both keys always expire together.
    self.mail(data[self.counter_key], data[self.data_key])
def invalidate(self, language: Optional[Language] = None, childs: bool = False):
    """Invalidate local and cache data."""
    self.clear()
    stale_keys = self.get_invalidate_keys(language, childs)
    cache.delete_many(stale_keys)
def ready(self):
    # Delete the cached views list on application reload.
    base_key = _get_setting('PERMISSIONS_AUDITOR_CACHE_KEY')
    cache.delete_many([base_key, base_key + '_BASE_URL'])
def eat_ID(request, food_ID):
    """Append the food to the user's history and drop recommendation caches."""
    current_user = request.user
    current_user.profile.food_history.append(food_ID)
    current_user.save()
    cache.delete_many(['target', 'best_IDs', 'best_ratings'])
    return redirect('recommender:history')
def post(self, request, request_id):
    """Update analysis display options for a query and rebuild dependent caches."""
    try:
        data = json.loads(request.body)
        validate(data, ANALYSIS_ID_SCHEMA)
        ids: Optional[Ids] = cache.get(f'{request_id}/analysis_ids')
        if ids is None:
            return HttpResponseNotFound('Query Analysis Ids not available')
        # Re-key the posted payload: [[idx_list, label], opts] -> {(tuple, label): opts}
        data = {(tuple(idx[0]), idx[1]): opt for idx, opt in data}
        if not any(v['show'] for v in data.values()):
            return HttpResponseBadRequest("cannot hide all analyses")
        for key in ids:
            try:
                # Bump the version only when the display name actually changed.
                if ids[key]['name'] != data[key]['name']:
                    ids[key] = {
                        **data[key],
                        'version': ids[key]['version'] + 1
                    }
                else:
                    ids[key] = {
                        **data[key],
                        'version': ids[key]['version']
                    }
            except KeyError:
                # Posted payload omitted this analysis; keep the cached entry.
                pass
        cache.set(f'{request_id}/analysis_ids', ids)
        cached_result = cache.get_many([
            f'{request_id}/target_genes',
            f'{request_id}/tabular_output_unfiltered'
        ])
        result = cached_result[f'{request_id}/tabular_output_unfiltered']
        result = filter_df_by_ids(result, ids)
        try:
            # Restrict to the user's gene list when one was uploaded.
            user_lists = cached_result[f'{request_id}/target_genes']
            result = result[result.index.str.upper().isin(
                user_lists[0].index.str.upper())].dropna(axis=1, how='all')
            if result.empty:
                raise QueryError(
                    "Empty result (user list too restrictive).")
            result = reorder_data(result)
        except KeyError:
            # No user gene list cached; keep the unrestricted result.
            pass
        cache.set(f'{request_id}/tabular_output', result)  # refresh filtered tabular output
        # delete cache keys and refresh cache here.
        cache.delete_many([
            # formatted output
            f'{request_id}/formatted_tabular_output',
            # network
            f'{request_id}/network',
            # AUPR curve
            f'{request_id}/figure',
            f'{request_id}/figure_data',
            # network stats
            f'{request_id}/stats',
            # Gene list enrichment
            f'{request_id}/list_enrichment',
            f'{request_id}/list_enrichment_legend',
            f'{request_id}/list_enrichment_data',
            # motif enrichment
            *(f'{request_id}/{r}_enrich' for r in MOTIFS.regions),
            # analysis enrichment
            f'{request_id}/analysis_enrichment',
            # summary
            f'{request_id}/summary'
        ])
        return JsonResponse(list(ids.items()), status=201, safe=False, encoder=PandasJSONEncoder)
    except (json.JSONDecodeError, ValidationError, QueryError) as e:
        return HttpResponseBadRequest(e)
    except KeyError:
        return HttpResponseNotFound(
            'Query does not exist. Please start a new query.')
def save(self, *args, **kwargs):
    """Save the model after purging every blog-related cache entry."""
    # NOTE: cache.keys() requires a backend with pattern support (e.g. django-redis).
    stale_keys = cache.keys("*blog*")
    cache.delete_many(stale_keys)
    return super().save(*args, **kwargs)
def get_instance(model, instance_or_pk, timeout=None, using=None):
    """
    Returns the ``model`` instance with a primary key of ``instance_or_pk``.

    If the data is cached it will be returned from there, otherwise the
    regular Django ORM is queried for this instance and the data stored
    in the cache.

    If omitted, the timeout value defaults to
    ``settings.CACHE_TOOLBOX_DEFAULT_TIMEOUT`` instead of 0 (zero).

    Example::

        >>> get_instance(User, 1) # Cache miss
        <User: lamby>
        >>> get_instance(User, 1) # Cache hit
        <User: lamby>
        >>> User.objects.get(pk=1) == get_instance(User, 1)
        True
    """
    pk = getattr(instance_or_pk, "pk", instance_or_pk)
    primary_model = model
    descriptors = getattr(primary_model, "_cache_fetch_related", ())
    models = [model, *(d.related.field.model for d in descriptors)]
    # Note: we're assuming that the relations are primary key foreign keys, and
    # so all have the same primary key. This matches the assumption which
    # `cache_relation` makes.
    keys_to_models = {instance_key(model, instance_or_pk): model for model in models}
    data_map = cache.get_many(tuple(keys_to_models.keys()))
    instance_map = {}
    # Only use the cache when every key (primary + related) was a hit; a
    # partial hit falls through to the database below.
    if data_map.keys() == keys_to_models.keys():
        try:
            for key, data in data_map.items():
                model = keys_to_models[key]
                instance_map[key] = deserialise(model, data, pk, using)
        except:  # noqa — intentionally broad: any deserialise failure falls back to the DB
            # Error when deserialising - remove from the cache; we will
            # fallback and return the underlying instance
            cache.delete_many(tuple(keys_to_models.keys()))
        else:
            key = instance_key(primary_model, instance_or_pk)
            primary_instance = instance_map[key]
            # Attach each related instance on its relation-cache attribute
            # so later accesses don't hit the database.
            for descriptor in descriptors:
                related_instance = instance_map[
                    instance_key(
                        descriptor.related.field.model,
                        instance_or_pk,
                    )
                ]
                related_cache_name = get_related_cache_name(
                    get_related_name(descriptor),
                )
                setattr(primary_instance, related_cache_name, related_instance)
            return primary_instance
    related_names = [d.related.field.related_query_name() for d in descriptors]
    # Use the default manager so we are never filtered by a .get_query_set()
    queryset = primary_model._default_manager.using(using)
    if related_names:
        # NB: select_related without args selects all it can find, which we don't want.
        queryset = queryset.select_related(*related_names)
    primary_instance = queryset.get(pk=pk)
    instances = [
        primary_instance,
        *(getattr(primary_instance, x, None) for x in related_names),
    ]
    # Serialise and re-populate the cache for the primary and all related rows.
    cache_data = {}
    for instance in instances:
        if instance is None:
            continue
        key = instance_key(instance._meta.model, instance)
        cache_data[key] = serialise(instance)
    if timeout is None:
        timeout = app_settings.CACHE_TOOLBOX_DEFAULT_TIMEOUT
    cache.set_many(cache_data, timeout)
    return primary_instance
def delete_caches(cls, model, ids):
    """Remove both the plain and the JSON cache entries for every given id."""
    names = []
    for json_flag in (False, True):
        names.extend(cls._get_cache_name(model, id, json=json_flag) for id in ids)
    logging.debug("Removing caches: {}".format(', '.join(names)))
    cache.delete_many(names)
def reset(request):
    """Wipe the user's food history and drop recommendation caches."""
    current_user = request.user
    current_user.profile.food_history = []
    current_user.save()
    cache.delete_many(['target', 'best_IDs', 'best_ratings'])
    return redirect('recommender:index')
def reset_counters(**counters):
    """Delete the cache entry backing each named counter."""
    cache.delete_many([_key(name, value) for name, value in counters.items()])
def save(self, *args, **kwargs):
    """Fill in the slug on first save, then purge all blog cache entries."""
    if not self.slug:
        self.slug = slugify(self.title)
    # NOTE: cache.keys() requires a backend with pattern support (e.g. django-redis).
    stale_keys = cache.keys("*blog*")
    cache.delete_many(stale_keys)
    return super().save(*args, **kwargs)
def cache_clear(self, request: Request) -> Response:
    """Clear flow cache"""
    flow_keys = cache.keys("flow_*")
    cache.delete_many(flow_keys)
    LOGGER.debug("Cleared flow cache", keys=len(flow_keys))
    return Response(status=204)
def _on_model_save(instance, **kwargs):
    """ Called when a model is saved. """
    stale_keys = get_object_cache_keys(instance)
    cache.delete_many(stale_keys)
def ClearMsgListCache():
    """Invalidate the cached full news/message list."""
    cache.delete_many(['news:.full_list'])
def expire_assets_amount(self):
    """Invalidate the assets-amount cache for this node and all its ancestors."""
    stale = [
        self._assets_amount_cache_key.format(node_key)
        for node_key in self.get_ancestor_keys(with_self=True)
    ]
    cache.delete_many(stale)
def invalidate_cache(self):
    """Drop the list, per-municipality and detail cache entries for this record."""
    stale = [
        "suministro-list",
        f"suministro-municipio-list:{self.municipality}",
        f"suministro-detail:{self.slug}",
    ]
    cache.delete_many(stale)
def finished_submission(sub):
    """Invalidate completion caches for the user and, if present, the contest."""
    keys = ['user_complete:%d' % sub.user_id]
    if hasattr(sub, 'contest'):
        keys.append('contest_complete:%d' % sub.contest.participation.id)
    cache.delete_many(keys)
def clean_cache(keys):
    """Delete the given cache keys.

    Fix: the previous version bound delete_many's return value to an
    unused local; delete_many returns None, so the assignment was dead code.
    """
    cache.delete_many(keys)
def invalidate_schematics(self):
    """Drop the cached schematic and chemistry-summary entries for this object."""
    suffixes = ("_cons_schematic", "_wt_schematic", "_chem_summary")
    cache.delete_many([self.name + suffix for suffix in suffixes])