Пример #1
0
def invalidate_cache(model_class, objects, **extra_keys):
    """
    Flushes the cache of any cached objects associated with this instance.

    Explicitly set a None value instead of just deleting so we don't have any race
    conditions where:
        Thread 1 -> Cache miss, get object from DB
        Thread 2 -> Object saved, deleted from cache
        Thread 1 -> Store (stale) object fetched from DB in cache
    Five seconds should be more than enough time to prevent this from happening for
    a web app.
    """
    invalidation_dict = {}
    accessor_set = cache_signals.get_global_signals(model_class)
    for obj in objects:
        for (accessor_path, lookup_type, negate) in accessor_set:
            for value in get_values(obj, accessor_path):
                invalidation_key = get_invalidation_key(
                    model_class._meta.db_table, accessor_path=accessor_path, negate=negate, value=value, save=False
                )
                invalidation_dict[invalidation_key] = None

    # Nothing registered for this model -> nothing to flush.
    if not invalidation_dict:
        return

    # Each invalidation key maps to a comma-separated list of the cache keys
    # of queries that depend on it; pull those lists in one round trip.
    invalidation_dict.update(cache.get_many(invalidation_dict.keys()))

    cache_keys = set()
    for obj_key, cache_key_list in invalidation_dict.iteritems():
        if cache_key_list:
            cache_keys.update(cache_key_list.split(","))

    # BUGFIX: the original set keys to None and then immediately deleted the
    # same keys, which removed the placeholders and reintroduced the race the
    # docstring describes. Instead (matching the corrected variant of this
    # function elsewhere in the file): overwrite the dependent query-cache
    # entries with a short-lived None placeholder...
    if cache_keys:
        cache.set_many(dict([(key, None) for key in cache_keys]), 5)

    # ...and drop only the invalidation-mapping keys (plus any extras) outright.
    invalidation_dict.update(extra_keys)
    cache.delete_many(invalidation_dict.keys())
Пример #2
0
 def __iter__(self):
     """
     Iterate over the queryset, serving whole-result-set hits from cache.

     On a cache hit the stored objects are yielded directly. On a miss the
     underlying iterator runs, select-reverse relations are attached to each
     object, and — when the queryset opted in via ``_cache_query`` — the
     collected results are written back to the cache afterwards.
     """
     results = cache.get(self.result_key)
     if results is not None:
         for obj in results:
             if RUNNING_TESTS:
                 # Mark cache provenance so tests can assert on it.
                 obj = self._set_field(obj, 'from_cache', True)
             yield obj
         # BUGFIX: `raise StopIteration` inside a generator becomes a
         # RuntimeError under PEP 479 (Python 3.7+); a bare return ends
         # the generator cleanly in both Python 2 and 3.
         return

     results = []
     cache_query = getattr(self.queryset, '_cache_query', False)
     pk_name = self.queryset.model._meta.pk.name
     self.queryset._fill_select_reverse_cache()

     # sorted() copies the keys, so this also works where .keys() is a
     # non-sortable view object (Python 3).
     reversemapping_keys = sorted(self.queryset._reversemapping)

     for obj in self.iterator():
         for related_name in reversemapping_keys:
             reversemap = self.queryset._target_maps[related_name]
             related_split = related_name.split(LOOKUP_SEP)
             for related_obj, related_field in self._nested_select_reverse(obj, related_split):
                 # Attach the prefetched reverse objects (empty list default).
                 val = reversemap.get(get_values(related_obj, pk_name), [])
                 related_obj = self._set_field(related_obj, related_field, val)

         if cache_query:
             results.append(obj)
         if RUNNING_TESTS:
             obj = self._set_field(obj, 'from_cache', False)
         yield obj

     if cache_query:
         self.cache_results(results)
Пример #3
0
 def __iter__(self):
     """
     Iterate over the queryset, serving whole-result-set hits from cache
     when the queryset opted in via ``_cache_query``.

     On a hit the stored objects are yielded directly. On a miss (or when
     caching is disabled) the parent iterator runs, select-reverse relations
     are attached to each object, and the collected results are written back
     to the cache afterwards when caching is enabled.
     """
     cache_query = getattr(self.queryset, '_cache_query', False)

     if cache_query:
         results = cache.get(self.result_key)
         if results is not None:
             for obj in results:
                 if conf.CACHEBOT_DEBUG_RESULTS:
                     # Mark cache provenance for debugging/inspection.
                     set_value(obj, 'from_cache', True)
                 yield obj
             # BUGFIX: `raise StopIteration` inside a generator becomes a
             # RuntimeError under PEP 479 (Python 3.7+); a bare return ends
             # the generator cleanly in both Python 2 and 3.
             return

     results = []
     pk_name = self.queryset.model._meta.pk.name
     self.queryset._fill_select_reverse_cache()

     # sorted() copies the keys, so this also works where .keys() is a
     # non-sortable view object (Python 3).
     reversemapping_keys = sorted(self.queryset._reversemapping)

     for obj in self.parent_class.iterator(self.queryset):
         for related_name in reversemapping_keys:
             reversemap = self.queryset._target_maps[related_name]
             related_split = related_name.split(LOOKUP_SEP)
             for related_obj, related_field in self._nested_select_reverse(obj, related_split):
                 # Attach the prefetched reverse objects (empty list default).
                 val = reversemap.get(get_values(related_obj, pk_name), [])
                 set_value(related_obj, related_field, val)

         if cache_query:
             results.append(obj)
         if conf.CACHEBOT_DEBUG_RESULTS:
             set_value(obj, 'from_cache', False)
         yield obj

     if cache_query:
         self.cache_results(results)
Пример #4
0
 def get_invalidation_keys(self, results):
     """
     Generate an invalidation key for every result; when the query spans
     multiple tables, keys for the related rows are produced as well.
     """
     for result in results:
         for accessor, related_model in self.queryset._related_fields.iteritems():
             pk_attname = related_model._meta.pk.attname
             # Make sure exact-pk lookups on the related model are tracked.
             cache_signals.register(related_model, pk_attname, 'exact')
             for pk_value in get_values(result, accessor):
                 yield get_invalidation_key(
                     related_model._meta.db_table,
                     accessor_path=pk_attname,
                     value=pk_value)
Пример #5
0
 def get_invalidation_keys(self, results):
     """
     Yield one invalidation key per result row; if the query joins other
     tables, additionally yield keys for the related rows it touched.
     """
     field_map = self.queryset._related_fields
     for row in results:
         for lookup_path, model in field_map.iteritems():
             pk = model._meta.pk.attname
             # Register the related model so exact-pk lookups are tracked.
             cache_signals.register(model, pk, 'exact')
             table = model._meta.db_table
             for val in get_values(row, lookup_path):
                 key = get_invalidation_key(table, accessor_path=pk, value=val)
                 yield key
Пример #6
0
def invalidate_cache(model_class, objects, **extra_keys):
    """
    Flush every cached entry associated with the given model instances.

    Cached query keys are overwritten with a short-lived None placeholder
    rather than deleted outright, which closes this race:
        Thread 1 -> cache miss, fetches object from DB
        Thread 2 -> object saved, entry deleted from cache
        Thread 1 -> stores the (now stale) object it fetched
    The placeholder window is ample protection for a web app.
    """
    table = model_class._meta.db_table
    invalidation_dict = {}
    for instance in objects:
        for accessor_path, lookup_type, negate in cache_signals.get_global_signals(model_class):
            if lookup_type == 'exact' and not negate:
                # One invalidation key per value the accessor resolves to.
                values = get_values(instance, accessor_path)
            else:
                # Non-exact / negated lookups collapse to one wildcard key.
                values = ('',)
            for value in values:
                key = get_invalidation_key(
                    table,
                    accessor_path=accessor_path,
                    negate=negate,
                    value=value)
                invalidation_dict[key] = None

    # Nothing registered for this model -> nothing to flush.
    if not invalidation_dict:
        return

    # Each invalidation key stores a comma-separated list of the cache keys
    # of queries depending on it; fetch those lists in a single round trip.
    invalidation_dict.update(cache.get_many(invalidation_dict.keys()))

    dependent_keys = set()
    for stored in invalidation_dict.itervalues():
        if stored:
            dependent_keys.update(stored.split(','))

    # Placeholder-set the dependent query-cache entries (see docstring)...
    if dependent_keys:
        cache.set_many(dict.fromkeys(dependent_keys),
                       conf.CACHE_INVALIDATION_TIMEOUT)

    # ...then drop the invalidation-mapping keys (plus extras) outright.
    invalidation_dict.update(extra_keys)
    cache.delete_many(invalidation_dict.keys())