Example #1
def fetch_history_from_redis(room_id, num_msgs=None, post_delete=False):
    """
        Fetches the most recent num_msgs from Redis on a particular room
    """
    try:

        room_name = room_id

        REDIS_CONNECTION = cache.get_client('')
        if num_msgs is None:
            history_bytes = REDIS_CONNECTION.lrange(
                cache.make_key(f'HISTORY_{room_name}'), 0, -1)
        else:
            if num_msgs <= 0:
                return []
            else:
                history_bytes = REDIS_CONNECTION.lrange(
                    cache.make_key(f'HISTORY_{room_name}'), 0, num_msgs - 1)
        history = list(
            json.loads(msg)
            for msg in history_bytes)  # history is now a Python List of Dict
        if post_delete:
            REDIS_CONNECTION.delete(cache.make_key(f"HISTORY_{room_name}"))
        print("in clientwidget_updated fetch_history_from_redis: ", room_name,
              history)
        return history
    except Exception as ex:
        print(ex)
        return []
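A minimal usage sketch of the pattern above. The raw Redis client bypasses Django's key prefixing and versioning, so any key handed to it must go through cache.make_key() to match entries written through the cache API; cache.get_client('') follows the example's own (django-redis style) accessor, and the helper below is illustrative, not part of the original project.

import json
from django.core.cache import cache

def peek_latest_message(room_id):
    # Same raw-client access pattern as fetch_history_from_redis above.
    client = cache.get_client('')
    raw = client.lrange(cache.make_key(f'HISTORY_{room_id}'), -1, -1)
    return json.loads(raw[0]) if raw else None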
def calc_last_modified(request, *args, **kwargs):
    """
    Returns the file's modified time as the last-modified date
    """
    assert "cache_name" in kwargs, "Must specify cache_name as a keyword arg."

    try:
        cache = get_cache(kwargs["cache_name"])
        assert isinstance(cache, FileBasedCache) or isinstance(cache, LocMemCache), "requires file-based or mem-based cache."
    except InvalidCacheBackendError:
        return None

    key = django_get_cache_key(request, cache=cache)
    if key is None or not cache.has_key(key):
        return None

    if isinstance(cache, FileBasedCache):
        fname = cache._key_to_file(cache.make_key(key))
        if not os.path.exists(fname):  # would happen only if cache expired AFTER getting the key
            return None
        last_modified = datetime.datetime.fromtimestamp(os.path.getmtime(fname))

    elif isinstance(cache, LocMemCache):
        # It's either in the cache (and valid), and therefore anything since the server
        #   started would be fine.
        # Or, it's not in the cache at all.
        creation_time = cache._expire_info[cache.make_key(key)] - settings.CACHE_TIME
        last_modified = datetime.datetime.fromtimestamp(creation_time)

    return last_modified
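A hedged sketch of how a helper like calc_last_modified() is typically wired to a view with Django's condition decorator; the view name is illustrative, and the cache_name kwarg would have to arrive via the URLconf so the helper can locate the right backend.

from django.views.decorators.http import condition

@condition(last_modified_func=calc_last_modified)
def cached_page(request, *args, **kwargs):
    # kwargs must carry cache_name (e.g. kwargs={'cache_name': 'file_cache'} in the URLconf).
    ...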
Example #3
def recalculate_user_stats(user_id):
    if not cache.set(
        "user_stats_recently_recalculated_{}".format(user_id), 1, timeout=300, nx=True
    ):
        return "skipped"

    u = User.objects.get(id=user_id)

    on_time = Ping.objects.filter(user=u).aggregate(Sum("on_time"))["on_time__sum"]
    detection_count = Detection.objects.filter(user=u).filter(visible=True).count()

    r = get_redis_connection()

    if on_time:
        r.zadd(cache.make_key("on_time"), {user_id: on_time})

        if not r.zscore(cache.make_key("start_time"), user_id):
            start_time = (
                Ping.objects.filter(user=u)
                .filter(on_time__gt=0)
                .aggregate(Min("timestamp"))["timestamp__min"]
            )
            r.zadd(cache.make_key("start_time"), {user_id: start_time})

    r.zadd(cache.make_key("detection_count"), {user_id: detection_count})

    _ = get_user_detections_page(u, 1, preload=True)

    return on_time, detection_count
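The cache.set(..., nx=True) guard at the top is a debounce: django-redis only performs the SET when the key is absent and returns False otherwise, so at most one caller per five-minute window does the recalculation. A minimal illustration of the same idea (key and helper names are made up):

from django.core.cache import cache

def maybe_run(job_id):
    if not cache.set(f"job_recently_run_{job_id}", 1, timeout=300, nx=True):
        return "skipped"              # another caller claimed this window
    return do_expensive_work(job_id)  # hypothetical helper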
Example #4
def calc_last_modified(request, *args, **kwargs):
    """
    Returns the file's modified time as the last-modified date
    """
    assert "cache_name" in kwargs, "Must specify cache_name as a keyword arg."

    try:
        cache = get_cache(kwargs["cache_name"])
        assert isinstance(cache, FileBasedCache) or isinstance(
            cache, LocMemCache), "requires file-based or mem-based cache."
    except InvalidCacheBackendError:
        return None

    key = django_get_cache_key(request, cache=cache)
    if key is None or not cache.has_key(key):
        return None

    if isinstance(cache, FileBasedCache):
        fname = cache._key_to_file(cache.make_key(key))
        if not os.path.exists(
                fname
        ):  # would happen only if cache expired AFTER getting the key
            return None
        last_modified = datetime.datetime.fromtimestamp(
            os.path.getmtime(fname))

    elif isinstance(cache, LocMemCache):
        # It's either in the cache (and valid), and therefore anything since the server
        #   started would be fine.
        # Or, it's not in the cache at all.
        creation_time = cache._expire_info[cache.make_key(
            key)] - settings.CACHE_TIME
        last_modified = datetime.datetime.fromtimestamp(creation_time)

    return last_modified
Example #5
def recalculate_user_stats(user_id):
    if not cache.set('user_stats_recently_recalculated_{}'.format(user_id),
                     1,
                     timeout=300,
                     nx=True):
        return 'skipped'

    u = User.objects.get(id=user_id)

    on_time = Ping.objects.filter(user=u).aggregate(
        Sum('on_time'))['on_time__sum']
    detection_count = Detection.objects.filter(user=u).filter(
        visible=True).count()

    r = get_redis_connection()

    if on_time:
        r.zadd(cache.make_key('on_time'), {user_id: on_time})

        if not r.zscore(cache.make_key('start_time'), user_id):
            start_time = Ping.objects.filter(user=u).filter(on_time__gt=0)\
                                     .aggregate(Min('timestamp'))['timestamp__min']
            r.zadd(cache.make_key('start_time'), {user_id: start_time})

    r.zadd(cache.make_key('detection_count'), {user_id: detection_count})

    return on_time, detection_count
Example #6
def get_user_on_time_and_rank(user):
    on_time = get_redis_connection(write=False).zscore(cache.make_key('on_time'), user.id)
    if not on_time:
        return 0, 'no '

    rank = get_redis_connection(write=False).zrevrank(cache.make_key('on_time'), user.id)

    hours, remainder = divmod(int(on_time) / 1000, 3600)
    minutes, seconds = divmod(remainder, 60)
    return '{}h {}m'.format(int(hours), int(minutes)), rank + 1
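Worked numbers for the conversion above, assuming on_time is stored in milliseconds (which the division by 1000 implies); zrevrank is zero-based, hence the +1:

hours, remainder = divmod(5_400_000 / 1000, 3600)  # 5,400,000 ms -> (1.0, 1800.0)
minutes, seconds = divmod(remainder, 60)           # -> (30.0, 0.0)
'{}h {}m'.format(int(hours), int(minutes))         # -> '1h 30m'; Redis rank 0 is reported as 1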
Example #7
def get_user_detection_count_and_rank(user):
    r = get_redis_connection(write=False)

    detection_count = r.zscore(cache.make_key('detection_count'), user.id)
    if not detection_count:
        return 0, 'no '

    rank = r.zrevrank(cache.make_key('detection_count'), user.id)

    return int(detection_count), rank + 1
Example #8
def get_team_detection_count_and_rank(team):
    r = get_redis_connection(write=False)

    detection_count = r.zscore(cache.make_key("team_detection_count"), team.id)
    if not detection_count:
        return 0, "no "

    rank = r.zrevrank(cache.make_key("detection_count"), team.id)

    return int(detection_count), rank + 1
Example #9
 def test_cache_dataframe_decorator(self):
     cache.clear()
     dds = Dataset()
     self.assertIsNone(cache.get("20f180e978ce3b67ca2e7afd8a880385"))
     dds.get_dataframe()
     self.assertIsNotNone(cache.get("20f180e978ce3b67ca2e7afd8a880385"))
     cache_time = cache._expire_info.get(
         cache.make_key("20f180e978ce3b67ca2e7afd8a880385"))
     dds.get_dataframe()
     self.assertEqual(
         cache._expire_info.get(
             cache.make_key("20f180e978ce3b67ca2e7afd8a880385")),
         cache_time)
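The decorator exercised by this test is not shown on this page. A plausible, purely illustrative sketch of such a dataframe-caching decorator (the hard-coded md5 digest in the test suggests the key is derived from something stable, such as the wrapped method's qualified name):

import functools
import hashlib
from django.core.cache import cache

def cache_dataframe(timeout=3600):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            key = hashlib.md5(func.__qualname__.encode("utf-8")).hexdigest()
            result = cache.get(key)
            if result is None:                 # miss: compute once and store
                result = func(*args, **kwargs)
                cache.set(key, result, timeout)
            return result
        return wrapper
    return decorator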
Example #10
 def _get_locker(self, key, version):
     """ 避免其它进程同时进行创建或者更新缓存操作,只有在初始化参数lock为true时候才启用
     """
     from django.core.cache import cache  # 延迟加载, 等django初始化好默认的cache backend之后再加载
     #  使用redis锁定修改缓存,避免多进程重复产生缓存数据
     lock_key = cache.make_key("{key}:{suffix}".format(
         key=cache.make_key(self._wrap_key(key), version),
         suffix=self._lock_key_suffix))
     if hasattr(cache, "client") and hasattr(cache.client, "get_client"):
         client = cache.client.get_client(write=True)
         if hasattr(client, "lock"):
             return client.lock(lock_key, self._lock_key_timeout)
     # Not using Redis as the cache backend, or the backend's Redis client has no lock() method: fall back to a process lock
     return Lock()
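A hedged sketch of how the lock returned by _get_locker() would be consumed. Both redis-py's Lock and a plain process-local Lock support the context-manager protocol, so the caller does not need to know which one it received; the surrounding method and helper below are assumptions, not shown on this page.

def _rebuild(self, key, value, version=None):
    # Serialize regeneration across processes (Redis lock) or threads (local Lock fallback).
    with self._get_locker(key, version):
        self._write_through(key, value, version)  # hypothetical helper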
def delete_history_from_redis(room_name, num_msgs=None):
    """
        Deletes history from the Redis Cache
    """
    REDIS_CONNECTION = cache.get_client('')
    if num_msgs is None:
        res = REDIS_CONNECTION.delete(cache.make_key(f'HISTORY_{room_name}'))
    else:
        if num_msgs <= 0:
            return 1, None
        else:
            res = REDIS_CONNECTION.ltrim(
                cache.make_key(f'HISTORY_{room_name}'), num_msgs, -1)
    return res, None
Example #12
def make_key(obj):
    '''
    Returns a string serialization of `obj` which is usable as a cache key.

    This function is used internally by CacheCow, but it's exposed in case you 
    want to use it directly with the lower-level Django cache API without the
    rest of CacheCow, and so that you can see how keys are constructed.

    This does a couple things to turn the given object into a clean key:

        1. Recursively traverses, serializes and joins together any iterables, 
           so you can pass a list of items to be turned into a key. The 
           recursion depth is limited to 1 level.
       
           String objects are an exception to this -- they are treated here as 
           atomic units, despite being iterables.

        2. Removes any control code characters and spaces [1] (which are
           illegal in memcached keys [2].)

        3. After the above two steps, if the resulting length is >
           MAX_KEY_LENGTH bytes (250 by default, which is the memcached 
           protocol limit), it generates a hash out of the key instead.
    
    It's possible the resulting key would serialize into an empty string, so
    choose your args carefully to avoid this.

    [1] http://www.unicode.org/charts/PDF/U0000.pdf
        http://www.unicode.org/charts/PDF/U0080.pdf

    [2] http://code.sixapart.com/svn/memcached/trunk/server/doc/protocol.txt
    '''
    key = '.'.join(imap(_format_key_arg, _key_arg_iterator(obj)))

    # If the resulting key is too long, hash the part after the prefix, and
    # truncate as needed.
    if len(cache.make_key(key)) > MAX_KEY_LENGTH:
        prefix = cache.make_key('')  # Django prepends some stuff to keys.

        # Just to be safe... we should be able to have a key >= 1 char long :)
        if len(prefix) >= MAX_KEY_LENGTH:
            raise Exception('Your cache key prefixes are too long.')

        #TODO a further refinement of this would be to hash only the smallest
        # part necessary to get it under the limit. Don't hash an entire key
        # just for being 1 char too long. This would improve readability.
        key = hashlib.md5(key).hexdigest()[:MAX_KEY_LENGTH - len(prefix)]
    return key
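A hedged usage sketch for CacheCow's make_key(). The helpers _format_key_arg and _key_arg_iterator are internal and not shown here, but per the docstring an iterable is flattened and joined with dots, and over-long keys are md5-hashed:

key = make_key(['user', 42, 'profile'])     # something like 'user.42.profile'
cache.set(key, {'likes': 3}, timeout=600)   # short keys pass through readably
hashed = make_key('x' * 1000)               # longer than MAX_KEY_LENGTH -> hashed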
Example #13
def make_key(obj):
    '''
    Returns a string serialization of `obj` which is usable as a cache key.

    This function is used internally by CacheCow, but it's exposed in case you 
    want to use it directly with the lower-level Django cache API without the
    rest of CacheCow, and so that you can see how keys are constructed.

    This does a couple things to turn the given object into a clean key:

        1. Recursively traverses, serializes and joins together any iterables, 
           so you can pass a list of items to be turned into a key. The 
           recursion depth is limited to 1 level.
       
           String objects are an exception to this -- they are treated here as 
           atomic units, despite being iterables.

        2. Removes any control code characters and spaces [1] (which are
           illegal in memcached keys [2].)

        3. After the above two steps, if the resulting length is >
           MAX_KEY_LENGTH bytes (250 by default, which is the memcached 
           protocol limit), it generates a hash out of the key instead.
    
    It's possible the resulting key would serialize into an empty string, so
    choose your args carefully to avoid this.

    [1] http://www.unicode.org/charts/PDF/U0000.pdf
        http://www.unicode.org/charts/PDF/U0080.pdf

    [2] http://code.sixapart.com/svn/memcached/trunk/server/doc/protocol.txt
    '''
    key = '.'.join(imap(_format_key_arg, _key_arg_iterator(obj)))

    # If the resulting key is too long, hash the part after the prefix, and
    # truncate as needed.
    if len(cache.make_key(key)) > MAX_KEY_LENGTH:
        prefix = cache.make_key('') # Django prepends some stuff to keys.
        
        # Just to be safe... we should be able to have a key >= 1 char long :)
        if len(prefix) >= MAX_KEY_LENGTH:
            raise Exception('Your cache key prefixes are too long.')

        #TODO a further refinement of this would be to hash only the smallest 
        # part necessary to get it under the limit. Don't hash an entire key 
        # just for being 1 char too long. This would improve readability.
        key = hashlib.md5(key).hexdigest()[:MAX_KEY_LENGTH - len(prefix)]
    return key
Example #14
    def test_event_usage_cache_is_reset_at_beginning_of_month(self):
        organization, team, user = self.create_org_team_user()
        self.client.force_login(user)

        for _ in range(0, 3):
            # Create some events on CH
            create_event(
                team=team,
                event="$pageview",
                distinct_id="distinct_id",
                event_uuid=uuid.uuid4(),
            )

        response = self.client.post("/api/user/")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.json()["billing"]["current_usage"]["value"],
                         3)

        # Check that result was cached
        cache_key = f"monthly_usage_{organization.id}"
        self.assertEqual(cache.get(cache_key), 3)

        # Even though the default caching time is 12 hours, the result is only cached until the beginning of the next month
        self.assertEqual(
            cache._expire_info.get(cache.make_key(cache_key)),
            1546300800.0,
        )  # 1546300800 = Jan 1, 2019 00:00 UTC
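The notable detail here is a timeout that expires at the start of the next month rather than after a fixed duration. A hedged sketch of how such a timeout can be computed (not the project's actual implementation):

from datetime import datetime, timezone

def seconds_until_next_month(now=None):
    now = now or datetime.now(timezone.utc)
    year, month = (now.year + 1, 1) if now.month == 12 else (now.year, now.month + 1)
    next_month = datetime(year, month, 1, tzinfo=timezone.utc)
    return int((next_month - now).total_seconds())

# cache.set(cache_key, usage, timeout=seconds_until_next_month())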
Example #15
 def post(self, request, **kwargs):
     try:
         cache.incr(self.cache_key)
     except ValueError:
         # Cache entry doesn't exist, so create it
         cache._cache.cas(cache.make_key(self.cache_key), 1)
     return redirect('.')
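The cas() call above reaches into the backend-private cache._cache memcached client. A more portable way to express the same initialize-then-increment counter with the public cache API (sketch; not strictly atomic, but backend-agnostic):

cache.add(self.cache_key, 0)   # no-op if the key already exists
cache.incr(self.cache_key)     # now there is always something to increment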
Example #16
def get_user_list_page(page):
    data = cache.get("user_list_{}".format(page))
    if not data:
        r = get_redis_connection(write=False)
        top = r.zrevrange(
            cache.make_key("detection_count"),
            20 * (page - 1),
            19 + 20 * (page - 1),
            withscores=True,
        )
        top = [(int(t[0]), int(t[1])) for t in top
               if t[1]]  # Remove users with no detections
        users = {
            u.id: u
            for u in User.objects.filter(id__in=[t[0] for t in top])
        }

        data = {
            "has_next": len(top) == 20,
            "has_previous": page > 1,
            "page_number": page,
            "users": [{
                "name": users[t[0]].username,
                "display_name": users[t[0]].display_name,
                "detection_count": t[1],
            } for t in top],
        }
        cache.set("user_list_{}".format(page), data)
    return data
Example #17
def get_team_list_page(page):
    data = cache.get("team_list_{}".format(page))
    if not data:
        r = get_redis_connection(write=False)
        top = r.zrevrange(
            cache.make_key("team_detection_count"),
            20 * (page - 1),
            19 + 20 * (page - 1),
            withscores=True,
        )
        top = [(int(t[0]), int(t[1])) for t in top
               if t[1]]  # Remove teams with no detections
        teams = {
            team.id: team
            for team in Team.objects.filter(
                id__in=[t[0] for t in top]).annotate(user_count=Count("user"))
        }

        data = {
            "has_next": len(top) == 20,
            "has_previous": page > 1,
            "page_number": page,
            "teams": [{
                "name": teams[t[0]].name,
                "user_count": teams[t[0]].user_count,
                "detection_count": t[1],
            } for t in top],
        }
        cache.set("team_list_{}".format(page), data)
    return data
Example #18
def relabel_detections(start_id, limit):
    detections = Detection.objects.filter(id__gte=start_id).filter(
        id__lt=start_id + limit
    )
    r = get_redis_connection()

    for d in detections:
        s = True

        if d.source != "api_v2" or not d.frame_content:
            s = False

        if s and not validate_image(d.frame_content):
            s = False

        if s:
            start_time = r.zscore(cache.make_key("start_time"), d.user_id)
            if start_time:
                s = d.timestamp > start_time
            else:
                s = False

        if s != d.visible:
            d.visible = s
            d.save()
Example #19
def recalculate_team_stats(team_id):
    if not cache.set('team_stats_recently_recalculated_{}'.format(team_id),
                     1,
                     timeout=300,
                     nx=True):
        return 'skipped'

    t = Team.objects.get(id=team_id)

    if t.name:
        detection_count = Detection.objects.filter(team=t).filter(
            visible=True).count()

        r = get_redis_connection()
        r.zadd(cache.make_key('team_detection_count'),
               {team_id: detection_count})

        return detection_count

    else:
        cache.set('team_stats_recently_recalculated_{}'.format(team_id),
                  1,
                  timeout=3600 * 24 * 7)

    return 'ignored'
Example #20
def get_top_users():
    top = get_redis_connection(write=False) \
        .zrevrange(cache.make_key('detection_count'), 0, 4, withscores=True)
    return [{
        'name': u.username,
        'display_name': u.display_name,
        'detection_count': int(top[i][1])
    } for i, u in enumerate([User.objects.get(id=id) for id in [t[0] for t in top]])]
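The comprehension above issues one User query per leaderboard entry; an equivalent single-query variant that preserves the Redis ordering could look like this (sketch):

def get_top_users_bulk():
    top = get_redis_connection(write=False) \
        .zrevrange(cache.make_key('detection_count'), 0, 4, withscores=True)
    ids = [int(member) for member, _ in top]
    users = User.objects.in_bulk(ids)   # one query for all five users
    return [{
        'name': users[i].username,
        'display_name': users[i].display_name,
        'detection_count': int(score),
    } for i, (_, score) in zip(ids, top)]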
Example #21
    def get_time_frame(self, key=None):
        if key:
            lock_key = f'lock:{self.name}:{key}'
        else:
            lock_key = f'lock:{self.name}'
        lock_key = cache.make_key(lock_key)
        log.debug('Requesting %s time frame...', lock_key)

        exists_key = lock_key + ':exists'
        exists = self._client.getset(exists_key, 1)
        if self.timeout is not None:
            self._client.expire(exists_key, self.timeout)

        if exists is None:
            log.debug('Lock does not exist, initialize it')
            self._client.rpush(lock_key, 0)
        else:
            log.debug('Lock already exists, waiting for it')

        blpop_result = self._client.blpop(lock_key, self.timeout)
        if blpop_result is not None:
            ready_time = float(blpop_result[1])
        else:
            log.debug('Timeout hit')
            ready_time = 0

        if ready_time:
            log.debug('Next free time frame will be at %s', ready_time)
        else:
            log.debug('Next free time frame is NOW')

        redis_time = self.get_current_redis_time()
        log.debug('Current redis time: %s', redis_time)

        delay = ready_time - redis_time
        if delay > 0:
            log.debug('Rate limit hit! Sleeping for %s seconds', delay)
            time.sleep(delay)
        else:
            log.debug('Continue without delay')

        log.debug('Running job in time frame %s...', lock_key)
        try:
            yield
        finally:
            log.debug('Job finished in time frame %s', lock_key)

            redis_time = self.get_current_redis_time()
            log.debug('Current redis time: %s', redis_time)

            ready_time = redis_time + self._min_delay
            log.debug('Next free time frame will be at %s', ready_time)

            self._client.set(exists_key, 1, ex=self.timeout)
            self._client.rpush(lock_key, str(ready_time))
            if self.timeout is not None:
                self._client.expire(lock_key, self.timeout)
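Because get_time_frame() yields mid-way, it is evidently intended to be wrapped with contextlib.contextmanager and used as a context manager around rate-limited work. A usage sketch; the limiter class name and constructor arguments below are assumptions, not shown on this page.

limiter = RedisTimeFrameLimiter(name='external-api', min_delay=1.0, timeout=30)  # hypothetical class

with limiter.get_time_frame(key='tenant-7'):
    call_external_api()   # starts no earlier than min_delay after the previous frame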
def cleanup_room_redis(room_name, reset_count=False, bot_type="website"):
    """Dumps the session content of the room into the DB
    """
    connection = cache.get_client('')

    ext = cache.get(str(room_name), "default")

    # Get the room lock status from the cache
    lock = cache.get(f'CLIENTWIDGETROOMLOCK_{room_name}')

    if lock is None and bot_type == "website":
        return

    if lock == True or bot_type in (
            "whatsapp",
            "facebook",
    ):
        # Dump to DB
        variables = cache.get("VARIABLES_" + room_name)
        messages_bytes = connection.lrange(
            cache.make_key("HISTORY_" + room_name), 0, -1)
        messages = list(json.loads(message) for message in messages_bytes)
        modified = False

        with transaction.atomic():
            try:
                room_id = uuid.UUID(str(room_name))
                instance = ChatRoom.objects.get(room_id=room_id)
            except ValueError:
                instance = ChatRoom.objects.get(room_name=room_name)
            if variables is not None:
                instance.variables = variables
                modified = True
            if messages is not None:
                if messages != []:
                    instance.messages.extend(messages)
                    if bot_type == 'website' and hasattr(
                            instance, 'recent_messages') and isinstance(
                                getattr(instance, 'recent_messages'), list):
                        instance.recent_messages.extend(messages)
                    modified = True
            if modified:
                instance.bot_is_active = False
                instance.end_time = timezone.now()
                instance.save(using=ext)

    if reset_count == True:
        # Reset the count to 0
        cache.set(f"NUM_USERS_{room_name}", 0)
        # Delete the locks
        cache.delete(f'CLIENTWIDGETROOMLOCK_{room_name}')
        cache.delete(f'CLIENTWIDGETLOCK_{room_name}')

    # Delete the session history
    cache.delete(f"HISTORY_{room_name}")
    cache.delete(f"VARIABLES_{room_name}")
Example #23
def cache_delete_pattern(pattern, version=None, client=None):
    """
    Remove all keys matching pattern.
    """

    pattern = cache.make_key(pattern, version=version)
    keys = cache._client.keys(pattern)

    if keys:
        return cache._client.delete(*keys)
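KEYS walks the entire keyspace in one blocking call; a SCAN-based variant of the same delete-by-pattern idea (sketch, reusing the example's raw cache._client connection):

def cache_delete_pattern_scan(pattern, version=None):
    pattern = cache.make_key(pattern, version=version)
    deleted = 0
    # scan_iter pages through the keyspace incrementally instead of blocking on KEYS.
    for key in cache._client.scan_iter(match=pattern):
        deleted += cache._client.delete(key)
    return deleted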
def update_session_redis(room_name, msg_number, content):
    """
        Sets the key-value fields for a message on the redis store
    """
    REDIS_CONNECTION = cache.get_client('')
    REDIS_CONNECTION.hmset(room_name + "_" + str(msg_number), content)
    # Also update the history
    # TODO: Store it as a single nested hash value
    REDIS_CONNECTION.hmset(
        cache.make_key(f"HISTORY_{room_name}_{msg_number % (N)}"), content)
 def test_incr_decr_timeout(self):
     """incr/decr does not modify expiry time (matches memcached behavior)"""
     key = 'value'
     _key = cache.make_key(key)
     cache.set(key, 1, timeout=cache.default_timeout * 10)
     expire = cache._expire_info[_key]
     cache.incr(key)
     self.assertEqual(expire, cache._expire_info[_key])
     cache.decr(key)
     self.assertEqual(expire, cache._expire_info[_key])
 def test_incr_decr_timeout(self):
     """incr/decr does not modify expiry time (matches memcached behavior)"""
     key = "value"
     _key = cache.make_key(key)
     cache.set(key, 1, timeout=cache.default_timeout * 10)
     expire = cache._expire_info[_key]
     cache.incr(key)
     self.assertEqual(expire, cache._expire_info[_key])
     cache.decr(key)
     self.assertEqual(expire, cache._expire_info[_key])
Example #27
    def get_cache_key(self, raw=False):
        """
        Returns a string that is used as cache key for a collection.

        Django adds a prefix to the cache key when using the django cache api.
        When accessing the cache by other means, use raw=True to get the same prefixed cache key.
        """
        key = get_element_list_cache_key(self.collection_string)
        if raw:
            key = cache.make_key(key)
        return key
def keys_by_prefix(prefix):
    version = cache.make_key('')
    version_len = len(version)
    for fixed in [cache_page_fixed, cache_header_fixed]:
        info = '{0}{1}.{2}'.format(version, fixed, prefix)
        if is_support:
            for key in cache._cache:
                if key.startswith(info):
                    yield key[version_len:]
        else:
            not_support_warn('keys_by_prefix')
Example #29
    def get_cache_key(self, raw=False):
        """
        Returns a string that is used as cache key for a collection.

        Django adds a prefix to the cache key when using the django cache api.
        When accessing the cache by other means, use raw=True to get the same prefixed cache key.
        """
        key = get_element_list_cache_key(self.collection_string)
        if raw:
            key = cache.make_key(key)
        return key
def decrement_usercount(room_name):
    """
        Decrements the room user count atomically whenever a WebSocket connection is closed
    """
    num_users = get_usercount(room_name)
    while True:
        # Set the current message atomically
        num_users, error = atomic_set(cache.make_key(f"NUM_USERS_{room_name}"),
                                      num_users - 1)
        if not error:
            break
    return int(num_users)
def increment_usercount(room_name):
    """
        Increments the user count shared variable atomically on the redis cache
    """
    num_users = get_usercount(room_name)
    while True:
        # Set the current message atomically
        num_users, error = atomic_set(cache.make_key(f"NUM_USERS_{room_name}"),
                                      num_users + 1)
        if not error:
            break
    return int(num_users)
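The atomic_set() helper these two functions retry around is not shown on this page. For a plain counter, Redis INCRBY/DECRBY are already atomic, so an equivalent without the retry loop could look like this (sketch, same raw-client style as the surrounding examples):

def change_usercount(room_name, delta):
    client = cache.get_client('')
    key = cache.make_key(f"NUM_USERS_{room_name}")
    return int(client.incrby(key, delta))   # atomic on the Redis side

# change_usercount(room_name, +1) on connect, change_usercount(room_name, -1) on disconnect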
def append_msg_to_redis(room_name,
                        message_dict,
                        store_full=False,
                        timeout=24 * 60 * 60):
    """
        Appends the message dictionary from the websocket to the Redis Message List
    """
    REDIS_CONNECTION = cache.get_client('')

    if store_full:
        REDIS_CONNECTION.rpush(cache.make_key(f'HISTORY_{room_name}'),
                               json.dumps(message_dict))
        REDIS_CONNECTION.expire(cache.make_key(f'HISTORY_{room_name}'),
                                timeout)
        # Finally set a lock on this room name. We'll need it later for flushing to DB
        cache.set(f'CLIENTWIDGETLOCK_{room_name}', room_name, timeout=timeout)
        return

    if 'message' in message_dict:
        if isinstance(message_dict['message'], list):
            # If we want to store an array of parsed messages
            for msg in message_dict['message']:
                REDIS_CONNECTION.rpush(cache.make_key(f'HISTORY_{room_name}'),
                                       json.dumps(msg))
        else:
            REDIS_CONNECTION.rpush(cache.make_key(f'HISTORY_{room_name}'),
                                   json.dumps(message_dict))
    else:
        REDIS_CONNECTION.rpush(cache.make_key(f'HISTORY_{room_name}'),
                               json.dumps(message_dict))

    REDIS_CONNECTION.expire(cache.make_key(f'HISTORY_{room_name}'), timeout)

    # Finally set a lock on this room name. We'll need it later for flushing to DB
    cache.set(f'CLIENTWIDGETLOCK_{room_name}', room_name, timeout=timeout)
def flush_session(room_name, batch_size):
    """
        Deletes the messages related to the session on the redis cache
    """
    # Flush the contents of the redis cache for this session
    REDIS_CONNECTION = cache.get_client('')
    for key_batch in fetch_redis_batch(
            REDIS_CONNECTION.scan_iter(cache.make_key(f"{room_name}_*")),
            batch_size):
        for key in key_batch:
            if key is None:
                break
            REDIS_CONNECTION.delete(key)
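fetch_redis_batch() is not defined on this page. The `if key is None: break` check above suggests the final batch is padded with None, which matches a zip_longest-style grouper; a plausible sketch:

from itertools import zip_longest

def fetch_redis_batch(key_iterator, batch_size):
    # Group the SCAN iterator into fixed-size batches, padding the last one with None.
    args = [iter(key_iterator)] * batch_size
    return zip_longest(*args, fillvalue=None)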
Example #34
def load_cache_signals(version=None, **kwargs):
    """On startup, sync signals with registered models"""
    if not cache_signals.ready:
        results = CacheBotSignals.objects.all()
        tables = [r.table_name for r in results]
        mapping = cache.get_many(tables)
        for result in results:
            key = cache.make_key(u'.'.join(('cachesignals', result.table_name)), version=version)
            accessor_set = mapping.get(key) or set()
            accessor_set.add((result.accessor_path, result.lookup_type, result.exclude))
            mapping[key] = accessor_set
        cache.set_many(mapping, 0)
        cache_signals.ready = True
Example #35
def get_global_stats():
    data = cache.get("global_stats")

    if not data:
        r = get_redis_connection(write=False)
        data = {
            "detections_total": Detection.objects.filter(visible=True).count(),
            "users_total": r.zcard(cache.make_key("start_time")),
            "teams_total": Team.objects.count(),
        }
        cache.set("global_stats", data)

    return data
 def cacher(*args, **kwargs):
     # Assigning to the closed-over cachekeyprefix here would raise UnboundLocalError,
     # so fall back to the wrapped function's name via a local variable instead.
     prefix = cachekeyprefix if cachekeyprefix is not None else f.func_name
     # The non-keyword parameters used to call the function are used as the keys
     cachekey = cache.make_key(':'.join(map(str, (prefix,) + args[1:])))
     if cachekey in cache:
         logging.info('Cache hit for key [%s]' % cachekey)
         return cache.get(cachekey)
     else:
         logging.info('Cache miss for key [%s, %s]' % (cachekey, timeout))
         val = f(*args, **kwargs)
         cache.set(cachekey, val, timeout=timeout)
         return val
Example #37
def get_global_stats():
    data = cache.get('global_stats')

    if not data:
        r = get_redis_connection(write=False)
        data = {
            'detections_total': Detection.objects.filter(visible=True).count(),
            'users_total': r.zcard(cache.make_key('start_time')),
            'teams_total': Team.objects.count(),
        }
        cache.set('global_stats', data)

    return data
        def cacher(*args, **kwargs):
            std_args = args[1:] if len(args) and isinstance(args[0], HttpRequest) else args
            params = [cachekeyprefix] + list(std_args) + ['%s:%s' % (k, kwargs.get(k)) for k in sorted(kwargs.keys())]
            print(params)
            # The non-keyword parameters used to call the function are used as the keys
            cachekey = cache.make_key(':'.join(map(str,params)))
            if cachekey in cache:
                logging.info('Cache hit for key [%s]' % cachekey)
                return cache.get(cachekey)
            else:
                logging.info('Cache miss for key [%s, %s]' % (cachekey, timeout))
                val = f(*args, **kwargs)
                cache.set(cachekey, val, timeout=timeout)
                return val
 def cacher(*args, **kwargs):
     std_args = args[1:] if len(args) and isinstance(args[0], HttpRequest) else args
     params = [cachekeyprefix or f.func_name] + list(std_args) + ['%s:%s' % (k, kwargs.get(k)) for k in sorted(kwargs.keys())]
     # The non-keyword parameters used to call the function are used as the keys
     cachekey = cache.make_key(':'.join(map(str,params)))
     if cachekey in cache:
         print('hit for %s' % cachekey)
         logger.debug('Cache hit for key [%s]' % cachekey)
         return cache.get(cachekey)
     else:
         print('miss for %s' % cachekey)
         logger.debug('Cache miss for key [%s, %s]' % (cachekey, timeout))
         val = f(*args, **kwargs)
         if timeout is None:
             cache.set(cachekey, val)
         else:
             cache.set(cachekey, val, timeout=timeout)
         return val
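The three cacher() closures above come from a memoizing decorator whose outer layers (supplying cachekeyprefix, timeout and f) are not shown. A self-contained Python 3 sketch of the overall shape, not the original implementation:

import functools
from django.core.cache import cache

def cached(cachekeyprefix=None, timeout=300):
    def decorator(f):
        @functools.wraps(f)
        def cacher(*args, **kwargs):
            prefix = cachekeyprefix or f.__name__
            params = [prefix] + list(args) + ['%s:%s' % (k, kwargs[k]) for k in sorted(kwargs)]
            cachekey = cache.make_key(':'.join(map(str, params)))
            val = cache.get(cachekey)
            if val is None:   # note: a legitimately cached None gets recomputed
                val = f(*args, **kwargs)
                cache.set(cachekey, val, timeout=timeout)
            return val
        return cacher
    return decorator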
Example #40
def get_invalidation_key(table_alias, accessor_path="", lookup_type="exact", negate=False, value="", version=None):
    """
    An invalidation key is associated with a set of cached queries. A blank accessor_path
    will create an invalidation key for this entire table instead of a specific row
    """

    # punt on this problem for now
    if isinstance(value, QueryWrapper) or lookup_type != "exact" or negate:
        value = ""

    if hasattr(value, "__iter__"):
        if len(value) == 1:
            value = value[0]
        else:
            value = ""

    base_key = md5_constructor(".".join((accessor_path, unicode(value))).encode("utf-8")).hexdigest()
    return cache.make_key(".".join((table_alias, "cachebot.invalidation", base_key)), version=version)
Example #41
    def del_all(self) -> None:
        """
        Deletes all elements from the cache.

        This method uses the redis command SCAN. See
        https://redis.io/commands/scan#scan-guarantees for its limitations. If
        an element is added to the cache while del_all() is in progress, it is
        possible that it will not be deleted.
        """
        redis = get_redis_connection()

        # Get all keys that start with self.base_cache_key and delete them
        match = cache.make_key('{}:*'.format(self.base_cache_key))
        cursor = 0
        while True:
            cursor, keys = redis.scan(cursor, match)
            for key in keys:
                redis.delete(key)
            if cursor == 0:
                return
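The same cleanup can be written with redis-py's scan_iter helper, which hides the manual cursor loop (sketch; same key pattern as above):

def del_all_iter(self) -> None:
    redis = get_redis_connection()
    match = cache.make_key('{}:*'.format(self.base_cache_key))
    for key in redis.scan_iter(match=match):
        redis.delete(key)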
    def test_increment_cache(self):
        """Sum our progress by increments properly."""
        expected = 25.0
        test_key = cache.make_key('increment_test')
        increment = 25.0
        # Fresh increment, this initializes the value.
        decorators.increment_cache(test_key, increment)
        self.assertEqual(float(cache.get(test_key)), expected)

        # Increment an existing key
        decorators.increment_cache(test_key, increment)
        expected = 50.0
        self.assertEqual(float(cache.get(test_key)), expected)

        # This should put us well over 100.0 in incrementation w/o bounds check.
        for i in range(10):
            decorators.increment_cache(test_key, increment)

        expected = 100.0
        self.assertEqual(float(cache.get(test_key)), expected)
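decorators.increment_cache() itself is not shown; the test implies it clamps the running total at 100.0, so a plausible, illustrative sketch is:

from django.core.cache import cache

def increment_cache(key, increment):
    # Add to the stored progress value, capping at 100.0 as the test expects.
    value = float(cache.get(key) or 0.0) + float(increment)
    cache.set(key, min(value, 100.0))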
Example #43
def get_many_by_key(cache_key_f, item_keys, version=None):
    """
    For a series of item keys and a function that maps these keys to cache keys,
    get all the items from the cache if they are available there.
    
    Return a dictionary mapping the item keys to the objects retrieved from the
    cache.  Any items not found in the cache are not returned.
    """
    cache_key_to_item_key = {}
    for item_key in item_keys:
        cache_key = cache.make_key(cache_key_f(item_key), version=version)
        cache_key_to_item_key[cache_key] = item_key

    # request from cache
    from_cache = cache.get_many(cache_key_to_item_key.keys())

    results = {}
    for cache_key, value in from_cache.iteritems():
        item_key = cache_key_to_item_key[cache_key]
        results[item_key] = value
    return results
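A hedged usage sketch for get_many_by_key(): map item ids to cache keys with a small function, then bulk-fetch; only ids that were actually cached come back (the key format below is made up):

found = get_many_by_key(lambda pk: 'article.%d' % pk, [1, 2, 3])
# e.g. {1: <Article 1>, 3: <Article 3>} if article 2 was not cached
missing = [pk for pk in [1, 2, 3] if pk not in found]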
Example #44
def expire_view_cache(view_name, args = [], kwargs={}, namespace=None, key_prefix=None):
    """
    This function allows you to invalidate any view-level cache. 
        view_name: view function you wish to invalidate, or its named url pattern
        args: any arguments passed to the view function
        namespace: optional, if an application namespace is needed
        key_prefix: the key prefix used by the @cache_page decorator on the function (if any)
    """

    # create a fake request object
    request = HttpRequest()
    # Look up the request path:
    if namespace:
        view_name = namespace + ":" + view_name
    request.path = reverse(view_name, args = args, kwargs=kwargs)
    # get cache key, expire if the cached item exists:
    key = cache.make_key(get_cache_key(request, key_prefix=key_prefix))
    if key:
        if cache.get(key):
            cache.delete(key)
            return True
    return False
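A short usage sketch for expire_view_cache(); the view name, namespace and kwargs are illustrative:

# Invalidate the cached detail page for one object after it changes.
expire_view_cache('article-detail', kwargs={'slug': 'hello-world'}, namespace='blog')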
    def test_make_key(self):
        key1 = cache.make_key('ololo')
        self.assertEqual(':1:ololo', key1)

        key2 = cache.make_key(123123)
        self.assertEqual(':1:123123', key2)
Example #46
 def get_lookup_key(self, model_class, version=None):
     return cache.make_key('.'.join(('cachesignals', model_class._meta.db_table)), version=version)
Example #47
def template_cache_key(fragment_name, *variables):
    "Devuelve la llave usada por la cache para el fragmento y variable dados."
    args = md5_constructor(u':'.join([urlquote(unicode(v)) for v in variables]))
    cache_key = 'template.cache.%s.%s' % (fragment_name, args.hexdigest())
    cache_key = cache.make_key(cache_key)
    return cache_key
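Recent Django versions expose the same fragment-key computation as a utility. Note it returns the un-prefixed key, so you would still pass it through cache.make_key() if you need the raw backend key as above (sketch; the fragment name and vary-on values are illustrative):

from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key

fragment_key = make_template_fragment_key('sidebar', [request.user.username])
raw_backend_key = cache.make_key(fragment_key)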
Example #48
 def get_cache_key(self, user_id: int) -> str:
     """
     Returns the cache key for a user.
     """
     return cache.make_key('{}:{}'.format(self.base_cache_key, user_id))
import redis
import os
from django.conf import settings
from django.core.cache import cache

settings.configure()

key_prefix = cache.make_key('django_compressor')

REDIS_URL = os.getenv('REDIS_PORT_6379_TCP_ADDR')

redis_host = REDIS_URL
redis_port = 6379
redis_password = None
redis_db = 0

if redis_password is None:
    redis = redis.Redis(host=redis_host, port=redis_port, db=redis_db)
else:
    redis = redis.Redis(host=redis_host, port=redis_port, db=redis_db, password=redis_password)

keys = redis.keys('%s.*' % (key_prefix,))
for key in keys:
    redis.delete(key)
    print('Cleared Django Compressor key: %s' % (key,))
Example #50
 def get_cache_key(self):
     """
     Returns the cache key.
     """
     return cache.make_key(self.cache_key)
Example #51
 def get_user_cache_key(self, user_id):
     """
     Returns a cache key to save the channel names for a specific user.
     """
     return cache.make_key('{}:{}'.format(self.cache_key, user_id))
 def test_get_prog_key(self):
     """We format our cache key properly."""
     expected = cache.make_key('SEED:fun_func:PROG:34')
     self.assertEqual(decorators.get_prog_key('fun_func', 34), expected)
 def get_cache_key(self, extra_args='', version=None):
     """Cache key used to identify this query"""
     query, params = self.query.get_compiler(using=self.db).as_sql()
     query_string = (query % params).strip().encode("utf-8")
     base_key = md5_constructor('.'.join((query_string, extra_args))).hexdigest()
     return cache.make_key('.'.join((self.model._meta.db_table, 'cachebot.results', base_key)), version=version)
Example #54
def make_key(key):
    return unicode(django_cache.make_key(key))
def _get_cache_key(prefix, import_file_pk):
    """Makes a key like 'SEED:save_raw_data:LOCK:45'."""
    return unicode(cache.make_key(
        '{0}:{1}'.format(prefix, import_file_pk)
    ))
Example #56
 def get_cache_key(self, collection_string: str) -> str:
     """
     Returns the cache key for a collection.
     """
     return cache.make_key('{}:{}'.format(self.base_cache_key, collection_string))