def test_ttl(self):
    cache = get_cache("default")
    _params = cache._params
    _is_herd = (_params["OPTIONS"]["CLIENT_CLASS"] == "django_redis.client.HerdClient")
    _is_shard = (_params["OPTIONS"]["CLIENT_CLASS"] == "django_redis.client.ShardClient")

    # Not supported for shard client.
    if _is_shard:
        return

    # Test ttl
    cache.set("foo", "bar", 10)
    ttl = cache.ttl("foo")
    if _is_herd:
        self.assertAlmostEqual(ttl, 12)
    else:
        self.assertAlmostEqual(ttl, 10)

    # Test ttl None
    cache.set("foo", "foo", timeout=None)
    ttl = cache.ttl("foo")
    self.assertEqual(ttl, None)

    # Test ttl with expired key
    cache.set("foo", "foo", timeout=-1)
    ttl = cache.ttl("foo")
    self.assertEqual(ttl, 0)

    # Test ttl with not existent key
    ttl = cache.ttl("not-existent-key")
    self.assertEqual(ttl, 0)

def execute(self, *args):
    uid = self.context.user.id
    word = cache.get(f"authword.{uid}")
    if word is not None:
        time_left = timedelta(seconds=cache.ttl(f"authword.{uid}"))
        self.send_message(
            f"Your qualification authword is: {word.upper()}. Expires in: {time_left}"
        )
        return

    cooldown = cache.get(f"authcooldown.{uid}")
    if cooldown is not None:
        time_left = timedelta(seconds=cache.ttl(f"authcooldown.{uid}"))
        self.send_message(f"Your authword expired. Try again in: {time_left}")
        return

    authword = Words.get_word()
    cache.set(f"authword.{uid}", authword, timeout=self.EXPIRE_TIME)
    cache.set(f"authcooldown.{uid}", True, timeout=self.COOLDOWN_TIME)
    time_left = timedelta(seconds=cache.ttl(f"authword.{uid}"))
    self.send_message(
        f"Your qualification authword is: {authword.upper()}. Expires in: {time_left}"
    )

def get(self, request, pk, *args, **kwargs):
    """
    Returns a list of protocol datasources associated with this protocol.

    Also determines authorization for each protocol datasource based on
    the user making the request.
    """
    try:
        p = Protocol.objects.get(pk=pk)
    except ObjectDoesNotExist:
        return Response({'error': 'Protocol requested not found'}, status=404)

    ds = []
    for pds in p.getProtocolDataSources():
        t = ProtocolDataSourceSerializer(pds, context={'request': request}).data
        # Parse ProtocolDataSource configuration
        if pds.driver_configuration != '':
            dc = json.loads(pds.driver_configuration)
        else:
            dc = {}
        # If labels are defined get label names from eHB.
        # (label_id, label_description)
        if 'labels' in list(dc.keys()):
            labels = cache.get('ehb_labels')
            if not labels:
                labels = self.erl_rh.query()
                cache.set('ehb_labels', labels)
                if hasattr(cache, 'ttl'):
                    cache.ttl('ehb_labels', 60)
            nl = []
            for l in dc['labels']:
                for label in labels:
                    if l == label['id']:
                        if label['label'] == '':
                            nl.append((label['id'], 'Record'))
                        else:
                            nl.append((label['id'], label['label']))
            dc['labels'] = nl
        else:
            dc['labels'] = [(1, 'Record')]
        t["driver_configuration"] = dc
        # Determine Authorization
        try:
            ProtocolUserCredentials.objects.get(
                protocol=p, data_source=pds, user=request.user)
            t["authorized"] = True
        except ProtocolUserCredentials.DoesNotExist:
            t["authorized"] = False
        # Include DataSource details
        t["data_source"] = DataSourceSerializer(pds.data_source).data
        ds.append(t)
    return Response(sorted(ds, key=lambda ds: ds["display_label"]))

def test_ttl(self):
    cache = caches["default"]

    # Test ttl
    cache.set("foo", "bar", 10)
    ttl = cache.ttl("foo")
    if isinstance(cache.client, herd.HerdClient):
        self.assertAlmostEqual(ttl, 12)
    else:
        self.assertAlmostEqual(ttl, 10)

    # Test ttl None
    cache.set("foo", "foo", timeout=None)
    ttl = cache.ttl("foo")
    self.assertIsNone(ttl)

    # Test ttl with expired key
    cache.set("foo", "foo", timeout=-1)
    ttl = cache.ttl("foo")
    self.assertEqual(ttl, 0)

    # Test ttl with not existent key
    ttl = cache.ttl("not-existent-key")
    self.assertEqual(ttl, 0)

def test_ttl(self):
    cache = caches["default"]

    # Test ttl
    cache.set("foo", "bar", 10)
    ttl = cache.ttl("foo")
    if isinstance(cache.client, herd.HerdClient):
        self.assertAlmostEqual(ttl, 12)
    else:
        self.assertAlmostEqual(ttl, 10)

    # Test ttl None
    cache.set("foo", "foo", timeout=None)
    ttl = cache.ttl("foo")
    self.assertEqual(ttl, None)

    # Test ttl with expired key
    cache.set("foo", "foo", timeout=-1)
    ttl = cache.ttl("foo")
    self.assertEqual(ttl, 0)

    # Test ttl with not existent key
    ttl = cache.ttl("not-existent-key")
    self.assertEqual(ttl, 0)

def test_refresh():
    cache.set('foo', 'value', timeout=2)
    assert cache.ttl('foo') == 2

    time.sleep(1)
    assert cache.ttl('foo') == 1

    cache.set('foo', 'value', timeout=3)
    assert cache.ttl('foo') == 3

def get(self, request, pk, *args, **kwargs):
    """
    Returns a list of protocol datasources associated with this protocol.

    Also determines authorization for each protocol datasource based on
    the user making the request.
    """
    try:
        p = Protocol.objects.get(pk=pk)
    except ObjectDoesNotExist:
        return Response({'error': 'Protocol requested not found'}, status=404)

    ds = []
    for pds in p.getProtocolDataSources():
        t = ProtocolDataSourceSerializer(pds, context={'request': request}).data
        # Parse ProtocolDataSource configuration
        if pds.driver_configuration != '':
            dc = json.loads(pds.driver_configuration)
        else:
            dc = {}
        # If labels are defined get label names from eHB.
        # (label_id, label_description)
        if 'labels' in list(dc.keys()):
            labels = cache.get('ehb_labels')
            if not labels:
                labels = self.erl_rh.query()
                cache.set('ehb_labels', labels)
                if hasattr(cache, 'ttl'):
                    cache.ttl('ehb_labels', 60)
            nl = []
            for l in dc['labels']:
                for label in labels:
                    if l == label['id']:
                        if label['label'] == '':
                            nl.append((label['id'], 'Record'))
                        else:
                            nl.append((label['id'], label['label']))
            dc['labels'] = nl
        else:
            dc['labels'] = [(1, 'Record')]
        t["driver_configuration"] = dc
        # Determine Authorization
        try:
            ProtocolUserCredentials.objects.get(
                protocol=p, data_source=pds, user=request.user)
            t["authorized"] = True
        except ProtocolUserCredentials.DoesNotExist:
            t["authorized"] = False
        # Include DataSource details
        t["data_source"] = DataSourceSerializer(pds.data_source).data
        ds.append(t)
    return Response(sorted(ds, key=lambda ds: ds["display_label"]))

def get(self, request, *args, **kwargs):
    user = request.user
    # Invalidate both directions of the token mapping before logging out
    user_token_cache_key = f'oauth:user:id:{user.id}:token'
    if cache.ttl(user_token_cache_key) != 0:
        token = cache.get(user_token_cache_key)
        cache.delete(user_token_cache_key)
        token_user_cache_key = f'oauth:token:{token}:user:id'
        if cache.ttl(token_user_cache_key) != 0:
            cache.delete(token_user_cache_key)
    logout(request)
    return super(LogoutView, self).get(request, *args, **kwargs)

def timer_minus(request):
    microwave_status = cache.get("status")
    if microwave_status is None:
        return HttpResponse("Time can't be negative")

    microwave_status = cache.get("status")
    if cache.ttl("status") >= 1 and cache.ttl("status") <= 9:
        cache.delete("status")
    microwave_object = json.loads(microwave_status)
    cache.set("status", json.dumps(microwave_object), timeout=cache.ttl("status") - 10)
    return redirect('/')

def authenticate_credentials(self, key):
    token_user_cache_key = f'oauth:token:{key}:user:id'
    if cache.ttl(token_user_cache_key) == 0:
        raise exceptions.AuthenticationFailed('Invalid token')
    try:
        user_id = cache.get(token_user_cache_key)
        user_token_cache_key = f'oauth:user:id:{user_id}:token'
        if cache.ttl(user_token_cache_key) != 0 and cache.get(user_token_cache_key) == key:
            user = get_object_or_404(User, id=user_id, is_active=True)
            return user, key
        raise exceptions.AuthenticationFailed('Token mismatch')
    except Http404:
        raise exceptions.AuthenticationFailed('Invalid token')

def unpack(request):
    username = request.POST['name']
    conn = get_redis_connection()
    treasure = int(conn.hget(username, 'treasure'))
    if cache.ttl(username):
        time = int(cache.ttl(username))
        if 7200 <= time < 9900:
            treasure += 100
        elif 0 < time:
            treasure += 200
        elif time == 0:
            treasure += 300
    cache.persist(username)
    conn.hmset(username, {'treasure': treasure})
    return HttpResponse('Claimed successfully!')

def blog_detail(request, blog_uuid):
    blog_cache_key = "blog_cache_{}".format(blog_uuid)
    comment_cache_key = "comment_cache_{}".format(blog_uuid)
    cached_value_for_blog = cache.ttl(blog_cache_key)
    cached_value_for_comment = cache.ttl(comment_cache_key)
    if cached_value_for_blog and cached_value_for_comment:
        print('Response from cache')
        resp_for_blog = cache.get(blog_cache_key)
        resp_for_comment = cache.get(comment_cache_key)
    else:
        resp_for_blog = Blog.objects.get(blog_uuid=blog_uuid)
        resp_for_comment = BlogComments.objects.filter(blog_id=resp_for_blog.id)
        cache.set(blog_cache_key, resp_for_blog, timeout=900)
    return render(request, 'blog/blog_detail.html',
                  {'blog': resp_for_blog, 'comments': resp_for_comment})

def get_lock_from_cache(content_type_id, object_id):
    key = f'{LOCKS_CACHE_KEY_PREFIX}{content_type_id}_{object_id}'
    res = cache.get(key)
    if res:  # pragma: no cover
        logger.debug(f'uniCMS locks timed cache - {key} successfully taken from cache')
        return (res, cache.ttl(key))
    return (0, 0)

def get_redirect_url(self, **kwargs):
    url = self.request.build_absolute_uri()
    if not check_qrurl_sign(url):
        logger.warning(f'QRResolveSymbolView.check_qrurl_sign__fail {url}')
        return not_found_url

    cache_key, redirect = url_timed_hash_redirect(url)
    if cache.ttl(cache_key) > 100:
        return redirect

    try:
        key, hotp = kwargs['key'], kwargs['hotp']
        inst = Symbol.objects.get(symbol=str(key).lower())
        assert inst.hotp_at == hotp and inst.is_open
        inst.increase_pages()
    except (KeyError, AssertionError, Symbol.DoesNotExist) as exc:
        logger.warning(f'QRResolveSymbolView.error {url} > {str(exc)}')
        return not_found_url

    data = dict(
        by='scene code',
        tail=inst.tail,
        name=inst.user.name or '**',
        avatar=inst.user.cached_avatar,
        title=inst.title,
        scened=inst.scened,
    )
    cache.set(cache_key, data, timeout=60 * 5)
    return redirect

def get_slide(message):
    room = get_room_or_error(message["room"])
    slide = Slide.objects.get(room=room, now_id=message["id"])
    hash_blob = javaHash(slide.md_blob)
    if cache.ttl("%s/%s" % (message["room"], message["id"])) == 0:
        cache.set("%s/%s" % (message["room"], message["id"]), slide.md_blob, timeout=60)
        cache.set("%s/%s/%s" % (message["room"], message["id"], hash_blob), slide.md_blob, timeout=60)
    else:
        cache.expire("%s/%s" % (message["room"], message["id"]), timeout=60)
        cache.expire("%s/%s/%s" % (message["room"], message["id"], hash_blob), timeout=60)
    message.reply_channel.send({
        "text": json.dumps({
            "get_slide": message["id"],
            "md_blob": slide.md_blob,
            "title": slide.title,
            "idx": slide.now_id,
        }),
    })

def decorator(*args, **kwargs):
    cache_key = _make_cache_key(*args, **kwargs)
    result = cache.get(cache_key, NO_CACHE)
    semi_fresh = False
    if result is not NO_CACHE:
        semi_fresh = fresh_after and (cache.ttl(cache_key) < fresh_after)
        if not semi_fresh:
            # Still absolutely fresh
            return result

    # Let one process recalculate while others return the semi-fresh value (if it exists),
    # else wait for the calculation
    blocking_timeout = 0 if semi_fresh else None
    with use_lock(cache_key, timeout=calculation_time, blocking_timeout=blocking_timeout) as acquired:
        if not acquired:
            # Only possible with semi_fresh = True (blocking_timeout = 0, no waiting for the lock)
            return result
        if not semi_fresh:
            # Check the value wasn't calculated while we were waiting for the lock
            result = cache.get(cache_key, NO_CACHE)
        # This process was selected for value calculation
        if semi_fresh or result is NO_CACHE:
            result = func(*args, **kwargs)
            cache.set(cache_key, result, timeout + fresh_after)
    return result

def get_context_data(self, **kwargs):
    data = super().get_context_data(**kwargs)
    data['code'] = None
    chasing = self.request.GET.get('id', '')
    history_keys = cache.keys("PASTEBIN*")
    data['history'] = []
    for key in history_keys:
        code = cache.get(key)
        if code:
            id = key.lstrip("PASTEBIN")
            data['history'].append({
                'code': code,
                'length': len(code),
                'id': id,
                'ttl': cache.ttl(key)
            })
            if id == chasing:
                data['code'] = code
    data['history'].sort(key=lambda x: x['ttl'])
    if data['history'] and not data['code']:
        data['code'] = data['history'][0]['code']
    return data

def timer_plus(request):
    microwave_status = cache.get("status")
    if microwave_status is None:
        cache.set("status", '{"power":100}', timeout=1)
    if cache.ttl("status") >= 99:
        return HttpResponse("This is maximal time")

    microwave_status = cache.get("status")
    microwave_object = json.loads(microwave_status)
    if cache.ttl("status") >= 91 and cache.ttl("status") <= 99:
        cache.set("status", json.dumps(microwave_object), timeout=99)
        microwave_status = cache.get("status")
        microwave_object = json.loads(microwave_status)
    cache.set("status", json.dumps(microwave_object), timeout=cache.ttl("status") + 10)
    return redirect('/')

def _acquire_token() -> str:
    """Validate that a token exists, or refresh it."""
    secret_ttl = cache.ttl('YSK_secret')
    if secret_ttl is None:
        cache.expire('YSK_secret', timeout=0)
    elif secret_ttl > 0:
        return cache.get('YSK_secret')
    try:
        resp = requests.post(
            settings.YSK_IAM_BEARER_PULL_URL,
            params={'yandexPassportOauthToken': settings.YSK_BEARER_PULL_SECRET},
            timeout=10,
        )
    except requests.Timeout:
        raise exc.TTSBackendIsUnavailable(
            'Yandex speech kit API is unavailable, please try again later')
    if resp.status_code != 200:
        raise requests.HTTPError('Undefined error. Please try again.')
    token = json.loads(resp.text)['iamToken']
    cache.set('YSK_secret', token, timeout=42000)
    return token

def get_user(id, type='id', timeout=60 * 10):
    # For security.
    if type not in ('id', 'name', 'email'):
        return False

    key = _key % (type, id)
    user = cache.get(key)
    if user:
        if type != 'id':
            user = get_user(user, 'id', timeout)
        if timeout and cache.ttl(key) < timeout:
            con = get_redis_connection()
            con.expire(key, timeout)
    else:
        # A trick to emulate variable variables, cf.
        # http://php.net/manual/en/language.variables.variable.php
        kwargs = {}
        kwargs[type] = id
        try:
            user = User.objects.get(**kwargs)
        except ObjectDoesNotExist:
            return None
        else:
            user = user.__dict__
        if timeout:
            if type == 'id':
                cache.set(key, user, timeout=timeout)
            else:
                cache.set(_key % ('id', user['id']), user, timeout=timeout)
                cache.set(key, user['id'], timeout=timeout)
    return user

def get_context_data(self, **kwargs):
    bans = []
    for key in cache.keys(f"{constants.CACHE_KEY_HARBOR_BAN_PREFIX}*"):
        user_id = key.removeprefix(constants.CACHE_KEY_HARBOR_BAN_PREFIX)
        try:
            user = User.objects.get(id=user_id)
        except User.DoesNotExist:
            pass
        else:
            seconds_left = cache.ttl(key)
            banned_until = timezone.localtime() + datetime.timedelta(seconds=seconds_left)
            bans.append(
                (
                    # Reverse sort by seconds left
                    -seconds_left,
                    user.get_full_name(),
                    user_id,
                    date_format(banned_until, "DATETIME_FORMAT"),
                )
            )
    return {
        **super().get_context_data(**kwargs),
        "bans": [ban[1:] for ban in sorted(bans)],
    }

def test_persist(self):
    """Test the persist cache operation"""
    cache.set("foo", "bar", timeout=20)
    cache.persist("foo")
    ttl = cache.ttl("foo")
    self.assertIsNone(ttl)

def get_slide_diff(message):
    # cache is expired, moved to redis->sqlite
    if cache.ttl("%s/%s" % (message["room"], message["id"])) == 0:
        with transaction.atomic():
            room = get_room_or_error(message["room"])
            slide = Slide.objects.get(room=room, now_id=message["id"])
            hash_blob = javaHash(slide.md_blob)
            cache.set("%s/%s" % (message["room"], message["id"]), slide.md_blob, timeout=60)
            cache.set("%s/%s/%s" % (message["room"], message["id"], hash_blob), slide.md_blob, timeout=60)
    else:
        cache.expire("%s/%s" % (message["room"], message["id"]), timeout=60)

    curr_text = cache.get("%s/%d" % (message["room"], message["id"]))

    if cache.ttl("%s/%s/%s" % (message["room"], message["id"], message["hash"])) == 0:
        message.reply_channel.send({
            "text": json.dumps({
                "change_slide": "whole",
                "id": message["id"],
                "curr_text": curr_text,
            }),
        })
    else:
        cache.expire("%s/%s/%s" % (message["room"], message["id"], message["hash"]), timeout=60)
        dmp = diff_match_patch()
        pre_text = cache.get("%s/%s/%s" % (message["room"], message["id"], message["hash"]))
        patches = dmp.patch_make(pre_text, curr_text)
        patch_text = dmp.patch_toText(patches)
        message.reply_channel.send({
            "text": json.dumps({
                "change_slide": "diff",
                "id": message["id"],
                "patch_text": patch_text,
                "pre_hash": message["hash"],
                "curr_hash": javaHash(curr_text),
            }),
        })

def goodkill(request):
    goods_id = cache.get(request.session.get('user') + 'x')  # fetch the flash-sale goods id
    time_out = cache.ttl(request.session.get('user') + 'x')  # check the remaining time before expiry
    item_message = cache.get(goods_id)
    return render(request, 'index/goodkill.html', {
        "item_message": item_message,
        "time_out": time_out
    })

def currently_harbor_authorized(self, now=None, should_log=True):
    def log(s):
        if should_log:
            logger.info(s)

    if now is None:
        now = timezone.now()

    title = None
    current_show = self.current_show_time(now=now)
    if current_show:
        title = current_show[0]
    if not title:
        title = f"{self.get_full_name(short=True)}'s show"
    if config.APPEND_LIVE_ON_STATION_NAME_TO_METADATA:
        # This copy is used in webui/views.py:ZoomView twice, so change it there too
        title += f" LIVE on {config.STATION_NAME}"

    auth_log = f'harbor_auth = {self.get_harbor_auth_display()} for show "{title}"'

    ban_seconds = cache.ttl(f"{constants.CACHE_KEY_HARBOR_BAN_PREFIX}{self.id}")
    if ban_seconds > 0:
        log(f"dj auth requested by {self}: denied ({auth_log}, but BANNED with {ban_seconds} seconds left)")
        return CurrentHarborAuthorization(False, None, title)

    if self.harbor_auth == self.HarborAuth.NEVER:
        log(f"dj auth requested by {self}: denied ({auth_log})")
        return CurrentHarborAuthorization(False, None, title)

    elif self.harbor_auth == self.HarborAuth.ALWAYS:
        log(f"dj auth requested by {self}: allowed ({auth_log})")
        return CurrentHarborAuthorization(True, None, title)

    elif self.harbor_auth == self.HarborAuth.GOOGLE_CALENDAR:
        if config.GOOGLE_CALENDAR_ENABLED:
            if current_show:
                _, lower, upper = current_show
                log(f"dj auth requested by {self}: allowed ({auth_log}, {timezone.localtime(now)} in time bounds -"
                    f" {timezone.localtime(lower)} - {timezone.localtime(upper)} with"
                    f" {self.gcal_entry_grace_minutes} minutes entry grace, {self.gcal_exit_grace_minutes} minutes"
                    " exit grace)")
                # Return the authorized until amount
                end = upper + datetime.timedelta(minutes=self.gcal_exit_grace_minutes)
                return CurrentHarborAuthorization(True, end, title)
            else:
                log(f"dj auth requested by {self}: denied ({auth_log}, {timezone.localtime(now)} not in time bounds"
                    f" for {self.gcal_shows.count()} show times, {self.gcal_entry_grace_minutes} minutes entry"
                    f" grace, {self.gcal_exit_grace_minutes} minutes exit grace)")
                return CurrentHarborAuthorization(False, None, title)
        else:
            log(f"dj auth requested by {self}: allowed ({auth_log}, however GOOGLE_CALENDAR_ENABLED ="
                f" False, so treating this like harbor_auth = {self.HarborAuth.ALWAYS.label})")
            return CurrentHarborAuthorization(True, None, title)

def get_ttl(self, task_type):
    """
    Return the time left for the gamer's task.

    Returns 0 if the time is up or the task has not been started.
    """
    key = self.build_key(task_type)
    return redis.ttl(key)

def validate_code(self, code):
    email = self.initial_data['email']
    key = 'auth:verification:code:{email}'.format(email=email)
    if cache.ttl(key) == 0:
        raise serializers.ValidationError('Verification code has expired')
    elif cache.get(key) != code:
        raise serializers.ValidationError('Incorrect verification code')
    else:
        cache.delete(key)

def validate_email(email):
    """Validate the email address."""
    # Is the email address already registered?
    if User.objects.filter(email=email).first():
        raise serializers.ValidationError("This email address is already registered")
    # Throttle how often verification codes can be sent
    if cache.ttl('sms' + email) > 240:
        raise serializers.ValidationError("Less than 60s since the last code was sent")
    return email

def sendRedis(room_name, token, status):
    # Decrement the room counter while keeping the key's remaining TTL
    count = int(cache.get(room_name + "__count")) - 1
    ttl = cache.ttl(room_name + "__count")
    cache.set(room_name + "__count", str(count), timeout=ttl)
    if count == 0:
        sendTriggertofastapi(room_name)
    return True

def get(self, request):
    if not cache.ttl('vue-music-hot'):
        self.get_hot_music()
    return JsonResponse(
        {
            "code": 200,
            "data": json.loads(cache.get('vue-music-hot')),
            "error": ""
        },
        safe=False)

def test_ttl(self):
    """Test the ttl cache operation"""
    # Test ttl
    cache.set("foo", "bar", 10)
    ttl = cache.ttl("foo")
    self.assertGreater(ttl + 0.5, 10)

    # Test ttl None
    cache.set("foo", "foo", timeout=None)
    ttl = cache.ttl("foo")
    self.assertIsNone(ttl)

    # Test ttl with expired key
    cache.set("foo", "foo", timeout=-1)
    ttl = cache.ttl("foo")
    self.assertEqual(ttl, 0)

    # Test ttl with not existent key
    ttl = cache.ttl("not-existent-key")
    self.assertEqual(ttl, 0)

def test_ttl(self):
    cache = caches["default"]

    # Test ttl
    cache.set("foo", "bar", 10)
    ttl = cache.ttl("foo")
    assert ttl + 0.5 > 10

    # Test ttl None
    cache.set("foo", "foo", timeout=None)
    ttl = cache.ttl("foo")
    assert ttl is None

    # Test ttl with expired key
    cache.set("foo", "foo", timeout=-1)
    ttl = cache.ttl("foo")
    assert ttl == 0

    # Test ttl with not existent key
    ttl = cache.ttl("not-existent-key")
    assert ttl == 0

def test_ttl(self):
    if FAKE_REDIS:
        raise unittest.SkipTest(
            "FakeRedis ttl is broken, see https://github.com/jamesls/fakeredis/issues/119")

    cache = caches["default"]
    _params = cache._params
    _is_herd = (_params["OPTIONS"]["CLIENT_CLASS"] == "django_redis.client.HerdClient")
    _is_shard = (_params["OPTIONS"]["CLIENT_CLASS"] == "django_redis.client.ShardClient")

    # Not supported for shard client.
    if _is_shard:
        return

    # Test ttl
    cache.set("foo", "bar", 10)
    ttl = cache.ttl("foo")
    if _is_herd:
        self.assertAlmostEqual(ttl, 12)
    else:
        self.assertAlmostEqual(ttl, 10)

    # Test ttl None
    cache.set("foo", "foo", timeout=None)
    ttl = cache.ttl("foo")
    self.assertEqual(ttl, None)

    # Test ttl with expired key
    cache.set("foo", "foo", timeout=-1)
    ttl = cache.ttl("foo")
    self.assertEqual(ttl, 0)

    # Test ttl with not existent key
    ttl = cache.ttl("not-existent-key")
    self.assertEqual(ttl, 0)

def increasecnt(post_id):
    """
    This is being called on each article request and asynchronously
    increases a view counter by 1
    """
    # A lock is being used to make this "process" thread-safe
    PREFIX = settings.CACHE_PREFIX
    endkey = prefixing(PREFIX, post_id)
    with redis_cache.lock('lock'):
        if redis_cache.get(endkey):
            redis_cache.incr(endkey)
        else:
            # Timeout is set to infinite (None)
            redis_cache.set(endkey, 1, timeout=5400)
    logger.warn("New View Count For id = {0} is {1} with TTL = {2}".format(
        post_id, redis_cache.get(endkey), redis_cache.ttl(endkey)))

def redis(request):
    cache.set('foo', 'value', timeout=25)
    logger.info("redis: " + cache.get('foo') + ", " + str(cache.ttl('foo')))
    ret = utils.wrap_message(code=0)
    return HttpResponse(json.dumps(ret), content_type='application/json')

def get_cooldown(self, prefix):
    return cache.ttl(str(prefix) + '.faucets.' + str(self.id))