def init_xcache_on_start():
    # Clear the module configuration cache
    cache.set(Xcache.XCACHE_MODULES_CONFIG, None, None)

    # Clear the muit_module task cache
    re_key = "{}_*".format(Xcache.XCACHE_MODULES_TASK_LIST)
    keys = cache.keys(re_key)
    for key in keys:
        try:
            req = cache.get(key)
        except Exception as _:
            cache.delete(key)
            continue
        if req.get("job_id") is None:
            cache.delete(key)

    # Clear the session_info cache
    re_key = "{}_*".format(Xcache.XCACHE_SESSION_INFO)
    keys = cache.keys(re_key)
    for key in keys:
        try:
            cache.delete(key)
        except Exception as _:
            continue

    # Clear the session_count cache
    cache.set(Xcache.XCACHE_SESSION_CONT, 0, None)
    return True
def events(request):
    data = {}

    if 'event' in request.GET and request.GET['event'] != '':
        logger.debug('event details view triggered by %s for event: %s' % (request.user.username, request.GET['event']))
        try:
            data = cache.get('event_' + request.GET['event'])
        except:
            raise
        return render(request, 'isubscribe/generic.html', {'DATA': data['check']})

    logger.debug('events view triggered by %s' % request.user.username)

    for word in cache.keys("event_*"):
        entity = re.sub(r'^event_', '', word)
        try:
            data[entity] = {}
            event_data = cache.get('event_' + entity)
            data[entity]['entity_element_id'] = re.sub(r':|\.', '_', entity)
            data[entity]['entity'] = entity
            data[entity]['status'] = event_data['check']['status']
            data[entity]['output'] = json.dumps(event_data['check']['output'], ensure_ascii=False)
            data[entity]['timestamp'] = event_data['timestamp']

            if 'ack_' + entity in cache.keys("ack_*"):
                data[entity]['ack'] = True
                ack = cache.get('ack_' + entity)
                data[entity]['ack_by'] = ack['user_name']
                data[entity]['ack_comment'] = ack['ack_comment']
            else:
                data[entity]['ack'] = False

            if 'silent_' + entity in cache.keys("silent_*"):
                data[entity]['silent'] = True
                silent = cache.get('silent_' + entity)
                data[entity]['silent_by'] = silent['user_name']
                data[entity]['silent_comment'] = silent['silent_comment']
            else:
                data[entity]['silent'] = False
        except:
            raise

    profile_form = ContactForm(instance=Contact.objects.get(user=request.user.id))

    return render(
        request, 'isubscribe/events.html', {
            'DATA': OrderedDict(
                sorted(data.items(), key=lambda x: x[1]['timestamp'], reverse=True)),
            'profile_form': profile_form
        })
def user_id_subsribtions(user_id):
    subscriptions = []
    for word in cache.keys("rule_*"):
        entity = re.sub(r'^rule_', '', word)
        status_1 = False
        status_2 = False
        try:
            rule = cache.get('rule_' + entity)
            if '1' in rule and user_id in rule['1']:
                status_1 = True
            if '2' in rule and user_id in rule['2']:
                status_2 = True
        except:
            pass

        if 'silent_' + entity in cache.keys("silent_*"):
            silent = True
        else:
            silent = False

        if status_1 == True or status_2 == True:
            subscriptions.append({
                'entity': entity,
                'status_1': status_1,
                'status_2': status_2,
                'silent': silent
            })
    return subscriptions
def cache_clear(self, request: Request) -> Response:
    """Clear policy cache"""
    keys = cache.keys("policy_*")
    cache.delete_many(keys)
    LOGGER.debug("Cleared Policy cache", keys=len(keys))
    # Also delete user application cache
    keys = cache.keys(user_app_cache_key("*"))
    cache.delete_many(keys)
    return Response(status=204)
def test_cache_general_spreadsheet(self):
    cache.clear()
    assert not cache.keys("*")
    data = google_data.get_general_spreadsheet(timeout=10)
    cache_key = cache.keys("*")[0]
    assert data
    assert data == cache.get(cache_key)
def test_cache_keys(self):
    # WHEN
    cache.set("key", "value")
    # THEN
    self.assertEqual(len(cache.keys("*")), 1)
    # WHEN
    cache.delete("key")
    # THEN
    self.assertEqual(len(cache.keys("*")), 0)
def test_engine_cache(self):
    """Ensure the policy engine caches evaluation results"""
    pbm = PolicyBindingModel.objects.create()
    binding = PolicyBinding.objects.create(target=pbm, policy=self.policy_false, order=0)
    engine = PolicyEngine(pbm, self.user)
    self.assertEqual(
        len(cache.keys(f"policy_{binding.policy_binding_uuid.hex}*")), 0)
    self.assertEqual(engine.build().passing, False)
    self.assertEqual(
        len(cache.keys(f"policy_{binding.policy_binding_uuid.hex}*")), 1)
    self.assertEqual(engine.build().passing, False)
    self.assertEqual(
        len(cache.keys(f"policy_{binding.policy_binding_uuid.hex}*")), 1)
def invalidate_policy_cache(sender, instance, **_):
    """Invalidate Policy cache when policy is updated"""
    from authentik.policies.models import Policy, PolicyBinding

    if isinstance(instance, Policy):
        total = 0
        for binding in PolicyBinding.objects.filter(policy=instance):
            prefix = f"policy_{binding.policy_binding_uuid.hex}_{binding.policy.pk.hex}*"
            keys = cache.keys(prefix)
            total += len(keys)
            cache.delete_many(keys)
        LOGGER.debug("Invalidating policy cache", policy=instance, keys=total)

    # Also delete user application cache
    keys = cache.keys(user_app_cache_key("*")) or []
    cache.delete_many(keys)
def index(request):
    chave_cache = 'contexto_home' + ':' + str(datetime.date.today()) + ':' + 'g5football'
    uri = request.build_absolute_uri()
    seo_og = Seo(uri)

    if cache.keys(chave_cache):
        conteudo = get_post_from_cache(chave_cache)
    else:
        conteudo = saving_content_redis(chave_cache)

    if request.method == 'POST':
        formulario = PostFormAtleta(request.POST)
        form_reponsavel_atleta = FormularioResponsavel(request.POST)
        if formulario.is_valid() and form_reponsavel_atleta.is_valid():
            try:
                formulario.save()
                form_reponsavel_atleta.save(formulario)
                try:
                    IdadeAtleta.defining_age(formulario.data)
                except Exception, error:
                    msg = 'Erro ao inserir idade no atleta {}'.format(error)
                    print msg
                try:
                    EmailSender.sender(formulario.data)
                except Exception, email_error:
                    msg = 'Erro ao enviar email {}'.format(email_error)
                    print msg
            except Exception, e:
                msg = 'Erro ao salvar o formulario {}'.format(e)
                print msg
def scenic(request, pk):
    obj = Scenic.objects.get(pk=pk)
    objs = obj.area_set.all().order_by('-num')
    data = []
    if len(objs) >= 3:
        for ob in objs[:3]:
            temp = {}
            temp['name'] = ob.name
            temp['num'] = ob.num
            data.append(temp)
    if len(objs) == 2:
        for ob in objs:
            temp = {}
            temp['name'] = ob.name
            temp['num'] = ob.num
            data.append(temp)
        data.append({'name': u'无', 'num': 0})
    if len(objs) == 1:
        data.append({'name': objs.first().name, 'num': objs.first().num})
        data.append({'name': u'无', 'num': 0})
        data.append({'name': u'无', 'num': 0})
    if len(objs) == 0:
        data.append({'name': u'无', 'num': 0})
        data.append({'name': u'无', 'num': 0})
        data.append({'name': u'无', 'num': 0})

    _data = []
    key = str(obj.pk) + ':*'
    _keys = cache.keys(key)
    if _keys:
        for _mac in _keys:
            _data.append(cache.get(_mac))
    data = {'data': _data, 'num': data, 'account': cache.get(str(pk) + 'account')}
    return HttpResponse(json.dumps(data))
def ticket_service_view(request):
    if request.method == 'POST':
        method, data = 'post', request.POST
    else:
        method, data = 'get', request.GET

    client_ip = request.META.get('REMOTE_ADDR')
    logger.info("Request from {}".format(client_ip))
    matched_ips = cache.keys('{}_status'.format(client_ip))
    if len(matched_ips) > 0:
        if cache.get('{}_status'.format(client_ip)) == 'w':
            return call_third_party(method, data)
        else:
            return JsonResponse({'detail': 'Permission Denied.'})
    else:
        g_recaptcha_response = request.POST.get('g-recaptcha-response', [])
        if g_recaptcha_response != '' and check_captcha(g_recaptcha_response, client_ip):
            cache.set('{}_status'.format(client_ip), 'w')
            return call_third_party(method, data)
        else:
            unathorised_request_num = cache.get_or_set('{}_unathorised_request_num'.format(client_ip), 0)
            if unathorised_request_num < settings.MAX_UNATHORISED_REQUEST_NUM:
                cache.incr('{}_unathorised_request_num'.format(client_ip))
                template = loader.get_template('ticket_template.html')
                context = {
                    'site_key': settings.RECAPTCHA_SITEKEY,
                    'data': data
                }
                return HttpResponse(template.render(context, request))
            return HttpResponse('You are not allowed to access this service anymore.')
def set_to_cache(request, value):
    if not CMS_CACHE_MAX_ENTRIES or \
            len(cache.keys(f'{CMS_CACHE_KEY_PREFIX}*')) < CMS_CACHE_MAX_ENTRIES:
        key = make_cache_key(request)
        cache.set(key, value, CMS_CACHE_TTL)
        logger.debug(f'uniCMS Cache - {key} successfully stored to cache')
        return True
def tube_desc(tube_id):
    tube_info = cache.get('tube_info')
    tube_steps = cache.get_many(cache.keys('%s:*' % tube_id))
    if tube_steps:
        init_price = tube_steps['%s:1' % tube_id]['init_price']
        steps = []
        for i in range(1, len(tube_steps) + 1):
            steps.append(tube_steps['%s:%d' % (tube_id, i)])
        response_data = {
            'tubeId': tube_id,
            'stepNow': tube_info['step_now'],
            'initPrice': init_price,
            'steps': steps
        }
        return HttpResponse(json.dumps(response_data, default=json_default),
                            content_type="application/json")
    else:
        response_data = {'error': 'Tube does not exist'}
        return HttpResponseNotFound(json.dumps(response_data, default=json_default),
                                    content_type="application/json")
def tube_cancel(request, tube_id):
    try:
        tube_info = cache.get('tube_info')
        running_tube_id = tube_info['tube_id']
        if tube_id != running_tube_id:
            raise RuntimeError('Tube is not running')

        tube_steps = cache.get_many(cache.keys('%s:*' % tube_id))
        if tube_steps:
            last_step = tube_steps['%s:%d' % (tube_id, len(tube_steps))]
            tube_step = last_step['tube_step']
            p.cancel(tube_id, tube_step)
            response_data = {'tubeId': tube_id}
            return HttpResponse(json.dumps(response_data, default=json_default),
                                content_type="application/json")
        else:
            raise RuntimeError('Unknown error occurred.')
    except Exception as ex:
        # Use the exception message so the error payload is JSON-serializable
        response_data = {'error': str(ex)}
        return HttpResponseServerError(json.dumps(response_data, default=json_default),
                                       content_type="application/json")
def scrapper():
    try:
        headers_list = list()
        soup_data = get_scrapped_data()
        if soup_data:
            class_data = soup_data.find("div", class_="tabular_data_live_analysis")
            if class_data:
                table_content = class_data.find("table", attrs={"id": "topGainers"})
                if table_content:
                    tr_data = table_content.find_all("tr")
                    if tr_data and tr_data[0]:
                        for th_data in tr_data[0].find_all("th"):
                            print("th_data", th_data.text)
                            headers_list = create_header_list(th_data, headers_list)

            nifty_url_data = get_nse_json_data()
            if nifty_url_data is not None:
                cached_keys = cache.keys('prefix:*')
                if cached_keys:
                    for key in cached_keys:
                        cache.delete(key)
                if len(headers_list) > 0:
                    cache.set('valid_headers', headers_list, timeout=None)
                current_timestamp = datetime.datetime.now()
                cache.set('valid_key', current_timestamp, timeout=None)
                cache.set(current_timestamp, nifty_url_data, timeout=None)
                logging.info("DATA SUCCESSFULLY CACHED IN REDIS")
        else:
            logging.info("NO SOUP DATA FOUND AFTER SCRAPING!")
    except Exception as ex:
        # Bind the caught exception to a name so it can be re-raised
        raise ex
def get_global_count():
    count_cached_value = cache.get(GLOBAL_COUNT_CACHE_KEY, None)
    if count_cached_value:
        return count_cached_value

    try:
        old_site_reporter_counter_keys = cache.keys('org:*:reporters:old-site')
        cached_values = [cache.get(key) for key in old_site_reporter_counter_keys]

        # no old sites cache values, double check with a fetch
        if not cached_values:
            cached_values = fetch_old_sites_count()

        count = sum([elt['results'].get('size', 0) for elt in cached_values if elt.get('results', None)])

        for org in Org.objects.filter(is_active=True):
            if org.get_config('is_on_landing_page'):
                count += get_reporters_count(org)

        # cached for 10 min
        cache.set(GLOBAL_COUNT_CACHE_KEY, count, 60 * 10)
    except AttributeError:
        import traceback
        traceback.print_exc()
        count = '__'

    return count
def readMyRoomsFromCache(user):
    '''
    A method to read a list of chatrooms owned by user
    and sorted by how many users are in the room
    '''
    result = []
    it = cache.keys('chatroom(*)_owner_*')
    pattern = re.compile('chatroom\((.*?)\)_owner_(.*)')
    for key in it:
        m = pattern.match(key)
        if m:
            hash = m.group(1)
            name = m.group(2)
            count = cache.get(key)

            # Skip the rooms owned by other users
            if user.username != name:
                continue

            room = Chatroom.objects.get(hash=hash)
            data = {
                'title': room.title,
                'hash': hash,
                'owner': room.owner,
                'count': count
            }
            result.append(data)

    result = sorted(result, key=lambda k: k['count'], reverse=True)
    return result
def check(self):
    logger.debug("escalator check entity: %s" % self.entity)

    if 'ack_' + self.entity in cache.keys("ack_*") or self.status == 0:
        return False

    if self.occurrences > 20 and self.occurrences > len(self.history) and self.status == 2:
        return True

    if len(self.history) > 1:
        # remove current status from history
        self.history.pop()

    if len(self.history) < 2:
        return False

    problem_history = []
    for i in range(len(self.history), 0, -1):
        last_status = int(self.history[i - 1])
        if int(last_status) == 0:
            break
        problem_history.append(last_status)

    if self.status == 2 and len(problem_history) >= 2 and len(set(problem_history)) == 1:
        return True

    if int(self.status) == 2 and len(problem_history) > 10:
        return True

    return False
def viewcountupdate():
    """
    This task is executed hourly and pushes the cached view counters
    into the database using a single transaction
    """
    # This is the prefix we are going to use to distinguish the cache keys
    # we need for the view counters
    PREFIX = settings.CACHE_PREFIX

    logger.warn('Entering viewcountupdate...')
    with redis_cache.lock('lock'):
        keys = redis_cache.keys(PREFIX + "*")
        if keys:
            try:
                with transaction.atomic():
                    for key in keys:
                        post_id = key.split('_')[1]
                        hourly_viewcount = redis_cache.get(key)
                        try:
                            post = Posts.objects.get(id=post_id)
                        except ObjectDoesNotExist:
                            continue
                        old_viewcount = post.viewcount
                        post.viewcount = post.viewcount + hourly_viewcount
                        new_viewcount = post.viewcount
                        logger.warn('Updated: id = {0}. Oldcount = {1} -> Newcount = {2} '.format(post_id, old_viewcount, new_viewcount))
                        post.save(update_fields=['viewcount'])
            except IntegrityError:
                logger.warn('Rolling back...')
                transaction.rollback()
            redis_cache.delete_pattern(PREFIX + "*")
    logger.warn('Exiting viewcountupdate...')
def list_module_tasks():
    re_key = "{}_*".format(Xcache.XCACHE_MODULES_TASK_LIST)
    keys = cache.keys(re_key)
    reqs = []
    for key in keys:
        reqs.append(cache.get(key))
    return reqs
def get_booking_tokens(self):
    tokens = []
    flight_keys = cache.keys('flight_*')
    for key in flight_keys:
        token = cache.get(key).get('booking_token')
        # key[7:] strips the 'flight_' prefix to recover the flight id
        tokens.append((key[7:], token))
    return tokens
def get_all_flights(self):
    flights = []
    flights_keys = cache.keys('flight_*')
    for key in flights_keys:
        flight = self.get_flight(key)
        flights.append(flight)
    return flights
def resolve_urls(self, info, **kwargs):
    url = kwargs.get('url')
    url_type = UrlType()
    keys = cache.keys('*')
    cached_url = [key for key in keys if cache.get(key) == url]
    if cached_url:
        url_type.url = cached_url[0]
        return url_type
    if not url:
        return url_type
    if not re.findall('^http(s)?://+', url.lower()):
        url = "http://{}".format(url)
    if not cache.get(url):
        id = uuid4()
        url_type.normal_url = url
        url_type.url = create_short_url(id.fields[1])
        cache.set(url, url_type.url)
    else:
        url_type.url = cache.get(url)
    return url_type
def list(self, request):
    keys_to_search = "STDATA:*:*"
    name = request.query_params.get("s")
    page = request.query_params.get("page")

    if name is not None and not name == "":
        keys_to_search = "STDATA:*:*" + str(name).upper() + "*"
    else:
        if page is not None:
            try:
                page = int(page)
                page_str = str(page - 1)
                if page == 1:
                    page_str = ""
                keys_to_search = "STDATA:" + page_str + "[0-9]:*"
            except:
                pass

    keys = cache.keys(keys_to_search)
    queryset = [cache.get(key) for key in keys]
    print("Result :: ", keys_to_search)
    print("Result :: ", keys[:3])
    print("Result :: ", queryset[:3])

    serializer = EquityDataSerializer(data=queryset, many=True)
    if serializer.is_valid():
        return Response(serializer.data)
    else:
        return Response(serializer.errors, status=400)
def view_cache(request):
    """ Cache """
    value = None
    key = request.GET.get('key', None)
    if key:
        value = cache.get(key)
        try:
            value = dict(value)
            value = json_encode(value, indent=4)
        except (TypeError, ValueError):
            pass

    if hasattr(cache, 'keys'):
        for key in request.POST:
            cache.delete_pattern(key)
        keys = sorted(cache.keys('*'))
    else:
        for key in request.POST:
            cache.delete(key)
        keys = sorted((key.split(':')[-1] for key in cache._cache.keys()))

    return {
        'keys': keys,
        'value': value or None,
    }
def get_context_data(self, **kwargs):
    data = super().get_context_data(**kwargs)
    data['code'] = None
    chasing = self.request.GET.get('id', '')
    history_keys = cache.keys("PASTEBIN*")
    data['history'] = []
    for key in history_keys:
        code = cache.get(key)
        if code:
            id = key.lstrip("PASTEBIN")
            data['history'].append({
                'code': code,
                'length': len(code),
                'id': id,
                'ttl': cache.ttl(key)
            })
            if id == chasing:
                data['code'] = code
    data['history'].sort(key=lambda x: x['ttl'])
    if data['history'] and not data['code']:
        data['code'] = data['history'][0]['code']
    return data
def process_request(self, request):
    # Get the current session key
    session_key = request.session.session_key

    # Get the IP address of the current visitor
    if 'HTTP_X_FORWARDED_FOR' in request.META:
        ip = request.META['HTTP_X_FORWARDED_FOR']
    else:
        ip = request.META['REMOTE_ADDR']

    # Check whether the user is logged in
    if request.session.get('user_info', False):
        # The session is already configured to refresh its expiry automatically,
        # so the step below is not needed:
        # # While online, refresh the session expiry on every page view so the session does not expire
        # request.session.set_expiry(SESSION_COOKIE_AGE)

        # Count online users: first build a unique key
        online_key = 'count_online_id_{_id}_session_{_session}'.format(
            _id=request.session.get('user_info')['uid'],
            _session=session_key)
        # Set (or refresh) the expiry time
        cache.set(online_key, 'online', timeout=SESSION_COOKIE_AGE)

    # Attach the count to the request so templates can use it.
    # A wildcard match counts how many count_online keys exist; if the user stops
    # browsing, the session and cache keys expire and are deleted automatically.
    request.online_member_count = len(cache.keys("count_online_id_*"))
    request.current_visitor_ip = ip
def get_all_queued_confirmation_blocks():
    """
    Return a set of all queued confirmation blocks
    """
    keys = cache.keys(f'{QUEUED_CONFIRMATION_BLOCK}:*')
    return cache.get_many(keys)
def invalidate(self, key='*'):
    if isinstance(key, str):
        keys = cache.keys('{0}_{1}'.format(self.CACHE_PREFIX, key))
        for key in keys:
            self._invalidate(key)
    elif isinstance(key, (list, tuple)):
        self.invalidate_many(key)
def get_contact(self, user_pk):
    if 'contact_' + str(user_pk) in cache.keys("contact_*"):
        contact = cache.get('contact_' + str(user_pk))
    else:
        try:
            user = User.objects.get(id=user_pk, is_active=True)
        except:
            logger.error('notify get_contact failed finding user id: %s' % (user_pk))
            # Without a user object there is no contact to build
            return {}

        if not hasattr(user, 'contact') or user.contact.slack_uid in [None, '']:
            logger.error('notify get_contact no contact found for user id: %s' % (user_pk))
            return {}
        else:
            if user.contact.slack_uid not in [None, '']:
                slack_uid = user.contact.slack_uid
            else:
                slack_uid = None

            if user.contact.phone_number not in [None, '']:
                phone_number = user.contact.phone_number
            else:
                phone_number = None

            contact = {
                'slack_uid': slack_uid,
                'phone_number': phone_number,
                'username': user.username
            }
            cache.set('contact_' + str(user_pk), contact, timeout=(float(1) * 3600))

    return contact
def home_clear_cache():
    from content.models import (
        ArticlePage, BlogPage, NewsPage, EventPage, IssuePage
    )
    from content.helpers import prefix_key, PageStructureHelper

    HomePage = apps.get_model(app_label='home', model_name='HomePage')
    page_models = (ArticlePage, NewsPage, BlogPage, EventPage, IssuePage, HomePage,)
    home_page = HomePage.objects.exclude(body=None).first()
    if not home_page:
        return

    for is_mobile in (True, False):
        key = prefix_key(
            'home_page', page_models, is_mobile=is_mobile
        )
        if cache.get('home_page_sign') != key:
            cache.delete_many(cache.keys('*home_page*'))
            key_structure = 'home_page_structure'
            if is_mobile:
                key_structure += '_mobile'
            ps = PageStructureHelper(home_page.body, is_mobile, key_structure)
            ps.get_structure()
            cache.set('home_page_sign', key, 60 * 60 * 7)
def write_read_in_database(content_type, yesterday_str):
    content_type_dict = {
        'answer': Answer,
        'article': Article,
        'think': Idea,
        'question': Question
    }
    which_model = content_type_dict[content_type]
    cache_key_list = cache.keys(content_type + '_*_' + yesterday_str)
    data_list = [{
        'nums': cache.get(key),
        'id': key.replace(content_type + '_', '').replace('_' + yesterday_str, '')
    } for key in cache_key_list]

    for data in data_list:
        time.sleep(0.01)
        try:
            instance = which_model.objects.get(pk=data['id'])
            answer_nums = instance.read_nums.filter(object_id=instance.id).first()
            raw_nums = answer_nums.nums if answer_nums else 0
            instance.read_nums.update_or_create(
                object_id=instance.id,
                defaults={'nums': raw_nums + int(data['nums'])})
        except Exception as e:
            print(e.args)
def put(self, request, pk=None):
    response = self.partial_update(request, pk)
    for key in cache.keys('*'):
        if 'products_frontend' in key:
            cache.delete(key)
    cache.delete('products_backend')
    return response
def post(self, request):
    response = self.create(request)
    for key in cache.keys('*'):
        if 'products_frontend' in key:
            cache.delete(key)
    cache.delete('products_backend')
    return response
def search_from_cache(key):
    if key != "":
        value = cache.keys(str(key) + "*")
        if value != None:
            data = json.dumps(value)
            return data
        else:
            data = None
    return ''
def index(self, request):
    if request.method == 'POST' and request.POST.getlist('_selected_action') and \
            request.POST.get('action') == 'delete_selected' and \
            request.POST.get('post') == 'yes':
        cache.delete_many(request.POST.getlist('_selected_action'))
        if not len(cache.get_many(request.POST.getlist('_selected_action'))):
            messages.add_message(request, messages.SUCCESS,
                                 'Successfully deleted %d keys.' % len(request.POST.getlist('_selected_action')))
        else:
            messages.add_message(request, messages.ERROR,
                                 'Could not delete %d keys.' % len(request.POST.getlist('_selected_action')))
    elif request.method == 'POST' and request.POST.getlist('_selected_action') and \
            request.POST.get('action') == 'delete_selected':
        return render_to_response('redis_admin/delete_selected_confirmation.html',
                                  {'keys': request.POST.getlist('_selected_action')},
                                  context_instance=RequestContext(request))

    if request.GET.get('q'):
        keys_result = cache.keys('*%s*' % request.GET.get('q'))
    else:
        keys_result = cache.keys('*')

    paginator = Paginator(keys_result, 100)
    page = request.GET.get('p')
    try:
        keys = paginator.page(page)
    except PageNotAnInteger:
        keys = paginator.page(1)
    except EmptyPage:
        keys = paginator.page(paginator.num_pages)

    return render_to_response('redis_admin/index.html',
                              {'keys': keys, 'count': paginator.count, 'page_range': paginator.page_range},
                              context_instance=RequestContext(request))
def save(request):
    random_bytes = [random.randint(0, 0xFF) for i in range(32)]
    bytes = ''.join(map(byte_to_base32_chr, random_bytes))
    user = "******" + bytes
    cache.set(user, {"name": request.POST['name'],
                     "description": request.POST['description'],
                     "email": request.POST['email']}, timeout=None)
    results = cache.keys("user-*")
    lists = []
    for e in results:
        lists.append(cache.get(e))
    return render_to_response("home.html", {"lists": lists, },
                              context_instance=RequestContext(request))
def save_visits_task():
    """Persist visit counts to database and reset cache."""
    cache_keys = cache.keys(f'{Story.VISIT_KEY_PREFIX}*')
    for key in cache_keys:
        val = int(cache.get(key))
        pk = int(key.replace(Story.VISIT_KEY_PREFIX, ''))
        Story.objects.filter(pk=pk).update(
            hit_count=F('hit_count') + val,
            hot_count=F('hot_count') + 100 * val,
        )
        logger.info(f'Story {pk} was visited {val} times')
        cache.delete(key)
    return len(cache_keys)
def set_cache_rules(self):
    chave_conteudo = 'seo_content' + ':' + 'g5football'
    if cache.keys(chave_conteudo):
        return cache.get(chave_conteudo)
    else:
        seo_ogs = TextoApresentacao.objects.get(pk=1)
        seo = {
            'titulo': seo_ogs.tituto,
            'texto': seo_ogs.texto,
            'imagem_empresa': seo_ogs.imagem_empresa.url,
        }
        cache.set(chave_conteudo, seo, 864000)
        return cache.get(chave_conteudo)
def handle(self, *args, **options):
    user_id = options['user_id'][0]
    try:
        user = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        raise CommandError('User with ID "%s" does not exist' % user_id)

    # cache_keys = cache.keys('*')
    for key in cache.keys('*'):
        if re.match('{0}.*login_attempts'.format(user.username), key):
            cache.delete(key)
            print('Login attempts deleted from cache')

    user.is_active = True
    user.save()
    print('User Reactivated')
def stats_snapshot(reset=True):
    last_now = cache.get('apistats_last_reset', '', None)
    now = timezone.now()
    results = {}
    for key in cache.keys('apistats__*'):
        results[key] = cache.get(key)
        if reset:
            cache.delete(key)
    if reset:
        cache.set('apistats_last_reset', now, None)
    results = dict(sorted(results.items()))
    return {
        'start_date': str(last_now),
        'end_date': str(now),
        'data': results
    }
def get_global_count():
    count_cached_value = cache.get(GLOBAL_COUNT_CACHE_KEY, None)
    if count_cached_value:
        return count_cached_value

    try:
        reporter_counter_keys = cache.keys('org:*:reporters:*')
        cached_values = [cache.get(key) for key in reporter_counter_keys]
        count = sum([elt['results'].get('size', 0) for elt in cached_values if elt.get('results', None)])

        # cached for 10 min
        cache.set(GLOBAL_COUNT_CACHE_KEY, count, 60 * 10)
    except AttributeError:
        import traceback
        traceback.print_exc()
        count = '__'

    return count
def validator_service_view(request):
    client_ip = request.META.get('REMOTE_ADDR')
    matched_ips = cache.keys('{}_status'.format(client_ip))
    if len(matched_ips) > 0:
        if cache.get('{}_status'.format(client_ip)) == 'w':
            return call_third_party()
        else:
            return JsonResponse({'detail': 'Permission Denied.'})
    else:
        g_recaptcha_response = request.POST.get('g-recaptcha-response', [])
        if len(g_recaptcha_response) > 0 and g_recaptcha_response[0] != '':
            cache.set('{}_status'.format(client_ip), 'w')
            return call_third_party()
        else:
            template = loader.get_template('validator_template.html')
            context = {'name': request.GET.get('name', '')}
            return HttpResponse(template.render(context, request))
def readAllRoomsFromCache():
    '''
    A method to read a list of chatrooms
    sorted by how many users are in the room
    '''
    global room_info_key
    result = []
    it = cache.keys('chatroom(*)_owner_*')
    pattern = re.compile('chatroom\((.*?)\)')
    for key in it:
        m = pattern.match(key)
        if m:
            hash = m.group(1)
            count = cache.get(key)
            roominfo = cache.get(room_info_key % (hash))
            data = {
                'title': roominfo['title'],
                'hash': hash,
                'owner': roominfo['owner'],
                'count': count
            }
            result.append(data)

    result = sorted(result, key=lambda k: k['count'], reverse=True)
    return result
def persist_cached(self, cache_pattern=None):
    """
    Fetches all requests stored in the cache and pushes them to the database.
    Returns the persisted requests.
    """
    created = []
    if settings.REQUEST_USE_CACHE:
        # Set default cache pattern if not given
        if not cache_pattern:
            cache_pattern = '%s*' % settings.REQUEST_CACHE_PREFIX
        # Get all requests from cache
        requests_keys = cache.keys(cache_pattern)
        requests_dict = cache.get_many(requests_keys)
        requests = requests_dict.values()
        # Persist all requests to database
        created = self.bulk_create(requests)
        # Clear requests cache
        cache.delete_pattern(cache_pattern)
    return created
def handle(self, *args, **options):
    print cache.keys('*')
    print "CLEARING CACHE..."
    cache.clear()
    print cache.keys('*')
def handle(self, *args, **options):
    rooms = cache.keys("chatroom(*)")
    for room in rooms:
        cache.set(room, len(cache.keys(room + '_user*')))
def main(req):
    """ FIXME: super big shit """
    cache_prefix = req.session.session_key
    start = req.GET.get('start')
    currency = req.GET.get('cur')
    captcha = req.GET.get('cpt')
    wallet = req.GET.get('cat')
    ut_min = req.GET.get('tmin')
    ut_max = req.GET.get('tmax')

    query = Faucet.objects.exclude(visible=False).order_by('-reward_mid')
    if currency is not None:
        query = query.filter(currency__id=currency)
    if captcha is not None:
        query = query.filter(captcha__id=captcha)
    if wallet is not None:
        query = query.filter(category__id=wallet)
    if ut_min is not None:
        query = query.filter(update_time__gt=ut_min)
    if ut_max is not None:
        query = query.filter(update_time__lt=ut_max)

    if start is None:
        # If a specific faucet is being viewed, it is shown even while still on cooldown;
        # otherwise exclude the faucets already stored in the session cache.
        in_cache_faucets = cache.keys(str(cache_prefix) + '.faucets.*')
        excludes = [x.rsplit('.', 1)[1] for x in in_cache_faucets]
        query = query.exclude(id__in=excludes)

    if start is not None:  # viewing a specific faucet
        query = query.filter(title_en=start)  # filter by faucet
        if len(query) == 0:
            # no results (e.g. the faucet does not exist): show all faucets for the currency
            response = redirect('hammer')
            if currency is not None:
                response['Location'] += '?cur={0}'.format(currency)
            return response
        else:
            faucet = query.get()
    else:  # viewing an ordinary faucet
        if len(query) == 0:
            # All faucets are on cooldown; if there is a filter besides the currency,
            # keep only the currency filter.
            if captcha or wallet or ut_min or ut_max:
                response = redirect('hammer')
                if currency is not None:
                    response['Location'] += '?cur={0}'.format(currency)
                return response
            else:
                response = redirect(reverse('faucets'))
                return response
        else:
            faucet = query[0]

    # store the faucet in the session cache
    cache.set(str(cache_prefix) + '.faucets.' + str(faucet.id), 1,
              timeout=faucet.update_time * 60)

    next_link = '/hammer/?cur={0}'.format(faucet.currency_id)
    if captcha is not None:
        next_link = next_link + '&cpt=' + captcha
    if wallet is not None:
        next_link = next_link + '&cat=' + wallet
    if ut_min is not None:
        next_link = next_link + '&tmin=' + ut_min
    if ut_max is not None:
        next_link = next_link + '&tmax=' + ut_max

    # quick hack, fix the admin namespace
    admin_edit_link = "/admin/objects/faucet/{0}/".format(faucet.id)

    if faucet.currency.title_short_en == "BTC":
        cookie_faucet = req.COOKIES.get('address' + str(faucet.currency.id))
        if cookie_faucet is not None and not cookie_faucet == '':
            faucetbox_link = "https://faucetbox.com/en/check/" + str(cookie_faucet)
        else:
            faucetbox_link = ''
    else:
        faucetbox_link = ''

    return render(req, 'hammer/main.html', {
        'faucet': faucet,
        'labels': _make_labels(faucet),
        'next_link': next_link,
        'is_moderator': req.user.groups.filter(name='Moderators').exists(),
        'admin_edit_link': admin_edit_link,
        'faucetbox_link': faucetbox_link
    })
def invalidate_all():
    keys = cache.keys(f'{_make_prefix()}*')
    for cache_key in keys:
        cache.delete(cache_key)
    return keys
def get_market_depth(s_b, code):
    orders = []
    keys = cache.keys(s_b + '-' + code + '-*')
    for key in keys:
        orders.append(cache.get(key))
    return orders