def test_has_key(self):
    # The cache can be inspected for cache keys
    cache.set("hello1", "goodbye1")
    self.assertTrue(cache.has_key("hello1"))
    self.assertFalse(cache.has_key("goodbye1"))
    # timeout=None means "cache forever", so the key must still be present
    cache.set("no_expiry", "here", None)
    self.assertTrue(cache.has_key("no_expiry"))
def category(request, pk, name=None):
    """Render the blog index template restricted to one category's posts.

    Sidebar data (categories, friend links, tags) is served from the cache
    when present; otherwise it is queried and cached without expiry.
    """
    cate = get_object_or_404(Category, pk=pk)
    posts = cate.post_set.all()  # every post belonging to this category

    if cache.has_key("categories"):
        categories = cache.get("categories")
    else:
        categories = Category.objects.all()
        cache.set("categories", categories)

    if cache.has_key("links"):
        links = cache.get("links")
    else:
        links = FriendLink.objects.all()
        cache.set("links", links)

    if cache.has_key("tags"):
        tags = cache.get("tags")
    else:
        tags = Tag.objects.all()
        cache.set("tags", tags)

    context = {
        "posts": posts,
        "cate": cate,
        "is_category": True,
        "categories": categories,
        'links': links,
        'tags': tags,
    }
    return render(request, 'blogapp/index.html', context)
def remove_page(request, wiki, path):
    """Delete a wiki page: its markdown file, attachments, and caches.

    Returns the (wiki, '') pair expected by the caller.
    """
    path = stripspecialchars(path)
    w = get_object_or_404(Wiki, slug=wiki)

    # Remove the markdown source and record the deletion in repo history.
    w.repo.delete(u'{0}.md'.format(path))
    w.repo.commit(request.user, ugettext(u'Update Wiki: {0} deleted'.format(path)).encode('utf-8'))

    # Remove attachements
    Attachment.objects.filter(wiki=w, page=os.path.join(wiki, path)).delete()

    # Invalidate cache: the rendered page, its diff listing, the edit feed.
    pageurl = reverse('view-page', args=[wiki, path])
    for stale_key in (pageurl, u'diffs_{0}'.format(pageurl)):
        if cache.has_key(stale_key):
            cache.delete(stale_key)
    cache.delete('LastEdits')

    return wiki, ''
def index(request):
    """Blog front page: posts plus sidebar data, each cached for 120s."""
    if cache.has_key("posts"):
        posts = cache.get("posts")
    else:
        posts = Post.objects.all()
        cache.set("posts", posts, timeout=120)

    if cache.has_key("categories"):
        categories = cache.get("categories")
    else:
        categories = Category.objects.all()
        cache.set("categories", categories, timeout=120)

    if cache.has_key("links"):
        links = cache.get("links")
    else:
        links = FriendLink.objects.all()
        cache.set("links", links, timeout=120)

    if cache.has_key("tags"):
        tags = cache.get("tags")
    else:
        tags = Tag.objects.all()
        cache.set("tags", tags, timeout=120)

    context = {
        'posts': posts,
        "categories": categories,
        'links': links,
        'tags': tags,
        'index_active': 'active',  # highlights the "index" nav entry
    }
    return render(request, 'blogapp/index.html', context)
def test_has_key(self):
    # The cache can be inspected for cache keys
    cache.set("hello1", "goodbye1")
    self.assertEqual(cache.has_key("hello1"), True)  # noqa
    self.assertEqual(cache.has_key("goodbye1"), False)  # noqa
    # timeout=None means "cache forever", so the key must still be present
    cache.set("no_expiry", "here", None)
    self.assertEqual(cache.has_key("no_expiry"), True)  # noqa
def postHeartbeat(self, request, pk=None): global EXPIRATION #check for seperate cache entry """ if (cache.get('heartbeat') != None): #trigger would be 'heartbeat' for status of heartbeats #SEND TRIGGER IN THIS CASE TO START """ if not cache.has_key('heartbeat'): #if doesn't have key heartbeat set its cache entry cache.set('heartbeat','connected',EXPIRATION) else: #else delete the old one cache.delete('heartbeat') #create a new one cache.set('heartbeat','connected',EXPIRATION) #ONCE STARTS RECEIVING HEARTBEATS #cache.set('heartbeat', 'triggering', EXPIRATION) #ONCE STOPS RECEIVING HEARTBEATS #cache.set('heartbeat','stopped', EXPIRATION) if cache.get('trigger') == 1: redis_publisher = RedisPublisher(facility="viewer",sessions=gcsSessions()) redis_publisher.publish_message(RedisMessage(json.dumps({'triggering':'true','time':cache.get("time")}))) elif cache.get('trigger')== 0: redis_publisher = RedisPublisher(facility="viewer",sessions=gcsSessions()) redis_publisher.publish_message(RedisMessage(json.dumps({'triggering':'false'}))) if (cache.has_key('trigger')): return Response({'heartbeat':cache.get('trigger')}) else: return Response({})
def connectionCheck():
    """When a connection check is pending, notify viewers if the android
    heartbeat key has expired, then consume the check flag."""
    if cache.has_key("checkallowed"):
        if not cache.has_key("android"):
            publisher = RedisPublisher(facility='viewer', sessions=gcsSessions())
            publisher.publish_message(RedisMessage(json.dumps({'disconnected': 'disconnected'})))
        cache.delete("checkallowed")
def about(request):
    """Render the 'about' page with cached sidebar data."""
    if cache.has_key("categories"):
        categories = cache.get("categories")
    else:
        categories = Category.objects.all()
        cache.set("categories", categories)

    if cache.has_key("links"):
        links = cache.get("links")
    else:
        links = FriendLink.objects.all()
        cache.set("links", links)

    if cache.has_key("tags"):
        tags = cache.get("tags")
    else:
        tags = Tag.objects.all()
        cache.set("tags", tags)

    context = {
        'about_active': 'active',  # highlights the "about" nav entry
        "categories": categories,
        'links': links,
        'tags': tags,
    }
    return render(request, 'blogapp/about.html', context)
def test_has_key(self):
    # The cache can be inspected for cache keys
    cache.set("hello1", "goodbye1")
    assert cache.has_key("hello1")  # noqa
    assert not cache.has_key("goodbye1")  # noqa
    # timeout=None means "cache forever", so the key must still be present
    cache.set("no_expiry", "here", None)
    assert cache.has_key("no_expiry")  # noqa
def tag(request, tag_id):
    """Render the index template restricted to posts carrying one tag."""
    t = get_object_or_404(Tag, id=tag_id)
    posts = TaggedItem.objects.get_by_model(Post, t)

    if cache.has_key("categories"):
        categories = cache.get("categories")
    else:
        categories = Category.objects.all()
        cache.set("categories", categories)

    if cache.has_key("links"):
        links = cache.get("links")
    else:
        links = FriendLink.objects.all()
        cache.set("links", links)

    if cache.has_key("tags"):
        tags = cache.get("tags")
    else:
        tags = Tag.objects.all()
        cache.set("tags", tags)

    context = {
        "posts": posts,
        "is_tag": True,
        'tag_active': 'active',
        "tag_name": t,
        "categories": categories,
        'links': links,
        'tags': tags,
    }
    return render(request, 'blogapp/index.html', context)
def list_photos(request, **kw):
    """Render the photo gallery: one album (when id_album is given) or the
    full album list.  Cached entries are repr() strings rehydrated with eval().
    """
    album = ''
    if kw.get('id_album'):
        # filter by album
        rows = 2
        cols = 6
        photos_list = []
        cache_photo_id = 'photo:' + kw.get('id_album', '')
        # Warm the per-photo cache entry if missing.
        if not cache.has_key(cache_photo_id):
            mlist = Photo.objects.filter(id=ZI(kw.get('id_album')))
            if mlist.count():
                AddPhotoCache(mlist[0])
        if cache.has_key(cache_photo_id):
            # NOTE(review): eval() on cached text — assumes cache content is trusted.
            photo = eval(cache.get(cache_photo_id))
            album = photo['album']
            if not cache.has_key('photos:' + album):
                photos_list = Photo.objects.filter(album=album).order_by('-date')
                AddPhotosListCache(album, photos_list)
            photos_list = eval(cache.get('photos:' + album))
    else:
        # full list by album
        rows = 5
        cols = 3
        photos_list = {}
        allkey = '.full_list'
        if not cache.has_key('photos:' + allkey):
            photos_list = Photo.objects.all()
            AddAlbumListCache(allkey, photos_list)
        photos_list = eval(cache.get('photos:' + allkey)).values()
    return render_to_response('photos.html',
        context_instance=RequestContext(request, {
            'request': request,
            'photos': PageList(request, photos_list, rows * cols),
            'photos_count': len(photos_list),
            'rows_count': range(1, rows * cols + 1, cols),
            'album': album,
            'logback': reverse('my.views.list_photos')}))
def post(request, pk, title=None):
    """Render a single post's detail page with cached sidebar data."""
    post = get_object_or_404(Post, pk=pk)

    if cache.has_key("categories"):
        categories = cache.get("categories")
    else:
        categories = Category.objects.all()
        cache.set("categories", categories)

    if cache.has_key("links"):
        links = cache.get("links")
    else:
        links = FriendLink.objects.all()
        cache.set("links", links)

    if cache.has_key("tags"):
        tags = cache.get("tags")
    else:
        tags = Tag.objects.all()
        cache.set("tags", tags)

    context = {
        'post': post,
        "categories": categories,
        'links': links,
        'tags': tags,
    }
    return render(request, 'blogapp/post.html', context)
def test_cache_memoize_large_files(self):
    """Testing cache_memoize with large files"""
    cacheKey = "abc123"
    # This takes into account the size of the pickle data, and will
    # get us to exactly 2 chunks of data in cache.
    data = 'x' * (CACHE_CHUNK_SIZE * 2 - 8)

    def cacheFunc(cacheCalled=[]):
        # The mutable default argument records calls across invocations,
        # so this asserts the function body runs at most once.
        self.assertTrue(not cacheCalled)
        cacheCalled.append(True)
        return data

    result = cache_memoize(cacheKey, cacheFunc, large_data=True, compress_large_data=False)
    self.assertEqual(result, data)
    site = Site.objects.get_current()
    full_key = '%s:%s' % (site.domain, cacheKey)
    # The parent key plus exactly two chunk keys must exist — no third chunk.
    self.assertTrue(cache.has_key(full_key))
    self.assertTrue(cache.has_key('%s-0' % full_key))
    self.assertTrue(cache.has_key('%s-1' % full_key))
    self.assertFalse(cache.has_key('%s-2' % full_key))
    # Second call must be served from cache (cacheFunc would fail otherwise).
    result = cache_memoize(cacheKey, cacheFunc, large_data=True, compress_large_data=False)
    self.assertEqual(result, data)
def get_song(request, **kw):
    """Render one song page, or the plain-text lyrics when ?astext is set.

    Cached entries are repr() strings rehydrated with eval().
    """
    if request.method == 'GET':
        song_list = []
        song_id = kw.get('id', '')
        # Warm the per-song cache entry if missing (404s on unknown id).
        if not cache.has_key('song:' + song_id):
            song = get_object_or_404(Song, id=ZI(song_id))
            AddSongCache(song)
        song = eval(cache.get('song:' + song_id))  #
        if request.GET.get('astext'):
            # get song text only
            # NOTE(review): output is cp1251-encoded plain text — confirm clients expect that charset.
            rs = HttpResponse((song['artist'] + ' ' + song['title'] + '\n\n' + striptags(song['content'].replace(' ',' ').replace('<br/>', '\n'))).encode('cp1251') +'\n\n(source egaxegax.appspot.com)', content_type='text/plain')
            return rs
        if song['title'] != 'about':
            # add about to song text
            mkey = '.about' + song['artist']
            if not cache.has_key('songs:' + mkey):
                song_list = Song.objects.filter(Q(title='about')&Q(artist=song['artist']))
                AddSongListCache(mkey, song_list)
            song_list = eval(cache.get('songs:' + mkey))
        song_list.append( song )
        return render_to_response('song.html',
            context_instance=RequestContext(request, {
                'request': request,
                'art_index': art_index,
                'form': SearchForm(initial={'search':request.GET.get('search')}),
                'songs': song_list,
                'song': song,
                'autoplay': request.GET.get('a', 0),
                'logback': reverse('songs.views.get_song', kwargs={'id': song_id}) }))
def test_has_key(self):
    # A stored key is reported present; an unknown key is not.
    cache.set(self.KEY, self.VALUE)
    res = cache.has_key(self.KEY)  # noqa
    self.assertTrue(res)
    res = cache.has_key('DOES NOT EXIST')  # noqa
    self.assertFalse(res)
def cheungssh_login(request):
    """Login endpoint (Python 2) with per-IP brute-force throttling.

    Failed attempts are counted in the cache under 'fail.limit.<ip>'; once
    the counter exceeds the configured threshold the request is rejected.
    """
    info={"msgtype":"ERR","content":"","auth":"no"}
    logintime=time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(time.time()))
    client_ip=request.META['REMOTE_ADDR']
    limit_ip='fail.limit.%s'%(client_ip)
    # Threshold is configurable via the cache; default to 4 failed attempts.
    ip_threshold_r=cache.get('ip.threshold')
    ip_threshold=lambda x:x if x is not None else 4
    ip_threshold=ip_threshold(ip_threshold_r)
    if cache.has_key(limit_ip):
        if cache.get(limit_ip)>ip_threshold:
            info['content']="无效登陆"
            cache.incr(limit_ip)
            # NOTE(review): cache.expire() is not part of Django's cache API —
            # presumably a redis-backed cache exposing extra methods; confirm.
            cache.expire(limit_ip,8640000)
            info=json.dumps(info)
            return HttpResponse(info)
    if request.method=="POST":
        username = request.POST.get("username", '非法用户名')
        password = request.POST.get("password", False)
        print username,password,request.POST
        user=authenticate(username=username,password=password)
        if user is not None:
            if user.is_active:
                print "成功登陆"
                login(request,user)
                request.session["username"]=username
                info["msgtype"]="OK"
                info['auth']="yes"
                info['content']="成功登录"
                request.session.set_expiry(0)
                # Successful login clears the failure counter for this IP.
                if cache.has_key(limit_ip):cache.delete(limit_ip)
                print request.COOKIES,request.session.keys(),request.session['_auth_user_id']
                info['sid']=str(request.session.session_key)
            else:
                info["content"]="用户状态无效"
                print info["content"]
        else:
            # Wrong credentials: bump (or start) the failure counter.
            if cache.has_key(limit_ip):
                cache.incr(limit_ip)
            else:
                cache.set(limit_ip,1,3600)
            info["content"]="用户名或密码错误"
            info["IP"]=client_ip
            info["IP-Locate"]=IP.find(client_ip)
            info["username"]=username
            info["logintime"]=logintime
            # Record the failed sign-in attempt for auditing.
            redis_to_redis.set_redis_data('sign.record',json.dumps(info,encoding='utf-8',ensure_ascii=False))
    else:
        info["content"]="No Get"
    info=json.dumps(info,encoding="utf-8",ensure_ascii=False)
    response=HttpResponse(info)
    response["Access-Control-Allow-Origin"] = "*"
    response["Access-Control-Allow-Methods"] = "POST"
    response["Access-Control-Allow-Credentials"] = "true"
    return response
def clear_tasks_from_cache(task, user):
    """Evict the cached task list (pending or finished) that holds `task`."""
    bucket = 'pending-tasks' if task.status == PENDING else 'finished-tasks'
    stale_key = key_function([user.username], bucket)
    if cache.has_key(stale_key):
        cache.delete(stale_key)
def flush_all(request):
    """Clear the whole cache, then probe every key still reported by the
    configured memcached servers, and go back to the cache index page."""
    cache.clear()
    for i in SERVERS:
        server,port = i.split(":")
        keys = get_keys(server, port)
        for key in keys:
            print key
            # NOTE(review): return value is discarded — this looks like a
            # probe/touch only; confirm it is intentional.
            cache.has_key(key[0])
    return HttpResponseRedirect(reverse("cache_index"))
def sendTarget(self, request, pk=None):
    """Submit a target and its image to the interop server.

    Returns a Response whose JSON body is either {'response': 'Success'},
    {'sent': ...} when the target was already sent, or {'error': ...}.
    """
    connectionCheck()
    try:
        if not cache.has_key("Server") or not cache.has_key("InteropClient"):
            return Response(json.dumps({'error': "Not logged into interop!"}))
        # fetch the client
        session = cache.get("InteropClient")
        server = cache.get("Server")
        targatAtPk = Target.objects.get(pk=int(request.data['pk']))
        # BUG FIX: original read `target.sent` before `target` was bound and
        # built a set literal instead of a dict for the response body.
        if targatAtPk.sent:
            return Response(json.dumps({'sent': 'Target was sent\n Would you like to send an edit?'}))
        # serialize the target
        pretarget = TargetSubmissionSerializer(targatAtPk)
        data = None
        try:
            # create dictionary to use to create AUVSITarget
            dataDict = dict(pretarget.data)
            dataDict['type'] = dataDict.pop('ptype')
            for key in dataDict:
                if dataDict[key] == '':
                    dataDict[key] = None
            target = AUVSITarget(**dataDict)
            if not cache.has_key("Creds"):
                return Response(json.dumps({'error': "Not logged into interop!"}))
            target.user = cache.get("Creds").validated_data['username']
            # post the target
            data = post_target(session, server, target, tout=5)
            # test for interop error and respond accordingly
            if isinstance(data, InteropError):
                code, reason, text = data.errorData()
                errorStr = "Error: HTTP Code %d, reason: %s" % (code, reason)
                return Response(json.dumps({'error': errorStr}))
            # retrieve image binary for sent image (context manager closes it)
            pid = data['id']
            with open(targatAtPk.picture.path, 'rb') as f:
                picData = f.read()
            resp = post_target_image(session, server, tout=5, target_id=pid,
                                     image_binary=picData)
            # BUG FIX: original called errorData() on redis_publisher (wrong
            # object) and formatted with an unparenthesized tuple.
            if isinstance(resp, InteropError):
                code, reason, text = resp.errorData()
                errorStr = "Error: HTTP Code %d, reason: %s" % (code, reason)
                return Response(json.dumps({'error': errorStr}))
            target.wasSent()
            return Response(json.dumps({'response': "Success"}))
        except Exception as e:
            return Response({'error': str(e)})
    except Target.DoesNotExist:
        return Response(json.dumps({'error': 'Image does not exist'}))
def test_cache_versioning_has_key(self): cache.set('answer1', 42) # has_key self.assertTrue(cache.has_key('answer1')) # noqa self.assertTrue(cache.has_key('answer1', version=1)) # noqa self.assertFalse(cache.has_key('answer1', version=2)) # noqa self.assertFalse(caches['v2'].has_key('answer1')) # noqa self.assertTrue(caches['v2'].has_key('answer1', version=1)) # noqa self.assertFalse(caches['v2'].has_key('answer1', version=2)) # noqa
def test_cache_versioning_has_key(self): cache.set('answer1', 42) # has_key assert cache.has_key('answer1') # noqa assert cache.has_key('answer1', version=1) # noqa assert not cache.has_key('answer1', version=2) # noqa assert not caches['v2'].has_key('answer1') # noqa assert caches['v2'].has_key('answer1', version=1) # noqa assert not caches['v2'].has_key('answer1', version=2) # noqa
def test_cache_versioning_has_key(self): cache.set("answer1", 42) # has_key self.assertTrue(cache.has_key("answer1")) # noqa self.assertTrue(cache.has_key("answer1", version=1)) # noqa self.assertFalse(cache.has_key("answer1", version=2)) # noqa self.assertFalse(caches["v2"].has_key("answer1")) # noqa self.assertTrue(caches["v2"].has_key("answer1", version=1)) # noqa self.assertFalse(caches["v2"].has_key("answer1", version=2)) # noqa
def test_get_fields(self):
    # Field data is cached lazily: neither key exists before get_fields(),
    # and both the concept key and the field-list key exist afterwards.
    concept_id = 1
    key = cache.id_key % concept_id
    fkey = cache.field_id_key % concept_id
    self.assertFalse(djcache.has_key(key))
    self.assertFalse(djcache.has_key(fkey))
    fields = cache.get_fields(concept_id)
    self.assertTrue(djcache.has_key(key))
    self.assertEqual(djcache.get(fkey), fields)
def testCar(self):
    # The cached copy tracks the database, not in-memory state:
    # save() refreshes it, delete() evicts it.
    key = self.car.key()
    assert cache.has_key(key)
    assert cache.get(key).make == "Avensis"
    self.car.make = "Auris"
    # an unsaved attribute change must not touch the cache
    assert cache.get(key).make == "Avensis"
    self.car.save()
    assert cache.get(key).make == "Auris"
    self.car.delete()
    assert not cache.has_key(key)
def test_options_cache(self):
    """Option values are cached lazily: only after `.value` is accessed.

    Uses assertEqual — assertEquals is a deprecated alias removed in
    modern unittest.
    """
    created = self.model.options.create_many(self.options)
    keys = [i.cache_key for i in created]
    # it should not be cached yet
    for key in keys:
        self.assertEqual(False, cache.has_key(key))
    for opt in created:
        opt.value  # accessing the property populates the cache
        self.assertEqual(True, cache.has_key(opt.cache_key))
        self.assertEqual(opt.value, cache.get(opt.cache_key))
def test_get(self):
    # A successful lookup populates the cache under the concept key.
    concept_id = 1
    key = cache.id_key % concept_id
    self.assertFalse(djcache.has_key(key))
    concept = cache.get(concept_id)
    self.assertNotEqual(concept, None)
    self.assertEqual(djcache.get(key), concept)
    djcache.delete(key)
    # With an empty queryset the concept cannot be found, and the miss
    # must not repopulate the cache.
    queryset = Field.objects.none()
    concept = cache.get(concept_id, queryset=queryset)
    self.assertEqual(concept, None)
    self.assertFalse(djcache.has_key(key))
def test_cache(self):
    """Round-trip a random integer through the cache: set, get, delete."""
    import random
    value = random.randint(0, 100)
    self.assertFalse(cache.has_key('test_key'))
    cache.set('test_key', value)
    self.assertTrue(cache.has_key('test_key'))
    self.assertEqual(cache.get('test_key'), value)
    cache.delete('test_key')
    self.assertFalse(cache.has_key('test_key'))
def dbpedia(wiki_id, use_cache=True):
    """ Return dbpedia info about the resource """
    if not wiki_id:
        print wiki_id
        raise Exception(
            'dbpedia wiki_id should be a valid string, none received.')
    ckey = 'dbpedia:%s' % wiki_id
    logger.debug(
        'dbpedia: loading contents for {wiki_id:%s, url: https://dbpedia.org/data/%s.json}' % (wiki_id, wiki_id))
    if use_cache and cache.has_key(ckey):
        logger.debug('dbpedia: returning cached contents.')
        return json.loads(cache.get(ckey))
    # perform the request to the dbpedia json endpoint
    res = requests.get('https://dbpedia.org/data/%s.json' % wiki_id)
    res.raise_for_status()
    contents = res.json()
    if use_cache:
        # raw body is cached without expiry; re-parsed on every cache hit
        cache.set(ckey, res.text, timeout=None)
    logger.debug(
        'dbpedia: {status_code:%s, wiki_id:%s}, url: https://dbpedia.org/data/%s.json' % (res.status_code, wiki_id, wiki_id))
    return contents
def test_touch(self):
    # cache.touch() updates the timeout.
    cache.set('expire1', 'very quickly', timeout=1)
    self.assertIs(cache.touch('expire1', timeout=4), True)
    time.sleep(2)
    # past the original 1s timeout but still inside the touched 4s window
    self.assertTrue(cache.has_key('expire1'))
    time.sleep(3)
    self.assertFalse(cache.has_key('expire1'))
    # cache.touch() works without the timeout argument.
    cache.set('expire1', 'very quickly', timeout=1)
    self.assertIs(cache.touch('expire1'), True)
    time.sleep(2)
    self.assertTrue(cache.has_key('expire1'))
    # touching a missing key reports failure
    self.assertIs(cache.touch('nonexistent'), False)
def PhoneCode(self, request, pk=None):
    """
    GET: issue a phone verification code; request {"Phone": Phone}
    POST: verify a code; request {"Phone": Phone, "PhoneCode_01": GetPhoneCode, "PhoneCode_02": SetPhoneCode}
    """
    if request.method == "GET":
        Phone = request.data.get('Phone')
        if not BasePublishViewSet.None_Dict_value(
                Phone).None_Dict_value_Is:
            return Response(BaseResponse(
                Message=ResponseMessage.NullMessage.value).__str__(),
                status=status.HTTP_401_UNAUTHORIZED)
        else:
            # NOTE(review): randint(10000, 999999) yields 5- or 6-digit
            # codes — confirm the intended range.
            code = random.randint(10000, 999999)
            cache.set(Phone, code, 60)  # code is valid for 60 seconds
            return Response(BaseResponse(Message="successfully",
                                         Phone=Phone).__str__(),
                            status=status.HTTP_200_OK)
    elif request.method == "POST":
        Phone = request.data.get('Phone')
        PhoneCode = request.data.get("PhoneCode")
        if not BasePublishViewSet.SameCode(Phone, PhoneCode).is_same_code:
            return Response(BaseResponse(
                Message=ResponseMessage.NullMessage.value).__str__(),
                status=status.HTTP_401_UNAUTHORIZED)
        else:
            # Compare against the cached code; str(None) when it expired.
            PhoneodeRedis = str(
                cache.get(Phone)) if cache.has_key(Phone) else str(None)
            return Response(BaseResponse(Message=ResponseMessage.Code(
                is_True=BasePublishViewSet.SameCode(
                    str(PhoneCode), PhoneodeRedis.__str__())).__str__(),
                Phone=Phone).__str__(),
                status=status.HTTP_401_UNAUTHORIZED)
def _cache(*args, **kwargs): key = sha1(str(func.__module__) + str(func.__name__)).hexdigest() # Search in cache if it exists if cache.has_key(key): # Get value in cache value = cache.get(key) # If was locked if value == LOCK: # Try until unlock while value == LOCK: time.sleep(1) value = cache.get(key) # Return value of cache return value # If not exists in cache else: # Function can be called several times before it finishes and is put into the cache, # then lock it to others wait it finishes. cache.set(key, LOCK, length) # Execute method result = func(*args, **kwargs) # Set in cache the result of method cache.set(key, result, length) return result
def bulk_import_public_gs(gsid, gid, use_cache=True, required_headers=['slug']):
    """Fetch a published Google Sheet as CSV (cached without expiry) and
    return (rows, fieldnames).

    NOTE(review): required_headers is unused here and a mutable default —
    confirm whether callers rely on it.
    """
    # if not 'sheet':
    url = 'https://docs.google.com/spreadsheets/d/e/{0}/pub'.format(gsid)
    print url, gsid, gid
    ckey = 'gs:%s:%s' % (gsid, gid)
    print ckey
    if use_cache and cache.has_key(ckey):
        #print 'serve cahced', ckey
        logger.debug('getting csv from cache: %s' % ckey)
        contents = cache.get(ckey)
    else:
        logger.debug('loading csv...%s' % url)
        # raise Exception('please provide the sheet to load')
        response = requests.get(url, stream=True, params={
            'gid': gid,
            'single': 'true',
            'output': 'csv'
        })
        response.encoding = 'utf8'
        contents = response.content
        print 'done'
        # cached forever; key embeds sheet + tab ids
        cache.set(ckey, contents, timeout=None)
    import csv
    reader = csv.DictReader(contents.splitlines(), delimiter=',')
    return [row for row in reader], reader.fieldnames
def wrapper(*args, **kwargs):
    # `judge` decides whether a result is valid/cacheable; default: not None.
    judge = out_kwargs.get('judge', lambda result: result != None)
    timeout = out_kwargs.get('timeout', cache.default_timeout)
    name = out_kwargs.get('name', '')
    param = encode_param(args, kwargs)
    key = u"%s:%s:%s:%s" % (name, method.__module__, method.__name__, param)
    # Cached results may legitimately be None, 0, False, etc., so a
    # has_key() probe backs up the plain get().
    res = cache.get(key)
    if res != None or cache.has_key(key):
        logger.debug(u'读取缓存, key:%s, 返回:%s', key, res)
        if not judge or judge(res):
            return res
    res = None
    # The wrapped call may fail, so guard it.
    try:
        res = method(*args, **kwargs)
        # Only cache results the judge accepts; with no judge, cache everything.
        if not judge or judge(res):
            cache.set(key, res, timeout=timeout)
            logger.debug(u'设置缓存%s秒,key:%s, 结果:%s', timeout, key, res)
    except Exception, e:
        logger.error(u"[red]函数执行错误: %s[/red] %s %s.%s,参数:%s,%s",
                     e, name, method.__module__, method.__name__, args, kwargs,
                     exc_info=True, extra={
                         'color': True,
                         'Exception': e
                     })
    # NOTE(review): no return statement is visible after the try/except in
    # this excerpt — confirm `res` is returned by code beyond this view.
def get_points_by_policy(policy_name):
    """Return all points inside a policy area, cached in <=190k-point chunks."""
    def chunk_key(idx):
        # Exact legacy key format: every space (including any inside the
        # policy name) is collapsed to an underscore.
        return ' '.join(('policy', policy_name, str(idx))).replace(' ', '_')

    # Reassemble the point list from consecutive cached chunks.
    points = []
    chunk_num = 0
    point_cache = cache.get(chunk_key(chunk_num))
    while point_cache is not None:
        points.extend(point_cache)
        chunk_num += 1
        point_cache = cache.get(chunk_key(chunk_num))

    if not points:
        # Cache miss: compute from the policy polygons, then store in chunks
        # small enough for the cache backend.
        polygons = region_services.get_policy_area_polygons(policy_name)
        mpoly = polygons_to_mpoly(polygons)
        points = layer_services.get_points_by_polygons(mpoly)
        chunks = [points[i:i + 190000] for i in range(0, len(points), 190000)]
        for idx, chunk in enumerate(chunks):
            cache.set(chunk_key(idx), chunk, None)
            assert cache.has_key(chunk_key(idx)), "Could not write to cache"
    return points
def load_consumer(sender, *args, **kwargs): instance = kwargs['instance'] # if the object being instantiated has a pk, i.e. has been saved to the db if instance.id: cache_key = 'neo_consumer_%s' % instance.id if USE_MCAL: ''' All member fields are in our database ''' if not cache.has_key(cache_key): member_dict = dict((k, getattr(instance, k)) for k in NEO_ATTR.union(ADDRESS_FIELDS)) cache.set(cache_key, member_dict, 1200) else: ''' Members with a corresponding consumer in CIDB won't have all fields stored in our database ''' try: member_dict = cache.get(cache_key, None) if not member_dict: neoprofile = instance.neoprofile if neoprofile: # retrieve consumer from Neo consumer = api.get_consumer(instance.neoprofile.consumer_id) wrapper = ConsumerWrapper(consumer=consumer) member_dict = dict((k, getattr(wrapper, k)) for k in NEO_ATTR) member_dict.update(wrapper.address) # update instance with Neo attributes if member_dict: for key, val in member_dict.iteritems(): setattr(instance, key, val) except NeoProfile.DoesNotExist: pass
def get_cached_or_create_item(self, uid, it):
    """Return a ProxyMileItem for Baidu POI `uid`, building and caching it
    for 24h on first sight.

    NOTE(review): when settings.USE_CACHE is False, `item` is never bound
    and the final return raises NameError — confirm USE_CACHE is always on.
    """
    from .models import ProxyMileItem
    itemid = 'item#' + uid
    if settings.USE_CACHE:
        if cache.has_key(itemid):
            item = cache.get(itemid)
            print 'hit item'
        else:
            ext = BaiduMap.search_detail_verbose(uid)['content']['ext']
            # Convert Baidu BD09 coordinates to GCJ02 for the location string.
            lng, lat = self.convert_BD09_to_GCJ02((it['location']['lng'], it['location']['lat']))
            locstr = self.to_loc_str(lng, lat)
            pic = self.pick_image_From_ext_info(ext)
            author = Author.objects.get(user__username='******')
            item = ProxyMileItem(title=it['name'],
                                 uid=uid,
                                 publishTime=datetime.now(pytz.utc),
                                 category=it['category'],
                                 address=it['address'],
                                 POIName=it['category'] + '(' + it['address'] + ')',
                                 picOne=pic,
                                 #'http://apis.map.qq.com/ws/streetview/v1/image?size=600x480&location=%s&pitch=0&heading=0&key=LF2BZ-5WHWR-ZONWQ-WBMHI-VUPOQ-JNBSA' % locstr,
                                 coordinate=Point(it['location']['lng'], it['location']['lat']),
                                 author=author,
                                 )
            if 'distance' in it['detail_info']:
                item.distance = it['detail_info']['distance']
            cache.set(itemid, item, timeout=3600*24)
    return item
def destroy_cache_function(key_list, equipment=False):
    """Delete the cache entries whose names hash to entries of key_list.

    Entries are stored under sha1(str(key)).hexdigest(); equipment caches use
    the same scheme with an 'equipment' suffix appended before hashing.
    Fixes: the original converted the key to str twice and hashed the key
    twice (once for has_key, once for delete).
    """
    for key in key_list:
        name = str(key)
        if equipment:
            name = name + 'equipment'
        # NOTE(review): sha1() here takes the str directly — assumes Python 2
        # byte strings; under Python 3 this would need .encode().
        digest = sha1(name).hexdigest()
        if cache.has_key(digest):
            cache.delete(digest)
def incr(self, key, delta=1, start=0):
    """Increment `key` by `delta`, seeding it with start + delta when absent.

    Fixes: the original has_key()/set() pair had a check-then-act race;
    cache.add() only writes when the key is missing, making the seed atomic
    on backends that support it.
    """
    key = self._clean_key(key)
    logger.debug('Incremeting %s by %d' % (key, delta))
    if cache.add(key, start + delta):
        # We created the key, so we already know its value.
        return start + delta
    return cache.incr(key, delta)
def __init__(self, request, queryset, extra_ordering=[], perform_q=True, cache_prefix=None):
    # Cached variant: when a prefix is given and the request's hash is
    # already cached (and ?nocache is not set), skip the expensive parent
    # initialisation entirely.
    # NOTE(review): extra_ordering uses a mutable default — safe only if it
    # is never mutated downstream; confirm.
    if cache_prefix:
        self.cache_key = u'{0}.{1}'.format(cache_prefix, self.get_verbose_hash(request=request))
        # ?nocache=<anything truthy> forces a rebuild even when cached
        self.is_in_cache = cache.has_key(self.cache_key) and not request.query_params.get('nocache', None)
        if self.is_in_cache:
            return None
    super(CachedGlue, self).__init__(request=request, queryset=queryset,
                                     extra_ordering=extra_ordering, perform_q=perform_q)
def wrapper(*args, **kwargs):
    """Serve `key` from the cache; on a miss, compute, store, and return."""
    if cache.has_key(key):
        return cache.get(key)
    data = func(*args, **kwargs)
    cache.set(key, data, timeout)
    return data
def cache_test(request):
    """Cache the first ?url= value seen; echo the cached value afterwards.

    Fixes: the original returned None both when no url parameter was given
    and after caching a new value — a Django view must always return an
    HttpResponse.
    """
    get_url = request.GET.get("url")
    if get_url:
        print(get_url)
        if cache.has_key("url"):
            return HttpResponse(cache.get("url"))
        cache.set("url", get_url)
        # Acknowledge the freshly cached value instead of falling through.
        return HttpResponse(get_url)
    return HttpResponse("no url provided")
def get_pending_tasks(user):
    """Return the user's pending tasks, caching the queryset per username.

    Fixes: cache.get() already returns None on a miss, so the extra
    has_key() round-trip (and its race window) was unnecessary.
    """
    key = key_function([user.username], 'pending-tasks')
    pending_tasks = cache.get(key)
    if not pending_tasks:
        pending_tasks = Task.objects.filter(author=user, status=PENDING)
        cache.set(key, pending_tasks)
    return pending_tasks
def postHeartbeat(self, request, pk=None): global EXPIRATION #check for seperate cache entry """ if (cache.get('heartbeat') != None): #trigger would be 'heartbeat' for status of heartbeats #SEND TRIGGER IN THIS CASE TO START """ if not cache.has_key('heartbeat'): #if doesn't have key heartbeat set its cache entry cache.set('heartbeat', 'connected', EXPIRATION) else: #else delete the old one cache.delete('heartbeat') #create a new one cache.set('heartbeat', 'connected', EXPIRATION) #ONCE STARTS RECEIVING HEARTBEATS #cache.set('heartbeat', 'triggering', EXPIRATION) #ONCE STOPS RECEIVING HEARTBEATS #cache.set('heartbeat','stopped', EXPIRATION) if cache.get('trigger') == 1: redis_publisher = RedisPublisher(facility="viewer", sessions=gcsSessions()) redis_publisher.publish_message( RedisMessage( json.dumps({ 'triggering': 'true', 'time': cache.get("time") }))) elif cache.get('trigger') == 0: redis_publisher = RedisPublisher(facility="viewer", sessions=gcsSessions()) redis_publisher.publish_message( RedisMessage(json.dumps({'triggering': 'false'}))) if (cache.has_key('trigger')): return Response({ 'heartbeat': cache.get('trigger'), 'loop': cache.get('loop'), 'delay': cache.get('delay') }) else: return Response({})
def get_access_client():
    """Return the WeChat corp API client, creating and caching it once."""
    key = 'client'
    if cache.has_key(key):
        api_client = cache.get(key)
    else:
        api_client = WeChatClient(corp_id, app_secret)
        cache.set(key, api_client)
    return api_client
def cache_rm(request, path):
    """Evict `path` from the cache and report whether anything was deleted."""
    # http://djangosnippets.org/snippets/936/
    if not cache.has_key(path):
        result = "NOT FOUND"
    else:
        cache.delete(path)
        result = "DELETED"
    return HttpResponse('<h1>%s</h1><h4>%s</h4>' % (result, path))
def postDetail(request, year, month, day, slug, id):
    """Published-post detail view: cached post lookup, comment submission,
    and a similar-posts sidebar."""
    # get cache
    cache_key = "post:{}".format(id)
    if cache.has_key(cache_key):
        post = cache.get(cache_key)
    else:
        post = get_object_or_404(
            Post,
            slug=slug,
            status='published',
            created__year=year,
            created__month=month,  # requires USE_TZ=False in settings, otherwise month/day lookups fail
            created__day=day,
            id=id)
        cache.set(cache_key, post, 3600)
    comments = post.comments.filter(active=True).order_by("created")
    new_comment = None
    if request.method == 'POST':
        # Only authenticated users may comment.
        if request.user.is_authenticated:
            comment_form = CommentForm(data=request.POST)
            if comment_form.is_valid():
                new_comment = comment_form.save(commit=False)
                new_comment.post = post
                new_comment.user = request.user
                # floor: 1-based position of the comment in the thread
                if comments:
                    new_comment.floor = comments.count() + 1
                else:
                    new_comment.floor = 1
                new_comment.save()
                comments = post.comments.filter(
                    active=True).order_by("created")
        else:
            comment_form = CommentForm()
    else:
        comment_form = CommentForm()
    if new_comment:
        # Present a fresh, empty form after a successful submission.
        comment_form = CommentForm()
    # List of similar posts
    post_tags_ids = post.tags.values_list('id', flat=True)
    similar_posts = Post.published.filter(tags__in=post_tags_ids) \
        .exclude(id=post.id)
    similar_posts = similar_posts.annotate(same_tags=Count('tags')) \
        .order_by('-same_tags', '-publish')[:4]
    return render(
        request, 'blog/post/detail.html', {
            'post': post,
            'comments': comments,
            'comment_form': comment_form,
            'new_comment': new_comment,
            'similar_posts': similar_posts,
            'user': post.author,
            'auth_user': request.user
        })
def list_songs(request, **kw):
    """Render the song list: by artist, by title search, or last updates.

    Cached entries are repr() strings rehydrated with eval().
    """
    song_count = 0
    song_last_count = 0
    search_count = 0
    per_page = 100
    song_list = []
    if kw.get('id_art'):
        # filter by art
        art_id = ZI(kw.get('id_art'))
        artist = GetArtArtist(art_id)
        if not cache.has_key('songs:' + artist):
            song_list = Song.objects.filter(artist=artist).order_by('title')
            AddSongListCache(artist, song_list)
        song_list = eval(cache.get('songs:' + artist))
        song_count = len(song_list)
    elif request.GET.get('search'):
        # search (prefix match on the capitalized term)
        st = request.GET.get('search')
        search_key = '.search' + to_translit(st)
        if not cache.has_key('songs:' + search_key):
            song_list = Song.objects.filter(
                Q(title__startswith=st.capitalize()))
            AddSongListCache(search_key, song_list)
        song_list = eval(cache.get('songs:' + search_key))
        song_count = len(song_list)
        search_count = song_count
    else:
        # last update: the ten most recent songs
        artist = '.last_update'
        if not cache.has_key('songs:' + artist):
            song_list = Song.objects.order_by('-date')[:10]
            AddSongListCache(artist, song_list)
        song_list = eval(cache.get('songs:' + artist))
        song_last_count = len(song_list)
    return render_to_response(
        'songs.html',
        context_instance=RequestContext(
            request, {
                'request': request,
                'art_index': art_index,
                'form': SearchForm(initial={'search': request.GET.get('search')}),
                'song_count': song_count,
                'last_count': song_last_count,
                'search_count': search_count,
                'songs': PageList(request, song_list, per_page),
                'logback': reverse('songs.views.list_songs')
            }))
def get_data(pk):
    """Return the unpickled job payload for `pk`, caching it after the
    first disk read."""
    if cache.has_key(pk):
        return cache.get(pk)
    job = LS4Job.objects.get(pk=pk)
    with open(job.path, 'rb') as f:
        data = pickle.load(f)
    # NOTE(review): stored under job.id while looked up by pk — presumably
    # identical; confirm.
    cache.set(job.id, data)
    return data
def send_verify_code(phonenum):
    """Generate and SMS a verification code for `phonenum`.

    Raises VcodeExist when a code for this number is still live (5 min TTL).
    """
    key = 'VCode-%s' % phonenum
    if cache.has_key(key):
        raise VcodeExist
    vcode = gen_verify_code()
    send_sms(phonenum, vcode)
    cache.set(key, vcode, 300)
def fetch_cached(self, context):
    """Return cached rendered content (restoring its sekizai blocks).

    Staff users, disabled caching, or a cold cache fall through — the
    method then implicitly returns None.
    """
    cache_key = self.get_cache_key()
    user = context['request'].user
    # Staff always see a fresh render; everyone else gets the cache when
    # caching is enabled and the entry is warm.
    if not user.is_staff and caching_enabled and cache.has_key(cache_key):
        cached_value = cache.get(cache_key)
        rendered_content = cached_value.get('content')
        sekizai = cached_value.get('sekizai')
        self._do_restore_sekizai_context(context, sekizai)
        return rendered_content
    # NOTE(review): implicit None on a miss — confirm the caller handles it.
def has_key(self, key):
    """Return True when the cleaned `key` is present in the cache,
    logging the lookup and its outcome."""
    key = self._clean_key(key)
    logger.debug('Looking for %s' % key)
    exists = cache.has_key(key)
    outcome = 'is cached' if exists else 'is not cached'
    logger.debug('%s %s' % (key, outcome))
    return exists
def get_set_key(self, prefix, value_to_set):
    """Store `value_to_set` under a fresh random key and return that key.

    Fixes: uuid.uuid4().get_hex() is a Python-2-only alias (the portable
    spelling is the .hex attribute), and the has_key()/set() pair raced —
    cache.add() only writes when the key is free, so the uniqueness check
    and the write are one operation.
    """
    while True:
        key = prefix + str(uuid.uuid4().hex.upper())
        if cache.add(key, value_to_set):
            return key
def GetArtId(artist):
    """Return the id of the Art row for `artist`, or '' when none exists."""
    cache_key = 'arts:.art' + artist
    if not cache.has_key(cache_key):
        AddArtListCache('.art' + artist, Art.objects.filter(artist=artist))
    # Legacy scheme: lists are cached as repr() text and eval()'d back.
    art_list = eval(cache.get(cache_key))
    try:
        return art_list[0]['id']
    except:
        return ''
def links_list():
    """Return all friend links, cached for one hour.

    The cache key embeds the current row count, so adding or removing a
    link naturally switches to a fresh key.
    """
    num = LinksList.objects.all().count()
    links_key = 'links_key_{0}'.format(num)
    if cache.has_key(links_key):
        links = cache.get(links_key)
    else:
        links = LinksList.objects.all()
        cache.set(links_key, links, 3600)
    return links
def session_constraint_expire(request, session):
    """Invalidate the cached constraints page for `session`.

    Builds a synthetic request for the constraints URL so Django's cache
    middleware key function resolves the same key the real view used.
    """
    from ajax import session_constraints
    path = reverse(session_constraints, args=[session.meeting.number, session.pk])
    fake_request = HttpRequest()
    fake_request.path = path
    fake_request.META['HTTP_HOST'] = request.META['HTTP_HOST']
    cache_key = get_cache_key(fake_request)
    # get_cache_key() returns None when the page was never cached.
    if cache_key is not None and cache.has_key(cache_key):
        cache.delete(cache_key)
def expire_page(path, key_prefix=None):
    """Delete a cached page based on its url.

    Fixes: django.utils.cache.get_cache_key() returns None when the page
    was never cached (no cached header entry), and the original passed
    that None straight into cache.has_key(). Guarding matches the sibling
    session_constraint_expire() helper.
    """
    request = HttpRequest()
    request.path = path
    key = get_cache_key(request, key_prefix)
    if key is not None and cache.has_key(key):
        cache.delete(key)
def get_mobile_code(self, mobile):
    """Return the cached SMS verification code for `mobile`.

    :param mobile: phone number used as the cache key
    :return: the cached code, or False when none is stored
    """
    if cache.has_key(mobile):
        return cache.get(mobile)
    return False
def GetArtArtist(art_id):
    """Return the artist name for Art row `art_id`; raises Http404 when
    no such row exists."""
    cache_key = 'arts:.id' + str(art_id)
    if not cache.has_key(cache_key):
        AddArtListCache('.id' + str(art_id), Art.objects.filter(id=art_id))
    # Legacy scheme: lists are cached as repr() text and eval()'d back.
    art_list = eval(cache.get(cache_key))
    try:
        return art_list[0]['artist']
    except:
        raise Http404
def get_access_token():
    """Return (access_token, True), caching the token for ~110 minutes.

    Fixes: the original hit the token endpoint twice per cache miss (once
    for the debug print, once for the value) — the two responses could even
    carry different tokens. Fetch once and reuse the parsed payload.
    """
    key = 'access_token'
    if cache.has_key(key):
        return cache.get(key), True
    payload = requests.get(wechart_info.base_get_access_token).json()
    print(payload)
    access_token = payload['access_token']
    cache.set(key, access_token, 110 * 60)  # tokens last 2h; refresh early
    return access_token, True
def decodePwd(upwd):
    """Decode an encoded password blob.

    The blob decodes to "<password>||&~&||<random code>"; the password is
    returned only when the embedded random code matches a live cache entry
    keyed (and valued) by that same code. Returns '' on any mismatch.
    """
    separator = "||&~&||"
    decoded = _GG("DecodeStr")(upwd)
    if separator not in decoded:
        return ""
    password, random_code = decoded.split(separator)
    if not cache.has_key(random_code) or cache.get(random_code) != random_code:
        return ""
    return password