def loop_backs_questions(self):
    """Return the loop question structure for this batch.

    :return: OrderedDict mapping ``(q_start_pk, q_end_pk)`` tuples to the
        ordered list of questions contained in that loop.
    """
    # Fix: cached_as(...) returns a decorator; the original computed it and
    # discarded the result, so the inner function was never actually cached.
    # Apply it as a decorator so results are cached per-batch and invalidated
    # when this Batch row changes.
    @cached_as(Batch.objects.filter(id=self.id))
    def _loop_backs_questions():
        survey_questions = self.survey_questions
        loop_starters = self.loop_starters()
        loop_enders = self.loop_enders()
        start = None
        loop_desc = OrderedDict()
        present_loop = []
        for q in survey_questions:
            if q in loop_starters:
                # A new loop opens here; reset the running collection.
                start = q
                present_loop = []
            if q in loop_enders:
                # Close the current loop and record its question list.
                present_loop.append(q)
                loop_desc[(start.pk, q.pk)] = present_loop
                start = None
            if start:
                # Inside an open loop (including the starter itself, since
                # `start` was just set above when q is the starter).
                present_loop.append(q)
        # just transpose
        return loop_desc
    return _loop_backs_questions()
def loop_back_boundaries(self):
    """Map each looped question's pk to its enclosing loop boundary.

    :return: OrderedDict of ``{question_pk: (start_pk, end_pk)}`` built
        from :meth:`loop_backs_questions`.
    """
    # Fix: cached_as(...) returns a decorator; apply it (the original
    # discarded its result, so nothing was cached).
    @cached_as(Batch.objects.filter(id=self.id))
    def _loop_back_boundaries():
        loop_desc = self.loop_backs_questions()
        quest_map = OrderedDict()
        for boundary, loop_questions in loop_desc.items():
            # Fix: the original used map() with a side-effecting lambda;
            # map() is lazy on Python 3, so the updates would never run.
            # An explicit loop is correct on both Python 2 and 3.
            for q in loop_questions:
                quest_map[q.pk] = boundary
        return quest_map
    return _loop_back_boundaries()
def test_cached_as_model(self):
    """A function cached against a model is invalidated by any save."""
    counted = self._make_func(cached_as(Category))
    self.assertEqual(counted(), 1)   # first call: cache miss
    self.assertEqual(counted(), 1)   # second call: cache hit
    # Creating any Category invalidates a model-wide cached_as.
    Category.objects.create(title='test')
    self.assertEqual(counted(), 2)   # miss again after invalidation
def survey_questions(self):
    """Return all survey questions for this batch, inline questions first,
    each followed by its direct sub-questions.

    :return: OrderedSet of questions in survey order.
    """
    # Fix: cached_as(...) returns a decorator; the original discarded its
    # result, so the inner function was never cached. Apply it properly.
    @cached_as(Batch.objects.filter(id=self.id))
    def _survey_questions():
        inline_ques = self.questions_inline()
        survey_questions = OrderedSet()
        for ques in inline_ques:
            survey_questions.append(ques)
            # Fix: the original used map() for side effects, which is lazy
            # on Python 3 (sub-questions would never be added). An explicit
            # loop works on both Python 2 and 3.
            # boldly assuming subquests dont go more than quest subquestion
            # deep for present implement
            for sub_q in ques.direct_sub_questions():
                survey_questions.add(sub_q)
        return survey_questions
    return _survey_questions()
def test_cached_as_cond(self):
    """Conditional cached_as only invalidates on matching rows."""
    counted = make_inc(cached_as(Category.objects.filter(title='test')))
    self.assertEqual(counted(), 1)             # first call populates cache
    # A non-matching row must not invalidate the conditional cache.
    Category.objects.create(title='miss')
    self.assertEqual(counted(), 1)             # still a hit
    # A matching row invalidates it.
    Category.objects.create(title='test')
    self.assertEqual(counted(), 2)             # miss after invalidation
def test_cached_as_cond(self):
    """Conditional cached_as only invalidates on matching rows."""
    counted = self._make_func(cached_as(Category.objects.filter(title='test')))
    self.assertEqual(counted(), 1)             # first call populates cache
    # A non-matching row must not invalidate the conditional cache.
    Category.objects.create(title='miss')
    self.assertEqual(counted(), 1)             # still a hit
    # A matching row invalidates it.
    Category.objects.create(title='test')
    self.assertEqual(counted(), 2)             # miss after invalidation
def test_cached_as_obj(self):
    """cached_as bound to a single instance invalidates only on that row."""
    tracked = Category.objects.create(title='test')
    counted = self._make_func(cached_as(tracked))
    self.assertEqual(counted(), 1)             # populate cache
    # Unrelated rows must not touch an object-bound cache.
    Category.objects.create(title='miss')
    self.assertEqual(counted(), 1)             # hit
    # Saving the tracked instance invalidates it.
    tracked.title = 'new'
    tracked.save()
    self.assertEqual(counted(), 2)             # miss after invalidation
def test_cached_as_depends_on_two_models(self):
    """cached_as over two models is invalidated by a change to either."""
    counted = self._make_func(cached_as(Category, Post))
    category = Category.objects.create(title='miss')
    post = Post.objects.create(title='New Post', category=category)
    self.assertEqual(counted(1), 1)            # populate cache
    # A Category save invalidates the two-model cache.
    category.title = 'new title'
    category.save()
    self.assertEqual(counted(1), 2)            # miss, repopulate
    # A Post save invalidates it as well.
    post.title = 'new title'
    post.save()
    self.assertEqual(counted(1), 3)            # miss, repopulate
def test_cached_as(self):
    """The cache signal reports hit=False on a miss and hit=True on a hit."""
    counted = _make_inc(cached_as(SignalTest.objects.filter(name='test')))
    wrapped = counted.__wrapped__
    # First call misses and fires the signal with hit=False.
    self.assertEqual(counted(), 1)
    self.assertEqual(
        self.signal_call[0],
        {'sender': None, 'func': wrapped, 'hit': False})
    # Second call hits and fires the signal with hit=True.
    self.assertEqual(counted(), 1)
    self.assertEqual(
        self.signal_call[0],
        {'sender': None, 'func': wrapped, 'hit': True})
def test_cached_as(self):
    """The cache signal reports hit=False on a miss and hit=True on a hit."""
    counted = make_inc(cached_as(Category.objects.filter(title='test')))
    wrapped = counted.__wrapped__
    # First call misses and fires the signal with hit=False.
    self.assertEqual(counted(), 1)
    self.assertEqual(
        self.signal_calls,
        [{'sender': None, 'func': wrapped, 'hit': False}])
    # Reset recorded signals, then a second call hits with hit=True.
    self.signal_calls = []
    self.assertEqual(counted(), 1)
    self.assertEqual(
        self.signal_calls,
        [{'sender': None, 'func': wrapped, 'hit': True}])
def test_cached_as(self):
    """The cache signal reports hit=False on a miss and hit=True on a hit."""
    counted = _make_inc(cached_as(Category.objects.filter(title='test')))
    wrapped = counted.__wrapped__
    # First call misses and fires the signal with hit=False.
    self.assertEqual(counted(), 1)
    self.assertEqual(
        self.signal_calls,
        [{'sender': None, 'func': wrapped, 'hit': False}])
    # Reset recorded signals, then a second call hits with hit=True.
    self.signal_calls = []
    self.assertEqual(counted(), 1)
    self.assertEqual(
        self.signal_calls,
        [{'sender': None, 'func': wrapped, 'hit': True}])
def add_new_stv(geom, overlaps_are_missing, **data):
    """
    Solve overlaps if included in request body

    Creates a SpacetimeVolume from ``data`` after checking for overlapping
    volumes in the same date range.

    :param geom: geometry (WKT/GeoJSON/etc. accepted by GEOSGeometry) for
        the new volume.  NOTE(review): exact accepted formats depend on
        GEOSGeometry — confirm with callers.
    :param overlaps_are_missing: when truthy and overlaps exist, abort with
        a 409 response listing them instead of creating the volume.
    :param data: remaining STV fields; must include "start_date",
        "end_date", "entity" (pk) and, when overlaps are to be resolved,
        "overlaps" (keep/modify decisions consumed by subtract_geometry).
    :return: dict with "response" (serialized STV or overlap listing) and
        "status" (201 on create, 409 on unresolved overlaps).
    """
    geom = GEOSGeometry(geom)

    def _overlaps():
        # Note that we are using list for queryset here,
        # it's because we don't want to cache queryset object but results.
        return list(
            overlaps_queryset(geom, data["start_date"], data["end_date"]))

    if "cacheops" in settings.INSTALLED_APPS:
        # Cache overlaps query in production.  cached_as invalidates when
        # any STV intersecting this geometry/date window changes; the dates
        # go into `extra` so distinct windows get distinct cache keys.
        _overlaps = (cached_as(
            SpacetimeVolume.objects.filter(
                territory__overlaps=geom,
                start_date__lte=data["end_date"],
                end_date__gte=data["start_date"],
            ),
            extra=(data["start_date"], data["end_date"]),
        ))(_overlaps)

    # keep or modify STVs on server
    # "keep|modify": ["stv_id", "stv_id" ...]
    overlaps = {"keep": [], "modify": []}
    # Generate list of overlaps grouped by entities
    # { "entity_id": ["stv_id", "stv_id"], ...}
    overlaps["db"] = {}
    for i in _overlaps():
        if i.entity.pk not in overlaps["db"]:
            overlaps["db"][i.entity.pk] = []
        overlaps["db"][i.entity.pk].append(i.pk)

    # Client has not told us how to resolve the overlaps: report them back
    # with 409 Conflict instead of creating the volume.
    if overlaps_are_missing and len(overlaps["db"]) > 0:
        return {"response": {"overlaps": overlaps["db"]}, "status": 409}

    # NOTE(review): data["overlaps"] is assumed present here when overlaps
    # needed resolving — confirm callers always supply it in that case.
    data["territory"] = subtract_geometry(data["overlaps"], overlaps, geom)
    data["entity"] = TerritorialEntity.objects.get(id=data["entity"])
    # remove overlaps from data (not a model field; pop defensively)
    data.pop("overlaps", None)
    stv = SpacetimeVolume.objects.create(**data)
    # objects.get() will return entity with computed visual_center
    response = SpacetimeVolumeSerializer(
        SpacetimeVolume.objects.get(pk=stv.id)).data
    return {"response": response, "status": 201}
def test_cached_as_depends_on_args(self):
    """Different call arguments get distinct cache entries."""
    counted = self._make_func(cached_as(Category))
    self.assertEqual(counted(1), 1)   # arg=1: miss, cached
    self.assertEqual(counted(1), 1)   # arg=1 again: hit
    self.assertEqual(counted(2), 2)   # new arg keys a fresh entry: miss
def cached_as(queryset, timeout, fragment_name, *extra):
    """Adapter that forwards to cacheops.cached_as, folding the fragment
    name and any extra values into the cache-key ``extra`` tuple."""
    key_extra = (fragment_name,) + extra
    return cacheops.cached_as(queryset, timeout=timeout, extra=key_extra)
def cached_as(queryset, timeout, fragment_name, *extra):
    """Adapter that forwards to cacheops.cached_as, folding the fragment
    name and any extra values into the cache-key ``extra`` tuple."""
    key_extra = (fragment_name,) + extra
    return cacheops.cached_as(queryset, timeout=timeout, extra=key_extra)