def test_make_key():
    """Normalized keys are md5 digests and vary with the active locale
    unless ``with_locale=False`` is passed."""
    key = u'é@øel'
    with translation.override('en-US'):
        assert make_key(key) == 'eb7592119dace3b998755ef61d90b91b'
        assert (make_key(key, with_locale=False) ==
                'f40676a34ef1787123e49e1317f9ed31')
    with translation.override('fr'):
        assert make_key(key) == 'e0c0ff9a07c763506dc6d77daed9c048'
    # Switching back to the original locale reproduces the original digest.
    with translation.override('en-US'):
        assert make_key(key) == 'eb7592119dace3b998755ef61d90b91b'
def _get_hash_for_selected_file(self, obj):
    """Return the SHA-256 hash of the currently selected file's contents.

    The hash is looked up in the locally cached ``self._entries`` dict
    first, then in memcached (24h TTL), and only computed from the git
    blob as a last resort. Returns ``None`` for directories and for
    paths that don't exist in the commit's tree.
    """
    selected_file = self.get_selected_file(obj)

    # Return the hash if we already saved it to the locally cached
    # `self._entries` dictionary. Use `.get()` so a file that isn't
    # present in `_entries` falls through to the git lookup instead of
    # raising KeyError (the original subscripting assumed every
    # selected file had an entry).
    _entries = getattr(self, '_entries', {})
    cached_hash = _entries.get(selected_file, {}).get('sha256')
    if cached_hash:
        return cached_hash

    commit = self._get_commit(obj)
    tree = self.repo.get_root_tree(commit)

    # Normalize the key as we want to avoid that we exceed max
    # key length because of selected_file.
    cache_key = make_key(
        f'reviewers:fileentriesserializer:hashes'
        f':{commit.hex}:{selected_file}',
        with_locale=False,
        normalize=True)

    def _calculate_hash():
        # Only blobs have hashable content; trees (directories) and
        # missing paths yield None.
        try:
            blob_or_tree = tree[selected_file]
        except KeyError:
            return None

        if blob_or_tree.type == pygit2.GIT_OBJ_TREE:
            return None

        blob = self.git_repo[blob_or_tree.oid]
        return get_sha256(io.BytesIO(memoryview(blob)))

    return cache.get_or_set(cache_key, _calculate_hash, 60 * 60 * 24)
def addon_bayesian_rating(*addons, **kw):
    """Recompute the bayesian rating for the given add-on ids.

    Site-wide averages are cached; each add-on's rating is updated
    atomically in the database via F() expressions.
    """
    def addon_aggregates():
        return Addon.objects.valid().aggregate(
            rating=Avg('average_rating'), reviews=Avg('total_ratings'))

    log.info('[%s@%s] Updating bayesian ratings.'
             % (len(addons), addon_bayesian_rating.rate_limit))

    averages = cache_get_or_set(
        make_key('task.bayes.avg'), addon_aggregates, 60 * 60 * 60)
    # Rating can be NULL in the DB, so don't update it if it's not there.
    if averages['rating'] is None:
        return

    # Prior weight: mean review count times mean rating.
    prior = averages['reviews'] * averages['rating']

    for addon in Addon.objects.no_cache().filter(id__in=addons):
        # Ignoring addons with no average rating.
        if addon.average_rating is None:
            continue

        # Update the addon bayesian_rating atomically using F objects
        # (unless it has no reviews, in which case directly set it to 0).
        addon_qs = Addon.objects.filter(id=addon.id)
        if not addon.total_ratings:
            addon_qs.update(bayesian_rating=0)
        else:
            numerator = prior + F('total_ratings') * F('average_rating')
            denominator = averages['reviews'] + F('total_ratings')
            addon_qs.update(bayesian_rating=numerator / denominator)
def _get_hash_for_selected_file(self):
    """Return the SHA-256 hash of the currently selected file's contents.

    Consults the locally cached ``self._entries`` dict first, then
    memcached (24h TTL), and only computes the hash from the git blob
    as a last resort. Returns ``None`` when there is no blob for the
    selected path.
    """
    selected_file = self._get_selected_file()

    # Return the hash if we already saved it to the locally cached
    # `self._entries` dictionary. Use `.get()` so a file missing from
    # `_entries` falls through to the blob lookup instead of raising
    # KeyError (the original subscripting assumed the key existed).
    _entries = getattr(self, '_entries', {})
    cached_hash = _entries.get(selected_file, {}).get('sha256')
    if cached_hash:
        return cached_hash

    commit = self.commit
    # Only the blob is needed here; the returned name is unused.
    blob, _name = self._get_blob_for_selected_file()

    # Normalize the key as we want to avoid that we exceed max
    # key length because of selected_file.
    cache_key = make_key(
        f'reviewers:fileentriesserializer:hashes'
        f':{commit.hex}:{selected_file}',
        with_locale=False,
        normalize=True,
    )

    def _calculate_hash():
        if blob is None:
            return None
        return get_sha256(io.BytesIO(memoryview(blob)))

    return cache.get_or_set(cache_key, _calculate_hash, 60 * 60 * 24)
def side_nav(context, addon_type, category=None):
    """Render the side navigation for an add-on type, cached per
    app/type/category."""
    app_id = context['request'].APP.id
    category_key = str(category.id) if category else 'all'
    # Category names can make the raw key very long; normalize so we
    # never hit memcached's key-size limit.
    cache_key = make_key(
        f'side-nav-{app_id}-{addon_type}-{category_key}', normalize=True)
    return cache_get_or_set(
        cache_key, lambda: _side_nav(context, addon_type, category))
def side_nav(context, addon_type, category=None):
    """Return the (cached) rendered side navigation."""
    app = context['request'].APP.id
    cat = str(category.id) if category else 'all'

    def render():
        return _side_nav(context, addon_type, category)

    # We have potentially very long names in the cache-key,
    # normalize to not hit any memcached key-limits.
    cache_key = make_key('side-nav-%s-%s-%s' % (app, addon_type, cat),
                         normalize=True)
    return cache_get_or_set(cache_key, render)
def get_addons(self):
    """Return the collection's public add-ons, filtered for the current
    request, with the result cached."""
    public_addons = self.collection.addons.public()
    filter_kwargs = {
        'addon_type': 'ALL',
        'limit': self.limit,
        'app': self.request.APP,
        'platform': self.platform,
        'version': self.version,
        'compat_mode': self.compat_mode
    }

    # The cache-key can be very long, let's normalize it to make sure
    # we never hit the 250-char limit of memcached.
    cache_key = make_key(
        'collections-promo-get-addons:{}'.format(repr(filter_kwargs)),
        normalize=True)
    return cache_get_or_set(
        cache_key, lambda: addon_filter(public_addons, **filter_kwargs))
def get_addons(self):
    """Return the filtered public add-ons of this collection (cached)."""
    addons = self.collection.addons.public()
    kw = {
        'addon_type': 'ALL',
        'limit': self.limit,
        'app': self.request.APP,
        'platform': self.platform,
        'version': self.version,
        'compat_mode': self.compat_mode
    }

    def fetch_and_filter_addons():
        return addon_filter(addons, **kw)

    # The cache-key can be very long, let's normalize it to make sure
    # we never hit the 250-char limit of memcached.
    cache_key = make_key(
        f'collections-promo-get-addons:{kw!r}', normalize=True)
    return cache_get_or_set(cache_key, fetch_and_filter_addons)
def version_detail(request, addon, version_num):
    """Redirect to the add-on's version-list page that contains
    ``version_num``, or raise 404 if that version doesn't exist."""
    # TODO: Does setting this in memcached even make sense?
    # This is specific to an add-ons version so the chance of this hitting
    # the cache and not missing seems quite bad to me (cgrebs)
    def _fetch():
        qs = _version_list_qs(addon)
        return list(qs.values_list('version', flat=True))

    cache_key = make_key(u'version-detail:{}:{}'.format(addon.id, version_num),
                         normalize=True)
    ids = cache_get_or_set(cache_key, _fetch)

    url = reverse('addons.versions', args=[addon.slug])
    if version_num in ids:
        # Use floor division: `/` yields a float in Python 3, which
        # would produce URLs like `?page=1.0`.
        page = 1 + ids.index(version_num) // PER_PAGE
        to = urlparams(url, 'version-%s' % version_num, page=page)
        return http.HttpResponseRedirect(to)
    else:
        raise http.Http404()
def version_detail(request, addon, version_num):
    """Redirect to the paginated version-list page containing
    ``version_num``; 404 when the version is unknown."""
    # TODO: Does setting this in memcached even make sense?
    # This is specific to an add-ons version so the chance of this hitting
    # the cache and not missing seems quite bad to me (cgrebs)
    def _fetch():
        qs = _version_list_qs(addon)
        return list(qs.values_list('version', flat=True))

    cache_key = make_key(
        u'version-detail:{}:{}'.format(addon.id, version_num),
        normalize=True)
    ids = cache_get_or_set(cache_key, _fetch)

    url = reverse('addons.versions', args=[addon.slug])
    if version_num in ids:
        # Floor division: true division (`/`) returns a float under
        # Python 3 and would leak `page=1.0`-style values into the URL.
        page = 1 + ids.index(version_num) // PER_PAGE
        to = urlparams(url, 'version-%s' % version_num, page=page)
        return http.HttpResponseRedirect(to)
    else:
        raise http.Http404()
def test_make_key():
    """Keys carry a locale suffix by default; normalize=True md5-hashes
    the key (still locale-dependent unless with_locale=False)."""
    key = u'é@øel'
    with translation.override('en-US'):
        assert make_key(key) == u'é@øel:en-us'
    with translation.override('de'):
        assert make_key(key) == u'é@øel:de'
    with translation.override('de'):
        assert make_key(key, with_locale=False) == u'é@øel'

    with translation.override('en-US'):
        assert (make_key(key, normalize=True) ==
                '2798e65bbe384320c9da7930e93e63fb')
        assert (make_key(key, with_locale=False, normalize=True) ==
                'a83feada27737072d4ec741640368f07')
    with translation.override('fr'):
        assert (make_key(key, normalize=True) ==
                'bc5208e905c8dfcc521e4196e16cfa1a')
def test_make_key():
    """Un-normalized keys append the active locale; normalized keys are
    md5 digests that still vary per locale."""
    with translation.override('en-US'):
        assert make_key(u'é@øel') == 'é@øel:en-us'
    with translation.override('de'):
        assert make_key(u'é@øel') == 'é@øel:de'
    with translation.override('de'):
        assert make_key(u'é@øel', with_locale=False) == 'é@øel'

    with translation.override('en-US'):
        expected = '2798e65bbe384320c9da7930e93e63fb'
        assert make_key(u'é@øel', normalize=True) == expected
        expected = 'a83feada27737072d4ec741640368f07'
        assert make_key(u'é@øel', with_locale=False,
                        normalize=True) == expected
    with translation.override('fr'):
        expected = 'bc5208e905c8dfcc521e4196e16cfa1a'
        assert make_key(u'é@øel', normalize=True) == expected
def _category_personas(qs, limit):
    """Return a cached random slice of up to `limit` personas from `qs`."""
    # The queryset's SQL can be arbitrarily long, so normalize the key
    # to stay within memcached's limits.
    cache_key = make_key('cat-personas:' + str(qs.query), normalize=True)
    return cache_get_or_set(cache_key, lambda: randslice(qs, limit=limit))
def site_nav(context):
    """Return the (cached) rendered site navigation for the request's app."""
    app_id = context['request'].APP.id

    def render():
        return _site_nav(context)

    return cache_get_or_set(
        make_key('site-nav-%s' % app_id, normalize=True), render)