Example #1
def test_cache_get_or_set():
    # Compatibility test, since cache_get_or_set is a 1:1 backport from
    # Django 1.11, their unittests apply.

    def some_function():
        some_function.call_count += 1
        return 'something'  # Needed for cache_get_or_set() to work.
    some_function.call_count = 0

    cache_get_or_set('my-key', some_function)
    cache_get_or_set('my-key', some_function)

    assert some_function.call_count == 1
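
Since the comment only tells us that cache_get_or_set is a 1:1 backport of Django 1.11's cache.get_or_set(), a minimal sketch of how such a helper could be built on top of django.core.cache follows; the exact signature and timeout handling are assumptions here, not the project's actual implementation.

from django.core.cache import cache
from django.core.cache.backends.base import DEFAULT_TIMEOUT


def cache_get_or_set(key, default, timeout=DEFAULT_TIMEOUT):
    # Sketch only: look the key up, and on a miss evaluate the (possibly
    # callable) default, store it, then re-read the key.
    value = cache.get(key)
    if value is None:
        if callable(default):
            default = default()
        # add() only writes when the key is absent, so a value stored by a
        # concurrent process is not clobbered here.
        cache.add(key, default, timeout)
        value = cache.get(key, default)
    return value

This also explains the comment in the test: a fetcher returning None would look like a cache miss on every call and would never be memoized.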
Example #2
def test_cache_get_or_set():
    # Compatibility test, since cache_get_or_set is a 1:1 backport from
    # Django 1.11, their unittests apply.

    def some_function():
        some_function.call_count += 1
        return 'something'  # Needed for cache_get_or_set() to work.
    some_function.call_count = 0

    cache_get_or_set('my-key', some_function)
    cache_get_or_set('my-key', some_function)

    assert some_function.call_count == 1
Example #3
def _category_personas(qs, limit):
    def fetch_personas():
        return randslice(qs, limit=limit)

    # TODO: .query_key comes from cache-machine, find replacement
    key = 'cat-personas:' + qs.query_key()
    return cache_get_or_set(key, fetch_personas)
Example #4
File: tasks.py Project: bqbn/addons-server
def addon_bayesian_rating(*addons, **kw):
    def addon_aggregates():
        return Addon.objects.valid().aggregate(rating=Avg('average_rating'),
                                               reviews=Avg('total_ratings'))

    log.info('[%s@%s] Updating bayesian ratings.' %
             (len(addons), addon_bayesian_rating.rate_limit))

    avg = cache_get_or_set('task.bayes.avg', addon_aggregates, 60 * 60 * 60)
    # Rating can be NULL in the DB, so don't update it if it's not there.
    if avg['rating'] is None:
        return

    mc = avg['reviews'] * avg['rating']

    for addon in Addon.objects.filter(id__in=addons):
        if addon.average_rating is None:
            # Ignoring addons with no average rating.
            continue

        # Update the addon bayesian_rating atomically using F objects (unless
        # it has no reviews, in which case directly set it to 0).
        qs = Addon.objects.filter(id=addon.id)
        if addon.total_ratings:
            num = mc + F('total_ratings') * F('average_rating')
            denom = avg['reviews'] + F('total_ratings')
            qs.update(bayesian_rating=num / denom)
        else:
            qs.update(bayesian_rating=0)
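
The F() arithmetic above is the standard Bayesian average, bayesian_rating = (avg_reviews * avg_rating + total_ratings * average_rating) / (avg_reviews + total_ratings), with avg_reviews and avg_rating being the site-wide aggregates cached under 'task.bayes.avg'. With made-up numbers: if add-ons average 20 ratings at 3.5 stars site-wide, an add-on with 5 ratings averaging 5.0 stars ends up with (20 * 3.5 + 5 * 5.0) / (20 + 5) = 95 / 25 = 3.8, so small samples are pulled towards the global mean.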
Example #5
def addon_bayesian_rating(*addons, **kw):
    def addon_aggregates():
        return Addon.objects.valid().aggregate(rating=Avg('average_rating'),
                                               reviews=Avg('total_ratings'))

    log.info('[%s@%s] Updating bayesian ratings.' %
             (len(addons), addon_bayesian_rating.rate_limit))

    avg = cache_get_or_set(make_key('task.bayes.avg'), addon_aggregates,
                           60 * 60 * 60)
    # Rating can be NULL in the DB, so don't update it if it's not there.
    if avg['rating'] is None:
        return

    mc = avg['reviews'] * avg['rating']

    for addon in Addon.objects.no_cache().filter(id__in=addons):
        if addon.average_rating is None:
            # Ignoring addons with no average rating.
            continue

        # Update the addon bayesian_rating atomically using F objects (unless
        # it has no reviews, in which case directly set it to 0).
        qs = Addon.objects.filter(id=addon.id)
        if addon.total_ratings:
            num = mc + F('total_ratings') * F('average_rating')
            denom = avg['reviews'] + F('total_ratings')
            qs.update(bayesian_rating=num / denom)
        else:
            qs.update(bayesian_rating=0)
Example #6
def side_nav(context, addon_type, category=None):
    app = context['request'].APP.id
    cat = str(category.id) if category else 'all'
    cache_key = make_key(
        'side-nav-%s-%s-%s' % (app, addon_type, cat),
        # We have potentially very long names in the cache-key,
        # normalize to not hit any memcached key-limits
        normalize=True)
    return cache_get_or_set(cache_key,
                            lambda: _side_nav(context, addon_type, category))
Example #7
def side_nav(context, addon_type, category=None):
    app = context['request'].APP.id
    cat = str(category.id) if category else 'all'
    cache_key = make_key(
        'side-nav-%s-%s-%s' % (app, addon_type, cat),
        # We have potentially very long names in the cache-key,
        # normalize to not hit any memcached key-limits
        normalize=True)
    return cache_get_or_set(
        cache_key, lambda: _side_nav(context, addon_type, category))
Example #8
def get_versions(order=('application', 'version_int')):
    def fetch_versions():
        apps = amo.APP_USAGE
        versions = {app.id: [] for app in apps}
        qs = list(AppVersion.objects.order_by(*order)
                  .filter(application__in=versions)
                  .values_list('application', 'version'))
        for app, version in qs:
            versions[app].append(version)
        return apps, versions
    return cache_get_or_set('getv' + ':'.join(order), fetch_versions)
Example #9
    def get_entries(self, obj):
        commit = self._get_commit(obj)
        result = OrderedDict()

        def _fetch_entries():
            for entry_wrapper in self.repo.iter_tree(commit.tree):
                entry = entry_wrapper.tree_entry
                path = force_text(entry_wrapper.path)
                blob = entry_wrapper.blob

                is_directory = entry.type == 'tree'
                mime, encoding = mimetypes.guess_type(entry.name)
                is_binary = (
                    self.is_binary(path, mime, blob)
                    if not is_directory else False)
                sha_hash = (
                    get_sha256(io.BytesIO(memoryview(blob)))
                    if not is_directory else '')

                commit_tzinfo = FixedOffset(commit.commit_time_offset)
                commit_time = datetime.fromtimestamp(
                    float(commit.commit_time),
                    commit_tzinfo)

                result[path] = {
                    'binary': is_binary,
                    'depth': path.count(os.sep),
                    'directory': is_directory,
                    'filename': force_text(entry.name),
                    'sha256': sha_hash,
                    'mimetype': mime or 'application/octet-stream',
                    'path': path,
                    'size': blob.size if blob is not None else None,
                    'modified': commit_time,
                }
            return result

        # Given that this is a very expensive operation we have a two-fold
        # cache, one that is stored on this instance for very-fast retrieval
        # to support other method calls on this serializer
        # and another that uses memcached for regular caching
        if hasattr(self, '_entries'):
            return self._entries

        self._entries = cache_get_or_set(
            'reviewers:fileentriesserializer:entries:{}'.format(commit.hex),
            _fetch_entries,
            # Store information about this commit for 24h which should be
            # enough to cover regular review-times but not overflow our
            # cache
            60 * 60 * 24)

        return self._entries
Example #10
def get_versions(order=('application', 'version_int')):
    def fetch_versions():
        apps = amo.APP_USAGE
        versions = {app.id: [] for app in apps}
        qs = list(
            AppVersion.objects.order_by(*order).filter(
                application__in=versions).values_list('application',
                                                      'version'))
        for app, version in qs:
            versions[app].append(version)
        return apps, versions

    return cache_get_or_set('getv' + ':'.join(order), fetch_versions)
Example #11
File: serializers.py Project: diox/olympia
    def get_entries(self, obj):
        # Given that this is a very expensive operation we have a two-fold
        # cache, one that is stored on this instance for very-fast retrieval
        # to support other method calls on this serializer
        # and another that uses memcached for regular caching
        if hasattr(self, '_entries'):
            return self._entries

        commit = self._get_commit(obj)
        result = OrderedDict()

        def _fetch_entries():
            tree = self.repo.get_root_tree(commit)
            for entry_wrapper in self.repo.iter_tree(tree):
                entry = entry_wrapper.tree_entry
                path = force_text(entry_wrapper.path)
                blob = entry_wrapper.blob

                sha_hash = (
                    get_sha256(io.BytesIO(memoryview(blob)))
                    if not entry.type == 'tree' else '')

                commit_tzinfo = FixedOffset(commit.commit_time_offset)
                commit_time = datetime.fromtimestamp(
                    float(commit.commit_time),
                    commit_tzinfo)

                mimetype, entry_mime_category = self.get_entry_mime_type(
                    entry, blob)

                result[path] = {
                    'depth': path.count(os.sep),
                    'filename': force_text(entry.name),
                    'sha256': sha_hash,
                    'mime_category': entry_mime_category,
                    'mimetype': mimetype,
                    'path': path,
                    'size': blob.size if blob is not None else None,
                    'modified': commit_time,
                }
            return result

        self._entries = cache_get_or_set(
            'reviewers:fileentriesserializer:entries:{}'.format(commit.hex),
            _fetch_entries,
            # Store information about this commit for 24h which should be
            # enough to cover regular review-times but not overflow our
            # cache
            60 * 60 * 24)

        return self._entries
Example #12
File: views.py Project: eviljeff/olympia
def get_versions(order=('application', 'version_int')):
    def fetch_versions():
        if waffle.switch_is_active('disallow-thunderbird-and-seamonkey'):
            apps = amo.APP_USAGE_FIREFOXES_ONLY
        else:
            apps = amo.APP_USAGE
        versions = {app.id: [] for app in apps}
        qs = list(AppVersion.objects.order_by(*order)
                  .filter(application__in=versions)
                  .values_list('application', 'version'))
        for app, version in qs:
            versions[app].append(version)
        return apps, versions
    return cache_get_or_set('getv' + ':'.join(order), fetch_versions)
Example #13
    def blocked(cls, name):
        """
        Check to see if a given name is in the (cached) deny list.
        Return True if the name contains one of the denied terms.

        """
        name = name.lower()
        qs = cls.objects.all()

        def fetch_names():
            return [n.lower() for n in qs.values_list('name', flat=True)]

        blocked_list = cache_get_or_set('denied-name:blocked', fetch_names)
        return any(n in name for n in blocked_list)
Example #14
    def get_files(self):
        """
        Returns an OrderedDict, ordered by the filename of all the files in the
        addon-file. Full of all the useful information you'll need to serve
        this file, build templates etc.
        """
        if self._files:
            return self._files

        if not self.is_extracted():
            extract_file(self)

        self._files = cache_get_or_set(self._cache_key(), self._get_files)
        return self._files
Example #15
File: models.py Project: diox/olympia
    def blocked(cls, name):
        """
        Check to see if a given name is in the (cached) deny list.
        Return True if the name contains one of the denied terms.

        """
        name = name.lower()
        qs = cls.objects.all()

        def fetch_names():
            return [n.lower() for n in qs.values_list('name', flat=True)]

        blocked_list = cache_get_or_set('denied-name:blocked', fetch_names)
        return any(n in name for n in blocked_list)
Example #16
    def get_files(self):
        """
        Returns an OrderedDict, ordered by the filename of all the files in the
        addon-file. Full of all the useful information you'll need to serve
        this file, build templates etc.
        """
        if self._files:
            return self._files

        if not self.is_extracted():
            extract_file(self)

        self._files = cache_get_or_set(self._cache_key(), self._get_files)
        return self._files
Example #17
def get_versions(order=('application', 'version_int')):
    def fetch_versions():
        if waffle.switch_is_active('disallow-thunderbird-and-seamonkey'):
            apps = amo.APP_USAGE_FIREFOXES_ONLY
        else:
            apps = amo.APP_USAGE
        versions = {app.id: [] for app in apps}
        qs = list(
            AppVersion.objects.order_by(*order).filter(
                application__in=versions).values_list('application',
                                                      'version'))
        for app, version in qs:
            versions[app].append(version)
        return apps, versions

    return cache_get_or_set('getv' + ':'.join(order), fetch_versions)
Example #18
File: views.py Project: waf/addons-server
def version_detail(request, addon, version_num):
    # TODO: Does setting this in memcached even make sense?
    # This is specific to an add-on's version so the chance of this hitting
    # the cache and not missing seems quite bad to me (cgrebs)
    def _fetch():
        qs = _version_list_qs(addon)
        return list(qs.values_list('version', flat=True))

    ids = cache_get_or_set(
        u'version-detail:{}:{}'.format(addon.id, version_num),
        _fetch)

    url = reverse('addons.versions', args=[addon.slug])
    if version_num in ids:
        page = 1 + ids.index(version_num) / PER_PAGE
        to = urlparams(url, 'version-%s' % version_num, page=page)
        return http.HttpResponseRedirect(to)
    else:
        raise http.Http404()
Example #19
    def get_addons(self):
        addons = self.collection.addons.public()
        kw = {
            'addon_type': 'ALL',
            'limit': self.limit,
            'app': self.request.APP,
            'platform': self.platform,
            'version': self.version,
            'compat_mode': self.compat_mode
        }

        def fetch_and_filter_addons():
            return addon_filter(addons, **kw)

        # The cache-key can be very long, let's normalize it to make sure
        # we never hit the 250-char limit of memcached.
        cache_key = make_key(
            'collections-promo-get-addons:{}'.format(repr(kw)),
            normalize=True)
        return cache_get_or_set(cache_key, fetch_and_filter_addons)
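
make_key(..., normalize=True) is used here and in several other examples because repr(kw) produces arbitrarily long keys, while memcached limits keys to 250 characters. The actual addons-server helper is not shown on this page; a hypothetical sketch of that kind of normalization could simply hash the readable key down to a fixed-length digest.

import hashlib


def make_key(key, normalize=False):
    # Hypothetical sketch, not the project's implementation: memcached keys
    # must be at most 250 characters with no whitespace or control
    # characters, so normalization swaps the readable key for a hex digest.
    if not normalize:
        return key
    return hashlib.sha256(key.encode('utf-8')).hexdigest()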
Example #20
    def get_addons(self):
        addons = self.collection.addons.public()
        kw = {
            'addon_type': 'ALL',
            'limit': self.limit,
            'app': self.request.APP,
            'platform': self.platform,
            'version': self.version,
            'compat_mode': self.compat_mode
        }

        def fetch_and_filter_addons():
            return addon_filter(addons, **kw)

        # The cache-key can be very long, let's normalize it to make sure
        # we never hit the 250-char limit of memcached.
        cache_key = make_key('collections-promo-get-addons:{}'.format(
            repr(kw)),
                             normalize=True)
        return cache_get_or_set(cache_key, fetch_and_filter_addons)
Example #21
File: views.py Project: eviljeff/olympia
def version_detail(request, addon, version_num):
    # TODO: Does setting this in memcached even make sense?
    # This is specific to an add-on's version so the chance of this hitting
    # the cache and not missing seems quite bad to me (cgrebs)
    def _fetch():
        qs = _version_list_qs(addon)
        return list(qs.values_list('version', flat=True))

    cache_key = make_key(
        u'version-detail:{}:{}'.format(addon.id, version_num),
        normalize=True)

    ids = cache_get_or_set(cache_key, _fetch)

    url = reverse('addons.versions', args=[addon.slug])
    if version_num in ids:
        page = 1 + ids.index(version_num) / PER_PAGE
        to = urlparams(url, 'version-%s' % version_num, page=page)
        return http.HttpResponseRedirect(to)
    else:
        raise http.Http404()
Example #22
def _category_personas(qs, limit):
    def fetch_personas():
        return randslice(qs, limit=limit)

    key = make_key('cat-personas:' + str(qs.query), normalize=True)
    return cache_get_or_set(key, fetch_personas)
Example #23
def site_nav(context):
    app = context['request'].APP.id
    cache_key = make_key('site-nav-%s' % app, normalize=True)
    return cache_get_or_set(cache_key, lambda: _site_nav(context))
Example #24
def site_nav(context):
    app = context['request'].APP.id
    cache_key = make_key('site-nav-%s' % app, normalize=True)
    return cache_get_or_set(cache_key, lambda: _site_nav(context))
Example #25
File: views.py Project: bqbn/addons-server
def _category_personas(qs, limit):
    def fetch_personas():
        return randslice(qs, limit=limit)
    key = make_key('cat-personas:' + str(qs.query), normalize=True)
    return cache_get_or_set(key, fetch_personas)