Example #1
def get_cart(request):
    """
    Returns the cart of the current shop customer. If the customer has no
    cart yet, it returns None.
    """
    session_key = request.session.session_key
    user = request.user

    if user.is_authenticated():
        try:
            cache_key = "%s-cart-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, user)
            cart = cache.get(cache_key)
            if cart is None:
                cart = Cart.objects.get(user=user)
                cache.set(cache_key, cart)
            return cart
        except ObjectDoesNotExist:
            return None
    else:
        try:
            cache_key = "%s-cart-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, session_key)
            cart = cache.get(cache_key)
            if cart is None:
                cart = Cart.objects.get(session=session_key)
                cache.set(cache_key, cart)
            return cart
        except ObjectDoesNotExist:
            return None
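The two branches above differ only in the cache key and the ORM lookup. A condensed sketch of the same cache-aside pattern with the duplication factored out, assuming the same Cart model and settings, and Django >= 1.10 (cache.get_or_set, is_authenticated as a property):

def get_cart_condensed(request):
    # Sketch only: pick the identifier, then let get_or_set run the
    # get -> miss -> query -> store cycle in one call.
    if request.user.is_authenticated:
        ident, lookup = request.user, {'user': request.user}
    else:
        ident, lookup = request.session.session_key, {'session': request.session.session_key}
    cache_key = "%s-cart-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, ident)
    try:
        return cache.get_or_set(cache_key, lambda: Cart.objects.get(**lookup))
    except ObjectDoesNotExist:
        return None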
Example #2
    def test_cached_list(self):

        # Set up the test data.
        users = User.objects.all()[:10]
        user_cache = TestUserCachedList(users)
        self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])

        # Force it through the pickle cycle.
        user_cache = pickle.loads(pickle.dumps(user_cache))
        self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])

        # The pickle size is greatly reduced. While making this test, it went
        # from 6377 bytes to 201 bytes. To avoid a brittle test, just check
        # that it's less than half the size.
        normal_pickle_size = len(pickle.dumps(users))
        improved_pickle_size = len(pickle.dumps(user_cache))
        self.assertTrue(improved_pickle_size < normal_pickle_size / 2.0)

        # Force it through the cache cycle.
        cache_key = 'apncore.cache.tests.test_cached_list'
        user_cache.cache(cache_key)
        user_cache = cache.get(cache_key)
        self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])

        # Delete the cached items, forcing the class to rebuild them.
        # The main list must be retrieved again to test unpacking its items.
        item_cache_keys = list(user_cache.make_cache_keys([user.pk for user in users]))
        cache.delete_many(item_cache_keys)
        user_cache = cache.get(cache_key)
        self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])
Example #3
def get_recommended_for_single_category(request, *args, **kwargs):
    category_id = request.GET.get('category_id')
    response = []
    if category_id:
        category = Category.objects.get(pk=category_id)
        member = request.user
        cache_key = member.email + ':recommended-' + category_id
        recommended = cache.get(cache_key)
        if not recommended:
            exclude_list_keys = cache.get(member.email + ':' + EXCLUDE_LIST_KEYS_KEY)
            exclude_list = []
            if not exclude_list_keys:
                exclude_list_keys = set()
            else:
                for key in exclude_list_keys:
                    items = cache.get(key)
                    if items:
                        exclude_list.extend(items)
            recommended = get_recommended_for_category(category, category.previews_length, exclude_list)
            exclude_list_keys.add(cache_key)
            cache.set(cache_key, recommended)
            cache.set(member.email + ':' + EXCLUDE_LIST_KEYS_KEY, exclude_list_keys)
        response = [item.to_dict() for item in recommended]
    return HttpResponse(
        json.dumps(response),
        content_type='application/json'
    )
Example #4
def check_setting(scope, scope_category, name):
    #check cache first
    keys = [d_settings.CACHE_PRE_KEY, SETTING_PRE_KEY, scope,
            scope_category, name]
    key = '.'.join(keys)

    setting = cache.get(key)
    if setting:
        return True

    missing_keys = [d_settings.CACHE_PRE_KEY, SETTING_PRE_KEY, scope,
            scope_category, name, "missing"]
    missing_key = '.'.join(missing_keys)

    missing = cache.get(missing_key)
    if not missing:
        #check the db if it is not in the cache
        exists = Setting.objects.filter(scope=scope,
            scope_category=scope_category, name=name).exists()

        #cache that it does not exist
        if not exists:
            #set to True to signify that it is missing so we do not
            #come back into this if statement and query db again
            is_set = cache.add(missing_key, True)
            if not is_set:
                cache.set(missing_key, True)

        return exists
    return False
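check_setting caches a positive hit under one key and the absence of a setting under a separate "missing" key, so repeated lookups for nonexistent settings never reach the database. The add-then-set dance at the end is redundant here, since the key is only ever written as True and cache.add leaves an existing key untouched; a single call expresses the same negative caching (a sketch, not the original helper):

def cache_missing(missing_key):
    # cache.add is a no-op when the key already exists, so one call is
    # enough to record "this setting is known to be absent".
    cache.add(missing_key, True)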
Example #5
def form_article(request, id_article=None):
	if request.method == "POST":
		post_data = request.POST.copy()  # request.POST is immutable; edit a copy
		post_data['last_editor_id'] = cache.get('user_id')
		if id_article:
			article = Article.objects.get(id=id_article)
			form = ArticleForm(post_data, instance=article)
		else:
			post_data['author_id'] = cache.get('user_id')
			form = ArticleForm(post_data)

		if form.is_valid():
			form.save()
			return redirect(request.META.get('HTTP_REFERER','/'))
	else: #GET
		id_article = request.GET.get('id_article', None)
		args = {}
		args.update(csrf(request))
		if id_article:
			article = Article.objects.get(id=id_article)
			args['id_article'] = id_article
			args['form'] = ArticleForm(instance=article)
		else:
			args['form'] = ArticleForm()

		return render_to_response('edit_article.html', args)
Example #6
    def test_sitemap_cache(self):
        page1, page2, page3 = self.get_pages()
        PageSitemapProperties.objects.create(
            extended_object=page1, priority='0.2', changefreq='never'
        )
        PageSitemapProperties.objects.create(
            extended_object=page2, priority='0.8', changefreq='hourly'
        )
        page1.publish('it')
        page1 = page1.get_public_object()
        page2.publish('it')
        page2 = page2.get_public_object()
        sitemap = ExtendedSitemap()
        self.assertEqual(len(sitemap.items()), 6)
        for item in sitemap.items():
            if item.page.pk == page1.pk:
                self.assertEqual(sitemap.changefreq(item), 'never')
                self.assertEqual(sitemap.priority(item), Decimal('0.2'))
                ext_key = get_cache_key(item.page)
                self.assertEqual(cache.get(ext_key), item.page.pagesitemapproperties)
            if item.page.pk == page2.pk:
                self.assertEqual(sitemap.changefreq(item), 'hourly')
                self.assertEqual(sitemap.priority(item), Decimal('0.8'))

        ext_key = get_cache_key(page1)
        page1.pagesitemapproperties.save()
        self.assertEqual(cache.get(ext_key), None)

        ext_key = get_cache_key(page2)
        page2.delete()
        self.assertEqual(cache.get(ext_key), None)
Example #7
def brand_tree(category=None):
    """
    Creates an unordered list of the brands.

    Example::

        <ul>
            <li>Books
                <ul>
                <li>Science Fiction
                    <ul>
                    <li>Space stories</li>
                    <li>Robot stories</li>
                    </ul>
                </li>
                <li>Non-fiction</li>
                </ul>
        </ul>
    """
    key = 'shop_tree_%s' % (category.slug if category else 'all')
    brands = cache.get(key)
    if brands is None:
        if category:
            brands = Brand.objects.filter(categories__slug=category.slug)
        else:
            brands = Brand.objects.all()
        cache.set(key, brands, 86000)
    return {"brands": brands, "category": category.slug if category else None}
Example #8
def event_screencaptures(request, event):
    if event.status != Event.STATUS_INITIATED:
        return http.HttpResponseBadRequest(
            "Events NOT in the state of initiated."
        )
    upload = event.upload
    video_url = upload.url

    context = {}

    cache_key = 'fetching-{0}'.format(event.id)

    # This function sets the cache `fetching-{id}` before and after calling
    # those functions in the videoinfo module.
    # The reason is that those calls might take many many seconds
    # and the webapp might send async calls to the event_picture view
    # which will inform the webapp that the slow videoinfo processes
    # are running and thus that the webapp shouldn't kick it off yet.

    seconds = event.duration
    if not event.duration:
        # it's a poor man's lock
        if not cache.get(cache_key):
            cache.set(cache_key, True, 60)
            seconds = videoinfo.fetch_duration(
                event,
                video_url=video_url,
                save=True,
                verbose=settings.DEBUG
            )
            cache.delete(cache_key)
            event = Event.objects.get(id=event.id)
    context['seconds'] = seconds
    # The reason we can't use `if event.duration:` is because the
    # fetch_duration() does an inline-update instead of modifying
    # the instance object.
    no_pictures = Picture.objects.filter(event=event).count()
    if event.duration and not no_pictures:
        if not cache.get(cache_key):
            cache.set(cache_key, True, 60)
            event = Event.objects.get(id=event.id)
            no_pictures = videoinfo.fetch_screencapture(
                event,
                video_url=video_url,
                save=True,
                verbose=settings.DEBUG,
                set_first_available=not event.picture,
                import_immediately=True,
            )
            cache.delete(cache_key)
            event = Event.objects.get(id=event.id)
    if no_pictures and not event.picture:
        # no picture has been chosen previously
        pictures = Picture.objects.filter(event=event).order_by('created')[:1]
        for picture in pictures:
            event.picture = picture
            event.save()
            break
    context['no_pictures'] = no_pictures
    return context
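The get-then-set pair used as a poor man's lock above leaves a window in which two requests can both see an empty key and start the slow videoinfo work. On backends where cache.add is atomic (memcached, Redis), the claim and the check collapse into one call; a sketch under that assumption:

# Sketch: cache.add returns False when another request already holds the
# key, so at most one caller runs fetch_duration at a time.
if cache.add(cache_key, True, 60):
    try:
        seconds = videoinfo.fetch_duration(
            event, video_url=video_url, save=True, verbose=settings.DEBUG
        )
    finally:
        cache.delete(cache_key)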
Example #9
 def testGetOrInsertObjectInCache(self):
     """
     Tests get_or_insert_object_in_cache from the cache helpers.
     """
     
     
     space_props = {'url': 'test_space', 'name': 'some_name'}
     space_key = cache_helper._get_cache_key_for_model(Space, 'test_space')
     expected = None
     actual = cache.get(space_key)
     self.assertEqual(expected, actual)
     
     space = Space(**space_props)
     space.save()
     expected = space
     actual = cache_helper.get_or_insert_object_in_cache(Space, 
                                                         space.url, url=space.url)
     self.assertEqual(expected, actual)
     
     cache.delete(space_key)
     self.assertEqual(cache.get(space_key), None)
     expected = space
     actual = cache_helper.get_or_insert_object_in_cache(Space, 
                                                         space.url, url=space.url)
     self.assertEqual(expected, actual)
     
Example #10
def _slowed_down_user_ids():
    val = cache.get(SLOWDOWN_USER_IDS_CACHE_KEY)
    if val is None:
        _populate_cache()
        val = cache.get(SLOWDOWN_USER_IDS_CACHE_KEY)

    return val
Example #11
 def test_data_caching(self):
     cache.clear()
     key = pypi.cache_key('django')
     package = pypi.Package('django')
     self.assertIsNone(cache.get(key))
     self.assertEqual(package.data(), self.test_data)
     self.assertEqual(cache.get(key), package.data())
Example #12
def save(request):
    form = forms.SaveForm(request.POST)
    if not form.is_valid():
        return http.HttpResponseBadRequest(str(form.errors))
    url = form.cleaned_data['url']
    upload_time = form.cleaned_data['upload_time']
    # hash the URL once and reuse it for all three cache keys
    url_hash = hashlib.md5(url.encode('utf-8')).hexdigest()
    size = cache.get('length_%s' % url_hash)
    if not size:
        r = requests.head(url)
        size = int(r.headers['content-length'])
        if not size:
            return http.HttpResponseBadRequest('URL could not be downloaded')
    file_name = cache.get('file_name_%s' % url_hash)
    if not file_name:
        file_name = os.path.basename(url)

    mime_type = cache.get('mime_type_%s' % url_hash)

    new_upload = Upload.objects.create(
        user=request.user,
        url=url,
        size=size,
        file_name=file_name,
        mime_type=mime_type,
        upload_time=upload_time,
    )
    messages.info(
        request,
        'Upload saved.'
    )
    context = {'id': new_upload.pk, 'url': new_upload.url}
    if request.session.get('active_event'):
        event_id = request.session['active_event']
        event = Event.objects.get(pk=event_id)
        event.upload = new_upload
        event.save()
        new_upload.event = event
        new_upload.save()
        next_url = reverse('manage:event_archive', args=(event_id,))
        next_url += '#vidly-shortcutter'
        context['event'] = {
            'url': next_url,
            'title': event.title,
        }
    elif request.session.get('active_suggested_event'):
        event_id = request.session['active_suggested_event']
        event = SuggestedEvent.objects.get(pk=event_id)
        event.upload = new_upload
        event.save()
        new_upload.suggested_event = event
        new_upload.save()
        next_url = reverse('suggest:description', args=(event_id,))
        context['suggested_event'] = {
            'url': next_url,
            'title': event.title
        }
    return context
Example #13
def get_tags_and_musics(tag_key, music_key):
    color_array = [
        '0', '1', '2', '3', '4', '5', '6', '7',
        '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'
    ]
    music_list, tag_list = [], []
    if tag_key in cache:
        tag_list = cache.get(tag_key)
    else:
        tag_list = list(Tag.objects.all())
        for tag in tag_list:
            tag.color = '#' + ''.join(random.sample(color_array, 6))  # give each tag a random color
        random.shuffle(tag_list)  # random.shuffle() returns None; it shuffles the list in place
        cache.set(tag_key, tag_list, CACHE_TIME)
    if music_key in cache:
        music_list = cache.get(music_key)
    else:
        musics = Music.objects.all()
        for item in musics:
            music_list.append({
                "name": item.name,
                "url": item.url,
                "cover": item.cover,
                "artist": item.artist,
                "lrc": item.lrc,
            })
        random.shuffle(music_list)
        music_list = music_list[:3]  # keep three random tracks
        cache.set(music_key, music_list, CACHE_TIME)  # cache the list itself so hits and misses return the same type

    return tag_list, music_list
Example #14
    def test_equipment_cache_update(self):
        '''
        Test that the template cache for the overview is correctly reset when
        performing certain operations
        '''

        self.assertFalse(cache.get(get_template_cache_name('equipment-overview', 2)))

        self.client.get(reverse('exercise:equipment:overview'))
        self.client.get(reverse('exercise:exercise:view', kwargs={'id': 2}))

        old_overview = cache.get(get_template_cache_name('equipment-overview', 2))

        exercise = Exercise.objects.get(pk=2)
        exercise.name = 'Very cool exercise 2'
        exercise.description = 'New description'
        exercise.equipment.add(Equipment.objects.get(pk=2))
        exercise.save()

        self.assertFalse(cache.get(get_template_cache_name('equipment-overview', 2)))

        self.client.get(reverse('exercise:equipment:overview'))
        self.client.get(reverse('exercise:exercise:view', kwargs={'id': 2}))

        new_overview = cache.get(get_template_cache_name('equipment-overview', 2))

        self.assertNotEqual(old_overview, new_overview)
Example #15
def get_global_count():

    count_cached_value = cache.get(GLOBAL_COUNT_CACHE_KEY, None)
    if count_cached_value:
        return count_cached_value

    try:
        old_site_reporter_counter_keys = cache.keys('org:*:reporters:old-site')

        cached_values = [cache.get(key) for key in old_site_reporter_counter_keys]

        # no old sites cache values, double check with a fetch
        if not cached_values:
            cached_values = fetch_old_sites_count()

        count = sum([elt['results'].get('size', 0) for elt in cached_values if elt.get('results', None)])

        for org in Org.objects.filter(is_active=True):
            if org.get_config('is_on_landing_page'):
                count += get_reporters_count(org)

        # cached for 10 min
        cache.set(GLOBAL_COUNT_CACHE_KEY, count, 60 * 10)
    except AttributeError:
        import traceback
        traceback.print_exc()
        count = '__'

    return count
Example #16
 def get_topics_and_positions(self, election):
     cache_key = 'topics_and_positions_' + str(election.id)
     cached = cache.get(cache_key)
     if cached is not None:
         return cached
     topics, positions = self._get_topics_and_positions(election)
     cache.set(cache_key, (topics, positions))
     return (topics, positions)
Example #17
 def get_context_data(self):
     out = super(HomeView, self).get_context_data()
     out.update({
         'last_receive': cache.get('last_receive', None),
         'photos_count': cache.get('photos_count', 0),
     })
     return out
Example #18
def billing(request):
    duration = 3600
    initial = {
        'months_total': '1',
        'types': ['Permanent - full-time','Permanent - part-time','Permanent - no working time'],
        'till': 'today',
        'exclude': '',
        }
    form = BillingForm(request.POST or None, initial=initial)
    cleaned_data = form.data
    if request.POST:
        form.full_clean()
        cleaned_data = form.cleaned_data
    else:
        cleaned_data = form.initial

    formhash = hashlib.md5(str(cleaned_data).encode('utf-8')).hexdigest()
    key = '{}_{}'.format('billing', formhash)
    key_updated = '{}_updated'.format(key)
    result = cache.get(key)

    if result is None:
        cache.set(key, {}, duration)  # poor man's stampede protection
        only_this_month = cleaned_data.get('only_this_month')
        months = []
        if only_this_month:
            only_this_month = parse_date(only_this_month)
            only_this_month = only_this_month.replace(day=1)
            months = [only_this_month]

        result = find_missing_hour_markings(
                n=int(cleaned_data.get('months_total')),
                wanted_user_types=cleaned_data.get('types'),
                till=cleaned_data.get('till'),
                exclude_usernames=(cleaned_data.get('exclude') or '').split(','),
                only_this_month=only_this_month,
                months=months,
                )
        cache.set(key, result, duration)
        cache.set(key_updated, dtnow(), duration)

    # group hours by year, month
    for tribe, users in result.items():
        for user_index, user in enumerate(users):
            fmt_days = {}
            for day in user[2]:
                fmt_days.setdefault(day.year, {})
                month_human = calendar.month_name[day.month]
                fmt_days[day.year].setdefault(month_human, [])
                fmt_days[day.year][month_human].append(day)
            result[tribe][user_index][2] = fmt_days

    updated = cache.get(key_updated) or dtnow()
    context = {
        'updated': int((dtnow()-updated).total_seconds()/60),
        'result': result,
        'update_interval': int(duration/60),
        'form': form,
    }
    return render(request, 'billing.html', context)
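The empty-dict placeholder above narrows the stampede window but does not close it: two requests can still interleave between cache.get and cache.set. A sketch of the same protection with cache.add, which only succeeds when the key is absent (atomic on memcached and Redis), reusing the names from the view above:

    result = cache.get(key)
    if result is None:
        if cache.add(key, {}, duration):
            # This request won the placeholder, so it does the slow work
            # and then overwrites the placeholder with the real report.
            result = find_missing_hour_markings(
                    n=int(cleaned_data.get('months_total')),
                    wanted_user_types=cleaned_data.get('types'),
                    till=cleaned_data.get('till'),
                    exclude_usernames=(cleaned_data.get('exclude') or '').split(','),
                    only_this_month=only_this_month,
                    months=months,
                    )
            cache.set(key, result, duration)
            cache.set(key_updated, dtnow(), duration)
        else:
            result = {}  # someone else is computing; serve the placeholder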
Example #19
def analyze_model_fetches(request):
    context = {}
    measurements = []
    for label, value_type in (('API', 'classes'), ('URLS', 'urls')):
        all_items = cache.get('all_%s' % value_type) or []  # avoid shadowing the builtin all()
        records = []
        for item in all_items:
            itemkey = hashlib.md5(item.encode('utf-8')).hexdigest()

            data = {}
            data['times'] = {}
            data['times']['hits'] = cache.get('times_HIT_%s' % itemkey, 0)
            data['times']['misses'] = cache.get('times_MISS_%s' % itemkey, 0)
            data['times']['both'] = (
                data['times']['hits'] + data['times']['misses']
            )
            data['uses'] = {}
            data['uses']['hits'] = cache.get('uses_HIT_%s' % itemkey, 0)
            data['uses']['misses'] = cache.get('uses_MISS_%s' % itemkey, 0)
            data['uses']['both'] = (
                data['uses']['hits'] + data['uses']['misses']
            )
            records.append((item, data))
        measurements.append([label, value_type, records])
    context['measurements'] = measurements
    return render(request, 'manage/analyze-model-fetches.html', context)
Example #20
def retrieve(host, path, params):
    """Get an API document from our cache."""
    cacheKey = "%s%s%s" % (host, path, params)
    cached = cache.get(hash(cacheKey))
    if cached:
        return pickle.loads(zlib.decompress(cached))
    return None
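Only the read side is shown; a write-side counterpart under the same conventions (pickle, then zlib-compress, keyed by the same string hash) might look like the following hypothetical helper:

def store(host, path, params, document, timeout=300):
    # Mirror image of retrieve(): serialize and compress before caching.
    # The helper name and timeout are assumptions, not part of the source.
    cacheKey = "%s%s%s" % (host, path, params)
    cache.set(hash(cacheKey), zlib.compress(pickle.dumps(document)), timeout)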
Example #21
def get_forum_info():
    # use the cache here
    oneday = timedelta(days=1)
    today = now().date()
    lastday = today - oneday
    todayend = today + oneday
    post_number = Post.objects.count()
    account_number = LoginUser.objects.count()

    lastday_post_number = cache.get('lastday_post_number', None)
    today_post_number = cache.get('today_post_number', None)

    if lastday_post_number is None:
        lastday_post_number = Post.objects.filter(created_at__range=[lastday, today]).count()
        cache.set('lastday_post_number', lastday_post_number, 60 * 60)

    if today_post_number is None:
        today_post_number = Post.objects.filter(created_at__range=[today, todayend]).count()
        cache.set('today_post_number', today_post_number, 60 * 60)

    info = {"post_number":post_number,
		"account_number":account_number,
		"lastday_post_number":lastday_post_number,
		"today_post_number":today_post_number}
    return info
Example #22
 def test_individual_rendition_cache_clear(self):
     """
     Test that VersatileImageField can clear cache entries for individual
     renditions.
     """
     expected_image_url = (
         '/media/__sized__/python-logo-delete-test-thumbnail-100x100.jpg'
     )
     self.assertEqual(
         cache.get(expected_image_url),
         None
     )
     img = self.delete_test
     img.image.create_on_demand = True
     img_url = img.image.thumbnail['100x100'].url
     del img_url
     self.assertEqual(
         cache.get(expected_image_url),
         1
     )
     img.image.thumbnail['100x100'].delete()
     self.assertEqual(
         cache.get(expected_image_url),
         None
     )
     self.assertFalse(
         img.image.field.storage.exists(
             '__sized__/python-logo-delete-test-thumbnail-100x100.jpg'
         )
     )
Example #23
    def similar_beers_by_reviews(self):
        """
        Returns a cached list of beers similar to this one, based on reviews.
        i.e. "People who liked this beer also liked..."
        """
        # UNCACHED VERSION
        # if self.rating:
        #   return [recommendation[1] for recommendation in Recommender.objects.get_similar_items(self, User.objects.all(), Beer.objects.filter(rating__isnull=False))]
        # else:
        #   return []

        # CACHED VERSION.
        if self.rating:
            cache_key = slugify(u"similar_beers_by_reviews_%s" % self.slug)
            similar_beers_by_reviews = cache.get(cache_key)
            if similar_beers_by_reviews == []:
                return similar_beers_by_reviews
            if not similar_beers_by_reviews:
                cache.add(
                    cache_key,
                    [
                        recommendation[1]
                        for recommendation in Recommender.objects.get_similar_items(
                            self, User.objects.all(), Beer.objects.filter(rating__isnull=False)
                        )
                    ],
                    7200,
                )
                similar_beers_by_reviews = cache.get(cache_key)
            return similar_beers_by_reviews
        else:
            return []
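This and the next few beer examples special-case an empty list because cache.get returns None for a miss, and None is also what an empty cached result would look like under a plain truthiness check; a cached empty list is a legitimate hit that must not trigger recomputation. A sketch of the same distinction made explicit with a sentinel default (helper name hypothetical):

_MISS = object()

def cached_or_compute(cache_key, compute, timeout=7200):
    # The default argument to cache.get lets us tell a cached empty list
    # apart from a genuine miss without comparing against [] first.
    value = cache.get(cache_key, _MISS)
    if value is _MISS:
        value = compute()
        cache.add(cache_key, value, timeout)
    return value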
Example #24
 def favorite_varieties(self):
     """
     An algorithmically generated list of a user's favorite beer categories.
     """
     cache_key = slugify(u"favorite_varieties_%s" % self.__unicode__())
     favorite_varieties = cache.get(cache_key)
     if favorite_varieties == []:
         return favorite_varieties
     if not favorite_varieties:
         faves = self.user.faves.filter(withdrawn=False)
         reviews = self.user.review_created.all()
         varieties = Category.objects.all()
         favorite_varieties = {}
         for fave in faves:
             if fave.content_object.variety not in favorite_varieties:
                 favorite_varieties[fave.content_object.variety] = 5
             favorite_varieties[fave.content_object.variety] += 5
         for review in reviews:
             if review.beer.variety not in favorite_varieties:
                 favorite_varieties[review.beer.variety] = 1
             if review.rating > 80:
                 favorite_varieties[review.beer.variety] += 5
             elif review.rating > 60:
                 favorite_varieties[review.beer.variety] += 4
             elif review.rating > 40:
                 favorite_varieties[review.beer.variety] += 3
             elif review.rating > 20:
                 favorite_varieties[review.beer.variety] += 2
             else:
                 favorite_varieties[review.beer.variety] += 1
         items = [(value, key) for key, value in favorite_varieties.items()]
         items.sort(reverse=True)
         cache.add(cache_key, [item[1] for item in items], 28800)
         favorite_varieties = cache.get(cache_key)
     return favorite_varieties
Example #25
    def recommended_for_users(self):
        """
        Returns a cached list of users that this beer is recommended for.
        """
        # UNCACHED VERSION
        # if self.rating:
        #   return Recommender.objects.get_best_users_for_item(self, User.objects.all(), Beer.objects.filter(rating__isnull=False))
        # else:
        #   return []

        # CACHED VERSION.
        if self.rating:
            cache_key = slugify(u"recommended_for_users_%s" % self.slug)
            recommended_for_users = cache.get(cache_key)
            if recommended_for_users == []:
                return recommended_for_users
            if not recommended_for_users:
                cache.add(
                    cache_key,
                    Recommender.objects.get_best_users_for_item(
                        self, User.objects.all(), Beer.objects.filter(rating__isnull=False)
                    ),
                    7200,
                )
                recommended_for_users = cache.get(cache_key)
            return recommended_for_users
        else:
            return []
Example #26
    def similar_users(self, values=False):
        """
        Returns a cached list of similar users for this user.
        """
        # UNCACHED VERSION
        # if values:
        #   Recommender.objects.get_similar_users(self.user, User.objects.all(), Beer.objects.filter(rating__isnull=False))
        # else:
        #   return [ item[1] for item in Recommender.objects.get_similar_users(self.user, User.objects.all(), Beer.objects.filter(rating__isnull=False))]

        # CACHED VERSION.
        cache_key = slugify(u"similar_users_%s" % self.__unicode__())
        similar_users = cache.get(cache_key)
        if similar_users == []:
            return similar_users
        if not similar_users:
            similar_users = Recommender.objects.get_similar_users(
                self.user, User.objects.all(), Beer.objects.filter(rating__isnull=False)
            )
            cache.add(cache_key, similar_users, 7200)
            similar_users = cache.get(cache_key)
        if values:
            return similar_users
        else:
            return [item[1] for item in similar_users]
Example #27
    def recommended_beers_by_tags(self, values=False):
        """
        Returns a cached list of recommended beers, based on tags.
        """
        # UNCACHED VERSION
        # if values:
        #   return Recommender.objects.get_content_based_recs(self.user, Beer.objects.filter(rating__isnull=False))
        # else:
        #   return [ item[1] for item in Recommender.objects.get_content_based_recs(self.user, Beer.objects.filter(rating__isnull=False))]

        # CACHED VERSION.
        cache_key = slugify(u"recommended_beers_by_tags_%s" % self.__unicode__())
        recommended_beers_by_tags = cache.get(cache_key)
        if recommended_beers_by_tags == []:
            return recommended_beers_by_tags
        if not recommended_beers_by_tags:
            recommended_beers_by_tags = Recommender.objects.get_content_based_recs(
                self.user, Beer.objects.filter(rating__isnull=False)
            )
            cache.add(cache_key, recommended_beers_by_tags, 7200)
            recommended_beers_by_tags = cache.get(cache_key)
        if values:
            return recommended_beers_by_tags
        else:
            faves_list = [fave.content_object for fave in Fave.active_objects.filter(user=self.user)]
            return [item[1] for item in recommended_beers_by_tags if item[1] not in faves_list]
Example #28
 def test_create_on_demand_functionality(self):
     """Ensures create_on_demand functionality works as advertised"""
     jpg = VersatileImageTestModel.objects.get(img_type='jpg')
     img_url = jpg.image.crop['100x100'].url
     self.assertEqual(
         cache.get(img_url),
         None
     )
     jpg.image.create_on_demand = True
     jpg.image.crop['100x100'].url
     self.assertEqual(
         cache.get(img_url),
         1
     )
     self.assertTrue(
         jpg.image.field.storage.exists(jpg.image.crop['100x100'].name)
     )
     jpg.image.field.storage.delete(jpg.image.crop['100x100'].name)
     self.assertFalse(
         jpg.image.field.storage.exists(jpg.image.crop['100x100'].name)
     )
     cache.delete(img_url)
     self.assertEqual(
         cache.get(img_url),
         None
     )
Example #29
    def test_jinja_cache_tag_queryset(self):
        env = jinja2.Environment(extensions=['caching.ext.cache'])
        def check(q, expected):
            t = env.from_string(
                "{% cache q %}{% for x in q %}{{ x.id }}:{{ x.val }};"
                "{% endfor %}{% endcache %}")
            eq_(t.render(q=q), expected)

        # Get the template in cache, then hijack iterator to make sure we're
        # hitting the cached fragment.
        check(Addon.objects.all(), '1:42;2:42;')
        qs = Addon.objects.all()
        qs.iterator = mock.Mock()
        check(qs, '1:42;2:42;')
        assert not qs.iterator.called

        # Make changes, make sure we dropped the cached fragment.
        a = Addon.objects.get(id=1)
        a.val = 17
        a.save()

        q = Addon.objects.all()
        assert cache.get(q.flush_key()) is None

        check(Addon.objects.all(), '1:17;2:42;')
        qs = Addon.objects.all()
        qs.iterator = mock.Mock()
        check(qs, '1:17;2:42;')
Example #30
def _generate_waffle_js(request):
    flags = cache.get(keyfmt(FLAGS_ALL_CACHE_KEY))
    if not flags:
        flags = Flag.objects.values_list('name', flat=True)
        cache.add(keyfmt(FLAGS_ALL_CACHE_KEY), flags)
    flag_values = [(f, flag_is_active(request, f)) for f in flags]

    switches = cache.get(keyfmt(SWITCHES_ALL_CACHE_KEY))
    if not switches:
        switches = Switch.objects.values_list('name', 'active')
        cache.add(keyfmt(SWITCHES_ALL_CACHE_KEY), switches)

    samples = cache.get(keyfmt(SAMPLES_ALL_CACHE_KEY))
    if not samples:
        samples = Sample.objects.values_list('name', flat=True)
        cache.add(keyfmt(SAMPLES_ALL_CACHE_KEY), samples)
    sample_values = [(s, sample_is_active(s)) for s in samples]

    flag_default = getattr(settings, 'WAFFLE_FLAG_DEFAULT', False)
    switch_default = getattr(settings, 'WAFFLE_SWITCH_DEFAULT', False)
    sample_default = getattr(settings, 'WAFFLE_SAMPLE_DEFAULT', False)

    return loader.render_to_string('waffle/waffle.js', {
        'flags': flag_values,
        'switches': switches,
        'samples': sample_values,
        'flag_default': flag_default,
        'switch_default': switch_default,
        'sample_default': sample_default,
    })
Example #31
def cache_is_hash_taken(hash: str) -> bool:
    return cache.get('hash:taken:{}'.format(hash)) is not None
Example #32
    def render(self, context):
        if self.is_variable:
            real_key = template.Variable(self.key).resolve(context)
        else:
            real_key = self.key

        if isinstance(self.template_name, template.Variable):
            real_tpl = self.template_name.resolve(context)
        else:
            real_tpl = self.template_name

        context['chunk_key'] = real_key
        if self.content_type == 'edit':
            context['tag'] = self.tag
        sources = dict(text=Chunk,
                       edit=Chunk,
                       image=Image,
                       media=Media,
                       group=Group)
        model = sources[self.content_type]

        obj = None
        # try to get cached object
        if self.cache_time > 0:
            cache_key = CACHE_PREFIX + self.content_type + get_language() + real_key
            obj = cache.get(cache_key)
        # otherwise get it from database
        if obj is None:
            if self.content_type == 'group':
                obj = model.objects.filter(key=real_key)
            else:
                try:
                    obj = model.objects.get(key=real_key)
                except model.DoesNotExist:
                    # this place we should create an empty object in database
                    obj = model(key=real_key)
                    if self.content_type == 'image':
                        # image object must exist, so save the stub picture
                        filename = join(dirname(__file__), '..', 'static',
                                        'chunks', 'stub.png')
                        # open in binary mode: the stub is a PNG image
                        with open(filename, 'rb') as f:
                            obj.image.save(basename(filename),
                                           File(f),
                                           save=True)
                    else:
                        obj.content = real_key
                        obj.save()

            # cache the object
            if self.cache_time == 0:
                logger.debug("Don't cache %s" % (real_key, ))
            else:
                if self.cache_time is None or self.cache_time == 'None':
                    logger.debug("Caching %s for the cache's default timeout" %
                                 real_key)
                    cache.set(cache_key, obj)
                else:
                    logger.debug("Caching %s for %s seconds" %
                                 (real_key, str(self.cache_time)))
                    cache.set(cache_key, obj, int(self.cache_time))

        # Eventually we want to pass the whole context to the template so that
        # users have the maximum of flexibility of what to do in there.
        if self.with_template:
            new_ctx = template.Context(context)
            if hasattr(obj, 'content'):
                obj.content = Template(obj.content).render(new_ctx)
            new_ctx.update({'obj': obj})
            tpl = template.loader.get_template(real_tpl)
            return tpl.render(new_ctx)
        elif hasattr(obj, 'image'):
            return obj.image.url
        elif hasattr(obj, 'content'):
            return obj.content
        else:
            return None
Example #33
 def last_seen(self):
     """
     Get last seen
     """
     return cache.get('seen_%s' % self.iduser.username)
Example #34
 def repost_count(self):
     if not hasattr(self, "_repost_count"):
         self._repost_count = cache.get(self.cache_key("repost_count"))
         if self._repost_count is None:
             self.cache_repost_count()
     return self._repost_count
Example #35
    def test_overview_cache_update(self):
        '''
        Test that the template cache for the overview is correctly reset when
        performing certain operations
        '''
        self.assertFalse(cache.get(cache_mapper.get_exercise_key(2)))
        self.assertFalse(cache.get(cache_mapper.get_exercise_muscle_bg_key(2)))
        self.assertFalse(
            cache.get(get_template_cache_name('muscle-overview', 2)))
        self.assertFalse(
            cache.get(get_template_cache_name('muscle-overview-mobile', 2)))
        self.assertFalse(
            cache.get(get_template_cache_name('muscle-overview-search', 2)))
        self.assertFalse(
            cache.get(get_template_cache_name('exercise-overview', 2)))
        self.assertFalse(
            cache.get(get_template_cache_name('exercise-detail-header', 2, 2)))
        self.assertFalse(
            cache.get(get_template_cache_name('exercise-detail-muscles', 2,
                                              2)))

        self.client.get(reverse('exercise:exercise:overview'))
        self.client.get(reverse('exercise:exercise:view', kwargs={'id': 2}))

        old_exercise = cache.get(cache_mapper.get_exercise_key(2))
        old_exercise_bg = cache.get(cache_mapper.get_exercise_muscle_bg_key(2))
        old_muscle_overview = cache.get(
            get_template_cache_name('muscle-overview', 2))
        old_exercise_overview = cache.get(
            get_template_cache_name('exercise-overview', 2))
        old_exercise_overview_mobile = cache.get(
            get_template_cache_name('exercise-overview-mobile', 2))
        old_detail_header = cache.get(
            get_template_cache_name('exercise-detail-header', 2, 2))
        old_detail_muscles = cache.get(
            get_template_cache_name('exercise-detail-muscles', 2, 2))

        exercise = Exercise.objects.get(pk=2)
        exercise.name = 'Very cool exercise 2'
        exercise.description = 'New description'
        exercise.muscles_secondary.add(Muscle.objects.get(pk=2))
        exercise.save()

        self.assertFalse(cache.get(cache_mapper.get_exercise_key(2)))
        self.assertFalse(cache.get(cache_mapper.get_exercise_muscle_bg_key(2)))
        self.assertFalse(
            cache.get(get_template_cache_name('muscle-overview', 2)))
        self.assertFalse(
            cache.get(get_template_cache_name('exercise-overview', 2)))
        self.assertFalse(
            cache.get(get_template_cache_name('exercise-overview-mobile', 2)))
        self.assertFalse(
            cache.get(get_template_cache_name('exercise-detail-header', 2, 2)))
        self.assertFalse(
            cache.get(get_template_cache_name('exercise-detail-muscles', 2,
                                              2)))

        self.client.get(reverse('exercise:exercise:overview'))
        self.client.get(reverse('exercise:muscle:overview'))
        self.client.get(reverse('exercise:exercise:view', kwargs={'id': 2}))

        new_exercise = cache.get(cache_mapper.get_exercise_key(2))
        new_exercise_bg = cache.get(cache_mapper.get_exercise_muscle_bg_key(2))
        new_muscle_overview = cache.get(
            get_template_cache_name('muscle-overview', 2))
        new_exercise_overview = cache.get(
            get_template_cache_name('exercise-overview', 2))
        new_exercise_overview_mobile = cache.get(
            get_template_cache_name('exercise-overview-mobile', 2))
        new_detail_header = cache.get(
            get_template_cache_name('exercise-detail-header', 2, 2))
        new_detail_muscles = cache.get(
            get_template_cache_name('exercise-detail-muscles', 2, 2))

        self.assertNotEqual(old_exercise.name, new_exercise.name)
        if not self.is_mobile:
            self.assertNotEqual(old_exercise_bg, new_exercise_bg)
            self.assertNotEqual(old_exercise_overview, new_exercise_overview)
            self.assertNotEqual(old_muscle_overview, new_muscle_overview)
            self.assertNotEqual(old_detail_header, new_detail_header)
            self.assertNotEqual(old_detail_muscles, new_detail_muscles)
        else:
            self.assertNotEqual(old_exercise_overview_mobile,
                                new_exercise_overview_mobile)
Example #36
 def test_already_queued(self, switch_is_active, delay):
     switch_is_active.return_value = True
     cache.set(settings.WIKI_REBUILD_TOKEN, True)
     schedule_rebuild_kb()
     assert cache.get(settings.WIKI_REBUILD_TOKEN)
     assert not delay.called
Example #37
 def test_task_queue(self, switch_is_active, delay):
     switch_is_active.return_value = True
     schedule_rebuild_kb()
     assert cache.get(settings.WIKI_REBUILD_TOKEN)
     assert delay.called
Example #38
def get_instance(model, instance_or_pk, timeout=None, using=None):
    """
    Returns the ``model`` instance with a primary key of ``instance_or_pk``.

    If the data is cached it will be returned from there, otherwise the regular
    Django ORM is queried for this instance and the data stored in the cache.

    If omitted, the timeout value defaults to
    ``settings.CACHE_TOOLBOX_DEFAULT_TIMEOUT`` instead of 0 (zero).

    Example::

        >>> get_instance(User, 1) # Cache miss
        <User: lamby>
        >>> get_instance(User, 1) # Cache hit
        <User: lamby>
        >>> User.objects.get(pk=1) == get_instance(User, 1)
        True

    """
    pk = getattr(instance_or_pk, 'pk', instance_or_pk)
    key = instance_key(model, instance_or_pk)
    data = cache.get(key)

    if data is not None:
        try:
            # Try and construct instance from dictionary
            instance = model(pk=pk, **data)

            # Ensure instance knows that it already exists in the database,
            # otherwise we will fail any uniqueness checks when saving the
            # instance.
            instance._state.adding = False

            # Specify database so that instance is setup correctly. We don't
            # namespace cached objects by their origin database, however.
            instance._state.db = using or DEFAULT_DB_ALIAS

            return instance
        except Exception:
            # Error when deserialising - remove from the cache; we will
            # fall back and return the underlying instance
            cache.delete(key)

    # Use the default manager so we are never filtered by a .get_queryset()
    instance = model._default_manager.using(using).get(pk=pk)

    data = {}
    for field in instance._meta.fields:
        # Harmless to save, but saves space in the dictionary - we already know
        # the primary key when we lookup
        if field.primary_key:
            continue

        if field.get_internal_type() == 'FileField':
            # Avoid problems with serializing FileFields
            # by only serializing the file name
            file = getattr(instance, field.attname)
            data[field.attname] = file.name
        else:
            data[field.attname] = getattr(instance, field.attname)

    if timeout is None:
        timeout = app_settings.CACHE_TOOLBOX_DEFAULT_TIMEOUT

    cache.set(key, data, timeout)

    return instance
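get_instance caches a plain dictionary of field values rather than a pickled instance, which keeps entries small, but the entry still has to be dropped when the row changes. A minimal sketch of that invalidation, assuming the instance_key helper used above; the signal wiring is an illustration, not part of the original module:

from django.db.models.signals import post_delete, post_save

def flush_cached_instance(sender, instance, **kwargs):
    # Drop the cached field dictionary so the next get_instance() call
    # repopulates it from the ORM.
    cache.delete(instance_key(sender, instance))

# Hypothetical wiring for one model:
# post_save.connect(flush_cached_instance, sender=User)
# post_delete.connect(flush_cached_instance, sender=User)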
Example #39
 def cache_get(key):
     return cache.get(key)
Example #40
def cache_get_short_url(hash: str) -> str:
    return cache.get('short:url:{}'.format(hash))
Example #41
def search(request):

    start_time = time.time()

    body = request.data

    player_name = body.get('player_name', None)
    platform = body.get('platform', None)

    player_response_cache_key = api_settings.PLAYER_RESPONSE_CACHE_KEY.format(
        player_name, platform)

    cached_player_response = cache.get(player_response_cache_key, None)

    if cached_player_response and 'data' in cached_player_response:
        return Response(cached_player_response)

    player_request_cache_key = api_settings.PLAYER_REQUEST_CACHE_KEY.format(
        player_name, platform)
    player_platform_url_cache_key = api_settings.PLAYER_PLATFORM_URL_CACHE_KEY.format(
        player_name, platform)
    player_player_url_cache_key = api_settings.PLAYER_URL_CACHE_KEY.format(
        player_name, platform)

    cached_platform_url = cache.get(player_platform_url_cache_key, None)
    cached_player_request = cache.get(player_request_cache_key, None)

    if not cached_platform_url:
        platform_url = build_url(platform)
        cache.set(player_platform_url_cache_key, platform_url, 60 * 30)
    else:
        platform_url = cached_platform_url

    cached_player_url = cache.get(player_player_url_cache_key, None)

    if not cached_player_url:
        player_url = build_player_url(base_url=platform_url,
                                      player_name=player_name)
        cache.set(player_player_url_cache_key, player_url, 60 * 30)
    else:
        player_url = cached_player_url

    if not cached_player_request or 'data' not in cached_player_request:
        player_request = make_request(player_url)

        if 'data' not in player_request:
            potential_current_player = Participant.objects.filter(
                player_name=player_name)
            if potential_current_player.exists():
                potential_current_player = potential_current_player.first()
                player_url = potential_current_player.player.api_url
                player_request = make_request(player_url)

        cache.set(player_request_cache_key, player_request, 120)
    else:
        player_request = cached_player_request

    ajax_data = {}

    player_data_length = 0

    if 'data' in player_request:

        api_ids = list(
            set(Match.objects.values_list('api_id', flat=True).distinct()))

        if isinstance(player_request['data'], list):
            player_id = player_request['data'][0]['id']
            player_data_length = (len(
                player_request['data'][0]['relationships']['matches']['data']
            ), [
                match['id'] for match in player_request['data'][0]
                ['relationships']['matches']['data']
                if get_player_match_id(player_id, match['id']) not in api_ids
            ])
        else:
            player_id = player_request['data']['id']
            player_data_length = (len(
                player_request['data']['relationships']['matches']['data']), [
                    match['id']
                    for match in player_request['data']['relationships']
                    ['matches']['data'] if get_player_match_id(
                        player_id, match['id']) not in api_ids
                ])

        if player_data_length[0] > 0:

            ajax_data['player_id'] = player_id
            ajax_data['player_name'] = player_name

            length_of_matches = len(player_data_length[1])

            if length_of_matches > 0:

                player_currently_processing_cache_key = api_settings.PLAYER_CURRENTLY_PROCESSING_CACHE_KEY.format(
                    player_id, platform)
                currently_processing = cache.get(
                    player_currently_processing_cache_key, None)

                if not currently_processing:
                    cache.set(player_currently_processing_cache_key, True, 60)
                    ajax_data['currently_processing'] = True
                    thread = threading.Thread(target=get_player_matches,
                                              kwargs={
                                                  'platform_url': platform_url,
                                                  'player_response':
                                                  player_request
                                              })
                    thread.daemon = True
                    thread.start()
                else:
                    ajax_data['currently_processing'] = False

            else:
                ajax_data[
                    'message'] = "No new matches to process for this user."
                ajax_data['no_new_matches'] = True
                if cached_player_url:
                    cache.touch(player_player_url_cache_key, 120)
                if cached_platform_url:
                    cache.touch(player_platform_url_cache_key, 120)
                if cached_player_request:
                    cache.touch(player_request_cache_key, 120)

        else:
            ajax_data[
                'error'] = "Sorry, looks like this player has not played any matches in the last 14 days."

    else:
        ajax_data['error'] = "Sorry, looks like this player does not exist."

    cache.set(player_response_cache_key, ajax_data, 120)
    return Response(ajax_data)
Example #42
    def get_or_create_list(self, key, paramdict, forcerender=True):
        # returns a list of rendered objects
        cached = memcache.get(key)
        if cached is not None and not forcerender:
            cached_list = cached[0]
            tot_items = cached[1]
        elif cached is None or forcerender:
            if paramdict == {}:
                key, rtype, paramdict = interpret_hash(key)
            ctype_id = paramdict.get('TYPE_KEY', None)
            obj_id = paramdict.get('OBJ_KEY', None)
            start = paramdict.get('START_KEY', None)
            end = paramdict.get('END_KEY', None)
            dimension = paramdict.get('DIM_KEY', None)
            ctype_list = paramdict.get('CTYPE_KEY', None)
            phasekey = paramdict.get('PHASE_KEY', None)

            if ctype_id is not None and obj_id is not None:
                content_type = ContentType.objects.get(pk=ctype_id)
                parent = content_type.get_object_for_this_type(pk=obj_id)
            else:
                parent = None

            if start is None or end is None:
                paramdict['START_KEY'] = 0
                paramdict['END_KEY'] = 10

            if dimension is None:
                dimension = 'h'
                paramdict['DIM_KEY'] = 'hn'

            #later these functions can be rendered via some loosely coupled method
            if self.template == 'issues':
                func = get_ranked_list
                update = True
            elif self.template == 'comments':
                func = get_comments
                update = False
            elif self.template == 'yea':
                func = get_argument_list
                dimension = "yea"
                update = False
            elif self.template == 'nay':
                func = get_argument_list
                dimension = "nay"
                update = False
            elif self.template == 'children':
                func = get_ranked_list
                update = False
            elif self.template == 'topics':
                func = get_topics
                update = True
            elif self.template == 'users':
                func = get_users
                update = True
            else:
                func = get_ranked_list
                update = False
            #TODO
            #elif self.template == 'users':
            #    func = get_topics
            #    update = True

            kwr = {
                'parent': parent,
                'start': paramdict['START_KEY'],
                'end': paramdict['END_KEY'],
                'dimension': dimension,
                'ctype_list': ctype_list
            }
            if phasekey is not None:
                kwr['phase'] = phasekey
            cached_list, tot_items = func(**kwr)
            if update:
                codes = memcache.get("rank_update_codes")
                #stores all the encoded pages for tasks/update_ranks
                newkey, rendertype, paramdict = interpret_hash(key)
                if codes is not None:
                    codes[key] = paramdict
                    memcache.set("rank_update_codes", codes)
                else:
                    codes = {key: paramdict}  # start the code map with this page's params
                    memcache.set("rank_update_codes", codes)
                #save newly rendered list
            memcache.set(key, (cached_list, tot_items))
        return cached_list, tot_items
Example #43
def retrieve_season_stats(request):

    body = request.data

    player_id = body.get('player_id', None)
    ranked = body.get('ranked', None)

    is_ranked = ranked == 'true'

    if is_ranked:
        season_stats_cache_key = api_settings.PLAYER_RANKED_SEASON_STATS_CACHE_KEY.format(
            player_id)
    else:
        season_stats_cache_key = api_settings.PLAYER_SEASON_STATS_CACHE_KEY.format(
            player_id)

    cached_ajax_data = cache.get(season_stats_cache_key, None)

    if cached_ajax_data:
        return Response(cached_ajax_data)

    platform = body.get('platform', None)

    player = get_object_or_404(Player, api_id=player_id)

    retrieve_player_season_stats(player_id, platform, is_ranked)

    all_game_modes = list(
        set(
            PlayerSeasonStats.objects.filter(
                mode__icontains='squad').values_list('mode', flat=True)))

    season_stats_queryset = PlayerSeasonStats.objects.filter(
        player=player,
        season__is_current=True,
        season__platform=platform,
        is_ranked=is_ranked).select_related('season')

    modes_not_added = []

    if is_ranked:

        ajax_data = [{
            f"ranked_{x.mode.lower().replace('-', '_')}_season_stats":
            correct_mode(x.mode.replace('_', ' ')).upper(),
            f"ranked_{x.mode.lower().replace('-', '_')}_season_matches":
            "{} {}".format(x.rounds_played, 'Matches Played'),
            f"ranked_{x.mode.lower().replace('-', '_')}_season_kills__text":
            'Kills',
            f"ranked_{x.mode.lower().replace('-', '_')}_season_kills__figure":
            x.kills,
            f"ranked_{x.mode.lower().replace('-', '_')}_season_damage__text":
            'Damage Dealt',
            f"ranked_{x.mode.lower().replace('-', '_')}_season_damage__figure":
            str(x.damage_dealt),
            f"ranked_{x.mode.lower().replace('-', '_')}_season_longest_kill__text":
            'Longest Kill',
            f"ranked_{x.mode.lower().replace('-', '_')}_season_longest_kill__figure":
            str(x.longest_kill),
            f"ranked_{x.mode.lower().replace('-', '_')}_season_headshots__text":
            'Headshot kills',
            f"ranked_{x.mode.lower().replace('-', '_')}_season_headshots__figure":
            x.headshot_kills
        } for x in season_stats_queryset]

    else:

        ajax_data = [{
            f"{x.mode.lower().replace('-', '_')}_season_stats":
            correct_mode(x.mode.replace('_', ' ')).upper(),
            f"{x.mode.lower().replace('-', '_')}_season_matches":
            "{} {}".format(x.rounds_played, 'Matches Played'),
            f"{x.mode.lower().replace('-', '_')}_season_kills__text":
            'Kills',
            f"{x.mode.lower().replace('-', '_')}_season_kills__figure":
            x.kills,
            f"{x.mode.lower().replace('-', '_')}_season_damage__text":
            'Damage Dealt',
            f"{x.mode.lower().replace('-', '_')}_season_damage__figure":
            str(x.damage_dealt),
            f"{x.mode.lower().replace('-', '_')}_season_longest_kill__text":
            'Longest Kill',
            f"{x.mode.lower().replace('-', '_')}_season_longest_kill__figure":
            str(x.longest_kill),
            f"{x.mode.lower().replace('-', '_')}_season_headshots__text":
            'Headshot kills',
            f"{x.mode.lower().replace('-', '_')}_season_headshots__figure":
            x.headshot_kills
        } for x in season_stats_queryset]

    if len(ajax_data) < 6:
        modes_not_added = []
        for x in all_game_modes:
            for y in ajax_data:
                if is_ranked:
                    dict_key = f"ranked_{x.lower().replace('-', '_')}_season_stats"
                else:
                    dict_key = f"{x.lower().replace('-', '_')}_season_stats"

                if dict_key not in y:
                    modes_not_added.append(x)

        if is_ranked:
            ajax_data += [{
                'container':
                f"ranked_{x.lower().replace('-', '_')}_season_stats_container",
                'text':
                f"No ranked data available for {correct_mode(x.replace('_', ' ')).upper()}"
            } for x in modes_not_added]
        else:
            ajax_data += [{
                'container':
                f"{x.lower().replace('-', '_')}_season_stats_container",
                'text':
                f"No data available for {correct_mode(x.replace('_', ' ')).upper()}"
            } for x in modes_not_added]

    cache.set(season_stats_cache_key, ajax_data, 60 * 20)

    return Response(ajax_data)
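
The view above only writes to the cache; a read-through guard at the top of the view would serve the 20-minute payload without rebuilding it. A minimal sketch, assuming season_stats_cache_key is built from a format-string setting like the other cache keys in these examples:

from django.core.cache import cache
from rest_framework.response import Response

def serve_cached_season_stats(season_stats_cache_key):
    # Hypothetical helper: return the cached payload if present, else None
    # so the caller falls through to the rebuild path shown above.
    cached_ajax_data = cache.get(season_stats_cache_key, None)
    if cached_ajax_data is not None:
        return Response(cached_ajax_data)
    return None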
Example #44
0
    def cache_get(self, key, force=False):
        '''
        Gets value from django cache.
        '''
        from django.core.cache import cache
        return cache.get(self.cache_key(key))
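
A hypothetical write-side counterpart, assuming the same self.cache_key() namespacing; it is not part of the original class:

    def cache_set(self, key, value, timeout=None):
        '''
        Stores a value under the same namespaced key used by cache_get
        (assumed counterpart, not from the original class).
        '''
        from django.core.cache import cache
        cache.set(self.cache_key(key), value, timeout)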
Example #45
0
def footer(request):
    """Retrieve the branded footer.

    This end-point provides information about the site footer,
    allowing for consistent display of the footer across other sites
    (for example, on the marketing site and blog).

    It can be used in one of two ways:
    1) A client renders the footer from a JSON description.
    2) A browser loads an HTML representation of the footer
        and injects it into the DOM.  The HTML includes
        CSS and JavaScript links.

    In case (2), we assume that the following dependencies
    are included on the page:
    a) JQuery (same version as used in edx-platform)
    b) font-awesome (same version as used in edx-platform)
    c) Open Sans web fonts

    Example: Retrieving the footer as JSON

        GET /api/branding/v1/footer
        Accepts: application/json

        {
            "navigation_links": [
                {
                  "url": "http://example.com/about",
                  "name": "about",
                  "title": "About"
                },
                # ...
            ],
            "social_links": [
                {
                    "url": "http://example.com/social",
                    "name": "facebook",
                    "icon-class": "fa-facebook-square",
                    "title": "Facebook",
                    "action": "Sign up on Facebook!"
                },
                # ...
            ],
            "mobile_links": [
                {
                    "url": "http://example.com/android",
                    "name": "google",
                    "image": "http://example.com/google.png",
                    "title": "Google"
                },
                # ...
            ],
            "legal_links": [
                {
                    "url": "http://example.com/terms-of-service.html",
                    "name": "terms_of_service",
                    "title": "Terms of Service"
                },
                # ...
            ],
            "openedx_link": {
                "url": "http://open.edx.org",
                "title": "Powered by Open edX",
                "image": "http://example.com/openedx.png"
            },
            "logo_image": "http://example.com/static/images/logo.png",
            "copyright": "EdX, Open edX and their respective logos are trademarks or registered trademarks of edX Inc."
        }


    Example: Retrieving the footer as HTML

        GET /api/branding/v1/footer
        Accepts: text/html


    Example: Including the footer with the "Powered by Open edX" logo

        GET /api/branding/v1/footer?show-openedx-logo=1
        Accepts: text/html


    Example: Retrieving the footer in a particular language

        GET /api/branding/v1/footer?language=en
        Accepts: text/html


    Example: Retrieving the footer with a language selector

        GET /api/branding/v1/footer?include-language-selector=1
        Accepts: text/html


    Example: Retrieving the footer with all JS and CSS dependencies (for testing)

        GET /api/branding/v1/footer?include-dependencies=1
        Accepts: text/html

    """
    if not branding_api.is_enabled():
        raise Http404

    # Use the content type to decide what representation to serve
    accepts = request.META.get('HTTP_ACCEPT', '*/*')

    # Show the OpenEdX logo in the footer
    show_openedx_logo = bool(request.GET.get('show-openedx-logo', False))

    # Include JS and CSS dependencies
    # This is useful for testing the end-point directly.
    include_dependencies = bool(request.GET.get('include-dependencies', False))

    # Override the language if necessary
    language = request.GET.get('language', translation.get_language())
    try:
        language = get_supported_language_variant(language)
    except LookupError:
        language = settings.LANGUAGE_CODE

    # Include a language selector
    include_language_selector = request.GET.get('include-language-selector',
                                                '') == '1'

    # Render the footer information based on the extension
    if 'text/html' in accepts or '*/*' in accepts:
        cache_params = {
            'language': language,
            'show_openedx_logo': show_openedx_logo,
            'include_dependencies': include_dependencies
        }
        if include_language_selector:
            cache_params['language_selector_options'] = ','.join(
                sorted([lang.code for lang in released_languages()]))
        cache_key = u"branding.footer.{params}.html".format(
            params=six.moves.urllib.parse.urlencode(cache_params))

        content = cache.get(cache_key)
        if content is None:
            with translation.override(language):
                content = _render_footer_html(request, show_openedx_logo,
                                              include_dependencies,
                                              include_language_selector,
                                              language)
                cache.set(cache_key, content, settings.FOOTER_CACHE_TIMEOUT)
        return HttpResponse(content,
                            status=200,
                            content_type="text/html; charset=utf-8")

    elif 'application/json' in accepts:
        cache_key = u"branding.footer.{params}.json".format(
            params=six.moves.urllib.parse.urlencode(
                {
                    'language': language,
                    'is_secure': request.is_secure(),
                }))
        footer_dict = cache.get(cache_key)
        if footer_dict is None:
            with translation.override(language):
                footer_dict = branding_api.get_footer(
                    is_secure=request.is_secure())
                cache.set(cache_key, footer_dict,
                          settings.FOOTER_CACHE_TIMEOUT)
        return JsonResponse(footer_dict,
                            200,
                            content_type="application/json; charset=utf-8")

    else:
        return HttpResponse(status=406)
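
Since the endpoint content-negotiates on the Accept header, a client chooses the representation explicitly. A sketch with the requests library; the host name is a placeholder:

import requests

BASE_URL = "https://lms.example.com"  # placeholder host

# JSON description of the footer, as in the docstring example above.
footer_json = requests.get(
    BASE_URL + "/api/branding/v1/footer",
    headers={"Accept": "application/json"},
).json()

# HTML representation with the "Powered by Open edX" logo, in English.
footer_html = requests.get(
    BASE_URL + "/api/branding/v1/footer",
    params={"show-openedx-logo": 1, "language": "en"},
    headers={"Accept": "text/html"},
).text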
Example #46
0
def match_detail(request, match_id):

    match_detail_cache_key = api_settings.MATCH_DETAIL_CACHE_KEY.format(
        match_id)
    match_detail_response = cache.get(match_detail_cache_key, None)

    if match_detail_response:
        return Response(match_detail_response)

    matches = Match.objects.only('api_id')
    match_exists = matches.filter(api_id__iexact=match_id)
    telemetry_objects = Telemetry.objects.filter(match__in=matches)

    split = match_id.split('_')
    account_id = split[0]
    match_id = split[1]

    if match_exists.exists():
        current_player = get_object_or_404(Player, api_id=account_id)
        player_name = Participant.objects.filter(
            player=current_player).latest('id').player_name

        match = match_exists.first()
        telemetry_exists = telemetry_objects.filter(match=match)

        if not telemetry_exists.exists():

            match_url = match.api_url

            if not match_url or match_id not in match_url:
                platform_url = current_player.platform_url
                match_url = build_match_url(platform_url, match_id)

            match_json = make_request(match_url)
            match_type = match_json['data']['attributes']['matchType']

            get_match_telemetry_from_match(match_json=match_json,
                                           match=match,
                                           return_early=False)

            telemetry = telemetry_objects.filter(match=match)
            telemetry = telemetry.first()
        else:
            telemetry = telemetry_exists.first()

        telemetry_events = TelemetryEvent.objects.filter(telemetry=telemetry)

        log_match_start = get_object_or_404(telemetry_events,
                                            event_type__iexact='LogMatchStart')
        total_match_kills = get_object_or_404(
            telemetry_events, event_type__iexact='LogTotalMatchKills')
        log_match_end = get_object_or_404(telemetry_events,
                                          event_type__iexact='LogMatchEnd')
        roster_telem = get_object_or_404(TelemetryRoster, telemetry=telemetry)
        roster_participant = RosterParticipant.objects.filter(
            roster__match=match, participant__player=current_player).first()

        # Normalize both timestamps: drop the UTC offset and the microseconds
        # so they can be re-parsed with a fixed format string.
        log_match_start_timestamp = str(parse(log_match_start.timestamp))
        log_match_start_timestamp = log_match_start_timestamp.split(
            '+')[0].split('.')[0]

        log_match_end_timestamp = str(parse(log_match_end.timestamp))
        log_match_end_timestamp = log_match_end_timestamp.split(
            '+')[0].split('.')[0]

        FMT = '%Y-%m-%d %H:%M:%S'

        elapsed_time = datetime.strptime(log_match_end_timestamp,
                                         FMT) - datetime.strptime(
                                             log_match_start_timestamp, FMT)

        heals_items_used = telemetry_events.filter(
            event_type__iexact='LogItemUseMed').count()
        boost_items_used = telemetry_events.filter(
            event_type__iexact='LogItemUseBoost').count()

        ai_events = telemetry_events.filter(event_type__iexact='AICount')
        player_events = telemetry_events.filter(
            event_type__iexact='PlayerCount')

        ais = False
        ai_count = 0
        player_count = 0
        ai_percentage = 0.00

        if ai_events.exists():
            ai_count = int(ai_events.first().description)
            ais = True

        if player_events.exists():
            player_count = int(player_events.first().description)

        total_count = ai_count + player_count

        if total_count > 0:
            ai_percentage = round((ai_count / total_count) * 100)
            player_percentage = round((player_count / total_count) * 100)
        else:
            # No count events at all; avoid dividing by zero.
            player_percentage = 0.00

        telemetry_excluding_some_events = telemetry_events.exclude(
            Q(event_type__iexact='LogTotalMatchKills')
            | Q(event_type__iexact='Roster') | Q(timestamp__isnull=True))

        match_map_url = match.map.image_url
        map_name = match.map.name

        telemetry_data = {
            'telemetry_data': {
                'platform': get_platform(current_player.platform_url),
                'match_data': {
                    'match_id':
                    match_id,
                    'match_elapsed_time':
                    f'{elapsed_time} minutes',
                    'match_map_name':
                    map_name,
                    'map_image':
                    match_map_url,
                    'time_since':
                    timesince(match.created),
                    'events': [{
                        'timestamp':
                        datetime.strftime(parse(x.timestamp), '%H:%M:%S'),
                        'event':
                        x.description,
                        'killer_x_cord':
                        x.killer_x_cord,
                        'killer_y_cord':
                        x.killer_y_cord,
                        'victim_x_cord':
                        x.victim_x_cord,
                        'victim_y_cord':
                        x.victim_y_cord
                    } for x in telemetry_excluding_some_events],
                    'player_breakdown': {
                        'ais': ais,
                        'ai_count': ai_count,
                        'ai_percentage': ai_percentage,
                        'player_count': player_count,
                        'player_percentage': player_percentage,
                        'total_count': total_count,
                        'rosters': roster_telem.json,
                    }
                },
                'player_data': {
                    'player_kills': total_match_kills.description,
                    'player_damage': roster_participant.participant.damage,
                    'knocks': roster_participant.participant.knocks,
                    'player_name': player_name,
                    'boost_items_used': boost_items_used,
                    'heals_items_used': heals_items_used,
                }
            }
        }

        cache.set(match_detail_cache_key, telemetry_data, 60 * 10)

        return Response(telemetry_data)
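
The elapsed-time computation above round-trips the parsed timestamps through strings to drop the UTC offset and microseconds. An equivalent sketch that stays in datetime objects:

from dateutil.parser import parse

start = parse(log_match_start.timestamp).replace(tzinfo=None, microsecond=0)
end = parse(log_match_end.timestamp).replace(tzinfo=None, microsecond=0)
elapsed_time = end - start  # a timedelta; no strptime round-trip needed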
Example #47
0
def run_ingest_consumer(
    commit_batch_size,
    consumer_group,
    consumer_type,
    max_batch_time_seconds,
    is_shutdown_requested=lambda: False,
):
    """
    Handles events coming via a kafka queue.

    The events should have already been processed (normalized... ) upstream (by Relay).

    :param commit_batch_size: the number of messages the consumer will try to process/commit in one loop
    :param consumer_group: kafka consumer group name
    :param consumer_type: an enumeration defining the types of ingest messages, see `ConsumerType`
    :param max_batch_time_seconds: the maximum number of seconds a consume operation will block waiting
        for the specified commit_batch_size number of messages to appear in the queue before it returns. At the
        end of that time the consume operation returns however many messages it has (including
        an empty list if no new messages are available).
    :param is_shutdown_requested: Callable[[], bool] predicate checked after each loop; if it returns
        True the forwarder stops (by default it is lambda: False). In normal operation this should be left at its
        default. For unit testing it offers a way to cleanly stop the forwarder once some condition is achieved.
    """

    logger.debug("Starting ingest-consumer...")
    consumer = _create_consumer(consumer_group, consumer_type, settings)

    consumer.subscribe([ConsumerType.get_topic_name(consumer_type, settings)])
    # setup a flag to mark termination signals received, see below why we use an array
    termination_signal_received = [False]

    def termination_signal_handler(_sig_id, _frame):
        """
        Function to use a hook for SIGINT and SIGTERM

        This signal handler only remembers that the signal was emitted.
        The batch processing loop detects that the signal was emitted
        and stops once the whole batch is processed.
        """
        # We need to use an array so that termination_signal_received is not a
        # local variable assignment, but a lookup in the closure's outer scope.
        termination_signal_received[0] = True

    with set_termination_request_handlers(termination_signal_handler):
        while not (is_shutdown_requested() or termination_signal_received[0]):
            # get up to commit_batch_size messages
            messages = consumer.consume(num_messages=commit_batch_size,
                                        timeout=max_batch_time_seconds)

            for message in messages:
                message_error = message.error()
                if message_error is not None:
                    logger.error(
                        "Received message with error on %s, error:'%s'",
                        consumer_type,
                        message_error,
                    )
                    raise ValueError("Bad message received from consumer",
                                     consumer_type, message_error)

                message = msgpack.unpackb(message.value(), use_list=False)
                body = message["payload"]
                start_time = float(message["start_time"])
                event_id = message["event_id"]
                project_id = message["project_id"]

                # check that we haven't already processed this event (a previous instance of the forwarder
                # died before it could commit the event queue offset)
                deduplication_key = "ev:{}:{}".format(project_id, event_id)
                if cache.get(deduplication_key) is not None:
                    logger.warning(
                        "pre-process-forwarder detected a duplicated event"
                        " with id:%s for project:%s.",
                        event_id,
                        project_id,
                    )
                    continue

                cache_key = cache_key_from_project_id_and_event_id(
                    project_id=project_id, event_id=event_id)
                cache_timeout = 3600
                default_cache.set(cache_key, body, cache_timeout, raw=True)
                preprocess_event.delay(cache_key=cache_key,
                                       start_time=start_time,
                                       event_id=event_id)

                # remember for one hour that we saved this event (deduplication protection)
                cache.set(deduplication_key, "", 3600)

            if len(messages) > 0:
                # we have read some messages in the previous consume, commit the offset
                consumer.commit(asynchronous=False)

    logger.debug("Closing ingest-consumer %s...", consumer_type)
    consumer.close()
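
The is_shutdown_requested hook makes the consume loop testable without sending real signals. A usage sketch; ConsumerType.Events is assumed to be one of the enumeration's values:

batches_seen = [0]

def stop_after_two_batches():
    # Checked once per loop iteration; returning True stops the forwarder.
    batches_seen[0] += 1
    return batches_seen[0] > 2

run_ingest_consumer(
    commit_batch_size=100,
    consumer_group="test-ingest-consumer",
    consumer_type=ConsumerType.Events,
    max_batch_time_seconds=1,
    is_shutdown_requested=stop_after_two_batches,
)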
Example #48
0
def retrieve_matches(request):

    body = request.data
    player_id = body.get('player_id', None)
    player_match_data_cache = api_settings.PLAYER_MATCH_DATA_CACHE_KEY.format(
        player_id)

    cached_ajax_data = cache.get(player_match_data_cache, None)

    if cached_ajax_data:
        return Response(cached_ajax_data)

    ajax_data = {}

    current_player = Player.objects.filter(api_id=player_id).first()

    if current_player:

        match_data = get_match_data(player_id, current_player.id)

        if match_data.exists():

            match_ids = match_data.values_list('match_id',
                                               flat=True).distinct()

            ajax_data = {
                'data': [{
                    'id':
                    roster.match.id,
                    'map':
                    roster.match.map.name if roster.match.map else None,
                    'mode':
                    f'{roster.match.mode.upper()}<br>' +
                    '<span class="badge badge-success">Ranked</span>'
                    if roster.match.match_type
                    and 'comp' in roster.match.match_type else
                    f'{roster.match.mode.upper()}<br><span class="badge badge-secondary">Not Ranked</span>',
                    'raw_mode':
                    f'{roster.match.mode.upper()}',
                    'date_created':
                    datetime.strftime(roster.match.created,
                                      '%d/%m/%Y %H:%M:%S'),
                    'time_since':
                    timesince(roster.match.created),
                    'team_details':
                    ''.join([
                        f"{x.player_name}: {x.kills} kill(s) | {x.damage} damage<br>"
                        for x in roster.participants.all()
                    ]),
                    'team_details_object': [{
                        'kills': x.kills,
                        'player_name': x.player_name,
                        'damage': x.damage
                    } for x in roster.participants.all()],
                    'team_placement':
                    player_placement_format(roster.match.total_teams,
                                            roster.placement),
                    'actions':
                    f'<a href="/match_detail/{roster.match.api_id}/" class="btn btn-link btn-sm active" role="button">View Match</a>',
                    'btn_link':
                    f"/match_detail/{roster.match.api_id}/"
                } for roster in match_data],
                'api_id':
                current_player.api_id,
                'match_ids':
                match_ids
            }

            cache.set(player_match_data_cache, ajax_data, 60)

        else:

            message = "It would seem no TPP/FPP (SOLO, DUO, SQUAD) matches exist for this user for the last 14 days."

            ajax_data = {'error': message, 'api_id': current_player.api_id}

    # Returning at function level also covers the case where no player
    # matches player_id.
    return Response(ajax_data)
Example #49
0
def get_config(key=''):
    # Read the cache once; comparing against None (rather than truthiness)
    # keeps falsy config values from hitting the database on every call.
    value = cache.get(key)
    if value is None:
        config = Config.objects.first()
        value = getattr(config, key)
        cache.set(key, value, timeout=30)
    return value
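
Django's cache.get_or_set() (available since Django 1.9) expresses the same read-through in a single call and accepts a callable default, which keeps the database hit lazy. An equivalent sketch:

def get_config_via_get_or_set(key=''):
    return cache.get_or_set(
        key, lambda: getattr(Config.objects.first(), key), timeout=30)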
Example #50
0
    def get(self, *raw_args, **raw_kwargs):
        """
        Return the data for this function (using the cache if possible).

        This method is not intended to be overridden
        """
        # We pass args and kwargs through a filter to allow them to be
        # converted into values that can be pickled.
        args = self.prepare_args(*raw_args)
        kwargs = self.prepare_kwargs(**raw_kwargs)

        # Build the cache key and attempt to fetch the cached item
        key = self.key(*args, **kwargs)
        item = cache.get(key)

        if item is None:
            # Cache MISS - we can either:
            # a) fetch the data immediately, blocking execution until
            #    the fetch has finished, or
            # b) trigger an async refresh and return an empty result
            if self.should_missing_item_be_fetched_synchronously(
                    *args, **kwargs):
                logger.debug(("Job %s with key '%s' - cache MISS - running "
                              "synchronous refresh"), self.class_path, key)
                return self.refresh(*args, **kwargs)
            else:
                logger.debug(("Job %s with key '%s' - cache MISS - triggering "
                              "async refresh and returning empty result"),
                             self.class_path, key)
                # To avoid cache hammering (ie lots of identical Celery tasks
                # to refresh the same cache item), we reset the cache with an
                # empty result which will be returned until the cache is
                # refreshed.
                empty = self.empty()
                self.cache_set(key, self.timeout(*args, **kwargs), empty)
                self.async_refresh(*args, **kwargs)
                return empty

        expiry, data = item
        delta = time.time() - expiry
        if delta > 0:
            # Cache HIT but STALE expiry - we can either:
            # a) fetch the data immediately, blocking execution until
            #    the fetch has finished, or
            # b) trigger a refresh but allow the stale result to be
            #    returned this time.  This is normally acceptable.
            if self.should_stale_item_be_fetched_synchronously(
                    delta, *args, **kwargs):
                logger.debug(
                    ("Job %s with key '%s' - STALE cache hit - running "
                     "synchronous refresh"), self.class_path, key)
                return self.refresh(*args, **kwargs)
            else:
                logger.debug(
                    ("Job %s with key '%s' - STALE cache hit - triggering "
                     "async refresh and returning stale result"),
                    self.class_path, key)
                # We replace the item in the cache with a 'timeout' expiry - this
                # prevents cache hammering but guards against a 'limbo' situation
                # where the refresh task fails for some reason.
                timeout = self.timeout(*args, **kwargs)
                self.cache_set(key, timeout, data)
                self.async_refresh(*args, **kwargs)
        else:
            logger.debug("Job %s with key '%s' - cache HIT", self.class_path,
                         key)
        return data
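
A hypothetical subclass sketch showing where the hooks referenced above usually live, assuming the enclosing class is named Job; twitter_client and the numbers are illustrative:

class RecentTweetsJob(Job):
    lifetime = 600  # seconds before a cached result counts as stale

    def fetch(self, username):
        # The expensive call that refresh()/async_refresh() delegate to.
        return twitter_client.recent_tweets(username)

    def empty(self):
        # Placeholder returned (and cached) on a miss while an async
        # refresh runs, to avoid cache hammering.
        return []

    def should_missing_item_be_fetched_synchronously(self, *args, **kwargs):
        # Never block the request on a cold cache in this sketch.
        return False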
Example #51
0
def cache_get(*keys, **kwargs):
    """
    Gets the object identified by all ``keys`` from the cache.

    kwargs:
        default:
            Default value used if the object is not in the cache. If the object
            is not found and ``default`` is not set or is None, the exception
            ``NotCachedError`` is raised with the attribute ``.key = keys``.
        other kwargs:
            Unknown key=val is interpreted like two additional keys: (key, val)
    """
    if 'default' in kwargs:
        default_value = kwargs.pop('default')
        use_default = True
    else:
        use_default = False

    key = cache_key(keys, **kwargs)

    if not cache_enabled():
        raise NotCachedError(key)
    else:
        global CACHE_CALLS, CACHE_HITS, REQUEST_CACHE
        CACHE_CALLS += 1
        if CACHE_CALLS == 1:
            cache_require()

        obj = None
        tid = -1
        if REQUEST_CACHE['enabled']:
            tid = cache_get_request_uid()
            if tid > -1:
                try:
                    obj = REQUEST_CACHE[tid][key]
                    log.debug('Got from request cache: %s', key)
                except KeyError:
                    pass

        if obj is None:
            obj = cache.get(key)

        if obj and isinstance(obj, CacheWrapper):
            CACHE_HITS += 1
            CACHED_KEYS[key] = True
            log.debug('got cached [%i/%i]: %s', CACHE_CALLS, CACHE_HITS, key)
            if obj.inprocess:
                raise MethodNotFinishedError(obj.val)

            cache_set_request(key, obj, uid=tid)

            return obj.val
        else:
            try:
                del CACHED_KEYS[key]
            except KeyError:
                pass

            if use_default:
                return default_value

            raise NotCachedError(key)
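
A usage sketch for cache_get; load_profile is an illustrative loader and the keys are arbitrary:

try:
    profile = cache_get('member', 'profile', member_id=42)
except NotCachedError:
    profile = load_profile(42)  # illustrative; not part of this module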
Example #52
0
def breadcrumbs(context, obj):
    """
    """
    if isinstance(obj, Category):
        cache_key = "%s-category-breadcrumbs-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, obj.slug)
        # The cached value is the full result dict, not just the objects list.
        result = cache.get(cache_key)
        if result is not None:
            return result

        objects = []
        while obj is not None:
            objects.insert(0, {
                "name": obj.name,
                "url": obj.get_absolute_url(),
            })
            obj = obj.parent

        result = {
            "objects": objects,
            "STATIC_URL": context.get("STATIC_URL"),
        }
        cache.set(cache_key, result)

    elif isinstance(obj, Product):
        try:
            if obj.is_variant():
                parent_product = obj.parent
            else:
                parent_product = obj
        except ObjectDoesNotExist:
            return []
        else:
            request = context.get("request")
            category = obj.get_current_category(request)
            if category is None:
                return []
            else:
                objects = [{
                    "name": obj.get_name(),
                    "url": obj.get_absolute_url(),
                }]
                while category is not None:
                    objects.insert(0, {
                        "name": category.name,
                        "url": category.get_absolute_url(),
                    })
                    category = category.parent

        result = {
            "objects": objects,
            "STATIC_URL": context.get("STATIC_URL"),
        }

    elif isinstance(obj, Page):
        objects = []
        objects.append({
            "name": _(u"Information"),
            "url": reverse("muecke_pages")})
        objects.append({"name": obj.title})

        result = {
            "objects": objects,
            "STATIC_URL": context.get("STATIC_URL"),
        }
    else:
        result = {
            "objects": ({"name": obj},),
            "STATIC_URL": context.get("STATIC_URL"),
        }

    return result
Example #53
0
    def test_compressed(self):
        cache.set("key", "a" * 11)
        assert cache.get("key") == "a" * 11
Example #54
0
    def get(self, request, *args, **kwargs):
        tag = request.GET.get('tag', None)
        productCategory = request.GET.get('productCategory', None)
        sizeTypes = request.GET.get('sizeType', None)
        size = request.GET.getlist('size', None)
        prices = request.GET.getlist('prices', None)
        releaseYears = request.GET.getlist('releaseYear', None)
        sort = request.GET.get('sort', None)
        page_limit = int(request.GET.get('limit', 40))
        page_num = int(request.GET.get('page', 1))

        response_data_set = {'Pagination': {}, 'Product': []}

        all_product_list = cache.get('all_product_list')
        if not all_product_list:
            temp_product_list = []
            all_products = Product.objects.select_related(
                'release_date').order_by('id')
            for product in all_products:
                average_price = product.average_price if product.average_price else 0
                volatility = product.volatility if product.volatility else 0.0
                price_premium = product.price_premium if product.price_premium else 0
                temp_product_list.append({
                    'product_id': product.id,
                    'name': product.name,
                    'release_date': product.release_date.date,
                    'average_price': average_price,
                    'price_premium': price_premium,
                    'lowest_ask': [],
                    'highest_bid': [],
                    'sale_count': 0
                })

            all_images = Image.objects.filter(image_type=1).order_by('product')
            for image in all_images:
                temp_product_list[image.product_id - 1]['image'] = image.url

            all_orders = Order.objects.all().select_related(
                'ask', 'bid', 'ask__product_size',
                'bid__product_size').order_by('-date')
            for order in all_orders:
                temp_product_list[order.ask.product_size.product_id -
                                  1]['lowest_ask'].append(order.ask.price)
                temp_product_list[order.bid.product_size.product_id -
                                  1]['highest_bid'].append(order.bid.price)
                temp_product_list[order.ask.product_size.product_id -
                                  1]['sale_count'] += 1

            for product in temp_product_list:
                lowest_ask = int(min(product['lowest_ask'])) if len(
                    product['lowest_ask']) > 0 else 0
                highest_bid = int(max(product['highest_bid'])) if len(
                    product['highest_bid']) > 0 else 0
                last_sale = int(product['lowest_ask'][0]) if len(
                    product['lowest_ask']) > 0 else 0
                product['lowest_ask'] = lowest_ask
                product['highest_bid'] = highest_bid
                product['product_ask_price'] = lowest_ask
                product['most_popular'] = product['sale_count']
                product['last_sales'] = last_sale

            cache.set('all_product_list', temp_product_list)
            all_product_list = temp_product_list

        if sort == 'most_popular':
            all_product_list = sorted(all_product_list,
                                      reverse=True,
                                      key=lambda x: x['sale_count'])
        elif sort == 'lowest_ask':
            all_product_list = sorted(all_product_list,
                                      key=lambda x: x['lowest_ask'])
        elif sort == 'highest_bid':
            all_product_list = sorted(all_product_list,
                                      reverse=True,
                                      key=lambda x: x['highest_bid'])
        elif sort == 'release_date':
            all_product_list = sorted(all_product_list,
                                      reverse=True,
                                      key=lambda x: x['release_date'])
        elif sort == 'last_sales':
            all_product_list = sorted(all_product_list,
                                      reverse=True,
                                      key=lambda x: x['last_sales'])
        elif sort == 'average_price':
            all_product_list = sorted(all_product_list,
                                      reverse=True,
                                      key=lambda x: x['average_price'])
        elif sort == 'price_premium':
            all_product_list = sorted(all_product_list,
                                      reverse=True,
                                      key=lambda x: x['price_premium'])

        start_index = (page_num * page_limit) - page_limit
        end_index = page_num * page_limit
        # The slice already caps the page at page_limit items, so no
        # counter/break loop is needed.
        response_data_set['Product'].extend(
            all_product_list[start_index:end_index])

        product_total = len(all_product_list)
        page_count_condition = product_total % page_limit
        page_count = int(product_total //
                         page_limit) if page_count_condition == 0 else int(
                             product_total // page_limit) + 1
        response_data_set['Pagination']['limit'] = page_limit
        response_data_set['Pagination']['page'] = page_num
        response_data_set['Pagination']['product_total'] = product_total
        response_data_set['Pagination']['last_page'] = page_count
        response_data_set['Pagination']['current_page'] = page_num
        response_data_set['Pagination']['next_page'] = page_num + 1 if not (
            page_num == page_count) else None
        response_data_set['Pagination'][
            'previous_page'] = page_num - 1 if not (page_num == 1) else None

        return JsonResponse({'message': response_data_set}, status=200)
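
The page_count arithmetic above is ceiling division; math.ceil states the same intent in one line (for page_limit > 0):

import math

page_count = math.ceil(product_total / page_limit)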
Example #55
0
    def test_cache_versioning_incr_decr(self):
        cache.set('answer1', 37, version=1)
        cache.set('answer1', 42, version=2)
        cache.incr('answer1')
        assert cache.get('answer1', version=1) == 38
        assert cache.get('answer1', version=2) == 42
        cache.decr('answer1')
        assert cache.get('answer1', version=1) == 37
        assert cache.get('answer1', version=2) == 42

        cache.set('answer2', 37, version=1)
        cache.set('answer2', 42, version=2)
        cache.incr('answer2', version=2)
        assert cache.get('answer2', version=1) == 37
        assert cache.get('answer2', version=2) == 43
        cache.decr('answer2', version=2)
        assert cache.get('answer2', version=1) == 37
        assert cache.get('answer2', version=2) == 42

        cache.set('answer3', 37, version=1)
        cache.set('answer3', 42, version=2)
        caches['v2'].incr('answer3')
        assert cache.get('answer3', version=1) == 37
        assert cache.get('answer3', version=2) == 43
        caches['v2'].decr('answer3')
        assert cache.get('answer3', version=1) == 37
        assert cache.get('answer3', version=2) == 42

        cache.set('answer4', 37, version=1)
        cache.set('answer4', 42, version=2)
        caches['v2'].incr('answer4', version=1)
        assert cache.get('answer4', version=1) == 38
        assert cache.get('answer4', version=2) == 42
        caches['v2'].decr('answer4', version=1)
        assert cache.get('answer4', version=1) == 37
        assert cache.get('answer4', version=2) == 42
Example #56
0
def get_cached_query_count(qs, key, ttl):
    # Compare against None so a cached count of 0 is not re-queried.
    count = cache.get(key, None)
    if count is None:
        count = qs.count()
        cache.set(key, count, ttl)
    return count
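
Usage sketch; the queryset, key, and TTL are illustrative:

total_users = get_cached_query_count(
    User.objects.all(), 'user-total-count', 60 * 5)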
Example #57
0
    def test_cache_versioning_get_set(self):
        # set, using default version = 1
        cache.set('answer1', 42)
        assert cache.get('answer1') == 42
        assert cache.get('answer1', version=1) == 42
        assert cache.get('answer1', version=2) is None

        assert caches['v2'].get('answer1') is None
        assert caches['v2'].get('answer1', version=1) == 42
        assert caches['v2'].get('answer1', version=2) is None

        # set, default version = 1, but manually override version = 2
        cache.set('answer2', 42, version=2)
        assert cache.get('answer2') is None
        assert cache.get('answer2', version=1) is None
        assert cache.get('answer2', version=2) == 42

        assert caches['v2'].get('answer2') == 42
        assert caches['v2'].get('answer2', version=1) is None
        assert caches['v2'].get('answer2', version=2) == 42

        # v2 set, using default version = 2
        caches['v2'].set('answer3', 42)
        assert cache.get('answer3') is None
        assert cache.get('answer3', version=1) is None
        assert cache.get('answer3', version=2) == 42

        assert caches['v2'].get('answer3') == 42
        assert caches['v2'].get('answer3', version=1) is None
        assert caches['v2'].get('answer3', version=2) == 42

        # v2 set, default version = 2, but manually override version = 1
        caches['v2'].set('answer4', 42, version=1)
        assert cache.get('answer4') == 42
        assert cache.get('answer4', version=1) == 42
        assert cache.get('answer4', version=2) is None

        assert caches['v2'].get('answer4') is None
        assert caches['v2'].get('answer4', version=1) == 42
        assert caches['v2'].get('answer4', version=2) is None
Example #58
0
    def test_get_or_set(self):
        assert cache.get('projector') is None
        assert cache.get_or_set('projector', 42) == 42
        assert cache.get('projector') == 42
Example #59
0
    def test_float_timeout(self):
        # Make sure a timeout given as a float doesn't crash anything.
        cache.set("key1", "spam", 100.2)
        assert cache.get("key1") == "spam"
Example #60
0
    def test_cache_versioning_add(self):

        # add, default version = 1, but manually override version = 2
        cache.add('answer1', 42, version=2)
        assert cache.get('answer1', version=1) is None
        assert cache.get('answer1', version=2) == 42

        cache.add('answer1', 37, version=2)
        assert cache.get('answer1', version=1) is None
        assert cache.get('answer1', version=2) == 42

        cache.add('answer1', 37, version=1)
        assert cache.get('answer1', version=1) == 37
        assert cache.get('answer1', version=2) == 42

        # v2 add, using default version = 2
        caches['v2'].add('answer2', 42)
        assert cache.get('answer2', version=1) is None
        assert cache.get('answer2', version=2) == 42

        caches['v2'].add('answer2', 37)
        assert cache.get('answer2', version=1) is None
        assert cache.get('answer2', version=2) == 42

        caches['v2'].add('answer2', 37, version=1)
        assert cache.get('answer2', version=1) == 37
        assert cache.get('answer2', version=2) == 42

        # v2 add, default version = 2, but manually override version = 1
        caches['v2'].add('answer3', 42, version=1)
        assert cache.get('answer3', version=1) == 42
        assert cache.get('answer3', version=2) is None

        caches['v2'].add('answer3', 37, version=1)
        assert cache.get('answer3', version=1) == 42
        assert cache.get('answer3', version=2) is None

        caches['v2'].add('answer3', 37)
        assert cache.get('answer3', version=1) == 42
        assert cache.get('answer3', version=2) == 37