Code Example #1
File: models.py  Project: weijia/django-treenav
def treenav_save_other_object_handler(sender, instance, created, **kwargs):
    """
    This signal attempts to update the HREF of any menu items that point to
    another model object, when that object is saved.
    """
    try:
        cache_key = 'django-treenav-menumodels'
        if sender == MenuItem:
            cache.delete(cache_key)
        menu_models = cache.get(cache_key)
        if not menu_models:
            menu_models = []
            for menu_item in MenuItem.objects.exclude(content_type__isnull=True):
                menu_models.append(menu_item.content_type.model_class())
            cache.set(cache_key, menu_models)
        # only attempt to update MenuItem if sender is known to be referenced
        if sender in menu_models:
            ct = ContentType.objects.get_for_model(sender)
            items = MenuItem.objects.filter(content_type=ct, object_id=instance.pk)
            for item in items:
                if item.href != instance.get_absolute_url():
                    item.href = instance.get_absolute_url()
                    item.save()
    except:
        pass
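A minimal sketch of how a handler like the one above is usually wired up (the registration below is an assumption for illustration, not code taken from django-treenav):

from django.db.models.signals import post_save

# Connecting with no `sender` argument makes the handler fire on every model
# save, which is why the function above filters on `sender` itself.
post_save.connect(treenav_save_other_object_handler,
                  dispatch_uid='treenav_save_other_object_handler')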
Code Example #2
File: __init__.py  Project: musamusa/ureport
def fetch_old_sites_count():
    import requests, re
    from ureport.polls.models import UREPORT_ASYNC_FETCHED_DATA_CACHE_TIME

    start = time.time()
    this_time = datetime.now()
    linked_sites = list(getattr(settings, 'PREVIOUS_ORG_SITES', []))

    for site in linked_sites:
        count_link = site.get('count_link', "")
        if count_link:
            try:
                response = requests.get(count_link)
                response.raise_for_status()

                count = int(re.search(r'\d+', response.content).group())
                key = "org:%s:reporters:%s" % (site.get('name').lower(), 'old-site')
                cache.set(key,
                          {'time': datetime_to_ms(this_time), 'results': dict(size=count)},
                          UREPORT_ASYNC_FETCHED_DATA_CACHE_TIME)
            except:
                import traceback
                traceback.print_exc()

    # delete the global count cache to force a recalculate at the end
    cache.delete(GLOBAL_COUNT_CACHE_KEY)

    print "Fetch old sites counts took %ss" % (time.time() - start)
Code Example #3
File: views.py  Project: zavsnar/bioface
def create_organism(request):
    if request.method == 'POST':
        form = CreateOrganismForm(data = request.POST)
        if form.is_valid():
            query_dict = {
                "method" : "add_organism",
                "key": request.user.sessionkey,
                "params" : {
                    "data" : {
                        "name": form.cleaned_data['name']
                    }
                }
            }

            content_dict = api_request(query_dict)
            
            if content_dict.has_key('result'):
                cache.delete('organisms')
                messages.success(request, 'Organism "{}" successfully create.'.format(form.cleaned_data['name']))
            elif content_dict.has_key('error'):
                messages.error(request, 'ERROR: {}'.format(content_dict['error']['message']))

    else:
        form = CreateOrganismForm()

    template_context = {'form': form,}
    return render_to_response('create_organism.html', template_context, context_instance=RequestContext(request))
Code Example #4
File: subproject.py  Project: beingsane/weblate
 def clear_repo_cache(self):
     """
     Clears cached information on repository update.
     """
     cache.delete(
         '{0}-last-commit'.format(self.get_full_slug())
     )
Code Example #5
File: __init__.py  Project: musamusa/ureport
def fetch_reporter_group(org):
    start = time.time()
    print "Fetching reporter group for %s" % org.name
    try:
        from ureport.polls.models import CACHE_ORG_REPORTER_GROUP_KEY, UREPORT_ASYNC_FETCHED_DATA_CACHE_TIME

        this_time = datetime.now()

        reporter_group = org.get_config('reporter_group')
        if reporter_group:
            temba_client = org.get_temba_client()
            groups = temba_client.get_groups(name=reporter_group)

            key = CACHE_ORG_REPORTER_GROUP_KEY % (org.pk, slugify(unicode(reporter_group)))
            group_dict = dict()
            if groups:
                group = groups[0]
                group_dict = dict(size=group.size, name=group.name, uuid=group.uuid)
            cache.set(key,
                      {'time': datetime_to_ms(this_time), 'results': group_dict},
                      UREPORT_ASYNC_FETCHED_DATA_CACHE_TIME)
    except:
        client.captureException()
        import traceback
        traceback.print_exc()
    # delete the global count cache to force a recalculate at the end
    cache.delete(GLOBAL_COUNT_CACHE_KEY)

    print "Fetch %s reporter group took %ss" % (org.name, time.time() - start)
Code Example #6
File: utils.py  Project: ujdhesa/olympia
 def finish(self, file_id):
     file_id = int(file_id)
     newfiles = dict([(k, v) for (k, v) in self.files().items()
                      if k != file_id])
     cache.set(self.file_key, newfiles)
     if not newfiles:
         cache.delete(self.version_key)
Code Example #7
def set_setting_value(module_name, class_name, attribute_name, value):
    setting = get_setting(module_name, class_name, attribute_name)
    storage = get_setting_storage(module_name, class_name, attribute_name)
    storage.value = setting.get_db_prep_save(value)
    storage.save()
    key = _get_cache_key(module_name, class_name, attribute_name)
    cache.delete(key)
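For context, a hypothetical read-side counterpart (a sketch reusing the helpers from the snippet above, not the library's actual code) shows why the delete is enough to keep readers consistent: the next lookup misses the cache and repopulates it from storage.

def get_setting_value(module_name, class_name, attribute_name):
    key = _get_cache_key(module_name, class_name, attribute_name)
    value = cache.get(key)
    if value is None:
        # Cache miss after set_setting_value() deleted the key: fall back to
        # the stored value and re-prime the cache for later reads.
        storage = get_setting_storage(module_name, class_name, attribute_name)
        value = storage.value
        cache.set(key, value)
    return value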
Code Example #8
File: cron.py  Project: Fjoerfoks/zamboni
def cleanup_extracted_file():
    log.info('Removing extracted files for file viewer.')
    root = os.path.join(settings.TMP_PATH, 'file_viewer')
    # Local storage uses local time for file modification. S3 uses UTC time.
    now = datetime.utcnow if storage_is_remote() else datetime.now
    for path in private_storage.listdir(root)[0]:
        full = os.path.join(root, path)
        age = now() - private_storage.modified_time(
            os.path.join(full, 'manifest.webapp'))
        if age.total_seconds() > (60 * 60):
            log.debug('Removing extracted files: %s, %dsecs old.' %
                      (full, age.total_seconds()))
            for subroot, dirs, files in walk_storage(full):
                for f in files:
                    private_storage.delete(os.path.join(subroot, f))
            # Nuke out the file and diff caches when the file gets removed.
            id = os.path.basename(path)
            try:
                int(id)
            except ValueError:
                continue

            key = hashlib.md5()
            key.update(str(id))
            cache.delete('%s:memoize:%s:%s' % (settings.CACHE_PREFIX,
                                               'file-viewer', key.hexdigest()))
Code Example #9
File: modelcache.py  Project: sfpiano/rbclone
 def _clear(sender, instance, *args, **kwargs):
     mk = md5('{0}{1}'.format(model_class.__name__,
                            str(model_class.__dict__)))
     key = 'pk{0}{1}'.format(mk, str(instance.pk))
     if verbose:
         debug('expiring key', key)
     cache.delete(key)
Code Example #10
File: models.py  Project: bmswgnp/wger
    def save(self, *args, **kwargs):
        '''
        Reset the cache
        '''

        super(Ingredient, self).save(*args, **kwargs)
        cache.delete(cache_mapper.get_ingredient_key(self.id))
Code Example #11
File: manager.py  Project: bgyss/sentry
    def _post_save(self, instance, **kwargs):
        """
        Pushes changes to an instance into the cache, and removes invalid (changed)
        lookup values.
        """
        pk_name = instance._meta.pk.name
        pk_names = ('pk', pk_name)
        pk_val = instance.pk
        for key in self.cache_fields:
            if key in pk_names:
                continue
            # store pointers
            cache.set(self._get_from_cache_key(**{key: getattr(instance, key)}), pk_val, self.cache_ttl)  # 1 hour

        # Ensure we dont serialize the database into the cache
        db = instance._state.db
        instance._state.db = None
        # store actual object
        cache.set(self._get_from_cache_key(**{pk_name: pk_val}), instance, self.cache_ttl)
        instance._state.db = db

        # Kill off any keys which are no longer valid
        for key in self.cache_fields:
            if key not in instance.__cache_data:
                continue
            value = instance.__cache_data[key]
            if value != getattr(instance, key):
                cache.delete(self._get_from_cache_key(**{key: value}))

        self._cache_state(instance)
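A hedged sketch of the read path that those pointer keys enable (assumed for illustration; Sentry's real get_from_cache differs): a lookup by a cached field first resolves the pointer to the primary key, then fetches the cached instance, falling back to the database on a miss.

    def _get_from_cache_sketch(self, **kwargs):
        # Assumes a single lookup kwarg, e.g. _get_from_cache_sketch(username='lamby')
        (key, value), = kwargs.items()
        pk_name = self.model._meta.pk.name
        if key not in ('pk', pk_name):
            # Follow the pointer written by _post_save() above.
            value = cache.get(self._get_from_cache_key(**{key: value}))
        instance = None
        if value is not None:
            # The actual object is stored under its primary-key field name.
            instance = cache.get(self._get_from_cache_key(**{pk_name: value}))
        if instance is None:
            instance = self.get(**kwargs)  # database fallback
        return instance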
Code Example #12
File: test_cache.py  Project: MiguelPin/e-cidadania
 def testGetOrInsertObjectInCache(self):
     """
     Tests the get_or_insert_object_in_helpers.cache.
     """
     
     
     space_props = {'url': 'test_space', 'name': 'some_name'}
     #print Space.__class__.__name__
     space_key = cache_helper._get_cache_key_for_model(Space, 'test_space')
     expected = None
     actual = cache.get(space_key)
     self.assertEqual(expected, actual)
     
     space = Space(**space_props)
     space.save()
     expected = space
     actual = cache_helper.get_or_insert_object_in_cache(Space, 
                                                         space.url, url=space.url)
     self.assertEqual(expected, actual)
     
     cache.delete(space_key)
     self.assertEqual(cache.get(space_key), None)
     expected = space
     actual = cache_helper.get_or_insert_object_in_cache(Space, 
                                                         space.url, url=space.url)
     self.assertEqual(expected, actual)
     
Code Example #13
File: views.py  Project: phadej/kamu
def clear_cache(sender, **kwargs):
    # FIXME: oh my
    k = cache.get('opinions_summary_keys')
    if not k:
        return
    cache.delete_many(k)
    cache.delete('opinions_summary_keys')
Code Example #14
File: views.py  Project: jlin/airmozilla
def event_screencaptures(request, event):
    if event.status != Event.STATUS_INITIATED:
        return http.HttpResponseBadRequest(
            "Events NOT in the state of initiated."
        )
    upload = event.upload
    video_url = upload.url

    context = {}

    cache_key = 'fetching-{0}'.format(event.id)

    # This function sets the cache `fetching-{id}` before and after calling
    # those functions in the videoinfo module.
    # The reason is that those calls might take many many seconds
    # and the webapp might send async calls to the event_picture view
    # which will inform the webapp that the slow videoinfo processes
    # are running and thus that the webapp shouldn't kick if off yet.

    seconds = event.duration
    if not event.duration:
        # it's a poor man's lock
        if not cache.get(cache_key):
            cache.set(cache_key, True, 60)
            seconds = videoinfo.fetch_duration(
                event,
                video_url=video_url,
                save=True,
                verbose=settings.DEBUG
            )
            cache.delete(cache_key)
            event = Event.objects.get(id=event.id)
    context['seconds'] = seconds
    # The reason we can't use `if event.duration:` is because the
    # fetch_duration() does an inline-update instead of modifying
    # the instance object.
    no_pictures = Picture.objects.filter(event=event).count()
    if event.duration and not no_pictures:
        if not cache.get(cache_key):
            cache.set(cache_key, True, 60)
            event = Event.objects.get(id=event.id)
            no_pictures = videoinfo.fetch_screencapture(
                event,
                video_url=video_url,
                save=True,
                verbose=settings.DEBUG,
                set_first_available=not event.picture,
                import_immediately=True,
            )
            cache.delete(cache_key)
            event = Event.objects.get(id=event.id)
    if no_pictures and not event.picture:
        # no picture has been chosen previously
        pictures = Picture.objects.filter(event=event).order_by('created')[:1]
        for picture in pictures:
            event.picture = picture
            event.save()
            break
    context['no_pictures'] = no_pictures
    return context
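An aside on the "poor man's lock" above (a sketch, not airmozilla's code): Django's cache.add() only stores the key if it is absent and reports whether it did, so the same guard can be written without the window between cache.get() and cache.set():

if cache.add(cache_key, True, 60):  # False means another worker already holds it
    try:
        seconds = videoinfo.fetch_duration(
            event,
            video_url=video_url,
            save=True,
            verbose=settings.DEBUG,
        )
    finally:
        cache.delete(cache_key)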
Code Example #15
 def delete(self, session_key=None):
     super(SessionStore, self).delete(session_key)
     if session_key is None:
         if self.session_key is None:
             return
         session_key = self.session_key
     cache.delete(KEY_PREFIX + session_key)
Code Example #16
    def __setattr__(self, name, value):
        """
        Add support of setting values to settings as instance attribute.
        """
        if name.startswith('_'):
            return self._safe_super_method('__setattr__', name, value)

        # First of all try to setup value to Django setting
        if hasattr(django_settings, name):
            setattr(django_settings, name, value)
        # Then setup value to project setting
        elif not self._prefix:
            custom = self._custom
            setattr(custom, name, value)
            custom.save()
            cache.delete(CACHE_KEY)
        # And finally setup value to app setting
        else:
            custom = self._custom
            data, prefix = custom.data, self._prefix

            if not prefix in data:
                data[prefix] = {}

            data[prefix].update({name: value})
            custom.save()
            cache.delete(CACHE_KEY)
Code Example #17
File: models.py  Project: jmiguelv/wagtail
    def save(self, *args, **kwargs):
        update_descendant_url_paths = False

        if self.id is None:
            # we are creating a record. If we're doing things properly, this should happen
            # through a treebeard method like add_child, in which case the 'path' field
            # has been set and so we can safely call get_parent
            self.set_url_path(self.get_parent())
        else:
            # see if the slug has changed from the record in the db, in which case we need to
            # update url_path of self and all descendants
            old_record = Page.objects.get(id=self.id)
            if old_record.slug != self.slug:
                self.set_url_path(self.get_parent())
                update_descendant_url_paths = True
                old_url_path = old_record.url_path
                new_url_path = self.url_path

        result = super(Page, self).save(*args, **kwargs)

        if update_descendant_url_paths:
            self._update_descendant_url_paths(old_url_path, new_url_path)

        # Check if this is a root page of any sites and clear the 'wagtail_site_root_paths' key if so
        if Site.objects.filter(root_page=self).exists():
            cache.delete('wagtail_site_root_paths')

        return result
Code Example #18
File: views.py  Project: akatsoulas/airmozilla
def unsubscribe(request, identifier, id=None):
    context = {}
    event = discussion = None
    if id:
        discussion = get_object_or_404(Discussion, id=id)
        event = discussion.event

    context['event'] = event
    cache_key = 'unsubscribe-%s' % identifier
    user_id = cache.get(cache_key)
    if user_id:
        user = get_object_or_404(User, id=user_id)
    else:
        user = None
    context['user'] = user

    if request.method == 'POST':
        if not user:
            return http.HttpResponseBadRequest('No user')
        Unsubscription.objects.get_or_create(
            user=user,
            discussion=discussion
        )
        cache.delete(cache_key)
        if discussion:
            return redirect('comments:unsubscribed', discussion.id)
        else:
            return redirect('comments:unsubscribed_all')

    return render(request, 'comments/unsubscribe.html', context)
Code Example #19
File: admin.py  Project: awaykened/site
    def recalculate_score(self, request, queryset):
        if not request.user.has_perm('judge.rejudge_submission'):
            self.message_user(request, ugettext('You do not have the permission to rejudge submissions.'),
                              level=messages.ERROR)
            return
        submissions = list(queryset.select_related('problem').only('points', 'case_points', 'case_total',
                                                                   'problem__partial', 'problem__points'))
        for submission in submissions:
            submission.points = round(submission.case_points / submission.case_total * submission.problem.points
                                      if submission.case_total else 0, 1)
            if not submission.problem.partial and submission.points < submission.problem.points:
                submission.points = 0
            submission.save()

            if hasattr(submission, 'contest'):
                contest = submission.contest
                contest.points = round(submission.case_points / submission.case_total * contest.problem.points
                                       if submission.case_total > 0 else 0, 1)
                if not contest.problem.partial and contest.points < contest.problem.points:
                    contest.points = 0
                contest.save()

        for profile in Profile.objects.filter(id__in=queryset.values_list('user_id', flat=True).distinct()):
            profile.calculate_points()
            cache.delete('user_complete:%d' % profile.id)
        
        for participation in ContestParticipation.objects.filter(id__in=queryset.values_list('contest__participation_id')):
            participation.recalculate_score()

        self.message_user(request, ungettext('%d submission were successfully rescored.',
                                             '%d submissions were successfully rescored.',
                                             len(submissions)) % len(submissions))
Code Example #20
File: cache.py  Project: wjdp/xSACdb
 def invalidate_object_property_cache(self, property):
     """Invalidate a single property, useful for calling from elsewhere"""
     if property in self.__dict__:
         # Remove from instance cache, see bug #285
         self.__dict__.pop(property)
     # Remove from external cache
     cache.delete(object_cache_key(self.__class__.__name__, self.pk, property))
Code Example #21
File: tests.py  Project: t0in4/exgit
    def test_13_catalog_filters(self):
        settings.LANGUAGES = (('fr', 'French'), ('xx', 'Dummy Language'),)
        cache.delete('rosetta_django_paths')
        self.client.get(reverse('rosetta-pick-file') + '?filter=third-party')
        r = self.client.get(reverse('rosetta-pick-file'))
        self.assertTrue(os.path.normpath('rosetta/locale/xx/LC_MESSAGES/django.po') in str(r.content))
        self.assertTrue(('contrib') not in str(r.content))

        self.client.get(reverse('rosetta-pick-file') + '?filter=django')
        r = self.client.get(reverse('rosetta-pick-file'))
        self.assertTrue(os.path.normpath('rosetta/locale/xx/LC_MESSAGES/django.po') not in str(r.content))

        if django.VERSION[0:2] >= (1, 3):
            self.assertTrue(('contrib') in str(r.content))

        self.client.get(reverse('rosetta-pick-file') + '?filter=all')
        r = self.client.get(reverse('rosetta-pick-file'))
        self.assertTrue(os.path.normpath('rosetta/locale/xx/LC_MESSAGES/django.po') in str(r.content))

        if django.VERSION[0:2] >= (1, 3):
            self.assertTrue(('contrib') in str(r.content))

        self.client.get(reverse('rosetta-pick-file') + '?filter=project')
        r = self.client.get(reverse('rosetta-pick-file'))
        self.assertTrue(os.path.normpath('rosetta/locale/xx/LC_MESSAGES/django.po') not in str(r.content))
        if django.VERSION[0:2] >= (1, 3):
            self.assertTrue(('contrib') not in str(r.content))
Code Example #22
File: views.py  Project: yalis/OpenBAID
def confirm_deletion(request, token):
    key = cache.get(token, None)
    if key:
        cache.delete(token)
        try:
            user = User.objects.get(username=key)
            back_user = serializers.serialize('json', [user])
            ex_profile = ExtendedProfile.objects.filter(user=user)
            back_profile = serializers.serialize('json', ex_profile)
            backup_info = {'user': back_user, 'extended_profile': back_profile}
            deleted_user = DeletedUser()
            deleted_user.identifier = user.email
            deleted_user.user_information = backup_info
            deleted_user.save()
            user.delete()
            messages.success(request, _(u'Tu cuenta ha sido borrada.'))
            auth_logout(request)
        except Exception as e:
            logger.error("Error {e} en borrado de usuario".format(e=e))
            msg = _(u'Tu cuenta no ha sido borrada. Por favor intentar nuevamente')
            messages.error(request, msg)
        return redirect('accounts:profile')
    else:
        msg = _(u'Tu cuenta no ha sido borrada. Por favor intentar nuevamente')
        messages.error(request, msg)
        return redirect('accounts:profile')
Code Example #23
File: models.py  Project: ajayk1205/opencomparison
 def save(self, *args, **kwargs):
     # reset the last_updated and commits_over_52 caches on the package
     package = self.package
     cache.delete(package.cache_namer(self.package.last_updated))
     cache.delete(package.cache_namer(package.commits_over_52))
     self.package.last_updated()
     super(Commit, self).save(*args, **kwargs)
Code Example #24
File: models.py  Project: ctalbert/scrumbugz
 def refresh_bugs(self):
     try:
         delattr(self, '_bugs')
     except AttributeError:
         pass
     cache.delete(self._bugs_cache_key)
     return self.get_bugs()
Code Example #25
File: guessnumber.py  Project: Manfred123/manfred
def ask(request):
	if ("play" in request.text and "num" in request.text) or ("gra" in request.text and "licz" in request.text):
		cache.set(str(request.user.id) + "guessnumber", { "Number": random.randint(1,100), "Tryies": 0 }, 3600)
		return response(image="/static/imgs/modules/guessnumber.png", text="Pomyślałem liczbę od 1 do 100. Zgaduj, a jeżeli ci się znudzi wpisz 'exit'.", quality=1)
	elif "exit" in request.text:
		cache.delete(str(request.user.id) + "guessnumber")
		return response(image="/static/imgs/modules/guessnumber.png", text="Game ended", quality=1)
	elif str(request.user.id) + "guessnumber" in cache:
		data = cache.get(str(request.user.id) + "guessnumber")
		cont = True
		try:
			data["Tryies"] += 1
			if data["Number"] < int(request.text):
				return response(image="/static/imgs/modules/guessnumber.png", quality=1, text=request.text + ": Moja liczba jest mniejsza")
			if data["Number"] > int(request.text):
				return response(image="/static/imgs/modules/guessnumber.png", quality=1, text=request.text + ": Moja liczba jest większa")
			if data["Number"] == int(request.text):
				t = data["Tryies"]
				cont = False
				return response(image="/static/imgs/modules/guessnumber.png", quality=1, text=request.text + ": Wygrałeś. To ta liczba. Ilość prób: " + str(t))
		finally:
			if cont:
				cache.set(str(request.user.id) + "guessnumber", data, 3600)
			else:
				cache.delete(str(request.user.id) + "guessnumber")
	else:
		return response(image="/static/imgs/modules/guessnumber.png", quality=0.01, text="Ej.. Zagrajmy w zgadywanie liczb wpisz 'zagrajmy w zgadywanie liczb'.")
Code Example #26
File: views.py  Project: bluefan/auto_install
def delivery(request,obj_id):
    if request.method == "GET":
        o = get_object_or_404(online,id=obj_id)
        o.delete()
        disk_sotl.objects.filter(host_id=int(obj_id)).delete()
        cache.delete("%s_ip" % obj_id)
        return HttpResponseRedirect('/his/')
Code Example #27
File: views.py  Project: nikozavr/lab3
def create(request):
	if request.method == "POST":
		data = request.body
		data = json.loads(data.decode('utf8'))
		login = data["login"]
		password = data["password"]
		logger = logging.getLogger('session')
		try:
			user = Users.objects.get(login=login)
			if user.password == password: #check_password(password, user.password):
				session_key = ""
				if cache.get(session_key) != None:	
					cache.delete(session_key)
				session_key = create_session(user)
				cache.set(session_key, user.id)	
				json_data = json.dumps({"user_id": user.id, "session_key": session_key})
				logger.info(json_data)
				return HttpResponse(json_data, content_type="application/json")
			else: 
				with open(os.path.join(settings.BASE_DIR, "static/jsons/error_log_pas.json")) as data_file:    
					data = json.load(data_file)
				logger.info(data)
				return HttpResponse(json.dumps(data), status=400)
		except ObjectDoesNotExist:
			with open(os.path.join(settings.BASE_DIR, "static/jsons/error_log_pas.json")) as data_file:    
				data = json.load(data_file)
			logger.info(data)
			return HttpResponse(json.dumps(data), status=400)

	return HttpResponse("Ok")
Code Example #28
File: query.py  Project: subc/anchovy
    def delete_cache(self):
        if not self._can_manage_cache:
            return

        _logger.debug('DELETE(%s)', self._manage_cache_key)
        for cache_key in self._cache_keys_for_delete():
            cache.delete(cache_key)
Code Example #29
    def save(self, *args, **kwargs):
        # Force validation and save.
        self.full_clean()
        super(Page, self).save(*args, **kwargs)

        # Delete this entry from the cache, to avoid confusion.
        cache.delete(Page.get_key_for_path(self.url))
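For context, a hedged sketch of the read side that this delete protects (the helper below is hypothetical; only Page.get_key_for_path comes from the snippet above): pages are cached per URL path, and save() evicts the entry so edits are visible on the next request.

def get_page_for_path(path):
    key = Page.get_key_for_path(path)
    page = cache.get(key)
    if page is None:
        # Miss (e.g. right after a save deleted the key): reload and re-cache.
        page = Page.objects.get(url=path)
        cache.set(key, page)
    return page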
Code Example #30
 def test_create_on_demand_functionality(self):
     """Ensures create_on_demand functionality works as advertised"""
     jpg = VersatileImageTestModel.objects.get(img_type='jpg')
     img_url = jpg.image.crop['100x100'].url
     self.assertEqual(
         cache.get(img_url),
         None
     )
     jpg.image.create_on_demand = True
     jpg.image.crop['100x100'].url
     self.assertEqual(
         cache.get(img_url),
         1
     )
     self.assertTrue(
         jpg.image.field.storage.exists(jpg.image.crop['100x100'].name)
     )
     jpg.image.field.storage.delete(jpg.image.crop['100x100'].name)
     self.assertFalse(
         jpg.image.field.storage.exists(jpg.image.crop['100x100'].name)
     )
     cache.delete(img_url)
     self.assertEqual(
         cache.get(img_url),
         None
     )
Code Example #31
File: models.py  Project: fernandocrb/openwisp-radius
 def delete_cache(self, *args, **kwargs):
     username = f'rt-{self.user.username}'
     if cache.get(username):
         cache.delete(username)
Code Example #32
def add_variants(request, product_id):
    """Adds variants to product with passed product_id based on property/option-
    combinations passed within request body.
    """
    cache.delete("%s-variants%s" %
                 (settings.CACHE_MIDDLEWARE_KEY_PREFIX, product_id))

    product = Product.objects.get(pk=product_id)

    # Add variant(s)
    variant_simple_form = ProductVariantSimpleForm(data=request.POST)

    # We don't have to check whether the form is valid. If the fields
    # are empty we create default ones.

    # First we need to prepare the requested properties for the use
    # with cartesian product. That means if the keyword "all" is
    # found we collect all options of this properties.
    properties = []
    for key, value in request.POST.items():
        if key.startswith("property"):
            property_id = key.split("_")[1]
            if value == "all":
                temp = []
                for option in PropertyOption.objects.filter(
                        property=property_id):
                    temp.append("%s|%s" % (property_id, option.id))
                properties.append(temp)
            else:
                properties.append(["%s|%s" % (property_id, value)])

    # Create a variant for every requested option combination
    for i, options in enumerate(manage_utils.cartesian_product(*properties)):

        if product.has_variant(options):
            continue

        name = request.POST.get("name")
        price = request.POST.get("price")
        slug = request.POST.get("slug")

        for option in options:
            property_id, option_id = option.split("|")
            o = PropertyOption.objects.get(pk=option_id)
            if slug:
                slug += "-"
            slug += slugify(o.name)

        slug = "%s-%s" % (product.slug, slug)
        sku = "%s-%s" % (product.sku, i + 1)

        variant = None
        # need to validate the amalgamated slug to make sure it is not already in use
        try:
            product = Product.objects.get(slug=slug)
            message = _(u"That slug is already in use. Please use another.")
        except Product.MultipleObjectsReturned:
            message = _(u"That slug is already in use. Please use another.")
        except Product.DoesNotExist:
            variant = Product(name=name,
                              slug=slug,
                              sku=sku,
                              parent=product,
                              price=price,
                              variant_position=(i + 1) * 10,
                              sub_type=VARIANT)
            try:
                variant.save()
            except IntegrityError:
                continue
            else:
                # By default we copy the property groups of the product to
                # the variants
                for property_group in product.property_groups.all():
                    variant.property_groups.add(property_group)

            # Save the value for this product and property.
            for option in options:
                property_id, option_id = option.split("|")
                ProductPropertyValue.objects.create(
                    product=variant,
                    property_id=property_id,
                    value=option_id,
                    type=PROPERTY_VALUE_TYPE_VARIANT)
                # By default we also create the filter values, as most
                # users would expect.
                if Property.objects.get(pk=property_id).filterable:
                    ProductPropertyValue.objects.create(
                        product=variant,
                        property_id=property_id,
                        value=option_id,
                        type=PROPERTY_VALUE_TYPE_FILTER)

            message = _(u"Variants have been added.")

    html = (
        ("#selectable-products-inline",
         _selectable_products_inline(request, product)),
        ("#variants", manage_variants(request, product_id, as_string=True)),
    )

    result = simplejson.dumps({
        "html": html,
        "message": message,
    },
                              cls=LazyEncoder)

    return HttpResponse(result)
Code Example #33
File: proxy.py  Project: rixx/django-hierarkey
 def _flush_external_cache(self):
     cache.delete('hierarkey_{}_{}'.format(self._cache_namespace,
                                           self._obj.pk))
Code Example #34
File: models.py  Project: tbpmig/mig-website
 def delete(self, *args, **kwargs):
     super(MemberProfile, self).delete(*args, **kwargs)
     cache.delete('active_list_html')
Code Example #35
def purge_cache(sender, instance, **kwargs):
    cache.delete(u'proverb_entry:%s' % instance.pk)
    cache.delete(u'proverb_entry_list')
    cache.delete(u'inhalt_stats')
Code Example #36
File: ua.py  Project: tcarlander/django-slumber
 def tearDown(self):
     cache.delete('slumber.connector.ua.get.' + self.cache_url)
Code Example #37
File: models.py  Project: fernandocrb/openwisp-radius
 def delete_cache(self, *args, **kwargs):
     cache.delete(self.organization.pk)
     cache.delete(f'ip-{self.organization.pk}')
Code Example #38
File: tztoday.py  Project: fuzhengyu/hiyou
 def handle(self, *args, **options):
     aa = None
     label = None
     while True:
         filename = '/home/www/hiyoutest/logs/tanz.log'
         data = []
         aa = open(filename,'r')
         if cache.get('label'):
             try:
                 aa.seek(cache.get('label'),0)
             except:
                 pass
         while True:
             line = aa.readline()
             label = aa.tell()
             aa.seek(label,0)
             data.append(line)
             if not line:
                 break
         ## store counts per scenic area
         obj = Scenic.objects.filter(isdel=0)
         scenic = {}
         p = MacParser()
         today = datetime.datetime.today().strftime('%Y-%m-%d')
         for obj in obj:
             scenic['name'+str(obj.pk)] = {}
             objs = obj.dev_set.filter(dev_type='1',isonline=1)
             qwe = []
             for objs in objs:
                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)] = {}
                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['vivo'] = []
                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['apple'] = []
                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['huawei'] = []
                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['oppo'] = []
                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['samsung'] = []
                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['other'] = []
                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['macaddr'] = objs.macaddr
                 for ww in data:
                     try:
                         typ = self.mac_type(p,eval(ww)['mac'])
                         if eval(ww)['macaddr'] == objs.macaddr:
                             if typ == 'vivo':
                                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['vivo'].append(eval(ww)['mac'])
                             elif typ == 'apple':
                                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['apple'].append(eval(ww)['mac'])
                             elif typ == 'huawei':
                                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['huawei'].append(eval(ww)['mac'])
                             elif typ == 'oppo':
                                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['oppo'].append(eval(ww)['mac'])
                             elif typ == 'samsung':
                                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['samsung'].append(eval(ww)['mac'])
                             else:
                                 scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['other'].append(eval(ww)['mac'])
                             qwe.append(eval(ww)['mac'])
                     except:
                         pass
                 if not cache.get('tztoday'):
                     Census.objects.create(scenic=obj,dev=objs,vivo=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['vivo']))),apple=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['apple']))),huawei=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['huawei']))),oppo=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['oppo']))),samsung=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['samsung']))),other=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['other']))))
                 else:
                     ww = Census.objects.filter(scenic=obj,dev=objs).first()
                     if cache.get('tztoday')['name'+str(ww.scenic.pk)].has_key('dev'+str(ww.dev.pk)) == False:
                         ww.delete()
                         Census.objects.create(scenic=obj,dev=objs,vivo=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['vivo']))),apple=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['apple']))),huawei=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['huawei']))),oppo=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['oppo']))),samsung=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['samsung']))),other=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['other']))))             
                     else:
                         if ww.date < datetime.datetime.strptime(today, "%Y-%m-%d"):
                             Census.objects.create(scenic=obj,dev=objs,vivo=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['vivo']))),apple=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['apple']))),huawei=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['huawei']))),oppo=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['oppo']))),samsung=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['samsung']))),other=len(list(set(scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['other']))))
                         else:
                             ww.vivo = len(list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['vivo']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['vivo'])))
                             ww.apple = len(list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['apple']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['apple'])))
                             ww.huawei = len(list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['huawei']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['huawei'])))
                             ww.oppo = len(list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['oppo']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['oppo'])))
                             ww.samsung = len(list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['samsung']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['samsung'])))
                             ww.other = len(list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['other']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['other'])))
                             ww.save()
                             scenic['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['vivo'] = list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['vivo']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['vivo']))
                             scenic['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['apple'] = list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['apple']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['apple']))
                             scenic['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['huawei'] = list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['huawei']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['huawei']))
                             scenic['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['oppo'] = list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['oppo']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['oppo']))
                             scenic['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['samsung'] = list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['samsung']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['samsung']))
                             scenic['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['other'] = list(set(cache.get('tztoday')['name'+str(ww.scenic.pk)]['dev'+str(ww.dev.pk)]['other']+scenic['name'+str(obj.pk)]['dev'+str(objs.pk)]['other']))
             nn = []
             for sz in scenic['name'+str(obj.pk)]:
                 for sz2 in sz:
                     for sz3 in sz2:
                         nn.append(sz3)
             ################## today's new vs. returning users ####################
             tt = list(set(qwe))
             pp = []
             ll = cache.get('usertoday'+str(obj.pk))
             if ll:
                 for kk in tt:
                     if kk not in ll:
                         pp.append(kk)
             uu = []
             ob = Client.objects.filter(scenic_id=obj.pk)
             for ob in ob:
                 uu.append(ob.mac)
             uu = list(set(uu))
             if pp:
                 new = len(list(set(uu+pp)))-len(uu)
                 old = len(pp) - new
             else:
                 new = len(list(set(tt+uu)))-len(uu)
                 old = len(tt) - new
             if not Newo.objects.filter(scenic_id=obj.pk,date=1):
                 Newo.objects.create(scenic=obj,xin=new,lao=old,date=1)
                 cache.set('usertoday'+str(obj.pk),tt,12*60*60)
             else:
                 oo = Newo.objects.filter(scenic_id=obj.pk,date=1).first()
                 if oo.update>datetime.datetime.strptime(today, "%Y-%m-%d"):
                     oo.xin = oo.xin+new
                     oo.lao = oo.lao+old
                     cache.set('usertoday'+str(obj.pk),ll+pp,12*60*60)
                 else:
                     cache.delete('usertoday'+str(obj.pk))
                     oo.xin = new
                     oo.lao = old
                     cache.set('usertoday'+str(obj.pk),tt,12*60*60)
                 oo.save()
             ################## yesterday's new vs. returning users ####################
             tt = Client.objects.filter(scenic_id=obj.pk,statrtime__lt=today,statrtime__gte=datetime.datetime.strptime(today, "%Y-%m-%d")-datetime.timedelta(days = 1))
             rr = []
             uu = []
             for tt in tt:
                 rr.append(tt.mac)
             rr = list(set(rr))
             ob = Client.objects.filter(scenic_id=obj.pk,statrtime__lt=datetime.datetime.strptime(today, "%Y-%m-%d")-datetime.timedelta(days = 1))
             for ob in ob:
                 uu.append(ob.mac)
             uu = list(set(uu))
             new = len(list(set(rr+uu)))-len(uu)
             old = len(rr)-new
             if not Newo.objects.filter(scenic_id=obj.pk,date=-1):
                 Newo.objects.create(scenic=obj,xin=new,lao=old,date=-1)
             else:
                 oo = Newo.objects.filter(scenic_id=obj.pk,date=-1).first()
                 oo.xin = new
                 oo.lao = old
                 oo.save()
             ################ past week's new vs. returning users ######################
             tt = Client.objects.filter(scenic_id=obj.pk,statrtime__gte=datetime.datetime.strptime(today, "%Y-%m-%d")-datetime.timedelta(days = 6))
             rr = []
             uu = []
             for tt in tt:
                 rr.append(tt.mac)
             rr = list(set(rr+nn))
             ob = Client.objects.filter(scenic_id=obj.pk,statrtime__lte=datetime.datetime.strptime(today, "%Y-%m-%d")-datetime.timedelta(days = 6))
             for ob in ob:
                 uu.append(ob.mac)
             uu = list(set(uu))
             new = len(list(set(rr+uu)))-len(uu)
             old = len(rr)-new
             if not Newo.objects.filter(scenic_id=obj.pk,date=7):
                 Newo.objects.create(scenic=obj,xin=new,lao=old,date=7) 
             else:
                 cc = Newo.objects.filter(scenic_id=obj.pk,date=7).first()
                 cc.xin = new
                 cc.lao = old
                 cc.save()
             ############### past month's new vs. returning users #####################
             tt = Client.objects.filter(scenic_id=obj.pk,statrtime__gte=datetime.datetime.strptime(today, "%Y-%m-%d")-datetime.timedelta(days = 29))
             rr = []
             uu = []
             for tt in tt:
                 rr.append(tt.mac)
             rr = list(set(rr+nn))
             ob = Client.objects.filter(scenic_id=obj.pk,statrtime__lt=datetime.datetime.strptime(today, "%Y-%m-%d")-datetime.timedelta(days = 29))
             for ob in ob:
                 uu.append(ob.mac)
             uu = list(set(uu))
             new = len(list(set(rr+uu)))-len(uu)
             old = len(rr)-new
             if not Newo.objects.filter(scenic_id=obj.pk,date=30):
                 Newo.objects.create(scenic=obj,xin=new,lao=old,date=30)
             else:
                 bb = Newo.objects.filter(scenic_id=obj.pk,date=30).first()
                 bb.xin = new
                 bb.lao = old
                 bb.save()
         today = datetime.datetime.today().strftime('%Y-%m-%d')
         if Census.objects.all() and Census.objects.order_by("date").last().date>datetime.datetime.strptime(today, "%Y-%m-%d"):
             cache.set('tztoday',scenic,12*60*60)
             cache.set('label',label,12*60*60)
         else:
             cache.delete('tztoday')
             cache.delete('label')
         aa.close()
         time.sleep(5*60)
Code Example #39
def delete(key):
    return cache.delete(key)
Code Example #40
    def get_data(self, request, **kwargs):
        """
        Returns the data for the page requested with the specified
        parameters applied

        filters: filter and action name, e.g. "outcome:build_succeeded"
        filter_value: value to pass to the named filter+action, e.g. "on"
        (for a toggle filter) or "2015-12-11,2015-12-12" (for a date range filter)
        """

        page_num = request.GET.get("page", 1)
        limit = request.GET.get("limit", 10)
        search = request.GET.get("search", None)
        filters = request.GET.get("filter", None)
        filter_value = request.GET.get("filter_value", "on")
        orderby = request.GET.get("orderby", None)
        nocache = request.GET.get("nocache", None)

        # Make a unique cache name
        cache_name = self.__class__.__name__

        for key, val in request.GET.iteritems():
            if key == 'nocache':
                continue
            cache_name = cache_name + str(key) + str(val)

        for key, val in kwargs.iteritems():
            cache_name = cache_name + str(key) + str(val)

        # No special chars allowed in the cache name apart from dash
        cache_name = re.sub(r'[^A-Za-z0-9-]', "", cache_name)

        if nocache:
            cache.delete(cache_name)

        data = cache.get(cache_name)

        if data:
            logger.debug("Got cache data for table '%s'" % self.title)
            return data

        self.setup_columns(**kwargs)

        if search:
            self.apply_search(search)
        if filters:
            self.apply_filter(filters, filter_value, **kwargs)
        if orderby:
            self.apply_orderby(orderby)

        paginator = Paginator(self.queryset, limit)

        try:
            page = paginator.page(page_num)
        except EmptyPage:
            page = paginator.page(1)

        data = {
            'total': self.queryset.count(),
            'default_orderby': self.default_orderby,
            'columns': self.columns,
            'rows': [],
            'error': "ok",
        }

        try:
            for row in page.object_list:
                #Use collection to maintain the order
                required_data = collections.OrderedDict()

                for col in self.columns:
                    field = col['field_name']
                    if not field:
                        field = col['static_data_name']
                    if not field:
                        raise Exception(
                            "Must supply a field_name or static_data_name for column %s.%s"
                            % (self.__class__.__name__, col))
                    # Check if we need to process some static data
                    if "static_data_name" in col and col['static_data_name']:
                        required_data[
                            "static:%s" %
                            col['static_data_name']] = self.render_static_data(
                                col['static_data_template'], row)

                        # Overwrite the field_name with static_data_name
                        # so that this can be used as the html class name

                        col['field_name'] = col['static_data_name']

                    # compute the computation on the raw data if needed
                    model_data = row
                    if col['computation']:
                        model_data = col['computation'](row)
                    else:
                        # Traverse to any foriegn key in the object hierachy
                        for subfield in field.split("__"):
                            if hasattr(model_data, subfield):
                                model_data = getattr(model_data, subfield)
                        # The field could be a function on the model so check
                        # If it is then call it
                        if isinstance(model_data, types.MethodType):
                            model_data = model_data()

                    required_data[col['field_name']] = model_data

                data['rows'].append(required_data)

        except FieldError:
            # pass  it to the user - programming-error here
            raise
        data = json.dumps(data, indent=2, default=objtojson)
        cache.set(cache_name, data, 60 * 30)

        return data
Code Example #41
File: models.py  Project: gopinath81/vmss
 def invalidate_cache_for_course(cls, course_key):
     """Invalidate the cache. """
     cache_key = cls.CACHE_KEY.format(course_key=course_key)
     cache.delete(cache_key)
     log.info("Invalidated country access list for course %s", course_key)
Code Example #42
 def cancel(self):
     cache.delete(self.version_key)
     newfiles = dict([(k, v) for (k, v) in self.files().items()
                      if v.get('owner') != 'bulk'])
     cache.set(self.file_key, newfiles)
Code Example #43
File: core.py  Project: DoTamKma/debug-edx-platform
def get_instance(model, instance_or_pk, timeout=None, using=None):
    """
    Returns the ``model`` instance with a primary key of ``instance_or_pk``.

    If the data is cached it will be returned from there, otherwise the regular
    Django ORM is queried for this instance and the data stored in the cache.

    If omitted, the timeout value defaults to
    ``settings.CACHE_TOOLBOX_DEFAULT_TIMEOUT`` instead of 0 (zero).

    Example::

        >>> get_instance(User, 1) # Cache miss
        <User: lamby>
        >>> get_instance(User, 1) # Cache hit
        <User: lamby>
        >>> User.objects.get(pk=1) == get_instance(User, 1)
        True

    """
    primary_key = getattr(instance_or_pk, 'pk', instance_or_pk)
    key = instance_key(model, instance_or_pk)
    data = cache.get(key)

    if data is not None:
        try:
            # Try and construct instance from dictionary
            instance = model(pk=primary_key, **data)

            # Ensure instance knows that it already exists in the database,
            # otherwise we will fail any uniqueness checks when saving the
            # instance.
            instance._state.adding = False  # pylint: disable=protected-access

            # Specify database so that instance is setup correctly. We don't
            # namespace cached objects by their origin database, however.
            instance._state.db = using or DEFAULT_DB_ALIAS  # pylint: disable=protected-access

            return instance
        except:  # pylint: disable=bare-except
            # Error when deserialising - remove from the cache; we will
            # fallback and return the underlying instance
            cache.delete(key)

    # Use the default manager so we are never filtered by a .get_queryset()

    instance = model._default_manager.using(using).get(pk=primary_key)  # pylint: disable=protected-access

    data = {}
    for field in instance._meta.fields:
        # Harmless to save, but saves space in the dictionary - we already know
        # the primary key when we lookup
        if field.primary_key:
            continue

        if field.get_internal_type() == 'FileField':
            # Avoid problems with serializing FileFields
            # by only serializing the file name
            file_value = getattr(instance, field.attname)
            data[field.attname] = file_value.name
        else:
            data[field.attname] = getattr(instance, field.attname)

    if timeout is None:
        timeout = app_settings.CACHE_TOOLBOX_DEFAULT_TIMEOUT

    cache.set(key, data, timeout)

    return instance
Code Example #44
File: pushlog.py  Project: xeniorac/treeherder
    def run(self,
            source_url,
            repository_name,
            changeset=None,
            last_push_id=None):
        cache_key = '{}:last_push_id'.format(repository_name)
        if not last_push_id:
            # get the last object seen from cache. this will
            # reduce the number of pushes processed every time
            last_push_id = cache.get(cache_key)

        if not changeset and last_push_id:
            startid_url = "{}&startID={}".format(source_url, last_push_id)
            logger.info(
                "Extracted last push for '%s', '%s', from cache, "
                "attempting to get changes only from that point at: %s",
                repository_name, last_push_id, startid_url)
            # Use the cached ``last_push_id`` value (saved from the last time
            # this API was called) for this repo.  Use that value as the
            # ``startID`` to get all new pushes from that point forward.
            extracted_content = self.extract(startid_url)

            if extracted_content['lastpushid'] < last_push_id:
                # Push IDs from Mercurial are incremental.  If we cached a value
                # from one call to this API, and a subsequent call told us that
                # the ``lastpushid`` is LOWER than the one we have cached, then
                # the Mercurial IDs were reset.
                # In this circumstance, we can't rely on the cached id, so must
                # throw it out and get the latest 10 pushes.
                logger.warning(
                    "Got a ``lastpushid`` value of %s lower than the cached value of %s "
                    "due to Mercurial repo reset. Getting latest changes for '%s' instead",
                    extracted_content['lastpushid'], last_push_id,
                    repository_name)
                cache.delete(cache_key)
                extracted_content = self.extract(source_url)
        else:
            if changeset:
                logger.info(
                    "Getting all pushes for '%s' corresponding to "
                    "changeset '%s'", repository_name, changeset)
                extracted_content = self.extract(source_url + "&changeset=" +
                                                 changeset)
            else:
                logger.warning(
                    "Unable to get last push from cache for '%s', "
                    "getting all pushes", repository_name)
                extracted_content = self.extract(source_url)

        pushes = extracted_content['pushes']

        # `pushes` could be empty if there are no new ones since we last fetched
        if not pushes:
            return None

        last_push_id = max(map(int, pushes.keys()))
        last_push = pushes[str(last_push_id)]
        top_revision = last_push["changesets"][-1]["node"]

        errors = []
        repository = Repository.objects.get(name=repository_name)

        for push in pushes.values():
            if not push['changesets']:
                # A push without commits means it was marked as obsolete (see bug 1286426).
                # Without them it's not possible to calculate the push revision required for ingestion.
                continue

            try:
                store_push(repository, self.transform_push(push))
            except Exception:
                newrelic.agent.record_exception()
                errors.append({
                    "project": repository,
                    "collection": "result_set",
                    "message": traceback.format_exc()
                })

        if errors:
            raise CollectionNotStoredException(errors)

        if not changeset:
            # only cache the last push if we're not fetching a specific changeset
            cache.set(cache_key, last_push_id, ONE_WEEK_IN_SECONDS)

        return top_revision
Code Example #45
 def save(self, **kwargs):
     r = super(CachedItem, self).save(**kwargs)
     if hasattr(self, self.CACHE_PART) and getattr(self, self.CACHE_PART):
         key = self._get_cache_key(self._cache_key_part())
         cache.delete(key)
     return r
Code Example #46
File: cache.py  Project: devs1991/test_edx_docmode
 def delete(self, key):
     cache.delete(key)
Code Example #47
 def save_model(self, request, obj, form, change):
     obj.save()
     generate_static_index.delay()
     cache.delete('index_context_data')
Code Example #48
 def delete(self):
     key = self._get_cache_key(self._cache_key_part())
     cache.delete(key)
     return super(CachedItem, self).delete()
Code Example #49
 def remove(self):
     cache.delete(self.name)
Code Example #50
 def clear_cache(self, athlete_id):
     """Clear the session's cached results for a single tag."""
     cache.delete(('ts_%i_athlete_%i_results' % (self.id, athlete_id)))
Code Example #51
 def expired_reset_password_token(cls, token):
     key = cls.CACHE_KEY_USER_RESET_PASSWORD_PREFIX.format(token)
     cache.delete(key)
Code Example #52
 def delete_model(self, request, obj):
     obj.delete()
     generate_static_index.delay()
     cache.delete('index_context_data')
Code Example #53
 def delete(self, *args, **kwargs):
     with transaction.atomic():
         super().delete(*args, **kwargs)
         transaction.on_commit(
             lambda: cache.delete('user_has_positions:%d' % self.owner_id))
Code Example #54
 def expire_user_cache(self):
     key = self.user_cache_key_prefix.format(self.id)
     cache.delete(key)
Code Example #55
File: models.py  Project: xuxiao19910803/edx-platform
def invalidate_credit_courses_cache(sender, **kwargs):  # pylint: disable=unused-argument
    """Invalidate the cache of credit courses. """
    cache.delete(CreditCourse.CREDIT_COURSES_CACHE_KEY)
Code Example #56
 def save(self, *args, **kwargs):
     super(BaseBlock, self).save(*args, **kwargs)
     cache.delete('%s%s' % ('mezzanine_blocks', self.slug))
Code Example #57
File: throttle.py  Project: handshake/django-tastypie
 def tearDown(self):
     cache.delete('daniel_accesses')
     cache.delete('cody_accesses')
Code Example #58
 def invalidate(self, language=None):
     """Invalidate local and cache data."""
     self._data = {}
     cache.delete(self.cache_key)
Code Example #59
File: base.py  Project: zornil/jumpserver
 def expire_assets_amount(self):
     cache_key = self.ASSETS_AMOUNT_CACHE_KEY.format(self.id)
     cache.delete(cache_key)
Code Example #60
File: models.py  Project: xuxiao19910803/edx-platform
def invalidate_provider_cache(sender, **kwargs):  # pylint: disable=unused-argument
    """Invalidate the cache of credit providers. """
    cache.delete(CreditProvider.CREDIT_PROVIDERS_CACHE_KEY)