def handle(self, **options):
    """Run the data-import pipeline.

    With ``--full`` every importer runs and, afterwards, the ballot/postcode
    caches are purged and the host "dirty" marker file is removed. Without
    it, only a recent-people import runs.
    """
    if options["full"]:
        # Full pipeline: order matters — elections/posts/parties must
        # exist before people and CVs are imported.
        commands = [
            ("import_elections",),
            ("import_posts",),
            ("import_parties",),
            ("import_people",),
            ("import_cvs",),
        ]
    else:
        # Incremental run: only refresh recently changed people.
        commands = [("import_people", "--recent")]
    for command in commands:
        print(" ".join(command))
        call_command(*command)
    if options["full"]:
        # Delete the cache on a full import
        # (delete_pattern only exists on redis-backed caches).
        if hasattr(cache, "delete_pattern"):
            for fmt in (
                POLLING_STATIONS_KEY_FMT,
                POSTCODE_TO_BALLOT_KEY_FMT,
                PEOPLE_FOR_BALLOT_KEY_FMT,
            ):
                cache.delete_pattern(fmt.format("*"))
        # Unset dirty file if it exists
        if getattr(settings, "CHECK_HOST_DIRTY", False):
            dirty_file_path = os.path.expanduser(
                getattr(settings, "DIRTY_FILE_PATH")
            )
            if os.path.exists(dirty_file_path):
                os.remove(dirty_file_path)
def invalidate_easy_cache(source_image):
    """Invalidate easy-thumbnails cache entries for *source_image*.

    Deletes both the 'source' and 'thumbnail' cache-key patterns derived
    from the image path. A falsy or non-string argument is now ignored:
    the previous version left ``keys`` empty in that case and then crashed
    with IndexError on ``keys[0]``.
    """
    if not (source_image and isinstance(source_image, six.string_types)):
        return
    for key_type in ('source', 'thumbnail'):
        cache.delete_pattern(get_cache_key(source_image + '*', key_type))
def perform_update(self, serializer):
    """Reset the rate-limit and block cache entries for the updated user."""
    target = self.get_object()
    name = target.username if target else ''
    # Limit keys are wildcarded per user; the block key is a single entry.
    cache.delete_pattern(self.key_prefix_limit.format(name, '*'))
    cache.delete(self.key_prefix_block.format(name))
def _invalidate_container_cache(self):
    """Best-effort removal of all cache entries mentioning this container.

    Cache-bust failures are deliberately ignored, but we catch
    ``Exception`` rather than using a bare ``except`` so that
    SystemExit/KeyboardInterrupt still propagate.
    """
    key = CONTAINER_KEY.format(self.name)
    try:
        cache.delete_pattern('*{0}*'.format(key))
    except Exception:
        # ignore cache bust errors
        pass
def invalidate_cache(self):
    '''
    Remove any cached values associated with this scenario.
    Warning: additional caches will need to be added to this method
    '''
    # Unsaved instances have no id and therefore nothing cached yet.
    if not self.id:
        return True
    # depends on django-redis as the cache backend!!!
    # assumes that all caches associated with this scenario contain <uid>_*
    key_pattern = "%s_*" % self.uid
    cache.delete_pattern(key_pattern)
    # remove the xml file (may already be gone — that's fine)
    try:
        os.remove(self.mapnik_xml_path)
    except OSError:
        pass
    # delete the tiles directly (and any remaining ) — raw redis access,
    # bypassing the Django cache layer
    [redisconn.delete(x) for x in redisconn.keys(pattern="*%s*" % self.uid)]
    # remove the PlanningUnitShapes
    PlanningUnitShapes.objects.filter(stamp=self.id).delete()
    return True
def cache_delete_pattern_or_all(pattern):
    """Delete cache keys matching *pattern*; flush everything when the
    backend has no pattern-delete support."""
    deleter = getattr(cache, 'delete_pattern', None)
    if deleter is None:
        # Backend cannot delete selectively — wipe the whole cache.
        cache.clear()
    else:
        deleter(pattern)
def expire_nodes_full_value(cls, nodes=None):
    """Expire cached full values — per node when *nodes* is given,
    otherwise for every node via a wildcard pattern."""
    if nodes:
        for item in nodes:
            item.expire_full_value()
    else:
        prefix = cls._full_value_cache_key.format('*')
        cache.delete_pattern(prefix + '*')
def viewcountupdate():
    """
    Hourly task: flush the cached per-post view counters into the
    database in a single transaction.

    Fixes over the previous version:
    - ``transaction.rollback()`` inside the ``except`` was removed:
      ``atomic()`` has already rolled back at that point, and calling
      ``rollback()`` there raises TransactionManagementError on modern
      Django.
    - counters that expired between ``keys()`` and ``get()`` (None) are
      skipped instead of crashing the arithmetic.
    - ``logger.warn`` (deprecated alias) replaced by ``logger.warning``.
    """
    # This is the prefix we are going to use to distinguish the cache
    # keys we need for the view counters.
    PREFIX = settings.CACHE_PREFIX
    logger.warning('Entering viewcountupdate...')
    with redis_cache.lock('lock'):
        keys = redis_cache.keys(PREFIX + "*")
        if keys:
            try:
                with transaction.atomic():
                    for key in keys:
                        post_id = key.split('_')[1]
                        hourly_viewcount = redis_cache.get(key)
                        if not hourly_viewcount:
                            # Counter expired/emptied since keys() ran.
                            continue
                        try:
                            post = Posts.objects.get(id=post_id)
                        except ObjectDoesNotExist:
                            continue
                        old_viewcount = post.viewcount
                        post.viewcount = post.viewcount + hourly_viewcount
                        new_viewcount = post.viewcount
                        logger.warning(
                            'Updated: id = {0}. Oldcount = {1} -> Newcount = {2} '.format(
                                post_id, old_viewcount, new_viewcount))
                        post.save(update_fields=['viewcount'])
            except IntegrityError:
                # atomic() already rolled the transaction back; just log.
                logger.warning('Rolling back...')
            # Drop all counters regardless of outcome, as before.
            redis_cache.delete_pattern(PREFIX + "*")
    logger.warning('Exiting viewcountupdate...')
def expire_nodes_assets_amount(cls, nodes=None):
    """Expire cached asset counts — per node when *nodes* is given,
    otherwise for all nodes at once."""
    if not nodes:
        cache.delete_pattern(cls._assets_amount_cache_key.format('*'))
        return
    for node in nodes:
        node.expire_assets_amount()
def invalidate_language_list_cache(sender, instance, **kwargs):
    """Drop the cached language dict when a relevant model changes."""
    # XXX: maybe use custom signals or simple function calls?
    if instance.__class__.__name__ in ('Language', 'TranslationProject'):
        cache.delete_pattern(
            make_method_key('LiveLanguageManager', 'cached_dict', '*'))
def clear_cache(sender, **kwargs):
    """Purge cached pages, falling back to a full cache flush when the
    backend cannot delete by pattern."""
    if not hasattr(cache, 'delete_pattern'):
        cache.clear()
        return
    cache.delete_pattern('views.decorators.cache.cache*')
def _run_job(self, job):
    """Run *job* and refresh its cached copy with the latest run state.

    The previous version bound the run result to an unused ``ret``
    variable; it is dropped here. (Original comment, translated: cache
    the job's latest execution time.)
    """
    job.run()
    # Replace the stale pickled job for this task with the fresh one.
    for key in cache.keys("qa_paltform_loop_jobs_*"):
        old_job = pickle.loads(cache.get(key))
        if job.task_id == old_job.task_id:
            cache.delete_pattern(key)
            cache.set(key, pickle.dumps(job), timeout=None)
def purge_related(key):
    '''Remove all cached keys containing uuids present in cache_key'''
    for piece in key.split(':'):
        uuid_value = ValidateHandeler.is_valid_uuid(piece)
        if uuid_value is not None:
            cache.delete_pattern(f'*{uuid_value}*')
def save(self, *args, **kwargs):
    """Save the group, invalidating the cache entry keyed on the
    previous (pre-save) group name."""
    if self.pk:
        # Look up the stored row so renames bust the *old* key.
        previous = ProjectGroup.objects.get(pk=self.pk)
        cache.delete_pattern('project_group_%s' % previous.group_name)
    saved = super(ProjectGroup, self).save(*args, **kwargs)
    self.add_projects_to_cache()
    return saved
def release_lock(user: Model, resource: Model) -> None:
    """Release the lock on *resource* if *user* is the one holding it.

    Raises UserDoesNotOwnTheLock when a different user holds the lock;
    does nothing when no lock exists.
    """
    key = f'lock:{resource.pk}'
    owner_pk = cache.get(key)
    if not owner_pk:
        return
    if user.pk != owner_pk:
        raise UserDoesNotOwnTheLock('Este recurso no fue bloqueado por el usuario actual.')
    cache.delete_pattern(key)
def set_key_data_empty_on_update(cls, key):
    """
    method to remove the response cache object if related model signal
    is added and remove cache.

    Returns True when the key matched and was purged, False otherwise.
    """
    if cls.cache_keys not in key:
        return False
    cache.delete_pattern(key + "*")
    return True
def invalidate_language_list_cache(**kwargs):
    """Drop the cached language dict when a Language or
    TranslationProject instance changes."""
    # XXX: maybe use custom signals or simple function calls?
    obj = kwargs["instance"]
    if obj.__class__.__name__ in ('Language', 'TranslationProject'):
        cache.delete_pattern(
            make_method_key('LiveLanguageManager', 'cached_dict', '*'))
def invalidate_cache(self):
    """
    Method ``ProductCommonSerializer.render_html()`` caches the rendered
    HTML snippets. Invalidate this HTML snippet after changing relevant
    parts of the product.
    """
    if apps.get_app_config('shop').cache_supporting_wildcard:
        cache.delete_pattern('product:{}|*'.format(self.id))
def invalidate_accessible_projects_cache(sender, instance, **kwargs):
    """Flush every project-related cache entry when a relevant model
    instance is saved or deleted."""
    # XXX: maybe use custom signals or simple function calls?
    relevant = ('Project', 'TranslationProject', 'PermissionSet')
    if instance.__class__.__name__ not in relevant:
        return
    cache.delete_pattern(make_method_key('Project', 'cached_dict', '*'))
    cache.delete('projects:all')
    cache.delete_pattern('projects:accessible:*')
def invalidate_cache(self, product):
    """
    The method ``ProductCommonSerializer.render_html`` caches the
    rendered HTML snippets. Invalidate them after changing something in
    the product.
    """
    pattern = 'product:{}|*'.format(product.id)
    try:
        cache.delete_pattern(pattern)
    except AttributeError:
        # Backend has no delete_pattern support; nothing to invalidate.
        pass
def expire_cache(self):
    """Drop every cache entry tied to this object (its nodes, assets,
    system users, ...) so later reads cannot see stale data."""
    pattern = self.CACHE_KEY.format(str(self.object.id), '*')
    cache.delete_pattern(pattern)
    self.expire_cache_meta()
def get(self, request, token):
    """Activate the account associated with *token*, if it is still in
    the cache; otherwise report the token as expired."""
    pending_user = cache.get(token)
    if not pending_user:
        return Response({"message": "Your activation token is expired"}, status=404)
    pending_user.save()
    cache.delete_pattern(token)
    return Response({"message": "Created"}, status=200)
def clear_caches_view(request):
    """Clear all temporary ("tmp_*") cache entries."""
    try:
        cache.delete_pattern("tmp_*")
    except Exception as exp:
        return http_response(request, statuscode=ERRORCODE.FAILED, msg=exp)
    return http_response(request, statuscode=ERRORCODE.SUCCESS)
def cancelJob(self, task_id):
    """Remove any cached loop job whose task id matches *task_id*."""
    with cache.lock("qa_test_platform_cancel"):
        for key in cache.keys("qa_paltform_loop_jobs_*"):
            cached_job = pickle.loads(cache.get(key))
            if cached_job.task_id == task_id:
                cache.delete_pattern(key)
def exec(name, jobs):
    """Run every crawl *job*: fetch its URL, run its extraction code,
    and reconcile the extracted items' priorities against the DB.

    NOTE: this function shadows the ``exec`` builtin, so the inner call
    that ran ``job.extract_code`` actually resolved to this function
    itself — the resulting TypeError was swallowed by the broad except
    and extraction silently never ran. ``builtins.exec`` is used now.
    """
    import builtins

    logger.info('job {} begin...'.format(name))
    for job in jobs:
        logger.info('site job {} begin...'.format(job.site.title))
        request_config = json.loads(job.request_config)
        try:
            resp = requests.request(
                request_config.get('method', 'get'),
                request_config['url'],
                headers=request_config.get('headers', default_headers),
                json=request_config.get('json', None),
                timeout=100)
        except Exception as e:
            logger.error('site job {} request error: {}.'.format(
                job.site.title, str(e)))
            continue
        items = []
        try:
            # SECURITY: executes stored code; extract_code must be trusted.
            # The code is expected to read ``resp`` and append to ``items``.
            builtins.exec(job.extract_code)
        except Exception as e:
            logger.error('site job {} extract error: {}.'.format(
                job.site.title, str(e)))
            continue
        length = len(items)
        for r_index, i in enumerate(reversed(items)):
            index = length - r_index - 1
            item = Item.objects.filter(site_id=job.site.id,
                                       title=i['title']).first()
            if item and item.priority == index:
                # Already in the right slot — nothing to do.
                continue
            # Free the target slot before (re)assigning it.
            Item.objects.filter(site_id=job.site.id,
                                priority=index).update(priority=-1)
            if item and item.priority != index:
                item.priority = index
                item.save()
            else:
                Item.objects.create(
                    site_id=job.site.id,
                    priority=index,
                    title=i['title'],
                    url=i['url'],
                )
        # Demote anything beyond the freshly extracted list.
        Item.objects.filter(site_id=job.site.id,
                            priority__gte=length).update(priority=-1)
        logger.info('site job {} end.'.format(job.site.title))
    cache.delete_pattern('*sites_items_view*')
    logger.info('job {} end.'.format(name))
def expire_cache(self):
    """Remove every cache entry under this prefix so stale
    node/asset/system-user data cannot be served."""
    template = self.CACHE_KEY_PREFIX + '{obj_id}_*'
    cache.delete_pattern(template.format(obj_id='*'))
    self.expire_cache_meta()
def clear_cache_on_save(sender, instance, **kwargs): """ Clean current story from cache. """ if getattr(instance, '_dirty', None) is not None: return # ckey = # 'story.%s' % instance.short_url cache.delete_pattern('%s*' % get_cache_key(instance)) logger.debug('story@pre_save {pk:%s, short_url:%s} cache deleted.' % (instance.pk, instance.short_url))
def destroy(self, request, *args, **kwargs):
    """Delete the instance, drop its IP from the cached IP list and
    purge its per-IP cache entry.

    Fixes over the previous version: the local variable no longer
    shadows the ``list`` builtin, a missing 'iplist' cache entry no
    longer raises TypeError, and an IP absent from the list no longer
    raises ValueError.
    """
    instance = self.get_object()
    ip = instance.ip
    ip_list = cache.get('iplist') or []
    if ip in ip_list:
        ip_list.remove(ip)
        cache.set('iplist', ip_list, timeout=None)
    cache.delete_pattern(ip)
    self.perform_destroy(instance)
    return Response(status=status.HTTP_204_NO_CONTENT)
def put(self, request, *args, **kwargs):
    """Update a film, rejecting duplicate slugs and busting the cached
    film entries beforehand."""
    if self.queryset.filter(slug=request.data['slug']).exists():
        return Response(status=status.HTTP_409_CONFLICT)
    logger.info(
        'class FilmeDetailSlug - dentro do metodo PUT reseta cache key filme'
    )
    cache.delete_pattern("*filme*")
    return self.update(request, *args, **kwargs)
def installPluginTheme(sender, instance, created, *args, **kwargs):
    """Post-save hook: bust the core-init cache and, when a plugin/theme
    was just activated, record its installed version."""
    if hasattr(cache, 'delete_pattern'):
        cache.delete_pattern("*core_init*")
    if 'active' in instance.get_dirty_fields() and instance.active:
        # TODO: Install the Theme/Plugin, Maybe Run Migration or anything like this
        obj = sender.objects.get(pk=instance.pk)
        obj.current_version = obj.version
        obj.save()
def expire_all_user_tree_cache(cls):
    # Global user-tree cache expiry, rate-limited to once per minute.
    # Throttle marker: timestamp of the last global expiry run.
    expire_cache_key = "USER_TREE_EXPIRED_AT"
    latest_expired = cache.get(expire_cache_key, 0)
    now = time.time()
    if now - latest_expired < 60:
        # A global expiry already ran within the last minute — skip.
        return
    # Build a wildcard pattern from the per-user key template, then strip
    # the placeholder remnants ('_1') so the pattern matches every user.
    # NOTE(review): replace('_1', '') removes ALL '_1' substrings — it
    # presumably only hits the two dummy placeholders; confirm against
    # the template.
    key = cls.user_tree_cache_key.format('*', '1', '1')
    key = key.replace('_1', '')
    cache.delete_pattern(key)
    cache.set(expire_cache_key, now)
def blog_edit_view(request, item_id):
    """
    Blog edit view: render the edit form on GET, apply the update on
    POST and invalidate the article caches afterwards.
    :param request:
    :return:
    """
    article = Article.objects.filter(id=item_id).first()
    if not article:
        messages.warning(request, "此博客不存在")
        return HttpResponseRedirect(reverse('blog_list'))
    context = {
        "active_classes": ['.blog', '.blog_list'],
        "article": article,
        "auhtors": Author.objects.values("id", "name"),
        "classifications": Classification.objects.values("id", "name"),
        "tags": Tag.objects.values("id", "name"),
        "selected_tags": article.get_tags(),
        "blog_status": BlogStatus.CHOICES,
        "item_id": item_id,
    }
    if request.method == "GET":
        return render(request, "manager/edit_blog.html", context)
    if request.method == "POST":
        form = OperateBlogForm(request.POST)
        if not form.is_valid():
            messages.warning(request, "</br>".join(form_error(form)))
            return HttpResponseRedirect(reverse('blog_list'))
        try:
            article.set_tags(request.POST.getlist('tags'))
            Article.objects.filter(id=item_id).update(
                title=form.cleaned_data.get("title"),
                author_id=form.cleaned_data.get("author"),
                classification_id=form.cleaned_data.get("classification"),
                content=form.cleaned_data.get("content"),
                count=form.cleaned_data.get("count"),
                status=form.cleaned_data.get("status"),
                editor=form.cleaned_data.get("editor"),
                last_update=datetime.now(),
            )
            messages.success(request, '修改成功')
            # Invalidate cached article list and archive pages.
            cache.delete_pattern("tmp_articles")
            cache.delete_pattern("tmp_archive")
        except Exception as ex:
            messages.warning(request, ex)
        return HttpResponseRedirect(reverse('blog_list'))
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
    """Assign a unique slug on first save and bust the map caches."""
    if not self.pk:
        # New object: derive the slug from the title exactly once.
        self.slug = get_unique_slug(type(self), self.title)
    for pattern in ("*:map_view:" + self.slug, "*:maps"):
        cache.delete_pattern(pattern)
    super().save(force_insert, force_update, using, update_fields)
def _decorator(task, *args, **kwargs):
    # Refresh interviewer-scoped cache entries before running the task.
    # (Python 2 code — print statements kept as-is.)
    access = task.access
    if nps is None:
        # No specific patterns given: clear the whole per-store namespace
        # for this interviewer.
        cache.delete_pattern('/interviewer/%s/%s*'%(access.interviewer.pk, store))
        print 'refreshed cache for ', args, ' store: ', store
    else:
        # Clear one cache entry per supplied pattern suffix.
        for np in nps:
            cache.delete_pattern('/interviewer/%s/%s/%s'%(access.interviewer.pk, store, np))
            print 'refreshed cache for ', args, ' store: ', store, 'pattern: ', np
    result = func(task, *args, **kwargs)
    return result
def test_acquire_lock_success(fake_user, fake_resource):  # pylint: disable=redefined-outer-name
    """Acquiring a free lock stores the user's pk under the lock key."""
    lock_key = f'lock:{fake_resource.pk}'
    acquire_lock(fake_user, fake_resource)
    assert cache.get(lock_key) == fake_user.pk
    # Cleanup
    cache.delete_pattern(lock_key)
def save(self, *args, **kwargs):
    """Persist the singleton and invalidate its class-named cache keys;
    Group saves additionally invalidate the cached Setup entries."""
    super(SingletonModel, self).save(*args, **kwargs)
    name = "{}*".format(self.__class__.__name__.lower())
    log.warning("Delete {}, Status: {}".format(name, cache.delete_pattern(name)))
    if isinstance(self, Group):
        log.warning("Delete Setup")
        log.warning(
            cache.delete_pattern("{}*".format(Setup.__name__.lower())))
def post(self, request, *args, **kwargs):
    """Invalidate the domain cache for one account, or for all of them."""
    accountName = request.POST.get("accountName")
    if not accountName:
        return JsonResponse({"code": 1})
    if accountName == "全部":
        cache.delete_pattern("all_domain")
    else:
        cache.delete_pattern("domainName_{}".format(accountName))
    return JsonResponse({"code": 0})
def test_delete_pattern(self):
    """delete_pattern removes only the matching keys and reports whether
    anything was actually deleted."""
    for name in ("foo-aa", "foo-ab", "foo-bb", "foo-bc"):
        cache.set(name, "foo")
    assert bool(cache.delete_pattern("*foo-a*"))
    assert set(cache.keys("foo*")) == {"foo-bb", "foo-bc"}
    # A second run finds nothing left to delete.
    assert not bool(cache.delete_pattern("*foo-a*"))
def add_tag_view(request):
    """Create a new article tag and invalidate the cached tag list."""
    try:
        Tag.objects.create(name=request.POST.get('name'))
        messages.success(request, u'添加成功')
        cache.delete_pattern('tmp_tags')  # bust cached tag list
    except Exception as e:
        messages.error(request, u'添加失败: %s' % e)
    return HttpResponseRedirect(reverse('tag_list'))
def save(self, **kwargs):
    """
    Check self.project exists, bust the project cache and persist.
    :return object:
    """
    if self.project_id is None:
        raise ImproperlyConfigured(
            "Project foreign key field must be set in implementing class.")
    pattern_delete = getattr(cache, 'delete_pattern', None)
    if pattern_delete is not None:
        pattern_delete('project:%s:*' % self.project_id)
    super(ProjectContext, self).save(**kwargs)
def test_view_rate_limit(self, rf):
    """The sixth request from a single IP is answered with HTTP 429."""
    try:
        remote_addr = '8.8.8.8'
        for _ in range(5):
            SlackInvite.as_view()(rf.post(self.url, REMOTE_ADDR=remote_addr))
        response = SlackInvite.as_view()(rf.post(self.url, REMOTE_ADDR=remote_addr))
        assert response.status_code == 429
    finally:
        # Always reset the rate-limit counters for other tests.
        cache.delete_pattern(f'{settings.RATELIMIT_CACHE_PREFIX}*')
def test_cache_page_get(rf, fake_url, fake_view):  # pylint: disable=redefined-outer-name,invalid-name
    """A GET through the decorated view stores the response body under
    the per-user cache-page key."""
    decorated_view = cache_page(60 * 60)(fake_view)
    request = rf.get(fake_url)
    request.user = User('foo', '*****@*****.**', 'bar')
    key = f'cache-page:{request.user}:{request.method}:{slugify(request.get_full_path())}'
    response = decorated_view(request)
    assert cache.get(key) == response.content
    # Cleanup
    cache.delete_pattern(key)
def test_acquire_lock_raises(fake_user, fake_resource):  # pylint: disable=redefined-outer-name
    """Acquiring an already-held lock raises ResourceAlreadyBlocked."""
    lock_key = f'lock:{fake_resource.pk}'
    cache.set(lock_key, fake_user.pk)  # pre-existing lock
    with pytest.raises(ResourceAlreadyBlocked):
        acquire_lock(fake_user, fake_resource)
    cache.delete_pattern(lock_key)  # cleanup
def test_release_lock_raises(fake_user, fake_resource):  # pylint: disable=redefined-outer-name
    """Releasing a lock held by someone else raises UserDoesNotOwnTheLock."""
    lock_key = f'lock:{fake_resource.pk}'
    cache.set(lock_key, fake_user.pk)  # lock held by the original pk
    fake_user.pk = '789'  # now act as a different user
    with pytest.raises(UserDoesNotOwnTheLock):
        release_lock(fake_user, fake_resource)
    cache.delete_pattern(lock_key)  # cleanup
def test_view_rate_limit(self, rf):
    """The 11th request from a single IP raises Ratelimited."""
    try:
        remote_addr = '8.8.8.8'
        for _ in range(10):
            SlackInvite.as_view()(rf.post(self.url, REMOTE_ADDR=remote_addr))
        with pytest.raises(Ratelimited):
            SlackInvite.as_view()(rf.post(self.url, REMOTE_ADDR=remote_addr))
    finally:
        # Always reset the rate-limit counters for other tests.
        cache.delete_pattern(f'{settings.RATELIMIT_CACHE_PREFIX}*')
def monkey_patch_settings(sender, **kwargs):
    # Patch LazySettings so selected settings are served from / mirrored
    # into the shared cache, letting runtime setting changes propagate
    # across processes.
    cache_key_prefix = '_SETTING_'
    # Settings that SHOULD be read from the cache despite being built-in.
    custom_need_cache_settings = [
        'AUTHENTICATION_BACKENDS'
    ]
    # Settings that must NEVER be served from the cache.
    custom_no_cache_settings = [
        'BASE_DIR', 'VERSION', 'AUTH_OPENID'
    ]
    # Everything in Django's defaults stays uncached, except EMAIL_*,
    # SESSION_REDIS* and the explicit need-cache list above.
    django_settings = dir(global_settings)
    uncached_settings = [i for i in django_settings if i.isupper()]
    uncached_settings = [i for i in uncached_settings if not i.startswith('EMAIL')]
    uncached_settings = [i for i in uncached_settings if not i.startswith('SESSION_REDIS')]
    uncached_settings = [i for i in uncached_settings if i not in custom_need_cache_settings]
    uncached_settings.extend(custom_no_cache_settings)

    def monkey_patch_getattr(self, name):
        # Serve cacheable settings from the cache when a value is present.
        if name not in uncached_settings:
            key = cache_key_prefix + name
            cached = cache.get(key)
            if cached is not None:
                return cached
        if self._wrapped is empty:
            self._setup(name)
        val = getattr(self._wrapped, name)
        return val

    def monkey_patch_setattr(self, name, value):
        # Mirror every assignment into the cache (no expiry).
        key = cache_key_prefix + name
        cache.set(key, value, None)
        if name == '_wrapped':
            self.__dict__.clear()
        else:
            self.__dict__.pop(name, None)
        super(LazySettings, self).__setattr__(name, value)

    def monkey_patch_delattr(self, name):
        # Remove both the attribute and its cached mirror.
        super(LazySettings, self).__delattr__(name)
        self.__dict__.pop(name, None)
        key = cache_key_prefix + name
        cache.delete(key)

    try:
        # Clean slate, install the patches, then repopulate the cache
        # from the persisted Setting rows.
        cache.delete_pattern(cache_key_prefix+'*')
        LazySettings.__getattr__ = monkey_patch_getattr
        LazySettings.__setattr__ = monkey_patch_setattr
        LazySettings.__delattr__ = monkey_patch_delattr
        Setting.refresh_all_settings()
    except (ProgrammingError, OperationalError):
        # Database not ready yet (e.g. during initial migrate) — skip.
        pass
def love_update():
    """Flush the cached per-video 'love' click counters into the DB.

    Returns False when there is no key index in the cache, True after a
    successful flush.

    Fixes over the previous version: the bare no-op ``video.love``
    expression statement was removed, and a missing/expired per-video
    counter (``cache.get`` returning None) no longer raises TypeError.
    """
    keys = cache.get(CACHE_KEY_VIDEO_KEYS)
    if keys is None:
        return False
    for video_id in keys:
        video = Video.objects.get(id=video_id)
        cacheKey = "%s_love_click_id_%s" % (CACHE_KEY_VIDEO, video_id)
        data = cache.get(cacheKey)
        if data and data['love'] >= 1:
            video.love += data['love']
            video.save()
            cache.delete(cacheKey)
    cache.delete(CACHE_KEY_VIDEO_KEYS)
    cache.delete_pattern(CACHE_KEY_VIDEO_LIST + "*")  # clear list cache
    return True
def full_invalidate_cache(self):
    # Invalidate every cached property, cached method and cached view
    # fragment attached to this instance.
    if settings.CACHE_ENABLED:
        prop_keys = []
        meth_keys = []
        # Walk the full MRO so cached attributes defined on base classes
        # are collected as well.
        for c in type(self).mro():
            for k, v in c.__dict__.items():
                if isinstance(v, CachedProperty):
                    prop_keys.append(k)
                elif hasattr(v, '__is_cached_method__'):
                    meth_keys.append(k)
        # Clean first, then invalidate without deleting again.
        for attr_name in set(prop_keys):
            self.clean_cached_property(attr_name)
        for attr_name in set(prop_keys):
            self.invalidate_cached_property(attr_name, delete=False)
        # Cached methods are keyed per class-path/attr/pk with a
        # wildcard tail for the argument hash.
        for attr_name in set(meth_keys):
            cache.delete_pattern(settings.CACHED_METHOD_KEY_TEMPLATE.format(
                get_class_path(type(self)), attr_name, self.pk) + '*')
        # Partial view caches registered for this instance.
        for cls_name, func_name in self.cached_views:
            cache.delete_pattern(settings.CACHED_VIEW_PARTIAL_TEMPLATE_PREFIX.format(cls_name, func_name, self.pk) + '*')
def persist_cached(self, cache_pattern=None):
    """
    Fetches all requests stored in the cache and push them to the
    database. Returns the persisted requests.
    """
    if not settings.REQUEST_USE_CACHE:
        return []
    # Default to every key under the configured request prefix.
    pattern = cache_pattern or '%s*' % settings.REQUEST_CACHE_PREFIX
    cached_keys = cache.keys(pattern)
    pending = cache.get_many(cached_keys).values()
    created = self.bulk_create(pending)
    # Requests are now persisted — drop them from the cache.
    cache.delete_pattern(pattern)
    return created
def remove_cache_and_lock_keys(apps, schema_editor):
    # Data-migration helper (Python 2 — print statements kept as-is).
    try:
        # clear redis cache and locks to allow the next task to fetch all the contacts
        cache.delete_pattern('last:fetch_contacts:*')
        cache.delete_pattern('fetch_contacts')
        cache.delete_pattern('fetch_contacts*')
        print "Removed all cache and lock keys for fetch contacts"
    except AttributeError as e:
        # Cache backend lacks delete_pattern (non-redis) — nothing to do.
        print e
def remove_cache_and_lock_keys(apps, schema_editor):
    """Clear the fetch-contacts cache entries and lock keys so the next
    task run fetches every contact again."""
    patterns = (
        "last:fetch_contacts:*",
        "fetch_contacts",
        "fetch_contacts*",
    )
    try:
        for pattern in patterns:
            cache.delete_pattern(pattern)
        logger.info("Removed all cache and lock keys for fetch contacts")
    except AttributeError as e:
        # Cache backend lacks delete_pattern support — nothing to clean.
        logger.info(e)
def handle(self, *args, **options):
    """Management command entry point: wipe every cache key."""
    cache.delete_pattern("*")  # match everything
    self.stdout.write("All cache are cleared.")
def invalidate_portfolio_cache(sender, **kwargs):
    """Signal handler: drop every cached portfolio entry."""
    cache.delete_pattern("portfolio.*")
def delete_cache():
    """Remove every cached bell-schedule entry."""
    cache.delete_pattern("bell_schedule:*")
    logger.debug("Deleted bell schedule cache.")
def _invalidate_container_cache(self):
    """Remove all cache entries mentioning this container's key."""
    container_key = CONTAINER_KEY.format(self.name)
    cache.delete_pattern('*{0}*'.format(container_key))
def expire_full_value(self):
    """Expire this node's cached full value and any derived entries."""
    pattern = self._full_value_cache_key.format(self.key) + '*'
    cache.delete_pattern(pattern)