def decr_comments_count(sender, instance, **kwargs):
    """Signal handler: decrement the parent tweet's comment counter.

    Applies the decrement atomically in the database via an F-expression,
    then keeps the Redis-cached counter in step.
    """
    from django.db.models import F
    from tweets.models import Tweet
    from util.redis_helper import RedisHelper

    parent = Tweet.objects.filter(id=instance.tweet_id)
    parent.update(comments_count=F('comments_count') - 1)
    RedisHelper.decr_count(instance.tweet, 'comments_count')
def incr_comments_count(sender, instance, created, **kwargs):
    """Signal handler: increment the parent tweet's comment counter.

    Only a newly created comment counts — updates to an existing
    comment are ignored.
    """
    from django.db.models import F
    from tweets.models import Tweet
    from util.redis_helper import RedisHelper

    if not created:
        return

    parent = Tweet.objects.filter(id=instance.tweet_id)
    parent.update(comments_count=F('comments_count') + 1)
    RedisHelper.incr_count(instance.tweet, 'comments_count')
def decr_likes_count(sender, instance, **kwargs):
    """Signal handler: decrement the likes counter of the liked tweet.

    Likes targeting anything other than a Tweet are ignored for now.
    """
    from django.db.models import F
    from tweets.models import Tweet
    from util.redis_helper import RedisHelper

    # TODO: support likes for comments.
    if instance.content_type.model_class() != Tweet:
        return

    liked = Tweet.objects.filter(id=instance.object_id)
    liked.update(likes_count=F('likes_count') - 1)
    RedisHelper.decr_count(instance.content_object, 'likes_count')
def task_thread_submit(self, item, redis_key, task_dict):
    """Process one queue item on a worker thread and group the result
    by mailbox (userName).

    :param item: queue payload — a dict, or a JSON string decoding to one.
    :param redis_key: Redis list the item came from; recorded on the
        result and used as the rollback target on business failure.
    :param task_dict: accumulator mapping userName -> list of result
        items; mutated in place.
    :return: None
    """
    try:
        # Payloads may arrive either already decoded or as raw JSON text.
        if isinstance(item, str):
            item = json.loads(item)
        result_item = self.observer(item)
        # Remember the originating queue so the item can be rolled back
        # to the right Redis list later.
        result_item["redis_key"] = redis_key
        user_name = result_item.get("userName")
        # Group by mailbox name; setdefault handles both the first-seen
        # and the already-present cases in a single call.
        task_dict.setdefault(user_name, []).append(result_item)
    except BusinessExcetion as e:
        # Business-level failure: log and push the item back for retry.
        self.logger.error("【服务进程】处理队列异常,触发回滚,异常信息为:%s" % e)
        RedisHelper().lpush_item(item, redis_key)
    except Exception:
        self.logger.error("【服务进程】读取任务队列异常,信息为:%s" % traceback.format_exc())
def task_thread_submit(self, task_queue_item):
    """Send one e-mail task on a worker thread.

    :param task_queue_item: dict describing the mail task; its
        ``serverIp`` field is rewritten to this host's public IP when
        they differ. The outcome is only logged, nothing is returned.
    :return: None
    """
    ret_code = None
    ret_msg = None
    # Keep an untouched copy so the item can be re-queued on rollback.
    task_item = copy.deepcopy(task_queue_item)
    try:
        # Stamp the item with the server actually handling it.
        if task_queue_item.get("serverIp") != self.public_ip:
            task_queue_item["serverIp"] = self.public_ip
        self.logger.info("【消费】读取到的队列项信息为:%s" % task_queue_item)
        # Delegate the actual send and pick up its status code.
        ret_code = self.observer(task_queue_item).get("send_status", "1")
    except BusinessExcetion as e:
        # Business failure: log, record the error, and roll the original
        # item back onto its Redis queue.
        self.logger.error("【消费】读取任务队列异常,开始信息回滚,信息为:%s" % e)
        ret_code = e.retCode
        ret_msg = e.retMsg
        RedisHelper().lpush_item(task_item, task_item.get("redis_key"))
    except Exception:
        self.logger.error("【消费】读取任务队列异常,信息为:%s" % traceback.format_exc())
        ret_code = "9999"
        ret_msg = "其他异常"
    finally:
        self.logger.info("【消费】多线程发送邮件结果为:%s" % {
            "retCode": ret_code,
            "retMsg": ret_msg,
            "send_server": self.public_ip,
        })
def redis_queue_manager_helper(self, server_dict):
    """Producer: push every pending item in *server_dict* onto its Redis list.

    :param server_dict: mapping of redis key -> list of items to enqueue;
        keys with empty/falsy value lists are skipped.
    :return: None
    """
    try:
        redis_helper = RedisHelper()
        for redis_key, items in server_dict.items():
            if not items:
                continue
            # Plain loop, not a throwaway list comprehension:
            # lpush_item is called purely for its side effect.
            for item in items:
                redis_helper.lpush_item(item, redis_key)
    except Exception:
        self.logger.error("【服务进程】创建服务进程异常,异常信息为:%s" % traceback.format_exc())
def update(self):
    """Push extracted e-mail items onto their per-level Redis task queues.

    Items whose status marks them as blacklisted ("4") or as an invalid
    address ("1") are skipped.
    :return: None
    """
    result = self.subject.email_list
    rel_id = self.subject.rel_id
    self.logger.info("【数据提取】4、PUSH任务队列")
    # Hoisted out of the loop: one helper instance serves the whole batch
    # instead of constructing a new RedisHelper per item.
    redis_helper = RedisHelper()
    for item in result:
        # Skip blacklisted ("4") and invalid-address ("1") entries.
        if item.get("status") in ("4", "1"):
            continue
        level = item.get("level", "1")
        original_batch_code = item.get("originalBatchCode")
        channel = "%s_%s_pagoda_%s" % (original_batch_code, rel_id, level)
        redis_helper.lpush_item(item, channel)
def get_comments_count(self, obj):
    """Serializer field: return the cached comment count for *obj*."""
    count = RedisHelper.get_count(obj, 'comments_count')
    return count
def get_likes_count(self, obj):
    """Serializer field: return the cached like count for *obj*."""
    count = RedisHelper.get_count(obj, 'likes_count')
    return count
def push_newsfeed_to_cache(cls, newsfeed):
    """Push *newsfeed* into its owner's cached newsfeed list in Redis.

    The owner's full ordered queryset is handed to the helper alongside
    the new object and the cache key.
    """
    key = USER_NEWSFEEDS_PATTERN.format(user_id=newsfeed.user_id)
    owner_feeds = Newsfeed.objects.filter(
        user_id=newsfeed.user_id,
    ).order_by('-created_at')
    RedisHelper.push_object(key, newsfeed, owner_feeds)
def get_cached_newsfeeds(cls, user_id):
    """Return the newsfeed list for *user_id*, served via the Redis cache."""
    key = USER_NEWSFEEDS_PATTERN.format(user_id=user_id)
    owner_feeds = Newsfeed.objects.filter(
        user_id=user_id,
    ).order_by('-created_at')
    return RedisHelper.load_objects(key, owner_feeds)
def push_tweet_to_cache(cls, tweet):
    """Push *tweet* into its author's cached tweet list in Redis.

    Returns whatever the Redis helper returns.
    """
    key = USER_TWEETS_PATTERN.format(user_id=tweet.user_id)
    author_tweets = Tweet.objects.filter(
        user_id=tweet.user_id,
    ).order_by('-created_at')
    return RedisHelper.push_object(key, tweet, author_tweets)
def get_cached_tweets(cls, user_id):
    """Return the tweet list for *user_id*, served via the Redis cache."""
    key = USER_TWEETS_PATTERN.format(user_id=user_id)
    author_tweets = Tweet.objects.filter(
        user_id=user_id,
    ).order_by('-created_at')
    return RedisHelper.load_objects(key, author_tweets)