def find_group(channel):
    """Resolve the rate-limiter configuration for *channel*.

    Checks the Redis cache first; on a miss (or an incomplete cached
    entry), falls back to the ``limiter_conf`` Mongo collection, caches
    the result back into Redis, and returns it.

    Returns a 9-tuple ``(group, rate, category, user, Bbase, Balarm,
    Bhard, Bgrade, Bpolice)``; all ``None`` when the channel is unknown.
    """
    cache_key = CHANNEL_LIMITER % channel
    logger.info("find_group cache_key: %s" % cache_key)
    # cache_key example: http://download.52xuexi.net_limiter
    group = ''
    rate = ''
    category = ''
    Bbase = ''
    Balarm = ''
    Bhard = ''
    Bgrade = ''
    Bpolice = ''
    cache = REDIS_CONNECT_0.get(cache_key)
    # Cached payload example (observed 2018-06-29):
    # b'{"category": "e1,e2,e3,e4", "rate": 800.0, "Bpolice": 1.2,
    #    "user": "******", "Bhard": 1.0, "_id": "599ab249d101b441a84bb1a5",
    #    "Bbase": 170.0, "Bgrade": 3, "Balarm": 0.75}'
    if cache:
        data = json.loads(cache)
        group = data["_id"]
        rate = data["rate"]
        category = data["category"]
        user = data["user"]
        Bbase = data["Bbase"]
        Balarm = data["Balarm"]
        Bhard = data["Bhard"]
        Bgrade = data["Bgrade"]
        Bpolice = data["Bpolice"]
        # Only trust the cache when the core fields are all non-empty;
        # otherwise fall through and rebuild from the database.
        if group and rate and category and user:
            return group, rate, category, user, Bbase, Balarm, Bhard, Bgrade, Bpolice
    result = limiter_conf.find(
        {'channels': channel},
        {'rate': 1, '_id': 1, 'category': 1, 'user': 1, 'Bbase': 1,
         'Balarm': 1, 'Bhard': 1, 'Bgrade': 1, 'Bpolice': 1})
    logger.info("find_group result.count(): %s" % result.count())
    if not result or result.count() == 0:
        logger.info("find_group [channel not found.]: %s" % channel)
        return None, None, None, None, None, None, None, None, None
    # If several documents match, the last one wins (original behavior).
    for line in result:
        group = "rate_%s" % line["_id"]
        rate = line["rate"]
        category = line["category"]
        user = line["user"]
        Bbase = line["Bbase"]
        Balarm = line["Balarm"]
        Bhard = line["Bhard"]
        Bgrade = line["Bgrade"]
        Bpolice = line["Bpolice"]
    logger.info('find_group [config] group: %s|| rate: %s|| category: %s|| user: %s|| Bbase: %s|| Balarm: %s|| Bhard: %s|| Bgrade: %s|| Bpolice: %s' % (group, rate, category, user, Bbase, Balarm, Bhard, Bgrade, Bpolice))
    cache = {
        "_id": group,
        "rate": rate,
        "category": category,
        "user": user,
        "Bbase": Bbase,
        "Balarm": Balarm,
        "Bhard": Bhard,
        "Bgrade": Bgrade,
        "Bpolice": Bpolice,
    }
    # json.dumps is the idiomatic equivalent of json.JSONEncoder().encode.
    REDIS_CONNECT_0.set(cache_key, json.dumps(cache))
    # BUG FIX: expire the per-channel key we just wrote. The original
    # called expire(CHANNEL_LIMITER, ...) on the raw "%s"-template string,
    # so the real cache entry never expired and stale limiter config could
    # be served indefinitely.
    REDIS_CONNECT_0.expire(cache_key, CACHE_TTL)
    return group, rate, category, user, Bbase, Balarm, Bhard, Bgrade, Bpolice
def del_config_cache(channel, category):
    """Delete the cached configuration for (channel, category) from Redis.

    Best-effort: any failure is logged and swallowed so callers are never
    interrupted by a cache problem.
    """
    try:
        _key = get_channel_cache_key(channel, category)
        REDIS_CONNECT_0.delete(_key)
    except Exception:
        # BUG FIX: traceback.format_exc() takes no exception argument in
        # Python 3; the original format_exc(e) raised a TypeError inside
        # the handler, masking the real error.
        logger.info('del_config_cache[error]: %s' % (traceback.format_exc(), ))
def del_limiter_cache(channels, category):
    """Delete the limiter cache entries for every channel in *channels*.

    Removes both the per-category key (``<channel>_limiter_<category>``)
    and the channel-wide key (``<channel>_limiter``). Best-effort: any
    failure is logged and swallowed.
    """
    try:
        for channel in channels:
            _key = "%s_limiter_%s" % (channel, category)
            _key1 = "%s_limiter" % channel
            logger.info("del_limiter_cache _key: %s" % _key)
            REDIS_CONNECT_0.delete(_key)
            REDIS_CONNECT_0.delete(_key1)
    except Exception:
        # BUG FIX: traceback.format_exc() takes no exception argument in
        # Python 3; the original format_exc(e) raised a TypeError inside
        # the handler.
        logger.info('del_limiter_cache[error]: %s' % (traceback.format_exc(), ))
def add_config_cache(channel, category, conf):
    """Store a configuration dict in the Redis hash for (channel, category).

    Normalizes values that Redis cannot store directly: datetimes are
    formatted as ``%Y-%m-%dT%H:%M:%S`` strings and the ``_id`` value
    (presumably a Mongo ObjectId — confirm with callers) is stringified.
    Note: *conf* is mutated in place. Best-effort: failures are logged
    and swallowed.
    """
    try:
        for k, v in conf.items():
            if isinstance(v, datetime.datetime):
                conf[k] = datetime.datetime.strftime(v, '%Y-%m-%dT%H:%M:%S')
            if k == '_id':
                conf[k] = str(v)
        _key = get_channel_cache_key(channel, category)
        REDIS_CONNECT_0.hmset(_key, conf)
    except Exception:
        # BUG FIX: traceback.format_exc() takes no exception argument in
        # Python 3; the original format_exc(e) raised a TypeError inside
        # the handler.
        logger.info('add_config_cache[error]: %s' % (traceback.format_exc(), ))
def collect_dataset(group):
    """Collect current rates and historical variances for every device
    in each limiter group.

    For each Redis set key in *group*, reads its member node keys and
    each node's JSON payload, gathering the ``current`` and ``variance``
    fields. Nodes whose key has expired (GET returns None) are skipped.

    Original note (translated): determine the limiting direction first —
    at the lower layer directions 1/3 form one group and 2/4 another;
    when a group has an entry, the maximum is taken as the final sum, so
    the final sums all reduce to a single value.

    Returns ``(current_set, variance_set)``: parallel lists with one
    inner list per group.
    """
    current_set = []   # per-group lists of devices' current rates
    variance_set = []  # per-group lists of devices' historical variances
    for line_group in group:
        every_current_set = []
        every_variance_set = []
        group_data = REDIS_CONNECT_0.smembers(line_group)
        # logger.info("group_data: %s" % group_data)
        for line in group_data:
            node = REDIS_CONNECT_0.get(line)
            if node is not None:
                # Parse once; the original called json.loads on the same
                # payload twice per node.
                parsed = json.loads(node)
                every_current_set.append(parsed['current'])
                every_variance_set.append(parsed['variance'])
        current_set.append(every_current_set)
        variance_set.append(every_variance_set)
    # logger.info("current_set: %s, variance_set: %s" % (current_set, variance_set, ))
    return current_set, variance_set
def get_config_cache(channel, category):
    """Fetch the cached configuration hash for (channel, category).

    Returns the Redis hash contents, or None when the lookup raised.
    Best-effort: failures are logged and swallowed.
    """
    res = None
    try:
        _key = get_channel_cache_key(channel, category)
        res = REDIS_CONNECT_0.hgetall(_key)
    except Exception:
        # BUG FIX: traceback.format_exc() takes no exception argument in
        # Python 3; the original format_exc(e) raised a TypeError inside
        # the handler.
        logger.info('get_config_cache[error]: %s' % (traceback.format_exc(), ))
    return res
def add_limiter_cache(channels, category, info):
    """Write limiter config *info* into the cache for every channel.

    Writes two keys per channel: ``<channel>_limiter_<category>`` with a
    7-day TTL, and ``<channel>_limiter`` (no TTL set here —
    NOTE(review): confirm the missing expire on the channel-wide key is
    intentional). Best-effort: failures are logged and swallowed.
    """
    try:
        # Serialize once; the payload does not change per channel.
        payload = json.dumps(info)
        for channel in channels:
            _key = "%s_limiter_%s" % (channel, category)
            REDIS_CONNECT_0.set(_key, payload)
            REDIS_CONNECT_0.expire(_key, 604800)  # 7 days in seconds
            _key1 = "%s_limiter" % channel
            REDIS_CONNECT_0.set(_key1, payload)
    except Exception:
        # BUG FIX: traceback.format_exc() takes no exception argument in
        # Python 3; the original format_exc(e) raised a TypeError inside
        # the handler.
        logger.info('add_limiter_cache[error]: %s' % (traceback.format_exc(), ))
def update_statistics(group, channel, dev, rate, category):
    """Update the per-device rate statistics stored in Redis and return
    the fields needed by the caller.

    *rate* is a dict keyed by direction (``e1``..``e4``), values either a
    numeric string or ``''`` (empty apparently means "no fresh sample" —
    TODO confirm with the producer). Maintains, per direction:

    - ``previous``: the last two smoothed samples ``[newest, older]``
    - ``current``: either the fresh sample, or the exponentially smoothed
      value ``C1 * previous[0] + C2 * previous[1]`` when no sample arrived
    - ``variance``: computed by the module-level ``variance()`` helper
      over ``previous`` for the directions in *category*

    Returns ``{"current": {...}, "variance": {...}}`` restricted to the
    directions produced by ``delete_in_out(category)``.

    Observed stored payload (2018-06-29):
    b'{"current": {"e4": 0.0, ...}, "variance": {"e4": 0.0, ...},
       "previous": {"e4": [0.0, 0.0], ...}}'
    """
    key = CHANNEL_DEV % (channel, dev)
    # Register this device key in the group's member set.
    REDIS_CONNECT_0.sadd(group, key)
    data = REDIS_CONNECT_0.get(key)
    logger.info("update_statistics [before data]: %s" % data)
    need_data = {}
    e_dict = {}
    category_list = delete_in_out(category)
    logger.info("update_statistics [directions category_list]: %s" % category_list)
    if data:
        data = json.loads(data)
        for k, v in data.items():
            if k == 'previous':
                for line in category_list:
                    # Shift history: previous[1] <- previous[0] <- current.
                    v[line][1] = v[line][0]
                    v[line][0] = data["current"][line]
                    # Smoothed fallback: C1 * newest + C2 * older
                    # (e.g. 0.7 of current bandwidth + 0.3 of the prior one).
                    current_init = C1 * v[line][0] + C2 * v[line][1]
                    if line in rate.keys():
                        if rate[line] == '':
                            # No fresh sample for this direction: smooth.
                            data["current"][line] = current_init
                            e_dict[line] = current_init
                        else:
                            data["current"][line] = float(rate[line])
                            e_dict[line] = float(rate[line])
                    else:
                        data["current"][line] = current_init
    else:
        # First sighting of this device: initialize all four directions.
        data = {}
        data["current"] = {}
        data["previous"] = {}
        for i in range(1, 5):
            direction_index = "e%s" % i
            if direction_index in rate.keys():
                if rate[direction_index] == "":
                    data["current"][direction_index] = 0.0
                    data["previous"][direction_index] = [0.0, 0.0]
                else:
                    data["current"][direction_index] = float(rate[direction_index])
                    data["previous"][direction_index] = [float(rate[direction_index]), float(rate[direction_index])]
            else:
                data["current"][direction_index] = 0.0
                data["previous"][direction_index] = [0.0, 0.0]
            if direction_index in category_list:
                e_dict[direction_index] = data["current"][direction_index]
    # (Removed the unused locals `count`, `index` and `every_previous_dict`
    # from the original — they were never read.)
    logger.info("update_statistics [after] data: %s" % data)
    need_data["current"] = e_dict
    variance_data = variance(data["previous"], category_list)
    data["variance"] = variance_data
    need_data["variance"] = variance_data
    logger.info("update_statistics need_data: %s" % need_data)
    REDIS_CONNECT_0.set(key, json.dumps(data))
    REDIS_CONNECT_0.expire(key, CACHE_TTL)
    return need_data