def tm():
    """Danmaku (bullet-comment) endpoint.

    GET  ?id=<movie id> -> {"code": 1, "danmaku": [...]} with up to 3000
                           queued messages from the redis list "movie<id>".
    POST <json body>    -> stores the message on the movie's redis list and
                           echoes it back as {"code": 1, "data": {...}}.
    """
    import json
    if request.method == "GET":
        movie_id = request.args.get("id")  # renamed: `id` shadowed the builtin
        key = "movie" + str(movie_id)
        if redis.llen(key):
            msgs = redis.lrange(key, 0, 2999)
            res = {"code": 1, "danmaku": [json.loads(v) for v in msgs]}
        else:
            # BUG FIX: this branch used the misspelled key "danmuku", so
            # clients reading res["danmaku"] broke whenever the queue was empty.
            res = {"code": 1, "danmaku": []}
        resp = json.dumps(res)
    if request.method == "POST":
        data = json.loads(request.get_data())
        msg = {
            "__v": 0,
            "author": data["author"],
            "time": data["time"],
            "text": data["text"],
            "color": data["color"],
            "type": data["type"],
            "ip": request.remote_addr,
            # timestamp prefix + random hex: unique and roughly time-sortable
            "_id": datetime.now().strftime("%Y%m%d%H%M%S") + uuid4().hex,
            "player": [data["player"]]
        }
        res = {"code": 1, "data": msg}
        resp = json.dumps(res)
        # Queue the new message for subsequent GETs of the same movie.
        redis.lpush("movie" + str(data["player"]), json.dumps(msg))
    # BUG FIX: mimetype was the typo "application/jsn".
    return Response(resp, mimetype="application/json")
def get_dict_from_list(name, count=-1):
    """Fetch the hashes referenced by the redis list *name*.

    Each element of the list is treated as a redis hash key; the hash is
    loaded with HGETALL and annotated with a "job_id" entry holding that key.
    *count* bounds the LRANGE end index (-1 = whole list).
    """
    job_ids = redis.lrange(name, 0, count)
    return [dict(redis.hgetall(job_id), job_id=job_id) for job_id in job_ids]
def _usagecapacity(service):
    """calculate the current usage of the service.

    Returns a 5-tuple:
      ("used_gpus (used_cpus)", queued_count,
       "capacity_gpus (capacity_cpus)", busy_count, detail)
    where *detail* maps each resource name to its capacity/usage breakdown.
    """
    usage_gpu = 0       # GPUs in use across all resources
    usage_cpu = 0       # CPUs in use across all resources
    capacity_gpus = 0   # total GPU capacity across all resources
    capacity_cpus = 0   # total CPU capacity across all resources
    busy = 0            # number of resources flagged busy in redis
    detail = {}
    servers = service.list_servers()
    for resource in service.list_resources():
        detail[resource] = {'busy': '', 'reserved': ''}
        # list_resources() maps resource name -> GPU capacity for that resource
        r_capacity = service.list_resources()[resource]
        detail[resource]['capacity'] = r_capacity
        capacity_gpus += r_capacity
        detail[resource]['ncpus'] = servers[resource]['ncpus']
        capacity_cpus += servers[resource]['ncpus']
        reserved = redis.get("reserved:%s:%s" % (service.name, resource))
        if reserved:
            detail[resource]['reserved'] = reserved
        # Per-resource accounting, reset for each resource:
        #   count_map_gpu[task] -> GPUs held by task, count_map_cpu[task] -> CPUs.
        count_map_gpu = Counter()
        task_type = {}
        count_map_cpu = {}
        count_used_gpus = 0
        count_used_cpus = 0
        # gpu_resource hash: presumably gpu-index -> task id; we only need the
        # task ids (values), one entry per GPU held — TODO confirm schema.
        r_usage_gpu = redis.hgetall("gpu_resource:%s:%s" % (service.name, resource)).values()
        for t in r_usage_gpu:
            task_type[t] = redis.hget("task:%s" % t, "type")
            count_map_gpu[t] += 1
            count_used_gpus += 1
            # Count a task's CPUs only once even if it holds several GPUs.
            if t not in count_map_cpu:
                count_map_cpu[t] = int(redis.hget("task:%s" % t, "ncpus"))
                count_used_cpus += count_map_cpu[t]
        # cpu_resource list: task ids running CPU-only on this resource.
        r_usage_cpu = redis.lrange("cpu_resource:%s:%s" % (service.name, resource), 0, -1)
        for t in r_usage_cpu:
            task_type[t] = redis.hget("task:%s" % t, "type")
            if t not in count_map_cpu:
                count_map_cpu[t] = int(redis.hget("task:%s" % t, "ncpus"))
                count_map_gpu[t] = 0  # CPU-only task: show 0 GPUs in usage line
            count_used_cpus += count_map_cpu[t]
        # Human-readable "type task: ngpus (ncpus)" lines for each task.
        detail[resource]['usage'] = ["%s %s: %d (%d)" % (task_type[k], k, count_map_gpu[k], count_map_cpu[k]) for k in count_map_gpu]
        detail[resource]['avail_cpus'] = int(redis.get("ncpus:%s:%s" % (service.name, resource)))
        detail[resource]['avail_gpus'] = r_capacity-count_used_gpus
        err = redis.get("busy:%s:%s" % (service.name, resource))
        if err:
            detail[resource]['busy'] = err
            busy = busy + 1
        usage_cpu += count_used_cpus
        usage_gpu += count_used_gpus
    queued = redis.llen("queued:"+service.name)
    return ("%d (%d)" % (usage_gpu, usage_cpu), queued, "%d (%d)" % (capacity_gpus, capacity_cpus), busy, detail)
def tm(): """ 弹幕消息处理 """ import json if request.method == "GET": # 获取弹幕消息队列 id = request.args.get('id') # 存放在redis队列中的键值 key = "movie" + str(id) if redis.llen(key): msgs = redis.lrange(key, 0, 2999) res = { "code": 1, "danmaku": [json.loads(v) for v in msgs] } else: res = { "code": 1, "danmaku": [] } resp = json.dumps(res) if request.method == "POST": # 添加弹幕 data = json.loads(request.get_data()) msg = { "__v": 0, "author": data["author"], "time": data["time"], "text": data["text"], "color": data["color"], "type": data['type'], "ip": request.remote_addr, "_id": datetime.datetime.now().strftime("%Y%m%d%H%M%S") + uuid.uuid4().hex, "player": [ data["player"] ] } res = { "code": 1, "data": msg } resp = json.dumps(res) # 将添加的弹幕推入redis的队列中 redis.lpush("movie" + str(data["player"]), json.dumps(msg)) return Response(resp, mimetype='application/json')
def get(self, id):
    """Load the session stored under *id* from redis into this object.

    Populates data members via self.fromDict and returns self, or returns
    None when no session hash exists for the id.
    """
    raw = redis.hgetall(f"session::{id}")
    if not raw:
        return None
    # redis returns bytes for keys and values; decode everything to str.
    data = {"id": id}
    data.update({k.decode('utf-8'): v.decode('utf-8') for k, v in raw.items()})
    data['emails'] = {e.decode('utf-8') for e in redis.smembers(f"session::{id}::emails")}
    data['ip_addresses'] = {a.decode('utf-8') for a in redis.smembers(f"session::{id}::ip_addresses")}
    data['posts'] = [p.decode('utf-8') for p in redis.lrange(f"session::{id}::posts", 0, -1)]
    self.fromDict(data)
    return self
def bi():
    """Import the daily BI report file (report_d.json) into the database.

    Reads one JSON record per line; records whose we_id was flagged '1' in the
    cache get their own BiReport row, while the 'default' record is kept as a
    template (*bi_default*) and copied to every protocol id that never received
    its own record. Returns True early when the file is missing.
    """
    app = create_app('test')
    app_context = app.app_context()
    app_context.push()
    # NOTE(review): hard-coded date — presumably a one-off backfill; confirm.
    target_date = '20190626'
    name = os.path.join(os.getcwd(), current_app.config['STATISTICS_TAG'], 'bi', target_date, 'report_d.json')
    if os.path.exists(name):
        print('read_report_d file exist')
    else:
        print('read_report_d file not exist')
        return True
    # if BiReport.query.filter_by(record_time=target_date).first() is not None:
    #     print('read_report_d already exist')
    #     return True
    # Mark every active protocol id as '1' (pending) in the cache and remember
    # it in the redis list for the fill-in pass below.
    dbreports = BiReportProtocol.query.filter_by(status=1).all()
    for re in dbreports:  # NOTE(review): `re` shadows the stdlib re module name
        cache.set(str(re.we_id), '1', timeout=60 * 10)
        redis.lpush("BiReportProtocol", str(re.we_id))
    bi_default = BiReport()  # template filled from the 'default' record
    with open(os.path.join(os.getcwd(), current_app.config['STATISTICS_TAG'], 'bi', target_date, 'report_d.json'), 'r') as file:
        data = file.readlines()
        for report in data:
            try:
                report = json.loads(report)
                if report['we_id'] != 'default':
                    print(report['we_id'])
                    data = cache.get(report['we_id'])
                    if data:
                        # Known protocol id: build its own report row.
                        bi_report = BiReport()
                        bi_report.record_time = report['record_time']
                        bi_report.we_id = report['we_id']
                        bi_report.rank_index = report['rank_index']
                        bi_report.latent_consumer_index = report['latent_consumer_index']
                        bi_report.activite_consumer_index = report['activite_consumer_index']
                        bi_report.extend_work_heat = report['extend_work_heat']
                        bi_report.sale_work_heat = report['sale_work_heat']
                        bi_report.income_index = report['income_index']
                        bi_report.pay_index = report['pay_index']
                        bi_report.v_webusiness_index = report['v_webusiness_index']
                        # db.session.add(bi_report)
                        print('add bi_report')
                        # '2' = this id received its own record.
                        cache.set(str(report['we_id']), '2', timeout=60 * 10)
                else:
                    # 'default' record: keep as the fallback template.
                    bi_default.record_time = report['record_time']
                    bi_default.we_id = report['we_id']
                    bi_default.rank_index = report['rank_index']
                    bi_default.latent_consumer_index = report['latent_consumer_index']
                    bi_default.activite_consumer_index = report['activite_consumer_index']
                    bi_default.extend_work_heat = report['extend_work_heat']
                    bi_default.sale_work_heat = report['sale_work_heat']
                    bi_default.income_index = report['income_index']
                    bi_default.pay_index = report['pay_index']
                    bi_default.v_webusiness_index = report['v_webusiness_index']
            except Exception as e:
                # Best-effort per line: a malformed record is logged and skipped.
                print('read_report_d: ', e)
    db.session.commit()
    # Fill-in pass: every id still marked '1' never got its own record, so it
    # receives a copy of the default template.
    all_data = redis.lrange('BiReportProtocol', 0, -1)
    for key in all_data:
        print(key)
        data = cache.get(key.decode())
        print(data)
        if data == '1':
            print('add')
            bi_report = BiReport()
            bi_report.record_time = bi_default.record_time
            bi_report.we_id = key.decode()
            bi_report.rank_index = bi_default.rank_index
            bi_report.latent_consumer_index = bi_default.latent_consumer_index
            bi_report.activite_consumer_index = bi_default.activite_consumer_index
            bi_report.extend_work_heat = bi_default.extend_work_heat
            bi_report.sale_work_heat = bi_default.sale_work_heat
            bi_report.income_index = bi_default.income_index
            bi_report.pay_index = bi_default.pay_index
            bi_report.v_webusiness_index = bi_default.v_webusiness_index
            db.session.add(bi_report)
    db.session.commit()
    # The work list is transient; drop it once the fill-in pass is done.
    redis.delete('BiReportProtocol')
    print('read_report_d finish')
def index():
    """Render the blog front page with every post referenced in redis."""
    post_ids = redis.lrange('post:post-list', 0, -1)
    posts = [redis.hgetall('post:%s' % post_id) for post_id in post_ids]
    return render_template('blog/index.html', posts=posts)