def get_user_ids(min_ratings=1):
    ids = set()
    user_ids = redis.smembers('users')
    for user_id in user_ids:
        if redis.hlen('user:%s:ratings' % user_id) >= min_ratings:
            ids.add(int(user_id))
    return ids
def render(self, *args, **kwargs):
    tags = redis.smembers('blog.tag')
    data = {}
    for tag in tags:
        data[tag] = len(redis.zrange("blog.tag.%s" % tag, 0, -1))
    kwargs['tags'] = data
    return super(BaseHandler, self).render(*args, **kwargs)
def getNiuniuStatisList(redis, account):
    """ Fetch Niuniu account statistics. """
    res = []
    if account:
        info = redis.hgetall(NIUNIU_ACCOUNT_STACTICS_TABLE % account)
        if info:
            info['op'] = []
            res.append(info)
        return {'total': len(res), 'result': res}
    for account in redis.smembers(NIUNIU_ACCOUNT_SET_TOTAL):
        info = redis.hgetall(NIUNIU_ACCOUNT_STACTICS_TABLE % account)
        if info:
            info['op'] = []
            info['op'].append({
                'url': '/admin/niuniu/reward_journal_op1?list=1',
                'method': 'GET',
                'txt': '奖励记录'  # reward records
            })
            info['op'].append({
                'url': '/admin/niuniu/get_cash_journal_op1?list=1',
                'method': 'GET',
                'txt': '提现记录'  # withdrawal records
            })
            info['op'].append({
                'url': '/admin/niuniu/set_cash_journal_op1',
                'method': 'GET',
                'txt': '清零'  # reset to zero
            })
            res.append(info)
    return {'total': len(res), 'result': res}
def _generate_output():
    """Generate output for testing."""
    redis = _initialize_engine_redis()
    output = _setup_spider().execute()
    new_links = _get_fake_base_id() + "::new_links"
    link_output = redis.smembers(new_links)
    test_file = ('{0}/spiderrunner_results_new_links').format(
        _test_results_dir())
    with open(test_file, 'w') as f:
        f.write(str(link_output))
    test_file = ('{0}/spiderrunner_results_dumbo_cmd').format(
        _test_results_dir())
    with open(test_file, 'w') as f:
        f.write(str(output[0]))
    test_file = ('{0}/spiderrunner_results_psuedo_cmd').format(
        _test_results_dir())
    with open(test_file, 'w') as f:
        f.write(str(output[1]))
    test_file = ('{0}/spiderrunner_results_file_cmd').format(
        _test_results_dir())
    with open(test_file, 'w') as f:
        f.write(str(output[2]))
    test_file = ('{0}/spiderrunner_results_spdr_out').format(
        _test_results_dir())
    with open(test_file, 'w') as f:
        f.write(str(output[3]))
def getNiuniuJournal(redis, session):
    """ Niuniu game transaction journal. """
    report, title = select_report(request)
    lang = getLang()
    isList = request.GET.get('list', '').strip()
    start_date = request.GET.get('startDate', '').strip()
    end_date = request.GET.get('endDate', '').strip()
    if isList:
        res = []
        redis = getPrivateRedisInst(redis, NIUNIU_GAMEID_1)
        if redis:
            res = getNiuniuJournalList(redis, start_date, end_date)
        return {'code': 0, 'data': res}
    agentId = session['id']
    creatAgUrl = BACK_PRE + '/niuniu/journal'
    if creatAgUrl in redis.smembers(AGENT2ACCESSBAN % agentId):
        createAg = '0'
    else:
        createAg = '1'
    info = {
        "title": title + lang.MENU_NIUNIU_JOURNAL,
        "tableUrl": BACK_PRE + "/niuniu/journal?report=%s&list=%s" % (report, 1),
        'STATIC_LAYUI_PATH': STATIC_LAYUI_PATH,
        'STATIC_ADMIN_PATH': STATIC_ADMIN_PATH,
        'back_pre': BACK_PRE,
        'backUrl': BACK_PRE + "/niuniu/index?report=" + report,
        'createAccess': createAg,
    }
    return template('admin_niuniu_journal', info=info, lang=lang, RES_VERSION=RES_VERSION)
def getKefuIndex(redis, session):
    """ Niuniu customer-service page. """
    lang = getLang()
    # permission check
    agentId = session['id']
    creatAgUrl = BACK_PRE + '/niuniu/reward_journal'
    if creatAgUrl in redis.smembers(AGENT2ACCESSBAN % agentId):
        createAg = '0'
    else:
        createAg = '1'
    # report 1: classic Niuniu, 2: open-card (Mingpai) Niuniu
    report, title = select_report(request)
    gotoTitle = "跳转明牌牛牛" if report == 1 else "跳转经典牛牛"  # "go to Mingpai Niuniu" / "go to classic Niuniu"
    gotoReport = 2 if report == 1 else 1
    info = {
        "gotoTitle": gotoTitle,
        "gotoUrl": BACK_PRE + "/niuniu/kefu?report=%s" % gotoReport,
        "title": title + lang.MENU_NIUNIU_KEFU,
        "tableUrl": BACK_PRE + "/niuniu/kefu?report=%s&list=%s" % (report, 1),
        'STATIC_LAYUI_PATH': STATIC_LAYUI_PATH,
        'STATIC_ADMIN_PATH': STATIC_ADMIN_PATH,
        'searchTxt': 'uid',
        'back_pre': BACK_PRE,
        'backUrl': BACK_PRE + "/niuniu/index?report=" + report,
        'createAccess': createAg,
    }
    return template('admin_niuniu_kefu', info=info, lang=lang, RES_VERSION=RES_VERSION)
def personal(msg):
    try:
        if msg.from_user.id == naji and msg.text:
            if re.match("/fwd", msg.text) and msg.reply_to_message:
                # smembers takes a single key; iterate over every registered chat
                for i in redis.smembers("bot:all"):
                    try:
                        bot.forward_message(i, msg.chat.id, msg.reply_to_message.id)
                    except Exception as e:
                        print(e)
            if re.match("/stats", msg.text):
                bot.send_message(
                    msg.chat.id,
                    # "Stats\nGroups: {}\nPrivate chats: {}"
                    "آمار\nگروه ها : {}\nخصوصی ها : {}".format(
                        redis.scard("bot:gps"), redis.scard("bot:pvs")))
        else:
            bot.send_message(
                msg.chat.id,
                # "A bot for growing group membership\nJust invite it to your group and make it an admin"
                "رباتی برای افزایش اعضای گروه\nفقط کافیه به گروهت دعوتش کنی و ادمینش کنی"
            )
            if not redis.sismember("bot:all", msg.chat.id):
                redis.sadd("bot:pvs", msg.chat.id)
                redis.sadd("bot:all", msg.chat.id)
    except Exception as e:
        print(e)
def top(self):
    members = [x.decode() for x in redis.smembers("musicacommonset")]
    frequencies = map(int, redis.mget(*["musicacommon.%s" % member for member in members]))
    titles = [x.decode() if x else "%s (loading)" % member
              for member, x in zip(members, redis.mget(*["musicatitle.%s" % member for member in members]))]
    frequency = list(zip(members, titles, frequencies))
    frequency.sort(reverse=True, key=lambda x: x[2])
    return frequency
def get_GoldGameList(redis):
    """ Fetch the list of gold-coin game rooms. """
    res = []
    for gameid in redis.smembers(GOLD_GAMEID_SET):
        game_list = PARTY_GOLD_GAME_LIST.get(gameid, PARTY_GOLD_GAME_LIST.get('default'))
        data = copy.deepcopy(game_list)
        for item in data:
            if gameid == '555':
                item['gameName'] = '经典牛牛'
            elif gameid == '666':
                item['gameName'] = '欢乐牛牛'
            elif gameid == '556':
                item['gameName'] = '明牌牛牛'
            elif gameid == '444':
                item['gameName'] = '东胜麻将'
            elif gameid == '557':
                item['gameName'] = '欢乐拼点'
            item['hasOwner'] = '1'  # supports opening rooms with friends
            online = redis.scard(GOLD_ONLINE_PLAYID_ACCOUNT_SET % (gameid, item['id']))
            item['online'] = online
        res.append({'gameid': gameid, 'config': data})
    return res
def test():
    top_users = redis.zrevrange("leaderboard", 0, 9, withscores=True)
    a = [{
        "user": json.loads(redis.smembers(user).pop().decode('utf-8')),
        "score": score
    } for user, score in top_users]
    return json.dumps(a)
def send_autoconf_updates(self):
    # clean up unneeded updates stored in RAM (with thread-safe access)
    shared_memory_locks["autoconf_updates"].acquire()
    autoconf_update_keys_to_process = set(
        map(
            lambda x: x.decode("ascii"),
            redis.smembers("autoconf-update-keys-to-process"),
        ))
    try:
        autoconf_updates = self.shared_memory_manager_dict["autoconf_updates"]
        keys_to_remove = (set(autoconf_updates.keys()) -
                          autoconf_update_keys_to_process)
        for key in keys_to_remove:
            del autoconf_updates[key]
        self.shared_memory_manager_dict["autoconf_updates"] = autoconf_updates
    except Exception:
        log.exception("exception")
    finally:
        shared_memory_locks["autoconf_updates"].release()
    if len(autoconf_update_keys_to_process) == 0:
        return
    # check if configuration is overwhelmed; if yes, back off to reduce aggressiveness
    if self.previous_redis_autoconf_updates == autoconf_update_keys_to_process:
        log.warning("autoconf mechanism is overwhelmed, will re-try next round")
        return
    try:
        autoconf_updates_keys_to_send = list(autoconf_update_keys_to_process)
        autoconf_updates_to_send = []
        for update_key in autoconf_updates_keys_to_send:
            shared_memory_locks["autoconf_updates"].acquire()
            autoconf_updates_to_send.append(
                self.shared_memory_manager_dict["autoconf_updates"][update_key])
            shared_memory_locks["autoconf_updates"].release()
        log.info("Sending {} autoconf updates to be filtered via prefixtree".format(
            len(autoconf_updates_to_send)))
        with Producer(self.connection) as producer:
            producer.publish(
                autoconf_updates_to_send,
                exchange=self.autoconf_exchange,
                routing_key="update",
                retry=True,
                priority=4,
                serializer="ujson",
            )
    except Exception:
        log.exception("exception")
    finally:
        self.previous_redis_autoconf_updates = set(autoconf_update_keys_to_process)
def check_score(redis, number_drawed):
    for i in range(1, 51):
        card = redis.hget("player:" + str(i), "card")
        cardNumbers = redis.smembers(card)
        if number_drawed in cardNumbers:
            print("Number found on card " + str(card) + ": " + str(number_drawed))
            redis.incr("player:" + str(i) + ":score", 1)
def check_price_and_notify(delay):
    while 1:
        print "Notification is triggered!"
        # load latest products in memory and build the list of products with decreasing prices
        products_prices_before = build_hash_from_json(json.loads(redis.get('optimusprice.prices')))
        status_code, prices = get_all_prices()
        if status_code == 200:
            products_prices_now = build_hash_from_json(prices)
        else:
            products_prices_now = products_prices_before
        users = redis.smembers("optimusprice.subscriptions.users")
        print "Subscribed users: ", users
        for chat_id in users:
            user_id = redis.get("optimusprice.mapping." + str(chat_id))
            # update user subscriptions
            status_code, products = get_likes_for_user(user_id)
            if status_code == 200:
                for p in products:
                    redis.sadd("optimusprice.subscriptions." + str(chat_id), p)
        products_ids_to_notify = []
        print products_prices_before.keys()
        print products_prices_now.keys()
        for p in products_prices_before.keys():
            if products_prices_now.get(p) is not None:
                if float(products_prices_now.get(p)['price']) < float(products_prices_before.get(p)['price']):
                    products_ids_to_notify.append(p)
        print "products_ids_to_notify ", products_ids_to_notify
        users = redis.smembers("optimusprice.subscriptions.users")
        print "subscribed users ", users
        for u in users:
            products_likes = map(int, redis.smembers("optimusprice.subscriptions." + str(u)))
            print "likes for user %s : %s" % (str(u), str(products_likes))
            products_intersection = set(products_ids_to_notify).intersection(products_likes)
            print "products_intersection for user %s : %s" % (str(u), str(products_intersection))
            if len(products_intersection) > 0:
                print "Notifying %s about %s" % (u, str(products_intersection))
                for pp in products_intersection:
                    bot.sendMessage(u, "Prices have changed for: " + products_prices_before.get(pp)['name'])
                    f = open('images/%s.jpg' % pp, 'rb')  # some file on local disk
                    bot.sendPhoto(u, f)
        time.sleep(delay)
def toall(m):
    if str(m.from_user.id) == is_sudo:
        text = m.text.replace('/toall', '')
        rd = redis.smembers('startmebot')
        for id in rd:
            try:
                bot.send_message(id, "{}".format(text), parse_mode="Markdown")
            except:
                redis.srem('startmebot', id)
def send_autoconf_updates(self):
    # clean up unneeded updates stored in RAM (with thread-safe access)
    lock.acquire()
    autoconf_update_keys_to_process = set(
        map(
            lambda x: x.decode("ascii"),
            redis.smembers("autoconf-update-keys-to-process"),
        ))
    try:
        keys_to_remove = (set(self.autoconf_updates.keys()) -
                          autoconf_update_keys_to_process)
        for key in keys_to_remove:
            del self.autoconf_updates[key]
    except Exception:
        log.exception("exception")
    finally:
        lock.release()
    if len(autoconf_update_keys_to_process) == 0:
        self.previous_redis_autoconf_updates_counter = 0
        self.setup_autoconf_update_timer()
        return
    # check if configuration is overwhelmed; if yes, back off to reduce aggressiveness
    if self.previous_redis_autoconf_updates_counter == len(
            autoconf_update_keys_to_process):
        self.setup_autoconf_update_timer()
        return
    try:
        autoconf_updates_keys_to_send = list(
            autoconf_update_keys_to_process)[:MAX_AUTOCONF_UPDATES]
        autoconf_updates_to_send = []
        for update_key in autoconf_updates_keys_to_send:
            autoconf_updates_to_send.append(self.autoconf_updates[update_key])
        log.info("Sending {} autoconf updates to configuration".format(
            len(autoconf_updates_to_send)))
        if self.connection is None:
            self.connection = Connection(RABBITMQ_URI)
        with Producer(self.connection) as producer:
            producer.publish(
                autoconf_updates_to_send,
                exchange=self.config_exchange,
                routing_key="autoconf-update",
                retry=True,
                priority=4,
                serializer="ujson",
            )
        if self.connection is None:
            self.connection = Connection(RABBITMQ_URI)
    except Exception:
        log.exception("exception")
    finally:
        self.previous_redis_autoconf_updates_counter = len(
            autoconf_update_keys_to_process)
        self.setup_autoconf_update_timer()
def clac(m):
    if m.from_user.id == 317409865:
        text = m.text.replace("/bc ", "")
        rd = redis.smembers('alls')
        for id in rd:
            try:
                bot.send_message(id, "{}".format(text), parse_mode="Markdown")
            except:
                redis.srem('alls', id)
def clearSet(tableName):
    members = redis.smembers(tableName)  # avoid shadowing the built-in `set`
    print('==>members type:{}, members:{}'.format(type(members), members))
    print(len(members))
    for ele in members:
        redis.srem(tableName, ele)
def get_accessible_ids(token, ids=None):
    tmpSetName = 'tmp_%s' % str(uuid.uuid4())
    tmpSetName = getAccessibleSet(token, tmpSetName, ids=ids)
    if tmpSetName is None:
        return None
    else:
        redis = brconfig.getRedis()
        ret_ids = redis.smembers(tmpSetName)
        redis.delete(tmpSetName)
        return ret_ids
def Fetch(config_name, redis):
    config = dict()
    for key in 'user', 'experiment', 'sources':
        if redis.type(config_name + ':' + key) == 'hash':
            config[key] = redis.hgetall(config_name + ':' + key)
        if redis.type(config_name + ':' + key) == 'set':
            config[key] = dict()
            for item in redis.smembers(config_name + ':' + key):
                config[key][item] = redis.hgetall('source:' + config_name + ':' + item)
    return config
def fwdall(m):
    if m.from_user.id == 317409865:
        if m.reply_to_message:
            mid = m.reply_to_message.message_id
            ids = redis.smembers('alls')
            for id in ids:
                try:
                    bot.forward_message(id, m.chat.id, mid)
                except:
                    redis.srem('alls', id)
def all(exclude_archived=True, redis=None):
    experiments = []
    keys = redis.smembers(_key('e'))
    for key in keys:
        experiment = Experiment.find(key, redis=redis)
        if experiment.is_archived() and exclude_archived:
            continue
        experiments.append(experiment)
    return experiments
def show_status(chat_id):
    print "Show status ..."
    product_subscriptions = redis.smembers("optimusprice.subscriptions." + str(chat_id))
    if len(product_subscriptions) == 0:
        bot.sendMessage(chat_id, "You have no subscriptions!")
    else:
        bot.sendMessage(chat_id, "You are subscribed to the following products:")
        for pp in product_subscriptions:
            f = open('images/%s.jpg' % pp, 'rb')  # some file on local disk
            bot.sendPhoto(chat_id, f)
def bc(m):
    if m.reply_to_message:
        mid = m.reply_to_message.message_id
        ids = redis.smembers('mbrs_bot')
        if m.from_user.id == admin:
            for id in ids:
                try:
                    bot.forward_message(id, m.chat.id, mid)
                except:
                    print('error!')
def flush_dead_mappers(redis, mappers_key, ping_key):
    mappers = redis.smembers(mappers_key)
    for mapper in mappers:
        last_ping = redis.get(ping_key % mapper)
        if last_ping:
            now = datetime.now()
            last_ping = datetime.strptime(last_ping, DATETIME_FORMAT)
            if (now - last_ping).seconds > TIMEOUT:
                logging.warning('MAPPER %s found to be inactive after %d seconds of not pinging back' % (mapper, TIMEOUT))
                redis.srem(mappers_key, mapper)
                redis.delete(ping_key % mapper)
def reset():
    the_time = datetime.now().strftime("%A, %d %b %Y %l:%M %p")
    redis.set("clients", 0)
    print("PRINTING CLIENTS", redis.get("clients"))
    redis.delete("languages")
    redis.delete("langs")
    print("PRINTING CLIENTS", redis.smembers("languages"))
    return "HELLO".format(time=the_time)
def all(api_key, exclude_archived=True, exclude_paused=True, redis=None):
    experiments = []
    keys = redis.smembers(_key('e:{0}'.format(api_key)))
    for key in keys:
        experiment = Experiment.find(api_key, key, redis=redis)
        if experiment.is_archived() and exclude_archived:
            continue
        if experiment.is_paused() and exclude_paused:
            continue
        experiments.append(experiment)
    return experiments
def send_welcome(message):
    if is_sudo(message.from_user.id):
        text = message.text.split()[1]
        bchash = redis.smembers("porsmanbot")
        bot.send_message(123755887, "starting to broadcast to all bot users")
        try:
            for i in bchash:
                bot.send_message(i, text)
        except:
            print "f**k |:"
    else:
        bot.reply_to(message, "siktir |:")
def get_workers(self):
    """
    Return all workers and details about them
    """
    # Get all client ids
    client_ids = redis.smembers(XROOTD_CLIENT)
    clients = {}
    for client_id in client_ids:
        client = redis.get(client_id)
        if client is not None:
            clients[client_id.decode('utf-8')] = json.loads(client)
    return clients
def predict_speaker():
    filename = next(tempfile._get_candidate_names())
    request.files['wav_sample'].save(filename)
    user_ids = redis.smembers(USER_IDS_SET)
    pipe = reduce(lambda p, next_id: p.hget(hm_data(next_id.decode('utf-8')), USER_MODEL),
                  user_ids, redis.pipeline())
    models_binary = pipe.execute()
    models = list(map(lambda x: pickle.loads(x), models_binary))
    probs = predict.speaker_distribution(filename, user_ids, models)
    os.remove(filename)
    return json.dumps(probs)
def get_speakers():
    user_ids = redis.smembers(USER_IDS_SET)
    pipe = reduce(
        lambda p, next_id: p.hget(hm_data(next_id.decode('utf-8')), USER_NAME),
        user_ids, redis.pipeline())
    user_name_ids = map(
        lambda x: {
            "name": x[0].decode('utf-8'),
            "id": x[1].decode('utf-8')
        }, zip(pipe.execute(), user_ids))
    return json.dumps(list(user_name_ids))
def get_servers(self):
    """
    Return all servers and details about them
    """
    # Get all server ids
    server_ids = redis.smembers(XROOTD_SERVER)
    servers = {}
    for server_id in server_ids:
        server = redis.get(server_id)
        if server is not None:
            servers[server_id.decode('utf-8')] = json.loads(server)
    return servers
def _generate_output(test_type):
    """Generate output for testing."""
    redis = _initialize_engine_redis()
    mapper, reducer = _setup_map_reduce()
    if test_type == 'mapper':
        mapper_output = _generate_mapper_output(mapper)
        test_file = ('{0}/parallelspider_results_mapper').format(
            _test_results_dir())
        with open(test_file, 'w') as f:
            f.write(mapper_output)
        new_links = _get_fake_base_id() + "::new_links"
        link_output = redis.smembers(new_links)
        test_file = ('{0}/parallelspider_results_new_links').format(
            _test_results_dir())
        with open(test_file, 'w') as f:
            f.write(str(link_output))
        finished_links = _get_fake_base_id() + "::finished"
        link_output = redis.smembers(finished_links)
        test_file = ('{0}/parallelspider_results_finished_links').format(
            _test_results_dir())
        with open(test_file, 'w') as f:
            f.write(str(link_output))
    if test_type == 'reducer':
        final_output = []
        mapper_output = []
        for out in mapper("", ""):
            mapper_output.append(out)
        reducer_input = _sort_output(mapper_output)
        for out in reducer_input:
            key, value = out
            for reducer_output in reducer(key, value):
                final_output.append(str(reducer_output))
        test_file = ('{0}/parallelspider_results_reducer').format(
            _test_results_dir())
        with open(test_file, 'w') as f:
            f.write("\n".join(final_output))
def getOnlineAIInfos(redis):
    online_ai_sum = 0
    cur_ai_gold_sum = 0
    online_ai_room_num_set = set()
    for key in redis.smembers('users:robot:accounts:set'):
        online, account, gold = redis.hmget(key, 'isOnline', 'account', 'gold')
        gold = int(gold) if gold else 0
        cur_ai_gold_sum += gold
        if online == '1':
            online_ai_sum += 1
            if redis.exists(GOLD_ROOM_ACCOUNT_KEY % account):
                online_ai_room_num_set.add(redis.get(GOLD_ROOM_ACCOUNT_KEY % account))
    return online_ai_sum, len(online_ai_room_num_set), cur_ai_gold_sum
def test_get_packages_arch(app, httpserver: HTTPServer, redis):
    base_url = "/snapshots/packages/x86_64/base"
    upstream_path = Path("./tests/upstream/snapshots/packages/x86_64/base")
    expected_file_requests = ["Packages"]
    for f in expected_file_requests:
        httpserver.expect_request(f"{base_url}/{f}").respond_with_data(
            (upstream_path / f).read_bytes())
    version = app.config["VERSIONS"]["branches"][0]
    with app.app_context():
        get_packages_arch(version, sources=["base"])
    assert b"base-files" in redis.smembers("packages-snapshot")
def _process_message(args, redis, message):
    log = logging.getLogger("_process_message")
    message_text = message["data"].decode("utf-8")
    redis_key, expected_count_str = message_text.split()
    expected_count = int(expected_count_str)
    members = redis.smembers(redis_key)
    if len(members) == expected_count:
        log.info("received key {0} with {1} set members".format(redis_key, len(members)))
    else:
        log.error("received key {0} with {1} set members expected {2}".format(
            redis_key, len(members), expected_count))
    # we don't need this key anymore
    redis.delete(redis_key)
    for member in members:
        file_name = member.decode("utf-8")
        path = os.path.join(args.watch_path, file_name)
        log.info("removing {0}".format(path))
        os.unlink(path)
def items(self):
    # WARNING: expensive for large collections
    redis = self.redis
    ids = redis.smembers(self.key)
    if not ids:
        return []
    ids = sorted(ids)
    pipe = redis.pipeline()
    for id in ids:
        pipe.get(id)
        pipe.get('%s.mtime' % id)
    items = []
    pipedata = pipe.execute()
    for id in ids:
        item = Item(pipedata.pop(0))
        item.mtime = float(pipedata.pop(0))
        item.__parent__ = self
        item.__name__ = id
        items.append((id, item))
    assert not pipedata  # sanity check, pipedata should be exhausted
    return items
def route_get_final_report():
    advr_id = advertiser_id()
    if not advr_id:
        return '', 401
    redis = get_redis()
    reports = {}
    for ad_key in redis.smembers(advertiser_key(advr_id)):
        ad = redis.hgetall(ad_key)
        if not ad:
            continue
        imp = int(fetch(ad, 'impressions', 0))
        ad['impressions'] = imp
        reports[ad['id']] = {'ad': ad, 'clicks': 0, 'impressions': imp}
    logs = get_log(advr_id)
    for ad_id, report in reports.items():
        log = fetch(logs, ad_id, [])
        report['clicks'] = len(log)
        breakdown = {'gender': {}, 'agents': {}, 'generations': {}}
        for click in log:
            incr_dict(breakdown['gender'], click['gender'])
            incr_dict(breakdown['agents'], click['agent'])
            if 'age' in click and click['age'] is not None:
                generation = int(click['age']) / 10
            else:
                generation = 'unknown'
            incr_dict(breakdown['generations'], generation)
        report['breakdown'] = breakdown
        reports[ad_id] = report
    return jsonify(reports)
def route_get_report():
    advr_id = advertiser_id()
    if not advr_id:
        return '', 401
    redis = get_redis()
    report = {}
    ad_keys = redis.smembers(advertiser_key(advr_id))
    for ad_key in ad_keys:
        ad = redis.hgetall(ad_key)
        if not ad:
            continue
        imp = int(fetch(ad, 'impressions', 0))
        ad['impressions'] = imp
        report[ad['id']] = {'ad': ad, 'clicks': 0, 'impressions': imp}
    for ad_id, clicks in get_log(advr_id).items():
        if ad_id not in report:
            report[ad_id] = {}
        report[ad_id]['clicks'] = len(clicks)
    return jsonify(report)
def fetch_favors(self, member_id):
    redis_structure_name = Constant.FAVORS + member_id
    return redis.smembers(redis_structure_name)
# get all bat files
filelist = redis.hkeys('batdo')
for item in filelist:
    filename = str(item.decode('utf-8'))
    if redis.hget('batfilepath', filename) is not None:
        batpath = rootpath + str(redis.hget('batfilepath', filename).decode('utf-8'))
        if not os.path.exists(batpath):
            os.makedirs(batpath)
        scope = redis.hget('batscope', filename)
        if scope is not None:
            if str(scope.decode('utf-8')) == 'seg':
                batcmdA = 'copy '
                batcmdB = 'copy '
                expfile = str(redis.hget('batdo', filename).decode('utf-8'))
                batcmdFile = batpath + '/' + expfile + '.bat'
                for segitem in redis.smembers('segment'):
                    segcode = str(segitem.decode('utf-8'))
                    iSegcode = int(segcode[0])
                    if iSegcode > 3:
                        batcmdB = batcmdB + expfile + segcode + '.csv+'
                    else:
                        batcmdA = batcmdA + expfile + segcode + '.csv+'
                    newfile = batpath + '/' + expfile + segcode + '.csv'
                    if not os.path.exists(newfile):
                        f = open(newfile, 'w')
                        f.close()
                print(batcmdA[0:len(batcmdA) - 1] + ' ' + expfile + '_A.csv')
                batcmd = open(batcmdFile, 'w')
                batcmd.write(batcmdA[0:len(batcmdA) - 1] + ' ' + expfile + '_A.csv \n ' +
                             batcmdB[0:len(batcmdB) - 1] + ' ' + expfile + '_B.csv \n')
                batcmd.close()
def all():
    return redis.smembers('hosts')
def get_int_set(key_prefix, id):
    key = "%s:%s" % (key_prefix, id)
    return set([int(el) for el in redis.smembers(key)])
redis.zadd(album_tunes_key, tune_key, tune.id3gw.get_trackn())
redis.sadd(tunes_key, tune_key)  # track and add all tunes
for image in images:
    if image.get_type() is image.get_undefined_type():
        continue
    i = {
        "type": image.get_type(),
        "dimx": image.get_dimx(),
        "dimy": image.get_dimx(),
        "filenameid": image.get_filename_id((album_id3["artist"], album_id3["album"]), image.get_type()),
    }
    image_key = RdsImage.get_key(artist, album, image.get_type())
    for key, value in i.iteritems():
        redis.hset(image_key, key, value)
    redis.sadd(album_images_key, image_key)
    redis.sadd(images_key, image_key)  # track and add all images
    shutil.copyfile(image.get_f(), os.path.join(mlii_path, i["filenameid"]))
albums_keys = redis.smembers(albums_key)
tunes_keys = redis.smembers(tunes_key)
images_keys = redis.smembers(images_key)
print "The Redis DB contains: %s albums, %s tunes, %s images" % (len(albums_keys), len(tunes_keys), len(images_keys))
def fetch_coupons(self, member_id):
    redis_structure_name = Constant.COUPON_BACKAGE + member_id
    return redis.smembers(redis_structure_name)
def product_summary(token, platform='4.0.4', callDropMode=False):
    '''
    Give a summary for all the accessible products. The summary data can be
    the error rate or the call drop rate.
    @param token access token
    @param platform android platform version, like: 2.3.3, 2.3.7, 4.0.3 or 4.0.4
    @param callDropMode work mode flag, whether error rate or call drop rate.
    '''
    # TODO: To optimize, error count, live time, call drop and call count can be computed before query.
    # Get the accessible product list
    result = getAccessibleProducts(token)
    print "result:%s" % result
    if result is None:
        print "No accessible products!"
        return {"error": "No accessible products!"}
    if 'error' in result:
        print "Error in result:%s" % result['error']
        return result
    products = result
    MAX_REVISION_COUNT = 5
    redis = brconfig.getRedis()
    pipe = redis.pipeline(transaction=True)
    errorSet = 'ids:e'  # all errors without call drop
    liveTimeSet = 'ids:s:com.borqs.bugreporter:LIVE_TIME'
    callDropSet = 'ids:b:CALL_DROP'
    callCountSet = 'ids:s:com.borqs.bugreporter:CALL_COUNT'
    tmp = 'tmp_%s' % uuid.uuid4()
    # Get the revision list for every product
    platProducts = redis.smembers('set:%s:products' % platform)
    products = set(products) & set(platProducts)
    products = list(products)
    products.sort()
    if callDropMode:
        mode = "calldrop"
        sumKey = "drop"
        countSet = callDropSet
        baseSet = callCountSet
    else:
        mode = "error"
        sumKey = "error"
        countSet = errorSet
        baseSet = liveTimeSet
    revisions = {}
    pLength = len(products)
    if pLength == 0:
        return []
    for product in products:
        revisionList = redis.sort('set:%s:%s:%s:revisions' % (platform, product, mode), alpha=True, desc=True)  # why alpha=True?
        # TODO: revisionList has at least one item, otherwise the product name will not be listed here.
        sub = []
        length = min(MAX_REVISION_COUNT, len(revisionList))
        for j in range(length):
            sub.append(revisionList[j])
            pipe.sinterstore(tmp, ['ids:%s:%s:%s' % (platform, product, revisionList[j]), countSet])
            pipe.sinter(['ids:%s:%s:%s' % (platform, product, revisionList[j]), baseSet])
        revisions[product] = sub
    pipe.delete(tmp)
    ret = pipe.execute()
    # Get the count data, and save the partial result to a temporary result set.
    # TODO: Why use a temporary result?
    summary = {}
    k = 0
    for product in products:
        sub = revisions[product]
        total = 0
        subSummary = {}
        subSummary["product"] = product
        subSummary["mode"] = sumKey
        subSummary["sublist"] = {}
        length = min(MAX_REVISION_COUNT, len(sub))
        for j in range(length):
            revision = sub[j]
            subSummary["sublist"][revision] = {}
            subSummary["sublist"][revision]["revision"] = revision
            errorOrDropCount = ret[k]
            subSummary["sublist"][revision]["count"] = int(errorOrDropCount)
            total += errorOrDropCount
            if callDropMode:
                listLink = "/api/brquery/query/error?android.os.Build.VERSION.RELEASE=%s&sandroid.os.Build.PRODUCT=%s&ro.build.revision=%s&e_type=CALL_DROP" % (platform, product, revision)
            else:
                listLink = "/api/brquery/query/error?android.os.Build.VERSION.RELEASE=%s&android.os.Build.PRODUCT=%s&ro.build.revision=%s" % (platform, product, revision)
            subSummary["sublist"][revision]["link"] = listLink
            callOrLiveIdSet = list(ret[k + 1])
            if len(callOrLiveIdSet) == 0:
                pipe.scard("ThisKeyWillNeverExist")  # always returns 0
            else:
                pipe.hmget('s:values', callOrLiveIdSet)
            k += 2
        subSummary["count"] = total
        subSummary["link"] = "/api/brquery/query/rate?groupby=ro.build.revision&android.os.Build.VERSION.RELEASE=%s&android.os.Build.PRODUCT=%s&mode=%s" % (platform, product, sumKey)
        summary[product] = subSummary
    ret = pipe.execute()
    # Get the base data (live time or call count), and compute the rate.
    k = 0
    for product in products:
        sub = revisions[product]
        total = 0
        length = min(MAX_REVISION_COUNT, len(sub))
        for j in range(length):
            revision = sub[j]
            valueSet = ret[k]
            count = 0
            if valueSet:
                for value in valueSet:
                    count += int(value)
            total += count
            if not callDropMode:
                count = count / 3600
            summary[product]["sublist"][revision]['base'] = count
            if count == 0:
                summary[product]["sublist"][revision]['rate'] = 'N/A'
            else:
                summary[product]["sublist"][revision]['rate'] = '%s%%' % (summary[product]["sublist"][revision]['count'] * 100 / count)
            k += 1
        if not callDropMode:
            total = total / 3600
        summary[product]['base'] = total
        if total == 0:
            summary[product]['rate'] = 'N/A'
        else:
            summary[product]['rate'] = '%s%%' % (summary[product]['count'] * 100 / total)
    pipe.execute()
    # Format the result
    result = []
    for product in products:
        subSummary = {}
        subList = []
        subSummary["product"] = summary[product]["product"]
        subSummary["mode"] = summary[product]["mode"]
        subSummary["count"] = summary[product]["count"]
        subSummary["base"] = summary[product]["base"]
        subSummary["rate"] = summary[product]["rate"]
        subSummary["link"] = summary[product]["link"]
        for revision in summary[product]["sublist"]:
            subList.append(summary[product]["sublist"][revision])
        subSummary["sublist"] = subList
        result.append(subSummary)
    return result
def get_ids_with_features(key_prefix):
    return redis.smembers("%s:ids" % key_prefix)
def all_names(redis=None):
    return redis.smembers(_key('e'))
def getAccepted(self):
    return redis.smembers("match:%s:accepted" % self.id)
def data_json():
    s = json.dumps([json.loads(item) for item in redis.smembers('fitbit')])
    return s
def smembers(self, key):
    redis = self._get_redis(key)
    return redis.smembers(key)
def get_string_set(key_prefix, id):
    key = "%s:%s" % (key_prefix, id)
    return redis.smembers(key)