def getNiuniuStatisList(redis, account):
    """Return bull-fight (niuniu) account statistics.

    Args:
        redis: redis client.
        account: optional account id. When given, only that account's row
            is returned and (as before) it carries no operation buttons.

    Returns:
        dict with 'total' (row count) and 'result' (list of stat hashes,
        each augmented with an 'op' button list).
    """
    res = []
    if account:
        info = redis.hgetall(NIUNIU_ACCOUNT_STACTICS_TABLE % account)
        if info:
            # Single-account view carries no operation buttons.
            info['op'] = []
            res.append(info)
        return {'total': len(res), 'result': res}
    for account in redis.smembers(NIUNIU_ACCOUNT_SET_TOTAL):
        info = redis.hgetall(NIUNIU_ACCOUNT_STACTICS_TABLE % account)
        if info:
            # Previously built with three identical append calls; one
            # literal list per row keeps the dicts independent per row.
            info['op'] = [
                {'url': '/admin/niuniu/reward_journal_op1?list=1',
                 'method': 'GET', 'txt': '奖励记录'},
                {'url': '/admin/niuniu/get_cash_journal_op1?list=1',
                 'method': 'GET', 'txt': '提现记录'},
                {'url': '/admin/niuniu/set_cash_journal_op1',
                 'method': 'GET', 'txt': '清零'},
            ]
            res.append(info)
    return {'total': len(res), 'result': res}
def hashes_redis():
    """Demo of Redis hash commands against a local server (db 0)."""
    import redis
    conn = redis.Redis(host='127.0.0.1', port=6379, db=0,
                       charset="utf-8", decode_responses=True)
    divider = "-------------"
    print(divider)
    print("HASH")
    print(divider)
    # hmset(name, mapping) / hget(name, key) / hgetall(name)
    conn.hmset("user.1", {"name": "peter", "email": "*****@*****.**"})
    print("map.1: ", conn.hgetall("user.1"))
    print("name.1:", conn.hget("user.1", "name"))
    print("email.1:", conn.hget("user.1", "email"))
    print(divider)
    # hset(key, field, value) / hget()
    conn.hset("user.2", "name1", "peter")
    print("map.2: ", conn.hgetall("user.2"))
    print("type map.2: ", conn.type("user.2"))
    print("name.2:", conn.hget("user.2", "name1"))
    print(divider)
    # delete all keys under the given prefix
    for key in conn.scan_iter("prefix:*"):
        conn.delete(key)
def _create_channel(self, connection, name):
    # NOTE(review): this body is syntactically invalid Python —
    # ``redis.hgetall(TO_FIELD: self.get_id())`` is not a legal call, and
    # ``redis.sort``/``ASC`` mix MongoDB-style arguments into a redis
    # client. It appears to be a half-finished port from a Mongo backend;
    # the bare ``except`` then silently hides the failure. Needs a real
    # rewrite against the intended backend.
    db = redis.Redis[self._db]
    try:
        collection = redis.sort(redis.hgetall(TO_FIELD: self.get_id()),("_id", ASC))
    # TODO: improve this catch. It should be more specific, but pymongo
    # behavior doesn't match its documentation, so we are being dirty.
    except:
        pass
def Fetch(config_name, redis):
    """Assemble a configuration dict for ``config_name`` from Redis.

    Hash-typed sections are fetched directly; set-typed sections are
    expanded member by member from ``source:<config_name>:<item>`` hashes.
    Sections of any other type are omitted.
    """
    config = dict()
    for section in ('user', 'experiment', 'sources'):
        section_key = config_name + ':' + section
        if redis.type(section_key) == 'hash':
            config[section] = redis.hgetall(section_key)
        if redis.type(section_key) == 'set':
            members = dict()
            for item in redis.smembers(section_key):
                members[item] = redis.hgetall('source:' + config_name + ':' + item)
            config[section] = members
    return config
def merge():
    # Aggregate per-topic word counts stored in Redis hashes and write a
    # normalized word -> topic-frequency file ("word_tf.txt").
    topic_count = defaultdict(int)
    f = "word2count.txt"  # NOTE(review): assigned but never used
    keys = redis.keys("*")
    # First pass: total occurrences per topic across every word hash.
    for pos, key in enumerate(keys):
        l = redis.hgetall(key)
        print "1",pos, key
        for k,v in l.iteritems():
            topic_count[int(k)]+=int(v)
    #word_topic_freq = defaultdict(list)
    with open("word_tf.txt", "w") as word_freq:
        for pos, word in enumerate(keys):
            tf = []
            l = redis.hgetall(word)
            for topic, freq in l.iteritems():
                topic = int(topic)
                count = topic_count[topic]
                # Skip low-volume topics (< 10000 total occurrences).
                if count < 10000:
                    continue
                # Scale frequency to a common base per topic.
                freq = int(freq)*500000/count
                if freq > 0:
                    tf.append((topic, freq))
            fcount = sum(i[1] for i in tf)
            tf = dict(tf)
            id = NAME2ID.get(name_tidy(word), 0)
            if id:
                # Boost the word's canonical topic to the full count.
                t = tf.get(id,0)
                diff = fcount - t
                tf[id] = fcount
                fcount += diff
            if not fcount:
                continue
            t = []
            # Normalize to rank out of 10000 and drop zero ranks.
            for topic, f in tf.iteritems():
                rank = int(f*10000/fcount)
                if rank:
                    t.append((topic, rank))
            if t:
                word_freq.write( dumps([word, t])+"\n" )
def _clean_up_entries_from_shard(self, object_ids, task_ids, shard_index):
    """Delete task/object bookkeeping entries from one Redis shard.

    Only object ids that actually have location (OL) or info (OI)
    entries on this shard are included in the delete. Removal is
    best-effort; any shortfall is logged as a warning.
    """
    redis = self.state.redis_clients[shard_index]
    # Collect object ids that actually have entries on this shard.
    with_locations = set()
    with_infos = set()
    for oid in object_ids:
        # OL entry present?
        if redis.zrange(OBJECT_LOCATION_PREFIX + oid, 0, -1):
            with_locations.add(oid)
        # OI entry present?
        if redis.hgetall(OBJECT_INFO_PREFIX + oid):
            with_infos.add(oid)
    # Assemble the full key list to delete.
    keys = [TASK_TABLE_PREFIX + t for t in task_ids]
    keys += [OBJECT_LOCATION_PREFIX + o for o in with_locations]
    keys += [OBJECT_INFO_PREFIX + o for o in with_infos]
    if not keys:
        return
    # Remove with best effort.
    num_deleted = redis.delete(*keys)
    log.info(
        "Removed {} dead redis entries of the driver from redis shard {}.".
        format(num_deleted, shard_index))
    if num_deleted != len(keys):
        log.warning(
            "Failed to remove {} relevant redis entries"
            " from redis shard {}.".format(len(keys) - num_deleted))
def dss_isa_callback(id):
    '''
    This is the call back end point that other USSes in the DSS network
    call once a subscription is updated.  Requires the
    ``dss.write.identification_service_areas`` scope; returns 204 on
    success and 400 when ``flights_url`` is missing.
    '''
    if requires_scope('dss.write.identification_service_areas'):
        new_flights_url = request.args.get('flights_url', 0)
        try:
            assert new_flights_url != 0
            r = redis.Redis(host=app.config['REDIS_HOST'],
                            port=app.config['REDIS_PORT'])
            # Get the flights URL from the DSS and put it in
            flights_dict = r.hgetall("all_uss_flights")
            all_flights_url = flights_dict['all_flights_url']
            # BUG FIX: the old code did ``x = x.append(...)`` (which stores
            # None — and hgetall returns strings, which have no append) and
            # then wrote the value back under the wrong field name
            # ('all_uss_flights').  Append textually to the existing field.
            # NOTE(review): assumes the field is a whitespace-separated URL
            # list — confirm the expected format with the readers.
            flights_dict['all_flights_url'] = '{} {}'.format(
                all_flights_url, new_flights_url).strip()
            r.hmset("all_uss_flights", flights_dict)
        except AssertionError:
            return Response("Incorrect data in the POST URL", status=400,
                            mimetype='application/json')
        else:
            # All OK return a empty response
            return Response("", status=204, mimetype='application/json')
    raise AuthError(
        {
            "code": "Unauthorized",
            "description": "You don't have access to this resource"
        }, 403)
def login_user():
    """POST login: verify the password and return the user's spotlights
    together with each spotlight's current status."""
    if (request.method == 'POST'):
        data = request.get_json()
        stored = redis.hgetall(data['username'])
        # Redis returns bytes; decode every field to str.
        profile = {
            field.decode('utf-8'): value.decode('utf-8')
            for (field, value) in stored.items()
        }
        spotlights = profile['spotlights'].replace(" ", "").split(',')
        rooms = ('living_room', 'kitchen', 'bathroom', 'bedroom')
        idents = ('1', '2', '3', '4')
        result = []
        # Pair each of the user's spotlights with its live status.
        for name in spotlights:
            for room, ident in zip(rooms, idents):
                if room == name:
                    result.append(name)
                    result.append(redis.hget(ident, 'status').decode('utf-8'))
        if sha256.verify(data['password'], profile['password']):
            flat = str(result).strip('[]').replace("'", "").replace(" ", "")
            return jsonify({
                'message': '{}'.format(data['username']),
                'spotlightsAndStatus': flat
            }), 200
        else:
            return jsonify({'message': 'Wrong access'}), 401
def getNiuniuOperateList(redis, start_date, end_date):
    """Return the niuniu daily operations report for a date range.

    Falls back to the trailing 7 days when the dates fail to parse.
    Rows are returned newest-first, each with its 'op' button list.
    """
    try:
        startDate = datetime.strptime(start_date, '%Y-%m-%d')
        endDate = datetime.strptime(end_date, '%Y-%m-%d')
    # Narrowed from a bare ``except``: strptime raises ValueError on a bad
    # format and TypeError on None.
    except (ValueError, TypeError):
        startDate = datetime.now() - timedelta(7)
        endDate = datetime.now()
    deltaTime = timedelta(1)
    res = []
    while startDate <= endDate:
        dateStr = startDate.strftime('%Y-%m-%d')
        if redis.exists(NIUNIU_GAME_OPERATE_BY_DAY_TABLE % dateStr):
            info = redis.hgetall(NIUNIU_GAME_OPERATE_BY_DAY_TABLE % dateStr)
            info['date'] = dateStr
            info['op'] = [{
                'url': '/admin/niuniu/tile_type_op1?list=1',
                'method': 'GET',
                'txt': '查看牌型统计表'
            }]
            res.append(info)
        startDate += deltaTime
    res.reverse()  # newest first
    return res
def getNiuniuTileTypeList(redis, start_date, end_date):
    """Return the niuniu tile-type statistics for a date range.

    Falls back to the trailing 7 days when the dates fail to parse.
    Each row gains a 'total' field summing every 'bull_*' counter.
    Rows are returned newest-first.
    """
    try:
        startDate = datetime.strptime(start_date, '%Y-%m-%d')
        endDate = datetime.strptime(end_date, '%Y-%m-%d')
    # Narrowed from a bare ``except``: strptime raises ValueError on a bad
    # format and TypeError on None.
    except (ValueError, TypeError):
        startDate = datetime.now() - timedelta(7)
        endDate = datetime.now()
    deltaTime = timedelta(1)
    res = []
    while startDate <= endDate:
        dateStr = startDate.strftime('%Y-%m-%d')
        if redis.exists(NIUNIU_GAME_OPERATE_BY_DAY_TABLE % dateStr):
            info = redis.hgetall(NIUNIU_GAME_OPERATE_BY_DAY_TABLE % dateStr)
            info['date'] = dateStr
            info['op'] = []
            total = 0
            # .items() instead of py2-only .iteritems() (same behaviour on
            # py2, valid on py3); startswith instead of slicing.
            for k, v in info.items():
                if k.startswith('bull_'):
                    total += int(v)
            info['total'] = total
            res.append(info)
        startDate += deltaTime
    res.reverse()  # newest first
    return res
def handle_privmsg(client: IrcClient, ident, channel_name, msg):
    """Queue a '!news' chat message for in-game display and acknowledge it."""
    channel_name = normalize_channel_name(channel_name)
    if channel_name not in channels or not msg.lower().startswith('!news '):
        return
    channel = channels[channel_name]
    # Trim the news queue down to its maximum size.
    while len(channel.newses) > MAX_NEWS_ENTRY_COUNT:
        channel.newses.pop()
    username = extract_username(ident)
    # Prefer the stored display name when the user is known.
    display_name = username
    if redis.exists(str(username)):
        display_name = redis.hgetall(str(username))['display_name']
    channel.newses.appendleft(f'!news {display_name}: {msg[6:]}')
    try:
        if channel_language[channel] == "kr":
            client.privmsg(channel_name, f'@{username} 잠시후 메세지가 게임내 표시됩니다.')
        else:
            client.privmsg(
                channel_name,
                f'@{username} Your message will soon be displayed in few minutes.'
            )
    except Exception:
        # Unknown channel language: default to Korean and remember it.
        client.privmsg(channel_name, f'@{username} 잠시후 메세지가 게임내 표시됩니다.')
        channel_language[channel] = "kr"
def GetCacheAccounts(redis, accountMap): # GET ENABLE CACHE try: enableList = redis.lrange('APO_ENABLE_LIST', 0, -1) for ins in enableList: apoInfo = json.loads(ins) if not apoInfo: continue brief = apoInfo['account_brief'] accountID = apoInfo['account_id'] if accountMap[brief]: accountMap[brief].append(accountID) else: accountMap[brief] = [] accountMap[brief].append(accountID) doingList = redis.hgetall('APO_DOING_LIST') for key in doingList: apoInfo = json.loads(doingList[key]) if not apoInfo: continue brief = apoInfo['account_brief'] accountID = apoInfo['account_id'] if accountMap[brief]: accountMap[brief].append(accountID) else: accountMap[brief] = [] accountMap[brief].append(accountID) return accountMap except Exception, e: print " ATTACH CACHE ACCOUNTS FAILED:%r" % e traceback.print_exc() return False
def comandos(msg):
    """Dispatch an incoming Telegram message to every bot module."""
    if 'text' not in msg:
        return
    text = msg['text']
    # Sudo-only switch toggling the bot's run status.
    if text.startswith('/run') and msg['from']['id'] == config.sudo:
        cmd = text.replace('/run ', '')
        redis.hset('run', 'status', '{}'.format(cmd))
        sendMessage(msg['chat']['id'],
                    'Alterando as configurações para *{}*'.format(cmd),
                    "markdown")
        return
    if redis.hgetall('run')['status'] == 'on':
        regras.regras(msg)        # rules module
        myid.my_id(msg)           # /id command module
        newmember.welcome(msg)    # welcome module
        promover.promover(msg)    # promotion module
        start.start(msg)          # startup module
        banhammer.banhammer(msg)  # administration module
        printar.printar(msg)      # /print command module
        comands.comands(msg)      # /comandos command module
        conversa(msg)             # AI chat module
        tradutor.traduzir(msg)    # translation module
        qr.make_qr(msg)           # /qr command module
def generate_result_list(number):
    # Build up to three ranked result strings for poll ``number`` from the
    # hash ``res_<number>``, padding missing ranks with a "no winner"
    # placeholder.
    result_value_list = redis.hvals('res_' + number)
    result_value_list.sort()
    result_value_list.reverse()  # descending vote values
    max_val = result_value_list[0]
    ret_str = []
    added_count = 0
    loop_count = 0
    while added_count < 3:
        # generate_member_list_from_value presumably returns (formatted
        # member string, number of members sharing ``max_val``) — confirm.
        elem_str, count = generate_member_list_from_value(
            redis.hgetall('res_' + number), max_val, number)
        ret_str.append(elem_str)
        added_count += count
        loop_count += 1
        if count < len(result_value_list):
            # NOTE(review): indexing by ``count`` (last tie size) rather
            # than the cumulative ``added_count`` looks suspicious —
            # confirm which was intended.
            max_val = result_value_list[int(count)]
        else:
            added_count = 3  #exit from loop
    # Pad to three entries with "no winner" placeholders.
    if loop_count == 1:
        ret_str.append('該当者なし')
        ret_str.append('該当者なし')
    elif loop_count == 2:
        ret_str.append('該当者なし')
    return ret_str
def wrapper(*args, **kwargs):
    """Ensure the requested blog entry is cached in Redis, then call func.

    Resolves blog_id from the handler argument (or the newest entry in
    'blog.list' when absent) and lazily loads the post from disk into a
    Redis hash on first access.
    """
    if len(args) == 1:
        handler = args[0]
        blog_id = handler.get_argument("blog_id", None)
        if not blog_id:
            blog_id = redis.zrange('blog.list', -1, -1)
            if blog_id:
                blog_id = redis.zrange('blog.list', -1, -1)[0]
            else:
                handler.write("请先在管理后台添加预发布的文章")
                handler.finish()
                return
    else:
        blog_id = args[1]
    if not redis.hgetall(blog_id):
        # Cache miss: parse the markdown file and store it as a hash.
        with open(utils.decrypt(blog_id), 'r') as p:
            lines = p.readlines() or ['']
            title = lines[0]
            create_at = int(os.path.getctime(utils.decrypt(blog_id)) or 1504724902)
            content = utils.md_parse(''.join(lines[2:-1]))
            redis.hset(blog_id, 'title', title)
            redis.hset(blog_id, 'create_at', create_at)
            redis.hset(blog_id, 'content', content)
    return func(*args, **kwargs)
def wrapper(*args, **kwargs):
    """Populate the Redis cache for a blog post before delegating to func.

    blog_id comes from the handler's query argument when called with a
    single handler arg, or positionally otherwise; when absent, the newest
    id in 'blog.list' is used.
    """
    if len(args) == 1:
        request_handler = args[0]
        blog_id = request_handler.get_argument("blog_id", None)
        if not blog_id:
            blog_id = redis.zrange('blog.list', -1, -1)
            if blog_id:
                blog_id = redis.zrange('blog.list', -1, -1)[0]
            else:
                request_handler.write("请先在管理后台添加预发布的文章")
                request_handler.finish()
                return
    else:
        blog_id = args[1]
    cached = redis.hgetall(blog_id)
    if not cached:
        # First access: read the source file and cache its parsed fields.
        with open(utils.decrypt(blog_id), 'r') as source:
            lines = source.readlines() or ['']
            title = lines[0]
            create_at = int(
                os.path.getctime(utils.decrypt(blog_id)) or 1504724902)
            content = utils.md_parse(''.join(lines[2:-1]))
            redis.hset(blog_id, 'title', title)
            redis.hset(blog_id, 'create_at', create_at)
            redis.hset(blog_id, 'content', content)
    return func(*args, **kwargs)
def off(request):
    """Django view: set the LED order/position to 'off' and render the page."""
    # Current publisher state dict from Redis.
    snapshot = redis.hgetall("publisher2pd")
    print(snapshot)
    obj = led.objects.get(id=1)
    obj.order = 'off'
    obj.position = 'off'
    obj2 = status.objects.get(id=1)
    obj.save()
    context = {
        'order': obj.order,
        'position': obj.position,
        'title': obj2.title,
        'state': obj2.moisture,
        'test': snapshot,
    }
    return render(request, 'delta/show.html', context)
def _entries_for_driver_in_shard(self, driver_id, redis_shard_index):
    """Collect IDs of control-state entries for a driver from a shard.

    Args:
        driver_id: The ID of the driver.
        redis_shard_index: The index of the Redis shard to query.

    Returns:
        Lists of IDs: (returned_object_ids, task_ids, put_objects). The
            first two are relevant to the driver and are safe to delete.
            The last contains all "put" objects in this redis shard; each
            element is an (object_id, corresponding task_id) pair.
    """
    # TODO(zongheng): consider adding save & restore functionalities.
    redis = self.state.redis_clients[redis_shard_index]
    task_table_infos = {}  # task id -> TaskInfo messages

    # Scan the task table & filter to get the list of tasks belong to this
    # driver. Use a cursor in order not to block the redis shards.
    for key in redis.scan_iter(match=TASK_TABLE_PREFIX + b"*"):
        entry = redis.hgetall(key)
        task_info = ray.gcs_utils.TaskInfo.GetRootAsTaskInfo(
            entry[b"TaskSpec"], 0)
        if driver_id != task_info.DriverId():
            # Ignore tasks that aren't from this driver.
            continue
        task_table_infos[task_info.TaskId()] = task_info

    # Get the list of objects returned by these tasks. Note these might
    # not belong to this redis shard.
    returned_object_ids = []
    for task_info in task_table_infos.values():
        returned_object_ids.extend([
            task_info.Returns(i) for i in range(task_info.ReturnsLength())
        ])

    # Also record all the ray.put()'d objects.
    put_objects = []
    for key in redis.scan_iter(match=OBJECT_INFO_PREFIX + b"*"):
        entry = redis.hgetall(key)
        # BUG FIX: hgetall returns bytes values, so comparing against the
        # str "0" was always False on py3 and every object was recorded as
        # a put. Compare against bytes instead.
        if entry[b"is_put"] == b"0":
            continue
        object_id = key.split(OBJECT_INFO_PREFIX)[1]
        task_id = entry[b"task"]
        put_objects.append((object_id, task_id))

    return returned_object_ids, task_table_infos.keys(), put_objects
def merge():
    # Aggregate per-topic word counts stored in Redis hashes and write a
    # normalized word -> topic-frequency file ("word_tf.txt").
    # (Duplicate of the earlier ``merge`` definition, reformatted.)
    topic_count = defaultdict(int)
    f = "word2count.txt"  # NOTE(review): assigned but never used
    keys = redis.keys("*")
    # First pass: total occurrences per topic across every word hash.
    for pos, key in enumerate(keys):
        l = redis.hgetall(key)
        print "1", pos, key
        for k, v in l.iteritems():
            topic_count[int(k)] += int(v)
    #word_topic_freq = defaultdict(list)
    with open("word_tf.txt", "w") as word_freq:
        for pos, word in enumerate(keys):
            tf = []
            l = redis.hgetall(word)
            for topic, freq in l.iteritems():
                topic = int(topic)
                count = topic_count[topic]
                # Skip low-volume topics (< 10000 total occurrences).
                if count < 10000:
                    continue
                # Scale frequency to a common base per topic.
                freq = int(freq) * 500000 / count
                if freq > 0:
                    tf.append((topic, freq))
            fcount = sum(i[1] for i in tf)
            tf = dict(tf)
            id = NAME2ID.get(name_tidy(word), 0)
            if id:
                # Boost the word's canonical topic to the full count.
                t = tf.get(id, 0)
                diff = fcount - t
                tf[id] = fcount
                fcount += diff
            if not fcount:
                continue
            t = []
            # Normalize to rank out of 10000 and drop zero ranks.
            for topic, f in tf.iteritems():
                rank = int(f * 10000 / fcount)
                if rank:
                    t.append((topic, rank))
            if t:
                word_freq.write(dumps([word, t]) + "\n")
def _entries_for_driver_in_shard(self, driver_id, redis_shard_index):
    """Collect IDs of control-state entries for a driver from a shard.

    Args:
        driver_id: The ID of the driver.
        redis_shard_index: The index of the Redis shard to query.

    Returns:
        Lists of IDs: (returned_object_ids, task_ids, put_objects). The
            first two are relevant to the driver and are safe to delete.
            The last contains all "put" objects in this redis shard; each
            element is an (object_id, corresponding task_id) pair.
    """
    # TODO(zongheng): consider adding save & restore functionalities.
    redis = self.state.redis_clients[redis_shard_index]
    task_table_infos = {}  # task id -> TaskInfo messages

    # Scan the task table & filter to get the list of tasks belong to this
    # driver. Use a cursor in order not to block the redis shards.
    for key in redis.scan_iter(match=TASK_TABLE_PREFIX + b"*"):
        entry = redis.hgetall(key)
        task_info = TaskInfo.GetRootAsTaskInfo(entry[b"TaskSpec"], 0)
        if driver_id != task_info.DriverId():
            # Ignore tasks that aren't from this driver.
            continue
        task_table_infos[task_info.TaskId()] = task_info

    # Get the list of objects returned by these tasks. Note these might
    # not belong to this redis shard.
    returned_object_ids = []
    for task_info in task_table_infos.values():
        returned_object_ids.extend([
            task_info.Returns(i) for i in range(task_info.ReturnsLength())
        ])

    # Also record all the ray.put()'d objects.
    put_objects = []
    for key in redis.scan_iter(match=OBJECT_INFO_PREFIX + b"*"):
        entry = redis.hgetall(key)
        # BUG FIX: hgetall returns bytes values, so comparing against the
        # str "0" was always False on py3 and every object was recorded as
        # a put. Compare against bytes instead.
        if entry[b"is_put"] == b"0":
            continue
        object_id = key.split(OBJECT_INFO_PREFIX)[1]
        task_id = entry[b"task"]
        put_objects.append((object_id, task_id))

    return returned_object_ids, task_table_infos.keys(), put_objects
def export_to_json():
    '''
    Export de-duplicated job statistics per (city, keyword) to city.json.
    :return:
    '''
    import json
    import re
    redis = get_redis()
    scan = redis.scan_iter
    citys = {}
    # Initialise a counter bucket per (city, keyword) pair.
    for city in CITY:
        citys[city] = {}
        # NOTE(review): loop variable ``filter`` shadows the builtin.
        for kw, filter in KEYWORDS:
            citys[city][kw] = {"count":0, "number": 0, "min_sum":0.0, "max_sum":0.0, "skip": 0}
    a = 1
    for key in scan("jobs_*"):
        task = redis.hgetall(key)
        # Only count entries not flagged as duplicates.
        if task["repetition"] == "False":
            # Parse vacancy count; the "若干" (several) wording counts as 3.
            number = 1
            if task["source"] == "zl":
                if u"若干" in task["number"].decode("utf-8"):
                    number = 3
                else:
                    number = int(task["number"][0])
            elif task["source"] == "qc":
                number = re.match(r".*?(\d+).*", task["number"])
                if number:
                    number = int(number.group(1))
                else:
                    number = 3
            a += 1
            print(task["city"].decode("utf-8"), task["keyword"].decode("utf-8"), key)
            if not citys[task["city"].decode("utf-8")].get(task["keyword"].decode("utf-8")):
                print("~~~~~~~~")
                continue
            citys[task["city"].decode("utf-8")][task["keyword"].decode("utf-8")]["number"] += number
            citys[task["city"].decode("utf-8")][task["keyword"].decode("utf-8")]["count"] += 1
            if "-" in task["salary"]:
                # NOTE(review): ``min``/``max`` shadow the builtins here.
                min, max = map(lambda x: re.findall(r"(\d+)", x)[0], task["salary"].split("-"))
                citys[task["city"].decode("utf-8")][task["keyword"].decode("utf-8")]["min_sum"] += int(min)
                citys[task["city"].decode("utf-8")][task["keyword"].decode("utf-8")]["max_sum"] += int(max)
            else:
                # Salary without a range is counted as skipped.
                citys[task["city"].decode("utf-8")][task["keyword"].decode("utf-8")]["skip"] += 1
    print(a)
    # Dump a per-(city, keyword) summary to stdout.
    for city in CITY:
        for kw, filter in KEYWORDS:
            print(city), print(kw), print(citys[city][kw]["count"]), print(citys[city][kw]["number"]), print(citys[city][kw]["min_sum"]), print(citys[city][kw]["max_sum"])
    with open("city.json", 'w') as f:
        f.write(json.dumps(citys))
def get_user_info(redis, account):
    """Look up a player's profile hash by account.

    Returns an empty dict when the account has no user table; otherwise
    the profile hash augmented with a 'uid' field parsed from the table key.
    """
    user_table = redis.get(FORMAT_ACCOUNT2USER_TABLE % account)
    if not user_table:
        return {}
    profile = redis.hgetall(user_table)
    profile['uid'] = user_table.split(':')[1]
    return profile
def publishUsers():
    """Publish the alphabetically sorted user list to the 'user_list' channel."""
    users = redis.hgetall("users")
    # One "\n<name> <value>" line per user, in sorted key order.
    lines = [
        "\n" + name.decode('utf-8') + " " + value.decode('utf-8')
        for name, value in sorted(users.items())
    ]
    redis.publish("user_list", "".join(lines))
def tags():
    """GET: dump the tag hash; POST: store whitelisted form fields."""
    if request.method == 'POST':
        allowed = {k: v for k, v in request.form.items() if k in KEYS}
        redis.hmset(HASH, allowed)
        return "success"
    if request.method == 'GET':
        return repr(redis.hgetall(HASH))
    return "error"
def show_key(key):
    """JSON-encode a key's value, fetched according to its Redis type."""
    strategies = {
        'string': lambda x: redis.get(x),
        'hash': lambda x: redis.hgetall(x),
        'list': lambda x: redis.lrange(x, 0, -1),
    }
    value = definite_strategy(strategies, redis.type(key), key)
    return json.dumps(value)
def get_file(self, fid, stop_event, driver=None, restart=False):
    """Transfers a file from a Driver to another.

    Pulls ``fid`` chunk by chunk over a ZMQ DEALER socket from the source
    driver and feeds each chunk to the local driver's upload handlers.
    Progress ('from' driver and byte 'offset') is checkpointed in a Redis
    hash so an interrupted transfer can resume.

    :param fid: id of the file being transferred
    :param stop_event: event set when another transaction for the same
        file supersedes this one; the transfer aborts when it is set
    :param driver: source driver name; None means resume the transfer
        recorded in Redis
    :param restart: when True, skip the 'start_upload' call (resuming)
    """
    redis = self.plug.redis
    metadata = Metadata.get_by_id(self.plug, fid)
    filename = metadata.filename
    transfer_key = 'drivers:{}:transfers:{}'.format(self.plug.name, fid)
    if driver:
        # Fresh transfer: register it and checkpoint offset 0.
        redis.sadd('drivers:{}:transfers'.format(self.plug.name), fid)
        redis.hmset(transfer_key, {'from': driver, 'offset': 0})
        offset = 0
        self.logger.info("Starting to get '{}' from {}", filename, driver)
    else:
        # Resume: read the checkpointed source driver and offset.
        transfer = redis.hgetall(transfer_key)
        driver = transfer['from']
        offset = int(transfer['offset'])
        self.logger.info("Restarting transfer of '{}' from {}",
                         filename, driver)
    dealer = self.context.socket(zmq.DEALER)
    port = redis.get('drivers:{}:router'.format(driver))
    dealer.connect('tcp://localhost:{}'.format(port))
    end = metadata.size
    chunk_size = self.plug.options.get('chunk_size', 1 * 1024 * 1024)
    if not restart:
        self._call('start_upload', metadata)
    while offset < end:
        if stop_event.is_set():
            # another transaction for the same file has
            # probably started
            self.logger.info("Aborting transfer of '{}' from {}",
                             filename, driver)
            return
        dealer.send_multipart((filename, str(offset), str(chunk_size)))
        chunk = dealer.recv()
        self.logger.debug("Received chunk of size {} from {} for '{}'",
                          len(chunk), driver, filename)
        self._call('upload_chunk', filename, offset, chunk)
        # hincrby keeps the Redis checkpoint and the local offset in sync.
        offset = redis.hincrby(transfer_key, 'offset', len(chunk))
    self._call('end_upload', metadata)
    # Transfer complete: drop the checkpoint state.
    redis.delete(transfer_key)
    redis.srem('drivers:{}:transfers'.format(self.plug.name), fid)
    self.logger.info("Transfer of '{}' from {} successful",
                     filename, driver)
def left(cls, redis, userkey, money, cond=2): """ 剩余金额 :param redis: :param userkey: :param money: :return: """ t = redis.hgetall(userkey) used = round(sum(map(float, t.values())), cond) left = round(money - used, cond) return left
def getRedisKey(request):
    """Return the top five keywords (by count, descending) as a JSON list."""
    counts = redis.hgetall(KEYWORDS)
    ranked = sorted(counts.items(), key=lambda kv: kv[1], reverse=True)
    top_keys = [k for k, _ in ranked[:5]]
    return HttpResponse(json.dumps(top_keys))
def test_hash(redis, keys, fields, retry): print "generating data..." for i in range(fields): redis.hset("key", ''.join([random.choice(string.ascii_letters + string.digits) for i in range(8)]), 1) for i in range(keys - 1): redis.hset(''.join([random.choice(string.ascii_letters + string.digits) for i in range(8)]), ''.join([random.choice(string.ascii_letters + string.digits) for i in range(8)]), 1) delays = [] print "testing..." for i in range(retry): t1 = datetime.datetime.now() redis.hgetall("key") t2 = datetime.datetime.now() td = t2 - t1 delays.append(td.days * 24 * 3600 * 1000 + td.seconds * 1000 + td.microseconds / 1000.0) result = pd.Series(delays) result.to_csv("hash_%d_%d.csv" % (fields, retry)) print result.describe()
def top(cls, redis, userkey):
    """
    Luckiest entry: the (id, amount) pair with the largest amount.
    :param redis: redis client
    :param userkey: hash key mapping id -> amount
    :return: (int(id), float(amount)) tuple with the maximum amount
    """
    entries = redis.hgetall(userkey)
    pairs = [(int(k), float(v)) for k, v in entries.items()]
    # Stable sort then take the last element, preserving the original
    # tie-breaking behaviour.
    pairs.sort(key=lambda p: p[1])
    return pairs[-1]
def fetch_all(type_name, hashcode):
    # Recursively reconstruct a stored object graph from Redis.  Nodes are
    # addressed as "<type>:<hash>"; a parallel
    # "<type>:linked_fields:<hash>" entry records which fields/indices
    # point at child nodes.  Type dispatch is exception-driven: GET fails
    # on non-string keys, HGETALL fails on lists.
    namehash = "%s:%s" % (type_name, hashcode)
    metahash = "%s:linked_fields:%s" % (type_name, hashcode)
    try:
        # Plain string node: return the raw value.
        return redis.get(namehash)
    except Exception, e:
        try:
            # Hash node: replace each linked field with its recursively
            # fetched child; "0" marks an empty link.
            current_level_fields = redis.hgetall(namehash)
            linked_fields = redis.hgetall(metahash)
            for key in linked_fields:
                obj = linked_fields[key]
                if obj == "0":
                    current_level_fields[key] = []
                else:
                    current_level_fields[key] = fetch_all(type_name, obj)
        except Exception, e:
            # List node: linked_fields holds the indices whose items are
            # child-node hashes to expand recursively.
            current_level_fields = redis.lrange(namehash, 0, -1)
            linked_fields = redis.lrange(metahash, 0, -1)
            for index_item in linked_fields:
                i = int(index_item)
                list_item_hash = current_level_fields[i]
                current_level_fields[i] = fetch_all(type_name, list_item_hash)
        # NOTE(review): the hash/list branches build current_level_fields
        # but never return it — callers receive None. Confirm whether a
        # ``return current_level_fields`` is missing here.
def main():
    # Scrape yaofangwang.com pharmacy listings: on first run, walk every
    # paginated listing page and record each product in a Redis hash as
    # drugId -> "price:approvalNum:stock"; then fetch per-drug details and
    # finally drop the working hash.
    global drugs
    drugs = redis.hgetall(redisHash)
    if len(drugs) == 0:
        url = 'https://www.yaofangwang.com/yaodian/379739/medicines.html'
        soup = getSoup(url)
        # Total product count drives the progress bar.
        count = soup.select_one('.tabnav .count b').string.strip()
        bar = Bar('正在获取商品编号...', max=int(count))
        while True:
            li = soup.select('ul.goods3 li')
            for i in li:
                detailUrl = 'https:' + i.a['href']
                # https://www.yaofangwang.com/detail-xxxxxxxx.html
                detailSoup = getSoup(detailUrl)
                try:
                    approvalNum = detailSoup.select_one('head title').text.split(',')[-1].split('_')[0]
                    ourPrice = detailSoup.select_one('#pricedl .money .num').string.strip()
                    drugId = detailSoup.select_one('#aFavorite')['data-mid']
                    ourStock = detailSoup.select_one('#reserve').string.strip()
                # Pages missing any of the selectors are skipped silently.
                except AttributeError:
                    pass
                except IndexError:
                    pass
                else:
                    redis.hset(redisHash, drugId, ourPrice + ':' + approvalNum + ':' + ourStock)
                bar.next()
            # Follow pagination until there is no "next" link.
            nextPage = soup.select_one('div.pager div.list a.next')
            if nextPage == None:
                break
            url = 'https://www.yaofangwang.com' + nextPage['href']
            soup = getSoup(url)
        bar.finish()
        drugs = redis.hgetall(redisHash)
    bar = Bar('正在抓取商品信息...', max=len(drugs))
    for i in drugs:
        # getPrice(i)
        getInfo(i)
        bar.next()
    bar.finish()
    redis.delete(redisHash)
def get_groupon_by_id(self, coupon_id):
    """
    Fetch one coupon's hash and decode every field/value to str.
    :param coupon_id: coupon identifier
    :return: dict of decoded field -> value
    """
    key = Constant.DISCOUNT + Constant.COLON + coupon_id
    raw = redis.hgetall(key)
    return {
        field.decode('utf-8'): value.decode('utf-8')
        for field, value in raw.items()
    }
def logout():
    """Clear local session state and redirect to the Auth0 logout endpoint."""
    redis.delete(session['current_user'])
    # (Removed: an unused ``sids = redis.hgetall("sids")`` lookup and a
    # redundant session.pop before session.clear().)
    # Clear session stored data
    session.clear()
    session['current_user'] = None
    session['logged_in'] = None
    session.pop('current_user', None)
    # Redirect user to logout endpoint
    params = {'returnTo': url_for('sign', _external=True),
              'client_id': 'xaTwJSSL6BkiDhFLFxY8okLueLSLcAqd'}
    return redirect(auth0.api_base_url + '/v2/logout?' + urlencode(params))
def command_sequence(script_name, command, redis):
    """Store ``command`` (UTF-8 encoded) as the 'msg' field of a script's hash.

    Existing fields of the hash are preserved; only 'msg' is replaced.
    """
    payload = redis.hgetall(script_name)
    # hgetall returns {} (never None) for a missing key, so the old
    # ``if s_dict is not None`` check was dead code; assigning directly
    # handles both the existing and the empty case.
    payload['msg'] = command.encode('utf-8')
    # Write the (possibly new) message back into the script queue.
    redis.hmset(script_name, payload)
def get_counter(redis, name, precision):
    """Return a counter's samples as a sorted list of (bucket, count).

    :param redis: redis client
    :param name: counter name
    :param precision: time-bucket precision (part of the hash key)
    :return: list of (int(bucket), int(count)), ascending by bucket
    """
    # Renamed from ``hash`` (shadowed the builtin); .items() replaces the
    # py2-only .iteritems() (same behaviour on py2, valid on py3).
    hash_key = '%s:%s' % (precision, name)
    data = redis.hgetall('test:count:' + hash_key)
    ret = sorted((int(key), int(value)) for key, value in data.items())
    return ret
def genenate_voting_result_message(key):
    """Build a LINE buttons-template message showing the planning-poker result."""
    votes = redis.hgetall(key)
    tmp = generate_voting_result_image(votes)
    image_url = ('https://scrummasterbot.herokuapp.com/images/tmp/'
                 + tmp + '/result_11.png')
    template = ButtonsTemplate(
        title='ポーカー結果',
        text='そろいましたか?',
        thumbnail_image_url=image_url,
        actions=[MessageTemplateAction(label='もう1回', text='プラポ')])
    return TemplateSendMessage(alt_text='結果', template=template)
def schedule(self):
    """Hourly loop: at 9/15/21 o'clock, push each scheduled reply to its user."""
    while True:
        time.sleep(60 * 60)
        for alias, value in redis.hgetall('schedule').items():
            user_id = self.get_user_id_by_alias(alias)
            # '1'/'0' values are flags, not messages; skip them.
            if not user_id or value in ['1', '0']:
                continue
            if datetime.datetime.now().hour not in [9, 15, 21]:
                continue
            reply = self.auto_reply(user_id, value.decode('utf-8'),
                                    msg_from='contact')
            self.send_msg_by_uid(reply, user_id)
def f(key):
    # Recursively resolve a Redis key into a flat list of values.
    # Strategy by key type:
    #   list   -> concatenate item expansions; items beginning with '::'
    #             or ':#' are nested keys expanded recursively
    #   hash   -> single-element list containing the whole hash
    #   none   -> empty list (missing key)
    #   string -> the value, expanded recursively when it starts with '::'
    # Unknown types fall back to the 'hash' strategy.
    return (
        definite_strategy (
            { 'list': lambda x: reduce(lambda x,y: x+y, [ f(a) if a[0:2] == '::' or a[0:2] == ':#' else [a] for a in redis.lrange(x, 0, -1) ])
            , 'hash': lambda x: [redis.hgetall(x)]
            , 'none': lambda x: []
            , 'string': lambda x: [ f(a) if a[0:2] == '::' else a for a in [redis.get(x)] ]
            }
            , redis.type(key)
            , key
            , default_strategy = 'hash'
        )
    )
def clear_sub_nodes(type_name, hashcode):
    """Recursively delete a stored node and every child node it links to.

    The node lives at "<type>:<hash>"; its companion
    "<type>:linked_fields:<hash>" entry describes where the child links
    live (list indices for list nodes; for hash nodes every field value
    of the node itself is treated as a child hash).
    """
    namehash = "%s:%s" % (type_name, hashcode)
    metahash = "%s:linked_fields:%s" % (type_name, hashcode)
    meta_type = redis.type(metahash)
    if meta_type == "list":
        # List node: linked_fields holds the indices of child items.
        for index in redis.lrange(metahash, 0, -1):
            clear_sub_nodes(type_name, redis.lindex(namehash, int(index)))
    elif meta_type == "hash":
        # Hash node: recurse into every field value of the node hash.
        node_fields = redis.hgetall(namehash)
        for field in node_fields:
            clear_sub_nodes(type_name, node_fields[field])
    redis.delete(namehash)
    redis.delete(metahash)
def queued_job_info(): """Provides metadata for all known jobs. Returns a list of dictionaries: [ {job_id, request_url, submitted, page_title, status}, ..., ]""" jobs = [] # Show the ten most recent jobs for job_id in redis.lrange(joblist, 0, 9): job = rqueue.fetch_job(job_id) if job is None: continue # don't bother showing the 'deleted' jobs job_details = redis.hgetall(jobkey(job_id)) job_details['submitted'] = nicetimedelta(job_details['submitted']) job_details['status'] = job.get_status() jobs.append(job_details) return jobs
def tweet_and_shout(api_session, redis, key, timeout=600): """""" for tweet_id in redis.lrange("%s:%s" % (LOLCOIFFEURS_LIST, key), 0, -1): tweet_dict = redis.hgetall("%s:tweet:%s" % (LOLCOIFFEURS_LIST, tweet_id)) # Tracking answered tweets in a brand new set, and posting # a reply to it print "replying tweet : %s" % (tweet_id) redis.sadd((LOLCOIFFEURS_LIST + ":%s:answered" % (key)), tweet_id) # api_session.PostUpdate("@%s %s" % (tweet_dict["username"], RESPONSE), in_reply_to_status_id=tweet_id) # Popping out element from the left of the list # as we answer it redis.rpop("%s:%s" % (LOLCOIFFEURS_LIST, key)) # Wait timeout before replying again sleep(timeout) return
def get_players(cache_ignore=False):
    """Return challenger+master players per region, using a Redis cache.

    :param cache_ignore: when True, bypass the cache and refetch from the
        Riot API.
    :return: dict region -> list of {'name', 'id'} player dicts.
    """
    if not cache_ignore:
        players_by_region = redis.hgetall('players_by_region')
        # Cache is valid only when all three regions are present and
        # non-empty.  .items() replaces the py2-only .iteritems() (same
        # behaviour on py2, valid on py3).
        if players_by_region and len(players_by_region.keys()) == 3:
            players_by_region = {
                x: json.loads(y) for x, y in players_by_region.items()
            }
            if all([len(y) > 0 for x, y in players_by_region.items()]):
                return players_by_region
    players_by_region = {NORTH_AMERICA: [], KOREA: [], EUROPE_WEST: []}
    for region in players_by_region.keys():
        players = []
        challengers = riot.get_challenger(region=region)
        masters = riot.get_master(region=region)
        for p in challengers['entries'] + masters['entries']:
            player = {'name': p['playerOrTeamName'], 'id': p['playerOrTeamId']}
            players.append(player)
        # Refresh the cache for this region.
        redis.hset('players_by_region', region, json.dumps(players))
        players_by_region[region] = players
    return players_by_region
def route_get_final_report():
    # Final campaign report for the authenticated advertiser: per-ad
    # impressions, click counts, and a click breakdown by
    # gender/agent/generation.
    advr_id = advertiser_id()
    if not advr_id:
        return '', 401
    redis = get_redis()
    reports = {}
    # Seed one report row per ad the advertiser owns.
    for ad_key in redis.smembers(advertiser_key(advr_id)):
        ad = redis.hgetall(ad_key)
        if not ad:
            continue
        imp = int(fetch(ad, 'impressions', 0))
        ad['impressions'] = imp
        reports[ad['id']] = { 'ad': ad, 'clicks': 0, 'impressions': imp }
    logs = get_log(advr_id)
    for ad_id, report in reports.items():
        log = fetch(logs, ad_id, [])
        report['clicks'] = len(log)
        breakdown = { 'gender': {}, 'agents': {}, 'generations': {} }
        for click in log:
            incr_dict(breakdown['gender'], click['gender'])
            incr_dict(breakdown['agents'], click['agent'])
            if 'age' in click and click['age'] != None:
                # NOTE(review): on py3 ``/`` yields a float key (e.g. 2.0);
                # integer division (age decade) was presumably intended —
                # confirm which interpreter this runs under.
                generation = int(click['age']) / 10
            else:
                generation = 'unknown'
            incr_dict(breakdown['generations'], generation)
        report['breakdown'] = breakdown
        reports[ad_id] = report
    return jsonify(reports)
def route_get_report():
    """Basic per-ad report (impressions + click counts) for the advertiser."""
    advr_id = advertiser_id()
    if not advr_id:
        return '', 401
    redis = get_redis()
    report = {}
    # One row per owned ad, seeded with its impression count.
    for ad_key in redis.smembers(advertiser_key(advr_id)):
        ad = redis.hgetall(ad_key)
        if not ad:
            continue
        impressions = int(fetch(ad, 'impressions', 0))
        ad['impressions'] = impressions
        report[ad['id']] = {'ad': ad, 'clicks': 0, 'impressions': impressions}
    # Merge click counts, creating rows for ads seen only in the log.
    for ad_id, clicks in get_log(advr_id).items():
        entry = report.setdefault(ad_id, {})
        entry['clicks'] = len(clicks)
    return jsonify(report)
def status():
    # Query a Minecraft server's rules with a 30s Redis cache.
    # Query args: host (required), port (defaults to 25565).
    # The designated test host returns a canned player list.
    host = request.args.get('host')
    try:
        port = int(request.args.get('port', default=25565))
    # NOTE(review): py2-only ``except Exception, e`` here, but ``as e``
    # below — this file mixes the two syntaxes.
    except Exception, e:
        port = 25565  # fall back to the default Minecraft port
    test = (host == 'test.mc.glassmoon.ru')
    if test:
        return jsonify(players=['doge', 'such', 'wow', 'diamonds'])
    result = None
    if redis:
        # Serve from cache when available.
        key = "%s:%s" % (host, str(port))
        result = redis.hgetall(key)
    if not result:
        try:
            result = MinecraftQuery(host, port).get_rules()
        except Exception as e:
            result = {'message': 'No response from the Minecraft server'}
        if redis:
            redis.hmset(key, result)
            redis.pexpire(key, 30000)  # 30s
    return jsonify(**result)

if __name__ == '__main__':
    app.run(debug=options.debug, port=options.port)
def get_json(self):
    """Serialize this object's Redis hash to a JSON string."""
    fields = redis.hgetall(self.key)
    return json.dumps(fields)
# -*- coding: utf-8 -*- """Provides functions to fetch Twitter keys from the Redis key store. Functions provided: get_twitter_consumer_key() get_twitter_consumer_secret() get_twitter_oauth_token() get_twitter_oauth_token_secret() """ import redis redis = redis.Redis() keys = redis.hgetall('twitter_keys') def get_twitter_consumer_key(): """Returns the Twitter consumer key as a string. Accepts no agurments""" return keys['consumer_key'] def get_twitter_consumer_secret(): """Returns the Twitter consumer secret as a string, Accepts no agruments.""" return keys['consumer_secret'] def get_twitter_oauth_token(): """Returns the Twitter oauth token as a string. Accepts no arguments.""" return keys['oauth_token']
        # (tail of a method whose def is outside this view)
        return self.redis.keys( match_key )

    def delete_all(self): #tested
        # Remove every key in this graph's namespace (keys are prefixed
        # with the class separator).
        keys = self.redis.keys(self.sep+"*")
        for i in keys:
            self.redis.delete(i)

if __name__ == "__main__":
    # test driver
    # NOTE(review): ``redis = redis.StrictRedis(...)`` rebinds the
    # imported module name to a client instance (db 11).
    redis = redis.StrictRedis( host = "127.0.0.1", port=6379, db = 11 )
    common = Redis_Graph_Common( redis)
    # Build a head -> level_one -> level_two node chain, dumping each key.
    redis_key, new_namespace =common.construct_node( [], "","head","head" )
    print redis_key,new_namespace
    print redis.hgetall(redis_key)
    redis_key, new_namespace =common.construct_node( new_namespace,"relation 1","level_one","h1" )
    print redis_key,new_namespace
    print redis.hgetall(redis_key)
    redis_key, new_namespace =common.construct_node( new_namespace,"relation 2","level_two","h2" )
    print redis_key,new_namespace
    print redis.hgetall(redis_key)
    print "simple match"
    print common.match( "relation 2","level_two","h2")
    print "starting match"
    print common.match( "*","level_two","h2",[["","head","head"]])
    print "all the keys"
    print redis.keys("*")
def hgetall(self, key):
    """HGETALL ``key`` on whichever shard owns it."""
    shard = self._get_redis(key)
    return shard.hgetall(key)
def repr_hash(key):
    """Return the hash stored at ``key``; when it contains a 'template'
    field, each value is rendered through repr()."""
    fields = redis.hgetall(key)
    if 'template' in fields.keys():
        return dict((name, repr(fields[name])) for name in fields.keys())
    return fields
def render_hash_values(key):
    """Return all field/value pairs of the hash stored at ``key``."""
    hash_contents = redis.hgetall(key)
    return hash_contents
def get_dict(key_prefix, id, key_type=int, val_type=int):
    """Load the "<prefix>:<id>" hash, converting keys and values with the
    given callables (both default to int)."""
    full_key = "%s:%s" % (key_prefix, id)
    return {
        key_type(field): val_type(value)
        for field, value in redis.hgetall(full_key).items()
    }
def get_user_ratings(user_id):
    """Return {film_id: rating} for a user; empty dict when user_id is falsy."""
    if not user_id:
        return dict()
    ratings = redis.hgetall("user:%s:ratings" % user_id)
    return {int(film): int(score) for film, score in ratings.items()}
def get_ratings(film_id=None, actor_id=None, director_id=None, type=1):
    """Return {id: rating} for the entity addressed by the given ids/type."""
    key = "ratings:%s" % _rating_key(film_id, actor_id, director_id, type)
    return {int(item): int(score) for item, score in redis.hgetall(key).items()}
def memory_finish():
    """Record a finished memory-game step count and return the rank table."""
    # BUG FIX: form values are strings, so ``step > 0`` compared str to
    # int — always truthy on py2 ('0' included) and a TypeError on py3.
    # Convert to int first so only positive counts are recorded.
    step = int(request.form['step'])
    if step > 0:
        redis.hincrby('memory_ranks', step, 1)
    ranks = redis.hgetall('memory_ranks')
    return jsonify(ranks)
cf.add_remote( name="satellite_1",modbus_address=100,irrigation_station_number=44, card_dict={"open":"Remote 1 Open Wire","short":"Remote 1 Shorted Selenoid","connectivity":"Remote 1 Connectivity"}) cf.add_remote( name="satellite_2",modbus_address=125 ,irrigation_station_number=22,card_dict={"open":"Remote 2 Open Wire","short":"Remote 2 Shorted Selenoid","connectivity":"Remote 2 Connectivity"}) cf.add_remote( name="satellite_3",modbus_address=170,irrigation_station_number=22,card_dict={"open":"Remote 3 Open Wire","short":"Remote 3 Shorted Selenoid","connectivity":"Remote 3 Connectivity"}) cf.end_rtu_interface() cf.end_udp_io_server() cf.end_controller() cf.end_site() cf.end_system() keys = redis.keys("*") for i in keys: print "+++++++++++++:" print i temp = i.split( common.sep) print len(temp) print redis.hgetall(i) print "----------------" print "lenght",len(keys) print "testing query functions" print qc.match_labels( "CONTROLLER" ) # match single item temp = qc.match_labels( "REMOTE" ) # match single item print len(temp),temp print qc.match_relationship( "CONTROLLER" ) # match single item temp = qc.match_relationship( "REMOTE" ) # match single item print len(temp),temp temp = qc.match_label_property( "REMOTE", "name", "satellite_1") print len(temp),temp
def find(host_id):
    """Load host ``host_id`` from Redis; returns None when it doesn't exist."""
    stored = redis.hgetall('host:' + host_id)
    if not stored:
        return
    stored.update(name=host_id)
    return Host(**stored)