def redis_get_all_compound_keys(redis):
    """Extract all (tag, keyword) pairs from the redis database.

    Args:
        redis: A redis client exposing ``scan_iter``.

    Returns:
        defaultdict mapping each tag to the list of its keywords. Keys that
        do not split into exactly two ':'-separated parts (e.g. schema keys)
        are skipped.
    """
    keys = defaultdict(list)
    # BUG FIX: the original iterated scan_iter("*") twice in NESTED loops,
    # which both duplicated every keyword and made the scan O(n^2).
    for key in redis.scan_iter("*"):
        key_pair = tuple(key.decode("utf-8").split(':'))
        if len(key_pair) == 2:  # omit keys corresponding to schema
            keys[key_pair[0]].append(key_pair[1])
    return keys


def read_one_from_redis(tag, keyword):
    """Given a tag and a keyword, extract the corresponding time series.

    Reads the hash stored at "<tag>:<keyword>", unwraps the Spark
    ``WrappedArray(...)`` string representation of the dates and converts
    each 'YYYY-MM-DD' entry to month granularity ('YYYY-MM').
    """
    redis_hash = str(tag) + ":" + str(keyword)
    read = redis_connect().hgetall(redis_hash)
    dates_extract = list(read.values())[0].decode("utf-8")  # decode raw bytes
    # 'WrappedArray(' is 13 characters; also drop the trailing ')'.
    dates_only = dates_extract[13:-1]
    dates_split = [s.strip() for s in dates_only.split(',')]
    dates_formatted = ['{:%Y-%m}'.format(datetime.strptime(date, '%Y-%m-%d'))
                       for date in dates_split]
    return dates_formatted


def datetime_x_y(dates_with_repetitions):
    """Return sorted unique dates and the post count for each date."""
    dates_counter = Counter(dates_with_repetitions)
    dates_sorted = sorted(set(dates_with_repetitions))
    counts = [dates_counter[date] for date in dates_sorted]
    return dates_sorted, counts
def output(message):
    """Greet an admin entering the 'alpha office' and dump all user hashes
    from redis into output.xlsx (one row per key)."""
    try:
        # BUG FIX: the original called openpyxl.loads_workbook, which does
        # not exist; the correct API is openpyxl.load_workbook.
        workbook = openpyxl.load_workbook('output.xlsx')
        sheet = workbook.active
    except Exception:
        # No existing spreadsheet (or it is unreadable) -> start a fresh one.
        workbook = Workbook()
        sheet = workbook.active
    finally:
        print(message.from_user.first_name, " is in alpha office")
        bot.reply_to(
            message,
            emojize(':open_file_folder:', use_aliases=True)
            + 'Hi mr/mis {} you are in alpha office enter password to receive results'
            .format(message.from_user.first_name),
            reply_markup=mksighup)
    for key in redis.scan_iter():
        try:
            new_row = [
                key,
                redis.hget(key, 'firstname'),
                redis.hget(key, 'lastname'),
                redis.hget(key, 'phonenumber'),
                redis.hget(key, 'meli'),
                redis.hget(key, 'point'),
                redis.hget(key, 'state'),
            ]
            sheet.append(new_row)
        except Exception:
            # Best effort: skip keys that are not hashes of the expected shape.
            pass
    workbook.save(filename="output.xlsx")
def hashes_redis():
    """Demo of redis HASH commands (hset/hget/hgetall/type) plus key cleanup."""
    import redis
    # decode_responses=True avoids b'' prefixes on returned values.
    client = redis.Redis(host='127.0.0.1', port=6379, db=0,
                         charset="utf-8", decode_responses=True)
    print("-------------")
    print("HASH")
    print("-------------")
    # hset(name, mapping=...) hget(name, key) hgetall(name)
    # FIX: hmset() is deprecated in redis-py; hset(name, mapping=...) is the
    # supported equivalent.
    client.hset("user.1", mapping={"name": "peter", "email": "*****@*****.**"})
    print("map.1: ", client.hgetall("user.1"))
    print("name.1:", client.hget("user.1", "name"))
    print("email.1:", client.hget("user.1", "email"))
    print("-------------")
    # hset(key, field, value) hget()
    client.hset("user.2", "name1", "peter")
    print("map.2: ", client.hgetall("user.2"))
    print("type map.2: ", client.type("user.2"))
    print("name.2:", client.hget("user.2", "name1"))
    print("-------------")
    # delete all keys under the demo prefix
    for key in client.scan_iter("prefix:*"):
        client.delete(key)
def get_contacts(redis):
    """Return a JSON payload listing every contact name stored in redis."""
    prefix = 'contacts:'
    # Each contact lives under a 'contacts:<name>' key; keep only the name.
    names = [key[len(prefix):] for key in redis.scan_iter(match=prefix + '*')]
    return jsonify({'contacts': names})
def index():
    """Render the chart page from every key/value pair stored in redis."""
    encoding = 'utf-8'
    # Snapshot the whole keyspace as decoded strings.
    snapshot = {
        key.decode(encoding): redis.get(key).decode(encoding)
        for key in redis.scan_iter('*')
    }
    line_labels = list(snapshot.keys())
    records = [json.loads(raw) for raw in snapshot.values()]
    values_btc = [rec['rate'] * rec['qty_btc'] for rec in records]
    values_gbp = [rec['qty_gbp'] for rec in records]
    values_total = [btc + gbp for btc, gbp in zip(values_btc, values_gbp)]
    return render_template('chart.html',
                           labels=line_labels,
                           values_btc=values_btc,
                           values_gbp=values_gbp,
                           values_total=values_total)
def _entries_for_driver_in_shard(self, driver_id, redis_shard_index):
    """Collect IDs of control-state entries for a driver from a shard.

    Args:
        driver_id: The ID of the driver.
        redis_shard_index: The index of the Redis shard to query.

    Returns:
        Lists of IDs: (returned_object_ids, task_ids, put_objects). The
        first two are relevant to the driver and are safe to delete. The
        last contains all "put" objects in this redis shard; each element
        is an (object_id, corresponding task_id) pair.
    """
    # TODO(zongheng): consider adding save & restore functionalities.
    redis = self.state.redis_clients[redis_shard_index]
    task_table_infos = {}  # task id -> TaskInfo messages
    # Scan the task table & filter to get the list of tasks belong to this
    # driver. Use a cursor in order not to block the redis shards.
    for key in redis.scan_iter(match=TASK_TABLE_PREFIX + b"*"):
        entry = redis.hgetall(key)
        task_info = ray.gcs_utils.TaskInfo.GetRootAsTaskInfo(
            entry[b"TaskSpec"], 0)
        if driver_id != task_info.DriverId():
            # Ignore tasks that aren't from this driver.
            continue
        task_table_infos[task_info.TaskId()] = task_info

    # Get the list of objects returned by these tasks. Note these might
    # not belong to this redis shard.
    returned_object_ids = []
    for task_info in task_table_infos.values():
        returned_object_ids.extend([
            task_info.Returns(i) for i in range(task_info.ReturnsLength())
        ])

    # Also record all the ray.put()'d objects.
    put_objects = []
    for key in redis.scan_iter(match=OBJECT_INFO_PREFIX + b"*"):
        entry = redis.hgetall(key)
        # BUG FIX: hgetall values are bytes here (the keys are accessed as
        # bytes), so comparing the flag against the str "0" was always
        # False and never skipped non-put objects; compare against b"0".
        if entry[b"is_put"] == b"0":
            continue
        object_id = key.split(OBJECT_INFO_PREFIX)[1]
        task_id = entry[b"task"]
        put_objects.append((object_id, task_id))

    return returned_object_ids, task_table_infos.keys(), put_objects
def _entries_for_driver_in_shard(self, driver_id, redis_shard_index):
    """Collect IDs of control-state entries for a driver from a shard.

    Args:
        driver_id: The ID of the driver.
        redis_shard_index: The index of the Redis shard to query.

    Returns:
        Lists of IDs: (returned_object_ids, task_ids, put_objects). The
        first two are relevant to the driver and are safe to delete. The
        last contains all "put" objects in this redis shard; each element
        is an (object_id, corresponding task_id) pair.
    """
    # TODO(zongheng): consider adding save & restore functionalities.
    redis = self.state.redis_clients[redis_shard_index]
    task_table_infos = {}  # task id -> TaskInfo messages
    # Scan the task table & filter to get the list of tasks belong to this
    # driver. Use a cursor in order not to block the redis shards.
    for key in redis.scan_iter(match=TASK_TABLE_PREFIX + b"*"):
        entry = redis.hgetall(key)
        task_info = TaskInfo.GetRootAsTaskInfo(entry[b"TaskSpec"], 0)
        if driver_id != task_info.DriverId():
            # Ignore tasks that aren't from this driver.
            continue
        task_table_infos[task_info.TaskId()] = task_info

    # Get the list of objects returned by these tasks. Note these might
    # not belong to this redis shard.
    returned_object_ids = []
    for task_info in task_table_infos.values():
        returned_object_ids.extend([
            task_info.Returns(i) for i in range(task_info.ReturnsLength())
        ])

    # Also record all the ray.put()'d objects.
    put_objects = []
    for key in redis.scan_iter(match=OBJECT_INFO_PREFIX + b"*"):
        entry = redis.hgetall(key)
        # BUG FIX: hgetall values are bytes here (the keys are accessed as
        # bytes), so comparing the flag against the str "0" was always
        # False and never skipped non-put objects; compare against b"0".
        if entry[b"is_put"] == b"0":
            continue
        object_id = key.split(OBJECT_INFO_PREFIX)[1]
        task_id = entry[b"task"]
        put_objects.append((object_id, task_id))

    return returned_object_ids, task_table_infos.keys(), put_objects
def upload():
    """Dump every 'product:*' entry to the output file as JSON lines,
    deleting each key from redis after it is written."""
    with open(outputName, 'w', encoding='utf-8') as outfile:
        for key in redis.scan_iter(match='product:*'):
            data = redis.get(key).decode('utf-8')
            json.dump(json.loads(data), outfile)
            outfile.write('\n')
            redis.delete(key)
    # FIX: removed the explicit outfile.close() -- the 'with' statement
    # already closes the file on exit.
def read_redis(data):
    """Return the value stored in redis under key ``data`` (None if absent).

    FIX: the original scanned the entire keyspace looking for ``data`` and
    then returned redis.get(data) both inside the loop and as the fallback,
    so the scan never affected the result; the direct GET is equivalent
    (GET on a missing key returns None).
    """
    import redis
    client = redis.Redis()
    return client.get(data)
def list_maps():
    """View all saved maps."""
    # Redis docs recommend scanning the keyspace in small chunks instead of
    # fetching the entire key list at once.
    names = [raw_key.decode("utf-8") for raw_key in redis.scan_iter()]
    # Avoiding duplicates may be necessary if the project scales up very far
    # name_set = set(name_list)
    # Unpickle each stored map so that previews can be displayed.
    # SECURITY NOTE(review): pickle.loads on data read back from redis will
    # execute arbitrary code if the store is ever writable by untrusted
    # parties -- consider a safer serialization format.
    map_list = [pickle.loads(redis.get(name)) for name in names]
    return render_template('map_list.html', maps=map_list)
def sets_redis():
    """Demo of redis SET commands (sadd/smembers) followed by key cleanup."""
    import redis
    client = redis.Redis(host='127.0.0.1', port=6379, db=0,
                         charset="utf-8", decode_responses=True)
    for banner in ("-------------", "SETS", "-------------"):
        print(banner)
    # sadd(name, *values)
    client.sadd("telephone", 938293287, 329832932)
    print(client.smembers("telephone"))
    # delete all keys under the demo prefix
    for key in client.scan_iter("prefix:*"):
        client.delete(key)
def list_redis():
    """Demo of redis LIST commands (lpush/lrange) followed by key cleanup."""
    import redis
    client = redis.Redis(host='127.0.0.1', port=6379, db=0,
                         charset="utf-8", decode_responses=True)
    print("-------------")
    print("LIST")
    print("-------------")
    # lpush(name, *values) - inserts at the head of the list.
    # BUG FIX: the original passed "pedro" " ana" " mara" (adjacent string
    # literals with missing commas), which Python concatenates into the
    # SINGLE value "pedro ana mara"; three separate values were intended,
    # as shown by lrange(0, 2) reading three elements below.
    client.lpush("names", "pedro", "ana", "mara")
    print("names: ", client.lrange("names", 0, 2))
    # delete all keys under the demo prefix
    for key in client.scan_iter("prefix:*"):
        client.delete(key)
def beginner(message):
    """Telegram handler: start or resume a quiz for the messaging user.

    Looks the user up among all top-level redis keys and dispatches on the
    stored 'state' hash field:
      - b'finish'   -> the user already used their one chance; refuse.
      - b'on match' -> user is mid-quiz; reset the timer and resend a question.
      - b'alive'    -> registered but not started; promote to 'on match'.
    Unregistered users are told to register first.
    NOTE(review): the else branch is read as belonging to the registration
    check (the reply text says "please register first") -- confirm intent.
    """
    keys = []
    # Collect every key in the database; usernames are stored as plain keys.
    for data8 in redis.scan_iter():
        keys.append(data8.decode('utf-8'))
    if message.from_user.username in keys:
        # hget returns raw bytes here, hence the b'...' comparisons.
        if redis.hget(message.from_user.username, 'state') == b'finish':
            # Reply (Persian): "you have already used your chance"
            bot.reply_to(message, 'شما شانس خود را قبلا استفاده کرده اید\n/home')
        elif redis.hget(message.from_user.username, 'state') == b'on match':
            # Reply (Persian): "you are currently in the exam"
            bot.reply_to(message, 'شما هم اکنون در آزمون حضور دارید')
            redis.hset(message.from_user.username, 'timer', time.time())
            getquestion(message)
        elif redis.hget(message.from_user.username, 'state') == b'alive':
            redis.hset(message.from_user.username, 'state', 'on match')
            redis.hset(message.from_user.username, 'timer', time.time())
            getquestion(message)
    else:
        # Reply (Persian): "please register first"
        bot.reply_to(message, 'لطفا ابتدا ثبت نام کنید \n/start & /begin')
def repet_change():
    """Mark duplicate job postings by fingerprinting their descriptive fields.

    Walks every 'jobs_*' hash, builds a sha256 fingerprint from the job's
    name/company/address/description, and records it in the
    'check_repetition' set. Entries whose fingerprint was already seen are
    flagged with repetition=True and written back. Prints progress every
    1000 records, then the total processed and duplicate counts.
    """
    processed = 0
    duplicates = 0
    for key in redis.scan_iter("jobs_*"):
        item = redis.hgetall(key)
        fingerprint_src = (item["job_name"] + item["company_name"]
                           + item["office_address"] + item["description"])
        # BUG FIX: hashlib.sha256 requires bytes; passing the concatenated
        # str raised TypeError at runtime. Encode as UTF-8 first.
        sha256 = hashlib.sha256(fingerprint_src.encode("utf-8")).hexdigest()
        # sadd returns 0 when the member was already in the set -> duplicate.
        if redis.sadd("check_repetition", sha256) == 0:
            duplicates += 1
            print("find one repetition")
            item["repetition"] = True
            redis.hmset(key, item)
        processed += 1
        # Progress marker every 1000 records; the modulo test replaces the
        # original's slow `a in range(0, 1000000, 1000)` membership check.
        if processed % 1000 == 0 and processed < 1000000:
            print(processed)
    print(processed)
    print(duplicates)
def strings_redis():
    """Demo of redis STRING commands plus server info/monitor and key cleanup."""
    import redis
    # charset="utf-8", decode_responses=True => avoid b'' prefixes in redis-py
    client = redis.Redis(host='127.0.0.1', port=6379, db=0,
                         charset="utf-8", decode_responses=True)
    separator = "-------------"
    print(separator)
    print("STRINGS")
    print(separator)
    # info()
    print(client.info())
    print(separator)
    # monitor()
    print(client.monitor())
    print(separator)
    # set() - the second call simply overwrites the first value
    client.set("name", "javier")
    client.set("name", "jaime")
    print("key: ", client.get("name"))
    print(separator)
    print("all keys: ", client.keys())
    print("keys with a 'name...': ", client.keys("name*"))
    print("keys with a 'e': ", client.keys("*e*"))
    print(separator)
    # setnx(name, value)
    client.set("name", "javier")
    # mset(name, value)
    # NOTE(review): this dict literal repeats the "name" key, so only
    # "david" is ever sent; two distinct keys were probably intended.
    client.mset({"name": "peter", "name": "david"})
    print("name: ", client.mget("name"))
    print(separator)
    # getrange(name, start, end) - substrings of the value
    print("range : ", client.getrange("name", 0, 3))
    # delete all keys under the demo prefix
    for key in client.scan_iter("prefix:*"):
        client.delete(key)
def sorted_sets_redis():
    """Demo of redis SORTED SET commands (zadd/zrangebyscore), then cleanup."""
    import redis
    client = redis.Redis(host='127.0.0.1', port=6379, db=0,
                         charset="utf-8", decode_responses=True)
    print("-------------")
    print("SORTED_SETS")
    print("-------------")
    # zadd(name, mapping) zrangebyscore(name, min, max)
    client.zadd("country.user", {392832938: 0, 34340923233: 1})
    print(client.zrangebyscore("country.user", 0, 1))
    # delete all keys under the demo prefix
    for key in client.scan_iter("prefix:*"):
        client.delete(key)
    # clean data
    # BUG FIX: the original wrote `redis.flushdb` without parentheses, which
    # only references the bound method and never flushes the database.
    client.flushdb()
# Continuation of the sub-command dispatch on sys.argv[1] (chain starts
# outside this chunk).
elif sys.argv[1] == 'get_settings':
    # Usage: get_settings <job_id> -- pretty-print the redis hash for one job.
    if len(sys.argv) < 3:
        print('Invalid arguments')
        exit(1)
    job_id = sys.argv[2]
    data = redis.hgetall(job_id)
    print('Data for job ident {}:'.format(job_id))
    print('{}'.format(
        json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))))
    exit(0)
elif sys.argv[1] == 'get_all_pending_queues':
    # List every 'pending:*' queue key; SCAN avoids blocking the server.
    for name in redis.scan_iter('pending:*'):
        print('{}'.format(name))
    exit(0)
elif sys.argv[1] == 'dump_working_queue':
    # Show the full contents of the 'working' list.
    pprint('{}'.format(redis.lrange('working', 0, -1)))
    exit(0)
elif sys.argv[1] == 'reset_job_counters':
    # Usage: reset_job_counters <job_id> (handling continues past this chunk).
    if len(sys.argv) < 3:
        print('Invalid arguments')
        exit(1)
    job_id = sys.argv[2]
# Connection settings for the database dump.
PORT = 6379
PASSWORD = ''
DATABASE = 0
PREFIX = ''  # optional key-prefix filter; empty string matches every key

# NOTE(review): IP is defined outside this chunk; the connection verifies the
# server certificate against the bundled CA file.
redis = redis.Redis(
    host=IP,
    port=PORT,
    password=PASSWORD,
    db=DATABASE,
    socket_timeout=None,
    ssl_ca_certs=
    '/home/dev/PycharmProjects/CommonExploits/db_dumpers/ca-certificate.crt')

# Dump every matching key, dispatching on its redis type to pick the correct
# read command (the bytes comparisons imply decode_responses is off).
for key in redis.scan_iter(match=f'{PREFIX}*'):
    print(key, flush=True)
    key_type = redis.type(key)
    try:
        if key_type == b'string':
            result = redis.get(key)
        elif key_type == b'set':
            result = redis.smembers(key)
        elif key_type == b'hash':
            result = redis.hgetall(key)
        elif key_type == b'list':
            result = redis.lrange(key, 0, -1)
        elif key_type == b'zset':
            result = redis.zrange(key, 0, -1)
        else:
            # Fallback for unrecognised types; presumably the except clause
            # (outside this chunk) handles the failure -- confirm.
            result = redis.get(key)
# Node setup: bridge ROS log messages into redis under namespaced keys.
global whitelist
rospy.init_node("log_bridge")
redis = bt.redis_client()
redis_key = bt.namespace_key("Log")
redis_key2 = bt.namespace_key("Log_Settings")
# whitelist maps a node name to a verbosity code ("0"/"1"/"2"), loaded from
# and kept in sync with JSON / redis below.
whitelist = bt.load_json_file("log_whitelist.json", dict())
special_log_keys = set(bt.load_json_file("special_log_keys.json", []))
roslog_sub = rospy.Subscriber('/rosout', Log, log_cb)
reset_sub = rospy.Subscriber(
    "/reset", Empty, reset_cb
)  # this node does not use bt.establish_reset() because it handles too many keys
bt.establish_pulse()

# Poll once per second for whitelist changes written into redis.
rate = rospy.Rate(1)
while not rospy.is_shutdown():
    keys = redis.scan_iter(
        redis_key2 + "*")  # get all the keys under the ns/Log_Settings key domain
    update = False
    for k in keys:
        name = k.replace(redis_key2, '')  # strips <ns>/Log_Settings
        value = redis.get(k)
        curr_value = whitelist.get(name, -1)
        # Check the redis value against the stored whitelist value; if they
        # differ, update the in-memory whitelist.
        if value != curr_value:
            try:
                # Only the string codes "0"/"1"/"2" are valid verbosity levels.
                assert (value in ["0", "1", "2"])
                whitelist[name] = int(value)
                update = True
            except:
                rospy.logerr(
                    "invalid whitelist code [{}] given for node {}".format(
                        value, name))
def get_all_keys(redis: redis.Redis):
    """Print every key in the connected redis database.

    Failures (e.g. connection errors) are reported through error_message
    instead of being propagated to the caller.
    """
    try:
        for current_key in redis.scan_iter():
            print(current_key)
    except Exception as exc:
        error_message(exc)
# Load and preprocess the face image we want to search for.
target_img_path = "target.png"
target_img = functions.preprocess_face(target_img_path, target_size = (160, 160))
plt.imshow(target_img[0][:,:,::-1])  # reverse channel order for display
plt.axis('off')
plt.show()

# Embed the target face with the model; keep the vector as a plain list.
target_embedding = model.predict(target_img)[0].tolist()

#----------------------------
#redis server

#redis = redis.Redis(host='localhost', port=6379, db=0)
redis = redis.StrictRedis(host='localhost', port=6379, db=0)

# Clear any embeddings/photos left over from a previous run.
for key in redis.scan_iter("embedding:*"):
    redis.delete(key)
for key in redis.scan_iter("photo:*"):
    redis.delete(key)

#----------------------------
#store local db in redis
#Ref: https://github.com/serengil/deepface/tree/master/tests/dataset

# Identity name -> image path; the dict literal continues past this chunk.
local_db = {
    'angelina': 'deepface/tests/dataset/img2.jpg',
    'jennifer': 'deepface/tests/dataset/img56.jpg',
    'scarlett': 'deepface/tests/dataset/img49.jpg',
    'katy': 'deepface/tests/dataset/img42.jpg',
    'marissa': 'deepface/tests/dataset/img23.jpg'
    # Server/diagnostic info for the connected redis instance (this branch
    # starts outside the chunk).
    print(f"client_id:{redis.client_id()}")
    print(f"client_list:{redis.client_list()}")
    print(f"lastsave:{redis.lastsave()}")
    pprint.pprint(f"memory_stats:{redis.memory_stats()}")
    print(f"time:{redis.time()}")
elif args.method.lower() == "key":
    # Inspect a single key: value, string length and remaining TTL.
    if args.key != None:
        print(f"key:{args.key} value:{redis.mget(args.key)}")
        print(f"strlen:{redis.strlen(args.key)}")
        print(f"ttl:{redis.ttl(args.key)}")
    else:
        print("No key specified")
elif args.method.lower() == "scan":
    # Enumerate keys matching --match (default: everything), 1000 per batch.
    words = 0
    match = "*" if args.match == None else args.match
    for key in redis.scan_iter(match=match,count=1000):
        words += 1
        print(key)
    print(f"{words} in set")
    #keys = redis.scan(cursor=0, match=".", count=1000, _type="SET")
    #print(keys)
else:
    print(f"Method {args.method} not recognised")

############################################################
# watch
############################################################
if args.action == "watch":
    # Subscribe to keyspace notifications for database 0.
    pubsub = redis.pubsub()
    pubsub.psubscribe('__keyspace@0__:*')
    print('Starting message loop')