def load_rdb_stream(file_name, filters=None):
    """Parse an RDB test dump from an open stream into a MockRedis instance.

    :param file_name: dump file name, relative to the local ``dumps`` directory.
    :param filters: optional rdbtools filter dict passed through to RdbParser.
    :return: the populated MockRedis.
    """
    r = MockRedis()
    parser = RdbParser(r, filters)
    # Context manager closes the file descriptor even if parsing raises
    # (the original leaked the open handle).
    with open(os.path.join(os.path.dirname(__file__), 'dumps', file_name),
              'rb') as f:
        parser.parse_fd(f)
    return r
def one_time_migration(file, args_):
    """Perform the initial full Redis -> SQL migration.

    Downloads the RDB dump, parses it, creates target tables from stored
    rules, bulk-inserts the parsed key/values and commits the session.

    :param file: path of the RDB dump file to download into and then parse.
        (NOTE: shadows the Python 2 builtin ``file``; kept for interface
        stability.)
    :param args_: parsed CLI args carrying redis_host/redis_port/redis_db_password.
    :return: table definitions produced by create_tables_from_stored_rules().
    """
    redis_host = args_.redis_host
    redis_port = args_.redis_port
    redis_password = args_.redis_db_password
    print("-----One Time Migration Started---------")
    start_time = time.time()
    print("Dump file download Started")
    download_rdb(file, redis_host, redis_port, redis_password)
    print("Dump file download Completed")
    # JSONCallback(0) -> "first run" mode; presumably populates the
    # module-level old_key_value mapping as a parsing side effect — TODO confirm.
    callback = JSONCallback(0)
    print("Parsing Started")
    parser_ = RdbParser(callback)
    parser_.parse(file)
    print("Parsing Completed")
    print("Table Creation Started")
    tables_ = create_tables_from_stored_rules()
    print("Table Creation Completed")
    print("Querying...")
    # old_key_value is a module-level dict filled during parsing.
    data = old_key_value
    bulk_insertion(data, tables_)
    print("Query Completed")
    Session.commit()
    print("Target database commit")
    end_time = time.time()
    print(
        "Execution Time For One Time Migration {0} Seconds".format(
            end_time - start_time))
    return tables_
def load_rdb(target, rdb_filename, appkeys):
    """Parse *rdb_filename*, replaying only set-member additions into *target*.

    Every other callback event (string set, hash set, list push, zadd) is
    deliberately ignored.
    """

    class MyCallback(RdbCallback):
        """Forwards only ``sadd`` events; all other callbacks are no-ops."""

        def set(self, key, value, expiry):
            pass  # plain string keys are ignored

        def hset(self, key, field, value):
            pass  # hash fields are ignored

        def sadd(self, key, member):
            # Forward the set membership to the target through the wrapper.
            set_op_wrapper(target, "sadd", key, member, appkeys)

        def rpush(self, key, value):
            pass  # list entries are ignored

        def zadd(self, key, score, member):
            pass  # sorted-set entries are ignored

    parser = RdbParser(MyCallback())
    parser.parse(rdb_filename)
def main():
    """CLI entry point: convert hash fields from an RDB dump into a CSV file."""
    opt_parser = optparse.OptionParser()
    opt_parser.add_option('-i', '--input', dest='rdb_file',
                          help="input rdb file [REQUIRED]")
    opt_parser.add_option('-o', '--output', dest='csv_file',
                          help="output csv file [REQUIRED]")
    opt_parser.add_option('-c', '--column_delimiter', dest='column_delimiter',
                          help="column delimiter. Default is ctrl-A ('\\001')")
    opt_parser.add_option('-l', '--line_delimiter', dest='line_delimiter',
                          help="row delimiter. Default is newline ('\\n')")
    opt_parser.add_option('-p', '--pre_key', dest='pre_key',
                          help="columns before hash key. Default is empty")
    opt_parser.add_option('-P', '--post_key', dest='post_key',
                          help="columns after hash key. Default is 'views clicks'")
    opt_parser.set_defaults(column_delimiter='\001')
    opt_parser.set_defaults(line_delimiter='\n')
    opt_parser.set_defaults(pre_key='')
    opt_parser.set_defaults(post_key='views clicks')
    options, remainder = opt_parser.parse_args()

    # Both input and output paths are mandatory.
    if options.csv_file is None or options.rdb_file is None:
        opt_parser.print_help()
        sys.exit(1)

    # `with` guarantees the CSV file is flushed and closed even if parsing
    # raises (the original leaked the handle on error). Also avoids
    # rebinding `parser` from OptionParser to RdbParser mid-function.
    with open(options.csv_file, 'w') as outf:
        callback = CsvCallback(outf, options.pre_key.split(),
                               options.post_key.split(),
                               options.column_delimiter,
                               options.line_delimiter)
        rdb_parser = RdbParser(callback)
        rdb_parser.parse(options.rdb_file)
def test_skip_expiry(self):
    """Protocol output must omit the EXPIREAT command when emit_expire=False."""
    expected = (b'*2\r\n$6\r\nSELECT\r\n$1\r\n0\r\n'
                b'*3\r\n$3\r\nSET\r\n$20\r\nexpires_ms_precision\r\n'
                b'$27\r\n2022-12-25 10:11:12.573 UTC\r\n')
    buf = BytesIO()
    parser = RdbParser(ProtocolCallback(buf, emit_expire=False))
    parser.parse(self.dumpfile)
    # assertEqual: assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(buf.getvalue(), expected)
def get_csv(dump_file_name):
    """Parse the named test dump and return its memory report as a CSV string."""
    out = BytesIO()
    parser = RdbParser(MemoryCallback(PrintAllKeys(out, None, None), 64))
    dump_path = os.path.join(os.path.dirname(__file__), 'dumps', dump_file_name)
    parser.parse(dump_path)
    return out.getvalue().decode()
def load_rdb(rdb_file_name):
    """Convert *rdb_file_name* to JSON and return the parsed content.

    The JSON is also written to EXPORT_FOLDER/out.json as a side effect.
    """
    output_path = os.path.join(EXPORT_FOLDER, "out.json")
    with open(output_path, "w") as out:
        RdbParser(JSONCallback(out)).parse(rdb_file_name)
    with open(output_path, "r") as out:
        return json.load(out)
def get_csv(dump_file_name):
    """Return the memory report for a test dump file as decoded CSV text."""
    buff = BytesIO()
    reporter = PrintAllKeys(buff, None, None)
    RdbParser(MemoryCallback(reporter, 64)).parse(
        os.path.join(os.path.dirname(__file__), 'dumps', dump_file_name))
    return buff.getvalue().decode()
def rdb_handler(rdb):
    """Re-emit an in-memory RDB payload as a Redis protocol command stream."""
    print("rdb: ", rdb)
    out = io.BytesIO()
    # Feed the raw bytes straight from memory; no temp file needed.
    RdbParser(ProtocolCallback(out=out)).parse_fd(io.BytesIO(rdb))
    payload = out.getvalue()
    print('rdb data', payload)
    redirect_cmd(payload)
    print("parse rdb end")
def test_amend_expiry(self):
    """Protocol output must shift EXPIREAT by the amend_expire offset."""
    expected = (b'*2\r\n$6\r\nSELECT\r\n$1\r\n0\r\n'
                b'*3\r\n$3\r\nSET\r\n$20\r\nexpires_ms_precision\r\n'
                b'$27\r\n2022-12-25 10:11:12.573 UTC\r\n'
                b'*3\r\n$8\r\nEXPIREAT\r\n$20\r\nexpires_ms_precision\r\n'
                b'$10\r\n1671965072\r\n')
    buf = BytesIO()
    parser = RdbParser(ProtocolCallback(buf, amend_expire=2000))
    parser.parse(self.dumpfile)
    # assertEqual: assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(buf.getvalue(), expected)
def retrieve_data_at_time(log_folder, time):
    """
    Analyze log data for information at a specific time

    :param log_folder: Absolute path (to make things easy) of log's data folder
    :param time: Time at which to extract information
    :return: result of parsing the RDB file that was active at *time*
    """
    rdb_fpath = get_rdb_file(log_folder, time)
    # Close the intermediate JSON output file deterministically; the
    # original leaked the handle opened for the callback.
    with open("ss", "wb") as out:
        parser = RdbParser(JSONCallback(out))
        return parser.parse(rdb_fpath)
def __init__(self, dump_path="dump.rdb"):
    """Parse an RDB dump and prepare the value-conversion table.

    :param dump_path: path of the RDB file to load. Defaults to "dump.rdb",
        matching the original hard-coded behavior, but is now configurable.
    """
    self._parser = Parser()
    self._rdbparser = RdbParser(self._parser)
    self._rdbparser.parse(dump_path)
    # Parsed key/value data collected by the callback.
    self._bDict = self._parser.bigDict
    # Tracks whether the target database has been created yet.
    self._mydb = False
    # Conversion table mapping textual states to numeric metric values.
    self._conversionTable = {}
    self._conversionTable["HEALTHY"] = 0
    self._conversionTable["REGISTERING"] = 1
    self._conversionTable["IDLE"] = 0
def test_all_dumps(self):
    """Run callback with all test dumps intercepting incidental crashes."""
    if self._callback_class is None:
        # Unittest discovery also instantiates this "abstract" base class.
        return
    pattern = os.path.join(os.path.dirname(__file__), TEST_DUMPS_DIR, '*.rdb')
    for dump_name in glob.glob(pattern):
        parser = RdbParser(self._callback_class(out=self._out))
        try:
            parser.parse(dump_name)
        except Exception as err:
            raise self.failureException(
                "%s on %s - %s: %s" % (self._callback_class.__name__,
                                       os.path.basename(dump_name),
                                       type(err).__name__, str(err)))
        # Reset the shared output buffer between dumps.
        self._out.seek(0)
        self._out.truncate()
def print_memory_for_key(key, host="localhost", port=6379, db=0, password=None):
    """Print the memory usage of a single live Redis key.

    Fetches the serialized value with the DUMP command and feeds it through
    the memory-profiling parser.
    """
    from io import BytesIO

    redis = connect_to_redis(host, port, db, password)
    reporter = PrintMemoryUsage()
    callback = MemoryCallback(reporter, 64)
    parser = RdbParser(callback, filters={})
    # DUMP only returns the value, so inject the key name into the parser.
    # The parser expects bytes, not str (fixes a Python 3 type mismatch;
    # cf. the corrected variant of this function elsewhere in the file).
    parser._key = key.encode('utf-8') if isinstance(key, str) else key
    raw_dump = redis.execute_command("dump", key)
    if not raw_dump:
        sys.stderr.write("Key %s does not exist\n" % key)
        sys.exit(-1)
    # DUMP payloads are binary: wrap in BytesIO, not StringIO, which
    # rejects bytes on Python 3.
    stream = BytesIO(raw_dump)
    data_type = read_unsigned_char(stream)
    parser.read_object(stream, data_type)
def __main__(): total_cores = cpu_count() #初始化日志服务 cur_path = os.path.dirname(os.path.abspath(__file__)) log_path = os.path.join(cur_path, 'log') bin_path = os.path.join(cur_path, "bin") pidfile = os.path.join(bin_path, "server.pid") cur_pid = os.getpid() exec_command = "echo %d > %s" % (cur_pid, pidfile) os.system(exec_command) if not Logger.init(log_path): return False data_file = os.path.join(cur_path, "hash.txt") # # if not os.path.exists(data_file): # Logger.logger.error("file not exists!!") # return redis_manage_obj = RedisManage(Logger.logger) redis_manage_obj.init_redis_pool() test_callbak = ParseCallback(Logger.logger, redis_manage_obj, data_file) test_callbak.load_file() test_callbak.push_test_data_to_redis() # jobs = list() # for x in xrange(total_cores): # p = Process(target = func, args=(Logger.logger,redis_manage_obj)) # p.start() # jobs.append(p) # # for job in jobs: # job.join() filters = dict() filters['types'] = list() filters['types'].append('hash') parser = RdbParser(test_callbak, filters=filters) dump_file = '/data/temp/dump.rdb' parser.parse(dump_file)
def rdb_allvalues_concurrency(self, filename):
    """Parse *filename* and hand all collected key/value groups to the UI callback."""
    callback = ParserRDBCallback()
    parser = RdbParser(callback)
    self.callback_from.show_msg("[%s]parse the rdb start\n" % (get_time_stamp()))
    parser.parse(filename)
    (head_info_l, hash_keyvalue_l, zset_keyvalue_l, set_keyvalue_l,
     list_keyvalue_l, str_keyvalue_l) = callback.get_values()
    # Report the dump header lines, then the completion timestamp.
    self.callback_from.show_msg(
        "\n".join(str(head) for head in head_info_l) + "\n")
    self.callback_from.show_msg("[%s]end parse \n" % (get_time_stamp()))
    self.callback_from.get_end(hash_keyvalue_l, zset_keyvalue_l,
                               set_keyvalue_l, list_keyvalue_l, str_keyvalue_l)
def memory_profiler(dump_file, bytes, largest, output_file=None):
    """Run the memory profiler over *dump_file*, writing binary output.

    Writes to *output_file* when given, otherwise to a binary wrapper around
    stdout; the handle is closed only when this function opened it itself.
    """
    out_file_obj = None
    try:
        if output_file:
            out_file_obj = open(output_file, "wb")
        else:
            # Prefer not to depend on Python stdout implementation for writing binary.
            out_file_obj = os.fdopen(sys.stdout.fileno(), 'wb')
        reporter = MyPrintAllKeys(out_file_obj, bytes, largest)
        parser = RdbParser(
            MemoryCallback(reporter, 64, string_escape=None), filters={})
        parser.parse(dump_file)
    finally:
        # Only close what we opened; never close the stdout wrapper.
        if output_file and out_file_obj is not None:
            out_file_obj.close()
class WebAppIntegration:
    """Bridges a parsed Redis RDB dump into InfluxDB via its HTTP API."""

    def __init__(self, dump_path="dump.rdb"):
        """Parse the RDB dump and prepare the value-conversion table.

        :param dump_path: RDB file to load (default keeps original behavior).
        """
        self._parser = Parser()
        self._rdbparser = RdbParser(self._parser)
        self._rdbparser.parse(dump_path)
        # Parsed machine -> {field: state} data collected by the callback.
        self._bDict = self._parser.bigDict
        # Whether the "mydb" InfluxDB database has been created.
        self._mydb = False
        # Conversion table mapping textual machine states to metric values.
        self._conversionTable = {
            "HEALTHY": 0,
            "REGISTERING": 1,
            "IDLE": 0,
        }

    def importInflux(self):
        """ Import file to InfluxDB """
        # NOTE(review): credentials appear inline in this (redacted) command;
        # consider moving them to configuration.
        cmd = "curl -i -XPOST 'http://*****:*****@influximport.txt"
        subprocess.call(cmd, shell=True)

    def create_db(self):
        """ Create a database called mydb if it doesn't exist """
        # BUG FIX: the original read "curl -i XPOST ..." — missing the dash,
        # so curl treated "XPOST" as a URL instead of the request method.
        cmd = "curl -i -XPOST http://localhost:8086/query --data-urlencode 'q=CREATE DATABASE mydb'"
        if not self._mydb:
            subprocess.call(cmd, shell=True)
            self._mydb = True
        else:
            print("MyDB exists.")

    def writeToFile(self):
        """ Create a file called influximport.txt in compatibility with InfluxDB """
        currentTime = time.time()
        timestamp = str(currentTime).split(".")[0]
        # `with` closes the file even if a write fails (original used a
        # bare open/close pair and leaked the handle on error).
        with open("influximport.txt", "w") as out:
            for item, value in self._bDict.items():
                # Map each field's textual state to its numeric code (-1 for
                # unknown states), joined as comma-separated field=value
                # pairs in InfluxDB line protocol. join() also fixes an
                # IndexError the original raised on an empty field dict.
                fields = ",".join(
                    "%s=%s" % (kee, self._conversionTable.get(val, -1))
                    for kee, val in value.items())
                out.write("machine,machine_name=" + item + " " + fields +
                          " " + timestamp + "\n")
def print_memory_for_key(key, host='localhost', port=6379, db=0, password=None):
    """Print the memory usage of one live Redis key via the DUMP command."""
    from io import BytesIO

    redis = connect_to_redis(host, port, db, password)
    reporter = PrintMemoryUsage()
    callback = MemoryCallback(reporter, 64)
    parser = RdbParser(callback, filters={})
    # DUMP only returns the value, so inject the key name into the parser.
    # Inject it as bytes (fixes a Python 3 type mismatch; cf. the corrected
    # variant of this function elsewhere in the file).
    parser._key = key.encode('utf-8') if isinstance(key, str) else key
    raw_dump = redis.execute_command('dump', key)
    if not raw_dump:
        sys.stderr.write('Key %s does not exist\n' % key)
        sys.exit(-1)
    # The DUMP payload is binary, so wrap it in BytesIO rather than
    # StringIO, which rejects bytes on Python 3.
    stream = BytesIO(raw_dump)
    data_type = read_unsigned_char(stream)
    parser.read_object(stream, data_type)
def test_all_dumps(self):
    """Run callback with all test dumps intercepting incidental crashes."""
    if self._callback_class is None:
        return  # unittest discovery also hits this "abstract" base class
    dumps_glob = os.path.join(os.path.dirname(__file__), TEST_DUMPS_DIR,
                              '*.rdb')
    for dump_name in glob.glob(dumps_glob):
        callback = self._callback_class(out=self._out)
        try:
            RdbParser(callback).parse(dump_name)
        except Exception as err:
            msg = "%s on %s - %s: %s" % (self._callback_class.__name__,
                                         os.path.basename(dump_name),
                                         type(err).__name__, str(err))
            raise self.failureException(msg)
        # Rewind and clear the shared buffer before the next dump.
        self._out.seek(0)
        self._out.truncate()
def escape_test_helper(self, escape_name):
    """Check callback output for the escape dump under one escape mode."""
    if self._callback_class is None:
        return  # unittest discovery also hits this "abstract" base class
    escape = getattr(encodehelpers, escape_name)
    callback = self._callback_class(out=self._out, string_escape=escape)
    dump_path = os.path.join(os.path.dirname(__file__), TEST_DUMPS_DIR,
                             self._fixture['escape_db_file'])
    RdbParser(callback).parse(dump_path)
    result = self._out.getvalue()
    # Compare against the fixture output recorded for this escape mode.
    self.assertEqual(
        result,
        self._fixture['escape_out_' + escape],
        msg='\n%s escape method %s' % (self._callback_class.__name__,
                                       escape_name))
def print_memory_for_key(key, host='localhost', port=6379, db=0, password=None):
    """Report memory usage for one live key fetched via the DUMP command."""
    redis = connect_to_redis(host, port, db, password)
    parser = RdbParser(MemoryCallback(PrintMemoryUsage(), 64), filters={})
    # DUMP command only return the key data, so we hack RdbParser to inject key name as parsed bytes.
    parser._key = key.encode('utf-8')
    raw_dump = redis.execute_command('dump', key)
    if not raw_dump:
        sys.stderr.write('Key %s does not exist\n' % key)
        sys.exit(-1)
    payload = BytesIO(raw_dump)
    # First byte of a DUMP payload is the value's type tag.
    type_tag = read_unsigned_char(payload)
    parser.read_object(payload, type_tag)
def __main__():
    """Parse /data/temp/dump.rdb, forwarding only hash keys to ParseCallback."""
    # Initialize the logging service.
    cur_path = os.path.dirname(os.path.abspath(__file__))
    if not Logger.init(os.path.join(cur_path, 'log')):
        return False
    # callback = JSONCallback(sys.stdout)
    test_callbak = ParseCallback(Logger.logger)
    # Restrict parsing to hash-typed keys only.
    filters = {'types': ['hash']}
    parser = RdbParser(test_callbak, filters=filters)
    parser.parse('/data/temp/dump.rdb')
def parse_rdb_to_mysql(cls, rdb_file_path, mysql_server, mysql_table_name,
                       min_key_length, mysql_batch_insert_rows=100,
                       mysql_batch_sleep_seconds=0.01):
    """Stream an RDB dump into a MySQL table via MySQLCallback.

    Inserts are batched (*mysql_batch_insert_rows* per batch) with a short
    sleep between batches to limit load on the target server.
    """
    redis_arch = 64  # assume a 64-bit Redis build for memory accounting
    string_escape = "raw"
    stream_handler = MySQLCallback(
        mysql_server=mysql_server,
        mysql_table_name=mysql_table_name,
        mysql_batch_insert_rows=mysql_batch_insert_rows,
        mysql_batch_sleep_seconds=mysql_batch_sleep_seconds,
        min_key_length=min_key_length)
    stream_handler.init_env()
    memory_cb = MemoryCallback(stream_handler, redis_arch,
                               string_escape=string_escape)
    RdbParser(memory_cb).parse(rdb_file_path)
def escape_test_helper(self, escape_name):
    """Verify callback output for the escape dump under *escape_name* mode."""
    if self._callback_class is None:
        # Unittest discovery also instantiates this "abstract" base class.
        return
    escape = getattr(encodehelpers, escape_name)
    callback = self._callback_class(out=self._out, string_escape=escape)
    parser = RdbParser(callback)
    parser.parse(os.path.join(os.path.dirname(__file__), TEST_DUMPS_DIR,
                              self._fixture['escape_db_file']))
    result = self._out.getvalue()
    expected = self._fixture['escape_out_' + escape]
    self.assertEqual(result, expected,
                     msg='\n%s escape method %s' %
                         (self._callback_class.__name__, escape_name))
def main(): """ Usage: redis-expired-key-liberator [rdb_path] """ print len(sys.argv) if len(sys.argv) == 1: rdb_path = '/var/lib/redis/dump-master.rdb' elif len(sys.argv) == 2: rdb_path = sys.argv[1] else: print 'Usage: redis-expired-key-liberator [rdb_path]' exit(2) if not os.path.isfile(rdb_path): print "Error: could not read file:", rdb_path exit(1) callback = KeyCallback() parser = RdbParser(callback) parser.parse(rdb_path)
def main():
    """CLI entry point: render an HTML memory report from an RDB dump."""
    usage = """usage: %prog [options] /path/to/dump.rdb

Example 1 : %prog -k "user.*" -k "friends.*" -f memoryreport.html /var/redis/6379/dump.rdb
Example 2 : %prog /var/redis/6379/dump.rdb"""
    opt_parser = OptionParser(usage=usage)
    opt_parser.add_option("-f", "--file", dest="output",
                          help="Output file", metavar="FILE")
    opt_parser.add_option("-k", "--key", dest="keys", action="append",
                          help="Keys that should be grouped together. Multiple regexes can be provided")
    (options, args) = opt_parser.parse_args()
    if len(args) == 0:
        opt_parser.error("Redis RDB file not specified")
    dump_file = args[0]
    # Fall back to a default report name when no output file was given.
    output = options.output if options.output else "redis_memory_report.html"

    stats = StatsAggregator()
    RdbParser(MemoryCallback(stats, 64)).parse(dump_file)
    stats_as_json = stats.get_json()

    with open(os.path.join(TEMPLATES_DIR, "report.html.template"), 'r') as t:
        report_template = Template(t.read())
    html = report_template.substitute(REPORT_JSON=stats_as_json)
    with open(output, 'w') as f:
        f.write(html)
def load_rdb(file_name, filters=None):
    """Parse a test dump from the local ``dumps`` directory into a MockRedis."""
    mock = MockRedis()
    dump_path = os.path.join(os.path.dirname(__file__), 'dumps', file_name)
    RdbParser(mock, filters).parse(dump_path)
    return mock
def batch_migration(args_):
    """
    Continuously mirror Redis changes into the SQL target.

    Runs one_time_migration first, then loops forever: every
    args_.time_interval seconds it re-downloads the dump, re-parses it, and
    applies the delta (inserts / updates / deletes) computed from the
    module-level old_*/new_* snapshot state on the `migration` module.

    :param args_: command line arguments
    :return: None
    """
    # First Time CallBack
    # do one time migration to create tables
    tables = migration.one_time_migration("dump.rdb", args_)
    # capture current snapshot of database
    migration.download_rdb("./dump.rdb")
    # JSONCallback(1) -> incremental mode; presumably fills the
    # migration.new_* snapshot structures on each parse — TODO confirm.
    callback = migration.JSONCallback(1)
    while True:
        print("Sleep")
        time.sleep(args_.time_interval)
        print("-------Batch Migrations Started---------------")
        start_time = time.time()
        migration.download_rdb("./dump.rdb", args_.redis_host,
                               args_.redis_port, args_.redis_db_password)
        parser_ = RdbParser(callback)
        parser_.parse("./dump.rdb")
        # Hashes present before but not now -> deleted or updated keys;
        # hashes present now but not before -> inserted or updated keys.
        delete_and_update_keys = migration.old_hash_values - migration.new_hash_values
        insert_and_update_keys = migration.new_hash_values - migration.old_hash_values
        new_key_name = set()
        old_key_name = set()
        for hash_ in delete_and_update_keys:
            key_name = migration.old_hash_table[hash_]
            old_key_name.add(key_name)
        for hash_ in insert_and_update_keys:
            key_name = migration.new_hash_table[hash_]
            new_key_name.add(key_name)
        # Set algebra on key names classifies the three operations.
        deleted_keys = old_key_name - new_key_name
        insert_keys = new_key_name - old_key_name
        update_keys = new_key_name & old_key_name
        print("update keys", len(update_keys))
        print("insert keys", len(insert_keys))
        print("deleted keys", len(deleted_keys))
        keys_delete = {}
        for key_name_delete in deleted_keys:
            migration.get_dependency_updates(key_name_delete, tables, False)
            keys_delete = {key_name_delete: migration.old_key_value[key_name_delete],
                           **keys_delete}
        migration.bulk_deletion(keys_delete, tables)
        keys = {}
        for key_name in insert_keys:
            # migration.get_dependency_updates(key_name, tables, True)
            keys = {key_name: migration.new_key_value[key_name], **keys}
        migration.bulk_insertion(keys, tables)
        for key_name in update_keys:
            migration.get_dependency_updates(key_name, tables, True)
            # Apply the update through the first table whose regex matches.
            for table in tables.keys():
                regex = tables[table]["regex"]
                if re.match(regex, key_name):
                    if tables[table]["format"] == "multi_row":
                        # Only newly-added values need to be written.
                        update_values = set(migration.new_key_value[key_name]) \
                            - set(migration.old_key_value[key_name])
                        for update_value in update_values:
                            migration.update({key_name: update_value},
                                             tables[table])
                    else:
                        migration.update({key_name: migration.new_key_value[key_name]},
                                         tables[table])
                    break
        migration.Session.commit()
        # Roll the "new" snapshot into "old" and reset for the next cycle.
        migration.old_hash_values = migration.new_hash_values
        migration.old_hash_table = migration.new_hash_table
        migration.old_key_value = migration.new_key_value
        migration.old_key_name = migration.new_key_name
        migration.new_key_name = set()
        migration.new_key_value = {}
        migration.new_hash_values = set()
        migration.new_hash_table = {}
        end_time = time.time()
        print("Batch Migrations Processing Time {0} Seconds"
              .format(end_time - start_time))
def main():
    """CLI entry point: export an RDB dump via the json/diff/memory command."""
    usage = """usage: %prog [options] /path/to/dump.rdb

Example : %prog --command json -k "user.*" /var/redis/6379/dump.rdb"""
    opt_parser = OptionParser(usage=usage)
    opt_parser.add_option("-c", "--command", dest="command",
                          help="Command to execute. Valid commands are json or diff",
                          metavar="FILE")
    opt_parser.add_option("-f", "--file", dest="output",
                          help="Output file", metavar="FILE")
    opt_parser.add_option("-n", "--db", dest="dbs", action="append",
                          help="Database Number. Multiple databases can be provided. If not specified, all databases will be included.")
    opt_parser.add_option("-k", "--key", dest="keys", default=None,
                          help="Keys to export. This can be a regular expression")
    opt_parser.add_option("-t", "--type", dest="types", action="append",
                          help="""Data types to include. Possible values are string, hash, set, sortedset, list. Multiple typees can be provided. If not specified, all data types will be returned""")
    (options, args) = opt_parser.parse_args()
    if len(args) == 0:
        opt_parser.error("Redis RDB file not specified")
    dump_file = args[0]

    # Translate CLI options into rdbtools parser filters.
    filters = {}
    if options.dbs:
        filters['dbs'] = []
        for x in options.dbs:
            try:
                filters['dbs'].append(int(x))
            except ValueError:
                raise Exception('Invalid database number %s' % x)
    if options.keys:
        filters['keys'] = options.keys
    if options.types:
        filters['types'] = []
        for x in options.types:
            if not x in VALID_TYPES:
                raise Exception('Invalid type provided - %s. Expected one of %s'
                                % (x, (", ".join(VALID_TYPES))))
            else:
                filters['types'].append(x)

    def _make_callback(f):
        # Build the callback for the selected command over output stream f.
        if 'diff' == options.command:
            return DiffCallback(f)
        if 'json' == options.command:
            return JSONCallback(f)
        if 'memory' == options.command:
            return MemoryCallback(PrintAllKeys(f), 64)
        # BUG FIX: the original formatted options.output into this message
        # instead of the offending command name.
        raise Exception('Invalid Command %s' % options.command)

    if options.output:
        with open(options.output, "wb") as f:
            # BUG FIX: the original dropped `filters` when writing to a
            # file; the filters now apply in both branches.
            parser = RdbParser(_make_callback(f), filters=filters)
            parser.parse(dump_file)
    else:
        parser = RdbParser(_make_callback(sys.stdout), filters=filters)
        parser.parse(dump_file)
def load_rdb(file_name, filters=None):
    """Load a dump from the test ``dumps`` directory into a fresh MockRedis."""
    redis_mock = MockRedis()
    parser = RdbParser(redis_mock, filters)
    parser.parse(os.path.join(os.path.dirname(__file__), 'dumps', file_name))
    return redis_mock
def get_stats(file_name):
    """Parse a test dump and return the collected memory-stat records."""
    stats = Stats()
    dump = os.path.join(os.path.dirname(__file__), 'dumps', file_name)
    RdbParser(MemoryCallback(stats, 64)).parse(dump)
    return stats.records
def sadd(self, key, member):
    # Queue a set-member addition and flush the pipeline if due.
    # print "sadd|{}|{}".format(key, member)
    self.p.sadd(key, member)
    self.execute()

def rpush(self, key, value):
    # Queue a list push and flush the pipeline if due.
    # print "rpush|{}|{}".format(key, value)
    self.p.rpush(key, value)
    self.execute()

def zadd(self, key, score, member):
    # Queue a sorted-set addition.
    # NOTE(review): arguments are passed as (key, member, score), which
    # matches the legacy redis-py zadd(name, *pairs) calling convention —
    # verify against the installed redis-py version.
    # print "zadd|{}|{}|{}".format(key, member, score)
    self.p.zadd(key, member, score)
    self.execute()

def end_rdb(self):
    # Flush whatever is still buffered when parsing finishes.
    self.p.execute()

def execute(self):
    # Flush the pipeline once every 10000 queued commands.
    self.count = (self.count + 1) % 10000
    if not self.count:
        self.p.execute()


# Replay database 0 of the dump into a local Redis instance.
r = redis.Redis.from_url("redis://127.0.0.1:6400")
callback = MyCallback(r)
parser = RdbParser(callback, filters={"dbs": [0]})
print("start {}".format(now()))
parser.parse("/home/xinming.pan/dump.rdb")
print("stop {}".format(now()))
class MyCallback(RdbCallback):
    '''
    Simple example to show how callback works.
    See RdbCallback for all available callback methods.
    See JsonCallback for a concrete example
    '''

    def aux_field(self, key, value):
        # Called once per auxiliary header field in the RDB preamble.
        print('aux:[%s:%s]' % (key, value))

    def db_size(self, db_size, expires_size):
        # Called with key/expiry counts when a database section starts.
        print('db_size: %s, expires_size %s' % (db_size, expires_size))

    def set(self, key, value, expiry, info):
        # Plain string key.
        print('%s = %s' % (str(key), str(value)))

    def hset(self, key, field, value):
        # One hash field.
        print('%s.%s = %s' % (str(key), str(field), str(value)))

    def sadd(self, key, member):
        # One set member.
        print('%s has {%s}' % (str(key), str(member)))

    def rpush(self, key, value):
        # One list element.
        print('%s has [%s]' % (str(key), str(value)))

    def zadd(self, key, score, member):
        # One sorted-set member with its score.
        print('%s has {%s : %s}' % (str(key), str(member), str(score)))


# NOTE(review): the positional None is presumably RdbCallback's
# string_escape argument — confirm against the installed rdbtools version.
callback = MyCallback(None)
parser = RdbParser(callback)
parser.parse('dump.rdb')
import sys

import MySQLdb

from rdbtools import RdbParser, RdbCallback

# Mirror the URLShortener hash from a Redis dump into the MySQL `redis` table.
db = MySQLdb.connect(host="10.73.45.56", user="******",
                     passwd="dddd!!@@##$$", db="shortener")

# Start from a clean table before re-importing.
sur = db.cursor()
sql = "delete from redis"
sur.execute(sql)


class MyCallback(RdbCallback):
    """Writes URLShortener hash fields into MySQL as they are parsed."""

    def insert(self, query, params=None):
        # Execute and commit a single statement. (The original named the
        # first parameter `test`; it is the instance, so name it `self`.)
        cur = db.cursor()
        cur.execute(query, params)
        db.commit()

    def hset(self, key, field, value):
        # Only mirror the URLShortener hash.
        if key == "URLShortener":
            # SECURITY FIX: use a parameterized query instead of
            # interpolating the parsed `value` directly into the SQL
            # string (SQL injection via dump contents).
            self.insert(
                "INSERT into redis(redis_key, redis_value) values(%s, %s)",
                (int(field), value))


callback = MyCallback()
parser = RdbParser(callback)
parser.parse('/home/next/data/dump.rdb')
db.close()
def load_rdb(filename, filters=None):
    """Parse *filename* into a fresh MockRedis and return it."""
    mock = MockRedis()
    RdbParser(mock, filters).parse(filename)
    return mock
def load_rdb_stream(file_name, filters=None):
    """Parse a test dump via a file object into a MockRedis instance.

    :param file_name: name of a dump inside the local ``dumps`` directory.
    :param filters: optional rdbtools filters forwarded to RdbParser.
    :return: the populated MockRedis.
    """
    r = MockRedis()
    parser = RdbParser(r, filters)
    dump_path = os.path.join(os.path.dirname(__file__), 'dumps', file_name)
    # Context manager closes the stream even on parse errors (the original
    # left the file handle open).
    with open(dump_path, 'rb') as f:
        parser.parse_fd(f)
    return r
def main():
    """Parse each comma-separated dump file given on the command line.

    argv[1] is stored in the module-level ``targethost`` for the callback;
    argv[2] is a comma-separated list of dump file paths.
    """
    global targethost
    targethost = sys.argv[1]
    filenames = sys.argv[2].split(',')
    for filename in filenames:
        RdbParser(MyCallback(None)).parse(filename)
def main():
    """CLI entry point: convert an RDB dump with the selected command."""
    usage = """usage: %prog [options] /path/to/dump.rdb

Example : %prog --command json -k "user.*" /var/redis/6379/dump.rdb"""
    opt_parser = OptionParser(usage=usage)
    opt_parser.add_option("-c", "--command", dest="command",
                          help="Command to execute. Valid commands are json, diff, justkeys, justkeyvals and protocol",
                          metavar="FILE")
    opt_parser.add_option("-f", "--file", dest="output",
                          help="Output file", metavar="FILE")
    opt_parser.add_option("-n", "--db", dest="dbs", action="append",
                          help="Database Number. Multiple databases can be provided. If not specified, all databases will be included.")
    opt_parser.add_option("-k", "--key", dest="keys", default=None,
                          help="Keys to export. This can be a regular expression")
    opt_parser.add_option("-o", "--not-key", dest="not_keys", default=None,
                          help="Keys Not to export. This can be a regular expression")
    opt_parser.add_option("-t", "--type", dest="types", action="append",
                          help="""Data types to include. Possible values are string, hash, set, sortedset, list. Multiple typees can be provided. If not specified, all data types will be returned""")
    opt_parser.add_option("-b", "--bytes", dest="bytes", default=None,
                          help="Limit memory output to keys greater to or equal to this value (in bytes)")
    opt_parser.add_option("-l", "--largest", dest="largest", default=None,
                          help="Limit memory output to only the top N keys (by size)")
    opt_parser.add_option("-e", "--escape", dest="escape", choices=ESCAPE_CHOICES,
                          help="Escape strings to encoding: %s (default), %s, %s, or %s." % tuple(ESCAPE_CHOICES))
    (options, args) = opt_parser.parse_args()
    if len(args) == 0:
        opt_parser.error("Redis RDB file not specified")
    dump_file = args[0]

    # Translate CLI options into rdbtools parser filters.
    filters = {}
    if options.dbs:
        filters['dbs'] = []
        for x in options.dbs:
            try:
                filters['dbs'].append(int(x))
            except ValueError:
                raise Exception('Invalid database number %s' % x)
    if options.keys:
        filters['keys'] = options.keys
    if options.not_keys:
        filters['not_keys'] = options.not_keys
    if options.types:
        filters['types'] = []
        for x in options.types:
            if not x in VALID_TYPES:
                raise Exception('Invalid type provided - %s. Expected one of %s'
                                % (x, (", ".join(VALID_TYPES))))
            else:
                filters['types'].append(x)

    out_file_obj = None
    try:
        if options.output:
            out_file_obj = open(options.output, "wb")
        else:
            # Prefer not to depend on Python stdout implementation for writing binary.
            out_file_obj = os.fdopen(sys.stdout.fileno(), 'wb')

        try:
            callback = {
                'diff': lambda f: DiffCallback(f, string_escape=options.escape),
                'json': lambda f: JSONCallback(f, string_escape=options.escape),
                'justkeys': lambda f: KeysOnlyCallback(f, string_escape=options.escape),
                'justkeyvals': lambda f: KeyValsOnlyCallback(f, string_escape=options.escape),
                'memory': lambda f: MemoryCallback(
                    PrintAllKeys(f, options.bytes, options.largest),
                    64, string_escape=options.escape),
                'protocol': lambda f: ProtocolCallback(f, string_escape=options.escape)
            }[options.command](out_file_obj)
        # BUG FIX: only an unknown command should map to this error; the
        # original bare `except:` also masked genuine failures raised
        # while constructing the callback.
        except KeyError:
            raise Exception('Invalid Command %s' % options.command)

        parser = RdbParser(callback, filters=filters)
        parser.parse(dump_file)
    finally:
        # Only close the handle we opened; never close the stdout wrapper.
        if options.output and out_file_obj is not None:
            out_file_obj.close()
def main():
    """CLI entry point (argparse variant): convert an RDB dump."""
    usage = """usage: %(prog)s [options] /path/to/dump.rdb

Example : %(prog)s --command json -k "user.*" /var/redis/6379/dump.rdb"""
    parser = ArgumentParser(prog='rdb', usage=usage)
    parser.add_argument("-c", "--command", dest="command", required=True,
                        help="Command to execute. Valid commands are json, diff, justkeys, justkeyvals, memory and protocol",
                        metavar="CMD")
    parser.add_argument("-f", "--file", dest="output",
                        help="Output file", metavar="FILE")
    parser.add_argument("-n", "--db", dest="dbs", action="append",
                        help="Database Number. Multiple databases can be provided. If not specified, all databases will be included.")
    parser.add_argument("-k", "--key", dest="keys", default=None,
                        help="Keys to export. This can be a regular expression")
    parser.add_argument("-o", "--not-key", dest="not_keys", default=None,
                        help="Keys Not to export. This can be a regular expression")
    parser.add_argument("-t", "--type", dest="types", action="append",
                        help="""Data types to include. Possible values are string, hash, set, sortedset, list. Multiple typees can be provided. If not specified, all data types will be returned""")
    parser.add_argument("-b", "--bytes", dest="bytes", default=None,
                        help="Limit memory output to keys greater to or equal to this value (in bytes)")
    parser.add_argument("-l", "--largest", dest="largest", default=None,
                        help="Limit memory output to only the top N keys (by size)")
    parser.add_argument("-e", "--escape", dest="escape", choices=ESCAPE_CHOICES,
                        help="Escape strings to encoding: %s (default), %s, %s, or %s." % tuple(ESCAPE_CHOICES))
    # --no-expire and --amend-expire are mutually exclusive protocol tweaks.
    expire_group = parser.add_mutually_exclusive_group(required=False)
    expire_group.add_argument("-x", "--no-expire", dest="no_expire",
                              default=False, action='store_true',
                              help="With protocol command, remove expiry from all keys")
    expire_group.add_argument("-a", "--amend-expire", dest="amend_expire",
                              default=0, type=int, metavar='N',
                              help="With protocol command, add N seconds to key expiry time")
    parser.add_argument("dump_file", nargs=1, help="RDB Dump file to process")
    options = parser.parse_args()

    # Translate CLI options into rdbtools parser filters.
    filters = {}
    if options.dbs:
        filters['dbs'] = []
        for x in options.dbs:
            try:
                filters['dbs'].append(int(x))
            except ValueError:
                raise Exception('Invalid database number %s' % x)
    if options.keys:
        filters['keys'] = options.keys
    if options.not_keys:
        filters['not_keys'] = options.not_keys
    if options.types:
        filters['types'] = []
        for x in options.types:
            if not x in VALID_TYPES:
                raise Exception('Invalid type provided - %s. Expected one of %s'
                                % (x, (", ".join(VALID_TYPES))))
            else:
                filters['types'].append(x)

    out_file_obj = None
    try:
        if options.output:
            out_file_obj = open(options.output, "wb")
        else:
            # Prefer not to depend on Python stdout implementation for writing binary.
            out_file_obj = os.fdopen(sys.stdout.fileno(), 'wb')

        try:
            callback = {
                'diff': lambda f: DiffCallback(f, string_escape=options.escape),
                'json': lambda f: JSONCallback(f, string_escape=options.escape),
                'justkeys': lambda f: KeysOnlyCallback(f, string_escape=options.escape),
                'justkeyvals': lambda f: KeyValsOnlyCallback(f, string_escape=options.escape),
                'memory': lambda f: MemoryCallback(
                    PrintAllKeys(f, options.bytes, options.largest),
                    64, string_escape=options.escape),
                'protocol': lambda f: ProtocolCallback(
                    f, string_escape=options.escape,
                    emit_expire=not options.no_expire,
                    amend_expire=options.amend_expire)
            }[options.command](out_file_obj)
        # BUG FIX: only an unknown command should produce this error; the
        # original bare `except:` also swallowed genuine errors raised
        # while constructing the callback.
        except KeyError:
            raise Exception('Invalid Command %s' % options.command)

        if not PYTHON_LZF_INSTALLED:
            eprint("WARNING: python-lzf package NOT detected. " +
                   "Parsing dump file will be very slow unless you install it. " +
                   "To install, run the following command:")
            eprint("")
            eprint("pip install python-lzf")
            eprint("")

        parser = RdbParser(callback, filters=filters)
        parser.parse(options.dump_file[0])
    finally:
        # Only close the handle we opened; never close the stdout wrapper.
        if options.output and out_file_obj is not None:
            out_file_obj.close()
def main():
    """CLI entry point: export an RDB dump as diff/json/memory output."""
    usage = """usage: %prog [options] /path/to/dump.rdb

Example : %prog --command json -k "user.*" /var/redis/6379/dump.rdb"""
    opt_parser = OptionParser(usage=usage)
    opt_parser.add_option("-c", "--command", dest="command",
                          help="Command to execute. Valid commands are json or diff",
                          metavar="FILE")
    opt_parser.add_option("-f", "--file", dest="output",
                          help="Output file", metavar="FILE")
    opt_parser.add_option("-n", "--db", dest="dbs", action="append",
                          help="Database Number. Multiple databases can be provided. If not specified, all databases will be included.")
    opt_parser.add_option("-k", "--key", dest="keys", default=None,
                          help="Keys to export. This can be a regular expression")
    opt_parser.add_option("-t", "--type", dest="types", action="append",
                          help="""Data types to include. Possible values are string, hash, set, sortedset, list. Multiple typees can be provided. If not specified, all data types will be returned""")
    (options, args) = opt_parser.parse_args()
    if len(args) == 0:
        opt_parser.error("Redis RDB file not specified")
    dump_file = args[0]

    # Translate CLI options into rdbtools parser filters.
    filters = {}
    if options.dbs:
        filters['dbs'] = []
        for x in options.dbs:
            try:
                filters['dbs'].append(int(x))
            except ValueError:
                raise Exception('Invalid database number %s' % x)
    if options.keys:
        filters['keys'] = options.keys
    if options.types:
        filters['types'] = []
        for x in options.types:
            if not x in VALID_TYPES:
                raise Exception('Invalid type provided - %s. Expected one of %s'
                                % (x, (", ".join(VALID_TYPES))))
            else:
                filters['types'].append(x)

    # NOTE(review): the memory command hands the raw output stream to
    # MemoryCallback where sibling tools wrap it in PrintAllKeys — verify
    # this is intentional before relying on `-c memory`.
    cmds = {
        'diff': DiffCallback,
        'json': JSONCallback,
        'memory': lambda r: MemoryCallback(r, 64)
    }
    if options.command not in cmds:
        # BUG FIX: the message was misspelled ("Caommand") and formatted
        # options.output instead of the offending command name.
        raise Exception('Invalid Command %s' % options.command)

    if options.output:
        # BUG FIX: the original assigned the opened file to a misspelled
        # variable ("desitnaion"), so -f was silently ignored and output
        # always went to stdout.
        with open(options.output, 'wb') as f:
            RdbParser(cmds[options.command](f), filters=filters).parse(dump_file)
    else:
        # Do not wrap sys.stdout in `with` — the original closed stdout.
        RdbParser(cmds[options.command](sys.stdout),
                  filters=filters).parse(dump_file)
    return True