Code example #1
    def test_insert_cont(self):
        # Insert a single key/value pair through the migration layer and
        # verify that the value stored in the matching table equals the
        # value that was inserted.
        key_name = "questions:666665:answers"
        value2 = {key_name: "9"}   # key -> value mapping handed to the migration
        value1 = {"9"}             # expected set of stored values
        value3 = set()             # values read back from the database
        print("value1")
        print(value1)

        keys = {key_name: value2[key_name]}
        migration.bulk_insertion(keys, self.tables)
        migration.Session.commit()

        for table in self.tables.keys():
            regex = self.tables[table]["regex"]
            if re.match(regex, key_name):
                query_table = self.tables[table]["object"]()
                primary_keys = migration.get_primary_key_value(
                    self.tables[table], value2)
                # One bound parameter per primary-key column; join the
                # individual conditions with AND.
                where_clause_parameter = [
                    str(query_table.__table__.c[k[5:]] == migration.bindparam(k))
                    for k in primary_keys.keys()
                ]
                where_clause_parameter = " AND ".join(where_clause_parameter)
                stmt = query_table.__table__.select() \
                    .where(where_clause_parameter)
                conn = migration.engine.connect()
                res = conn.execute(stmt, **primary_keys)
                for i in res:
                    print("res")
                    print(i)
                    # The stored column holds a space-separated string of values.
                    for j in i[1].split(" "):
                        value3.add(j)
                print("value3")
                print(value3)
                break
        self.assertEqual(value3, value1)
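
Every example on this page builds its SELECT the same way: one bound parameter per primary-key column, resolved at execution time. The sketch below reproduces that pattern in isolation against an in-memory SQLite table; it assumes SQLAlchemy 1.4+, and the Answers table with its question_id/value columns is invented for illustration, not part of redis-sync.

from sqlalchemy import Column, String, bindparam, create_engine
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Answers(Base):
    __tablename__ = "answers"
    question_id = Column(String, primary_key=True)
    value = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with engine.begin() as conn:
    conn.execute(Answers.__table__.insert(),
                 [{"question_id": "666665", "value": "9"}])
    # Build the WHERE clause from the primary-key column and bind the value
    # at execution time, mirroring what the test does via migration.bindparam().
    stmt = Answers.__table__.select().where(
        Answers.__table__.c.question_id == bindparam("qid"))
    rows = conn.execute(stmt, {"qid": "666665"}).fetchall()
    print(rows)  # [('666665', '9')]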
Code example #2
File: aof_parser.py  Project: nikitha444/redis-sync
def sorted_sets(result):
    # result is a parsed AOF entry such as ["zadd", key, score, member, ...].
    key_name = result[1]
    value2 = {key_name: {}}
    value1 = set()
    for table in tables.keys():
        regex = tables[table]["regex"]
        if re.match(regex, key_name):
            # The key matches an existing table: read the current row,
            # merge the new entries into it and issue an update.
            query_table = tables[table]["object"]()
            primary_keys = migration.get_primary_key_value(tables[table], value2)
            where_clause_parameter = [
                str(query_table.__table__.c[k[5:]] == migration.bindparam(k))
                for k in primary_keys.keys()
            ]
            where_clause_parameter = " AND ".join(where_clause_parameter)
            stmt = query_table.__table__.select() \
                .where(where_clause_parameter)
            conn = migration.engine.connect()
            res = conn.execute(stmt, **primary_keys)
            for sorted_sets_name in res:
                # The stored column is a space-separated string of entries.
                value1 = set(sorted_sets_name[1].split(" "))
            if value1 == set():
                # Nothing stored yet: fall through to the insertion paths below.
                break
            else:
                if len(result) == 4:
                    value1.add(result[2])
                else:
                    # Several score/member pairs: split them into pairs and
                    # collect one entry per pair.
                    new = result[2:]
                    eff_length = len(new)
                    wanted_parts = eff_length // 2
                    result1 = split_list(new, wanted_parts)
                    for item in result1:
                        item.insert(0, key_name)
                        value1.add(item[1])
                value2[key_name] = list(value1)
                keys = {key_name: value2[key_name]}
                migration.update(keys, tables[table])
                print("{} key is changed".format(key_name))
                return value2

    if len(result) == 4:
        # New key with a single score/member pair: store member -> score.
        value2[result[1]][result[3]] = result[2]
        keys = {key_name: value2[result[1]]}
        migration.bulk_insertion(keys, tables)
        print("{} key is changed".format(key_name))
        return value2

    else:
        # New key with several score/member pairs.
        new = result[2:]
        eff_length = len(new)
        wanted_parts = eff_length // 2
        result1 = split_list(new, wanted_parts)

        for item in result1:
            value2[result[1]][str(item[1])] = item[0]
            item.insert(0, key_name)

        keys = {key_name: value2[result[1]]}
        migration.bulk_insertion(keys, tables)
        migration.Session.commit()
        print("{} key is changed".format(key_name))
        return value2
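
split_list() is called here and in hashes() below but is not shown on this page. Assuming it splits a flat list into wanted_parts roughly equal slices, so that a score1, member1, score2, member2 sequence becomes two-element pairs, a minimal sketch of such a helper could look like this (hypothetical, not the project's actual implementation):

def split_list(items, wanted_parts):
    # Split `items` into `wanted_parts` equal slices. With
    # wanted_parts = len(items) // 2 this yields two-element
    # [score, member] (or [field, value]) pairs, which is how the
    # examples above and below consume the result.
    length = len(items)
    return [items[i * length // wanted_parts:(i + 1) * length // wanted_parts]
            for i in range(wanted_parts)]

print(split_list(["10", "42", "7", "13"], 2))  # [['10', '42'], ['7', '13']]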
Code example #3
File: aof_parser.py  Project: nikitha444/redis-sync
def lists(result):
    # result is a parsed AOF entry such as ["rpush", key, value, ...]
    # or ["lrem", key, count, value].
    key_name = result[1]
    command = result[0]
    value2 = {key_name: []}
    for table in tables.keys():
        regex = tables[table]["regex"]
        if re.match(regex, key_name):
            # The key matches an existing table: read the current row and
            # merge the new values into it.
            value2[key_name] = [result[2]]
            query_table = tables[table]["object"]()
            primary_keys = migration.get_primary_key_value(tables[table], value2)
            where_clause_parameter = [
                str(query_table.__table__.c[k[5:]] == migration.bindparam(k))
                for k in primary_keys.keys()
            ]
            where_clause_parameter = " AND ".join(where_clause_parameter)
            stmt = query_table.__table__.select() \
                .where(where_clause_parameter)
            conn = migration.engine.connect()
            res = conn.execute(stmt, **primary_keys)
            for i in res:
                # The stored column is a space-separated string of values.
                value1 = set(i[1].split(" "))
                if value1 == set():
                    break
                else:
                    if command == "lrem":
                        # lrem: drop the target value from the stored set
                        # (when count <= 0) and rewrite the key.
                        value = set()
                        number = int(result[2])
                        while number <= 0:
                            value.add(result[3])
                            number = number + 1
                        value2[key_name] = list(value1 - value)
                        keys_delete = {key_name: value2[key_name]}
                        migration.bulk_insertion(keys_delete, tables)
                        print("{} key is changed".format(key_name))
                        return value2

                    if len(result) == 3:
                        value1.add(result[2])
                    else:
                        for list_name in result[2:]:
                            value1.add(list_name)
                    value2[key_name] = list(value1)
                    keys = {key_name: value2[key_name]}
                    migration.update(keys, tables[table])
                    print("{} key is changed".format(key_name))
                    return value2

    if len(result) == 3:
        # New key with a single pushed value.
        value2[key_name] = [result[2]]
        keys = {key_name: value2[key_name]}
        migration.bulk_insertion(keys, tables)
        print("{} key is changed".format(key_name))
        return value2

    # New key with several pushed values.
    value1 = set(result[2:])
    value2[key_name] = list(value1)
    keys = {key_name: value2[key_name]}
    migration.bulk_insertion(keys, tables)
    print("{} key is changed".format(key_name))
    return value2
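
The plain insertion path at the end of lists() is easy to see in isolation. The command below is a made-up example; note that going through a set deduplicates the pushed values and discards their order:

# Illustration of the final branch of lists() for a multi-value RPUSH.
result = ["rpush", "questions:666665:answers", "9", "12", "9"]
key_name = result[1]
value1 = set(result[2:])            # duplicates collapse, order is lost
value2 = {key_name: list(value1)}
print(value2)  # e.g. {'questions:666665:answers': ['9', '12']}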
Code example #4
File: aof_parser.py  Project: nikitha444/redis-sync
def hashes(result):
    # result is a parsed AOF entry such as ["hset", key, field, value]
    # or ["hdel", key, field, ...].
    key_name = result[1]
    command = result[0]
    value2 = {key_name: {}}
    if command == "hdel":
        if len(result) == 3:
            # Single field deletion.
            value2[key_name] = result[2]
            keys_delete = {key_name: value2[key_name]}
            migration.bulk_deletion(keys_delete, tables)
            print("{} key is deleted".format(key_name))
            return value2

        else:
            # Several fields deleted at once.
            value1 = set(result[2:])
            value2[key_name] = list(value1)
            keys_delete = {key_name: value2[key_name]}
            migration.bulk_deletion(keys_delete, tables)
            print("{} key is deleted".format(key_name))
            return value2

    for table in tables.keys():
        regex = tables[table]["regex"]
        if re.match(regex, key_name):
            # The key matches an existing table: read the current row and
            # merge the new field/value pairs into it.
            query_table = tables[table]["object"]()
            primary_keys = migration.get_primary_key_value(tables[table], value2)
            where_clause_parameter = [
                str(query_table.__table__.c[k[5:]] == migration.bindparam(k))
                for k in primary_keys.keys()
            ]
            where_clause_parameter = " AND ".join(where_clause_parameter)
            stmt = query_table.__table__.select() \
                .where(where_clause_parameter)
            conn = migration.engine.connect()
            res = conn.execute(stmt, **primary_keys)
            for hash_name in res:
                hash_names = hash_name[1].split(" ")
                if set(hash_names) == set():
                    break
                else:
                    if len(result) == 4:
                        # Single "hset key field value".
                        value2[key_name][result[2]] = result[3]
                    else:
                        # Several field/value pairs: split them into pairs.
                        new = result[2:]
                        eff_length = len(new)
                        wanted_parts = eff_length // 2
                        result1 = split_list(new, wanted_parts)

                        for item in result1:
                            value2[key_name][str(item[0])] = item[1]
                            item.insert(0, key_name)

                    keys = {key_name: value2[key_name]}
                    update_cont(keys, tables[table])
                    print("{} key is updated".format(key_name))
                    return value2

    if len(result) == 4:
        # New key with a single field/value pair.
        value2[key_name][result[2]] = result[3]
        keys = {key_name: value2[key_name]}
        migration.bulk_insertion(keys, tables)
        print("{} key is inserted".format(key_name))
        return value2

    else:
        # New key with several field/value pairs.
        new = result[2:]
        eff_length = len(new)
        wanted_parts = eff_length // 2
        result1 = split_list(new, wanted_parts)

        for item in result1:
            value2[key_name][str(item[0])] = item[1]
            item.insert(0, key_name)

        keys = {key_name: value2[key_name]}
        migration.bulk_insertion(keys, tables)
        print("{} key is inserted".format(key_name))
        return value2
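
aof_parser.py presumably routes each parsed AOF command to one of these handlers by its command name. The mapping below is a hypothetical sketch for illustration, not code from the project:

# Hypothetical dispatch table; the real routing in aof_parser.py may differ.
HANDLERS = {
    "hset": hashes, "hmset": hashes, "hdel": hashes,
    "lpush": lists, "rpush": lists, "lrem": lists,
    "zadd": sorted_sets,
}

def dispatch(result):
    # result[0] is the command name, e.g. ["hset", key, field, value].
    handler = HANDLERS.get(result[0].lower())
    return handler(result) if handler else None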
Code example #5
def batch_migration(args_):
    """

    :param args_: command line arguments
    :return: None
    """
    # First-time setup: run the one-time migration to create the tables.
    tables = migration.one_time_migration("dump.rdb", args_)
    # Capture the current snapshot of the Redis database.
    migration.download_rdb("./dump.rdb")
    callback = migration.JSONCallback(1)
    while True:
        print("Sleep")
        time.sleep(args_.time_interval)
        print("-------Batch Migrations Started---------------")
        start_time = time.time()
        migration.download_rdb("./dump.rdb", args_.redis_host, args_.redis_port, args_.redis_db_password)
        parser_ = RdbParser(callback)
        parser_.parse("./dump.rdb")
        delete_and_update_keys = migration.old_hash_values - migration.new_hash_values
        insert_and_update_keys = migration.new_hash_values - migration.old_hash_values
        new_key_name = set()
        old_key_name = set()
        for hash_ in delete_and_update_keys:
            key_name = migration.old_hash_table[hash_]
            old_key_name.add(key_name)

        for hash_ in insert_and_update_keys:
            key_name = migration.new_hash_table[hash_]
            new_key_name.add(key_name)

        deleted_keys = old_key_name - new_key_name
        insert_keys = new_key_name - old_key_name
        update_keys = new_key_name & old_key_name
        print("update keys", len(update_keys))
        print("insert keys", len(insert_keys))
        print("deleted keys", len(deleted_keys))
        keys_delete = {}
        for key_name_delete in deleted_keys:
            migration.get_dependency_updates(key_name_delete, tables, False)
            keys_delete = {key_name_delete: migration.old_key_value[key_name_delete], **keys_delete}
        migration.bulk_deletion(keys_delete, tables)
        keys = {}

        for key_name in insert_keys:
            # migration.get_dependency_updates(key_name, tables, True)
            keys = {key_name: migration.new_key_value[key_name], **keys}

        migration.bulk_insertion(keys, tables)

        for key_name in update_keys:
            migration.get_dependency_updates(key_name, tables, True)
            for table in tables.keys():
                regex = tables[table]["regex"]
                if re.match(regex, key_name):
                    if tables[table]["format"] == "multi_row":
                        update_values = set(migration.new_key_value[key_name]) - set(migration.old_key_value[key_name])
                        for update_value in update_values:
                            migration.update({key_name: update_value}, tables[table])
                    else:
                        migration.update({key_name: migration.new_key_value[key_name]}, tables[table])
                    break

        migration.Session.commit()
        migration.old_hash_values = migration.new_hash_values
        migration.old_hash_table = migration.new_hash_table
        migration.old_key_value = migration.new_key_value
        migration.old_key_name = migration.new_key_name
        migration.new_key_name = set()
        migration.new_key_value = {}
        migration.new_hash_values = set()
        migration.new_hash_table = {}
        end_time = time.time()
        print("Batch Migrations Processing Time {0} Seconds" .format(end_time-start_time))