def store_latest(self, servers, buckets, backup_num, backup_validation_path,
                 get_replica=False, mode="memory", backup_type="backup"):
    """
    Collects the latest key value pairs from a cluster after a restore
    :param servers: servers in the cluster
    :param buckets: buckets in the cluster
    :param backup_num: current backup number
    :param backup_validation_path: where to store the data
    :param get_replica: bool to decide if replica data to be copied
    :param mode: where to get the items from - can be "disk" or "memory"
    :param backup_type: backup or restore
    """
    collector = DataCollector()
    _, complete_map = collector.collect_data(
        servers, buckets,
        userId=servers[0].rest_username,
        password=servers[0].rest_password,
        perNode=False, getReplica=get_replica, mode=mode)
    for bucket_name, bucket_data in complete_map.items():
        dump_path = os.path.join(
            backup_validation_path,
            "{0}-{1}-{2}-{3}.json".format(bucket_name, "key_value",
                                          backup_type, backup_num))
        # Keep only the 5th comma-separated field of each raw value; the
        # slice [4:5] yields "" (instead of raising) when fewer fields exist.
        trimmed = {key: ",".join(raw.split(',')[4:5])
                   for key, raw in bucket_data.items()}
        with open(dump_path, 'w') as out:
            json.dump(trimmed, out)
def store_keys(self, servers, buckets, backup_num, backup_validation_path,
               mode="memory", backup_type="backup"):
    """
    Collects the latest keys from a cluster after a restore
    :param servers: servers in the cluster
    :param buckets: buckets in the cluster
    :param backup_num: current backup number
    :param backup_validation_path: where to store the data
    :param get_replica: bool to decide if replica data to be copied
    :param mode: where to get the items from - can be "disk" or "memory"
    :param backup_type: backup or restore
    """
    collector = DataCollector()
    _, complete_map = collector.collect_data(
        servers, buckets,
        userId=servers[0].rest_username,
        password=servers[0].rest_password,
        perNode=False, mode=mode)
    for bucket_name in list(complete_map.keys()):
        target = os.path.join(
            backup_validation_path,
            "{0}-{1}-{2}.json".format(bucket_name, "keys", backup_num))
        # Persist only the key names; values are validated separately.
        with open(target, 'w') as out:
            json.dump({"keys": list(complete_map[bucket_name].keys())}, out)
def validate_restore(self, backup_number, backup_vbucket_seqno,
                     restored_vbucket_seqno, compare_uuid=False,
                     compare="==", get_replica=False, mode="memory"):
    """
    Validates ep-engine stats and restored data
    :param backup_number: backup end number
    :param backup_vbucket_seqno: vbucket map of all backups
    :param restored_vbucket_seqno: vbucket map of restored cluster
    :param compare_uuid: bool to decide whether to compare uuid or not
    :param compare: comparator function
    :param get_replica: bool to decide whether to compare replicas or not
    :param mode: where to get the items from - can be "disk" or "memory"
    :return: status and message
    """
    self.log.info("backup start: " + str(self.backupset.start))
    self.log.info("backup end: " + str(self.backupset.end))
    success_msg = ""
    if not self.backupset.force_updates:
        # Seqno comparison is only meaningful when mutations were not forced
        # after the backup was taken.
        status, msg = self.compare_vbucket_stats(
            backup_vbucket_seqno[backup_number - 1],
            restored_vbucket_seqno,
            compare_uuid=compare_uuid, seqno_compare=compare)
        if not status:
            return status, msg
        success_msg = "{0}\n".format(msg)
    for bucket in self.buckets:
        kv_file_name = "{0}-{1}-{2}-{3}.json".format(
            bucket.name, "key_value", "backup", backup_number)
        kv_file_path = os.path.join(self.backup_validation_path, kv_file_name)
        backedup_kv = {}
        if os.path.exists(kv_file_path):
            # BUG FIX: original used Python 2 "except Exception, e" syntax,
            # which is a SyntaxError on Python 3.
            try:
                with open(kv_file_path, 'r') as f:
                    backedup_kv = json.load(f)
            except Exception as e:
                raise e
        data_collector = DataCollector()
        info, restored_data = data_collector.collect_data(
            self.restore_cluster, [bucket],
            userId=self.restore_cluster[0].rest_username,
            password=self.restore_cluster[0].rest_password,
            perNode=False, getReplica=get_replica, mode=mode)
        data = restored_data[bucket.name]
        for key in data:
            # Keep only the 5th comma-separated field, matching the format
            # written by store_latest.
            data[key] = ",".join(data[key].split(',')[4:5])
        is_equal, not_equal, extra, not_present = \
            self.compare_dictionary(backedup_kv, data)
        status, msg = self.compare_dictionary_result_analyser(
            is_equal, not_equal, extra, not_present,
            "{0} Data".format(bucket.name))
        if not status:
            # BUG FIX: the original's "return" was split from "status, msg",
            # so the failure tuple was never returned.
            return status, msg
        success_msg += "{0}\n".format(msg)
    # BUG FIX: the original fell off the end and returned None on success;
    # the docstring (and callers of the other validate_restore variant)
    # expect a (status, message) tuple.
    success_msg += "Data validation success"
    return True, success_msg
def validate_restore(self, backup_number, backup_vbucket_seqno,
                     restored_vbucket_seqno, compare_uuid=False,
                     compare="==", get_replica=False, mode="memory"):
    """
    Validates ep-engine stats and restored data
    :param backup_number: backup end number
    :param backup_vbucket_seqno: vbucket map of all backups
    :param restored_vbucket_seqno: vbucket map of restored cluster
    :param compare_uuid: bool to decide whether to compare uuid or not
    :param compare: comparator function
    :param get_replica: bool to decide whether to compare replicas or not
    :param mode: where to get the items from - can be "disk" or "memory"
    :return: status and message
    """
    self.log.info("backup start: " + str(self.backupset.start))
    self.log.info("backup end: " + str(self.backupset.end))
    success_msg = ""
    if not self.backupset.force_updates:
        # Seqno comparison is only meaningful when mutations were not forced
        # after the backup was taken.
        status, msg = self.compare_vbucket_stats(
            backup_vbucket_seqno[backup_number - 1],
            restored_vbucket_seqno,
            compare_uuid=compare_uuid, seqno_compare=compare)
        if not status:
            return status, msg
        success_msg = "{0}\n".format(msg)
    for bucket in self.buckets:
        kv_file_name = "{0}-{1}-{2}-{3}.json".format(
            bucket.name, "key_value", "backup", backup_number)
        kv_file_path = os.path.join(self.backup_validation_path, kv_file_name)
        backedup_kv = {}
        if os.path.exists(kv_file_path):
            # BUG FIX: original used Python 2 "except Exception, e" syntax,
            # which is a SyntaxError on Python 3.
            try:
                with open(kv_file_path, 'r') as f:
                    backedup_kv = json.load(f)
            except Exception as e:
                raise e
        data_collector = DataCollector()
        info, restored_data = data_collector.collect_data(
            self.restore_cluster, [bucket],
            userId=self.restore_cluster[0].rest_username,
            password=self.restore_cluster[0].rest_password,
            perNode=False, getReplica=get_replica, mode=mode)
        data = restored_data[bucket.name]
        for key in data:
            # Keep only the 5th comma-separated field, matching the format
            # written by store_latest.
            data[key] = ",".join(data[key].split(',')[4:5])
        is_equal, not_equal, extra, not_present = \
            self.compare_dictionary(backedup_kv, data)
        status, msg = self.compare_dictionary_result_analyser(
            is_equal, not_equal, extra, not_present,
            "{0} Data".format(bucket.name))
        if not status:
            # BUG FIX: the original's "return" was split from "status, msg",
            # so the failure tuple was never returned.
            return status, msg
        success_msg += "{0}\n".format(msg)
    # BUG FIX: the original fell off the end and returned None on success;
    # the docstring (and callers of the other validate_restore variant)
    # expect a (status, message) tuple.
    success_msg += "Data validation success"
    return True, success_msg
def store_keys(self, servers, buckets, backup_num, backup_validation_path,
               mode="memory", backup_type="backup"):
    """
    Collects the latest keys from a cluster after a restore
    :param servers: servers in the cluster
    :param buckets: buckets in the cluster
    :param backup_num: current backup number
    :param backup_validation_path: where to store the data
    :param get_replica: bool to decide if replica data to be copied
    :param mode: where to get the items from - can be "disk" or "memory"
    :param backup_type: backup or restore
    """
    data_collector = DataCollector()
    info, complete_map = data_collector.collect_data(
        servers, buckets,
        userId=servers[0].rest_username,
        password=servers[0].rest_password,
        perNode=False, mode=mode)
    for bucket in complete_map.keys():
        file_name = "{0}-{1}-{2}.json".format(bucket, "keys", backup_num)
        file_path = os.path.join(backup_validation_path, file_name)
        # BUG FIX: on Python 3, dict.keys() returns a view that json.dump
        # cannot serialize (TypeError); materialize it as a list first
        # (matches the other store_keys variant in this file).
        keys = list(complete_map[bucket].keys())
        with open(file_path, 'w') as f:
            json.dump({"keys": keys}, f)
def validate_restore(self, backup_number, backup_vbucket_seqno,
                     restored_vbucket_seqno, compare_uuid=False,
                     compare="==", get_replica=False, mode="memory",
                     backups_on_disk=None):
    """
    Validates ep-engine stats and restored data
    :param backup_number: backup end number
    :param backup_vbucket_seqno: vbucket map of all backups
    :param restored_vbucket_seqno: vbucket map of restored cluster
    :param compare_uuid: bool to decide whether to compare uuid or not
    :param compare: comparator function
    :param get_replica: bool to decide whether to compare replicas or not
    :param mode: where to get the items from - can be "disk" or "memory"
    :param backups_on_disk: total number of backups present on disk; when it
        equals backup_number (and backup_number is the backupset end), the
        newest key-value dump file is used instead of the numbered one
    :return: status and message
    """
    self.log.info("backup start: " + str(self.backupset.start))
    self.log.info("backup end: " + str(self.backupset.end))
    success_msg = ""
    if not self.backupset.force_updates:
        # vbucket seqno comparison is skipped when updates were forced,
        # since seqnos would no longer line up with the stored map.
        status, msg = self.compare_vbucket_stats(
            backup_vbucket_seqno[backup_number - 1], restored_vbucket_seqno,
            compare_uuid=compare_uuid, seqno_compare=compare,
            mapBucket=self.backupset.map_buckets)
        if not status:
            return status, msg
        success_msg = "{0}\n".format(msg)
    for bucket in self.buckets:
        kv_file_name = "{0}-{1}-{2}-{3}.json".format(
            bucket.name, "key_value", "backup", backup_number)
        kv_file_path = os.path.join(self.backup_validation_path, kv_file_name)
        if backup_number == self.backupset.end and backup_number == backups_on_disk:
            # Validating the final backup: pick the most recently written
            # key-value dump for this bucket (sorted by file mtime).
            backup_files = [
                f"{self.backup_validation_path}/" + f
                for f in listdir(self.backup_validation_path)
                if f"{bucket.name}-key_value-backup" in f
            ]
            backup_files.sort(key=os.path.getmtime)
            try:
                kv_file_path = backup_files[-1]
            except IndexError as e:
                # No dump file exists; that is expected when the bucket was
                # deliberately deleted during the test.
                if self.backupset.deleted_buckets:
                    return True, "Deleted bucket, will not check"
                else:
                    raise e
        backedup_kv = {}
        if os.path.exists(kv_file_path):
            try:
                with open(kv_file_path, 'r') as f:
                    backedup_kv = json.load(f)
            except Exception as e:
                raise e
        else:
            raise Exception(f"{kv_file_path} is missing!")
        data_collector = DataCollector()
        info, restored_data = data_collector.collect_data(
            self.restore_cluster, [bucket],
            userId=self.restore_cluster[0].rest_username,
            password=self.restore_cluster[0].rest_password,
            perNode=False, getReplica=get_replica, mode=mode)
        data = restored_data[bucket.name]
        # Normalize each raw collected value into the same trimmed form that
        # was written to the dump file, so the dictionaries compare equal.
        for key in data:
            value = data[key]
            if '"b\'' in value:
                # Values carrying a quoted bytes-literal prefix span more
                # comma-separated fields; take a wider slice.
                value = ",".join(value.split(',')[4:8])
            else:
                value = ",".join(value.split(',')[4:5])
            # Collapse doubled quotes left over from CSV-style quoting.
            value = value.replace('""', '"')
            if value.startswith('"b\''):
                # Strip the leading '"b\'' and trailing two characters of the
                # bytes-literal wrapper.
                value = value[3:-2]
            elif value.startswith("b"):
                value = value.split(',')[0]
            data[key] = value
        self.log.info("Compare backup data in bucket %s " % bucket.name)
        is_equal, not_equal, extra, not_present = \
            self.compare_dictionary(backedup_kv, data)
        status, msg = self.compare_dictionary_result_analyser(
            is_equal, not_equal, extra, not_present,
            "{0} Data".format(bucket.name))
        if not status:
            return status, msg
        success_msg += "{0}\n".format(msg)
    success_msg += "Data validation success"
    return True, success_msg