def _truncate_lines(lines, show_all):
    """Limit a list of text lines to 15 entries unless show_all is set.

    When truncating, a trailing note is appended telling the user how many
    lines were hidden and how to reveal them.

    Args:
        lines (list): lines of text (already split on newline).
        show_all (bool): when True, never truncate.

    Returns:
        list: the (possibly truncated) lines.
    """
    if len(lines) > 15 and not show_all:
        hidden_message = str(len(lines) - 15) + " hidden lines (show with --all)"
        return lines[:15] + [hidden_message]
    return lines


def get_backup_logs(data, backup_id, log_type, page_size=5, show_all=False):
    """Fetch log entries for one backup and print them as YAML.

    Args:
        data (dict): client configuration/state (server info, auth).
        backup_id: identifier of the backup whose logs are requested.
        log_type (str): log endpoint suffix (e.g. the log category).
        page_size (int): number of entries to request and display.
        show_all (bool): when True, do not truncate long messages.

    Returns:
        None. Output is emitted via common.log_output; returns early on
        HTTP errors.
    """
    endpoint = "/api/v1/backup/" + str(backup_id) + "/" + log_type
    baseurl = common.create_baseurl(data, endpoint)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    params = {'pagesize': page_size}
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     params=params, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 500:
        # The server answers 500 while the log database is locked by a
        # running backup; give a specific hint instead of a generic error.
        message = "Error getting log, "
        message += "database may be locked by backup"
        common.log_output(message, True)
        return
    elif r.status_code != 200:
        common.log_output("Error getting log", True, r.status_code)
        return

    # Keep only the newest page_size entries.
    result = r.json()[-page_size:]
    logs = []
    for log in result:
        if log.get("Operation", "") == "list":
            # "list" operations carry bulky data we deliberately drop.
            log["Data"] = "Expunged"
        else:
            # "Data" arrives as a JSON string; parse it and humanize size.
            log["Data"] = json.loads(log.get("Data", "{}"))
            size = helper.format_bytes(log["Data"].get("Size", 0))
            log["Data"]["Size"] = size
        if log.get("Message", None) is not None:
            log["Message"] = _truncate_lines(log["Message"].split("\n"),
                                             show_all)
        if log.get("Exception", None) is not None:
            log["Exception"] = _truncate_lines(log["Exception"].split("\n"),
                                               show_all)
        # Server timestamps are epoch seconds; render them human-readable.
        log["Timestamp"] = datetime.datetime.fromtimestamp(
            int(log.get("Timestamp", 0))).strftime("%I:%M:%S %p %d/%m/%Y")
        logs.append(log)
    message = yaml.safe_dump(logs, default_flow_style=False,
                             allow_unicode=True)
    common.log_output(message, True)
def _format_percent(part, total):
    """Return part/total as a percentage string with two decimals.

    Caller must guarantee total > 0 (all call sites check this first).
    """
    return "{0:.2f}".format(part / total * 100)


def _progress_summary(progress_state):
    """Build a human-readable progress dict from a raw Progress object.

    Args:
        progress_state (dict): raw "Progress" section from the server.

    Returns:
        dict: display-ready progress information.
    """
    state = progress_state.get("Phase", None)
    progress = {
        "State": state,
        "Counting files": progress_state.get("StillCounting", False),
        "Backend": {
            "Action": progress_state.get("BackendAction", 0)
        },
        "Task ID": progress_state.get("TaskID", -1),
    }
    speed = progress_state.get("BackendSpeed", 0)
    if speed > 0:
        progress["Backend"]["Speed"] = helper.format_bytes(speed) + "/s"
    # Display the counting item only while it is relevant.
    if not progress_state.get("StillCounting", False):
        progress.pop("Counting files")

    processing = state == "Backup_ProcessingFiles"

    # File-count ratio; guard both counters to avoid division by zero.
    file_count = progress_state.get("ProcessedFileCount", 0)
    total_file_count = progress_state.get("TotalFileCount", 0)
    if file_count > 0 and total_file_count > 0 and processing:
        processed = _format_percent(file_count, total_file_count)
        progress["Processed files"] = processed + "%"

    # Data-size ratio, formatted as "x% (y GB of z GB)".
    data_size = progress_state.get("ProcessedFileSize", 0)
    total_data_size = progress_state.get("TotalFileSize", 0)
    if data_size > 0 and total_data_size > 0 and processing:
        processed = _format_percent(data_size, total_data_size)
        processed += "% (" + str(helper.format_bytes(data_size))
        processed += " of "
        processed += str(helper.format_bytes(total_data_size)) + ")"
        progress["Processed data"] = processed

    # Backend transfer ratio; same zero-division guard.
    current = progress_state.get("BackendFileProgress", 0)
    total = progress_state.get("BackendFileSize", 0)
    if current > 0 and total > 0:
        backend_progress = _format_percent(current, total)
        progress["Backend"]["Progress"] = backend_progress + "%"
    return progress


def backup_filter(data, json_input):
    """Reduce raw backup JSON objects to compact display structures.

    Args:
        data (dict): client configuration/state (used for time formatting).
        json_input (list): backup entries as returned by the server.
            NOTE: entries (and their nested "Schedule" dicts) are mutated
            in place via pop(), matching the original behavior.

    Returns:
        list: one {backup_name: summary_dict} mapping per backup.
    """
    backup_list = []
    for entry in json_input:
        raw_backup = entry.pop("Backup", {})
        metadata = raw_backup.pop("Metadata", {})
        backup_name = raw_backup.pop("Name", {})
        backup = {
            "ID": raw_backup.get("ID", ""),
            "Local database": raw_backup.get("DBPath", ""),
        }
        backup["Versions"] = int(metadata.get("BackupListCount", 0))
        backup["Last run"] = {
            "Duration": helper.format_duration(
                metadata.get("LastBackupDuration", "0")),
            "Started": helper.format_time(
                data, metadata.get("LastBackupStarted", "0")),
            "Stopped": helper.format_time(
                data, metadata.get("LastBackupFinished", "0")),
        }
        backup["Size"] = {
            "Local": metadata.get("SourceSizeString", ""),
            "Backend": metadata.get("TargetSizeString", "")
        }

        schedule = entry.get("Schedule", None)
        if schedule is not None:
            # format_time may return None for unparseable values; only add
            # the entries when formatting succeeded.
            next_run = helper.format_time(data, schedule.pop("Time", ""))
            if next_run is not None:
                schedule["Next run"] = next_run
            last_run = helper.format_time(data, schedule.pop("LastRun", ""))
            if last_run is not None:
                schedule["Last run"] = last_run
            # Drop internal fields that are not useful in the listing.
            for field in ("AllowedDays", "ID", "Rule", "Tags"):
                schedule.pop(field, None)
            backup["Schedule"] = schedule

        progress_state = entry.get("Progress", None)
        if progress_state is not None:
            backup["Progress"] = _progress_summary(progress_state)

        backup_list.append({backup_name: backup})
    return backup_list