import base64
import datetime
import hashlib
import io
import json
import os
import random
import sys
from os.path import expanduser, splitext

import requests
import yaml

# common and helper are project-local modules providing URL, cookie, header,
# logging, and formatting utilities. prompt_password, notification_filter,
# backup_filter, repair_database, and ALPHABET are defined elsewhere in this
# module.
import common
import helper


def update_backup(data, backup_id, backup_config, import_meta=True):
    common.verify_token(data)
    # Strip metadata if requested
    if import_meta is not None and not import_meta:
        backup_config.get("Backup", {}).pop("Metadata", None)
    baseurl = common.create_baseurl(data, "/api/v1/backup/" + str(backup_id))
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    payload = json.dumps(backup_config, default=str)
    r = requests.put(baseurl, headers=headers, cookies=cookies,
                     data=payload, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 404:
        common.log_output("Backup not found", True, r.status_code)
        return
    elif r.status_code != 200:
        common.log_output("Error updating backup", True, r.status_code)
        return
    common.log_output("Backup updated", True, 200)


def delete_database(data, backup_id, confirm=False, recreate=False):
    common.verify_token(data)
    # Check if the backup exists
    result = fetch_backups(data, [backup_id], "get")
    if result is None or len(result) == 0:
        return
    if not confirm:
        # Confirm deletion with user
        name = next(iter(result[0]))
        message = 'Delete database ' + str(backup_id)
        message += ' belonging to "' + name + '"?'
        options = '[y/N]:'
        agree = input(message + ' ' + options)
        if agree.lower() not in ["y", "yes"]:
            common.log_output("Database not deleted", True)
            return
    baseurl = common.create_baseurl(
        data, "/api/v1/backup/" + str(backup_id) + "/deletedb")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    r = requests.post(baseurl, headers=headers, cookies=cookies,
                      verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code != 200:
        common.log_output("Error deleting database", True, r.status_code)
        return
    common.log_output("Database deleted", True, 200)
    if recreate:
        repair_database(data, backup_id)


def get_live_logs(data, level, page_size, first_id, output_type):
    baseurl = common.create_baseurl(data, "/api/v1/logdata/poll")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    params = {'level': level, 'id': first_id, 'pagesize': page_size}
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     params=params, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 500:
        message = "Error getting log, database may be locked by backup"
        common.log_output(message, True)
        return
    elif r.status_code != 200:
        common.log_output("Error getting log", True, r.status_code)
        return
    result = r.json()[-page_size:]
    logs = []
    for log in result:
        log["When"] = helper.format_time(data, log.get("When", ""))
        logs.append(log)
    if len(logs) == 0:
        common.log_output("No log entries found", True)
        return
    helper.output_dump(logs, output_type)


def delete_backup(data, backup_id, confirm=False, delete_db=False):
    common.verify_token(data)
    # Check if the backup exists
    result = fetch_backups(data, [backup_id], "get")
    if result is None or len(result) == 0:
        return
    if not confirm:
        # Confirm deletion with user
        name = next(iter(result[0]))
        message = 'Delete "' + name + '"? (ID:' + str(backup_id) + ')'
        options = '[y/N]:'
        agree = input(message + ' ' + options)
        if agree.lower() not in ["y", "yes"]:
            common.log_output("Backup not deleted", True)
            return
    baseurl = common.create_baseurl(data, "/api/v1/backup/" + str(backup_id))
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    # We cannot delete remote files because the captcha is graphical
    payload = {'delete-local-db': delete_db, 'delete-remote-files': False}
    r = requests.delete(baseurl, headers=headers, cookies=cookies,
                        params=payload, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code != 200:
        common.log_output("Error deleting backup", True, r.status_code)
        return
    common.log_output("Backup deleted", True, 200)


def fetch_notifications(data, notification_ids, method):
    common.verify_token(data)
    common.log_output("Fetching notifications from API...", False)
    baseurl = common.create_baseurl(data, "/api/v1/notifications")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    notification_list = []
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code != 200:
        id_list = ', '.join(map(str, notification_ids))
        message = "Error getting notifications " + id_list
        common.log_output(message, True, r.status_code)
    else:
        # Use a local name for the response so the `data` config dict
        # isn't shadowed
        notifications = r.json()
        for notification in notifications:
            notification_id = notification.get("ID", -1)
            if notification_id in notification_ids:
                notification_list.append(notification)
    # Only get uses a filter
    if method == "get":
        notification_list = notification_filter(notification_list)
    return notification_list


def create_backup_export(data, backup_id, output=None, path=None,
                         timestamp=False):
    baseurl = common.create_baseurl(
        data,
        "/api/v1/backup/" + str(backup_id) + "/export?export-passwords=true")
    common.log_output("Fetching backup data from API...", False)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 404:
        common.log_output("Backup not found", True, r.status_code)
        sys.exit(2)
    elif r.status_code != 200:
        common.log_output("Error connecting", True, r.status_code)
        sys.exit(2)
    backup = r.json()
    name = backup['Backup']['Name']
    # YAML or JSON?
    if output in ["YAML", "yaml"]:
        filetype = ".yml"
    else:
        filetype = ".json"
    # Decide where to output the file
    if timestamp:
        stamp = datetime.datetime.now().strftime("%d.%m.%Y_%I.%M_%p")
        file_name = name + "_" + stamp + filetype
    else:
        file_name = name + filetype
    if path is None:
        path = file_name
    else:
        path = common.ensure_trailing_slash(path)
        path = os.path.dirname(expanduser(path)) + "/" + file_name
    # Create the output folder if it doesn't exist
    directory = os.path.dirname(path)
    if directory != '' and not os.path.exists(directory):
        os.makedirs(directory)
        common.log_output("Created directory \"" + directory + "\"", True)
    # Ask before overwriting an existing file
    if os.path.isfile(path):
        agree = input('File already exists, overwrite? [Y/n]:')
        if agree.lower() not in ["y", "yes", ""]:
            return
    with open(path, 'w') as file:
        if filetype == ".json":
            file.write(json.dumps(backup, indent=4, default=str))
        else:
            file.write(yaml.dump(backup, default_flow_style=False))
    common.log_output("Created " + path, True, 200)


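# A minimal, self-contained sketch (not part of the client) of the timestamped
# filename scheme used by create_backup_export above; the name and filetype
# arguments are illustrative defaults. A job called "Photos" exported on
# 25 Dec 2024 at 1:30 PM would yield "Photos_25.12.2024_01.30_PM.yml".
def _demo_export_filename(name="Photos", filetype=".yml"):
    stamp = datetime.datetime.now().strftime("%d.%m.%Y_%I.%M_%p")
    return name + "_" + stamp + filetype

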
def get_backup_logs(data, backup_id, log_type, page_size=5, show_all=False):
    endpoint = "/api/v1/backup/" + str(backup_id) + "/" + log_type
    baseurl = common.create_baseurl(data, endpoint)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    params = {'pagesize': page_size}
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     params=params, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 500:
        message = "Error getting log, database may be locked by backup"
        common.log_output(message, True)
        return
    elif r.status_code != 200:
        common.log_output("Error getting log", True, r.status_code)
        return
    result = r.json()[-page_size:]
    logs = []
    for log in result:
        if log.get("Operation", "") == "list":
            log["Data"] = "Expunged"
        else:
            log["Data"] = json.loads(log.get("Data", "{}"))
            size = helper.format_bytes(log["Data"].get("Size", 0))
            log["Data"]["Size"] = size
        if log.get("Message", None) is not None:
            log["Message"] = log["Message"].split("\n")
            message_length = len(log["Message"])
            if message_length > 15 and not show_all:
                log["Message"] = log["Message"][:15]
                lines = str(message_length - 15)
                hidden_message = lines + " hidden lines (show with --all)"
                log["Message"].append(hidden_message)
        if log.get("Exception", None) is not None:
            log["Exception"] = log["Exception"].split("\n")
            exception_length = len(log["Exception"])
            if exception_length > 15 and not show_all:
                log["Exception"] = log["Exception"][:15]
                lines = str(exception_length - 15)
                hidden_message = lines + " hidden lines (show with --all)"
                log["Exception"].append(hidden_message)
        log["Timestamp"] = datetime.datetime.fromtimestamp(
            int(log.get("Timestamp", 0))).strftime("%I:%M:%S %p %d/%m/%Y")
        logs.append(log)
    message = yaml.safe_dump(logs, default_flow_style=False,
                             allow_unicode=True)
    common.log_output(message, True)


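# A self-contained sketch (not part of the client) of the truncation rule that
# get_backup_logs and get_stored_logs apply to multi-line Message and Exception
# fields: anything over 15 lines is cut to 15, plus a trailer counting the
# hidden lines. The 20-line sample input is synthetic.
def _demo_log_truncation(show_all=False):
    lines = ["line %d" % i for i in range(1, 21)]
    if len(lines) > 15 and not show_all:
        hidden = str(len(lines) - 15) + " hidden lines (show with --all)"
        lines = lines[:15] + [hidden]
    return lines

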
def fetch_server_state(data):
    baseurl = common.create_baseurl(data, "/api/v1/serverstate")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     verify=verify)
    if r.status_code != 200:
        server_state = {}
    else:
        server_state = r.json()
    return server_state


def call_backup_subcommand(data, url, fail_message, success_message):
    common.verify_token(data)
    baseurl = common.create_baseurl(data, url)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    r = requests.post(baseurl, headers=headers, cookies=cookies,
                      verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code != 200:
        common.log_output(fail_message, True, r.status_code)
        return
    common.log_output(success_message, True, 200)


def fetch_resource_list(data, resource):
    baseurl = common.create_baseurl(data, "/api/v1/" + resource)
    common.log_output("Fetching " + resource + " list from API...", False)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code != 200:
        common.log_output("Error connecting", True, r.status_code)
        sys.exit(2)
    return r.json()


def get_stored_logs(data, page_size=5, show_all=False):
    baseurl = common.create_baseurl(data, "/api/v1/logdata/log")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    params = {'pagesize': page_size}
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     params=params, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 500:
        message = "Error getting log, database may be locked by backup"
        common.log_output(message, True)
        return
    elif r.status_code != 200:
        common.log_output("Error getting log", True, r.status_code)
        return
    result = r.json()[-page_size:]
    logs = []
    for log in result:
        if log.get("Message", None) is not None:
            log["Message"] = log["Message"].split("\n")
            message_length = len(log["Message"])
            if message_length > 15 and not show_all:
                log["Message"] = log["Message"][:15]
                lines = str(message_length - 15)
                hidden_message = lines + " hidden lines (show with --all)"
                log["Message"].append(hidden_message)
        if log.get("Exception", None) is not None:
            log["Exception"] = log["Exception"].split("\n")
            exception_length = len(log["Exception"])
            if exception_length > 15 and not show_all:
                log["Exception"] = log["Exception"][:15]
                lines = str(exception_length - 15)
                hidden_message = lines + " hidden lines (show with --all)"
                log["Exception"].append(hidden_message)
        logs.append(log)
    if len(logs) == 0:
        common.log_output("No log entries found", True)
        return
    message = yaml.safe_dump(logs, default_flow_style=False,
                             allow_unicode=True)
    common.log_output(message, True)


def abort_task(data, task_id):
    common.verify_token(data)
    path = "/api/v1/task/" + str(task_id) + "/abort"
    baseurl = common.create_baseurl(data, path)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    r = requests.post(baseurl, headers=headers, cookies=cookies,
                      verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code != 200:
        common.log_output("Error aborting task", True, r.status_code)
        return
    common.log_output("Task aborted", True, 200)


def run_backup(data, backup_id):
    common.verify_token(data)
    path = "/api/v1/backup/" + str(backup_id) + "/run"
    baseurl = common.create_baseurl(data, path)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    r = requests.post(baseurl, headers=headers, cookies=cookies,
                      verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code != 200:
        common.log_output("Error scheduling backup", True, r.status_code)
        return
    common.log_output("Backup scheduled", True, 200)


def validate_database_exists(data, db_path):
    common.verify_token(data)
    # Ask the server to validate that the path exists on its filesystem
    baseurl = common.create_baseurl(data, "/api/v1/filesystem/validate")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    payload = {'path': db_path}
    verify = data.get("server", {}).get("verify", True)
    r = requests.post(baseurl, headers=headers, params=payload,
                      cookies=cookies, verify=verify)
    common.check_response(data, r.status_code)
    return r.status_code == 200


def delete_notification(data, notification_id):
    common.verify_token(data)
    url = "/api/v1/notification/" + str(notification_id)
    baseurl = common.create_baseurl(data, url)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    r = requests.delete(baseurl, headers=headers, cookies=cookies,
                        verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 404:
        common.log_output("Notification not found", True, r.status_code)
        return
    elif r.status_code != 200:
        common.log_output("Error deleting notification", True, r.status_code)
        return
    common.log_output("Notification deleted", True, 200)


def set_password(data, password=None, disable_login=False, interactive=True):
    common.verify_token(data)
    if not disable_login:
        password = prompt_password(password, interactive)
    common.log_output("Setting server password...", False)
    baseurl = common.create_baseurl(data, "/api/v1/serversettings")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    if disable_login:
        password = None
    if password is None:
        salt = None
        hashed_password = None
    else:
        # Generate a salt. ALPHABET (defined elsewhere in the project) is
        # expected to contain valid base64 characters, since the salt is
        # b64-decoded below.
        salt = ''.join(random.choice(ALPHABET) for i in range(44))
        # Hash the salted password
        salt_password = password.encode() + base64.b64decode(salt)
        hashed_password = hashlib.sha256(salt_password).digest()
        hashed_password = base64.b64encode(hashed_password).decode('utf-8')
    payload = json.dumps({
        'server-passphrase-salt': salt,
        'server-passphrase': hashed_password,
        'has-asked-for-password-protection': 'true'
    })
    r = requests.patch(baseurl, headers=headers, cookies=cookies,
                       data=payload, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code != 200:
        common.log_output("Error updating password settings", True,
                          r.status_code)
        return
    common.log_output("Updated password settings", True, 200)


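# A minimal sketch (not part of the client) of the credential scheme used by
# set_password above: the server stores base64(sha256(password_bytes +
# b64decode(salt))). The salt here is derived from os.urandom purely for
# illustration (33 random bytes encode to exactly 44 base64 characters);
# set_password itself draws 44 characters from the module-level ALPHABET.
def _demo_password_hash(password="correct horse battery staple"):
    salt = base64.b64encode(os.urandom(33)).decode('utf-8')
    salted = password.encode() + base64.b64decode(salt)
    digest = hashlib.sha256(salted).digest()
    return salt, base64.b64encode(digest).decode('utf-8')

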
def fetch_progress_state(data):
    baseurl = common.create_baseurl(data, "/api/v1/progressstate")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    # Check progress state and get info for the running backup
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     verify=verify)
    if r.status_code != 200:
        active_id = -1
        progress_state = {}
    else:
        progress_state = r.json()
        active_id = progress_state.get("BackupID", -1)
    # Don't show progress on finished tasks
    phase = progress_state.get("Phase", "")
    if phase in ["Backup_Complete", "Error"]:
        return {}, 0
    return progress_state, active_id


def fetch_backups(data, backup_ids, method):
    common.verify_token(data)
    common.log_output("Fetching backups from API...", False)
    progress_state, active_id = fetch_progress_state(data)
    progress = progress_state.get("OverallProgress", 1)
    backup_list = []
    baseurl = common.create_baseurl(data, "/api/v1/backup/")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    # Iterate over backup_ids and fetch their info
    for backup_id in backup_ids:
        r = requests.get(baseurl + str(backup_id), headers=headers,
                         cookies=cookies, verify=verify)
        common.check_response(data, r.status_code)
        if r.status_code != 200:
            message = "Error getting backup " + str(backup_id)
            common.log_output(message, True, r.status_code)
            continue
        # Use a local name for the response so the `data` config dict
        # isn't shadowed on later loop iterations
        backup = r.json()["data"]
        item_id = backup.get("Backup", {}).get("ID", 0)
        if active_id is not None and item_id == active_id and progress != 1:
            backup["Progress"] = progress_state
        else:
            backup["Progress"] = None
        backup_list.append(backup)
    if len(backup_list) == 0:
        sys.exit(2)
    # Only get uses a filter
    if method == "get":
        backup_list = backup_filter(backup_list)
    return backup_list


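# Usage sketch (synthetic IDs, assuming a populated `data` config dict): fetch
# two jobs and report which one, if any, the server says is currently running,
# based on the Progress field that fetch_backups above attaches to each entry.
def _demo_list_running(data):
    for entry in fetch_backups(data, ["1", "2"], "list"):
        backup = entry.get("Backup", {})
        state = "running" if entry.get("Progress") else "idle"
        print(backup.get("Name"), state)

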
def import_backup(data, import_file, backup_id=None, import_meta=None):
    # Don't load nonexistent files
    if not os.path.isfile(import_file):
        common.log_output(import_file + " not found", True)
        return
    # Load the import file
    with io.open(import_file, 'r', encoding="UTF-8") as file_handle:
        extension = splitext(import_file)[1]
        if extension.lower() in ['.yml', '.yaml']:
            try:
                backup_config = yaml.safe_load(file_handle)
            except yaml.YAMLError:
                common.log_output("Failed to load file as YAML", True)
                return
        elif extension.lower() == ".json":
            try:
                backup_config = json.load(file_handle)
            except Exception:
                common.log_output("Failed to load file as JSON", True)
                return
        else:
            common.log_output("Unsupported file type: " + extension, True)
            return
    # Determine whether we're creating a new backup or updating an existing one
    if backup_id is not None:
        return update_backup(data, backup_id, backup_config, import_meta)
    common.verify_token(data)
    # Strip metadata if requested
    if import_meta is not True:
        backup_config["Backup"]["Metadata"] = {}
    # Prepare the imported JSON object as a string
    backup_config = json.dumps(backup_config, default=str)
    # Upload our JSON string as a file with requests
    files = {
        'config': ('backup_config.json', backup_config, 'application/json')
    }
    # Will eventually support passphrase-encrypted configs, but we will
    # need to decrypt them in the client in order to convert them
    payload = {
        'passphrase': '',
        'import_metadata': import_meta,
        'direct': True
    }
    cookies = common.create_cookies(data)
    baseurl = common.create_baseurl(data, "/api/v1/backups/import", True)
    verify = data.get("server", {}).get("verify", True)
    r = requests.post(baseurl, files=files, cookies=cookies, data=payload,
                      verify=verify)
    common.check_response(data, r.status_code)
    # The server reports some import errors as inline javascript with a
    # 200 OK http status code, so extract the message from the response
    # body to detect them
    try:
        text = r.text
        start = text.index("if (rp) { rp('") + 14
        end = text.index(", line ")
        error = text[start:end].replace("\\'", "'") + "."
        common.log_output(error, True)
        sys.exit(2)
    except ValueError:
        pass
    if r.status_code != 200:
        message = "Error importing backup configuration"
        common.log_output(message, True, r.status_code)
        sys.exit(2)
    common.log_output("Backup job created", True, 200)


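# Usage sketch (synthetic file name and ID, assuming a populated `data` config
# dict): import_backup creates a new job from an exported file, or routes to
# update_backup when a backup_id is given, optionally stripping metadata.
def _demo_import_usage(data):
    import_backup(data, "job_export.yml")          # create a new job
    import_backup(data, "job_export.yml",
                  backup_id=3, import_meta=False)  # update job 3, drop metadata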