def fetch_notifications(data, notification_ids, method):
    """Fetch the notifications matching notification_ids from the API.

    Args:
        data: config dict holding server/auth settings.
        notification_ids: iterable of notification IDs to keep.
        method: "get" applies notification_filter to the result.

    Returns:
        list of matching notification dicts (possibly empty).
    """
    common.verify_token(data)
    common.log_output("Fetching notifications from API...", False)
    baseurl = common.create_baseurl(data, "/api/v1/notifications")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    notification_list = []
    r = requests.get(baseurl, headers=headers, cookies=cookies, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code != 200:
        # str() each id so the join cannot fail on integer IDs
        id_list = ', '.join(str(i) for i in notification_ids)
        message = "Error getting notifications " + id_list
        common.log_output(message, True, r.status_code)
    else:
        # Use a fresh name for the payload; reusing `data` would shadow
        # the config dict passed in.
        payload = r.json()
        for notification in payload:
            if notification.get("ID", -1) in notification_ids:
                notification_list.append(notification)

    # Only get uses a filter
    if method == "get":
        notification_list = notification_filter(notification_list)

    return notification_list
def create_backup_export(data, backup_id, output=None, path=None,
                         timestamp=False):
    """Export a backup configuration (passwords included) to a file.

    Args:
        data: config dict holding server/auth settings.
        backup_id: ID of the backup to export.
        output: "yaml"/"YAML" writes YAML; anything else writes JSON.
        path: optional output directory; defaults to the working directory.
        timestamp: if True, append a timestamp to the file name.

    Exits with status 2 when the backup is missing or the request fails.
    """
    baseurl = common.create_baseurl(
        data,
        "/api/v1/backup/" + str(backup_id) + "/export?export-passwords=true")
    common.log_output("Fetching backup data from API...", False)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    r = requests.get(baseurl, headers=headers, cookies=cookies, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 404:
        common.log_output("Backup not found", True, r.status_code)
        sys.exit(2)
    elif r.status_code != 200:
        common.log_output("Error connecting", True, r.status_code)
        sys.exit(2)

    backup = r.json()
    name = backup['Backup']['Name']

    # YAML or JSON?
    if output in ["YAML", "yaml"]:
        filetype = ".yml"
    else:
        filetype = ".json"

    # Decide on where to output file
    if timestamp:
        stamp = datetime.datetime.now().strftime("%d.%m.%Y_%I.%M_%p")
        file_name = name + "_" + str(stamp) + filetype
    else:
        file_name = name + filetype
    if path is None:
        path = file_name
    else:
        path = common.ensure_trailing_slash(path)
        path = os.path.dirname(expanduser(path)) + "/" + file_name

    # Check if output folder exists
    directory = os.path.dirname(path)
    if directory != '' and not os.path.exists(directory):
        message = "Created directory \"" + directory + "\""
        common.log_output(message, True)
        os.makedirs(directory)
    # Check if output file exists
    if os.path.isfile(path) is True:
        agree = input('File already exists, overwrite? [Y/n]:')
        # Case-insensitive check, consistent with delete_backup /
        # delete_database; an empty answer takes the [Y/n] default (yes).
        if agree.lower() not in ["y", "yes", ""]:
            return
    with open(path, 'w') as file:
        if filetype == ".json":
            file.write(json.dumps(backup, indent=4, default=str))
        else:
            file.write(yaml.dump(backup, default_flow_style=False))
    common.log_output("Created " + path, True, 200)
def get_logs(data, log_type, backup_id, remote, follow, lines, show_all,
             output_type):
    """Dispatch to the appropriate log fetcher, optionally following it.

    Fix: an unrecognized log_type previously matched no branch, leaving
    `function` undefined and crashing with a NameError; it now exits
    cleanly with a message.
    """
    common.verify_token(data)

    if log_type == "backup" and backup_id is None:
        common.log_output("A backup id must be provided with --id", True)
        sys.exit(2)

    # Treating functions as objects to allow any function to be "followed"
    if log_type == "backup" and remote:
        def function():
            get_backup_logs(data, backup_id, "remotelog", lines, show_all,
                            output_type)
    elif log_type == "backup" and not remote:
        def function():
            get_backup_logs(data, backup_id, "log", lines, show_all,
                            output_type)
    elif log_type in ["profiling", "information", "warning", "error"]:
        def function():
            get_live_logs(data, log_type, lines, output_type)
    elif log_type == "stored":
        def function():
            get_stored_logs(data, lines, show_all, output_type)
    else:
        # Previously fell through to a NameError on `function`
        common.log_output("Unsupported log type: " + str(log_type), True)
        sys.exit(2)

    # Follow the function or just run it once
    if follow:
        follow_function(data, function, 10)
    else:
        function()
def prompt_password(password, interactive):
    """Return a password, prompting interactively when none was given.

    Fix: the non-interactive error message read "required required".

    Args:
        password: the password supplied so far, or None.
        interactive: whether we may prompt the user on the terminal.

    Returns:
        The password. Exits with status 2 when none can be obtained.
    """
    if password is None and interactive:
        common.log_output("Authentication required", False)
        password = getpass.getpass('Password:')
    elif password is None and not interactive:
        common.log_output("A password is required", True)
        sys.exit(2)
    return password
def output_dump(data, output):
    """Render `data` as JSON or YAML and emit it via the common logger.

    `output` selects JSON when it equals "json" (case-insensitive);
    anything else, including None, yields YAML.
    """
    wants_json = output is not None and output.lower() == "json"
    if wants_json:
        rendered = json.dumps(data, indent=4, default=str)
    else:
        rendered = yaml.safe_dump(data, default_flow_style=False,
                                  allow_unicode=True)
    common.log_output(rendered, True, 200)
def get_resources(data, resource_type, resource_id):
    """Fetch backups or notifications by id and print them as YAML."""
    if resource_type == "backup":
        resources = fetch_backups(data, resource_id, "get")
    elif resource_type == "notification":
        # NOTE(review): resource_id is passed through unwrapped here,
        # unlike describe_resource which wraps it in a list — presumably
        # callers already pass a list; confirm against call sites.
        resources = fetch_notifications(data, resource_id, "get")
    dumped = yaml.safe_dump(resources, default_flow_style=False)
    common.log_output(dumped, True, 200)
def describe_resource(data, resource_type, resource_id):
    """Fetch a single backup or notification and print it as YAML."""
    if resource_type == "backup":
        described = fetch_backups(data, [resource_id], "describe")
    elif resource_type == "notification":
        described = fetch_notifications(data, [resource_id], "describe")
    # Must use safe_dump for python 2 compatibility
    dumped = yaml.safe_dump(described, default_flow_style=False)
    common.log_output(dumped, True, 200)
def _truncate_backup_log_field(log, key, show_all, limit=15):
    """Split log[key] on newlines and cap it at `limit` lines.

    When truncated (and show_all is False) a final line notes how many
    lines were hidden. No-op when the field is absent or None.
    """
    if log.get(key, None) is None:
        return
    lines = log[key].split("\n")
    if len(lines) > limit and not show_all:
        hidden = str(len(lines) - limit) + " hidden lines (show with --all)"
        lines = lines[:limit] + [hidden]
    log[key] = lines


def get_backup_logs(data, backup_id, log_type, page_size, show_all,
                    output_type):
    """Fetch and display up to page_size entries of a backup's log.

    Args:
        data: config dict holding server/auth settings.
        backup_id: ID of the backup whose log to fetch.
        log_type: API endpoint suffix (e.g. "log" or "remotelog").
        page_size: number of entries to request/show.
        show_all: disable 15-line truncation of Message/Exception fields.
        output_type: passed through to helper.output_dump.

    Refactor: the Message/Exception truncation logic was duplicated
    verbatim; it now lives in _truncate_backup_log_field.
    """
    endpoint = "/api/v1/backup/" + str(backup_id) + "/" + log_type
    baseurl = common.create_baseurl(data, endpoint)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    params = {'pagesize': page_size}
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     params=params, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 500:
        message = "Error getting log, "
        message += "database may be locked by backup"
        common.log_output(message, True)
        return
    elif r.status_code != 200:
        common.log_output("Error getting log", True, r.status_code)
        return

    result = r.json()[-page_size:]
    logs = []
    for log in result:
        # "list" operations carry large payloads, so drop their Data
        if log.get("Operation", "") == "list":
            log["Data"] = "Expunged"
        else:
            log["Data"] = json.loads(log.get("Data", "{}"))
            size = helper.format_bytes(log["Data"].get("Size", 0))
            log["Data"]["Size"] = size
        _truncate_backup_log_field(log, "Message", show_all)
        _truncate_backup_log_field(log, "Exception", show_all)
        log["Timestamp"] = datetime.datetime.fromtimestamp(
            int(log.get("Timestamp", 0))).strftime("%I:%M:%S %p %d/%m/%Y")
        logs.append(log)
    helper.output_dump(logs, output_type)
def describe_resources(data, resource_type, resource_ids):
    """Fetch multiple backups or notifications and print them as YAML."""
    if resource_type == "backup":
        described = fetch_backups(data, resource_ids, "describe")
    elif resource_type == "notification":
        described = fetch_notifications(data, resource_ids, "describe")
    dumped = yaml.safe_dump(described, default_flow_style=False,
                            allow_unicode=True)
    common.log_output(dumped, True, 200)
def follow_function(data, function, interval=5):
    """Run `function` every `interval` seconds until the user hits Ctrl+C."""
    try:
        while True:
            compatibility.clear_prompt()
            function()
            now = datetime.datetime.now()
            common.log_output(helper.format_time(data, now), True)
            common.log_output("Press control+C to quit", True)
            time.sleep(interval)
    except KeyboardInterrupt:
        # Ctrl+C is the normal way to stop following — not an error
        return
def fetch_resource_list(data, resource):
    """GET /api/v1/<resource> and return the decoded JSON payload.

    Exits with status 2 on any non-200 response.
    """
    baseurl = common.create_baseurl(data, "/api/v1/" + resource)
    common.log_output("Fetching " + resource + " list from API...", False)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    r = requests.get(baseurl, headers=headers, cookies=cookies, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 200:
        return r.json()
    common.log_output("Error connecting", True, r.status_code)
    sys.exit(2)
def toggle_precise(data, mode=None):
    """Enable, disable, or toggle "precise" mode and persist the config.

    mode "enable"/"disable" sets the flag explicitly; any other value
    (including None) flips the current setting. Returns the config dict.
    """
    if mode == "enable":
        new_value = True
    elif mode == "disable":
        new_value = False
    else:
        new_value = not data.get("precise", False)
    data["precise"] = new_value
    common.write_config(data)
    common.log_output("precise mode: " + str(data.get("precise", True)), True)
    return data
def call_backup_subcommand(data, url, fail_message, success_message):
    """POST to a backup subcommand endpoint and log the outcome.

    Logs `success_message` on 200, otherwise `fail_message` with the
    HTTP status code.
    """
    common.verify_token(data)
    baseurl = common.create_baseurl(data, url)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    response = requests.post(baseurl, headers=headers, cookies=cookies,
                             verify=verify)
    common.check_response(data, response.status_code)
    if response.status_code == 200:
        common.log_output(success_message, True, 200)
        return
    common.log_output(fail_message, True, response.status_code)
def get_live_logs(data, level, page_size=5, first_id=0):
    """Poll the server's live log at `level` and print the newest entries.

    NOTE(review): get_logs calls this as (data, log_type, lines,
    output_type), so output_type lands in the first_id slot — confirm
    which signature is intended.
    """
    baseurl = common.create_baseurl(data, "/api/v1/logdata/poll")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    params = {'level': level, 'id': first_id, 'pagesize': page_size}
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     params=params, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 500:
        common.log_output(
            "Error getting log, database may be locked by backup", True)
        return
    if r.status_code != 200:
        common.log_output("Error getting log", True, r.status_code)
        return

    entries = r.json()[-page_size:]
    for entry in entries:
        entry["When"] = helper.format_time(entry.get("When", ""), True)
    if not entries:
        common.log_output("No log entries found", True)
        return
    common.log_output(
        yaml.safe_dump(entries, default_flow_style=False), True)
def abort_task(data, task_id):
    """Ask the server to abort the task with the given id."""
    common.verify_token(data)
    endpoint = "/api/v1/task/" + str(task_id) + "/abort"
    baseurl = common.create_baseurl(data, endpoint)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    response = requests.post(baseurl, headers=headers, cookies=cookies,
                             verify=verify)
    common.check_response(data, response.status_code)
    if response.status_code == 200:
        common.log_output("Task aborted", True, 200)
        return
    # (trailing space in the message kept byte-for-byte from the original)
    common.log_output("Error aborting task ", True, response.status_code)
def run_backup(data, backup_id):
    """Schedule the backup with the given id to run now."""
    common.verify_token(data)
    endpoint = "/api/v1/backup/" + str(backup_id) + "/run"
    baseurl = common.create_baseurl(data, endpoint)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    response = requests.post(baseurl, headers=headers, cookies=cookies,
                             verify=verify)
    common.check_response(data, response.status_code)
    if response.status_code == 200:
        common.log_output("Backup scheduled", True, 200)
        return
    # (trailing space in the message kept byte-for-byte from the original)
    common.log_output("Error scheduling backup ", True, response.status_code)
def display_parameters(data):
    """Print the contents of the configured parameters file, if any.

    Silently returns when no parameters_file is configured.

    Fix: narrowed the bare `except Exception` (which hid programming
    errors such as a missing file or bad attribute) to yaml.YAMLError,
    consistent with load_config's error handling.
    """
    path = data.get("parameters_file", None)
    if path is None:
        return
    with io.open(path, 'r', encoding="UTF-8") as file_handle:
        try:
            parameters_file = yaml.safe_load(file_handle)
        except yaml.YAMLError:
            common.log_output("Could not load parameters file", True)
            return
    output = yaml.dump(parameters_file, default_flow_style=False)
    common.log_output(output, True)
def load_config(data, overwrite=False):
    """Create the config file if needed, then load and validate it.

    Returns the parsed config dict; exits with status 2 on YAML errors.
    """
    # If the config file doesn't exist, create it
    if overwrite is True or os.path.isfile(config.CONFIG_FILE) is False:
        common.log_output("Creating config file", True)
        common.write_config(data)
    # Load the configuration from the config file
    with io.open(config.CONFIG_FILE, 'r', encoding="UTF-8") as handle:
        try:
            loaded = yaml.safe_load(handle)
            common.validate_config(loaded)
            return loaded
        except yaml.YAMLError as exc:
            common.log_output(exc, True)
            sys.exit(2)
def format_time(time_string, precise=True):
    """Convert an API time string into a human-readable local-time string.

    Returns None for "unset" sentinel values or unparsable input.
    With precise=True returns "YYYY-MM-DD HH:MM:SS"; otherwise a
    relative form ("Yesterday HH:MM", a bare time, or "DD/MM/YYYY"
    when more than a day away).
    """
    # Ensure it's a string (the API may hand us e.g. an int 0)
    time_string = str(time_string)
    # Filter out "unset" time
    if time_string == "0001-01-01T00:00:00Z" or time_string == "0":
        return None
    # We want to fail silently if we're not provided a parsable time_string.
    try:
        datetime_object = dateparser.parse(time_string)
    except Exception as exc:
        common.log_output(exc, False)
        return None
    # NOTE(review): if `dateparser` is the PyPI dateparser package it
    # returns None (rather than raising) on bad input, and .replace()
    # below would then raise AttributeError — confirm which parser is
    # actually imported at the top of the file.
    # Print a precise, but human readable string if precise is true
    if precise:
        datetime_object = datetime_object.replace(tzinfo=tz.tzutc())
        datetime_object = datetime_object.astimezone(tz.tzlocal())
        return datetime_object.strftime("%Y-%m-%d %H:%M:%S")
    # Now for comparison
    now = datetime.datetime.now()
    try:
        # Take care of timezones: label both as UTC then convert to local
        now = now.replace(tzinfo=tz.tzutc())
        now = now.astimezone(tz.tzlocal())
        datetime_object = datetime_object.replace(tzinfo=tz.tzutc())
        datetime_object = datetime_object.astimezone(tz.tzlocal())
    except Exception as exc:
        common.log_output(exc, False)
        return None
    # Get the delta (always non-negative: operands are swapped as needed)
    if datetime_object > now:
        delta = (datetime_object - now)
    else:
        delta = (now - datetime_object)
    # Display hours if within 24 hours of now, else display dmy
    # NOTE(review): since delta is never negative, delta.days == -1 looks
    # unreachable, so the "Tomorrow" branch may be dead code — confirm.
    if abs(delta.days) > 1:
        return datetime_object.strftime("%d/%m/%Y")
    elif delta.days == 1:
        return "Yesterday " + datetime_object.strftime("%I:%M %p")
    elif delta.days == -1:
        return "Tomorrow " + datetime_object.strftime("%I:%M %p")
    else:
        return datetime_object.strftime("%I:%M %p")
def set_parameters_file(data, args, file=None):
    """Set or disable the parameters-file entry in the config.

    Returns the (possibly updated) config dict.
    """
    # Disable parameters file if requested
    if args.get("disable", False):
        data.pop("parameters_file", None)
        common.write_config(data)
        common.log_output("Disabling parameters-file", True)
        return data

    if file is not None:
        data["parameters_file"] = file
        common.write_config(data)
        common.log_output("Setting parameters-file", True)
    return data
def list_resources(data, resource):
    """List resources of the given type as YAML; exits(2) when empty.

    NOTE(review): this file defines list_resources more than once —
    later definitions shadow this one at import time.
    """
    common.verify_token(data)
    if resource == "backups":
        items = fetch_backup_list(data)
    else:
        items = fetch_resource_list(data, resource)

    items = list_filter(items, resource)
    if not items:
        common.log_output("No items found", True)
        sys.exit(2)
    # Must use safe_dump for python 2 compatibility
    dumped = yaml.safe_dump(items, default_flow_style=False)
    common.log_output(dumped, True, 200)
def list_resources(data, resource, output_type):
    """List resources of the given type using the selected output format."""
    common.verify_token(data)
    if resource == "backups":
        items = fetch_backup_list(data)
    elif resource == "databases":
        items = fetch_database_list(data)
    else:
        items = fetch_resource_list(data, resource)

    items = list_filter(data, items, resource)
    if not items:
        common.log_output("No items found", True)
        sys.exit(2)
    helper.output_dump(items, output_type)
def dismiss_notifications(data, resource_id="all"):
    """Dismiss one notification by id, or every notification when "all"."""
    common.verify_token(data)
    if resource_id == "all":
        # Get all notification ID's
        notifications = fetch_resource_list(data, "notifications")
        id_list = [notification["ID"] for notification in notifications]
    else:
        id_list = [resource_id]
    if not id_list:
        common.log_output("No notifications", True)
        return
    for item in id_list:
        delete_resource(data, "notification", item, True)
def list_resources(data, resource):
    """List resources of the given type and print them as YAML."""
    common.verify_token(data)
    if resource == "backups":
        items = fetch_backup_list(data)
    elif resource == "databases":
        items = fetch_database_list(data)
    else:
        items = fetch_resource_list(data, resource)

    items = list_filter(data, items, resource)
    if not items:
        common.log_output("No items found", True)
        sys.exit(2)
    dumped = yaml.safe_dump(items, default_flow_style=False,
                            allow_unicode=True)
    common.log_output(dumped, True, 200)
def _truncate_stored_log_field(log, key, show_all, limit=15):
    """Split log[key] on newlines and cap it at `limit` lines.

    When truncated (and show_all is False) a final line notes how many
    lines were hidden. No-op when the field is absent or None.
    """
    if log.get(key, None) is None:
        return
    lines = log[key].split("\n")
    if len(lines) > limit and not show_all:
        hidden = str(len(lines) - limit) + " hidden lines (show with --all)"
        lines = lines[:limit] + [hidden]
    log[key] = lines


def get_stored_logs(data, page_size=5, show_all=False):
    """Fetch and print up to page_size stored server log entries.

    Args:
        data: config dict holding server/auth settings.
        page_size: number of entries to request/show.
        show_all: disable 15-line truncation of Message/Exception fields.

    Refactor: the Message/Exception truncation logic was duplicated
    verbatim; it now lives in _truncate_stored_log_field.
    """
    baseurl = common.create_baseurl(data, "/api/v1/logdata/log")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    params = {'pagesize': page_size}
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     params=params, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 500:
        message = "Error getting log, "
        message += "database may be locked by backup"
        common.log_output(message, True)
        return
    elif r.status_code != 200:
        common.log_output("Error getting log", True, r.status_code)
        return
    result = r.json()[-page_size:]
    logs = []
    for log in result:
        _truncate_stored_log_field(log, "Message", show_all)
        _truncate_stored_log_field(log, "Exception", show_all)
        logs.append(log)
    if len(logs) == 0:
        common.log_output("No log entries found", True)
        return
    message = yaml.safe_dump(logs, default_flow_style=False,
                             allow_unicode=True)
    common.log_output(message, True)
def update_backup(data, backup_id, backup_config, import_meta=True):
    """PUT an updated backup configuration to the server.

    When import_meta is falsy (but not None), the Metadata section is
    stripped from the config before upload.
    """
    common.verify_token(data)

    # Strip metadata if requested
    if import_meta is not None and not import_meta:
        backup_config.get("Backup", {}).pop("Metadata", None)

    baseurl = common.create_baseurl(data, "/api/v1/backup/" + str(backup_id))
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    payload = json.dumps(backup_config, default=str)
    response = requests.put(baseurl, headers=headers, cookies=cookies,
                            data=payload, verify=verify)
    common.check_response(data, response.status_code)
    if response.status_code == 404:
        common.log_output("Backup not found", True, response.status_code)
        return
    if response.status_code != 200:
        common.log_output("Error updating backup", True, response.status_code)
        return
    common.log_output("Backup updated", True, 200)
def export_backup(data, backup_id, output=None, path=None):
    """Export a backup configuration to a YAML or JSON file.

    Args:
        data: config dict holding server/auth settings.
        backup_id: ID of the backup to export.
        output: "json"/"JSON" writes JSON; anything else writes YAML.
        path: optional output path; defaults to a timestamped file name.

    Fix: the local variable holding the timestamp was named `time`,
    shadowing the imported time module; renamed to `stamp`, consistent
    with create_backup_export.
    """
    # Get backup config
    result = fetch_backups(data, [backup_id], "describe")
    if result is None or len(result) == 0:
        common.log_output("Could not fetch backup", True)
        return
    backup = result[0]
    # Strip DisplayNames and Progress
    # backup.pop("DisplayNames", None)
    backup.pop("Progress", None)
    # Fetch server version
    systeminfo = fetch_resource_list(data, "systeminfo")
    if systeminfo.get("ServerVersion", None) is None:
        common.log_output("Error exporting backup", True)
        sys.exit(2)
    backup["CreatedByVersion"] = systeminfo["ServerVersion"]

    # YAML or JSON?
    if output in ["JSON", "json"]:
        filetype = ".json"
    else:
        filetype = ".yml"

    # Decide on where to output file
    if path is None:
        stamp = datetime.datetime.now().strftime("%d.%m.%Y_%I.%M_%p")
        path = "backup_config_" + str(stamp) + filetype
    else:
        path = expanduser(path)

    # Check if output folder exists
    directory = os.path.dirname(path)
    if directory != '' and not os.path.exists(directory):
        message = "Created directory \"" + directory + "\""
        common.log_output(message, True)
        os.makedirs(directory)
    # Check if output file exists
    if os.path.isfile(path) is True:
        agree = input('File already exists, overwrite? [Y/n]:')
        if agree not in ["Y", "y", "yes", "YES", ""]:
            return
    with open(path, 'w') as outfile:
        if filetype == ".json":
            outfile.write(json.dumps(backup, indent=4, default=str))
        else:
            outfile.write(yaml.dump(backup, default_flow_style=False))
    common.log_output("Created " + path, True, 200)
def delete_backup(data, backup_id, confirm=False, delete_db=False):
    """Delete a backup job, optionally its local database (never remote files).

    Prompts the user unless confirm is True; returns silently when the
    backup does not exist or the user declines.
    """
    common.verify_token(data)
    # Check if the backup exists
    found = fetch_backups(data, [backup_id], "get")
    if found is None or len(found) == 0:
        return
    if not confirm:
        # Confirm deletion with user
        name = next(iter(found[0]))
        prompt = 'Delete "' + name + '"? (ID:' + str(backup_id) + ')'
        answer = input(prompt + ' ' + '[y/N]:')
        if answer.lower() not in ["y", "yes"]:
            common.log_output("Backup not deleted", True)
            return
    baseurl = common.create_baseurl(data, "/api/v1/backup/" + str(backup_id))
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    # We cannot delete remote files because the captcha is graphical
    payload = {'delete-local-db': delete_db, 'delete-remote-files': False}
    response = requests.delete(baseurl, headers=headers, cookies=cookies,
                               params=payload, verify=verify)
    common.check_response(data, response.status_code)
    if response.status_code != 200:
        common.log_output("Error deleting backup", True, response.status_code)
        return
    common.log_output("Backup deleted", True, 200)
def delete_database(data, backup_id, confirm=False, recreate=False):
    """Delete a backup's local database, optionally recreating it after.

    Prompts the user unless confirm is True; returns silently when the
    backup does not exist or the user declines.
    """
    common.verify_token(data)
    # Check if the backup exists
    found = fetch_backups(data, [backup_id], "get")
    if found is None or len(found) == 0:
        return
    if not confirm:
        # Confirm deletion with user
        name = next(iter(found[0]))
        prompt = 'Delete database ' + str(backup_id)
        prompt += ' belonging to "' + name + '"?'
        answer = input(prompt + ' ' + '[y/N]:')
        if answer.lower() not in ["y", "yes"]:
            common.log_output("Database not deleted", True)
            return
    baseurl = common.create_baseurl(
        data, "/api/v1/backup/" + str(backup_id) + "/deletedb")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    response = requests.post(baseurl, headers=headers, cookies=cookies,
                             verify=verify)
    common.check_response(data, response.status_code)
    if response.status_code != 200:
        common.log_output("Error deleting database", True,
                          response.status_code)
        return
    common.log_output("Database deleted", True, 200)
    if recreate:
        repair_database(data, backup_id)
def fetch_backups(data, backup_ids, method):
    """Fetch info for each backup id; exits with status 2 when none found.

    Args:
        data: config dict holding server/auth settings.
        backup_ids: iterable of backup IDs to fetch.
        method: "get" applies backup_filter to the result.

    Fix: the response payload was previously assigned to the name `data`,
    clobbering the config dict, so check_response (and the error message
    path) on later loop iterations received a backup dict instead of the
    config. The payload now uses its own name.
    """
    common.verify_token(data)
    common.log_output("Fetching backups from API...", False)
    progress_state, active_id = fetch_progress_state(data)
    progress = progress_state.get("OverallProgress", 1)
    backup_list = []
    baseurl = common.create_baseurl(data, "/api/v1/backup/")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    # Iterate over backup_ids and fetch their info
    for backup_id in backup_ids:
        r = requests.get(baseurl + str(backup_id), headers=headers,
                         cookies=cookies, verify=verify)
        common.check_response(data, r.status_code)
        if r.status_code != 200:
            message = "Error getting backup " + str(backup_id)
            common.log_output(message, True, r.status_code)
            continue
        backup = r.json()["data"]
        item_id = backup.get("Backup", {}).get("ID", 0)
        # Attach live progress only to the currently running backup
        if active_id is not None and item_id == active_id and progress != 1:
            backup["Progress"] = progress_state
        else:
            backup["Progress"] = None
        backup_list.append(backup)
    if len(backup_list) == 0:
        sys.exit(2)
    # Only get uses a filter
    if method == "get":
        backup_list = backup_filter(backup_list)
    return backup_list