def fetch_notifications(data, notification_ids, method):
    """Fetch the notifications whose IDs are in notification_ids.

    Args:
        data: config dict holding server connection settings and the token.
        notification_ids: iterable of notification IDs to keep.
        method: when "get", the result is passed through notification_filter.

    Returns:
        List of matching notification dicts (possibly empty).
    """
    common.verify_token(data)
    common.log_output("Fetching notifications from API...", False)
    baseurl = common.create_baseurl(data, "/api/v1/notifications")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    notification_list = []
    r = requests.get(baseurl, headers=headers, cookies=cookies, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code != 200:
        # IDs may be ints (they are compared against the integer "ID"
        # field below), so stringify before joining.
        id_list = ', '.join(map(str, notification_ids))
        message = "Error getting notifications " + id_list
        common.log_output(message, True, r.status_code)
    else:
        # Keep the response in its own name -- rebinding ``data`` would
        # shadow the config dict (the same pattern is a real bug in
        # fetch_backups).
        notifications = r.json()
        for notification in notifications:
            notification_id = notification.get("ID", -1)
            if notification_id in notification_ids:
                notification_list.append(notification)
    # Only get uses a filter
    if method == "get":
        notification_list = notification_filter(notification_list)
    return notification_list
def get_live_logs(data, level, page_size, first_id, output_type):
    """Poll the server for live log data and dump the newest entries.

    Args:
        data: config dict holding server connection settings.
        level: minimum log level to request.
        page_size: maximum number of entries to fetch/show.
        first_id: id to poll from.
        output_type: passed through to helper.output_dump.
    """
    baseurl = common.create_baseurl(data, "/api/v1/logdata/poll")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    params = {'level': level, 'id': first_id, 'pagesize': page_size}
    response = requests.get(baseurl, headers=headers, cookies=cookies,
                            params=params, verify=verify)
    common.check_response(data, response.status_code)
    # A 500 typically means the log database is busy.
    if response.status_code == 500:
        message = "Error getting log, "
        message += "database may be locked by backup"
        common.log_output(message, True)
        return
    if response.status_code != 200:
        common.log_output("Error getting log", True, response.status_code)
        return
    logs = []
    for entry in response.json()[-page_size:]:
        # Localize the timestamp before display.
        entry["When"] = helper.format_time(data, entry.get("When", ""))
        logs.append(entry)
    if not logs:
        common.log_output("No log entries found", True)
        return
    helper.output_dump(logs, output_type)
def create_backup_export(data, backup_id, output=None, path=None,
                         timestamp=False):
    """Export a backup configuration (including passwords) to a file.

    Args:
        data: config dict holding server connection settings.
        backup_id: id of the backup to export.
        output: "YAML"/"yaml" writes a .yml file; anything else .json.
        path: target directory, or None to write to the working directory.
        timestamp: when True, embed the current time in the file name.

    Exits with status 2 when the backup is missing or the server errors.
    """
    baseurl = common.create_baseurl(
        data,
        "/api/v1/backup/" + str(backup_id) + "/export?export-passwords=true")
    common.log_output("Fetching backup data from API...", False)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    r = requests.get(baseurl, headers=headers, cookies=cookies, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 404:
        common.log_output("Backup not found", True, r.status_code)
        sys.exit(2)
    elif r.status_code != 200:
        common.log_output("Error connecting", True, r.status_code)
        sys.exit(2)
    backup = r.json()
    name = backup['Backup']['Name']
    # YAML or JSON?
    filetype = ".yml" if output in ["YAML", "yaml"] else ".json"
    # Decide on where to output file
    if timestamp:
        stamp = datetime.datetime.now().strftime("%d.%m.%Y_%I.%M_%p")
        file_name = name + "_" + str(stamp) + filetype
    else:
        file_name = name + filetype
    if path is None:
        path = file_name
    else:
        path = common.ensure_trailing_slash(path)
        path = os.path.dirname(expanduser(path)) + "/" + file_name
    # Create the output folder if needed; exist_ok avoids the race between
    # the exists() check and makedirs().
    directory = os.path.dirname(path)
    if directory != '' and not os.path.exists(directory):
        message = "Created directory \"" + directory + "\""
        common.log_output(message, True)
        os.makedirs(directory, exist_ok=True)
    # Ask before clobbering an existing file (default answer is yes).
    if os.path.isfile(path):
        agree = input('File already exists, overwrite? [Y/n]:')
        if agree not in ["Y", "y", "yes", "YES", ""]:
            return
    with open(path, 'w') as file:
        if filetype == ".json":
            file.write(json.dumps(backup, indent=4, default=str))
        else:
            file.write(yaml.dump(backup, default_flow_style=False))
    common.log_output("Created " + path, True, 200)
def get_backup_logs(data, backup_id, log_type, page_size=5, show_all=False):
    """Fetch logs of log_type for one backup and print them as YAML.

    Args:
        data: config dict holding server connection settings.
        backup_id: id of the backup whose logs to fetch.
        log_type: log endpoint suffix (e.g. "log" or "remotelog").
        page_size: maximum number of log entries to fetch/show.
        show_all: when True, do not truncate long Message/Exception fields.
    """
    def truncate_field(log, field):
        # Split a newline-delimited field into a list of lines; unless
        # show_all is set, cap it at 15 lines plus a "hidden lines" marker.
        # (Shared behavior with get_stored_logs.)
        if log.get(field, None) is None:
            return
        lines = log[field].split("\n")
        if len(lines) > 15 and not show_all:
            hidden_message = (str(len(lines) - 15) +
                              " hidden lines (show with --all)")
            lines = lines[:15]
            lines.append(hidden_message)
        log[field] = lines

    endpoint = "/api/v1/backup/" + str(backup_id) + "/" + log_type
    baseurl = common.create_baseurl(data, endpoint)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    params = {'pagesize': page_size}
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     params=params, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 500:
        message = "Error getting log, "
        message += "database may be locked by backup"
        common.log_output(message, True)
        return
    elif r.status_code != 200:
        common.log_output("Error getting log", True, r.status_code)
        return
    result = r.json()[-page_size:]
    logs = []
    for log in result:
        if log.get("Operation", "") == "list":
            # Replace "list" operation payloads with a placeholder.
            log["Data"] = "Expunged"
        else:
            log["Data"] = json.loads(log.get("Data", "{}"))
            size = helper.format_bytes(log["Data"].get("Size", 0))
            log["Data"]["Size"] = size
        truncate_field(log, "Message")
        truncate_field(log, "Exception")
        log["Timestamp"] = datetime.datetime.fromtimestamp(
            int(log.get("Timestamp", 0))).strftime("%I:%M:%S %p %d/%m/%Y")
        logs.append(log)
    message = yaml.safe_dump(logs, default_flow_style=False,
                             allow_unicode=True)
    common.log_output(message, True)
def fetch_server_state(data):
    """Return the server state dict, or {} on any non-200 response."""
    baseurl = common.create_baseurl(data, "/api/v1/serverstate")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    response = requests.get(baseurl, headers=headers, cookies=cookies,
                            verify=verify)
    if response.status_code == 200:
        return response.json()
    return {}
def fetch_resource_list(data, resource):
    """GET /api/v1/<resource> and return the parsed JSON body.

    Exits with status 2 on any non-200 response.
    """
    baseurl = common.create_baseurl(data, "/api/v1/" + resource)
    common.log_output("Fetching " + resource + " list from API...", False)
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    response = requests.get(baseurl, headers=headers, cookies=cookies,
                            verify=verify)
    common.check_response(data, response.status_code)
    if response.status_code != 200:
        common.log_output("Error connecting", True, response.status_code)
        sys.exit(2)
    return response.json()
def get_stored_logs(data, page_size=5, show_all=False):
    """Fetch stored server logs and print them as YAML.

    Args:
        data: config dict holding server connection settings.
        page_size: maximum number of log entries to fetch/show.
        show_all: when True, do not truncate long Message/Exception fields.
    """
    def truncate_field(log, field):
        # Split a newline-delimited field into a list of lines; unless
        # show_all is set, cap it at 15 lines plus a "hidden lines" marker.
        # (Shared behavior with get_backup_logs.)
        if log.get(field, None) is None:
            return
        lines = log[field].split("\n")
        if len(lines) > 15 and not show_all:
            hidden_message = (str(len(lines) - 15) +
                              " hidden lines (show with --all)")
            lines = lines[:15]
            lines.append(hidden_message)
        log[field] = lines

    baseurl = common.create_baseurl(data, "/api/v1/logdata/log")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    params = {'pagesize': page_size}
    r = requests.get(baseurl, headers=headers, cookies=cookies,
                     params=params, verify=verify)
    common.check_response(data, r.status_code)
    if r.status_code == 500:
        message = "Error getting log, "
        message += "database may be locked by backup"
        common.log_output(message, True)
        return
    elif r.status_code != 200:
        common.log_output("Error getting log", True, r.status_code)
        return
    result = r.json()[-page_size:]
    logs = []
    for log in result:
        truncate_field(log, "Message")
        truncate_field(log, "Exception")
        logs.append(log)
    if len(logs) == 0:
        common.log_output("No log entries found", True)
        return
    message = yaml.safe_dump(logs, default_flow_style=False,
                             allow_unicode=True)
    common.log_output(message, True)
# NOTE(review): DEAD CODE -- this definition of fetch_progress_state is
# immediately shadowed by the re-definition directly below it (which adds a
# completed-phase check). Only the later definition is ever callable.
# Kept byte-identical here; the duplicate should be deleted.
def fetch_progress_state(data): baseurl = common.create_baseurl(data, "/api/v1/progressstate") cookies = common.create_cookies(data) headers = common.create_headers(data) verify = data.get("server", {}).get("verify", True) # Check progress state and get info for the running backup r = requests.get(baseurl, headers=headers, cookies=cookies, verify=verify) if r.status_code != 200: active_id = -1 progress_state = {} else: progress_state = r.json() active_id = progress_state.get("BackupID", -1) return progress_state, active_id
def fetch_progress_state(data):
    """Return (progress_state, active_backup_id) from the server.

    Returns ({}, -1) when the request fails, and ({}, 0) when the reported
    phase shows the task already finished.
    """
    baseurl = common.create_baseurl(data, "/api/v1/progressstate")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    # Check progress state and get info for the running backup
    response = requests.get(baseurl, headers=headers, cookies=cookies,
                            verify=verify)
    if response.status_code != 200:
        return {}, -1
    progress_state = response.json()
    active_id = progress_state.get("BackupID", -1)
    # Don't show progress on finished tasks
    if progress_state.get("Phase", "") in ("Backup_Complete", "Error"):
        return {}, 0
    return progress_state, active_id
def fetch_backups(data, backup_ids, method):
    """Fetch info for each backup id in backup_ids.

    Args:
        data: config dict holding server connection settings and the token.
        backup_ids: iterable of backup ids to fetch.
        method: when "get", the result is passed through backup_filter.

    Returns:
        List of backup dicts, each with a "Progress" key attached.
        Exits with status 2 if no backup could be fetched.
    """
    common.verify_token(data)
    common.log_output("Fetching backups from API...", False)
    progress_state, active_id = fetch_progress_state(data)
    progress = progress_state.get("OverallProgress", 1)
    backup_list = []
    baseurl = common.create_baseurl(data, "/api/v1/backup/")
    cookies = common.create_cookies(data)
    headers = common.create_headers(data)
    verify = data.get("server", {}).get("verify", True)
    # Iterate over backup_ids and fetch their info
    for backup_id in backup_ids:
        r = requests.get(baseurl + str(backup_id), headers=headers,
                         cookies=cookies, verify=verify)
        common.check_response(data, r.status_code)
        if r.status_code != 200:
            message = "Error getting backup " + str(backup_id)
            common.log_output(message, True, r.status_code)
            continue
        # BUG FIX: the original rebound ``data`` (the config dict) to the
        # response payload here, so common.check_response received the
        # previous backup's JSON instead of the config on every iteration
        # after the first.
        backup = r.json()["data"]
        item_id = backup.get("Backup", {}).get("ID", 0)
        # Attach live progress info only to the currently running backup.
        if active_id is not None and item_id == active_id and progress != 1:
            backup["Progress"] = progress_state
        else:
            backup["Progress"] = None
        backup_list.append(backup)
    if len(backup_list) == 0:
        sys.exit(2)
    # Only get uses a filter
    if method == "get":
        backup_list = backup_filter(backup_list)
    return backup_list
# Log in to the server: parse the target URL, fall back to config values,
# handle http->https redirects and HTTP basic auth, then perform the
# nonce/salt password handshake and store the resulting token/session in
# the config via common.write_config.
#
# NOTE(review): this block appears CORRUPTED by automated secret-scrubbing:
# the literal sequences "'******'" around the basic-auth username/password
# prompts have replaced real code (statements are fused together and at
# least one string literal is split mid-token), and the
# 'common.log_output("Authenticating... ' literal is broken across two
# physical lines. The code below is preserved byte-identical; it must be
# restored from upstream history before it can run.
def login(data, input_url=None, password=None, verify=True, interactive=True, basic_user=None, basic_pass=None): if input_url is None: input_url = "" # Split protocol, url, and port input_url = input_url.replace("/", "").replace("_", "") count = input_url.count(":") protocol = "" url = "" port = "" if count == 2: protocol, url, port = input_url.split(":") elif count == 1 and input_url.index(":") < 6: protocol, url = input_url.split(":") elif count == 1: url, port = input_url.split(":") elif count == 0: url = input_url else: common.log_output("Invalid URL", True) sys.exit(2) # Strip nondigits port = ''.join(re.findall(r'\d+', port)) # Default to config file values for any missing parameters if protocol is None or protocol.lower() not in ["http", "https"]: protocol = data["server"]["protocol"] if url is None or url == "": url = data["server"]["url"] if port is None or port == "": port = data["server"]["port"] # Update config data["server"]["protocol"] = protocol data["server"]["url"] = url data["server"]["port"] = port # Make the login attempt baseurl = common.create_baseurl(data, "") common.log_output("Connecting to " + baseurl + "...", False) r = requests.get(baseurl, allow_redirects=True, verify=verify) common.check_response(data, r.status_code) # Detect if we were redirected to https if "https://" in r.url and protocol != "https": data["server"]["protocol"] = "https" common.log_output("Redirected from http to https", True) # Detect if we're prompted for basic authentication auth_method = r.headers.get('WWW-Authenticate', False) if (auth_method): common.log_output('Basic authentication required...', False) if basic_user is None and interactive: basic_user = input('Basic username: '******'You must provide a basic auth username, --basic-user' common.log_output(message, True) sys.exit(2) if basic_pass is None and interactive: basic_pass = getpass.getpass('Basic password:'******'ascii')) # Create the authorization string basic_auth = "Basic " + secret.decode('utf-8') 
headers = {"Authorization": basic_auth} r = requests.get(baseurl, verify=verify, headers=headers, allow_redirects=True) common.check_response(data, r.status_code) if r.status_code == 200: common.log_output('Passed basic auth', False) # Update basic auth secret in config file data['authorization'] = basic_auth # Detect if we were prompted to login login_redirect = "/login.html" in r.url if r.status_code == 200 and not login_redirect: common.log_output("OK", False, r.status_code) token = urllib.parse.unquote(r.cookies["xsrf-token"]) elif r.status_code == 200 and login_redirect: password = prompt_password(password, interactive) common.log_output("Getting nonce and salt...", False) baseurl = common.create_baseurl(data, "/login.cgi") headers = common.create_headers(data) payload = {'get-nonce': 1} r = requests.post(baseurl, headers=headers, data=payload, verify=verify) if r.status_code != 200: common.log_output("Error getting salt from server", True, r.status_code) sys.exit(2) salt = r.json()["Salt"] data["nonce"] = urllib.parse.unquote(r.json()["Nonce"]) token = urllib.parse.unquote(r.cookies["xsrf-token"]) common.log_output("Hashing password...", False) salt_password = password.encode() + base64.b64decode(salt) saltedpwd = hashlib.sha256(salt_password).digest() nonce_password = base64.b64decode(data["nonce"]) + saltedpwd noncedpwd = hashlib.sha256(nonce_password).digest() common.log_output("Authenticating... 
", False) payload = { "password": base64.b64encode(noncedpwd).decode('utf-8') } cookies = { "xsrf-token": token, "session-nonce": data.get("nonce", "") } r = requests.post(baseurl, headers=headers, data=payload, cookies=cookies, verify=verify) common.check_response(data, r.status_code) if r.status_code == 200: common.log_output("Connected", False, r.status_code) data["session-auth"] = urllib.parse.unquote( r.cookies["session-auth"]) else: message = "Error authenticating against the server" common.log_output(message, True, r.status_code) sys.exit(2) else: message = "Error connecting to server" common.log_output(message, True, r.status_code) sys.exit(2) # Update the config file with provided values data["token"] = token expiration = datetime.datetime.now() + datetime.timedelta(0, 600) data["token_expires"] = expiration data["last_login"] = datetime.datetime.now() common.write_config(data) common.log_output("Login successful", True) return data