def ssh_execute_commands(cls, ssh_clients: List[SshClient], ssh_type: SshTypes,
                         command_list: List[SshCommand]) -> List[Tuple[str, List[Dict[str, Any]]]]:
    """Runs the given commands via ssh on every host of the given type.

    The hosts (other, vsnap, vadp) can be defined in the JSON configuration
    file. Commands to be executed on vsnap and/or vadp proxies go into the
    dedicated lists of strings; 'otherCommands' is a list of commands which
    are executed on hosts which are not of type vsnap | vadp.
    Hosts which are not reachable are skipped.
    """
    if(not command_list):
        LOGGER.debug("No commands specified, aborting command.")
        if(cls.verbose):
            LOGGER.info("No commands specified, aborting command.")
        return []

    # only talk to the clients matching the requested ssh type
    matching_clients = [client for client in ssh_clients if client.client_type is ssh_type]
    if(not matching_clients):
        LOGGER.debug(f"No {ssh_type.name} ssh client present. Aborting command")
        if(cls.verbose):
            LOGGER.info(f"No {ssh_type.name} ssh client present. Aborting command")
        return []

    raw_response_rows: List[Dict[str, Any]] = []
    parsed_tables: List[Tuple[str, List[Dict[str, Any]]]] = []
    for client in matching_clients:
        if(cls.verbose):
            LOGGER.info(f">> executing {ssh_type.name} command(s) on host {client.host_name}")

        try:
            executed_commands = client.execute_commands(
                commands=command_list,
                verbose=cls.verbose
            )
        except ValueError as error:
            ExceptionUtils.exception_info(error=error, extra_message="Error when executing commands, skipping this client")
            continue

        for executed_command in executed_commands:
            # keep the raw output of every command for the response table
            time_key, time_value = SppUtils.get_capture_timestamp_sec()
            raw_response_rows.append({
                "host": executed_command.host_name,
                "command": executed_command.cmd,
                "output": json.dumps(executed_command.result),
                "ssh_type": ssh_type.name,
                time_key: time_value,
            })

            # parse the result into a per-table insert tuple; a failed parse
            # only skips this single result
            try:
                parsed_tables.append(executed_command.parse_result(ssh_type=ssh_type))
            except ValueError as error:
                ExceptionUtils.exception_info(error=error, extra_message="Error when parsing result, skipping parsing of this result")

    parsed_tables.append(("sshCmdResponse", raw_response_rows))

    return parsed_tables
def _parse_mpstat_cmd(
        ssh_command: SshCommand, ssh_type: SshTypes) -> Tuple[str, List[Dict[str, Any]]]:
    """Parses the result of the `mpstat` command, splitting it into its parts.

    Arguments:
        ssh_command {SshCommand} -- command with saved result
        ssh_type {SshTypes} -- type of the client

    Raises:
        ValueError: no command given or no result saved
        ValueError: no ssh type given
        ValueError: no table name set to insert the parsed values
        ValueError: result is not in the expected mpstat format

    Returns:
        Tuple[str, List[Dict[str, Any]]] -- Tuple of the tablename and a insert list
    """
    if (not ssh_command or not ssh_command.result):
        raise ValueError("no command given or empty result")
    if (not ssh_type):
        raise ValueError("no sshtype given")
    # consistency fix: validate the table name before using it, like the
    # sibling parse methods do
    if (not ssh_command.table_name):
        raise ValueError("need table name to insert parsed value")

    # first mpstat line: "<os> (<host>) <MM/DD/YYYY> <arch> (<n> CPU)"
    pattern = re.compile(
        r"(.*)\s+\((.*)\)\s+(\d{2}\/\d{2}\/\d{4})\s+(\S*)\s+\((\d+)\sCPU\)"
    )

    result_lines = ssh_command.result.splitlines()

    header = result_lines[2].split()
    # rename to make possible to identify
    header[0] = "time"
    header[1] = "am/pm"
    values: Dict[str, Any] = dict(zip(header, result_lines[3].split()))

    # drop, it is easier to use our own time
    values.pop('time')
    values.pop('am/pm')

    # set default needed fields
    values['hostName'] = ssh_command.host_name
    values['ssh_type'] = ssh_type.name
    (time_key, time_value) = SppUtils.get_capture_timestamp_sec()
    values[time_key] = time_value

    # zip between the exec information and the names for the matching group
    match = pattern.match(result_lines[0])
    if (not match):
        raise ValueError(
            "the mpstat values are not in the expected pattern",
            result_lines, ssh_command, ssh_type)
    for (key, value) in zip(
            ["name", "host", "date", "system_type", "cpu_count"],
            match.groups()):
        values[key] = value
    # replace it with capture time
    values.pop('date')

    return (ssh_command.table_name, [values])
def get_vms_per_sla(self) -> List[Dict[str, Any]]:
    """Retrieves and calculates all vmware VM counts per SLA.

    Queries every SLA policy, then queries per policy how many VMs are
    associated with it via the hypervisor search endpoint. SLA entries
    without a name are skipped.

    Returns:
        List[Dict[str, Any]] -- one insert dict per SLA policy
    """
    endpoint = "/ngp/slapolicy"
    white_list = ["name", "id"]
    array_name = "slapolicies"
    sla_policty_list = self.__rest_client.get_objects(
        endpoint=endpoint, white_list=white_list,
        array_name=array_name, add_time_stamp=False)

    result_list: List[Dict[str, Any]] = []
    for sla_policty in sla_policty_list:
        try:
            sla_name: str = sla_policty["name"]
        except KeyError as error:
            ExceptionUtils.exception_info(
                error, extra_message="skipping one sla entry due missing name.")
            continue
        sla_id: Optional[str] = sla_policty.get("id", None)

        result_dict: Dict[str, Any] = {}

        ## hotadd:
        sla_name = urllib.parse.quote_plus(sla_name)

        endpoint = "/api/hypervisor/search"
        endpoint = ConnectionUtils.url_set_param(
            url=endpoint, param_name="resourceType", param_value="vm")
        endpoint = ConnectionUtils.url_set_param(
            url=endpoint, param_name="from", param_value="hlo")
        # build the filter via json.dumps instead of hand-concatenating the
        # JSON string, so special characters in the sla name cannot break
        # the JSON structure
        filter_str: str = json.dumps(
            [{"property": "storageProfileName", "value": sla_name, "op": "="}])
        endpoint = ConnectionUtils.url_set_param(
            url=endpoint, param_name="filter", param_value=filter_str)

        # note: currently only vmware is queried per sla, not hyperV
        # need to check if hypervisortype must be specified
        post_data = json.dumps({"name": "*", "hypervisorType": "vmware"})
        response_json = self.__rest_client.post_data(endpoint=endpoint, post_data=post_data)

        result_dict["slaName"] = sla_name
        result_dict["slaId"] = sla_id
        result_dict["vmCountBySLA"] = response_json.get("total")

        # renamed from `time` to avoid shadowing the `time` module
        time_key, time_value = SppUtils.get_capture_timestamp_sec()
        result_dict[time_key] = time_value

        result_list.append(result_dict)
    return result_list
def _parse_df_cmd(ssh_command: SshCommand, ssh_type: SshTypes) -> Tuple[str, List[Dict[str, Any]]]:
    """Parses the result of the `df` command, splitting it into its parts.

    Arguments:
        ssh_command {SshCommand} -- command with saved result
        ssh_type {SshTypes} -- type of the client

    Raises:
        ValueError: no command given or no result saved
        ValueError: no ssh type given
        ValueError: no table name to insert the parsed values

    Returns:
        Tuple[str, List[Dict[str, Any]]] -- Tuple of the tablename and a insert list
    """
    if (not ssh_command or not ssh_command.result):
        raise ValueError("no command given or empty result")
    if (not ssh_type):
        raise ValueError("no sshtype given")
    if (not ssh_command.table_name):
        raise ValueError("need table name to insert parsed value")

    result_lines = ssh_command.result.splitlines()
    header = result_lines[0].split()
    # remove "on" ("Mounted on" splits into two tokens for one column)
    header.pop()
    values: List[Dict[str, Any]] = list(
        map(lambda row: dict(zip(header, row.split())), result_lines[1:]))  # type: ignore
    for row in values:
        # normalize size columns (df may print "1G-blocks"/"Avail")
        if ("1G-blocks" in row):
            row["Size"] = row.pop("1G-blocks")
        row["Size"] = SppUtils.parse_unit(row['Size'])
        if ("Avail" in row):
            row["Available"] = row.pop("Avail")
        row["Available"] = SppUtils.parse_unit(row['Available'])
        row["Used"] = SppUtils.parse_unit(row['Used'])
        # strip the trailing '%' sign
        row["Use%"] = row["Use%"][:-1]

        # set default needed fields
        row['hostName'] = ssh_command.host_name
        # BUGFIX: store the enum's name (a str) like every other parse
        # method does, instead of the SshTypes member itself
        row['ssh_type'] = ssh_type.name
        (time_key, time_value) = SppUtils.get_capture_timestamp_sec()
        row[time_key] = time_value

    return (ssh_command.table_name, values)
def __init__(self):
    """Sets up sppmon: logger, pid-file check, argument and config parsing.

    Exits the whole program via self.exit with an error code when another
    instance is already running, the config file is missing, or the config
    file cannot be parsed.
    """
    self.log_path: str = ""
    """path to logger, set in set_logger."""
    self.pid_file_path: str = ""
    """path to pid_file, set in check_pid_file."""

    # String, cause of days etc
    self.job_log_retention_time = "60d"
    """Configured spp log rentation time, logs get deleted after this time."""

    self.set_logger()

    # abort when another sppmon instance with the same args is active
    if (not self.check_pid_file()):
        ExceptionUtils.error_message(
            "Another instance of sppmon with the same args is running")
        self.exit(ERROR_CODE_CMD_LINE)

    # everything is option, otherwise its a typo.
    if (len(ARGS) > 0):
        ExceptionUtils.error_message(
            f"CAREFUL: ARG DETECTED, probably typing in programm call: {ARGS}"
        )

    time_stamp_name, time_stamp = SppUtils.get_capture_timestamp_sec()
    # start counter for the total-runtime metric
    self.start_counter = time.perf_counter()
    LOGGER.debug("\n\n")
    LOGGER.debug(f"running script version: {VERSION}")
    LOGGER.debug(f"cmdline options: {OPTIONS}")
    LOGGER.debug(f"{time_stamp_name}: {time_stamp}")
    LOGGER.debug("")

    if (not OPTIONS.confFileJSON):
        ExceptionUtils.error_message("missing config file, aborting")
        self.exit(error_code=ERROR_CODE_CMD_LINE)
    try:
        config_file = SppUtils.read_conf_file(
            config_file_path=OPTIONS.confFileJSON)
    except ValueError as error:
        ExceptionUtils.exception_info(
            error=error,
            extra_message="Syntax Error in Config file, unable to read")
        self.exit(error_code=ERROR_CODE_CMD_LINE)

    self.setup_args()
    self.set_critial_configs(config_file)
    self.set_optional_configs(config_file)
def _parse_free_cmd(
        ssh_command: SshCommand, ssh_type: SshTypes) -> Tuple[str, List[Dict[str, Any]]]:
    """Parses the result of the `free` command, splitting it into its parts.

    Arguments:
        ssh_command {SshCommand} -- command with saved result
        ssh_type {SshTypes} -- type of the client

    Raises:
        ValueError: no command given or no result saved
        ValueError: no ssh type given
        ValueError: no table name to insert the parsed values

    Returns:
        Tuple[str, List[Dict[str, Any]]] -- Tuple of the tablename and a insert list
    """
    if (not ssh_command or not ssh_command.result):
        raise ValueError("no command given or empty result")
    if (not ssh_type):
        raise ValueError("no sshtype given")
    if (not ssh_command.table_name):
        raise ValueError("need table name to insert parsed value")

    output_lines = ssh_command.result.splitlines()

    # the row-label column (Mem:/Swap:) has no caption -> prepend one
    column_names = ['name'] + output_lines[0].split()

    rows: List[Dict[str, Any]] = [
        dict(zip(column_names, line.split())) for line in output_lines[1:]]  # type: ignore

    (time_key, _) = SppUtils.get_capture_timestamp_sec()
    for entry in rows:
        # remove ':' from name
        entry['name'] = entry['name'][:-1]

        # set default needed fields
        entry['hostName'] = ssh_command.host_name
        entry['ssh_type'] = ssh_type.name
        entry[time_key] = SppUtils.get_actual_time_sec()

        # recalculate values to be more usefull
        if ('available' in entry):
            entry['free'] = int(entry.pop('available'))
            entry['used'] = int(entry['total']) - int(entry['free'])

    return (ssh_command.table_name, rows)
def _parse_ps_cmd(self, ssh_command: SshCommand, ssh_type: SshTypes) -> Tuple[str, List[Dict[str, Any]]]:
    """Parses the result of the `ps` command, splitting it into its parts.

    Arguments:
        ssh_command {SshCommand} -- command with saved result
        ssh_type {SshTypes} -- type of the client

    Raises:
        ValueError: no command given or no result saved
        ValueError: no ssh type given
        ValueError: no table name to insert the parsed values

    Returns:
        Tuple[str, List[Dict[str, Any]]] -- Tuple of the tablename and a insert list
    """
    if(not ssh_command or not ssh_command.result):
        raise ValueError("no command given or empty result")
    if(not ssh_type):
        raise ValueError("no sshtype given")
    if(not ssh_command.table_name):
        raise ValueError("need table name to insert parsed value")

    output_lines = ssh_command.result.splitlines()
    captions = output_lines[0].split()
    rows: List[Dict[str, Any]] = [
        dict(zip(captions, line.split())) for line in output_lines[1:]]  # type: ignore

    # remove `ps` from commands, it is also tracked
    rows = [row for row in rows if row["COMMAND"] in self.__process_grep_list]

    for row in rows:
        # Remove CPU, it is tracked by TOP-Command (see Issue #71)
        row.pop("%CPU", None)

        # Add information
        row["collectionType"] = "PS"

        # set default needed fields
        row['hostName'] = ssh_command.host_name
        row['ssh_type'] = ssh_type.name
        (time_key, time_value) = SppUtils.get_capture_timestamp_sec()
        row[time_key] = time_value

        # rename/convert columns into the units used in the database
        row['TIME+'] = row.pop('ELAPSED')
        row['MEM_ABS'] = SppUtils.parse_unit(row.pop("RSS"), "kib")
        row['VIRT'] = SppUtils.parse_unit(row.pop('VSZ'), "kib")

    return (ssh_command.table_name, rows)
def _parse_system_stats_cmd(
        ssh_command: SshCommand, ssh_type: SshTypes) -> Tuple[str, List[Dict[str, Any]]]:
    """Parses the result of the `vsnap --json system stats` command, splitting it into its parts.

    Arguments:
        ssh_command {SshCommand} -- command with saved result
        ssh_type {SshTypes} -- type of the client

    Raises:
        ValueError: no command given or no result saved
        ValueError: no ssh type given
        ValueError: no table name to insert the parsed values
        ValueError: result is not decodable json or contains only None values

    Returns:
        Tuple[str, List[Dict[str, Any]]] -- Tuple of the tablename and a insert list
    """
    if (not ssh_command or not ssh_command.result):
        raise ValueError("no command given or empty result")
    if (not ssh_type):
        raise ValueError("no sshtype given")
    if (not ssh_command.table_name):
        raise ValueError("need table name to insert parsed value")

    try:
        insert_dict: Dict[str, Any] = json.loads(ssh_command.result)
    except json.decoder.JSONDecodeError as error:  # type: ignore
        # chain the original decode error for easier debugging
        raise ValueError("cant decode json for system stats command",
                         ssh_command.result, ssh_command, ssh_type) from error

    # reject results which carry no information at all
    if (all(value is None for value in insert_dict.values())):
        raise ValueError(
            "Command and result given, but all values are None")

    # set default needed fields
    insert_dict['hostName'] = ssh_command.host_name
    insert_dict['ssh_type'] = ssh_type.name
    (time_key, time_value) = SppUtils.get_capture_timestamp_sec()
    insert_dict[time_key] = time_value

    return (ssh_command.table_name, [insert_dict])
def __init__(self):
    """Sets up sppmon: logger, pid-file check, argument and config parsing.

    Exits the whole program via self.exit with an error code when another
    instance is already running, the config file is missing, or the config
    file cannot be read.
    """
    self.log_path: str = ""
    """path to logger, set in set_logger."""
    self.pid_file_path: str = ""
    """path to pid_file, set in check_pid_file."""

    self.set_logger()
    LOGGER.info("Starting SPPMon")

    # abort when another sppmon instance with the same args is active
    if (not self.check_pid_file()):
        ExceptionUtils.error_message(
            "Another instance of sppmon with the same args is running")
        self.exit(ERROR_CODE_START_ERROR)

    time_stamp_name, time_stamp = SppUtils.get_capture_timestamp_sec()
    # start counter for the total-runtime metric
    self.start_counter = time.perf_counter()
    LOGGER.debug("\n\n")
    LOGGER.debug(f"running script version: {VERSION}")
    LOGGER.debug(f"cmdline options: {ARGS}")
    LOGGER.debug(f"{time_stamp_name}: {time_stamp}")
    LOGGER.debug("")

    if (not ARGS.configFile):
        ExceptionUtils.error_message("missing config file, aborting")
        self.exit(error_code=ERROR_CODE_CMD_ARGS)
    try:
        self.config_file = SppUtils.read_conf_file(
            config_file_path=ARGS.configFile)
    except ValueError as error:
        ExceptionUtils.exception_info(
            error=error,
            extra_message=
            "Error when trying to read Config file, unable to read")
        self.exit(error_code=ERROR_CODE_START_ERROR)

    LOGGER.info("Setting up configurations")
    self.setup_args()
    self.set_critial_configs(self.config_file)
    self.set_optional_configs(self.config_file)
def store_script_metrics(self) -> None:
    """Stores script metrics into influxb. To be called before exit.

    Does not raise any exceptions, skips if influxdb is missing.
    """
    LOGGER.info("Storing script metrics")
    try:
        if (not self.influx_client):
            raise ValueError("no influxClient set up")
        insert_dict: Dict[str, Union[str, int, float, bool]] = {}

        # add version nr, api calls are needed
        insert_dict["sppmon_version"] = VERSION
        insert_dict["influxdb_version"] = self.influx_client.version
        if (self.rest_client):
            try:
                (version_nr, build) = self.rest_client.get_spp_version_build()
                insert_dict["spp_version"] = version_nr
                insert_dict["spp_build"] = build
            except ValueError as error:
                ExceptionUtils.exception_info(
                    error=error,
                    extra_message="could not query SPP version and build.")

        # end total sppmon runtime
        end_counter = time.perf_counter()
        insert_dict['duration'] = int(
            (end_counter - self.start_counter) * 1000)

        # add arguments of sppmon
        for (key, value) in vars(ARGS).items():
            # Value is either string, true or false/None
            if (value):
                insert_dict[key] = value

        # save occured errors
        error_count = len(ExceptionUtils.stored_errors)
        if (error_count > 0):
            ExceptionUtils.error_message(
                f"total of {error_count} exception/s occured")
        insert_dict['errorCount'] = error_count
        # save list as str if not empty
        if (ExceptionUtils.stored_errors):
            insert_dict['errorMessages'] = str(
                ExceptionUtils.stored_errors)

        # get end timestamp
        (time_key, time_val) = SppUtils.get_capture_timestamp_sec()
        insert_dict[time_key] = time_val
        # save the metrics
        self.influx_client.insert_dicts_to_buffer(
            table_name="sppmon_metrics", list_with_dicts=[insert_dict])
        self.influx_client.flush_insert_buffer()
        LOGGER.info("Stored script metrics sucessfull")
        # + 1 due the "total of x exception/s occured"
        # NOTE(review): when error_count was 0, no "total of ..." message was
        # appended, so a single new error raised during the insert would not
        # trip this check -- confirm whether that is intended.
        if (error_count + 1 < len(ExceptionUtils.stored_errors)):
            ExceptionUtils.error_message(
                "A non-critical error occured while storing script metrics. \n\
This error can't be saved into the DB, it's only displayed within the logs."
            )
    except ValueError as error:
        ExceptionUtils.exception_info(
            error=error,
            extra_message=
            "Error when storing sppmon-metrics, skipping this step. Possible insert-buffer data loss"
        )
def get_objects(self,
                endpoint: Optional[str] = None, uri: Optional[str] = None,
                array_name: Optional[str] = None,
                white_list: Optional[List[str]] = None, ignore_list: Optional[List[str]] = None,
                add_time_stamp: bool = False) -> List[Dict[str, Any]]:
    """Querys a response(-list) from a REST-API endpoint or URI.

    Specify `array_name` if there are multiple results / list.
    Use white_list to pick only the values specified.
    Use ignore_list to pick everything but the values specified.
    Both: white_list items overwrite ignore_list items, still getting all not filtered.
    Follows pagination links until exhausted; the internal page size is
    adjusted on the fly based on each request's response time.

    Note: Do not specify both endpoint and uri, only uri will be used

    Keyword Arguments:
        endpoint {str} -- endpoint to be queried. Either use this or uri (default: {None})
        uri {str} -- uri to be queried. Either use this or endpoint (default: {None})
        array_name {str} -- name of array if there are multiple results wanted (default: {None})
        white_list {list} -- list of item to query (default: {None})
        ignore_list {list} -- query all but these items(-groups). (default: {None})
        add_time_stamp {bool} -- whether to add the capture timestamp (default: {False})

    Raises:
        ValueError: Neither a endpoint nor uri is specfied
        ValueError: array_name is specified but it is only a single object

    Returns:
        {List[Dict[str, Any]]} -- List of dictonarys as the results
    """
    if(not endpoint and not uri):
        raise ValueError("neiter endpoint nor uri specified")
    if(endpoint and uri):
        LOGGER.debug("added both endpoint and uri. This is unneccessary, endpoint is ignored")
    # if neither specifed, get everything
    if(not white_list and not ignore_list):
        ignore_list = []

    # create uri out of endpoint
    if(not uri):
        next_page = self.__srv_url + endpoint
    else:
        next_page = uri

    result_list: List[Dict[str, Any]] = []

    # Aborts if no nextPage is found
    while(next_page):
        LOGGER.debug(f"Collected {len(result_list)} items until now. Next page: {next_page}")
        if(self.__verbose):
            LOGGER.info(f"Collected {len(result_list)} items until now. Next page: {next_page}")
        # Request response
        (response, send_time) = self.__query_url(url=next_page)

        # find follow page if available and set it
        (_, next_page_link) = SppUtils.get_nested_kv(key_name="links.nextPage.href", nested_dict=response)
        next_page = next_page_link

        # Check if single object or not
        if(array_name):
            # get results for this page, if empty nothing happens
            page_result_list: Optional[List[Dict[str, Any]]] = response.get(array_name, None)
            if(page_result_list is None):
                raise ValueError("array_name does not exist, this is probably a single object")
        else:
            page_result_list = [response]

        filtered_results = ConnectionUtils.filter_values_dict(
            result_list=page_result_list,
            white_list=white_list,
            ignore_list=ignore_list)

        if(add_time_stamp): # direct time add to make the timestamps represent the real capture time
            for mydict in filtered_results:
                time_key, time_val = SppUtils.get_capture_timestamp_sec()
                mydict[time_key] = time_val
        result_list.extend(filtered_results)

        # adjust pagesize when the request was too slow or the page was full
        if(send_time > self.__preferred_time or len(page_result_list) == self.__page_size):
            self.__page_size = ConnectionUtils.adjust_page_size(
                page_size=len(page_result_list),
                min_page_size=self.__min_page_size,
                preferred_time=self.__preferred_time,
                send_time=send_time)

    LOGGER.debug("objectList size %d", len(result_list))
    return result_list
def _parse_top_cmd(ssh_command: SshCommand, ssh_type: SshTypes) -> Tuple[str, List[Dict[str, Any]]]:
    """Parses the result of the `top` command, splitting it into its parts.

    Arguments:
        ssh_command {SshCommand} -- command with saved result
        ssh_type {SshTypes} -- type of the client

    Raises:
        ValueError: no command given or no result saved
        ValueError: no ssh type given
        ValueError: no table name set to insert the parsed values

    Returns:
        Tuple[str, List[Dict[str, Any]]] -- Tuple of the tablename and a insert list
    """
    if (not ssh_command or not ssh_command.result):
        raise ValueError("no command given or empty result")
    if (not ssh_type):
        raise ValueError("no sshtype given")
    # consistency fix: validate the table name before using it, like the
    # sibling parse methods do
    if (not ssh_command.table_name):
        raise ValueError("need table name to insert parsed value")

    result_lines = ssh_command.result.splitlines()

    header = result_lines[6].split()
    values: List[Dict[str, Any]] = list(
        map(lambda row: dict(zip(header, row.split())), result_lines[7:]))  # type: ignore

    # total memory is taken from top's own "MiB Mem" summary line
    ram_line = result_lines[3].split()
    total_mem = SppUtils.parse_unit(data=ram_line[3], given_unit="KiB")

    # TIME+ format: "minutes:seconds.hundredths" (hundredths optional)
    time_pattern = re.compile(r"(\d+):(\d{2})(?:\.(\d{2}))?")

    # remove top statistic itself to avoid spam with useless information
    values = list(
        filter(
            lambda row: row["COMMAND"] in ["mongod", "beam.smp", "java"],
            values))
    for row in values:
        # set default needed fields
        row['hostName'] = ssh_command.host_name
        row['ssh_type'] = ssh_type.name
        (time_key, time_value) = SppUtils.get_capture_timestamp_sec()
        row[time_key] = time_value

        # split time into seconds
        match = time_pattern.match(row['TIME+'])
        if (match):
            (hours, minutes, seconds) = match.groups()
            if (seconds is None):
                seconds = 0
            time = int(hours) * 3600 + int(minutes) * 60 + int(seconds)
        else:
            time = None
        row['TIME+'] = time
        row['MEM_ABS'] = int((float(row['%MEM']) * total_mem) / 100)
        row['SHR'] = SppUtils.parse_unit(row['SHR'])
        row['RES'] = SppUtils.parse_unit(row['RES'])
        row['VIRT'] = SppUtils.parse_unit(row['VIRT'])

    return (ssh_command.table_name, values)
def get_objects(self,
                endpoint: str = None, uri: str = None,
                params: Dict[str, Any] = None,
                post_data: Dict[str, Any] = None,
                request_type: RequestType = RequestType.GET,
                array_name: str = None,
                allow_list: List[str] = None, ignore_list: List[str] = None,
                add_time_stamp: bool = False) -> List[Dict[str, Any]]:
    """Querys a response(-list) from a REST-API endpoint or URI from multiple pages

    Specify `array_name` if there are multiple results / list.
    Use allow_list to pick only the values specified.
    Use ignore_list to pick everything but the values specified.
    Both: allow_list items overwrite ignore_list items, still getting all not filtered.
    Param pageSize is only guranteed to be valid for the first page if included within params.

    Note: Do not specify both endpoint and uri, only uri will be used

    Keyword Arguments:
        endpoint {str} -- endpoint to be queried. Either use this or uri (default: {None})
        uri {str} -- uri to be queried. Either use this or endpoint (default: {None})
        params {Dict[str, Any]} -- Dictionary with all URL-Parameters. pageSize only guranteed to be valid for first page (default: {None})
        post_data {Dict[str, Any]} -- Dictionary with Body-Data. Only use on POST-Requests
        request_type: {RequestType} -- Either GET or POST
        array_name {str} -- name of array if there are multiple results wanted (default: {None})
        allow_list {list} -- list of item to query (default: {None})
        ignore_list {list} -- query all but these items(-groups). (default: {None})
        add_time_stamp {bool} -- whether to add the capture timestamp (default: {False})

    Raises:
        ValueError: Neither a endpoint nor uri is specfied
        ValueError: Negative or 0 pagesize
        ValueError: array_name is specified but it is only a single object

    Returns:
        {List[Dict[str, Any]]} -- List of dictonarys as the results
    """
    if(not endpoint and not uri):
        raise ValueError("neiter endpoint nor uri specified")
    if(endpoint and uri):
        LOGGER.debug("added both endpoint and uri. This is unneccessary, uri is ignored")
    # if neither specifed, get everything
    if(not allow_list and not ignore_list):
        ignore_list = []
    if(params is None):
        params = {}

    # create uri out of endpoint
    if(endpoint):
        next_page = self.get_url(endpoint)
    else:
        next_page = uri

    result_list: List[Dict[str, Any]] = []

    # Aborts if no nextPage is found
    while(next_page):
        LOGGER.debug(f"Collected {len(result_list)} items until now. Next page: {next_page}")
        if(self.__verbose):
            LOGGER.info(f"Collected {len(result_list)} items until now. Next page: {next_page}")
        # Request response
        (response, send_time) = self.query_url(next_page, params, request_type, post_data)

        # find follow page if available and set it
        (_, next_page_link) = SppUtils.get_nested_kv(key_name="links.nextPage.href", nested_dict=response)
        next_page: Optional[str] = next_page_link
        if(next_page):
            # Overwrite params with params from next link
            params = ConnectionUtils.get_url_params(next_page)
            # remove params from page
            next_page = ConnectionUtils.url_set_params(next_page, None)

    # Check if single object or not
        if(array_name):
            # get results for this page, if empty nothing happens
            page_result_list: Optional[List[Dict[str, Any]]] = response.get(array_name, None)
            if(page_result_list is None):
                raise ValueError("array_name does not exist, this is probably a single object")
        else:
            page_result_list = [response]

        filtered_results = ConnectionUtils.filter_values_dict(
            result_list=page_result_list,
            allow_list=allow_list,
            ignore_list=ignore_list)

        if(add_time_stamp): # direct time add to make the timestamps represent the real capture time
            for mydict in filtered_results:
                time_key, time_val = SppUtils.get_capture_timestamp_sec()
                mydict[time_key] = time_val
        result_list.extend(filtered_results)

        # adjust pagesize if either the send time is too high
        # or regulary adjust on max-page sizes requests
        # dont adjust if page isnt full and therefore too quick
        if(send_time > self.__preferred_time or len(page_result_list) == self.__page_size):
            LOGGER.debug(f"send_time: {send_time}, len: {len(page_result_list)}, pageSize = {self.__page_size} ")
            self.__page_size = ConnectionUtils.adjust_page_size(
                page_size=len(page_result_list),
                min_page_size=self.__min_page_size,
                preferred_time=self.__preferred_time,
                send_time=send_time)
            LOGGER.debug(f"Changed pageSize from {len(page_result_list)} to {self.__page_size} ")
            # the adjusted page size is carried into the next request
            params["pageSize"] = self.__page_size

    LOGGER.debug("objectList size %d", len(result_list))
    return result_list
def _parse_pool_show_cmd(ssh_command: SshCommand, ssh_type: SshTypes) -> Tuple[str, List[Dict[str, Any]]]:
    """Parses the result of the `vsnap --json pool show` command, splitting it into its parts.

    Arguments:
        ssh_command {SshCommand} -- command with saved result
        ssh_type {SshTypes} -- type of the client

    Raises:
        ValueError: no command given or no result saved
        ValueError: no ssh type given
        ValueError: no table name to insert the parsed values
        ValueError: result is not decodable json

    Returns:
        Tuple[str, List[Dict[str, Any]]] -- Tuple of the tablename and a insert list
    """
    if(not ssh_command or not ssh_command.result):
        raise ValueError("no command given or empty result")
    if(not ssh_type):
        raise ValueError("no sshtype given")
    if(not ssh_command.table_name):
        raise ValueError("need table name to insert parsed value")

    pool_result_list: List[Dict[str, Any]] = []
    try:
        result: Dict[str, List[Dict[str, Any]]] = json.loads(ssh_command.result)
    except json.decoder.JSONDecodeError as error:  # type: ignore
        # chain the original decode error for easier debugging
        raise ValueError("cant decode json for pool command",
                         ssh_command.result, ssh_command, ssh_type) from error

    # acts as allow list; hoisted out of the loop since it never changes
    insert_list = [
        'compression', 'compression_ratio', 'deduplication',
        'deduplication_ratio', 'diskgroup_size', 'encryption.enabled',
        'health', 'id', 'name', 'pool_type', 'size_before_compression',
        'size_before_deduplication', 'size_free', 'size_total',
        'size_used', 'status'
    ]
    for pool in result['pools']:
        pool_dict: Dict[str, Any] = {}
        for item in insert_list:
            (key, value) = SppUtils.get_nested_kv(item, pool)
            pool_dict[key] = value

        # rename: get_nested_kv returns the leaf key 'enabled'
        pool_dict['encryption_enabled'] = pool_dict.pop('enabled')

        # change unit from bytes to megabytes
        try:
            sz_b_c = SppUtils.parse_unit(pool_dict['size_before_compression'])
            sz_b_d = SppUtils.parse_unit(pool_dict['size_before_deduplication'])
            sz_fr = SppUtils.parse_unit(pool_dict['size_free'])
            sz_t = SppUtils.parse_unit(pool_dict['size_total'])
            sz_u = SppUtils.parse_unit(pool_dict['size_used'])
            pool_dict['size_before_compression'] = int(sz_b_c / pow(2, 20)) if sz_b_c else None
            pool_dict['size_before_deduplication'] = int(sz_b_d / pow(2, 20)) if sz_b_d else None
            pool_dict['size_free'] = int(sz_fr / pow(2, 20)) if sz_fr else None
            pool_dict['size_total'] = int(sz_t / pow(2, 20)) if sz_t else None
            pool_dict['size_used'] = int(sz_u / pow(2, 20)) if sz_u else None
        except KeyError as error:
            ExceptionUtils.exception_info(
                error=error,
                extra_message=f"failed to reduce size of vsnap pool size for {pool_dict}")

        # set default needed fields
        pool_dict['hostName'] = ssh_command.host_name
        pool_dict['ssh_type'] = ssh_type.name
        (time_key, time_value) = SppUtils.get_capture_timestamp_sec()
        pool_dict[time_key] = time_value

        pool_result_list.append(pool_dict)

    return (ssh_command.table_name, pool_result_list)
def _parse_top_cmd(self, ssh_command: SshCommand, ssh_type: SshTypes) -> Tuple[str, List[Dict[str, Any]]]:
    """Parses the result of the `top` command, splitting it into its parts.

    Arguments:
        ssh_command {SshCommand} -- command with saved result
        ssh_type {SshTypes} -- type of the client

    Raises:
        ValueError: no command given or no result saved
        ValueError: no ssh type given
        ValueError: no table name to insert the parsed values

    Returns:
        Tuple[str, List[Dict[str, Any]]] -- Tuple of the tablename and a insert list
    """
    if(not ssh_command or not ssh_command.result):
        raise ValueError("no command given or empty result")
    if(not ssh_type):
        raise ValueError("no sshtype given")
    if(not ssh_command.table_name):
        raise ValueError("need table name to insert parsed value")

    output_lines = ssh_command.result.splitlines()

    # All lines above (header) 5 are pruned, not used anymore. This data is tracked via ps (see Issue #71)
    captions = output_lines[6].split()
    rows: List[Dict[str, Any]] = [
        dict(zip(captions, line.split())) for line in output_lines[7:]]  # type: ignore

    elapsed_pattern = re.compile(r"(\d+):(\d{2})(?:\.(\d{2}))?")

    # remove `top` from commands, it is also tracked
    rows = [row for row in rows if row["COMMAND"] in self.__process_grep_list]

    for row in rows:
        # Delete Memory, this is tracked by ps command (See Issue #71)
        for memory_caption in ("VIRT", "RES", "SHR", "%MEM"):
            row.pop(memory_caption, None)

        # Add information
        row["collectionType"] = "TOP"

        # unused information
        for unused_caption in ("PR", "NI", "S"):
            row.pop(unused_caption, None)

        # set default needed fields
        row['hostName'] = ssh_command.host_name
        row['ssh_type'] = ssh_type.name
        (time_key, time_value) = SppUtils.get_capture_timestamp_sec()
        row[time_key] = time_value

        # split time into seconds
        match = elapsed_pattern.match(row['TIME+'])
        if(match):
            (hours, minutes, seconds) = match.groups()
            if(seconds is None):
                seconds = 0
            total_seconds = int(hours) * 3600 + int(minutes) * 60 + int(seconds)
        else:
            total_seconds = None
        row['TIME+'] = total_seconds

    return (ssh_command.table_name, rows)
def get_vms_per_sla(self) -> List[Dict[str, Any]]:
    """Retrieves and calculates all vmware VM counts per SLA."""

    # collect all SLA policies (name + id only)
    sla_policty_list = self.__rest_client.get_objects(
        endpoint="/ngp/slapolicy",
        allow_list=["name", "id"],
        array_name="slapolicies",
        add_time_stamp=False
    )

    result_list: List[Dict[str, Any]] = []
    for sla_policty in sla_policty_list:
        try:
            sla_name: str = sla_policty["name"]
        except KeyError as error:
            ExceptionUtils.exception_info(error, extra_message="skipping one sla entry due missing name.")
            continue
        sla_id: Optional[str] = sla_policty.get("id", None)

        ## hotadd:
        sla_name = urllib.parse.quote_plus(sla_name)

        # query only the total count: pageSize 1 keeps the response small
        search_params = {
            "resourceType": "vm",
            "from": "hlo",
            "pageSize": 1,
            "filter": json.dumps([
                {
                    "property": "storageProfileName",
                    "value": sla_name,
                    "op": "="
                }
            ])
        }
        # other options: volume, vm, tag, tagcategory
        search_body = {
            "name": "*",
            "hypervisorType": "vmware",
        }
        (response_json, _) = self.__rest_client.query_url(
            self.__rest_client.get_url("/api/hypervisor/search"),
            search_params, RequestType.POST, search_body)

        time_key, time = SppUtils.get_capture_timestamp_sec()
        result_list.append({
            "slaName": sla_name,
            "slaId": sla_id,
            "vmCountBySLA": response_json.get("total", None),
            time_key: time,
        })

    return result_list