Example #1
    def get_vms_per_sla(self) -> List[Dict[str, Any]]:
        """retrieves and calculates all vmware per SLA."""

        endpoint = "/ngp/slapolicy"
        white_list = ["name", "id"]
        array_name = "slapolicies"

        sla_policy_list = self.__rest_client.get_objects(
            endpoint=endpoint,
            white_list=white_list,
            array_name=array_name,
            add_time_stamp=False)

        result_list: List[Dict[str, Any]] = []
        for sla_policy in sla_policy_list:
            try:
                sla_name: str = sla_policy["name"]
            except KeyError as error:
                ExceptionUtils.exception_info(
                    error,
                    extra_message="skipping one SLA entry due to missing name.")
                continue
            sla_id: Optional[str] = sla_policy.get("id", None)

            result_dict: Dict[str, Any] = {}

            # URL-encode the SLA name for use inside the filter parameter;
            # the original, human-readable name is kept for the result entry below.
            encoded_sla_name = urllib.parse.quote_plus(sla_name)

            endpoint = "/api/hypervisor/search"
            endpoint = ConnectionUtils.url_set_param(url=endpoint,
                                                     param_name="resourceType",
                                                     param_value="vm")
            endpoint = ConnectionUtils.url_set_param(url=endpoint,
                                                     param_name="from",
                                                     param_value="hlo")
            filter_str: str = '[{"property":"storageProfileName","value": "' + encoded_sla_name + '", "op":"="}]'
            endpoint = ConnectionUtils.url_set_param(url=endpoint,
                                                     param_name="filter",
                                                     param_value=filter_str)

            # note: currently only VMware is queried per SLA, not Hyper-V
            # it still needs to be checked whether hypervisorType must be specified
            post_data = json.dumps({"name": "*", "hypervisorType": "vmware"})

            response_json = self.__rest_client.post_data(endpoint=endpoint,
                                                         post_data=post_data)

            result_dict["slaName"] = sla_name
            result_dict["slaId"] = sla_id
            result_dict["vmCountBySLA"] = response_json.get("total")

            # add the capture timestamp under the key provided by SppUtils
            time_key, capture_time = SppUtils.get_capture_timestamp_sec()
            result_dict[time_key] = capture_time

            result_list.append(result_dict)

        return result_list
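
A minimal usage sketch for the method above. The name api_queries is a placeholder for an already-constructed instance of this class; the keys match the ones written into result_dict above.

# hypothetical usage sketch -- api_queries stands in for an instance of this class
vms_per_sla = api_queries.get_vms_per_sla()
for record in vms_per_sla:
    # each record carries the SLA name/id, the VM count and a capture timestamp
    print(record["slaName"], record["slaId"], record["vmCountBySLA"])
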
Example #2
    def get_all_vms(self) -> List[Dict[str, Any]]:
        """retrieves a list of all vm's with their statistics."""
        endpoint = "/api/endeavour/catalog/hypervisor/vm"
        white_list = [
            "id",
            "properties.name",
            "properties.host",
            "catalogTime",
            "properties.vmVersion",
            "properties.configInfo.osName",
            "properties.hypervisorType",
            "properties.isProtected",
            "properties.inHLO",
            "isEncrypted",
            "properties.powerSummary.powerState",
            "properties.powerSummary.uptime",
            "properties.storageSummary.commited",
            "properties.storageSummary.uncommited",
            "properties.storageSummary.shared",
            "properties.datacenter.name",
            "properties.cpu",
            "properties.coresPerCpu",
            "properties.memory",
        ]
        array_name = "children"

        endpoint = ConnectionUtils.url_set_param(
            url=endpoint,
            param_name="embed",
            param_value="(children(properties))")
        return self.__rest_client.get_objects(endpoint=endpoint,
                                              array_name=array_name,
                                              white_list=white_list,
                                              add_time_stamp=False)
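
For orientation, a small usage sketch. It assumes api_queries is an instance of this class and that get_objects() returns the white-listed fields under their dotted key names; if the REST client flattens or renames keys differently, the lookup below has to be adapted.

# hypothetical usage sketch -- the key layout of the returned dicts is an assumption
from collections import Counter

all_vms = api_queries.get_all_vms()
by_type = Counter(vm.get("properties.hypervisorType", "unknown") for vm in all_vms)
print(f"{len(all_vms)} VMs total, by hypervisor type: {dict(by_type)}")
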
Example #3
    def get_job_log_details(self, job_logs_type: str,
                            jobsession_id: int) -> List[Dict[str, Any]]:
        """retrieves jobLogs for a certain jobsession.

        Arguments:
            job_logs_type {str} -- types of joblogs, given as comma seperated string-array: '["DEBUG"]'
            page_size {int} -- size of each response
            jobsession_id {int} -- only returns joblogs for this sessionID

        Raises:
            ValueError: No jobsessionid given
            ValueError: No joblogType specified

        Returns:
            List[Dict[str, Any]] -- List of joblogs for the sessionID of the given types.
        """
        if (not jobsession_id):
            raise ValueError("no jobsession_id given to query Logs by an Id")
        if (not job_logs_type):
            raise ValueError(
                "need to specify the jobLogType you want to query")
        # note: the job id is the id of the job (policy),
        # while the jobsession id is the unique id of one execution of that job
        # note: jobLogs may be cleared by maintenance jobs after X days; the retention period can be configured in the SPP GUI

        LOGGER.debug("retrieving jobLogs for jobsessionId: %d", jobsession_id)
        endpoint = "/api/endeavour/log/job"
        white_list = [
            "jobsessionId", "logTime", "id", "messageId", "message",
            "messageParams", "type"
        ]
        array_name = "logs"

        api_filter = '[{"property":"jobsessionId","value":' + str(jobsession_id) + ',"op":"="},' \
                    '{"property":"type","value":'+ job_logs_type +',"op":"IN"}]'

        # update the filter parameter to include all requested message types, not only INFO
        endpoint_to_logs = ConnectionUtils.url_set_param(
            url=endpoint, param_name="filter", param_value=api_filter)
        log_list = self.__rest_client.get_objects(endpoint=endpoint_to_logs,
                                                  white_list=white_list,
                                                  array_name=array_name)

        return log_list
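
A short usage sketch. job_logs_type is passed as a JSON-style string array, exactly as it is spliced into the filter above; the type list, session id and instance name below are placeholders.

# hypothetical usage sketch -- instance name, types and jobsession id are placeholders
logs = api_queries.get_job_log_details(
    job_logs_type='["INFO","ERROR"]',
    jobsession_id=1234567890)
for log_entry in logs:
    print(log_entry.get("logTime"), log_entry.get("messageId"), log_entry.get("message"))
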
Example #4
    def __query_url(self, url: str) -> Tuple[Dict[str, Any], float]:
        """Sends a request to this endpoint. Repeats if timeout error occured.

        Adust the pagesize on timeout.

        Arguments:
            url {str} -- URL to be queried.

        Raises:
            ValueError: No URL specified
            ValueError: Error when requesting endpoint
            ValueError: Wrong status code
            ValueError: failed to parse result
            ValueError: Timeout when sending result

        Returns:
            Tuple[Dict[str, Any], float] -- result of the request together with the elapsed send time
        """
        if(not url):
            raise ValueError("no url specified")

        LOGGER.debug(f"endpoint request {url}")

        failed_trys: int = 0
        response_query: Optional[Response] = None
        send_time: float = -1 # prevent unbound var

        while(response_query is None):

            # read pagesize
            actual_page_size = ConnectionUtils.url_get_param_value(url=url, param_name="pageSize")

            # always set the pageSize to avoid varying page sizes set by the system
            if(not actual_page_size):
                url = ConnectionUtils.url_set_param(url=url, param_name="pageSize", param_value=self.__page_size)
            else:
                # read the pagesize
                try:
                    actual_page_size = int(actual_page_size[0])
                except (ValueError, KeyError) as error:
                    ExceptionUtils.exception_info(error, extra_message="invalid page size recorded")
                    actual_page_size = -1

            # adjust pagesize of url
            if(actual_page_size != self.__page_size):
                LOGGER.debug(f"setting new pageSize from {actual_page_size} to {self.__page_size}")
                url = ConnectionUtils.url_set_param(url=url, param_name="pageSize", param_value=self.__page_size)

            # send the query
            try:
                start_time = time.perf_counter()
                response_query = requests.get( # type: ignore
                    url=url, headers=self.__headers, verify=False,
                    timeout=(self.__initial_connection_timeout, self.__timeout))
                end_time = time.perf_counter()
                send_time = (end_time - start_time)

            except requests.exceptions.ReadTimeout as timeout_error:

                # timeout occurred, increase the failed-try counter
                failed_trys += 1


                # #### Aborting cases ######
                if(self.__send_retries < failed_trys):
                    ExceptionUtils.exception_info(error=timeout_error)
                    # read start index for debugging
                    start_index = ConnectionUtils.url_get_param_value(url=url, param_name="pageStartIndex")
                    # report timeout with full information
                    raise ValueError("timeout after repeating a maximum ammount of times.",
                                     timeout_error, failed_trys, self.__page_size, start_index)

                if(self.__page_size == self.__min_page_size):
                    ExceptionUtils.exception_info(error=timeout_error)
                    # read start index for debugging
                    start_index = ConnectionUtils.url_get_param_value(url=url, param_name="pageStartIndex")
                    # report timeout with full information
                    raise ValueError("timeout after using minumum pagesize. repeating the request is of no use.",
                                     timeout_error, failed_trys, self.__page_size, start_index)

                # #### continuing cases ######
                if(self.__send_retries == failed_trys): # last try
                    LOGGER.debug(f"Timeout error when requesting, now last try of total {self.__send_retries}. Reducing pagesize to minimum for url: {url}")
                    if(self.__verbose):
                        LOGGER.info(f"Timeout error when requesting, now last try of total {self.__send_retries}. Reducing pagesize to minimum for url: {url}")

                    self.__page_size = self.__min_page_size
                    # repeat with minimal possible size

                else: # (self.__send_retries > failed_trys): # more than 1 try left
                    LOGGER.debug(f"Timeout error when requesting, now on try {failed_trys} of {self.__send_retries}. Reducing pagesize for url: {url}")
                    if(self.__verbose):
                        LOGGER.info(f"Timeout error when requesting, now on try {failed_trys} of {self.__send_retries}. Reducing pagesize for url: {url}")
                    self.__page_size = ConnectionUtils.adjust_page_size(
                        page_size=self.__page_size,
                        min_page_size=self.__min_page_size,
                        time_out=True)
                    # repeat with reduced page size

            except requests.exceptions.RequestException as error:
                ExceptionUtils.exception_info(error=error)
                raise ValueError("error when requesting endpoint", error)

        if response_query.status_code != 200:
            raise ValueError("Wrong Status code when requesting endpoint data",
                             response_query.status_code, url, response_query)

        try:
            response_json: Dict[str, Any] = response_query.json()
        except (json.decoder.JSONDecodeError, ValueError) as error: # type: ignore
            raise ValueError("failed to parse the response of the REST API request", response_query) from error # type: ignore

        return (response_json, send_time)
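
Since __query_url is private, it is only reached from other methods of the same class. Below is a hedged sketch of such an internal call site, assuming the full URL (scheme, host, port) has already been assembled elsewhere and that the payload exposes a "total" field as in the search responses above.

    # hypothetical internal call site -- the URL and the "total" response key are placeholders
    (response_json, send_time) = self.__query_url(
        url="https://spp-host:443/api/endeavour/jobsession?pageSize=100")
    LOGGER.debug("request took %fs, total reported entries: %s",
                 send_time, response_json.get("total"))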