Example #1
    def get_vms_per_sla(self) -> List[Dict[str, Any]]:
        """retrieves and calculates all vmware per SLA."""

        endpoint = "/ngp/slapolicy"
        white_list = ["name", "id"]
        array_name = "slapolicies"

        sla_policy_list = self.__rest_client.get_objects(
            endpoint=endpoint,
            white_list=white_list,
            array_name=array_name,
            add_time_stamp=False)

        result_list: List[Dict[str, Any]] = []
        for sla_policy in sla_policy_list:
            try:
                sla_name: str = sla_policy["name"]
            except KeyError as error:
                ExceptionUtils.exception_info(
                    error,
                    extra_message="skipping one SLA entry due to a missing name.")
                continue
            sla_id: Optional[str] = sla_policy.get("id", None)

            result_dict: Dict[str, Any] = {}

            ## hotadd: url-encode the SLA name so it can be used as a filter value
            sla_name = urllib.parse.quote_plus(sla_name)

            endpoint = "/api/hypervisor/search"
            endpoint = ConnectionUtils.url_set_param(url=endpoint,
                                                     param_name="resourceType",
                                                     param_value="vm")
            endpoint = ConnectionUtils.url_set_param(url=endpoint,
                                                     param_name="from",
                                                     param_value="hlo")
            filter_str: str = '[{"property":"storageProfileName","value": "' + sla_name + '", "op":"="}]'
            endpoint = ConnectionUtils.url_set_param(url=endpoint,
                                                     param_name="filter",
                                                     param_value=filter_str)

            # note: currently only vmware is queried per sla, not hyperV
            # need to check if hypervisortype must be specified
            post_data = json.dumps({"name": "*", "hypervisorType": "vmware"})

            response_json = self.__rest_client.post_data(endpoint=endpoint,
                                                         post_data=post_data)

            result_dict["slaName"] = sla_name
            result_dict["slaId"] = sla_id
            result_dict["vmCountBySLA"] = response_json.get("total")

            time_key, time = SppUtils.get_capture_timestamp_sec()
            result_dict[time_key] = time

            result_list.append(result_dict)

        return result_list
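
A minimal usage sketch for the method above; `api_queries` is a hypothetical name for an instance of the surrounding class, already wired to an authenticated REST client:

    # usage sketch -- `api_queries` is a hypothetical, already authenticated instance of the class above
    for entry in api_queries.get_vms_per_sla():
        print(f"SLA {entry['slaName']} ({entry['slaId']}): {entry['vmCountBySLA']} VMs")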
Example #2
    def get_all_vms(self) -> List[Dict[str, Any]]:
        """retrieves a list of all vm's with their statistics."""
        endpoint = "/api/endeavour/catalog/hypervisor/vm"
        white_list = [
            "id",
            "properties.name",
            "properties.host",
            "catalogTime",
            "properties.vmVersion",
            "properties.configInfo.osName",
            "properties.hypervisorType",
            "properties.isProtected",
            "properties.inHLO",
            "isEncrypted",
            "properties.powerSummary.powerState",
            "properties.powerSummary.uptime",
            "properties.storageSummary.commited",
            "properties.storageSummary.uncommited",
            "properties.storageSummary.shared",
            "properties.datacenter.name",
            "properties.cpu",
            "properties.coresPerCpu",
            "properties.memory",
        ]
        array_name = "children"

        endpoint = ConnectionUtils.url_set_param(
            url=endpoint,
            param_name="embed",
            param_value="(children(properties))")
        return self.__rest_client.get_objects(endpoint=endpoint,
                                              array_name=array_name,
                                              white_list=white_list,
                                              add_time_stamp=False)
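
A short usage sketch; `api_queries` is again a hypothetical instance, and the snippet only counts the returned records:

    # usage sketch -- hypothetical api_queries instance; simply counts the VM records
    vm_list = api_queries.get_all_vms()
    print(f"retrieved {len(vm_list)} VM records")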
Example #3
    def get_job_log_details(self, job_logs_type: str,
                            jobsession_id: int) -> List[Dict[str, Any]]:
        """retrieves jobLogs for a certain jobsession.

        Arguments:
            job_logs_type {str} -- types of joblogs, given as comma seperated string-array: '["DEBUG"]'
            page_size {int} -- size of each response
            jobsession_id {int} -- only returns joblogs for this sessionID

        Raises:
            ValueError: No jobsessionid given
            ValueError: No joblogType specified

        Returns:
            List[Dict[str, Any]] -- List of joblogs for the sessionID of the given types.
        """
        if (not jobsession_id):
            raise ValueError("no jobsession_id given to query Logs by an Id")
        if (not job_logs_type):
            raise ValueError(
                "need to specify the jobLogType you want to query")
        # note: the job id is the id of the job (policy);
        # the jobsession id is the unique id of one execution of a job
        # note: jobLogs may be cleared by maintenance jobs after X days. The retention period can be specified in the SPP GUI.

        LOGGER.debug("retrieving jobLogs for jobsessionId: %d", jobsession_id)
        endpoint = "/api/endeavour/log/job"
        white_list = [
            "jobsessionId", "logTime", "id", "messageId", "message",
            "messageParams", "type"
        ]
        array_name = "logs"

        api_filter = '[{"property":"jobsessionId","value":' + str(jobsession_id) + ',"op":"="},' \
                    '{"property":"type","value":'+ job_logs_type +',"op":"IN"}]'

        # update the filter parameter to list all requested message types, not only INFO
        endpoint_to_logs = ConnectionUtils.url_set_param(
            url=endpoint, param_name="filter", param_value=api_filter)
        log_list = self.__rest_client.get_objects(endpoint=endpoint_to_logs,
                                                  white_list=white_list,
                                                  array_name=array_name)

        return log_list
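
A usage sketch with illustrative values; the jobsession id below is a placeholder, and job_logs_type follows the documented string-array format:

    # usage sketch -- placeholder jobsession id; log types given as a string-array
    logs = api_queries.get_job_log_details(
        job_logs_type='["INFO","ERROR"]',
        jobsession_id=1234567890)
    for log in logs:
        print(log.get("messageId"), log.get("message"))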
Example #4
    def logout(self) -> None:
        """Logs out of the REST-API.

        Raises:
            ValueError: Error when logging out.
            ValueError: Wrong status code when logging out.
        """
        url = self.get_url("/api/endeavour/session")
        try:
            response_logout: Response = delete(url, headers=self.__headers, verify=False)
        except RequestException as error:
            ExceptionUtils.exception_info(error=error)
            raise ValueError("error when logging out")

        if response_logout.status_code != 204:
            raise ConnectionUtils.rest_response_error(response_logout, "Wrong Status code when logging out")

        if(self.__verbose):
            LOGGER.info("Rest-API logout successful")
        LOGGER.debug("Rest-API logout successful")
Example #5
    def __query_url(self, url: str) -> Tuple[Dict[str, Any], float]:
        """Sends a request to this endpoint. Repeats if timeout error occured.

        Adust the pagesize on timeout.

        Arguments:
            url {str} -- URL to be queried.

        Raises:
            ValueError: No URL specified
            ValueError: Error when requesting endpoint
            ValueError: Wrong status code
            ValueError: failed to parse result
            ValueError: Timeout when sending result

        Returns:
            Tuple[Dict[str, Any], float] -- Result of the request with the required send time
        """
        if(not url):
            raise ValueError("no url specified")

        LOGGER.debug(f"endpoint request {url}")

        failed_tries: int = 0
        response_query: Optional[Response] = None
        send_time: float = -1 # prevent unbound var

        while(response_query is None):

            # read pagesize
            actual_page_size = ConnectionUtils.url_get_param_value(url=url, param_name="pageSize")

            # Always set pageSize to avoid different page sizes being set by the system
            if(not actual_page_size):
                url = ConnectionUtils.url_set_param(url=url, param_name="pageSize", param_value=self.__page_size)
            else:
                # read the pagesize
                try:
                    actual_page_size = int(actual_page_size[0])
                except (ValueError, KeyError) as error:
                    ExceptionUtils.exception_info(error, extra_message="invalid page size recorded")
                    actual_page_size = -1

            # adjust pagesize of url
            if(actual_page_size != self.__page_size):
                LOGGER.debug(f"setting new pageSize from {actual_page_size} to {self.__page_size}")
                url = ConnectionUtils.url_set_param(url=url, param_name="pageSize", param_value=self.__page_size)

            # send the query
            try:
                start_time = time.perf_counter()
                response_query = requests.get( # type: ignore
                    url=url, headers=self.__headers, verify=False,
                    timeout=(self.__initial_connection_timeout, self.__timeout))
                end_time = time.perf_counter()
                send_time = (end_time - start_time)

            except requests.exceptions.ReadTimeout as timeout_error:

                # timeout occurred, increasing failed tries
                failed_tries += 1


                # #### Aborting cases ######
                if(self.__send_retries < failed_tries):
                    ExceptionUtils.exception_info(error=timeout_error)
                    # read start index for debugging
                    start_index = ConnectionUtils.url_get_param_value(url=url, param_name="pageStartIndex")
                    # report timeout with full information
                    raise ValueError("timeout after repeating a maximum ammount of times.",
                                     timeout_error, failed_trys, self.__page_size, start_index)

                if(self.__page_size == self.__min_page_size):
                    ExceptionUtils.exception_info(error=timeout_error)
                    # read start index for debugging
                    start_index = ConnectionUtils.url_get_param_value(url=url, param_name="pageStartIndex")
                    # report timeout with full information
                    raise ValueError("timeout after using minumum pagesize. repeating the request is of no use.",
                                     timeout_error, failed_trys, self.__page_size, start_index)

                # #### continuing cases ######
                if(self.__send_retries == failed_tries): # last try
                    LOGGER.debug(f"Timeout error when requesting, now last try of total {self.__send_retries}. Reducing pagesize to minimum for url: {url}")
                    if(self.__verbose):
                        LOGGER.info(f"Timeout error when requesting, now last try of total {self.__send_retries}. Reducing pagesize to minimum for url: {url}")

                    self.__page_size = self.__min_page_size
                    # repeat with minimal possible size

                else: # (self.__send_retries > failed_tries): # more than one try left
                    LOGGER.debug(f"Timeout error when requesting, now on try {failed_tries} of {self.__send_retries}. Reducing pagesize for url: {url}")
                    if(self.__verbose):
                        LOGGER.info(f"Timeout error when requesting, now on try {failed_tries} of {self.__send_retries}. Reducing pagesize for url: {url}")
                    self.__page_size = ConnectionUtils.adjust_page_size(
                        page_size=self.__page_size,
                        min_page_size=self.__min_page_size,
                        time_out=True)
                    # repeat with reduced page size

            except requests.exceptions.RequestException as error:
                ExceptionUtils.exception_info(error=error)
                raise ValueError("error when requesting endpoint", error)

        if response_query.status_code != 200:
            raise ValueError("Wrong Status code when requesting endpoint data",
                             response_query.status_code, url, response_query)

        try:
            response_json: Dict[str, Any] = response_query.json()
        except (json.decoder.JSONDecodeError, ValueError) as error: # type: ignore
            raise ValueError("failed to parse query in restAPI post request", response_query) # type: ignore

        return (response_json, send_time)
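
The loop above shrinks the pageSize via ConnectionUtils.adjust_page_size on each ReadTimeout. A condensed standalone sketch of that back-off idea (the halving strategy is an illustrative assumption, not the exact logic of adjust_page_size):

    def shrink_page_size(page_size: int, min_page_size: int) -> int:
        # illustrative back-off: halve the page size on timeout, never going below the minimum
        return max(min_page_size, page_size // 2)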
Example #6
    def get_objects(self,
                    endpoint: str = None, uri: str = None,
                    array_name: str = None,
                    white_list: List[str] = None, ignore_list: List[str] = None,
                    add_time_stamp: bool = False) -> List[Dict[str, Any]]:
        """Querys a response(-list) from a REST-API endpoint or URI.

        Specify `array_name` if there are multiple results / list.
        Use white_list to pick only the values specified.
        Use ignore_list to pick everything but the values specified.
        If both are given, white_list items take precedence over ignore_list items; everything not filtered out is still returned.

        Note:
        Do not specify both endpoint and uri, only uri will be used

        Keyword Arguments:
            endpoint {str} -- endpoint to be queried. Either use this or uri (default: {None})
            uri {str} -- uri to be queried. Either use this or endpoint (default: {None})
            array_name {str} -- name of array if there are multiple results wanted (default: {None})
            white_list {list} -- list of items to query (default: {None})
            ignore_list {list} -- query all but these items(-groups). (default: {None})
            add_time_stamp {bool} -- whether to add the capture timestamp  (default: {False})

        Raises:
            ValueError: Neither an endpoint nor a uri is specified
            ValueError: Negative or 0 pagesize
            ValueError: array_name is specified but it is only a single object

        Returns:
            {List[Dict[str, Any]]} -- List of dictionaries as the results
        """
        if(not endpoint and not uri):
            raise ValueError("neiter endpoint nor uri specified")
        if(endpoint and uri):
            LOGGER.debug("added both endpoint and uri. This is unneccessary, endpoint is ignored")
        # if neither specifed, get everything
        if(not white_list and not ignore_list):
            ignore_list = []

        # create uri out of endpoint
        if(not uri):
            next_page = self.__srv_url + endpoint
        else:
            next_page = uri

        result_list: List[Dict[str, Any]] = []

        # Aborts if no nextPage is found
        while(next_page):
            LOGGER.debug(f"Collected {len(result_list)} items until now. Next page: {next_page}")
            if(self.__verbose):
                LOGGER.info(f"Collected {len(result_list)} items until now. Next page: {next_page}")
            # Request response
            (response, send_time) = self.__query_url(url=next_page)

            # find follow page if available and set it
            (_, next_page_link) = SppUtils.get_nested_kv(key_name="links.nextPage.href", nested_dict=response)
            next_page = next_page_link

            # Check if single object or not
            if(array_name):
                # get results for this page, if empty nothing happens
                page_result_list: Optional[List[Dict[str, Any]]] = response.get(array_name, None)
                if(page_result_list is None):
                    raise ValueError("array_name does not exist, this is probably a single object")
            else:
                page_result_list = [response]

            filtered_results = ConnectionUtils.filter_values_dict(
                result_list=page_result_list,
                white_list=white_list,
                ignore_list=ignore_list)

            if(add_time_stamp): # direct time add to make the timestamps represent the real capture time
                for mydict in filtered_results:
                    time_key, time_val = SppUtils.get_capture_timestamp_sec()
                    mydict[time_key] = time_val
            result_list.extend(filtered_results)

            # adjust pagesize
            if(send_time > self.__preferred_time or len(page_result_list) == self.__page_size):
                self.__page_size = ConnectionUtils.adjust_page_size(
                    page_size=len(page_result_list),
                    min_page_size=self.__min_page_size,
                    preferred_time=self.__preferred_time,
                    send_time=send_time)

        LOGGER.debug("objectList size %d", len(result_list))
        return result_list
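
A usage sketch mirroring the SLA-policy query from Example #1; `rest_client` is a hypothetical instance:

    # usage sketch -- paginated query, keeping only name and id of each SLA policy
    policies = rest_client.get_objects(
        endpoint="/ngp/slapolicy",
        array_name="slapolicies",
        white_list=["name", "id"],
        add_time_stamp=True)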
Example #7
    def query_url(
        self,
        url: str,
        params: Dict[str, Any] = None,
        request_type: RequestType = RequestType.GET,
        post_data: Dict[str, str] = None,
        auth: HTTPBasicAuth = None) -> Tuple[Dict[str, Any], float]:
        """Sends a request to this endpoint. Repeats if timeout error occured. Adust the pagesize on timeout.

        Arguments:
            url {str} -- URL to be queried. Must contain the server-uri and endpoint. Does not allow encoded parameters
            params {Dict[str, Any]} -- dictionary of URL parameters to send with the request (default: {None})
            post_data {Dict[str, str]} -- additional data with filters/parameters. Only to be sent with a POST-Request (default: {None})
            auth {HTTPBasicAuth} -- Basic auth used to log in to SPP via POST-Request (default: {None})
            request_type {RequestType} -- what kind of request should be made (default: {RequestType.GET})

        Raises:
            ValueError: No URL specified
            ValueError: Error when requesting endpoint
            ValueError: Wrong status code
            ValueError: failed to parse result
            ValueError: Timeout when sending result
            ValueError: No post-data/auth is allowed in a GET-Request

        Returns:
            Tuple[Dict[str, Any], float] -- Result of the request with the required send time
        """
        if(not url):
            raise ValueError("no url specified")
        if((post_data or auth) and request_type == RequestType.GET):
            raise ValueError("No post-data/auth is allowed in a GET-Request")
        LOGGER.debug(f"query url: {url}, type: {type}, post_data: {post_data} auth: {True if auth else False}")
        if(not params):
            params = {}

        failed_tries: int = 0
        response_query: Optional[Response] = None
        send_time: float = -1 # prevent unbound var

        # avoid unset pageSize to not get into SPP defaults
        if("pageSize" not in params):
            LOGGER.debug(f"setting pageSize to {self.__page_size} from unset value")
            params["pageSize"] = self.__page_size
        elif(params["pageSize"] is None):
            params.pop("pageSize")

        while(response_query is None):

            # send the query
            try:
                if(request_type == RequestType.GET):
                    response_query = get(
                        url=url, headers=self.__headers, verify=False,
                        params=params,
                        timeout=(self.__initial_connection_timeout, self.__timeout))
                elif(request_type == RequestType.POST):
                    response_query = post(
                        url=url, headers=self.__headers, verify=False,
                        params=params, json=post_data, auth=auth,
                        timeout=(self.__initial_connection_timeout, self.__timeout))
                send_time = response_query.elapsed.total_seconds()

            except ReadTimeout as timeout_error:

                # timeout occurred, increasing failed tries
                failed_tries += 1

                url_params = ConnectionUtils.get_url_params(url)


                # #### Aborting cases ######
                if(failed_tries > self.__max_send_retries):
                    ExceptionUtils.exception_info(error=timeout_error)
                    # read start index for debugging
                    start_index = url_params.get("pageStartIndex", None)
                    page_size = url_params.get("pageSize", None)
                    # report timeout with full information
                    raise ValueError("timeout after repeating a maximum ammount of times.",
                                     timeout_error, failed_tries, page_size, start_index)

                if(self.__page_size == self.__min_page_size):
                    ExceptionUtils.exception_info(error=timeout_error)
                    # read start index for debugging
                    start_index = url_params.get("pageStartIndex", None)
                    page_size = url_params.get("pageSize", None)
                    # report timeout with full information
                    raise ValueError("timeout after using minumum pagesize. repeating the request is of no use.",
                                     timeout_error, failed_tries, page_size, start_index)

                # #### continuing cases ######
                if(failed_tries == self.__max_send_retries): # last try
                    LOGGER.debug(f"Timeout error when requesting, now last try of total {self.__max_send_retries}. Reducing pagesize to minimum for url: {url}")
                    if(self.__verbose):
                        LOGGER.info(f"Timeout error when requesting, now last try of total {self.__max_send_retries}. Reducing pagesize to minimum for url: {url}")

                    # persist reduced size for further requests
                    self.__page_size = self.__min_page_size
                    # repeat with minimal possible size
                    LOGGER.debug(f"setting pageSize from {params.get('pageSize', None)} to {self.__page_size}")
                    params["pageSize"] = self.__page_size

                else: # (failed_tries < self.__max_send_retries): # more than one try left
                    LOGGER.debug(f"Timeout error when requesting, now on try {failed_tries} of {self.__max_send_retries}. Reducing pagesize for url: {url}")
                    if(self.__verbose):
                        LOGGER.info(f"Timeout error when requesting, now on try {failed_tries} of {self.__max_send_retries}. Reducing pagesize for url: {url}")

                    # persist reduced size for further requests
                    self.__page_size = ConnectionUtils.adjust_page_size(
                        page_size=params["pageSize"],
                        min_page_size=self.__min_page_size,
                        timeout=True)
                    # repeat with reduced page size
                    LOGGER.debug(f"setting pageSize from {params.get('pageSize', None)} to {self.__page_size}")
                    params["pageSize"] = self.__page_size

            except RequestException as error:
                ExceptionUtils.exception_info(error=error)
                raise ValueError("error when requesting endpoint", error)

        if(not response_query.ok):
            raise ConnectionUtils.rest_response_error(
                response_query,
                "Wrong Status code when requesting endpoint data",
                url)

        try:
            response_json: Dict[str, Any] = response_query.json()
        except (json.decoder.JSONDecodeError, ValueError) as error:
            raise ValueError("failed to parse query in restAPI request", response_query)

        return (response_json, send_time)
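
A sketch of the POST branch used for a login request; reusing the session endpoint from logout() and reading a "sessionid" field are assumptions not confirmed by the examples above:

    # hypothetical login sketch -- endpoint reuse and the "sessionid" key are assumptions
    (response, _) = rest_client.query_url(
        url=rest_client.get_url("/api/endeavour/session"),
        request_type=RequestType.POST,
        auth=HTTPBasicAuth("username", "password"))
    session_id = response.get("sessionid")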
Example #8
    def get_objects(self,
                    endpoint: str = None, uri: str = None,
                    params: Dict[str, Any] = None,
                    post_data: Dict[str, Any] = None,
                    request_type: RequestType = RequestType.GET,
                    array_name: str = None,
                    allow_list: List[str] = None, ignore_list: List[str] = None,
                    add_time_stamp: bool = False) -> List[Dict[str, Any]]:
        """Querys a response(-list) from a REST-API endpoint or URI from multiple pages

        Specify `array_name` if there are multiple results / list.
        Use allow_list to pick only the values specified.
        Use ignore_list to pick everything but the values specified.
        If both are given, allow_list items take precedence over ignore_list items; everything not filtered out is still returned.
        The pageSize param is only guaranteed to be valid for the first page if included within params.

        Note:
        Do not specify both endpoint and uri; if both are given, the endpoint is used and the uri is ignored

        Keyword Arguments:
            endpoint {str} -- endpoint to be queried. Either use this or uri (default: {None})
            uri {str} -- uri to be queried. Either use this or endpoint (default: {None})
            params {Dict[str, Any]} -- Dictionary with all URL-Parameters. pageSize only guaranteed to be valid for the first page (default: {None})
            post_data {Dict[str, Any]} -- Dictionary with Body-Data. Only use on POST-Requests (default: {None})
            request_type {RequestType} -- Either GET or POST (default: {RequestType.GET})
            array_name {str} -- name of array if there are multiple results wanted (default: {None})
            allow_list {list} -- list of items to query (default: {None})
            ignore_list {list} -- query all but these items(-groups). (default: {None})
            add_time_stamp {bool} -- whether to add the capture timestamp  (default: {False})

        Raises:
            ValueError: Neither an endpoint nor a uri is specified
            ValueError: Negative or 0 pagesize
            ValueError: array_name is specified but it is only a single object

        Returns:
            {List[Dict[str, Any]]} -- List of dictionaries as the results
        """
        if(not endpoint and not uri):
            raise ValueError("neiter endpoint nor uri specified")
        if(endpoint and uri):
            LOGGER.debug("added both endpoint and uri. This is unneccessary, uri is ignored")
        # if neither specifed, get everything
        if(not allow_list and not ignore_list):
            ignore_list = []
        if(params is None):
            params = {}

        # create uri out of endpoint
        if(endpoint):
            next_page = self.get_url(endpoint)
        else:
            next_page = uri

        result_list: List[Dict[str, Any]] = []

        # Aborts if no nextPage is found
        while(next_page):
            LOGGER.debug(f"Collected {len(result_list)} items until now. Next page: {next_page}")
            if(self.__verbose):
                LOGGER.info(f"Collected {len(result_list)} items until now. Next page: {next_page}")

            # Request response
            (response, send_time) = self.query_url(next_page, params, request_type, post_data)

            # find follow page if available and set it
            (_, next_page_link) = SppUtils.get_nested_kv(key_name="links.nextPage.href", nested_dict=response)
            next_page: Optional[str] = next_page_link
            if(next_page):
                # Overwrite params with params from next link
                params = ConnectionUtils.get_url_params(next_page)
                # remove params from page
                next_page = ConnectionUtils.url_set_params(next_page, None)

            # Check if single object or not
            if(array_name):
                # get results for this page, if empty nothing happens
                page_result_list: Optional[List[Dict[str, Any]]] = response.get(array_name, None)
                if(page_result_list is None):
                    raise ValueError("array_name does not exist, this is probably a single object")
            else:
                page_result_list = [response]

            filtered_results = ConnectionUtils.filter_values_dict(
                result_list=page_result_list,
                allow_list=allow_list,
                ignore_list=ignore_list)

            if(add_time_stamp): # direct time add to make the timestamps represent the real capture time
                for mydict in filtered_results:
                    time_key, time_val = SppUtils.get_capture_timestamp_sec()
                    mydict[time_key] = time_val
            result_list.extend(filtered_results)

            # adjust the pagesize if the send time is too high,
            # or regularly adjust on full (max-page-size) requests;
            # don't adjust if the page isn't full and therefore finished quickly
            if(send_time > self.__preferred_time or len(page_result_list) == self.__page_size):
                LOGGER.debug(f"send_time: {send_time}, len: {len(page_result_list)}, pageSize = {self.__page_size} ")
                self.__page_size = ConnectionUtils.adjust_page_size(
                    page_size=len(page_result_list),
                    min_page_size=self.__min_page_size,
                    preferred_time=self.__preferred_time,
                    send_time=send_time)
                LOGGER.debug(f"Changed pageSize from {len(page_result_list)} to {self.__page_size} ")
                params["pageSize"] = self.__page_size





        LOGGER.debug("objectList size %d", len(result_list))
        return result_list
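
A usage sketch of the POST variant, mirroring the VM-per-SLA search in Example #1; the array_name value is an assumption, since that example only reads the "total" field of the response:

    # usage sketch -- POST search with URL params and body data (array_name is an assumption)
    vms = rest_client.get_objects(
        endpoint="/api/hypervisor/search",
        params={"resourceType": "vm", "from": "hlo"},
        post_data={"name": "*", "hypervisorType": "vmware"},
        request_type=RequestType.POST,
        array_name="results",
        allow_list=["name", "id"])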