Example #1
0
    def _get_json(self, url, timeout, params, auth=None, verify=True):
        """
        GET *url* and return the decoded json body.

        :param url: url to request
        :param timeout: request timeout in seconds
        :param params: query parameters dict
        :param auth: optional auth passed through to requests
        :param verify: whether to verify the TLS certificate
        :return: decoded json response as a dict
        :raises CheckException: on non-200 status, encoding/parse errors, or a
            ServiceNow-level error in the response body
        """
        time_exceeded_msg = 'Transaction cancelled: maximum execution time exceeded'

        response = requests.get(url, timeout=timeout, params=params, auth=auth, verify=verify)
        if response.status_code != 200:
            raise CheckException('Got status: %d when hitting %s' % (response.status_code, response.url))

        try:
            response_json = json.loads(response.text.encode('utf-8'))
        except UnicodeEncodeError as e:
            raise CheckException('Encoding error: "%s" in response from url %s' % (e, response.url))
        except json_parse_exception as e:
            # ServiceNow bug workaround: a 200 response can carry malformed json whose
            # body contains the execution-time-exceeded message. Surface that message,
            # since the parser error is only a side effect.
            if time_exceeded_msg in response.text:
                error_msg = 'ServiceNow Error "%s" in response from url %s' % (time_exceeded_msg, response.url)
            else:
                error_msg = 'Json parse error: "%s" in response from url %s' % (e, response.url)
            raise CheckException(error_msg)

        error = response_json.get('error')
        if error:
            raise CheckException(
                'ServiceNow error: "%s" in response from url %s' % (error.get('message'), response.url)
            )

        result = response_json.get('result')
        if result:
            self.log.debug('Got %d results in response', len(result))

        return response_json
Example #2
0
    def method_request(self, url, name, auth=None, params=None, request_id=1):
        """
        Perform a JSON-RPC 2.0 request and return the decoded json response.

        :param url: endpoint to send the request to
        :param name: JSON-RPC method name
        :param auth: optional auth token, added to the payload when truthy
        :param params: optional params dict for the call (defaults to empty)
        :param request_id: JSON-RPC request id
        :return: decoded json response as a dict
        :raises CheckException: on encoding errors in the response
        :raises Exception: on any other response decoding error
        """
        # `params` used to be a mutable default argument ({}), which Python
        # shares between calls; default to None and normalize instead.
        if params is None:
            params = {}
        payload = {
            "jsonrpc": "2.0",
            "method": "%s" % name,
            "id": request_id,
            "params": params
        }
        if auth:
            payload['auth'] = auth

        # Lazy %-style logging args avoid formatting when debug is disabled.
        self.log.debug("Request to URL: %s", url)
        self.log.debug("Request payload: %s", payload)
        # NOTE(review): this sends a GET with a json body; JSON-RPC endpoints
        # usually expect POST — confirm the target API really accepts GET.
        response = requests.get(url, json=payload, verify=self.ssl_verify)
        response.raise_for_status()
        self.log.debug("Request response: %s", response.text)
        try:
            response_json = json.loads(response.text.encode('utf-8'))
            return response_json
        except UnicodeEncodeError as e:
            raise CheckException(
                'Encoding error: "%s" in response from url %s' %
                (e, response.url))
        except Exception as e:
            raise Exception('Error "%s" in response from url %s' %
                            (str(e), response.url))
Example #3
0
    def _batch_collect(self, collect_function, instance_info):
        """
        batch processing of components or relations fetched from CMDB
        :return: collected components
        """
        offset = 0
        batch_number = 0
        completed = False
        collection = []

        while not completed:
            elements = collect_function(instance_info, offset)
            if "result" in elements and isinstance(elements["result"], list):
                number_of_elements_in_current_batch = len(elements.get("result"))
            else:
                raise CheckException('Method %s has no result' % collect_function)
            completed = number_of_elements_in_current_batch < instance_info.batch_size
            collection.extend(elements['result'])
            batch_number += 1
            offset += instance_info.batch_size
            self.log.info(
                '%s processed batch no. %d with %d items.',
                collect_function.__name__, batch_number, number_of_elements_in_current_batch
            )

        return collection
Example #4
0
    def _search_chunk(self, saved_search, search_id, offset, count):
        """
        Retrieves the results of an already running splunk search, identified by the given search id.
        :param saved_search: current SavedSearch being processed
        :param search_id: perform a search operation on the search id
        :param offset: starting offset, begin is 0, to start retrieving from
        :param count: the maximum number of elements expecting to be returned by the API call
        :return: raw json response from splunk
        """
        search_path = '/servicesNS/-/-/search/jobs/%s/results?output_mode=json&offset=%s&count=%s' %\
                      (search_id, offset, count)
        response = self._do_get(search_path,
                                saved_search.request_timeout_seconds,
                                self.instance_config.verify_ssl_certificate)
        retry_count = 0

        # retry until information is available.
        while response.status_code == 204:  # HTTP No Content response
            self.log.debug(
                "Splunk has no result available yet for saved search {}. Going to retry".format(saved_search.name))
            if retry_count == saved_search.search_max_retry_count:
                raise CheckException(
                    "maximum retries reached for %s with saved search %s" %
                    (self.instance_config.base_url, saved_search.name))
            retry_count += 1
            time.sleep(saved_search.search_seconds_between_retries)
            response = self._do_get(search_path,
                                    saved_search.request_timeout_seconds,
                                    self.instance_config.verify_ssl_certificate)

        return response.json()
Example #5
0
    def __init__(self, instance, init_config):
        """
        Build per-instance state for the splunk topology check.

        :param instance: instance configuration dict (mutated: missing or
            non-list saved-search entries are normalized to empty lists)
        :param init_config: the check's init_config dict
        :raises CheckException: when the deprecated `polling_interval_seconds`
            option is present
        """
        self.instance_config = InstanceConfig(instance, init_config)
        self.splunk_client = self._build_splunk_client()

        # Snapshots are enabled unless explicitly turned off.
        self.snapshot = bool(instance.get('snapshot', True))

        # No saved searches may be configured: the keys can be absent or null.
        # Use .get() so a missing key normalizes to [] instead of raising KeyError.
        if not isinstance(instance.get('component_saved_searches'), list):
            instance['component_saved_searches'] = []
        if not isinstance(instance.get('relation_saved_searches'), list):
            instance['relation_saved_searches'] = []

        # transform component and relation saved searches to SavedSearch objects
        components = [
            SavedSearch("component", self.instance_config,
                        saved_search_instance)
            for saved_search_instance in instance['component_saved_searches']
        ]
        relations = [
            SavedSearch("relation", self.instance_config,
                        saved_search_instance)
            for saved_search_instance in instance['relation_saved_searches']
        ]

        self.saved_searches = SavedSearches(self.instance_config,
                                            self.splunk_client,
                                            components + relations)

        if 'polling_interval_seconds' in instance:
            raise CheckException(
                "deprecated config `polling_interval_seconds` found. Please use the new collection_interval."
            )
def take_required_field(field, obj):
    """
    Pop *field* from *obj* and return its value.

    The field is removed from the object; a CheckException is raised when the
    field is not present in the result data.
    """
    try:
        return obj.pop(field)
    except KeyError:
        raise CheckException("Missing '%s' field in result data" % field)
Example #7
0
    def check(self, instance):
        """
        Run one topology collection cycle.

        Builds (and caches) the per-instance wrapper on first invocation, runs
        all configured saved searches, and emits a critical service check plus
        an exception log entry on failure.
        """
        # Build and cache the instance wrapper on first run only.
        if self.instance_data is None:
            self.instance_data = self._build_instance(instance)

        # State that persists between check runs, loaded from the raw config.
        committable_state = CommittableState(self.commit_state,
                                             self.load_state(instance))

        # From here on `instance` refers to the built wrapper, not the raw config dict.
        instance = self.instance_data

        if instance.snapshot:
            self.start_snapshot()

        try:
            instance.splunk_client.auth_session(committable_state)

            def _service_check(status, tags=None, hostname=None, message=None):
                # Adapter so saved-search code can emit this check's service check.
                self.service_check(self.SERVICE_CHECK_NAME, status, tags,
                                   hostname, message)

            def _process_data(saved_search, response):
                # Dispatch a saved-search response to the matching extractor.
                if saved_search.element_type == "component":
                    return self._extract_components(instance, response)
                elif saved_search.element_type == "relation":
                    return self._extract_relations(instance, response)

            instance.saved_searches.run_saved_searches(_process_data,
                                                       _service_check,
                                                       self.log,
                                                       committable_state)

            # Only close the snapshot when the run succeeded end-to-end.
            if instance.snapshot:
                self.stop_snapshot()
        except TokenExpiredException as e:
            # NOTE(review): relies on a custom `.message` attribute on the
            # exception — confirm TokenExpiredException defines it.
            self.service_check(self.SERVICE_CHECK_NAME,
                               AgentCheck.CRITICAL,
                               tags=instance.instance_config.tags,
                               message=str(e.message))
            self.log.exception("Splunk topology exception: %s" % str(e))
        except Exception as e:
            self.service_check(self.SERVICE_CHECK_NAME,
                               AgentCheck.CRITICAL,
                               tags=instance.instance_config.tags,
                               message=str(e))
            self.log.exception("Splunk topology exception: %s" % str(e))
            # Re-raise (preserving the original traceback) unless errors are ignored.
            if not instance.instance_config.ignore_saved_search_errors:
                raise CheckException(
                    "Splunk topology failed with message: %s" % e, None,
                    sys.exc_info()[2])
Example #8
0
    def saved_search_results(self, search_id, saved_search):
        """
        Perform a saved search, returns a list of responses that were received.

        Pages are requested in batches of ``saved_search.batch_size``; paging
        stops as soon as a page comes back smaller than the batch size.
        :raises CheckException: when splunk reports a FATAL message
        """
        responses = []
        offset = 0
        batch_size = saved_search.batch_size

        while True:
            response = self._search_chunk(saved_search, search_id, offset, batch_size)
            # A FATAL message means the search itself failed server-side.
            for message in response['messages']:
                if message['type'] == "FATAL":
                    raise CheckException("Received FATAL exception from Splunk, got: " + message['text'])

            responses.append(response)
            page_size = len(response['results'])
            offset += page_size
            if page_size != batch_size:
                break

        return responses
Example #9
0
    def __init__(self, instance, init_config):
        """Build the splunk instance config with this check's default settings."""
        defaults = {
            'default_request_timeout_seconds': 5,
            'default_search_max_retry_count': 3,
            'default_search_seconds_between_retries': 1,
            'default_verify_ssl_certificate': False,
            'default_batch_size': 1000,
            'default_saved_searches_parallel': 3,
            'default_app': "search",
            'default_parameters': {
                "force_dispatch": True,
                "dispatch.now": True
            },
        }
        super(InstanceConfig, self).__init__(instance, init_config, defaults)

        if 'default_polling_interval_seconds' in init_config:
            raise CheckException(
                "deprecated config `init_config.default_polling_interval_seconds` found."
                " Please use the new collection_interval on the instance config."
            )
    def __init__(self, instance, init_config, defaults):
        """
        Shared splunk instance configuration.

        Reads connection, batching and authentication settings from the
        instance/init_config dicts, falling back to the supplied defaults.

        :param instance: instance configuration dict
        :param init_config: init_config dict (consulted by get_or_default)
        :param defaults: dict of fallback values used by get_or_default
        :raises CheckException: when required url/authentication settings are missing
        """
        self.log = logging.getLogger(__name__)

        self.defaults = defaults
        self.init_config = init_config

        # Per-check defaults: init_config value when present, hard-coded fallback otherwise.
        self.default_request_timeout_seconds = self.get_or_default('default_request_timeout_seconds')
        self.default_search_max_retry_count = self.get_or_default('default_search_max_retry_count')
        self.default_search_seconds_between_retries = self.get_or_default('default_search_seconds_between_retries')
        self.default_verify_ssl_certificate = self.get_or_default('default_verify_ssl_certificate')
        self.default_batch_size = self.get_or_default('default_batch_size')
        self.default_saved_searches_parallel = self.get_or_default('default_saved_searches_parallel')
        self.default_app = self.get_or_default('default_app')
        self.default_parameters = self.get_or_default('default_parameters')

        self.verify_ssl_certificate = bool(
            instance.get('verify_ssl_certificate', self.default_verify_ssl_certificate))

        if 'url' not in instance:
            raise CheckException('Instance is missing "url" value.')
        self.base_url = instance['url']

        if 'authentication' in instance:
            authentication = instance["authentication"]
            if 'token_auth' in authentication:
                token_auth = authentication["token_auth"]
                # All three keys are mandatory for token auth.
                for required in ('name', 'initial_token', 'audience'):
                    if required not in token_auth:
                        raise CheckException(
                            'Instance missing "authentication.token_auth.%s" value' % required)
                self.auth_type = AuthType.TokenAuth
                self.audience = token_auth.get("audience")
                self.initial_token = token_auth.get("initial_token")
                self.name = token_auth.get("name")
                self.token_expiration_days = token_auth.get("token_expiration_days", 90)
                self.renewal_days = token_auth.get("renewal_days", 10)
            elif 'basic_auth' in authentication:
                basic_auth = authentication["basic_auth"]
                for required in ('username', 'password'):
                    if required not in basic_auth:
                        raise CheckException(
                            'Instance missing "authentication.basic_auth.%s" value' % required)
                self.auth_type = AuthType.BasicAuth
                self.username = basic_auth.get("username")
                self.password = basic_auth.get("password")
            else:
                raise CheckException(
                    'Instance missing "authentication.basic_auth" or '
                    '"authentication.token_auth" value')
        elif instance.get('username') is not None and instance.get('password') is not None:
            # Legacy flat username/password configuration.
            self.log.warning(
                "This username and password configuration will be deprecated soon. Please use the new "
                "updated configuration from the conf")
            self.username = instance.get('username')
            self.password = instance.get('password')
            self.auth_type = AuthType.BasicAuth
        else:
            raise CheckException("Instance missing 'authentication'.")

        self.ignore_saved_search_errors = instance.get('ignore_saved_search_errors', False)
        self.saved_searches_parallel = int(
            instance.get('saved_searches_parallel', self.default_saved_searches_parallel))
        self.tags = instance.get('tags', [])
 def saved_search_results(self, search_id, saved_search):
     """
     Test double for saved_search_results: raise for the sentinel id
     "exception", otherwise return the fixture named after the search id.
     """
     if search_id == "exception":
         raise CheckException("maximum retries reached for saved search " + str(search_id))
     # sid is set to saved search name
     return [load_fixture("%s.json" % search_id)]
    def _process_saved_search(self, process_data, service_check, log,
                              search_id, saved_search, start_time):
        count = 0
        fail_count = 0

        try:
            responses = self.splunk_client.saved_search_results(
                search_id, saved_search)

            for response in responses:
                for message in response['messages']:
                    if message['type'] != "FATAL" and message['type'] != "INFO":
                        log.info(
                            "Received unhandled message for saved search %s, got: %s"
                            % (saved_search.name, message))

                count += len(response["results"])
                fail_count += process_data(saved_search, response)

            log.debug(
                "Saved search done: %s in time %d with results %d of which %d failed"
                % (saved_search.name, time.time() - start_time, count,
                   fail_count))

            if fail_count != 0:
                if (fail_count != count) and (count != 0):
                    msg = "The saved search '%s' contained %d incomplete records" % (
                        saved_search.name, fail_count)
                    service_check(AgentCheck.WARNING,
                                  tags=self.instance_config.tags,
                                  message=msg)
                    log.warn(msg)
                    return False
                elif count != 0:
                    raise CheckException(
                        "All result of saved search '%s' contained incomplete data"
                        % saved_search.name)

        except CheckException as e:
            if not self.instance_config.ignore_saved_search_errors:
                log.error(
                    "Received Check exception while processing saved search " +
                    saved_search.name)
                raise e
            log.warning(
                "Check exception occured %s while processing saved search name %s"
                % (str(e), saved_search.name))
            service_check(AgentCheck.WARNING,
                          tags=self.instance_config.tags,
                          message=str(e))
            return False
        except Exception as e:
            if not self.instance_config.ignore_saved_search_errors:
                log.error(
                    "Received an exception while processing saved search " +
                    saved_search.name)
                raise e
            log.warning(
                "Got an error %s while processing saved search name %s" %
                (str(e), saved_search.name))
            service_check(AgentCheck.WARNING,
                          tags=self.instance_config.tags,
                          message=str(e))
            return False

        return True