Code example #1
 def get_api_client(self):
     """Get api client."""
     api_key = get_api_key(self.session_key, self.logger)
     if not api_key:
         self._handle_alert_exit(1)
     return GreyNoise(api_key=api_key,
                      timeout=120,
                      integration_name=INTEGRATION_NAME)
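
A minimal usage sketch, assuming the method above lives on an alert-action class that already provides session_key, logger, and _handle_alert_exit (all visible in the snippet); quick() is the GreyNoise SDK's bulk noise lookup:

    api_client = self.get_api_client()
    # quick() returns one dict per IP, e.g. [{"ip": "8.8.8.8", "noise": False, ...}]
    noise_status = api_client.quick(["8.8.8.8"])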
Code example #2
    def generate(self):
        """Yield events produced by the subclass's do_generate implementation."""
        try:
            # Set up the logger
            logger = utility.setup_logger(
                session_key=self._metadata.searchinfo.session_key,
                log_context=self._metadata.searchinfo.command)

            try:
                message = ''
                api_key = utility.get_api_key(self._metadata.searchinfo.session_key, logger=logger)
            except (APIKeyNotFoundError, HTTPError) as e:
                message = str(e)
            
            if message:
                logger.error("Error occured while retrieving API key, Error: {}".format(message))
                self.write_error(message)
                exit(1)

            # Call do_generate on the subclass that invoked this method
            # and yield the generated events
            for event in self.do_generate(api_key, logger):
                yield event

        except RateLimitError:
            logger.error("Rate limit error occured while executing the custom command.")
            self.write_error("The Rate Limit has been exceeded. Please contact the Administrator")
        except RequestFailure as e:

            response_code, response_message = e.args
            if response_code == 401:
                msg = "Unauthorized. Please check your API key."
            else:
                # Handle this manually, as splunklib cannot handle exceptions in the
                # (400, {'error': 'error_reason'}) format
                msg = "The API call to the GreyNoise platform has failed with status_code: " + str(response_code) + " and error: "
                # The error message may be under the 'error' or 'message' key with the addition of the RIOT endpoint
                if isinstance(response_message, dict):
                    if 'error' in response_message:
                        msg += response_message['error']
                    elif 'message' in response_message:
                        msg += response_message['message']
                else:
                    msg += response_message

            logger.error("{}".format(str(msg)))
            self.write_error(msg)
        except ConnectionError:
            logger.error("Error while connecting to the Server. Please check your connection and try again.")
            self.write_error("Error while connecting to the Server. Please check your connection and try again.")
        except RequestException:
            logger.error("There was an ambiguous exception that occurred while handling your Request. Please try again.")
            self.write_error("There was an ambiguous exception that occurred while handling your Request. Please try again.")
        except Exception:
            logger.error("Exception occurred while executing the custom command, Exception: {} ".format(str(traceback.format_exc())))
            self.write_error("Exception occurred while executing the {custom_command} custom command. See greynoise_main.log for more details.".format(custom_command=str(self._metadata.searchinfo.command)))
Code example #3
    def generate(self):
        """Method that yields records to the Splunk processing pipeline."""
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        # Enter the mechanism only when the Search is complete and all the events are available
        if self.search_results_info and not self.metadata.preview:

            try:
                api_key = utility.get_api_key(
                    self._metadata.searchinfo.session_key, logger=logger)

                # Exit the search if the API key is not available.
                if not api_key:
                    logger.error(
                        "API key not found. Please configure the GreyNoise App for Splunk."
                    )
                    exit(1)

                # Opting for a 240-second timeout for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=240,
                                       integration_name=INTEGRATION_NAME)

                queries = {
                    "malicious": "classification:malicious last_seen:today",
                    "benign": "classification:benign last_seen:today",
                    "unknown": "classification:unknown last_seen:today"
                }

                for key, value in queries.items():
                    logger.debug(
                        "Fetching records for classification: {}".format(key))
                    stats_data = api_client.stats(value, None)
                    if stats_data.get("stats"):
                        self.handle_stats(stats_data.get("stats"), key)
                    else:
                        logger.error(
                            "Returning no results because of an unexpected response in one of the queries."
                        )
                        exit(1)

                for result in self.RESULTS:
                    yield result
                logger.info("Events returned successfully to Splunk.")

            except Exception:
                logger.error("Exception: {} ".format(
                    str(traceback.format_exc())))
                exit(1)
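
handle_stats and RESULTS come from the surrounding class, outside this excerpt. A minimal sketch of what handle_stats might look like, assuming the GreyNoise stats payload maps aggregation names (e.g. "classifications", "countries") to lists of bucket dicts; the exact payload shape is an assumption:

    def handle_stats(self, stats, classification):
        """Flatten each aggregation bucket into a record tagged with its classification."""
        for aggregation_name, buckets in stats.items():
            if not isinstance(buckets, list):
                continue  # skip any scalar entries in the payload
            for bucket in buckets:
                record = dict(bucket)  # e.g. {"country": "US", "count": 42}
                record["aggregation"] = aggregation_name
                record["classification"] = classification
                self.RESULTS.append(record)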
Code example #4
    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        ip_addresses = self.ip
        ip_field = self.ip_field
        api_key = ""
        EVENTS_PER_CHUNK = 5000
        THREADS = 3
        USE_CACHE = False
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        if ip_addresses and ip_field:
            logger.error(
                "Please use parameter ip to work gnquick as generating command or "
                "use parameter ip_field to work gnquick as transforming command."
            )
            self.write_error(
                "Please use parameter ip to work gnquick as generating command or "
                "use parameter ip_field to work gnquick as transforming command"
            )
            exit(1)

        try:
            message = ''
            api_key = utility.get_api_key(
                self._metadata.searchinfo.session_key, logger=logger)
        except (APIKeyNotFoundError, HTTPError) as e:
            message = str(e)

        if message:
            self.write_error(message)
            logger.error(
                "Error occurred while retrieving API key: {}".format(message))
            exit(1)

        if ip_addresses and not ip_field:
            # This piece of code works as a generating command and does not use the Splunk events.
            # Split ip_addresses on commas and strip surrounding spaces from each IP address
            ip_addresses = [ip.strip() for ip in ip_addresses.split(',')]

            logger.info("Started retrieving results")
            try:
                logger.debug(
                    "Initiating to fetch noise and RIOT status for IP address(es): {}"
                    .format(str(ip_addresses)))

                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       integration_name=INTEGRATION_NAME)

                # CACHING START
                cache_enabled, cache_client = utility.get_caching(
                    self._metadata.searchinfo.session_key, 'multi', logger)
                if int(cache_enabled) == 1 and cache_client is not None:
                    cache_start = time.time()
                    ips_not_in_cache, ips_in_cache = utility.get_ips_not_in_cache(
                        cache_client, ip_addresses, logger)
                    try:
                        response = []
                        if len(ips_in_cache) >= 1:
                            response = cache_client.query_kv_store(
                                ips_in_cache)
                        if response is None:
                            logger.debug(
                                "KVStore is not ready. Skipping caching mechanism."
                            )
                            noise_status = api_client.quick(ip_addresses)
                        elif response == []:
                            noise_status = utility.fetch_response_from_api(
                                api_client.quick, cache_client, ip_addresses,
                                logger)
                        else:
                            noise_status = utility.fetch_response_from_api(
                                api_client.quick, cache_client,
                                ips_not_in_cache, logger)
                            noise_status.extend(response)
                    except Exception:
                        logger.debug(
                            "An exception occurred while fetching response from cache.\n{}"
                            .format(traceback.format_exc()))
                    logger.debug(
                        "Generating command with caching took {} seconds.".
                        format(time.time() - cache_start))
                else:
                    # Opting for a 120-second timeout for the requests
                    noise_status = api_client.quick(ip_addresses)
                logger.info("Retrieved results successfully")
                # CACHING END

                # Process the API response and send each IP's noise and RIOT status to Splunk
                # with field extractions. The flag below works around a field-extraction issue
                # in custom commands: only the fields present in the first event generated by
                # the command are extracted across all events.
                first_record_flag = True

                # Flag to indicate whether erroneous IPs are present
                erroneous_ip_present = False
                for ip in ip_addresses:
                    for sample in noise_status:
                        if ip == sample['ip']:
                            yield event_generator.make_valid_event(
                                'quick', sample, first_record_flag)
                            if first_record_flag:
                                first_record_flag = False
                            logger.debug(
                                "Fetched noise and RIOT status for ip={} from GreyNoise API"
                                .format(str(ip)))
                            break
                    else:
                        erroneous_ip_present = True
                        try:
                            validate_ip(ip, strict=True)
                        except ValueError as e:
                            error_msg = str(e).split(":")
                            logger.debug(
                                "Generating noise and RIOT status for ip={} manually"
                                .format(str(ip)))
                            event = {'ip': ip, 'error': error_msg[0]}
                            yield event_generator.make_invalid_event(
                                'quick', event, first_record_flag)

                            if first_record_flag:
                                first_record_flag = False

                if erroneous_ip_present:
                    logger.warning(
                        "Value of one or more IP address(es) is either invalid or non-routable"
                    )
                    self.write_warning(
                        "Value of one or more IP address(es) passed to {command_name} "
                        "is either invalid or non-routable".format(
                            command_name=str(
                                self._metadata.searchinfo.command)))

            except RateLimitError:
                logger.error(
                    "Rate limit error occured while fetching the context information for ips={}"
                    .format(str(ip_addresses)))
                self.write_error(
                    "The Rate Limit has been exceeded. Please contact the Administrator"
                )
            except RequestFailure as e:
                response_code, response_message = e.args
                if response_code == 401:
                    msg = "Unauthorized. Please check your API key."
                else:
                    # Handle this manually, as splunklib cannot handle exceptions in the
                    # (400, {'error': 'error_reason'}) format
                    msg = (
                        "The API call to the GreyNoise platform has failed "
                        "with status_code: {} and error: {}").format(
                            response_code,
                            response_message['error'] if isinstance(
                                response_message, dict) else response_message)

                logger.error("{}".format(str(msg)))
                self.write_error(msg)
            except ConnectionError:
                logger.error(
                    "Error while connecting to the Server. Please check your connection and try again."
                )
                self.write_error(
                    "Error while connecting to the Server. Please check your connection and try again."
                )
            except RequestException:
                logger.error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again."
                )
                self.write_error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again."
                )
            except Exception:
                logger.error("Exception: {} ".format(
                    str(traceback.format_exc())))
                self.write_error(
                    "Exception occured while fetching the noise and RIOT status of the IP address(es). "
                    "See greynoise_main.log for more details.")

        elif ip_field:
            # Enter the mechanism only when the Search is complete and all the events are available
            if self.search_results_info and not self.metadata.preview:

                try:
                    # Strip the spaces from the parameter value if given
                    ip_field = ip_field.strip()
                    # Validating the given parameter
                    try:
                        ip_field = validator.Fieldname(
                            option_name='ip_field').validate(ip_field)
                    except ValueError as e:
                        # Validator will throw ValueError with error message when the parameters are not proper
                        logger.error(str(e))
                        self.write_error(str(e))
                        exit(1)

                    # API key validation
                    if not self.api_validation_flag:
                        api_key_validation, message = utility.validate_api_key(
                            api_key, logger)
                        logger.debug(
                            "API validation status: {}, message: {}".format(
                                api_key_validation, str(message)))
                        self.api_validation_flag = True
                        if not api_key_validation:
                            logger.info(message)
                            self.write_error(message)
                            exit(1)

                    # This piece of code works as a transforming command and uses
                    # the Splunk-ingested events and the field specified in ip_field.
                    chunk_dict = event_generator.batch(records, ip_field,
                                                       EVENTS_PER_CHUNK,
                                                       logger)

                    # This means there are at most 1000 distinct IPs to call in the entire
                    # batch of records; use a single thread with the caching mechanism enabled for the chunk
                    if len(chunk_dict) == 1:
                        logger.info(
                            "Fewer than 1000 distinct IPs are present, "
                            "optimizing the IP requests call to GreyNoise API..."
                        )
                        THREADS = 1
                        USE_CACHE = True

                    api_client = GreyNoise(api_key=api_key,
                                           timeout=120,
                                           use_cache=USE_CACHE,
                                           integration_name=INTEGRATION_NAME)
                    # When no records are found, batch returns {0: ([], [])}
                    tot_time_start = time.time()
                    if len(list(chunk_dict.values())[0][0]) >= 1:
                        for event in event_generator.get_all_events(
                                self._metadata.searchinfo.session_key,
                                api_client,
                                'multi',
                                ip_field,
                                chunk_dict,
                                logger,
                                threads=THREADS):
                            yield event
                    else:
                        logger.info(
                            "No events found, please increase the search timespan to have more search results."
                        )
                    tot_time_end = time.time()
                    logger.debug(
                        "Total execution time => {}".format(tot_time_end -
                                                            tot_time_start))
                except Exception:
                    logger.info(
                        "Exception occured while adding the noise and RIOT status to the events, Error: {}"
                        .format(traceback.format_exc()))
                    self.write_error(
                        "Exception occured while adding the noise and RIOT status of "
                        "the IP addresses to events. See greynoise_main.log for more details."
                    )

        else:
            logger.error(
                "Please specify exactly one of the parameters ip or ip_field with a value."
            )
            self.write_error(
                "Please specify exactly one of the parameters ip or ip_field with a value."
            )
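
One subtlety in the generating branch above is the for/else: the else clause runs only when the inner loop finishes without break, i.e. when no API sample matched the IP. A standalone sketch of the same pattern with toy data:

    noise_status = [{"ip": "8.8.8.8", "noise": False}]
    for ip in ["8.8.8.8", "not-an-ip"]:
        for sample in noise_status:
            if ip == sample["ip"]:
                print("matched", ip)
                break
        else:
            # No break happened: the API returned nothing for this IP
            print("unmatched", ip)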
Code example #5
File: gnfilter.py (Project: mtonsmann/SA-GreyNoise)
    def transform(self, records):
        """Filter the Splunk events based on the noise status of the IPs in ip_field."""
        method = 'filter'

        # Setup logger
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        # Enter the mechanism only when the Search is complete and all the events are available
        if self.search_results_info and not self.metadata.preview:

            EVENTS_PER_CHUNK = 1000
            THREADS = 3
            USE_CACHE = False
            ip_field = self.ip_field
            noise_events = self.noise_events

            logger.info(
                "Started filtering the IP address(es) present in field: {}, with noise_status: {}"
                .format(str(ip_field), str(noise_events)))

            try:
                if ip_field:
                    ip_field = ip_field.strip()
                if noise_events:
                    noise_events = noise_events.strip()
                # Validating the given parameters
                try:
                    ip_field = validator.Fieldname(
                        option_name='ip_field').validate(ip_field)
                    noise_events = validator.Boolean(
                        option_name='noise_events').validate(noise_events)
                except ValueError as e:
                    # Validator will throw ValueError with error message when the parameters are not proper
                    logger.error(str(e))
                    self.write_error(str(e))
                    exit(1)

                try:
                    message = ''
                    api_key = utility.get_api_key(
                        self._metadata.searchinfo.session_key, logger=logger)
                except (APIKeyNotFoundError, HTTPError) as e:
                    message = str(e)

                if message:
                    self.write_error(message)
                    logger.error(
                        "Error occurred while retrieving API key: {}".format(message))
                    exit(1)

                # API key validation
                api_key_validation, message = utility.validate_api_key(
                    api_key, logger)
                logger.debug("API validation status: {}, message: {}".format(
                    api_key_validation, str(message)))
                if not api_key_validation:
                    logger.info(message)
                    self.write_error(message)
                    exit(1)

                # Divide the records into a dict of tuples keyed by chunk_index:
                # {<index>: (<records>, <all the IPs in the records>)}
                chunk_dict = event_generator.batch(records, ip_field,
                                                   EVENTS_PER_CHUNK, logger)
                logger.debug("Successfully divided events into chunks")

                # This means there are at most 1000 distinct IPs to call in the entire
                # batch of records; use a single thread with the caching mechanism enabled for the chunk
                if len(chunk_dict) == 1:
                    logger.info(
                        "Fewer than 1000 distinct IPs are present, optimizing the IP requests call to GreyNoise API..."
                    )
                    THREADS = 1
                    USE_CACHE = True

                # Opting for a 120-second timeout for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       use_cache=USE_CACHE,
                                       integration_name="Splunk")

                # When no records are found, batch returns {0: ([], [])}
                if len(list(chunk_dict.values())[0][0]) >= 1:
                    for chunk_index, result in event_generator.get_all_events(
                            api_client,
                            method,
                            ip_field,
                            chunk_dict,
                            logger,
                            threads=THREADS):
                        # Pass the collected data to the event filter method
                        for event in event_filter(chunk_index, result,
                                                  chunk_dict[chunk_index],
                                                  ip_field, noise_events,
                                                  method):
                            yield event

                        # Deleting the chunk with the events that are already indexed
                        del chunk_dict[chunk_index]

                    logger.info(
                        "Successfully sent all the results to Splunk")
                else:
                    logger.info(
                        "No events found, please increase the search timespan to have more search results."
                    )

            except Exception:
                logger.info(
                    "Exception occurred while filtering events, Error: {}".format(
                        traceback.format_exc()))
                self.write_error(
                    "Exception occurred while filtering the events based on noise status. See greynoise_main.log for more details."
                )
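
event_filter is defined outside the excerpt; a hypothetical sketch of its contract, inferred from the call site above (the real SA-GreyNoise implementation may differ):

    def event_filter(chunk_index, result, chunk, ip_field, noise_events, method):
        """Yield only the events whose IP noise status matches the requested flag."""
        records, _ips = chunk
        noise_by_ip = {sample["ip"]: sample.get("noise", False) for sample in result}
        for record in records:
            if noise_by_ip.get(record.get(ip_field)) == noise_events:
                yield record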
Code example #6
    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        ip_address = self.ip
        ip_field = self.ip_field
        api_key = ""
        EVENTS_PER_CHUNK = 1
        THREADS = 3
        USE_CACHE = False
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key, log_context=self._metadata.searchinfo.command)

        if ip_address and ip_field:
            logger.error("Please use parameter ip to work gnriot as generating command or "
                         "use parameter ip_field to work gnriot as transforming command.")
            self.write_error("Please use parameter ip to work gnriot as generating command or "
                             "use parameter ip_field to work gnriot as transforming command")
            exit(1)

        try:
            message = ''
            api_key = utility.get_api_key(self._metadata.searchinfo.session_key, logger=logger)
        except (APIKeyNotFoundError, HTTPError) as e:
            message = str(e)

        if message:
            self.write_error(message)
            logger.error("Error occured while retrieving API key, Error: {}".format(message))
            exit(1)

        if ip_address and not ip_field:
            # This piece of code works as a generating command and does not use the Splunk events.
            # Strip the spaces from the parameter value if given
            ip_address = ip_address.strip()

            logger.info("Started retrieving results")
            try:
                logger.debug("Initiating to fetch RIOT information for IP address: {}".format(str(ip_address)))
                # Opting for a 120-second timeout for the requests
                api_client = GreyNoise(api_key=api_key, timeout=120, integration_name=INTEGRATION_NAME)
                session_key = self._metadata.searchinfo.session_key
                riot_information = utility.get_response_for_generating(
                    session_key, api_client, ip_address, 'greynoise_riot', logger)
                logger.info("Retrieved results successfully")

                # Process the API response and send the riot information of IP with extractions to the Splunk
                yield event_generator.make_valid_event('riot', riot_information, True)
                logger.debug("Fetched RIOT information for ip={} from GreyNoise API".format(str(ip_address)))

            except ValueError as e:
                error_msg = str(e).split(":")
                logger.debug("Generating RIOT information for ip={} manually".format(str(ip_address)))
                event = {
                    'ip': ip_address,
                    'error': error_msg[0]
                }
                yield event_generator.make_invalid_event('riot', event, True)
                logger.warning(error_msg)
                self.write_warning(
                    "Value of IP address passed to {command_name} is either invalid or non-routable".format(
                        command_name=str(self._metadata.searchinfo.command)))
            except RateLimitError:
                logger.error("Rate limit error occured while fetching the context information for ip={}".format(
                    str(ip_address)))
                self.write_error("The Rate Limit has been exceeded. Please contact the Administrator")
            except RequestFailure as e:
                response_code, response_message = e.args
                if response_code == 401:
                    msg = "Unauthorized. Please check your API key."
                else:
                    # Handle this manually, as splunklib cannot handle exceptions in the
                    # (400, {'error': 'error_reason'}) format
                    msg = ("The API call to the GreyNoise platform has failed "
                           "with status_code: {} and error: {}").format(
                        response_code, response_message['error'] if isinstance(response_message, dict)
                        else response_message)

                logger.error("{}".format(str(msg)))
                self.write_error(msg)
            except ConnectionError:
                logger.error("Error while connecting to the Server. Please check your connection and try again.")
                self.write_error("Error while connecting to the Server. Please check your connection and try again.")
            except RequestException:
                logger.error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again.")
                self.write_error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again.")
            except Exception:
                logger.error("Exception: {} ".format(str(traceback.format_exc())))
                self.write_error("Exception occured while fetching the RIOT information of the IP address. "
                                 "See greynoise_main.log for more details.")

        elif ip_field:

            logger.info("Started retrieving RIOT information for the IP addresses present in field: {}".format(
                str(ip_field)))
            # Enter the mechanism only when the Search is complete and all the events are available
            if self.search_results_info and not self.metadata.preview:
                try:
                    # Strip the spaces from the parameter value if given
                    ip_field = ip_field.strip()
                    # Validating the given parameter
                    try:
                        ip_field = validator.Fieldname(option_name='ip_field').validate(ip_field)
                    except ValueError as e:
                        # Validator will throw ValueError with error message when the parameters are not proper
                        logger.error(str(e))
                        self.write_error(str(e))
                        exit(1)

                    # API key validation
                    if not self.api_validation_flag:
                        api_key_validation, message = utility.validate_api_key(api_key, logger)
                        logger.debug("API validation status: {}, message: {}".format(api_key_validation, str(message)))
                        self.api_validation_flag = True
                        if not api_key_validation:
                            logger.info(message)
                            self.write_error(message)
                            exit(1)

                    # This piece of code works as a transforming command and uses
                    # the Splunk-ingested events and the field specified in ip_field.
                    # Divide the records into a dict of tuples keyed by chunk_index:
                    # {<index>: (<records>, <all the IPs in the records>)}
                    chunk_dict = event_generator.batch(
                        records, ip_field, EVENTS_PER_CHUNK, logger, optimize_requests=False)
                    logger.debug("Successfully divided events into chunks")

                    # This means there are at most 1000 distinct IPs to call in the entire
                    # batch of records; use a single thread with the caching mechanism enabled for the chunk
                    if len(chunk_dict) == 1:
                        logger.debug("Fewer than 1000 distinct IPs are present, "
                                     "optimizing the IP requests call to GreyNoise API...")
                        THREADS = 1
                        USE_CACHE = True

                    api_client = GreyNoise(
                        api_key=api_key, timeout=120, use_cache=USE_CACHE, integration_name=INTEGRATION_NAME)

                    # When no records are found, batch returns {0: ([], [])}
                    if len(chunk_dict) > 0:
                        for event in event_generator.get_all_events(
                                self._metadata.searchinfo.session_key, api_client, 'greynoise_riot', ip_field,
                                chunk_dict, logger, threads=THREADS):
                            yield event

                        logger.info("Successfully sent all the results to the Splunk")
                    else:
                        logger.info("No events found, please increase the search timespan to have more search results.")
                except Exception:
                    logger.info(
                        "Exception occured while adding the RIOT information to the events, Error: {}".format(
                            traceback.format_exc()))
                    self.write_error("Exception occured while adding the RIOT information of the IP addresses "
                                     "to events. See greynoise_main.log for more details.")

        else:
            logger.error("Please specify exactly one parameter from ip and ip_field with some value.")
            self.write_error("Please specify exactly one parameter from ip and ip_field with some value.")
Code example #7
    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        # Setup logger
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        if self.search_results_info and not self.metadata.preview:

            EVENTS_PER_CHUNK = 1
            THREADS = 3
            USE_CACHE = False
            ip_field = self.ip_field

            logger.info(
                "Started retrieving context information for the IP addresses present in field: {}"
                .format(str(ip_field)))

            try:
                # Strip the spaces from the parameter value if given
                if ip_field:
                    ip_field = ip_field.strip()
                # Validating the given parameters
                try:
                    ip_field = validator.Fieldname(
                        option_name='ip_field').validate(ip_field)
                except ValueError as e:
                    # Validator will throw ValueError with error message when the parameters are not proper
                    logger.error(str(e))
                    self.write_error(str(e))
                    exit(1)

                try:
                    message = ''
                    api_key = utility.get_api_key(
                        self._metadata.searchinfo.session_key, logger=logger)
                except (APIKeyNotFoundError, HTTPError) as e:
                    message = str(e)

                if message:
                    self.write_error(message)
                    logger.error(
                        "Error occurred while retrieving API key: {}".format(message))
                    exit(1)

                # API key validation
                if not self.api_validation_flag:
                    api_key_validation, message = utility.validate_api_key(
                        api_key, logger)
                    logger.debug(
                        "API validation status: {}, message: {}".format(
                            api_key_validation, str(message)))
                    self.api_validation_flag = True
                    if not api_key_validation:
                        logger.info(message)
                        self.write_error(message)
                        exit(1)

                # Divide the records into a dict of tuples keyed by chunk_index:
                # {<index>: (<records>, <all the IPs in the records>)}
                chunk_dict = event_generator.batch(records,
                                                   ip_field,
                                                   EVENTS_PER_CHUNK,
                                                   logger,
                                                   optimize_requests=False)
                logger.debug("Successfully divided events into chunks")

                # This means there are at most 1000 distinct IPs to call in the entire
                # batch of records; use a single thread with the caching mechanism enabled for the chunk
                if len(chunk_dict) == 1:
                    logger.debug(
                        "Fewer than 1000 distinct IPs are present, optimizing the IP requests call to GreyNoise API..."
                    )
                    THREADS = 1
                    USE_CACHE = True

                # Opting for a 120-second timeout for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       use_cache=USE_CACHE,
                                       integration_name=INTEGRATION_NAME)

                if len(chunk_dict) > 0:
                    for event in event_generator.get_all_events(
                            self._metadata.searchinfo.session_key,
                            api_client,
                            'enrich',
                            ip_field,
                            chunk_dict,
                            logger,
                            threads=THREADS):
                        yield event

                    logger.info(
                        "Successfully sent all the results to Splunk")
                else:
                    logger.info(
                        "No events found, please increase the search timespan to have more search results."
                    )

            except Exception:
                logger.info(
                    "Exception occurred while getting context information for events, Error: {}"
                    .format(traceback.format_exc()))
                self.write_error(
                    "Exception occurred while enriching events with the context information of IP addresses. "
                    "See greynoise_main.log for more details.")
Code example #8
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 15 15:33:56 2020

@author: rayansami
"""
import requests
import CosineCalculation
import utility  # Custom module that only returns the FDC API key; excluded from the git repo to prevent abuse

# To use this program, obtain an FDC API key and change the api_key assignment below
api_key = utility.get_api_key()
url = 'https://api.nal.usda.gov/fdc/v1/foods/search'


def checkForMaxCosineSimilarItems(fooditem, jsonData):
    print('Checking item:', fooditem)
    print('Checking json:', jsonData['foods'][0]['description'])

    listi = []
    for index in range(len(jsonData['foods'])):  # Walk the foods array from index 0
        descriptionOnList = jsonData['foods'][index]['description']
        cosine_value = CosineCalculation.calculate_cosine_between_strings(
            fooditem.upper(), descriptionOnList.upper())
        listi.append({'fdcId': jsonData['foods'][index]["fdcId"],
                      'cosineValue': cosine_value})
    # Assumed completion (the excerpt is truncated here): return the highest-cosine match
    return max(listi, key=lambda item: item['cosineValue'])
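
The request that produces jsonData is outside the excerpt; a sketch of how the FoodData Central search endpoint is typically queried (api_key and query are standard parameters of the public FDC search API; the food item is illustrative):

    response = requests.get(url, params={'api_key': api_key, 'query': 'cheddar cheese'})
    jsonData = response.json()
    best_match = checkForMaxCosineSimilarItems('cheddar cheese', jsonData)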