def __init__(self, *args, **kwargs):
     """Initialize the parameters."""
     super(PurgeHandler, self).__init__(*args, **kwargs)
     self._validator = validator
     self._args = args
     self._kwargs = kwargs
     self.path = os.path.abspath(__file__)
     self.session_key_obj = GetSessionKey()
     self.logger = setup_logger(session_key=self.session_key_obj.session_key, log_context="api_validation")
 def __init__(self, *args, **kwargs):
     """Initialize the parameters."""
     super(GreyNoiseScanDeployment, self).__init__(*args, **kwargs)
     self._validator = validator
     self._args = args
     self._kwargs = kwargs
     self.path = os.path.abspath(__file__)
     self.session_key_obj = GetSessionKey()
     self.logger = setup_logger(session_key=self.session_key_obj.session_key, log_context="scan_deployment")
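Both handlers rely on a GetSessionKey helper that is not shown in these snippets. A minimal sketch of one plausible shape, assuming the handler receives the Splunk session key in a JSON payload on stdin (the payload layout is an assumption, not the app's actual code):

import json
import sys


class GetSessionKey(object):
    """Hypothetical helper: read the Splunk session key from stdin."""

    def __init__(self):
        # Persistent REST handlers receive a JSON payload from splunkd;
        # the "session"/"authtoken" keys here are assumed for illustration.
        payload = json.loads(sys.stdin.read())
        self.session_key = payload.get("session", {}).get("authtoken", "")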
    def generate(self):
        """Method that yields records to the Splunk processing pipeline."""
        try:
            # Setup logger
            logger = utility.setup_logger(session_key=self._metadata.searchinfo.session_key, log_context=self._metadata.searchinfo.command)

            try:
                message = ''
                api_key = utility.get_api_key(self._metadata.searchinfo.session_key, logger=logger)
            except APIKeyNotFoundError as e:
                message = str(e)
            except HTTPError as e:
                message = str(e)
            
            if message:
                logger.error("Error occured while retrieving API key, Error: {}".format(message))
                self.write_error(message)
                exit(1)

            # Call the do_generate method of the concrete subclass and
            # yield the events it generates
            for event in self.do_generate(api_key, logger):
                yield event

        except RateLimitError:
            logger.error("Rate limit error occured while executing the custom command.")
            self.write_error("The Rate Limit has been exceeded. Please contact the Administrator")
        except RequestFailure as e:
            response_code, response_message = e.args
            if response_code == 401:
                msg = "Unauthorized. Please check your API key."
            else:
                # Handle this explicitly, since splunklib cannot handle exceptions in the (400, {'error': 'error_reason'}) format
                msg = "The API call to the GreyNoise platform has failed with status_code: " + str(response_code) + " and error: "
                # Error Message may be in 'error' or 'message' key in dict with addition of RIOT endpoint
                if isinstance(response_message, dict):
                    if 'error' in response_message.keys():
                        msg += response_message['error']
                    elif 'message' in response_message.keys():
                        msg += response_message['message']
                else:
                    msg += response_message

            logger.error("{}".format(str(msg)))
            self.write_error(msg)
        except ConnectionError:
            logger.error("Error while connecting to the Server. Please check your connection and try again.")
            self.write_error("Error while connecting to the Server. Please check your connection and try again.")
        except RequestException:
            logger.error("There was an ambiguous exception that occurred while handling your Request. Please try again.")
            self.write_error("There was an ambiguous exception that occurred while handling your Request. Please try again.")
        except Exception as e:
            logger.error("Exception occured while executing the custom command, Exception: {} ".format(str(traceback.format_exc())))
            self.write_error("Exception occured while executing the {custom_command} custom command. See greynoise_main.log for more details.".format(custom_command=str(self._metadata.searchinfo.command)))
 def __init__(self, *args, **kwargs):
     """Initialize the parameters."""
     super(TtlHandler, self).__init__(*args, **kwargs)
     self._validator = validator
     self._args = args
     self._kwargs = kwargs
     self.path = os.path.abspath(__file__)
     self.session_key_obj = GetSessionKey()
     mgmt_port = splunk.clilib.cli_common.getMgmtUri().split(":")[-1]
     self.service = client.connect(port=mgmt_port, token=self.session_key_obj.session_key, app=APP_NAME)
     self.logger = setup_logger(session_key=self.session_key_obj.session_key, log_context="api_validation")
Example #5
    def generate(self):
        """Method that yields records to the Splunk processing pipeline."""
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        # Enter the mechanism only when the Search is complete and all the events are available
        if self.search_results_info and not self.metadata.preview:

            try:
                api_key = utility.get_api_key(
                    self._metadata.searchinfo.session_key, logger=logger)

                # Exit the search if the API key is not available.
                if not api_key:
                    logger.error(
                        "API key not found. Please configure the GreyNoise App for Splunk."
                    )
                    exit(1)

                # Using a timeout of 240 seconds for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=240,
                                       integration_name=INTEGRATION_NAME)

                queries = {
                    "malicious": "classification:malicious last_seen:today",
                    "benign": "classification:benign last_seen:today",
                    "unknown": "classification:unknown last_seen:today"
                }

                for key, value in queries.items():
                    logger.debug(
                        "Fetching records for classification: {}".format(key))
                    stats_data = api_client.stats(value, None)
                    if stats_data.get("stats"):
                        self.handle_stats(stats_data.get("stats"), key)
                    else:
                        logger.error(
                            "Returning no results because of unexpected response in one of the query."
                        )
                        exit(1)

                for result in self.RESULTS:
                    yield result
                logger.info("Events returned successfully to Splunk.")

            except Exception:
                logger.error("Exception: {} ".format(
                    str(traceback.format_exc())))
                exit(1)
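For reference, handle_stats consumes the "stats" block of each response; a GreyNoise stats response is shaped roughly like the abbreviated sketch below (illustrative values, not a verbatim API response):

stats_data = {
    "count": 12345,
    "query": "classification:malicious last_seen:today",
    "stats": {
        # each key holds a list of {bucket, "count"} entries
        "classifications": [{"classification": "malicious", "count": 12345}],
        "countries": [{"country": "United States", "count": 4321}],
        "tags": [{"tag": "Mirai", "count": 987}],
    },
}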
Example #6
 def __init__(self, conf, section, source=1):
     getConf(self, conf, section)
     self.source = source
     self.ser = serial.Serial(
         port=self.port,  # "/dev/ttyS0",
         baudrate=int(self.baudrate),
         parity=eval(self.parity),
         stopbits=eval(self.stopbits),
         bytesize=eval(self.bytesize),
         timeout=float(self.timeout),
         rtscts=self.rtscts,
         dsrdtr=self.dsrdtr)
     self.logger = setup_logger('serial',
                                os.path.join(logDir, 'serial.log'))
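The eval calls above assume the config stores pyserial constant names (or expressions) as strings. A sketch of a safer equivalent inside __init__, assuming the config holds bare constant names such as 'PARITY_NONE' (strip a 'serial.' prefix first if one is present):

# Resolve constant names against the serial module instead of eval-ing
# arbitrary config strings.
parity = getattr(serial, self.parity)      # e.g. serial.PARITY_NONE
stopbits = getattr(serial, self.stopbits)  # e.g. serial.STOPBITS_ONE
bytesize = getattr(serial, self.bytesize)  # e.g. serial.EIGHTBITS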
 def transform(self, records):
     """Method to clear cache kvstore via rest calls."""
     try:
         logger = utility.setup_logger(
             session_key=self._metadata.searchinfo.session_key,
             log_context=self._metadata.searchinfo.command)
         logger.info("Initiating cache maintenance")
         session_key = self._metadata.searchinfo.session_key
         multi_cache_client = Caching(session_key, logger, 'multi')
         context_cache_client = Caching(session_key, logger, 'context')
         riot_cache_client = Caching(session_key, logger, 'greynoise_riot')
         cache_clients = [
             multi_cache_client, context_cache_client, riot_cache_client
         ]
         mgmt_port = splunk.clilib.cli_common.getMgmtUri().split(":")[-1]
         service = client.connect(port=mgmt_port,
                                  token=session_key,
                                  app=APP_NAME)
         ttl = abs(
             int(
                 service.get("properties/macros/greynoise_ttl/definition")
                 ['body'].read()))
     except ValueError:
         logger.warning(
             "Invalid value found for TTL. Using a default value of '24'.")
         ttl = 24
         service.post("properties/macros/greynoise_ttl",
                      definition=str(ttl))
     except Exception:
         logger.error(
             "An exception occurred during cache maintenance. Exiting.\n{}".
             format(traceback.format_exc()))
         exit(1)
     try:
         for cache_client in cache_clients:
             if cache_client is not None:
                 cache_client.maintain_cache(ttl)
             else:
                 logger.debug("Either KVStore is disabled or not ready")
         logger.debug("Cache maintenance completed successfully")
     except Exception:
         logger.error(
             "An exception occurred during cache maintenance.\n{}".format(
                 traceback.format_exc()))
     yield {}
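A worked illustration of the macro round-trip above, with hypothetical definition values:

# greynoise_ttl definition "24"  -> abs(int("24"))  -> ttl = 24
# greynoise_ttl definition "-12" -> abs(int("-12")) -> ttl = 12
# greynoise_ttl definition "1d"  -> int() raises ValueError -> macro reset to "24"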
Example #8
    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        ip_addresses = self.ip
        ip_field = self.ip_field
        api_key = ""
        EVENTS_PER_CHUNK = 5000
        THREADS = 3
        USE_CACHE = False
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        if ip_addresses and ip_field:
            logger.error(
                "Please use the ip parameter to run gnquick as a generating command, or "
                "the ip_field parameter to run gnquick as a transforming command."
            )
            self.write_error(
                "Please use the ip parameter to run gnquick as a generating command, or "
                "the ip_field parameter to run gnquick as a transforming command."
            )
            exit(1)

        try:
            message = ''
            api_key = utility.get_api_key(
                self._metadata.searchinfo.session_key, logger=logger)
        except APIKeyNotFoundError as e:
            message = str(e)
        except HTTPError as e:
            message = str(e)

        if message:
            self.write_error(message)
            logger.error(
                "Error occured while retrieving API key, Error: {}".format(
                    message))
            exit(1)

        if ip_addresses and not ip_field:
            # This piece of code works as a generating command and does not use the Splunk events.
            # Split ip_addresses on commas and strip spaces from both sides of each IP address
            ip_addresses = [ip.strip() for ip in ip_addresses.split(',')]

            logger.info("Started retrieving results")
            try:
                logger.debug(
                    "Initiating to fetch noise and RIOT status for IP address(es): {}"
                    .format(str(ip_addresses)))

                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       integration_name=INTEGRATION_NAME)

                # CACHING START
                cache_enabled, cache_client = utility.get_caching(
                    self._metadata.searchinfo.session_key, 'multi', logger)
                if int(cache_enabled) == 1 and cache_client is not None:
                    cache_start = time.time()
                    ips_not_in_cache, ips_in_cache = utility.get_ips_not_in_cache(
                        cache_client, ip_addresses, logger)
                    try:
                        response = []
                        if len(ips_in_cache) >= 1:
                            response = cache_client.query_kv_store(
                                ips_in_cache)
                        if response is None:
                            logger.debug(
                                "KVStore is not ready. Skipping caching mechanism."
                            )
                            noise_status = api_client.quick(ip_addresses)
                        elif response == []:
                            noise_status = utility.fetch_response_from_api(
                                api_client.quick, cache_client, ip_addresses,
                                logger)
                        else:
                            noise_status = utility.fetch_response_from_api(
                                api_client.quick, cache_client,
                                ips_not_in_cache, logger)
                            noise_status.extend(response)
                    except Exception:
                        logger.debug(
                            "An exception occurred while fetching response from cache.\n{}"
                            .format(traceback.format_exc()))
                    logger.debug(
                        "Generating command with caching took {} seconds.".
                        format(time.time() - cache_start))
                else:
                    # Using a timeout of 120 seconds for the requests
                    noise_status = api_client.quick(ip_addresses)
                logger.info("Retrieved results successfully")
                # CACHING END

                # Process the API response and send the noise and RIOT status of each IP, with field
                # extractions, to Splunk. The flag below works around a field-extraction issue in
                # custom commands: only the fields extracted from the first event generated by the
                # command are extracted from all subsequent events
                first_record_flag = True

                # Flag to indicate whether erroneous IPs are present
                erroneous_ip_present = False
                for ip in ip_addresses:
                    for sample in noise_status:
                        if ip == sample['ip']:
                            yield event_generator.make_valid_event(
                                'quick', sample, first_record_flag)
                            if first_record_flag:
                                first_record_flag = False
                            logger.debug(
                                "Fetched noise and RIOT status for ip={} from GreyNoise API"
                                .format(str(ip)))
                            break
                    else:
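                        # for-else: this branch runs only when the inner loop finishes
                        # without a break, i.e. no API record matched this IP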
                        erroneous_ip_present = True
                        try:
                            validate_ip(ip, strict=True)
                        except ValueError as e:
                            error_msg = str(e).split(":")
                            logger.debug(
                                "Generating noise and RIOT status for ip={} manually"
                                .format(str(ip)))
                            event = {'ip': ip, 'error': error_msg[0]}
                            yield event_generator.make_invalid_event(
                                'quick', event, first_record_flag)

                            if first_record_flag:
                                first_record_flag = False

                if erroneous_ip_present:
                    logger.warning(
                        "Value of one or more IP address(es) is either invalid or non-routable"
                    )
                    self.write_warning(
                        "Value of one or more IP address(es) passed to {command_name} "
                        "is either invalid or non-routable".format(
                            command_name=str(
                                self._metadata.searchinfo.command)))

            except RateLimitError:
                logger.error(
                    "Rate limit error occured while fetching the context information for ips={}"
                    .format(str(ip_addresses)))
                self.write_error(
                    "The Rate Limit has been exceeded. Please contact the Administrator"
                )
            except RequestFailure as e:
                response_code, response_message = e.args
                if response_code == 401:
                    msg = "Unauthorized. Please check your API key."
                else:
                    # Handle this explicitly, since splunklib cannot handle exceptions in the
                    # (400, {'error': 'error_reason'}) format
                    msg = (
                        "The API call to the GreyNoise platform has failed "
                        "with status_code: {} and error: {}").format(
                            response_code,
                            response_message['error'] if isinstance(
                                response_message, dict) else response_message)

                logger.error("{}".format(str(msg)))
                self.write_error(msg)
            except ConnectionError:
                logger.error(
                    "Error while connecting to the Server. Please check your connection and try again."
                )
                self.write_error(
                    "Error while connecting to the Server. Please check your connection and try again."
                )
            except RequestException:
                logger.error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again."
                )
                self.write_error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again."
                )
            except Exception:
                logger.error("Exception: {} ".format(
                    str(traceback.format_exc())))
                self.write_error(
                    "Exception occured while fetching the noise and RIOT status of the IP address(es). "
                    "See greynoise_main.log for more details.")

        elif ip_field:
            # Enter the mechanism only when the Search is complete and all the events are available
            if self.search_results_info and not self.metadata.preview:

                try:
                    # Strip the spaces from the parameter value if given
                    ip_field = ip_field.strip()
                    # Validating the given parameter
                    try:
                        ip_field = validator.Fieldname(
                            option_name='ip_field').validate(ip_field)
                    except ValueError as e:
                        # Validator will throw ValueError with error message when the parameters are not proper
                        logger.error(str(e))
                        self.write_error(str(e))
                        exit(1)

                    # API key validation
                    if not self.api_validation_flag:
                        api_key_validation, message = utility.validate_api_key(
                            api_key, logger)
                        logger.debug(
                            "API validation status: {}, message: {}".format(
                                api_key_validation, str(message)))
                        self.api_validation_flag = True
                        if not api_key_validation:
                            logger.info(message)
                            self.write_error(message)
                            exit(1)

                    # This piece of code will work as transforming command and will use
                    # the Splunk ingested events and field which is specified in ip_field.
                    chunk_dict = event_generator.batch(records, ip_field,
                                                       EVENTS_PER_CHUNK,
                                                       logger)

                    # This means there are only 1000 or fewer IPs to call across the entire set of records
                    # Use a single thread with the caching mechanism enabled for the chunk
                    if len(chunk_dict) == 1:
                        logger.info(
                            "Fewer than 1000 distinct IPs are present, "
                            "optimizing the IP requests call to GreyNoise API..."
                        )
                        THREADS = 1
                        USE_CACHE = True

                    api_client = GreyNoise(api_key=api_key,
                                           timeout=120,
                                           use_cache=USE_CACHE,
                                           integration_name=INTEGRATION_NAME)
                    # When no records found, batch will return {0:([],[])}
                    tot_time_start = time.time()
                    if len(list(chunk_dict.values())[0][0]) >= 1:
                        for event in event_generator.get_all_events(
                                self._metadata.searchinfo.session_key,
                                api_client,
                                'multi',
                                ip_field,
                                chunk_dict,
                                logger,
                                threads=THREADS):
                            yield event
                    else:
                        logger.info(
                            "No events found, please increase the search timespan to have more search results."
                        )
                    tot_time_end = time.time()
                    logger.debug(
                        "Total execution time => {}".format(tot_time_end -
                                                            tot_time_start))
                except Exception:
                    logger.error(
                        "Exception occurred while adding the noise and RIOT status to the events, Error: {}"
                        .format(traceback.format_exc()))
                    self.write_error(
                        "Exception occured while adding the noise and RIOT status of "
                        "the IP addresses to events. See greynoise_main.log for more details."
                    )

        else:
            logger.error(
                "Please specify exactly one parameter from ip and ip_field with some value."
            )
            self.write_error(
                "Please specify exactly one parameter from ip and ip_field with some value."
            )
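A note on the two modes above: with the ip parameter, gnquick behaves as a generating command at the start of a search, e.g. | gnquick ip="8.8.8.8,1.1.1.1"; with ip_field it behaves as a transforming command appended to a search, e.g. ... | gnquick ip_field="src_ip" (the field name here is illustrative).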
Example #9
        'L2': l2,
        'inception_mean': is_m,
        'inception_std': is_std
    }
    metrics_data['results'].append(m_result)

    if 'logger' in metrics_data:
        _str = 'i:%d;\tE: %f;\tTV: %f;\tL2: %f;\tIS-mean: %f;\tIS-std: %f;' % \
               (iteration, e, tv, l2, is_m, is_std)
        metrics_data['logger'].info(_str)


if __name__ == "__main__":
    opt = parse_args()
    create_dirs(opt)
    logger = setup_logger(opt.outdir)
    logger.info(opt)
    if opt.verbose:
        print(opt)

    # Fix the seed; also call random.seed(opt.seed) if the random module is used
    torch.manual_seed(opt.seed)
    if opt.cuda:
        import torch.backends.cudnn as cudnn
        cudnn.benchmark = True
        torch.cuda.manual_seed(opt.seed)
        dtype = torch.cuda.FloatTensor
    else:
        dtype = torch.FloatTensor

    # Required for storing X ~ G(Z)
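The fragment ends just before the storage tensor is created. An illustrative continuation using the selected dtype (opt.batch_size and opt.nz are assumed option names):

# Hypothetical continuation: a fixed latent batch stored with the chosen dtype
fixed_z = torch.randn(opt.batch_size, opt.nz).type(dtype)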
Example #10
    def transform(self, records):
        """Method that filters event records by noise status and yields them to the Splunk events pipeline."""
        method = 'filter'

        # Setup logger
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        # Enter the mechanism only when the Search is complete and all the events are available
        if self.search_results_info and not self.metadata.preview:

            EVENTS_PER_CHUNK = 1000
            THREADS = 3
            USE_CACHE = False
            ip_field = self.ip_field
            noise_events = self.noise_events

            logger.info(
                "Started filtering the IP address(es) present in field: {}, with noise_status: {}"
                .format(str(ip_field), str(noise_events)))

            try:
                if ip_field:
                    ip_field = ip_field.strip()
                if noise_events:
                    noise_events = noise_events.strip()
                # Validating the given parameters
                try:
                    ip_field = validator.Fieldname(
                        option_name='ip_field').validate(ip_field)
                    noise_events = validator.Boolean(
                        option_name='noise_events').validate(noise_events)
                except ValueError as e:
                    # Validator will throw ValueError with error message when the parameters are not proper
                    logger.error(str(e))
                    self.write_error(str(e))
                    exit(1)

                try:
                    message = ''
                    api_key = utility.get_api_key(
                        self._metadata.searchinfo.session_key, logger=logger)
                except APIKeyNotFoundError as e:
                    message = str(e)
                except HTTPError as e:
                    message = str(e)

                if message:
                    self.write_error(message)
                    logger.error(
                        "Error occured while retrieving API key, Error: {}".
                        format(message))
                    exit(1)

                # API key validation
                api_key_validation, message = utility.validate_api_key(
                    api_key, logger)
                logger.debug("API validation status: {}, message: {}".format(
                    api_key_validation, str(message)))
                if not api_key_validation:
                    logger.info(message)
                    self.write_error(message)
                    exit(1)

                # Divide the records into a dict of tuples keyed by chunk index:
                # {<index>: (<records>, <all the IPs in records>)}
                chunk_dict = event_generator.batch(records, ip_field,
                                                   EVENTS_PER_CHUNK, logger)
                logger.debug("Successfully divided events into chunks")

                # This means there are only 1000 or fewer IPs to call across the entire set of records
                # Use a single thread with the caching mechanism enabled for the chunk
                if len(chunk_dict) == 1:
                    logger.info(
                        "Fewer than 1000 distinct IPs are present, optimizing the IP requests call to GreyNoise API..."
                    )
                    THREADS = 1
                    USE_CACHE = True

                # Using a timeout of 120 seconds for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       use_cache=USE_CACHE,
                                       integration_name="Splunk")

                # When no records found, batch will return {0:([],[])}
                if len(list(chunk_dict.values())[0][0]) >= 1:
                    for chunk_index, result in event_generator.get_all_events(
                            api_client,
                            method,
                            ip_field,
                            chunk_dict,
                            logger,
                            threads=THREADS):
                        # Pass the collected data to the event filter method
                        for event in event_filter(chunk_index, result,
                                                  chunk_dict[chunk_index],
                                                  ip_field, noise_events,
                                                  method):
                            yield event

                        # Deleting the chunk with the events that are already indexed
                        del chunk_dict[chunk_index]

                    logger.info(
                        "Successfully sent all the results to the Splunk")
                else:
                    logger.info(
                        "No events found, please increase the search timespan to have more search results."
                    )

            except Exception:
                logger.error(
                    "Exception occurred while filtering events, Error: {}".
                    format(traceback.format_exc()))
                self.write_error(
                    "Exception occurred while filtering the events based on noise status. See greynoise_main.log for more details."
                )
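event_generator.batch is referenced throughout these commands but not shown. A minimal sketch of the chunking the comments describe, assuming records are dicts and ignoring the optimize_requests flag seen elsewhere:

def batch(records, ip_field, events_per_chunk, logger):
    """Hypothetical sketch: split records into {chunk_index: (records, distinct_ips)}."""
    chunk_dict = {}
    index, chunk_records, chunk_ips = 0, [], []
    for record in records:
        chunk_records.append(record)
        ip = record.get(ip_field)
        if ip and ip not in chunk_ips:
            chunk_ips.append(ip)
        if len(chunk_records) >= events_per_chunk:
            chunk_dict[index] = (chunk_records, chunk_ips)
            index, chunk_records, chunk_ips = index + 1, [], []
    if chunk_records or not chunk_dict:
        # matches the documented empty-input shape {0: ([], [])}
        chunk_dict[index] = (chunk_records, chunk_ips)
    return chunk_dict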
Example #11
        log.info(f"Creating '{name}'")
        cv2.imwrite(name, self.get_rgb().swapaxes(0, 1))
        return name

    def build_rgb(self, shape):
        log.info(
            f"Computing mandelbrot: center = {self.center}, shape = {shape[0]} x {shape[1]}, step_size = {self.step_size}, max_depth = {self.max_depth}"
        )
        start = time.time()
        hsv_img = create_mandelbrot(self.center, shape, self.step_size,
                                    self.max_depth, self.buckets)
        self.last_computation_time = time.time() - start
        log.info("Computation took {0:.2f}s".format(
            self.last_computation_time))
        self.last_img = cv2.cvtColor(hsv_img, cv2.COLOR_HSV2BGR)

    def get_rgb(self):
        return self.last_img


if __name__ == "__main__":
    SHAPE = (1920, 1080)
    MAX_DEPTH = 1500

    setup_logger()
    # random.seed("HELLO")

    mb = Mandelbrot(MAX_DEPTH)
    for i in range(1000):
        mb.random_snapshot(SHAPE, 1.0, 10000)
    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        ip_address = self.ip
        ip_field = self.ip_field
        api_key = ""
        EVENTS_PER_CHUNK = 1
        THREADS = 3
        USE_CACHE = False
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key, log_context=self._metadata.searchinfo.command)

        if ip_address and ip_field:
            logger.error("Please use parameter ip to work gnriot as generating command or "
                         "use parameter ip_field to work gnriot as transforming command.")
            self.write_error("Please use parameter ip to work gnriot as generating command or "
                             "use parameter ip_field to work gnriot as transforming command")
            exit(1)

        try:
            message = ''
            api_key = utility.get_api_key(self._metadata.searchinfo.session_key, logger=logger)
        except APIKeyNotFoundError as e:
            message = str(e)
        except HTTPError as e:
            message = str(e)

        if message:
            self.write_error(message)
            logger.error("Error occured while retrieving API key, Error: {}".format(message))
            exit(1)

        if ip_address and not ip_field:
            # This piece of code works as a generating command and does not use the Splunk events.
            # Strip the spaces from the parameter value if given
            ip_address = ip_address.strip()

            logger.info("Started retrieving results")
            try:
                logger.debug("Initiating to fetch RIOT information for IP address: {}".format(str(ip_address)))
                # Using a timeout of 120 seconds for the requests
                api_client = GreyNoise(api_key=api_key, timeout=120, integration_name=INTEGRATION_NAME)
                session_key = self._metadata.searchinfo.session_key
                riot_information = utility.get_response_for_generating(
                    session_key, api_client, ip_address, 'greynoise_riot', logger)
                logger.info("Retrieved results successfully")

                # Process the API response and send the RIOT information of the IP, with extractions, to Splunk
                yield event_generator.make_valid_event('riot', riot_information, True)
                logger.debug("Fetched RIOT information for ip={} from GreyNoise API".format(str(ip_address)))

            except ValueError as e:
                error_msg = str(e).split(":")
                logger.debug("Generating RIOT information for ip={} manually".format(str(ip_address)))
                event = {
                    'ip': ip_address,
                    'error': error_msg[0]
                }
                yield event_generator.make_invalid_event('riot', event, True)
                logger.warning(error_msg)
                self.write_warning(
                    "Value of IP address passed to {command_name} is either invalid or non-routable".format(
                        command_name=str(self._metadata.searchinfo.command)))
            except RateLimitError:
                logger.error("Rate limit error occured while fetching the context information for ip={}".format(
                    str(ip_address)))
                self.write_error("The Rate Limit has been exceeded. Please contact the Administrator")
            except RequestFailure as e:
                response_code, response_message = e.args
                if response_code == 401:
                    msg = "Unauthorized. Please check your API key."
                else:
                    # Handle this explicitly, since splunklib cannot handle exceptions in the
                    # (400, {'error': 'error_reason'}) format
                    msg = ("The API call to the GreyNoise platform has failed "
                           "with status_code: {} and error: {}").format(
                        response_code, response_message['error'] if isinstance(response_message, dict)
                        else response_message)

                logger.error("{}".format(str(msg)))
                self.write_error(msg)
            except ConnectionError:
                logger.error("Error while connecting to the Server. Please check your connection and try again.")
                self.write_error("Error while connecting to the Server. Please check your connection and try again.")
            except RequestException:
                logger.error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again.")
                self.write_error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again.")
            except Exception:
                logger.error("Exception: {} ".format(str(traceback.format_exc())))
                self.write_error("Exception occured while fetching the RIOT information of the IP address. "
                                 "See greynoise_main.log for more details.")

        elif ip_field:

            logger.info("Started retrieving RIOT information for the IP addresses present in field: {}".format(
                str(ip_field)))
            # Enter the mechanism only when the Search is complete and all the events are available
            if self.search_results_info and not self.metadata.preview:
                try:
                    # Strip the spaces from the parameter value if given
                    ip_field = ip_field.strip()
                    # Validating the given parameter
                    try:
                        ip_field = validator.Fieldname(option_name='ip_field').validate(ip_field)
                    except ValueError as e:
                        # Validator will throw ValueError with error message when the parameters are not proper
                        logger.error(str(e))
                        self.write_error(str(e))
                        exit(1)

                    # API key validation
                    if not self.api_validation_flag:
                        api_key_validation, message = utility.validate_api_key(api_key, logger)
                        logger.debug("API validation status: {}, message: {}".format(api_key_validation, str(message)))
                        self.api_validation_flag = True
                        if not api_key_validation:
                            logger.info(message)
                            self.write_error(message)
                            exit(1)

                    # This piece of code will work as transforming command and will use
                    # the Splunk ingested events and field which is specified in ip_field.
                    # Divide the records into a dict of tuples keyed by chunk index:
                    # {<index>: (<records>, <all the IPs in records>)}
                    chunk_dict = event_generator.batch(
                        records, ip_field, EVENTS_PER_CHUNK, logger, optimize_requests=False)
                    logger.debug("Successfully divided events into chunks")

                    # This means there are only 1000 or fewer IPs to call across the entire set of records
                    # Use a single thread with the caching mechanism enabled for the chunk
                    if len(chunk_dict) == 1:
                        logger.debug("Fewer than 1000 distinct IPs are present, "
                                     "optimizing the IP requests call to GreyNoise API...")
                        THREADS = 1
                        USE_CACHE = True

                    api_client = GreyNoise(
                        api_key=api_key, timeout=120, use_cache=USE_CACHE, integration_name=INTEGRATION_NAME)

                    # When no records found, batch will return {0:([],[])}
                    if len(chunk_dict) > 0:
                        for event in event_generator.get_all_events(
                                self._metadata.searchinfo.session_key, api_client, 'greynoise_riot', ip_field,
                                chunk_dict, logger, threads=THREADS):
                            yield event

                        logger.info("Successfully sent all the results to the Splunk")
                    else:
                        logger.info("No events found, please increase the search timespan to have more search results.")
                except Exception:
                    logger.error(
                        "Exception occurred while adding the RIOT information to the events, Error: {}".format(
                            traceback.format_exc()))
                    self.write_error("Exception occurred while adding the RIOT information of the IP addresses "
                                     "to events. See greynoise_main.log for more details.")

        else:
            logger.error("Please specify exactly one parameter from ip and ip_field with some value.")
            self.write_error("Please specify exactly one parameter from ip and ip_field with some value.")
    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        # Setup logger
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        if self.search_results_info and not self.metadata.preview:

            EVENTS_PER_CHUNK = 1
            THREADS = 3
            USE_CACHE = False
            ip_field = self.ip_field

            logger.info(
                "Started retrieving context information for the IP addresses present in field: {}"
                .format(str(ip_field)))

            try:
                # Strip the spaces from the parameter value if given
                if ip_field:
                    ip_field = ip_field.strip()
                # Validating the given parameters
                try:
                    ip_field = validator.Fieldname(
                        option_name='ip_field').validate(ip_field)
                except ValueError as e:
                    # Validator will throw ValueError with error message when the parameters are not proper
                    logger.error(str(e))
                    self.write_error(str(e))
                    exit(1)

                try:
                    message = ''
                    api_key = utility.get_api_key(
                        self._metadata.searchinfo.session_key, logger=logger)
                except APIKeyNotFoundError as e:
                    message = str(e)
                except HTTPError as e:
                    message = str(e)

                if message:
                    self.write_error(message)
                    logger.error(
                        "Error occured while retrieving API key, Error: {}".
                        format(message))
                    exit(1)

                # API key validation
                if not self.api_validation_flag:
                    api_key_validation, message = utility.validate_api_key(
                        api_key, logger)
                    logger.debug(
                        "API validation status: {}, message: {}".format(
                            api_key_validation, str(message)))
                    self.api_validation_flag = True
                    if not api_key_validation:
                        logger.info(message)
                        self.write_error(message)
                        exit(1)

                # Divide the records into a dict of tuples keyed by chunk index:
                # {<index>: (<records>, <all the IPs in records>)}
                chunk_dict = event_generator.batch(records,
                                                   ip_field,
                                                   EVENTS_PER_CHUNK,
                                                   logger,
                                                   optimize_requests=False)
                logger.debug("Successfully divided events into chunks")

                # This means there are only 1000 or fewer IPs to call across the entire set of records
                # Use a single thread with the caching mechanism enabled for the chunk
                if len(chunk_dict) == 1:
                    logger.debug(
                        "Fewer than 1000 distinct IPs are present, optimizing the IP requests call to GreyNoise API..."
                    )
                    THREADS = 1
                    USE_CACHE = True

                # Using a timeout of 120 seconds for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       use_cache=USE_CACHE,
                                       integration_name=INTEGRATION_NAME)

                if len(chunk_dict) > 0:
                    for event in event_generator.get_all_events(
                            self._metadata.searchinfo.session_key,
                            api_client,
                            'enrich',
                            ip_field,
                            chunk_dict,
                            logger,
                            threads=THREADS):
                        yield event

                    logger.info(
                        "Successfully sent all the results to Splunk")
                else:
                    logger.info(
                        "No events found, please increase the search timespan to have more search results."
                    )

            except Exception:
                logger.error(
                    "Exception occurred while getting context information for events, Error: {}"
                    .format(traceback.format_exc()))
                self.write_error(
                    "Exception occured while enriching events with the context information of IP addresses. "
                    "See greynoise_main.log for more details.")
Example #14
    "Generates a sprite atlas using different materials and models.")
parser.add_argument("--xml-file",
                    metavar="FILE",
                    help="Specify the xml file used for generation",
                    required=True)
parser.add_argument("--result-dir",
                    metavar="DIRECTORY",
                    help="Specify the output directory",
                    required=True)
parser.add_argument("--render-script",
                    metavar="FILE",
                    help="Specify the python script invoked in blender",
                    required=True)
args = parser.parse_args()

utility.setup_logger()
logger = logging.getLogger()

logger.info("Started resource generation")

XML_PATH = os.path.abspath(args.xml_file)
RESOURCE_DIR = os.path.dirname(XML_PATH)
RENDER_SCRIPT_PATH = os.path.abspath(args.render_script)
RESULT_DIR = os.path.abspath(args.result_dir)
XML_COPY_PATH = os.path.join(RESULT_DIR, os.path.basename(XML_PATH))

logger.info("Resource dir: " + RESOURCE_DIR + "'")
logger.info("Result dir: '" + RESULT_DIR + "'")
logger.info("Render script: '" + RENDER_SCRIPT_PATH + "'")

logger.info("XML file: '" + XML_PATH + "'")