Example 1
    def do_generate(self, api_key, logger):

        ip_address = self.ip

        try:
            # Strip the spaces from the parameter value if given
            if ip_address:
                ip_address = ip_address.strip()

            logger.info("Initiating to fetch RIOT status for ip: {}".format(
                str(ip_address)))
            # Using the default timeout of 60 seconds for the request
            api_client = GreyNoise(api_key=api_key,
                                   timeout=60,
                                   integration_name="Splunk")
            context_info = api_client.riot(ip_address)
            logger.info(
                "Successfully retrieved the RIOT status for ip={}".format(
                    str(ip_address)))

            # Process the API response and send the IP's context information to Splunk with field extractions
            results = event_generator.make_valid_event('ip', context_info,
                                                       True)
            yield results

        except ValueError:
            logger.error(
                "IP address: {} doesn\'t match the valid IP format".format(
                    str(ip_address)))
            self.write_error("IP address doesn\'t match the valid IP format")
Example 2
 def gnIPCheck(rIP):
     #https://developer.greynoise.io/docs/using-the-greynoise-community-api
     api_key = os.getenv('greynoise_key')
     api_client = GreyNoise(api_key=api_key)
     utils.rIPRating = []
     utils.rIPDomain = []
     for ip in rIP:
         if ipaddress.ip_address(ip).is_private is False:
             response = api_client.ip(ip)
             if response['seen'] is False:
                 utils.rIPRating.append('unknown')
                 utils.rIPDomain.append('unknown')
             elif response['classification'] == 'malicious':
                 utils.rIPRating.append('High')
                 utils.rIPDomain.append(
                     response['metadata']['organization'])
             elif response['classification'] == 'unknown':
                 utils.rIPRating.append('Medium')
                 utils.rIPDomain.append(
                     response['metadata']['organization'])
             elif response['classification'] == 'benign':
                 utils.rIPRating.append('Low')
                 utils.rIPDomain.append(
                     response['metadata']['organization'])
             else:
                 utils.rIPRating.append('error')
                 utils.rIPDomain.append('error')
         else:
             utils.rIPRating.append('Private')
             utils.rIPDomain.append('Local')
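
The classification-to-rating chain above can also be written as a lookup table; the following is a hedged sketch of that variant (the helper name, the placeholder API key, and the table itself are illustrative, not part of the original script):

import ipaddress
from greynoise import GreyNoise

# Mirrors the ratings used above: malicious -> High, unknown -> Medium, benign -> Low.
RATING_BY_CLASSIFICATION = {"malicious": "High", "unknown": "Medium", "benign": "Low"}

def rate_ip(api_client, ip):
    # Private addresses are never sent to the API, matching the original behaviour.
    if ipaddress.ip_address(ip).is_private:
        return "Private", "Local"
    response = api_client.ip(ip)
    if response.get("seen") is False:
        return "unknown", "unknown"
    classification = response.get("classification")
    if classification not in RATING_BY_CLASSIFICATION:
        return "error", "error"
    return RATING_BY_CLASSIFICATION[classification], response["metadata"]["organization"]

# Example usage with a placeholder key:
# client = GreyNoise(api_key="YOUR_API_KEY")
# rating, organization = rate_ip(client, "8.8.8.8")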
Example 3
def get_greynoise_info_ip():

    ip_lookup = input("[+] 2 IPv4 query  Enter IP Address to query: ")
    
    greynoise_lookup = GreyNoise(api_key=GN_KEY)
    gn_seen_before = greynoise_lookup.quick(ip_lookup)

    print(style.BLUE + f'GreyNoise Output: {gn_seen_before}' + "\n")
Example 4
 def __init__(self, context):
     super(SwMain, self).__init__(context)
     self.ip_address = context.inputs["ip_address"]
     self.api_key = context.asset["api_key"]
     self.session = GreyNoise(
         api_key=self.api_key,
         integration_name="greynoise-community-swimlane-" + PLUGIN_VERSION,
         offering="community",
     )
Example 5
    def do_generate(self, api_key, logger):

        query = self.query
        count = self.count

        if query == '':
            logger.error("Parameter query should not be empty.")
            self.write_error("Parameter query should not be empty.")
            exit(1)

        # Strip the spaces from the parameter value if given
        if count:
            count = count.strip()
        # Validating the given parameters
        try:
            count = validator.Integer(option_name='count',
                                      minimum=1).validate(count)
        except ValueError as e:
            # Validator raises a ValueError with an error message when a parameter is invalid
            logger.error(str(e))
            self.write_error(str(e))
            exit(1)

        logger.info(
            "Fetching aggregate statistics for query: {}, count: {}".format(
                str(query), count))
        # Using a 240-second timeout for the requests
        api_client = GreyNoise(api_key=api_key,
                               timeout=240,
                               integration_name="Splunk")
        # If the user does not pass count explicitly to the command, it will be None
        stats_data = api_client.stats(query, count)
        logger.info(
            "Successfully retrieved response for the aggregate statistics for query: {}, count: {}"
            .format(str(query), count))

        if int(stats_data.get('count', -1)) >= 0:
            results = {}
            results['source'] = 'greynoise'
            results['sourcetype'] = 'greynoise'
            results['_time'] = time.time()
            results['_raw'] = {'results': stats_data}
            yield results
        else:
            response = stats_data.get('message', None) or stats_data.get(
                'error', None)

            if response and ('bad count' in response or 'bad query' in response):
                logger.error(
                    "Invalid response retrieved from the GreyNoise API for query: {}, response: {}"
                    .format(str(query), str(response)))
                if 'message' in response:
                    event = {'message': response}
                else:
                    event = {'error': response}
                yield event_generator.make_invalid_event('stats', event, True)
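
Outside of Splunk, the same aggregate-statistics call can be exercised directly; a hedged sketch with a placeholder key, query, and count:

from greynoise import GreyNoise

# Standalone sketch of the stats() call used above; the key, query, and count are placeholders.
api_client = GreyNoise(api_key="YOUR_API_KEY", timeout=240, integration_name="example-script")
stats_data = api_client.stats("classification:malicious", 5)
if int(stats_data.get("count", -1)) >= 0:
    print(stats_data.get("stats"))
else:
    print(stats_data.get("message") or stats_data.get("error"))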
Example 6
 def run(self, conf, args, plugins):
     logging.getLogger("greynoise").setLevel(logging.CRITICAL)
     gn = GreyNoise(api_key=conf["GreyNoise"]["key"])
     if args.ip:
         res = gn.ip(args.ip)
         self.print_results(res, args)
     elif args.query:
         res = gn.query(args.query)
         self.print_results(res, args)
     else:
         self.parser.print_help()
Example 7
 def run(self, conf, args, plugins):
     if conf["GreyNoise"]["key"] == "":
         print("You need to set your API key with GreyNoise")
         sys.exit()
     gn = GreyNoise(api_key=conf["GreyNoise"]["key"])
     if args.ip:
         res = gn.ip(args.ip)
         self.print_results(res, args)
     elif args.query:
         res = gn.query(args.query)
         self.print_results(res, args)
     else:
         self.parser.print_help()
Example 8
    def generate(self):
        """Method that yields records to the Splunk processing pipeline."""
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        # Enter the mechanism only when the Search is complete and all the events are available
        if self.search_results_info and not self.metadata.preview:

            try:
                api_key = utility.get_api_key(
                    self._metadata.searchinfo.session_key, logger=logger)

                # Exit the search if the API key is not available.
                if not api_key:
                    logger.error(
                        "API key not found. Please configure the GreyNoise App for Splunk."
                    )
                    exit(1)

                # Using a 240-second timeout for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=240,
                                       integration_name=INTEGRATION_NAME)

                queries = {
                    "malicious": "classification:malicious last_seen:today",
                    "benign": "classification:benign last_seen:today",
                    "unknown": "classification:unknown last_seen:today"
                }

                for key, value in queries.items():
                    logger.debug(
                        "Fetching records for classification: {}".format(key))
                    stats_data = api_client.stats(value, None)
                    if stats_data.get("stats"):
                        self.handle_stats(stats_data.get("stats"), key)
                    else:
                        logger.error(
                            "Returning no results because of unexpected response in one of the query."
                        )
                        exit(1)

                for result in self.RESULTS:
                    yield result
                logger.info("Events returned successfully to Splunk.")

            except Exception:
                logger.error("Exception: {} ".format(
                    str(traceback.format_exc())))
                exit(1)
Example 9
    def create_entities(cls, request, response):
        api_key = request.TransformSettings["GNApiKey"]
        api_client = GreyNoise(
            api_key=api_key,
            integration_name="maltego-community-v1.0.0",
            offering="community",
        )
        input_ip = response.addEntity("maltego.IPv4Address", request.Value)
        try:
            resp = api_client.ip(request.Value)
            if resp["noise"] or resp["riot"]:
                if resp["noise"]:
                    response.addEntity("greynoise.noise", "Noise Detected")
                if resp["riot"]:
                    response.addEntity("greynoise.noise",
                                       "Benign Service Detected")
                response.addEntity("maltego.Alias", resp["name"])
                response.addEntity("greynoise.classification",
                                   resp["classification"])
                response.addEntity("maltego.DateTime", resp["last_seen"])
                url = response.addEntity("maltego.URL", resp["link"])
                url.addProperty(
                    fieldName="short-title",
                    displayName="GreyNoise color",
                    value=resp["link"],
                    matchingRule="strict",
                )
                url.addProperty(
                    fieldName="url",
                    displayName="GreyNoise color",
                    value=resp["link"],
                    matchingRule="strict",
                )
            else:
                response.addEntity("greynoise.noise", "No Noise Detected")
                response.addUIMessage(
                    f"The IP address {request.Value} hasn't been seen by GreyNoise."
                )

            add_display_info(
                input_ip,
                resp.get("classification"),
                resp.get("last_seen"),
                resp.get("link"),
                resp.get("name"),
            )
        except Exception as e:
            response.addUIMessage(e)
Example 10
class Connection(insightconnect_plugin_runtime.Connection):
    def __init__(self):
        super(self.__class__, self).__init__(input=ConnectionSchema())
        self.api_key = None
        self.server = None
        self.user_agent = None
        self.gn_client = None

    def connect(self, params):
        self.api_key = params.get("credentials").get("secretKey", "")
        self.server = "https://api.greynoise.io"
        self.user_agent = f"rapid7-insightconnect-v{self.meta.version}"
        self.gn_client = GreyNoise(api_server=self.server, api_key=self.api_key, integration_name=self.user_agent)
        self.logger.info("Connect: Connecting...")

    def test(self):
        try:
            resp = self.gn_client.test_connection()

        except RequestFailure as e:
            if e.args[0] == 401:
                raise ConnectionTestException(preset=ConnectionTestException.Preset.API_KEY, data=e.args[1])
            elif e.args[0] == 429:
                raise ConnectionTestException(preset=ConnectionTestException.Preset.RATE_LIMIT, data=e.args[1])
            elif e.args[0] == 500:
                raise ConnectionTestException(preset=ConnectionTestException.Preset.SERVER_ERROR, data=e.args[1])
            else:
                # Re-raise unexpected failures so resp is never returned unbound
                raise

        return resp
Example 11
    def do_generate(self, api_key, logger):

        query = self.query
        result_size = self.result_size

        logger.info("Started retrieving results for query: {}".format(str(query)))

        if query == '':
            logger.error("Parameter query should not be empty.")
            self.write_error("Parameter query should not be empty.")
            exit(1)
        
        # Strip the spaces from the parameter value if given
        if result_size:
            result_size = result_size.strip()
        
        # Validating the given parameters
        try:
            result_size = validator.Integer(option_name='result_size', minimum=1).validate(result_size)
        except ValueError as e:
            # Validator raises a ValueError with an error message when a parameter is invalid
            logger.error(str(e))
            self.write_error(str(e))
            exit(1)

        # Opting timeout of 240 seconds for the request
        api_client = GreyNoise(api_key=api_key, timeout=240)

        logger.info("Fetching results for GNQL query: {}, requested number of results: {}".format(str(query), str(result_size)))
        
        # Keep generating the events till result_size is not reached or all the query results are sent to Splunk
        for event in response_scroller(api_client, logger, query, result_size):
            yield event

        logger.info("Succcessfully retrieved results for the GreyNoise query: {}".format(str(query)))
Example 12
def main():
    """Run the core."""
    parser = ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')
    setup_parser = subs.add_parser('setup')
    setup_parser.add_argument('-k',
                              '--api-key',
                              dest='api_key',
                              required=True,
                              help='API key for GreyNoise.',
                              type=str)
    args = parser.parse_args()

    if args.cmd == 'setup':
        if not os.path.exists(CONFIG_PATH):
            os.makedirs(CONFIG_PATH)
        config = CONFIG_DEFAULTS
        config['api_key'] = args.api_key
        with open(CONFIG_FILE, 'w') as conf_file_handle:
            json.dump(config,
                      conf_file_handle,
                      indent=4,
                      separators=(',', ': '))

    config = json.load(open(CONFIG_FILE))
    if config['api_key'] == '':
        raise Exception("Run setup before any other actions!")

    GreyNoise(config['api_key'])
    raise NotImplementedError
Example 13
    def __init__(self, config):
        super(GreyNoiseBaseAction, self).__init__(config=config)

        gn_api_key = self.config.get('greynoise_api_key', None)
        self.gn_client = GreyNoise(api_key=gn_api_key,
                                   integration_name="greynoise-stackstorm-v" +
                                   PACK_VERSION)
Example 14
 def intel(self, type, query, data, conf):
     if type == "ip":
         print("[+] Checking GreyNoise...")
         logging.getLogger("greynoise").setLevel(logging.CRITICAL)
         gn = GreyNoise(api_key=conf["GreyNoise"]["key"])
         res = gn.ip(query)
         if res["seen"]:
             data["reports"].append({
                 "url":
                 "https://viz.greynoise.io/ip/{}".format(query),
                 "title":
                 "Seen by GreyNoise as {}".format(", ".join(res["tags"])),
                 "date":
                 None,
                 "source":
                 "GreyNoise"
             })
Example 15
 def get_api_client(self):
     """Get api client."""
     api_key = get_api_key(self.session_key, self.logger)
     if not api_key:
         self._handle_alert_exit(1)
     return GreyNoise(api_key=api_key,
                      timeout=120,
                      integration_name=INTEGRATION_NAME)
Example 16
    def run(self, params={}):
        gn_client = GreyNoise(
            api_server=self.connection.server,
            api_key=self.connection.api_key,
            integration_name=self.connection.user_agent,
            offering="community",
        )
        try:
            resp = gn_client.ip(params.get(Input.IP_ADDRESS))
            if resp["noise"] or resp["riot"]:
                resp["last_seen"] = pendulum.parse(
                    resp["last_seen"]).to_rfc3339_string()

        except RequestFailure as e:
            raise GNRequestFailure(e.args[0], e.args[1])

        except ValueError as e:
            raise GNValueError(e.args[0])

        return resp
Example 17
def validate_api_key(api_key, logger=None):
    """
    Validate the API key using a lightweight call to the GreyNoise API.

    Returns a (valid, message) tuple; valid is False when the key cannot be
    validated, for example when a 401 response indicates unauthorized access.
    :param api_key:
    :param logger:
    """
    if logger:
        logger.debug("Validating the api key...")

    try:
        api_client = GreyNoise(api_key=api_key,
                               timeout=120,
                               integration_name=INTEGRATION_NAME)
        api_client.test_connection()
        return (True, 'API key is valid')

    except RateLimitError:
        msg = "RateLimitError occured, please contact the Administrator"
        return (False, 'API key not validated, Error: {}'.format(msg))
    except RequestFailure as e:
        response_code, response_message = e.args
        if response_code == 401:
            return (False, 'Unauthorized. Please check your API key.')
        else:
            # Need to handle this, as splunklib is unable to handle the exception with
            # (400, {'error': 'error_reason'}) format
            msg = ("The API call to the GreyNoise API has failed "
                   "with status_code: {} and error: {}").format(
                       response_code, response_message['error'] if isinstance(
                           response_message, dict) else response_message)
            return (False, 'API key not validated, Error: {}'.format(msg))
    except ConnectionError:
        msg = "ConnectionError occured, please check your connection and try again."
        return (False, 'API key not validated, Error: {}'.format(msg))
    except RequestException:
        msg = "An ambiguous exception occured, please try again."
        return (False, 'API key not validated, Error: {}'.format(msg))
    except Exception as e:
        return (False, 'API key not validated, Error: {}'.format(str(e)))
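
A brief usage sketch of the helper above (the key is a placeholder):

is_valid, message = validate_api_key("YOUR_API_KEY")
if not is_valid:
    print(message)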
Example 18
def lkup_sus_ip_address(susp_addr):
    """Find RDNS of IP address and return info using GreyNoise API.

    Args:
        susp_addr: Suspect IPv4 address.
    Returns:
        Domain (if found) & GreyNoise Output (if found).
    Raises:
        Error: if susp_addr domain name cannot be found.
    """
    try:
        rev = dns.reversename.from_address(susp_addr)
        output = str(dns.resolver.query(rev, 'PTR')[0])

        api_client = GreyNoise(api_key="", timeout=15)
        bring_the_noise = api_client.ip(susp_addr)

        print("Found domain: {}".format(output))
        print('*' * 80)
        print(bring_the_noise)

    except dns.resolver.NXDOMAIN as e:
        print(e)
Example 19
    def run(self):

        if self.data_type == "ip":
            api_key = self.get_param("config.key", None)
            api_type = self.get_param("config.api_type", None)
            if api_type and api_type.lower() == "community":
                api_client = GreyNoise(
                    api_key=api_key,
                    timeout=30,
                    integration_name="greynoise-cortex-analyzer-v3.1",
                    offering="community",
                )
            else:
                api_client = GreyNoise(
                    api_key=api_key,
                    timeout=30,
                    integration_name="greynoise-cortex-analyzer-v3.1",
                )
            try:
                self.report(api_client.ip(self.get_data()))
            except Exception as e:
                self.error("Unable to query GreyNoise API\n{}".format(e))
        else:
            self.notSupported()
Example 20
class SwMain(GreynoiseBaseClass):
    def __init__(self, context):
        super(SwMain, self).__init__(context)
        self.ip_address = context.inputs["ip_address"]
        self.api_key = context.asset["api_key"]
        self.session = GreyNoise(
            api_key=self.api_key,
            integration_name="greynoise-community-swimlane-" + PLUGIN_VERSION,
            offering="community",
        )

    def execute(self):
        output = []
        response = self.session.ip(self.ip_address)
        output.append(response)

        return output
Example 21
    def do_generate(self, api_key, logger):
        """
        Fetch the API response, process it, and send it to Splunk with field extractions.

        :param api_key: GreyNoise API Key.
        :param logger: Logger object.
        """
        ip_address = self.ip

        try:
            # Strip the spaces from the parameter value if given
            if ip_address:
                ip_address = ip_address.strip()

            logger.info(
                "Initiating to fetch context information for ip: {}".format(
                    str(ip_address)))
            # Using the default timeout of 60 seconds for the request
            api_client = GreyNoise(api_key=api_key,
                                   timeout=60,
                                   integration_name=INTEGRATION_NAME)
            session_key = self._metadata.searchinfo.session_key
            context_info = get_response_for_generating(session_key, api_client,
                                                       ip_address, 'ip',
                                                       logger)
            logger.info(
                "Successfully retrieved the context information for ip={}".
                format(str(ip_address)))

            # Process the API response and send the IP's context information to Splunk with field extractions
            results = event_generator.make_valid_event('ip', context_info,
                                                       True)
            yield results

        except ValueError as e:
            error_msg = str(e).split(":")
            logger.error(e)
            self.write_error(error_msg[0])
Example 22
 def intel(self, type, query, data, conf):
     if type == "ip":
         print("[+] Checking GreyNoise...")
         logging.getLogger("greynoise").setLevel(logging.CRITICAL)
         if conf["GreyNoise"]["api_type"].lower() == "community":
             gn = GreyNoise(
                 api_key=conf["GreyNoise"]["key"],
                 integration_name="Harpoon (https://github.com/Te-k/harpoon)",
                 offering="community",
             )
             res = gn.ip(query)
             if res["noise"]:
                 data["reports"].append({
                     "url":
                     "https://viz.greynoise.io/ip/{}".format(query),
                     "title":
                     "Seen by GreyNoise as {}".format(res["name"]),
                     "date":
                     None,
                     "source":
                     "GreyNoise",
                 })
         else:
             gn = GreyNoise(
                 api_key=conf["GreyNoise"]["key"],
                 integration_name="Harpoon (https://github.com/Te-k/harpoon)",
             )
             res = gn.ip(query)
             if res["seen"]:
                 data["reports"].append({
                     "url":
                     "https://viz.greynoise.io/ip/{}".format(query),
                     "title":
                     "Seen by GreyNoise as {}".format(", ".join(
                         res["tags"])),
                     "date":
                     None,
                     "source":
                     "GreyNoise",
                 })
Example 23
 def run(self, conf, args, plugins):
     logging.getLogger("greynoise").setLevel(logging.CRITICAL)
     if conf["GreyNoise"]["api_type"].lower() == "community":
         gn = GreyNoise(
             api_key=conf["GreyNoise"]["key"],
             integration_name="Harpoon (https://github.com/Te-k/harpoon)",
             offering="community",
         )
     else:
         gn = GreyNoise(
             api_key=conf["GreyNoise"]["key"],
             integration_name="Harpoon (https://github.com/Te-k/harpoon)",
         )
     if args.ip:
         res = gn.ip(args.ip)
         self.print_results(res, args)
     elif args.query:
         res = gn.query(args.query)
         self.print_results(res, args)
     elif args.list:
         res = gn.metadata()
         self.print_results(res, args)
     else:
         self.parser.print_help()
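
The community/enterprise branching above can be factored into a small helper; a hedged sketch that reuses the configuration keys shown in these examples:

from greynoise import GreyNoise

def build_client(conf):
    # Pass offering="community" only when the config selects the Community API.
    kwargs = {
        "api_key": conf["GreyNoise"]["key"],
        "integration_name": "Harpoon (https://github.com/Te-k/harpoon)",
    }
    if conf["GreyNoise"].get("api_type", "").lower() == "community":
        kwargs["offering"] = "community"
    return GreyNoise(**kwargs)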
Example 24
    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        ip_addresses = self.ip
        ip_field = self.ip_field
        api_key = ""
        EVENTS_PER_CHUNK = 5000
        THREADS = 3
        USE_CACHE = False
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        if ip_addresses and ip_field:
            logger.error(
                "Please use parameter ip to work gnquick as generating command or "
                "use parameter ip_field to work gnquick as transforming command."
            )
            self.write_error(
                "Please use parameter ip to work gnquick as generating command or "
                "use parameter ip_field to work gnquick as transforming command"
            )
            exit(1)

        try:
            message = ''
            api_key = utility.get_api_key(
                self._metadata.searchinfo.session_key, logger=logger)
        except APIKeyNotFoundError as e:
            message = str(e)
        except HTTPError as e:
            message = str(e)

        if message:
            self.write_error(message)
            logger.error(
                "Error occured while retrieving API key, Error: {}".format(
                    message))
            exit(1)

        if ip_addresses and not ip_field:
            # This piece of code works as a generating command and does not use the Splunk events.
            # Split ip_addresses on commas and strip surrounding spaces from each IP address
            ip_addresses = [ip.strip() for ip in ip_addresses.split(',')]

            logger.info("Started retrieving results")
            try:
                logger.debug(
                    "Initiating to fetch noise and RIOT status for IP address(es): {}"
                    .format(str(ip_addresses)))

                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       integration_name=INTEGRATION_NAME)

                # CACHING START
                cache_enabled, cache_client = utility.get_caching(
                    self._metadata.searchinfo.session_key, 'multi', logger)
                if int(cache_enabled) == 1 and cache_client is not None:
                    cache_start = time.time()
                    ips_not_in_cache, ips_in_cache = utility.get_ips_not_in_cache(
                        cache_client, ip_addresses, logger)
                    try:
                        response = []
                        if len(ips_in_cache) >= 1:
                            response = cache_client.query_kv_store(
                                ips_in_cache)
                        if response is None:
                            logger.debug(
                                "KVStore is not ready. Skipping caching mechanism."
                            )
                            noise_status = api_client.quick(ip_addresses)
                        elif response == []:
                            noise_status = utility.fetch_response_from_api(
                                api_client.quick, cache_client, ip_addresses,
                                logger)
                        else:
                            noise_status = utility.fetch_response_from_api(
                                api_client.quick, cache_client,
                                ips_not_in_cache, logger)
                            noise_status.extend(response)
                    except Exception:
                        logger.debug(
                            "An exception occurred while fetching response from cache.\n{}"
                            .format(traceback.format_exc()))
                    logger.debug(
                        "Generating command with caching took {} seconds.".
                        format(time.time() - cache_start))
                else:
                    # Using a 120-second timeout for the requests
                    noise_status = api_client.quick(ip_addresses)
                logger.info("Retrieved results successfully")
                # CACHING END

                # Process the API response and send the noise and RIOT status of each IP to Splunk
                # with field extractions. This flag handles a field-extraction quirk in custom commands:
                # only the fields extracted from the first event generated by the custom command
                # will be extracted from all events.
                first_record_flag = True

                # Flag to indicate whether erroneous IPs are present
                erroneous_ip_present = False
                for ip in ip_addresses:
                    for sample in noise_status:
                        if ip == sample['ip']:
                            yield event_generator.make_valid_event(
                                'quick', sample, first_record_flag)
                            if first_record_flag:
                                first_record_flag = False
                            logger.debug(
                                "Fetched noise and RIOT status for ip={} from GreyNoise API"
                                .format(str(ip)))
                            break
                    else:
                        erroneous_ip_present = True
                        try:
                            validate_ip(ip, strict=True)
                        except ValueError as e:
                            error_msg = str(e).split(":")
                            logger.debug(
                                "Generating noise and RIOT status for ip={} manually"
                                .format(str(ip)))
                            event = {'ip': ip, 'error': error_msg[0]}
                            yield event_generator.make_invalid_event(
                                'quick', event, first_record_flag)

                            if first_record_flag:
                                first_record_flag = False

                if erroneous_ip_present:
                    logger.warn(
                        "Value of one or more IP address(es) is either invalid or non-routable"
                    )
                    self.write_warning(
                        "Value of one or more IP address(es) passed to {command_name} "
                        "is either invalid or non-routable".format(
                            command_name=str(
                                self._metadata.searchinfo.command)))

            except RateLimitError:
                logger.error(
                    "Rate limit error occured while fetching the context information for ips={}"
                    .format(str(ip_addresses)))
                self.write_error(
                    "The Rate Limit has been exceeded. Please contact the Administrator"
                )
            except RequestFailure as e:
                response_code, response_message = e.args
                if response_code == 401:
                    msg = "Unauthorized. Please check your API key."
                else:
                    # Need to handle this, as splunklib is unable to handle the exception with
                    # (400, {'error': 'error_reason'}) format
                    msg = (
                        "The API call to the GreyNoise platform have been failed "
                        "with status_code: {} and error: {}").format(
                            response_code,
                            response_message['error'] if isinstance(
                                response_message, dict) else response_message)

                logger.error("{}".format(str(msg)))
                self.write_error(msg)
            except ConnectionError:
                logger.error(
                    "Error while connecting to the Server. Please check your connection and try again."
                )
                self.write_error(
                    "Error while connecting to the Server. Please check your connection and try again."
                )
            except RequestException:
                logger.error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again."
                )
                self.write_error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again."
                )
            except Exception:
                logger.error("Exception: {} ".format(
                    str(traceback.format_exc())))
                self.write_error(
                    "Exception occured while fetching the noise and RIOT status of the IP address(es). "
                    "See greynoise_main.log for more details.")

        elif ip_field:
            # Enter the mechanism only when the Search is complete and all the events are available
            if self.search_results_info and not self.metadata.preview:

                try:
                    # Strip the spaces from the parameter value if given
                    ip_field = ip_field.strip()
                    # Validating the given parameter
                    try:
                        ip_field = validator.Fieldname(
                            option_name='ip_field').validate(ip_field)
                    except ValueError as e:
                        # Validator raises a ValueError with an error message when a parameter is invalid
                        logger.error(str(e))
                        self.write_error(str(e))
                        exit(1)

                    # API key validation
                    if not self.api_validation_flag:
                        api_key_validation, message = utility.validate_api_key(
                            api_key, logger)
                        logger.debug(
                            "API validation status: {}, message: {}".format(
                                api_key_validation, str(message)))
                        self.api_validation_flag = True
                        if not api_key_validation:
                            logger.info(message)
                            self.write_error(message)
                            exit(1)

                    # This piece of code will work as transforming command and will use
                    # the Splunk ingested events and field which is specified in ip_field.
                    chunk_dict = event_generator.batch(records, ip_field,
                                                       EVENTS_PER_CHUNK,
                                                       logger)

                    # This means there are at most 1000 distinct IPs to query in the entire set of records
                    # Use a single thread with the caching mechanism enabled for the chunk
                    if len(chunk_dict) == 1:
                        logger.info(
                            "Fewer than 1000 distinct IPs are present, "
                            "optimizing the IP request calls to the GreyNoise API..."
                        )
                        THREADS = 1
                        USE_CACHE = True

                    api_client = GreyNoise(api_key=api_key,
                                           timeout=120,
                                           use_cache=USE_CACHE,
                                           integration_name=INTEGRATION_NAME)
                    # When no records found, batch will return {0:([],[])}
                    tot_time_start = time.time()
                    if len(list(chunk_dict.values())[0][0]) >= 1:
                        for event in event_generator.get_all_events(
                                self._metadata.searchinfo.session_key,
                                api_client,
                                'multi',
                                ip_field,
                                chunk_dict,
                                logger,
                                threads=THREADS):
                            yield event
                    else:
                        logger.info(
                            "No events found, please increase the search timespan to have more search results."
                        )
                    tot_time_end = time.time()
                    logger.debug(
                        "Total execution time => {}".format(tot_time_end -
                                                            tot_time_start))
                except Exception:
                    logger.info(
                        "Exception occured while adding the noise and RIOT status to the events, Error: {}"
                        .format(traceback.format_exc()))
                    self.write_error(
                        "Exception occured while adding the noise and RIOT status of "
                        "the IP addresses to events. See greynoise_main.log for more details."
                    )

        else:
            logger.error(
                "Please specify exactly one parameter from ip and ip_field with some value."
            )
            self.write_error(
                "Please specify exactly one parameter from ip and ip_field with some value."
            )
Example 25
from greynoise import GreyNoise
api_client = GreyNoise(api_key=<api_key>, timeout=<timeout_in_seconds>)

api_client.ip('58.220.219.247')
{
  "ip": "58.220.219.247",
  "seen": true,
  "classification": "malicious",
  "first_seen": "2019-04-04",
  "last_seen": "2019-08-21",
  "actor": "unknown",
  "tags": [
    "MSSQL Bruteforcer",
    "MSSQL Scanner",
    "RDP Scanner"
  ],
  "metadata": {
    "country": "China",
    "country_code": "CN",
    "city": "Kunshan",
    "organization": "CHINANET jiangsu province network",
    "asn": "AS4134",
    "tor": false,
    "os": "Windows 7/8",
    "category": "isp"
  },
  "raw_data": {
    "scan": [
      {
        "port": 1433,
        "protocol": "TCP"
Example 26
    def transform(self, records):

        method = 'filter'

        # Setup logger
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        # Enter the mechanism only when the Search is complete and all the events are available
        if self.search_results_info and not self.metadata.preview:

            EVENTS_PER_CHUNK = 1000
            THREADS = 3
            USE_CACHE = False
            ip_field = self.ip_field
            noise_events = self.noise_events

            logger.info(
                "Started filtering the IP address(es) present in field: {}, with noise_status: {}"
                .format(str(ip_field), str(noise_events)))

            try:
                if ip_field:
                    ip_field = ip_field.strip()
                if noise_events:
                    noise_events = noise_events.strip()
                # Validating the given parameters
                try:
                    ip_field = validator.Fieldname(
                        option_name='ip_field').validate(ip_field)
                    noise_events = validator.Boolean(
                        option_name='noise_events').validate(noise_events)
                except ValueError as e:
                    # Validator raises a ValueError with an error message when a parameter is invalid
                    logger.error(str(e))
                    self.write_error(str(e))
                    exit(1)

                try:
                    message = ''
                    api_key = utility.get_api_key(
                        self._metadata.searchinfo.session_key, logger=logger)
                except APIKeyNotFoundError as e:
                    message = str(e)
                except HTTPError as e:
                    message = str(e)

                if message:
                    self.write_error(message)
                    logger.error(
                        "Error occured while retrieving API key, Error: {}".
                        format(message))
                    exit(1)

                # API key validation
                api_key_validation, message = utility.validate_api_key(
                    api_key, logger)
                logger.debug("API validation status: {}, message: {}".format(
                    api_key_validation, str(message)))
                if not api_key_validation:
                    logger.info(message)
                    self.write_error(message)
                    exit(1)

                # divide the records in the form of dict of tuples having chunk_index as key
                # {<index>: (<records>, <All the ips in records>)}
                chunk_dict = event_generator.batch(records, ip_field,
                                                   EVENTS_PER_CHUNK, logger)
                logger.debug("Successfully divided events into chunks")

                # This means there are at most 1000 distinct IPs to query in the entire set of records
                # Use a single thread with the caching mechanism enabled for the chunk
                if len(chunk_dict) == 1:
                    logger.info(
                        "Fewer than 1000 distinct IPs are present, optimizing the IP request calls to the GreyNoise API..."
                    )
                    THREADS = 1
                    USE_CACHE = True

                # Using a 120-second timeout for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       use_cache=USE_CACHE,
                                       integration_name="Splunk")

                # When no records found, batch will return {0:([],[])}
                if len(list(chunk_dict.values())[0][0]) >= 1:
                    for chunk_index, result in event_generator.get_all_events(
                            api_client,
                            method,
                            ip_field,
                            chunk_dict,
                            logger,
                            threads=THREADS):
                        # Pass the collected data to the event filter method
                        for event in event_filter(chunk_index, result,
                                                  chunk_dict[chunk_index],
                                                  ip_field, noise_events,
                                                  method):
                            yield event

                        # Deleting the chunk with the events that are already indexed
                        del chunk_dict[chunk_index]

                    logger.info(
                        "Successfully sent all the results to the Splunk")
                else:
                    logger.info(
                        "No events found, please increase the search timespan to have more search results."
                    )

            except Exception as e:
                logger.info(
                    "Exception occured while filtering events, Error: {}".
                    format(traceback.format_exc()))
                self.write_error(
                    "Exception occured while filtering the events based on noise status. See greynoise_main.log for more details."
                )
Example 27
 def connect(self, params):
     self.api_key = params.get("credentials").get("secretKey", "")
     self.server = "https://api.greynoise.io"
     self.user_agent = f"rapid7-insightconnect-v{self.meta.version}"
     self.gn_client = GreyNoise(api_server=self.server, api_key=self.api_key, integration_name=self.user_agent)
     self.logger.info("Connect: Connecting...")
 def __init__(self, context):
     self.api_key = context.asset["api_key"]
     self.session = GreyNoise(
         api_key=self.api_key,
         integration_name="greynoise-swimlane-" + PLUGIN_VERSION,
     )
    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        ip_address = self.ip
        ip_field = self.ip_field
        api_key = ""
        EVENTS_PER_CHUNK = 1
        THREADS = 3
        USE_CACHE = False
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key, log_context=self._metadata.searchinfo.command)

        if ip_address and ip_field:
            logger.error("Please use parameter ip to work gnriot as generating command or "
                         "use parameter ip_field to work gnriot as transforming command.")
            self.write_error("Please use parameter ip to work gnriot as generating command or "
                             "use parameter ip_field to work gnriot as transforming command")
            exit(1)

        try:
            message = ''
            api_key = utility.get_api_key(self._metadata.searchinfo.session_key, logger=logger)
        except APIKeyNotFoundError as e:
            message = str(e)
        except HTTPError as e:
            message = str(e)

        if message:
            self.write_error(message)
            logger.error("Error occured while retrieving API key, Error: {}".format(message))
            exit(1)

        if ip_address and not ip_field:
            # This piece of code works as a generating command and does not use the Splunk events.
            # Strip the spaces from the parameter value if given
            ip_address = ip_address.strip()

            logger.info("Started retrieving results")
            try:
                logger.debug("Initiating to fetch RIOT information for IP address: {}".format(str(ip_address)))
                api_client = GreyNoise(api_key=api_key, timeout=120, integration_name=INTEGRATION_NAME)
                # Using a 120-second timeout for the requests
                session_key = self._metadata.searchinfo.session_key
                riot_information = utility.get_response_for_generating(
                    session_key, api_client, ip_address, 'greynoise_riot', logger)
                logger.info("Retrieved results successfully")

                # Process the API response and send the IP's RIOT information to Splunk with field extractions
                yield event_generator.make_valid_event('riot', riot_information, True)
                logger.debug("Fetched RIOT information for ip={} from GreyNoise API".format(str(ip_address)))

            except ValueError as e:
                error_msg = str(e).split(":")
                logger.debug("Generating RIOT information for ip={} manually".format(str(ip_address)))
                event = {
                    'ip': ip_address,
                    'error': error_msg[0]
                }
                yield event_generator.make_invalid_event('riot', event, True)
                logger.warn(error_msg)
                self.write_warning(
                    "Value of IP address passed to {command_name} is either invalid or non-routable".format(
                        command_name=str(self._metadata.searchinfo.command)))
            except RateLimitError:
                logger.error("Rate limit error occured while fetching the context information for ip={}".format(
                    str(ip_address)))
                self.write_error("The Rate Limit has been exceeded. Please contact the Administrator")
            except RequestFailure as e:
                response_code, response_message = e.args
                if response_code == 401:
                    msg = "Unauthorized. Please check your API key."
                else:
                    # Need to handle this, as splunklib is unable to handle the exception with
                    # (400, {'error': 'error_reason'}) format
                    msg = ("The API call to the GreyNoise platform have been failed "
                           "with status_code: {} and error: {}").format(
                        response_code, response_message['error'] if isinstance(response_message, dict)
                        else response_message)

                logger.error("{}".format(str(msg)))
                self.write_error(msg)
            except ConnectionError:
                logger.error("Error while connecting to the Server. Please check your connection and try again.")
                self.write_error("Error while connecting to the Server. Please check your connection and try again.")
            except RequestException:
                logger.error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again.")
                self.write_error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again.")
            except Exception:
                logger.error("Exception: {} ".format(str(traceback.format_exc())))
                self.write_error("Exception occured while fetching the RIOT information of the IP address. "
                                 "See greynoise_main.log for more details.")

        elif ip_field:

            logger.info("Started retrieving RIOT information for the IP addresses present in field: {}".format(
                str(ip_field)))
            # Enter the mechanism only when the Search is complete and all the events are available
            if self.search_results_info and not self.metadata.preview:
                try:
                    # Strip the spaces from the parameter value if given
                    ip_field = ip_field.strip()
                    # Validating the given parameter
                    try:
                        ip_field = validator.Fieldname(option_name='ip_field').validate(ip_field)
                    except ValueError as e:
                        # Validator raises a ValueError with an error message when a parameter is invalid
                        logger.error(str(e))
                        self.write_error(str(e))
                        exit(1)

                    # API key validation
                    if not self.api_validation_flag:
                        api_key_validation, message = utility.validate_api_key(api_key, logger)
                        logger.debug("API validation status: {}, message: {}".format(api_key_validation, str(message)))
                        self.api_validation_flag = True
                        if not api_key_validation:
                            logger.info(message)
                            self.write_error(message)
                            exit(1)

                    # This piece of code will work as transforming command and will use
                    # the Splunk ingested events and field which is specified in ip_field.
                    # divide the records in the form of dict of tuples having chunk_index as key
                    # {<index>: (<records>, <All the ips in records>)}
                    chunk_dict = event_generator.batch(
                        records, ip_field, EVENTS_PER_CHUNK, logger, optimize_requests=False)
                    logger.debug("Successfully divided events into chunks")

                    # This means there are at most 1000 distinct IPs to query in the entire set of records
                    # Use a single thread with the caching mechanism enabled for the chunk
                    if len(chunk_dict) == 1:
                        logger.debug("Fewer than 1000 distinct IPs are present, "
                                     "optimizing the IP request calls to the GreyNoise API...")
                        THREADS = 1
                        USE_CACHE = True

                    api_client = GreyNoise(
                        api_key=api_key, timeout=120, use_cache=USE_CACHE, integration_name=INTEGRATION_NAME)

                    # When no records found, batch will return {0:([],[])}
                    if len(chunk_dict) > 0:
                        for event in event_generator.get_all_events(
                                self._metadata.searchinfo.session_key, api_client, 'greynoise_riot', ip_field,
                                chunk_dict, logger, threads=THREADS):
                            yield event

                        logger.info("Successfully sent all the results to the Splunk")
                    else:
                        logger.info("No events found, please increase the search timespan to have more search results.")
                except Exception:
                    logger.info(
                        "Exception occured while adding the RIOT information to the events, Error: {}".format(
                            traceback.format_exc()))
                    self.write_error("Exception occured while adding the RIOT information of the IP addresses "
                                     "to events. See greynoise_main.log for more details.")

        else:
            logger.error("Please specify exactly one parameter from ip and ip_field with some value.")
            self.write_error("Please specify exactly one parameter from ip and ip_field with some value.")
    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        # Setup logger
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        if self.search_results_info and not self.metadata.preview:

            EVENTS_PER_CHUNK = 1
            THREADS = 3
            USE_CACHE = False
            ip_field = self.ip_field

            logger.info(
                "Started retrieving context information for the IP addresses present in field: {}"
                .format(str(ip_field)))

            try:
                # Strip the spaces from the parameter value if given
                if ip_field:
                    ip_field = ip_field.strip()
                # Validating the given parameters
                try:
                    ip_field = validator.Fieldname(
                        option_name='ip_field').validate(ip_field)
                except ValueError as e:
                    # Validator raises a ValueError with an error message when a parameter is invalid
                    logger.error(str(e))
                    self.write_error(str(e))
                    exit(1)

                try:
                    message = ''
                    api_key = utility.get_api_key(
                        self._metadata.searchinfo.session_key, logger=logger)
                except APIKeyNotFoundError as e:
                    message = str(e)
                except HTTPError as e:
                    message = str(e)

                if message:
                    self.write_error(message)
                    logger.error(
                        "Error occured while retrieving API key, Error: {}".
                        format(message))
                    exit(1)

                # API key validation
                if not self.api_validation_flag:
                    api_key_validation, message = utility.validate_api_key(
                        api_key, logger)
                    logger.debug(
                        "API validation status: {}, message: {}".format(
                            api_key_validation, str(message)))
                    self.api_validation_flag = True
                    if not api_key_validation:
                        logger.info(message)
                        self.write_error(message)
                        exit(1)

                # divide the records in the form of dict of tuples having chunk_index as key
                # {<index>: (<records>, <All the ips in records>)}
                chunk_dict = event_generator.batch(records,
                                                   ip_field,
                                                   EVENTS_PER_CHUNK,
                                                   logger,
                                                   optimize_requests=False)
                logger.debug("Successfully divided events into chunks")

                # This means there are at most 1000 distinct IPs to query in the entire set of records
                # Use a single thread with the caching mechanism enabled for the chunk
                if len(chunk_dict) == 1:
                    logger.debug(
                        "Fewer than 1000 distinct IPs are present, optimizing the IP request calls to the GreyNoise API..."
                    )
                    THREADS = 1
                    USE_CACHE = True

                # Using a 120-second timeout for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       use_cache=USE_CACHE,
                                       integration_name=INTEGRATION_NAME)

                if len(chunk_dict) > 0:
                    for event in event_generator.get_all_events(
                            self._metadata.searchinfo.session_key,
                            api_client,
                            'enrich',
                            ip_field,
                            chunk_dict,
                            logger,
                            threads=THREADS):
                        yield event

                    logger.info(
                        "Successfully sent all the results to the Splunk")
                else:
                    logger.info(
                        "No events found, please increase the search timespan to have more search results."
                    )

            except Exception:
                logger.info(
                    "Exception occured while getting context information for events events, Error: {}"
                    .format(traceback.format_exc()))
                self.write_error(
                    "Exception occured while enriching events with the context information of IP addresses. "
                    "See greynoise_main.log for more details.")