예제 #1
0
    def do_generate(self, api_key, logger):
        """
        Fetch GNQL query results and yield them as Splunk events.

        :param api_key: GreyNoise API key.
        :param logger: logger object.
        """
        query = self.query
        result_size = self.result_size

        logger.info("Started retrieving results for query: {}".format(str(query)))

        if query == '':
            logger.error("Parameter query should not be empty.")
            self.write_error("Parameter query should not be empty.")
            exit(1)

        # Strip the spaces from the parameter value if given
        if result_size:
            result_size = result_size.strip()

        # Validating the given parameters
        try:
            result_size = validator.Integer(option_name='result_size', minimum=1).validate(result_size)
        except ValueError as e:
            # Validator will throw ValueError with error message when the parameters are not proper
            logger.error(str(e))
            self.write_error(str(e))
            exit(1)

        # Opting timeout of 240 seconds for the request
        api_client = GreyNoise(api_key=api_key, timeout=240)

        logger.info("Fetching results for GNQL query: {}, requested number of results: {}".format(str(query), str(result_size)))

        # Keep generating the events till result_size is not reached or all the query results are sent to Splunk
        for event in response_scroller(api_client, logger, query, result_size):
            yield event

        # Fixed misspelling ("Succcessfully") in the completion log message.
        logger.info("Successfully retrieved results for the GreyNoise query: {}".format(str(query)))
예제 #2
0
    def do_generate(self, api_key, logger):
        """Fetch the RIOT status for the configured IP and yield it as a Splunk event."""
        ip_address = self.ip

        try:
            # Remove surrounding whitespace from the parameter, if present.
            if ip_address:
                ip_address = ip_address.strip()

            logger.info("Initiating to fetch RIOT status for ip: {}".format(
                str(ip_address)))

            # Default 60 second timeout is sufficient for a single lookup.
            api_client = GreyNoise(
                api_key=api_key, timeout=60, integration_name="Splunk")
            context_info = api_client.riot(ip_address)
            logger.info(
                "Successfully retrieved the RIOT status for ip={}".format(
                    str(ip_address)))

            # Convert the API response into a Splunk event with field extractions.
            yield event_generator.make_valid_event('ip', context_info, True)

        except ValueError:
            logger.error(
                "IP address: {} doesn\'t match the valid IP format".format(
                    str(ip_address)))
            self.write_error("IP address doesn\'t match the valid IP format")
예제 #3
0
파일: inquire.py 프로젝트: dpdug4n/os_dash
 def gnIPCheck(rIP):
     """Rate each IP in rIP via the GreyNoise community API.

     Appends one rating and one organization/domain label per address to
     utils.rIPRating and utils.rIPDomain, in input order. Private addresses
     are labeled without an API call.
     """
     # https://developer.greynoise.io/docs/using-the-greynoise-community-api
     api_key = os.getenv('greynoise_key')
     api_client = GreyNoise(api_key=api_key)
     utils.rIPRating = []
     utils.rIPDomain = []
     # Map GreyNoise classifications to the dashboard's risk labels; this
     # replaces three duplicated elif branches that only differed in label.
     ratings = {'malicious': 'High', 'unknown': 'Medium', 'benign': 'Low'}
     for ip in rIP:
         if ipaddress.ip_address(ip).is_private:
             utils.rIPRating.append('Private')
             utils.rIPDomain.append('Local')
             continue
         response = api_client.ip(ip)
         if response['seen'] is False:
             # Never observed by GreyNoise at all.
             utils.rIPRating.append('unknown')
             utils.rIPDomain.append('unknown')
         elif response['classification'] in ratings:
             utils.rIPRating.append(ratings[response['classification']])
             utils.rIPDomain.append(response['metadata']['organization'])
         else:
             # Unexpected classification value from the API.
             utils.rIPRating.append('error')
             utils.rIPDomain.append('error')
예제 #4
0
def main():
    """Run the core: handle `setup` (store API key) then build a client.

    Raises:
        Exception: when the stored config has no API key.
        NotImplementedError: the post-setup behavior is not implemented yet.
    """
    parser = ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')
    setup_parser = subs.add_parser('setup')
    setup_parser.add_argument('-k',
                              '--api-key',
                              dest='api_key',
                              required=True,
                              help='API key for GreyNoise.',
                              type=str)
    args = parser.parse_args()

    if args.cmd == 'setup':
        # exist_ok avoids the check-then-create race of os.path.exists().
        os.makedirs(CONFIG_PATH, exist_ok=True)
        config = CONFIG_DEFAULTS
        config['api_key'] = args.api_key
        with open(CONFIG_FILE, 'w') as conf_file_handle:
            json.dump(config,
                      conf_file_handle,
                      indent=4,
                      separators=(',', ': '))

    # Context manager so the config file handle is not leaked
    # (was json.load(open(CONFIG_FILE))).
    with open(CONFIG_FILE) as conf_file_handle:
        config = json.load(conf_file_handle)
    if config['api_key'] == '':
        raise Exception("Run setup before any other actions!")

    GreyNoise(config['api_key'])
    raise NotImplementedError
    def __init__(self, config):
        """Initialize the action and build a shared GreyNoise client."""
        super(GreyNoiseBaseAction, self).__init__(config=config)

        # Missing key in the pack config falls back to None.
        gn_api_key = self.config.get('greynoise_api_key', None)
        self.gn_client = GreyNoise(
            api_key=gn_api_key,
            integration_name="greynoise-stackstorm-v" + PACK_VERSION,
        )
예제 #6
0
def get_greynoise_info_ip():
    """Prompt for an IPv4 address and print its GreyNoise quick-check result."""
    target_ip = input("[+] 2 IPv4 query  Enter IP Address to query: ")

    client = GreyNoise(api_key=GN_KEY)
    seen_before = client.quick(target_ip)

    print(f'{style.BLUE}GreyNoise Output: {seen_before}\n')
예제 #7
0
 def get_api_client(self):
     """Build a GreyNoise client using the saved session's API key."""
     key = get_api_key(self.session_key, self.logger)
     if not key:
         # No key configured: bail out through the alert-exit handler.
         self._handle_alert_exit(1)
     client = GreyNoise(api_key=key,
                        timeout=120,
                        integration_name=INTEGRATION_NAME)
     return client
 def __init__(self, context):
     """Store the task inputs and create a community-offering client."""
     super(SwMain, self).__init__(context)
     self.ip_address = context.inputs["ip_address"]
     self.api_key = context.asset["api_key"]
     # Community offering: free tier of the GreyNoise API.
     self.session = GreyNoise(
         api_key=self.api_key,
         offering="community",
         integration_name="greynoise-community-swimlane-" + PLUGIN_VERSION,
     )
예제 #9
0
    def do_generate(self, api_key, logger):
        """
        Fetch aggregate statistics for a GNQL query and yield them to Splunk.

        :param api_key: GreyNoise API key.
        :param logger: logger object.
        """
        query = self.query
        count = self.count

        if query == '':
            logger.error("Parameter query should not be empty.")
            self.write_error("Parameter query should not be empty.")
            exit(1)

        # Strip the spaces from the parameter value if given
        if count:
            count = count.strip()
        # Validating the given parameters
        try:
            count = validator.Integer(option_name='count',
                                      minimum=1).validate(count)
        except ValueError as e:
            # Validator will throw ValueError with error message when the parameters are not proper
            logger.error(str(e))
            self.write_error(str(e))
            exit(1)

        logger.info(
            "Fetching aggregate statistics for query: {}, count: {}".format(
                str(query), count))
        # Opting timeout of 240 seconds for the requests
        api_client = GreyNoise(api_key=api_key,
                               timeout=240,
                               integration_name="Splunk")
        # If count is not passed explicitly to the command by the user, then it will have the value None
        stats_data = api_client.stats(query, count)
        logger.info(
            "Successfully retrieved response for the aggregate statistics for query: {}, count: {}"
            .format(str(query), count))

        if int(stats_data.get('count', -1)) >= 0:
            results = {}
            results['source'] = 'greynoise'
            results['sourcetype'] = 'greynoise'
            results['_time'] = time.time()
            results['_raw'] = {'results': stats_data}
            yield results
        else:
            response = stats_data.get('message', None) or stats_data.get(
                'error', None)

            # Guard: response is None when the payload carries neither a
            # 'message' nor an 'error' key; `'bad count' in None` would raise
            # TypeError.
            if response and ('bad count' in response
                             or 'bad query' in response):
                logger.error(
                    "Invalid response retrieved from the GreyNoise API for query: {}, response: {}"
                    .format(str(query), str(response)))
                if 'message' in response:
                    event = {'message': response}
                else:
                    event = {'error': response}
                yield event_generator.make_invalid_event('stats', event, True)
예제 #10
0
 def intel(self, type, query, data, conf):
     """Append a GreyNoise sighting report to ``data`` for an IP query.

     No-op for non-IP observable types.
     """
     if type == "ip":
         print("[+] Checking GreyNoise...")
         logging.getLogger("greynoise").setLevel(logging.CRITICAL)
         # Both tiers share the key and integration name; only the community
         # tier needs the extra ``offering`` argument. Building the kwargs
         # once removes the previously duplicated client construction.
         client_kwargs = {
             "api_key": conf["GreyNoise"]["key"],
             "integration_name": "Harpoon (https://github.com/Te-k/harpoon)",
         }
         is_community = conf["GreyNoise"]["api_type"].lower() == "community"
         if is_community:
             client_kwargs["offering"] = "community"
         gn = GreyNoise(**client_kwargs)
         res = gn.ip(query)
         if is_community:
             # Community responses flag activity via ``noise``/``name``.
             if res["noise"]:
                 data["reports"].append({
                     "url":
                     "https://viz.greynoise.io/ip/{}".format(query),
                     "title":
                     "Seen by GreyNoise as {}".format(res["name"]),
                     "date":
                     None,
                     "source":
                     "GreyNoise",
                 })
         elif res["seen"]:
             # Full API responses use ``seen``/``tags`` instead.
             data["reports"].append({
                 "url":
                 "https://viz.greynoise.io/ip/{}".format(query),
                 "title":
                 "Seen by GreyNoise as {}".format(", ".join(res["tags"])),
                 "date":
                 None,
                 "source":
                 "GreyNoise",
             })
예제 #11
0
파일: gn.py 프로젝트: firebitsbr/harpoon
 def run(self, conf, args, plugins):
     """Dispatch the gn subcommand: IP lookup, GNQL query, or help."""
     logging.getLogger("greynoise").setLevel(logging.CRITICAL)
     gn = GreyNoise(api_key=conf["GreyNoise"]["key"])
     if args.ip:
         self.print_results(gn.ip(args.ip), args)
     elif args.query:
         self.print_results(gn.query(args.query), args)
     else:
         # Nothing requested: show usage.
         self.parser.print_help()
예제 #12
0
파일: gn.py 프로젝트: evilcel3ri/harpoon
 def run(self, conf, args, plugins):
     """Run an IP or GNQL lookup, requiring a configured API key."""
     key = conf["GreyNoise"]["key"]
     if key == "":
         print("You need to set your API key with GreyNoise")
         sys.exit()
     gn = GreyNoise(api_key=key)
     if args.ip:
         self.print_results(gn.ip(args.ip), args)
     elif args.query:
         self.print_results(gn.query(args.query), args)
     else:
         # Nothing requested: show usage.
         self.parser.print_help()
예제 #13
0
    def generate(self):
        """Method that yields records to the Splunk processing pipeline."""
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        # Only run once the search is complete and all events are available.
        if not self.search_results_info or self.metadata.preview:
            return

        try:
            api_key = utility.get_api_key(
                self._metadata.searchinfo.session_key, logger=logger)

            # Without an API key there is nothing useful to do.
            if not api_key:
                logger.error(
                    "API key not found. Please configure the GreyNoise App for Splunk."
                )
                exit(1)

            # 240 second timeout for the stats requests.
            api_client = GreyNoise(api_key=api_key,
                                   timeout=240,
                                   integration_name=INTEGRATION_NAME)

            queries = {
                "malicious": "classification:malicious last_seen:today",
                "benign": "classification:benign last_seen:today",
                "unknown": "classification:unknown last_seen:today"
            }

            for classification, gnql in queries.items():
                logger.debug(
                    "Fetching records for classification: {}".format(
                        classification))
                stats_data = api_client.stats(gnql, None)
                stats = stats_data.get("stats")
                if not stats:
                    logger.error(
                        "Returning no results because of unexpected response in one of the query."
                    )
                    exit(1)
                self.handle_stats(stats, classification)

            for result in self.RESULTS:
                yield result
            logger.info("Events returned successfully to Splunk.")

        except Exception:
            logger.error("Exception: {} ".format(
                str(traceback.format_exc())))
            exit(1)
예제 #14
0
 def run(self, conf, args, plugins):
     """Dispatch gn subcommands against the configured GreyNoise offering."""
     logging.getLogger("greynoise").setLevel(logging.CRITICAL)
     client_kwargs = {
         "api_key": conf["GreyNoise"]["key"],
         "integration_name": "Harpoon (https://github.com/Te-k/harpoon)",
     }
     # The community (free) tier needs an explicit offering argument.
     if conf["GreyNoise"]["api_type"].lower() == "community":
         client_kwargs["offering"] = "community"
     gn = GreyNoise(**client_kwargs)
     if args.ip:
         self.print_results(gn.ip(args.ip), args)
     elif args.query:
         self.print_results(gn.query(args.query), args)
     elif args.list:
         self.print_results(gn.metadata(), args)
     else:
         self.parser.print_help()
    def run(self):
        """Look up the IP observable with GreyNoise and report the result.

        Non-IP data types are rejected via ``self.notSupported()``.
        """
        if self.data_type != "ip":
            self.notSupported()
            return

        api_key = self.get_param("config.key", None)
        api_type = self.get_param("config.api_type", None)
        # Both tiers share everything except the ``offering`` argument, so
        # build the kwargs once instead of duplicating the constructor call.
        client_kwargs = {
            "api_key": api_key,
            "timeout": 30,
            "integration_name": "greynoise-cortex-analyzer-v3.1",
        }
        if api_type and api_type.lower() == "community":
            client_kwargs["offering"] = "community"
        api_client = GreyNoise(**client_kwargs)
        try:
            self.report(api_client.ip(self.get_data()))
        except Exception as e:
            # Boundary handler: surface any API failure through the analyzer.
            self.error("Unable to query GreyNoise API\n{}".format(e))
    def create_entities(cls, request, response):
        """Maltego transform: enrich an IPv4 entity with GreyNoise community data.

        Echoes the input IP back onto the graph, then — when GreyNoise knows
        the address — attaches noise/RIOT status, actor alias, classification,
        last-seen date, and a visualizer URL as separate entities.
        """
        api_key = request.TransformSettings["GNApiKey"]
        api_client = GreyNoise(
            api_key=api_key,
            integration_name="maltego-community-v1.0.0",
            offering="community",
        )
        input_ip = response.addEntity("maltego.IPv4Address", request.Value)
        try:
            resp = api_client.ip(request.Value)
            # Either flag means GreyNoise knows the address: internet noise,
            # or a known-benign common business service (RIOT).
            if resp["noise"] or resp["riot"]:
                if resp["noise"]:
                    response.addEntity("greynoise.noise", "Noise Detected")
                if resp["riot"]:
                    response.addEntity("greynoise.noise",
                                       "Benign Service Detected")
                response.addEntity("maltego.Alias", resp["name"])
                response.addEntity("greynoise.classification",
                                   resp["classification"])
                response.addEntity("maltego.DateTime", resp["last_seen"])
                url = response.addEntity("maltego.URL", resp["link"])
                # Both URL properties carry the GreyNoise visualizer link.
                url.addProperty(
                    fieldName="short-title",
                    displayName="GreyNoise color",
                    value=resp["link"],
                    matchingRule="strict",
                )
                url.addProperty(
                    fieldName="url",
                    displayName="GreyNoise color",
                    value=resp["link"],
                    matchingRule="strict",
                )
            else:
                response.addEntity("greynoise.noise", "No Noise Detected")
                response.addUIMessage(
                    f"The IP address {request.Value} hasn't been seen by GreyNoise."
                )

            # Decorate the input entity with whatever fields were returned.
            add_display_info(
                input_ip,
                resp.get("classification"),
                resp.get("last_seen"),
                resp.get("link"),
                resp.get("name"),
            )
        except Exception as e:
            # NOTE(review): passes the exception object itself to addUIMessage —
            # presumably it stringifies internally; confirm str(e) isn't needed.
            response.addUIMessage(e)
예제 #17
0
파일: gn.py 프로젝트: firebitsbr/harpoon
 def intel(self, type, query, data, conf):
     """Append a GreyNoise sighting report to ``data`` for an IP query."""
     if type == "ip":
         print("[+] Checking GreyNoise...")
         logging.getLogger("greynoise").setLevel(logging.CRITICAL)
         gn = GreyNoise(api_key=conf["GreyNoise"]["key"])
         res = gn.ip(query)
         if res["seen"]:
             report = {
                 "url": "https://viz.greynoise.io/ip/{}".format(query),
                 "title": "Seen by GreyNoise as {}".format(
                     ", ".join(res["tags"])),
                 "date": None,
                 "source": "GreyNoise"
             }
             data["reports"].append(report)
예제 #18
0
    def run(self, params={}):
        """Query the GreyNoise community API for the given IP address."""
        gn_client = GreyNoise(
            api_server=self.connection.server,
            api_key=self.connection.api_key,
            integration_name=self.connection.user_agent,
            offering="community",
        )
        try:
            resp = gn_client.ip(params.get(Input.IP_ADDRESS))
            # Known addresses get their last_seen normalized to RFC 3339.
            if resp["noise"] or resp["riot"]:
                parsed = pendulum.parse(resp["last_seen"])
                resp["last_seen"] = parsed.to_rfc3339_string()

        except RequestFailure as e:
            raise GNRequestFailure(e.args[0], e.args[1])

        except ValueError as e:
            raise GNValueError(e.args[0])

        return resp
예제 #19
0
def validate_api_key(api_key, logger=None):
    """
    Validate the API key using the actual lightweight call to the GreyNoise API.

    Returns false only when 401 code is thrown, indicating the unauthorized access.

    :param api_key: GreyNoise API key to validate.
    :param logger: optional logger for debug output.
    :return: tuple of (bool validity, str human-readable message).
    """
    if logger:
        logger.debug("Validating the api key...")

    try:
        api_client = GreyNoise(api_key=api_key,
                               timeout=120,
                               integration_name=INTEGRATION_NAME)
        api_client.test_connection()
        return (True, 'API key is valid')

    except RateLimitError:
        # Spelling fixed in the user-facing messages below ("occured").
        msg = "RateLimitError occurred, please contact the Administrator"
        return (False, 'API key not validated, Error: {}'.format(msg))
    except RequestFailure as e:
        response_code, response_message = e.args
        if response_code == 401:
            return (False, 'Unauthorized. Please check your API key.')
        else:
            # Need to handle this, as splunklib is unable to handle the exception with
            # (400, {'error': 'error_reason'}) format
            msg = ("The API call to the GreyNoise API has failed "
                   "with status_code: {} and error: {}").format(
                       response_code, response_message['error'] if isinstance(
                           response_message, dict) else response_message)
            return (False, 'API key not validated, Error: {}'.format(msg))
    except ConnectionError:
        msg = "ConnectionError occurred, please check your connection and try again."
        return (False, 'API key not validated, Error: {}'.format(msg))
    except RequestException:
        msg = "An ambiguous exception occurred, please try again."
        return (False, 'API key not validated, Error: {}'.format(msg))
    except Exception as e:
        return (False, 'API key not validated, Error: {}'.format(str(e)))
예제 #20
0
    def do_generate(self, api_key, logger):
        """
        Method to fetch the api response and process and send the response with extractions in the Splunk.

        :param api_key: GreyNoise API Key.
        :param logger: logger object.
        """
        ip_address = self.ip

        try:
            # Strip the spaces from the parameter value if given
            if ip_address:
                ip_address = ip_address.strip()

            logger.info(
                "Initiating to fetch context information for ip: {}".format(
                    str(ip_address)))
            # Opting default timeout of 60 seconds for the request
            api_client = GreyNoise(api_key=api_key,
                                   timeout=60,
                                   integration_name=INTEGRATION_NAME)
            session_key = self._metadata.searchinfo.session_key
            context_info = get_response_for_generating(session_key, api_client,
                                                       ip_address, 'ip',
                                                       logger)
            logger.info(
                "Successfully retrieved the context information for ip={}".
                format(str(ip_address)))

            # Process the API response and send the context information of IP with extractions in the Splunk
            results = event_generator.make_valid_event('ip', context_info,
                                                       True)
            yield results

        except ValueError as e:
            # Surface only the text before the first ':' of the validation
            # message to the user; log the full exception.
            error_msg = str(e).split(":")
            logger.error(e)
            self.write_error(error_msg[0])
예제 #21
0
def lkup_sus_ip_address(susp_addr, api_key=""):
    """Find RDNS of IP address and print info using GreyNoise API.

    Args:
        susp_addr: Suspect IPv4 address.
        api_key: GreyNoise API key. Defaults to "" (the previous hard-coded
            unauthenticated value), so existing callers are unaffected.
    Prints:
        Domain (if found) & GreyNoise Output (if found).
    Raises:
        NXDOMAIN is caught and printed; other resolver errors propagate.
    """
    try:
        rev = dns.reversename.from_address(susp_addr)
        output = str(dns.resolver.query(rev, 'PTR')[0])

        # Key is now injectable instead of being hard-coded empty.
        api_client = GreyNoise(api_key=api_key, timeout=15)
        bring_the_noise = api_client.ip(susp_addr)

        print("Found domain: {}".format(output))
        print('*' * 80)
        print(bring_the_noise)

    except dns.resolver.NXDOMAIN as e:
        print(e)
예제 #22
0
    def transform(self, records):
        """Method that processes and yield event records to the Splunk events pipeline."""
        ip_addresses = self.ip
        ip_field = self.ip_field
        api_key = ""
        EVENTS_PER_CHUNK = 5000
        THREADS = 3
        USE_CACHE = False
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        if ip_addresses and ip_field:
            logger.error(
                "Please use parameter ip to work gnquick as generating command or "
                "use parameter ip_field to work gnquick as transforming command."
            )
            self.write_error(
                "Please use parameter ip to work gnquick as generating command or "
                "use parameter ip_field to work gnquick as transforming command"
            )
            exit(1)

        try:
            message = ''
            api_key = utility.get_api_key(
                self._metadata.searchinfo.session_key, logger=logger)
        except APIKeyNotFoundError as e:
            message = str(e)
        except HTTPError as e:
            message = str(e)

        if message:
            self.write_error(message)
            logger.error(
                "Error occured while retrieving API key, Error: {}".format(
                    message))
            exit(1)

        if ip_addresses and not ip_field:
            # This peice of code will work as generating command and will not use the Splunk events.
            # Splitting the ip_addresses by commas and stripping spaces from both the sides for each IP address
            ip_addresses = [ip.strip() for ip in ip_addresses.split(',')]

            logger.info("Started retrieving results")
            try:
                logger.debug(
                    "Initiating to fetch noise and RIOT status for IP address(es): {}"
                    .format(str(ip_addresses)))

                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       integration_name=INTEGRATION_NAME)

                # CACHING START
                cache_enabled, cache_client = utility.get_caching(
                    self._metadata.searchinfo.session_key, 'multi', logger)
                if int(cache_enabled) == 1 and cache_client is not None:
                    cache_start = time.time()
                    ips_not_in_cache, ips_in_cache = utility.get_ips_not_in_cache(
                        cache_client, ip_addresses, logger)
                    try:
                        response = []
                        if len(ips_in_cache) >= 1:
                            response = cache_client.query_kv_store(
                                ips_in_cache)
                        if response is None:
                            logger.debug(
                                "KVStore is not ready. Skipping caching mechanism."
                            )
                            noise_status = api_client.quick(ip_addresses)
                        elif response == []:
                            noise_status = utility.fetch_response_from_api(
                                api_client.quick, cache_client, ip_addresses,
                                logger)
                        else:
                            noise_status = utility.fetch_response_from_api(
                                api_client.quick, cache_client,
                                ips_not_in_cache, logger)
                            noise_status.extend(response)
                    except Exception:
                        logger.debug(
                            "An exception occurred while fetching response from cache.\n{}"
                            .format(traceback.format_exc()))
                    logger.debug(
                        "Generating command with caching took {} seconds.".
                        format(time.time() - cache_start))
                else:
                    # Opting timout 120 seconds for the requests
                    noise_status = api_client.quick(ip_addresses)
                logger.info("Retrieved results successfully")
                # CACHING END

                # Process the API response and send the noise and RIOT status information of IP with extractions
                # to the Splunk, Using this flag to handle the field extraction issue in custom commands
                # Only the fields extracted from the first event of generated by custom command
                # will be extracted from all events
                first_record_flag = True

                # Flag to indicate whether erroneous IPs are present
                erroneous_ip_present = False
                for ip in ip_addresses:
                    for sample in noise_status:
                        if ip == sample['ip']:
                            yield event_generator.make_valid_event(
                                'quick', sample, first_record_flag)
                            if first_record_flag:
                                first_record_flag = False
                            logger.debug(
                                "Fetched noise and RIOT status for ip={} from GreyNoise API"
                                .format(str(ip)))
                            break
                    else:
                        erroneous_ip_present = True
                        try:
                            validate_ip(ip, strict=True)
                        except ValueError as e:
                            error_msg = str(e).split(":")
                            logger.debug(
                                "Generating noise and RIOT status for ip={} manually"
                                .format(str(ip)))
                            event = {'ip': ip, 'error': error_msg[0]}
                            yield event_generator.make_invalid_event(
                                'quick', event, first_record_flag)

                            if first_record_flag:
                                first_record_flag = False

                if erroneous_ip_present:
                    logger.warn(
                        "Value of one or more IP address(es) is either invalid or non-routable"
                    )
                    self.write_warning(
                        "Value of one or more IP address(es) passed to {command_name} "
                        "is either invalid or non-routable".format(
                            command_name=str(
                                self._metadata.searchinfo.command)))

            except RateLimitError:
                logger.error(
                    "Rate limit error occured while fetching the context information for ips={}"
                    .format(str(ip_addresses)))
                self.write_error(
                    "The Rate Limit has been exceeded. Please contact the Administrator"
                )
            except RequestFailure as e:
                response_code, response_message = e.args
                if response_code == 401:
                    msg = "Unauthorized. Please check your API key."
                else:
                    # Need to handle this, as splunklib is unable to handle the exception with
                    # (400, {'error': 'error_reason'}) format
                    msg = (
                        "The API call to the GreyNoise platform have been failed "
                        "with status_code: {} and error: {}").format(
                            response_code,
                            response_message['error'] if isinstance(
                                response_message, dict) else response_message)

                logger.error("{}".format(str(msg)))
                self.write_error(msg)
            except ConnectionError:
                logger.error(
                    "Error while connecting to the Server. Please check your connection and try again."
                )
                self.write_error(
                    "Error while connecting to the Server. Please check your connection and try again."
                )
            except RequestException:
                logger.error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again."
                )
                self.write_error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again."
                )
            except Exception:
                logger.error("Exception: {} ".format(
                    str(traceback.format_exc())))
                self.write_error(
                    "Exception occured while fetching the noise and RIOT status of the IP address(es). "
                    "See greynoise_main.log for more details.")

        elif ip_field:
            # Enter the mechanism only when the Search is complete and all the events are available
            if self.search_results_info and not self.metadata.preview:

                try:
                    # Strip the spaces from the parameter value if given
                    ip_field = ip_field.strip()
                    # Validating the given parameter
                    try:
                        ip_field = validator.Fieldname(
                            option_name='ip_field').validate(ip_field)
                    except ValueError as e:
                        # Validator will throw ValueError with error message when the parameters are not proper
                        logger.error(str(e))
                        self.write_error(str(e))
                        exit(1)

                    # API key validation
                    if not self.api_validation_flag:
                        api_key_validation, message = utility.validate_api_key(
                            api_key, logger)
                        logger.debug(
                            "API validation status: {}, message: {}".format(
                                api_key_validation, str(message)))
                        self.api_validation_flag = True
                        if not api_key_validation:
                            logger.info(message)
                            self.write_error(message)
                            exit(1)

                    # This piece of code will work as transforming command and will use
                    # the Splunk ingested events and field which is specified in ip_field.
                    chunk_dict = event_generator.batch(records, ip_field,
                                                       EVENTS_PER_CHUNK,
                                                       logger)

                    # This means there are only 1000 or below IPs to call in the entire bunch of records
                    # Use one thread with single thread with caching mechanism enabled for the chunk
                    if len(chunk_dict) == 1:
                        logger.info(
                            "Less then 1000 distinct IPs are present, "
                            "optimizing the IP requests call to GreyNoise API..."
                        )
                        THREADS = 1
                        USE_CACHE = True

                    api_client = GreyNoise(api_key=api_key,
                                           timeout=120,
                                           use_cache=USE_CACHE,
                                           integration_name=INTEGRATION_NAME)
                    # When no records found, batch will return {0:([],[])}
                    tot_time_start = time.time()
                    if len(list(chunk_dict.values())[0][0]) >= 1:
                        for event in event_generator.get_all_events(
                                self._metadata.searchinfo.session_key,
                                api_client,
                                'multi',
                                ip_field,
                                chunk_dict,
                                logger,
                                threads=THREADS):
                            yield event
                    else:
                        logger.info(
                            "No events found, please increase the search timespan to have more search results."
                        )
                    tot_time_end = time.time()
                    logger.debug(
                        "Total execution time => {}".format(tot_time_end -
                                                            tot_time_start))
                except Exception:
                    logger.info(
                        "Exception occured while adding the noise and RIOT status to the events, Error: {}"
                        .format(traceback.format_exc()))
                    self.write_error(
                        "Exception occured while adding the noise and RIOT status of "
                        "the IP addresses to events. See greynoise_main.log for more details."
                    )

        else:
            logger.error(
                "Please specify exactly one parameter from ip and ip_field with some value."
            )
            self.write_error(
                "Please specify exactly one parameter from ip and ip_field with some value."
            )
Example #23
0
    def transform(self, records):
        """Process and yield event records enriched with GreyNoise RIOT information.

        Operates in one of two mutually exclusive modes:

        * generating command -- the ``ip`` parameter is supplied: fetch the RIOT
          status for that single IP address and yield one event;
        * transforming command -- the ``ip_field`` parameter is supplied: enrich
          the ingested Splunk events using the IP addresses found in that field.

        :param records: Splunk event records piped into the command.
        :yields: event dictionaries carrying RIOT status information.
        """
        ip_address = self.ip
        ip_field = self.ip_field
        api_key = ""
        EVENTS_PER_CHUNK = 1
        THREADS = 3
        USE_CACHE = False
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key, log_context=self._metadata.searchinfo.command)

        # The two parameters are mutually exclusive: exactly one must be supplied.
        if ip_address and ip_field:
            logger.error("Please use parameter ip to work gnriot as generating command or "
                         "use parameter ip_field to work gnriot as transforming command.")
            self.write_error("Please use parameter ip to work gnriot as generating command or "
                             "use parameter ip_field to work gnriot as transforming command")
            exit(1)

        try:
            message = ''
            api_key = utility.get_api_key(self._metadata.searchinfo.session_key, logger=logger)
        except APIKeyNotFoundError as e:
            message = str(e)
        except HTTPError as e:
            message = str(e)

        if message:
            self.write_error(message)
            logger.error("Error occured while retrieving API key, Error: {}".format(message))
            exit(1)

        if ip_address and not ip_field:
            # This piece of code will work as generating command and will not use the Splunk events.
            # Strip the spaces from the parameter value if given
            ip_address = ip_address.strip()

            logger.info("Started retrieving results")
            try:
                logger.debug("Initiating to fetch RIOT information for IP address: {}".format(str(ip_address)))
                # Opting timeout of 120 seconds for the requests
                api_client = GreyNoise(api_key=api_key, timeout=120, integration_name=INTEGRATION_NAME)
                session_key = self._metadata.searchinfo.session_key
                riot_information = utility.get_response_for_generating(
                    session_key, api_client, ip_address, 'greynoise_riot', logger)
                logger.info("Retrieved results successfully")

                # Process the API response and send the riot information of IP with extractions to the Splunk
                yield event_generator.make_valid_event('riot', riot_information, True)
                logger.debug("Fetched RIOT information for ip={} from GreyNoise API".format(str(ip_address)))

            except ValueError as e:
                # The SDK raises ValueError for invalid / non-routable addresses;
                # emit a manually-built event so the pipeline still produces output.
                error_msg = str(e).split(":")
                logger.debug("Generating RIOT information for ip={} manually".format(str(ip_address)))
                event = {
                    'ip': ip_address,
                    'error': error_msg[0]
                }
                yield event_generator.make_invalid_event('riot', event, True)
                # logger.warn is a deprecated alias of logger.warning
                logger.warning(error_msg)
                self.write_warning(
                    "Value of IP address passed to {command_name} is either invalid or non-routable".format(
                        command_name=str(self._metadata.searchinfo.command)))
            except RateLimitError:
                logger.error("Rate limit error occured while fetching the context information for ip={}".format(
                    str(ip_address)))
                self.write_error("The Rate Limit has been exceeded. Please contact the Administrator")
            except RequestFailure as e:
                response_code, response_message = e.args
                if response_code == 401:
                    msg = "Unauthorized. Please check your API key."
                else:
                    # Need to handle this, as splunklib is unable to handle the exception with
                    # (400, {'error': 'error_reason'}) format
                    msg = ("The API call to the GreyNoise platform have been failed "
                           "with status_code: {} and error: {}").format(
                        response_code, response_message['error'] if isinstance(response_message, dict)
                        else response_message)

                logger.error("{}".format(str(msg)))
                self.write_error(msg)
            except ConnectionError:
                logger.error("Error while connecting to the Server. Please check your connection and try again.")
                self.write_error("Error while connecting to the Server. Please check your connection and try again.")
            except RequestException:
                logger.error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again.")
                self.write_error(
                    "There was an ambiguous exception that occurred while handling your Request. Please try again.")
            except Exception:
                logger.error("Exception: {} ".format(str(traceback.format_exc())))
                self.write_error("Exception occured while fetching the RIOT information of the IP address. "
                                 "See greynoise_main.log for more details.")

        elif ip_field:

            logger.info("Started retrieving RIOT information for the IP addresses present in field: {}".format(
                str(ip_field)))
            # Enter the mechanism only when the Search is complete and all the events are available
            if self.search_results_info and not self.metadata.preview:
                try:
                    # Strip the spaces from the parameter value if given
                    ip_field = ip_field.strip()
                    # Validating the given parameter
                    try:
                        ip_field = validator.Fieldname(option_name='ip_field').validate(ip_field)
                    except ValueError as e:
                        # Validator will throw ValueError with error message when the parameters are not proper
                        logger.error(str(e))
                        self.write_error(str(e))
                        exit(1)

                    # API key validation (performed only once per command invocation)
                    if not self.api_validation_flag:
                        api_key_validation, message = utility.validate_api_key(api_key, logger)
                        logger.debug("API validation status: {}, message: {}".format(api_key_validation, str(message)))
                        self.api_validation_flag = True
                        if not api_key_validation:
                            logger.info(message)
                            self.write_error(message)
                            exit(1)

                    # This piece of code will work as transforming command and will use
                    # the Splunk ingested events and field which is specified in ip_field.
                    # divide the records in the form of dict of tuples having chunk_index as key
                    # {<index>: (<records>, <All the ips in records>)}
                    chunk_dict = event_generator.batch(
                        records, ip_field, EVENTS_PER_CHUNK, logger, optimize_requests=False)
                    logger.debug("Successfully divided events into chunks")

                    # Only 1000 or fewer distinct IPs in the entire bunch of records:
                    # use a single thread with the caching mechanism enabled for the chunk.
                    if len(chunk_dict) == 1:
                        logger.debug("Less then 1000 distinct IPs are present, "
                                     "optimizing the IP requests call to GreyNoise API...")
                        THREADS = 1
                        USE_CACHE = True

                    api_client = GreyNoise(
                        api_key=api_key, timeout=120, use_cache=USE_CACHE, integration_name=INTEGRATION_NAME)

                    # When no records found, batch will return {0:([],[])}, so checking
                    # len(chunk_dict) > 0 would always be true; inspect the first chunk's
                    # record list instead (same guard as the other transforming command).
                    if len(list(chunk_dict.values())[0][0]) >= 1:
                        for event in event_generator.get_all_events(
                                self._metadata.searchinfo.session_key, api_client, 'greynoise_riot', ip_field,
                                chunk_dict, logger, threads=THREADS):
                            yield event

                        logger.info("Successfully sent all the results to the Splunk")
                    else:
                        logger.info("No events found, please increase the search timespan to have more search results.")
                except Exception:
                    logger.info(
                        "Exception occured while adding the RIOT information to the events, Error: {}".format(
                            traceback.format_exc()))
                    self.write_error("Exception occured while adding the RIOT information of the IP addresses "
                                     "to events. See greynoise_main.log for more details.")

        else:
            logger.error("Please specify exactly one parameter from ip and ip_field with some value.")
            self.write_error("Please specify exactly one parameter from ip and ip_field with some value.")
Example #24
0
 def connect(self, params):
     """Initialise the GreyNoise SDK client from the supplied connection parameters."""
     # Pull the API key out of the credentials object supplied by the platform.
     secret_key = params.get("credentials").get("secretKey", "")
     base_url = "https://api.greynoise.io"
     agent = f"rapid7-insightconnect-v{self.meta.version}"

     # Persist connection settings on the instance, then build the SDK client.
     self.api_key = secret_key
     self.server = base_url
     self.user_agent = agent
     self.gn_client = GreyNoise(api_server=base_url, api_key=secret_key, integration_name=agent)
     self.logger.info("Connect: Connecting...")
Example #25
0
File: ip.py  Project: evilcel3ri/harpoon
    def run(self, conf, args, plugins):
        if "subcommand" in args:
            if args.subcommand == "info":
                if not is_ip(unbracket(args.IP)):
                    print("Invalid IP address")
                    sys.exit(1)
                # FIXME: move code here in a library
                ip = unbracket(args.IP)
                try:
                    ipy = IP(ip)
                except ValueError:
                    print("Invalid IP format, quitting...")
                    return
                ipinfo = self.ipinfo(ip)
                print("MaxMind: Located in %s, %s" %
                      (ipinfo["city"], ipinfo["country"]))
                if ipinfo["asn"] == 0:
                    print("MaxMind: IP not found in the ASN database")
                else:
                    print("MaxMind: ASN%i, %s" %
                          (ipinfo["asn"], ipinfo["asn_name"]))
                    print("CAIDA Type: %s" % ipinfo["asn_type"])
                try:
                    asndb2 = pyasn.pyasn(self.asncidr)
                    res = asndb2.lookup(ip)
                except OSError:
                    print("Configuration files are not available")
                    print("Please run harpoon update before using harpoon")
                    sys.exit(1)
                if res[1] is None:
                    print("IP not found in ASN database")
                else:
                    # Search for name
                    f = open(self.asnname, "r")
                    found = False
                    line = f.readline()
                    name = ""
                    while not found and line != "":
                        s = line.split("|")
                        if s[0] == str(res[0]):
                            name = s[1].strip()
                            found = True
                        line = f.readline()

                    print("ASN %i - %s (range %s)" % (res[0], name, res[1]))
                if ipinfo["hostname"] != "":
                    print("Hostname: %s" % ipinfo["hostname"])
                if ipinfo["specific"] != "":
                    print("Specific: %s" % ipinfo["specific"])
                if ipy.iptype() == "PRIVATE":
                    "Private IP"
                print("")
                if ipy.version() == 4:
                    print("Censys:\t\thttps://censys.io/ipv4/%s" % ip)
                    print("Shodan:\t\thttps://www.shodan.io/host/%s" % ip)
                    print("IP Info:\thttp://ipinfo.io/%s" % ip)
                    print("BGP HE:\t\thttps://bgp.he.net/ip/%s" % ip)
                    print(
                        "IP Location:\thttps://www.iplocation.net/?query=%s" %
                        ip)
            elif args.subcommand == "intel":
                if not is_ip(unbracket(args.IP)):
                    print("Invalid IP address")
                    sys.exit(1)
                # Start with MISP and OTX to get Intelligence Reports
                print("###################### %s ###################" %
                      unbracket(args.IP))
                passive_dns = []
                urls = []
                malware = []
                files = []
                # MISP
                misp_e = plugins["misp"].test_config(conf)
                if misp_e:
                    print("[+] Downloading MISP information...")
                    server = ExpandedPyMISP(conf["Misp"]["url"],
                                            conf["Misp"]["key"])
                    misp_results = server.search("attributes",
                                                 value=unbracket(args.IP))
                # Binary Edge
                be_e = plugins["binaryedge"].test_config(conf)
                if be_e:
                    try:
                        print("[+] Downloading BinaryEdge information...")
                        be = BinaryEdge(conf["BinaryEdge"]["key"])
                        # FIXME: this only get the first page
                        res = be.domain_ip(unbracket(args.IP))
                        for d in res["events"]:
                            passive_dns.append({
                                "domain":
                                d["domain"],
                                "first":
                                parse(d["updated_at"]).astimezone(pytz.utc),
                                "last":
                                parse(d["updated_at"]).astimezone(pytz.utc),
                                "source":
                                "BinaryEdge",
                            })
                    except BinaryEdgeException:
                        print(
                            "BinaryEdge request failed, you need a paid subscription"
                        )
                # OTX
                otx_e = plugins["otx"].test_config(conf)
                if otx_e:
                    print("[+] Downloading OTX information....")
                    otx = OTXv2(conf["AlienVaultOtx"]["key"])
                    res = otx.get_indicator_details_full(
                        IndicatorTypes.IPv4, unbracket(args.IP))
                    otx_pulses = res["general"]["pulse_info"]["pulses"]
                    # Get Passive DNS
                    if "passive_dns" in res:
                        for r in res["passive_dns"]["passive_dns"]:
                            passive_dns.append({
                                "domain":
                                r["hostname"],
                                "first":
                                parse(r["first"]).astimezone(pytz.utc),
                                "last":
                                parse(r["last"]).astimezone(pytz.utc),
                                "source":
                                "OTX",
                            })
                    if "url_list" in res:
                        for r in res["url_list"]["url_list"]:
                            if "result" in r:
                                urls.append({
                                    "date":
                                    parse(r["date"]).astimezone(pytz.utc),
                                    "url":
                                    r["url"],
                                    "ip":
                                    r["result"]["urlworker"]["ip"] if "ip"
                                    in r["result"]["urlworker"] else "",
                                    "source":
                                    "OTX",
                                })
                            else:
                                urls.append({
                                    "date":
                                    parse(r["date"]).astimezone(pytz.utc),
                                    "url":
                                    r["url"],
                                    "ip":
                                    "",
                                    "source":
                                    "OTX",
                                })
                # RobTex
                print("[+] Downloading Robtex information....")
                rob = Robtex()
                try:
                    res = rob.get_ip_info(unbracket(args.IP))
                except RobtexError:
                    print("Error with Robtex")
                else:
                    for d in ["pas", "pash", "act", "acth"]:
                        if d in res:
                            for a in res[d]:
                                passive_dns.append({
                                    "first":
                                    a["date"].astimezone(pytz.utc),
                                    "last":
                                    a["date"].astimezone(pytz.utc),
                                    "domain":
                                    a["o"],
                                    "source":
                                    "Robtex",
                                })

                # PT
                pt_e = plugins["pt"].test_config(conf)
                if pt_e:
                    out_pt = False
                    print("[+] Downloading Passive Total information....")
                    client = DnsRequest(conf["PassiveTotal"]["username"],
                                        conf["PassiveTotal"]["key"])
                    try:
                        raw_results = client.get_passive_dns(
                            query=unbracket(args.IP))
                        if "results" in raw_results:
                            for res in raw_results["results"]:
                                passive_dns.append({
                                    "first":
                                    parse(res["firstSeen"]).astimezone(
                                        pytz.utc),
                                    "last":
                                    parse(res["lastSeen"]).astimezone(
                                        pytz.utc),
                                    "domain":
                                    res["resolve"],
                                    "source":
                                    "PT",
                                })
                        if "message" in raw_results:
                            if "quota_exceeded" in raw_results["message"]:
                                print("Quota exceeded for Passive Total")
                                out_pt = True
                                pt_osint = {}
                    except requests.exceptions.ReadTimeout:
                        print("Timeout on Passive Total requests")
                    if not out_pt:
                        try:
                            client2 = EnrichmentRequest(
                                conf["PassiveTotal"]["username"],
                                conf["PassiveTotal"]["key"],
                            )
                            # Get OSINT
                            # TODO: add PT projects here
                            pt_osint = client2.get_osint(
                                query=unbracket(args.IP))
                            # Get malware
                            raw_results = client2.get_malware(
                                query=unbracket(args.IP))
                            if "results" in raw_results:
                                for r in raw_results["results"]:
                                    malware.append({
                                        "hash":
                                        r["sample"],
                                        "date":
                                        parse(r["collectionDate"]),
                                        "source":
                                        "PT (%s)" % r["source"],
                                    })
                        except requests.exceptions.ReadTimeout:
                            print("Timeout on Passive Total requests")
                # Urlhaus
                uh_e = plugins["urlhaus"].test_config(conf)
                if uh_e:
                    print("[+] Checking urlhaus data...")
                    try:
                        urlhaus = UrlHaus(conf["UrlHaus"]["key"])
                        res = urlhaus.get_host(unbracket(args.IP))
                    except UrlHausError:
                        print("Error with the query")
                    else:
                        if "urls" in res:
                            for r in res["urls"]:
                                urls.append({
                                    "date":
                                    parse(r["date_added"]).astimezone(
                                        pytz.utc),
                                    "url":
                                    r["url"],
                                    "source":
                                    "UrlHaus",
                                })
                # VT
                vt_e = plugins["vt"].test_config(conf)
                if vt_e:
                    if conf["VirusTotal"]["type"] != "public":
                        print("[+] Downloading VT information....")
                        vt = PrivateApi(conf["VirusTotal"]["key"])
                        res = vt.get_ip_report(unbracket(args.IP))
                        if "results" in res:
                            if "resolutions" in res["results"]:
                                for r in res["results"]["resolutions"]:
                                    passive_dns.append({
                                        "first":
                                        parse(r["last_resolved"]).astimezone(
                                            pytz.utc),
                                        "last":
                                        parse(r["last_resolved"]).astimezone(
                                            pytz.utc),
                                        "domain":
                                        r["hostname"],
                                        "source":
                                        "VT",
                                    })
                            if "undetected_downloaded_samples" in res[
                                    "results"]:
                                for r in res["results"][
                                        "undetected_downloaded_samples"]:
                                    files.append({
                                        "hash": r["sha256"],
                                        "date": parse(r["date"]),
                                        "source": "VT",
                                    })
                            if "undetected_referrer_samples" in res["results"]:
                                for r in res["results"][
                                        "undetected_referrer_samples"]:
                                    if "date" in r:
                                        files.append({
                                            "hash": r["sha256"],
                                            "date": parse(r["date"]),
                                            "source": "VT",
                                        })
                                    else:
                                        # FIXME : should consider data without dates
                                        files.append({
                                            "hash":
                                            r["sha256"],
                                            "date":
                                            datetime.datetime(1970, 1, 1),
                                            "source":
                                            "VT",
                                        })
                            if "detected_downloaded_samples" in res["results"]:
                                for r in res["results"][
                                        "detected_downloaded_samples"]:
                                    malware.append({
                                        "hash": r["sha256"],
                                        "date": parse(r["date"]),
                                        "source": "VT",
                                    })
                            if "detected_referrer_samples" in res["results"]:
                                for r in res["results"][
                                        "detected_referrer_samples"]:
                                    if "date" in r:
                                        malware.append({
                                            "hash": r["sha256"],
                                            "date": parse(r["date"]),
                                            "source": "VT",
                                        })
                    else:
                        vt_e = False

                print("[+] Downloading GreyNoise information....")
                gn = GreyNoise(conf["GreyNoise"]["key"])
                if gn == "":
                    print("Greynoise API key is not set up.")
                greynoise = gn.ip(unbracket(args.IP))

                tg_e = plugins["threatgrid"].test_config(conf)
                if tg_e:
                    print("[+] Downloading Threat Grid....")
                    try:
                        tg = ThreatGrid(conf["ThreatGrid"]["key"])
                        res = tg.search_samples(unbracket(args.IP), type="ip")
                        already = []
                        if "items" in res:
                            for r in res["items"]:
                                if r["sample_sha256"] not in already:
                                    d = parse(r["ts"])
                                    d = d.replace(tzinfo=None)
                                    malware.append({
                                        "hash": r["sample_sha256"],
                                        "date": d,
                                        "source": "TG",
                                    })
                                    already.append(r["sample_sha256"])
                    except ThreatGridError as e:
                        print("Error with threat grid: {}".format(e.message))

                # ThreatMiner
                print("[+] Downloading ThreatMiner....")
                tm = ThreatMiner()
                response = tm.get_report(unbracket(args.IP))
                if response["status_code"] == "200":
                    tmm = response["results"]
                else:
                    tmm = []
                    if response["status_code"] != "404":
                        print("Request to ThreatMiner failed: {}".format(
                            response["status_message"]))
                response = tm.get_related_samples(unbracket(args.IP))
                if response["status_code"] == "200":
                    for r in response["results"]:
                        malware.append({
                            "hash": r,
                            "date": None,
                            "source": "ThreatMiner"
                        })

                print("----------------- Intelligence Report")
                ctor = CommandTor()
                tor_list = ctor.get_list()
                if tor_list:
                    if unbracket(args.IP) in tor_list:
                        print("{} is a Tor Exit node".format(unbracket(
                            args.IP)))
                else:
                    print("Impossible to reach the Tor Exit Node list")
                if otx_e:
                    if len(otx_pulses):
                        print("OTX:")
                        for p in otx_pulses:
                            print("- %s (%s - %s)" % (
                                p["name"],
                                p["created"][:10],
                                "https://otx.alienvault.com/pulse/" + p["id"],
                            ))
                    else:
                        print("OTX: Not found in any pulse")
                if misp_e:
                    if len(misp_results["Attribute"]) > 0:
                        print("MISP:")
                        for event in misp_results["Attribute"]:
                            print("- {} - {}".format(event["Event"]["id"],
                                                     event["Event"]["info"]))
                if len(greynoise) > 0:
                    print("GreyNoise: IP identified as")
                    for key, value in greynoise.items():
                        print(key, "->", value)
                else:
                    print("GreyNoise: Not found")
                if pt_e:
                    if "results" in pt_osint:
                        if len(pt_osint["results"]):
                            if len(pt_osint["results"]) == 1:
                                if "name" in pt_osint["results"][0]:
                                    print("PT: %s %s" % (
                                        pt_osint["results"][0]["name"],
                                        pt_osint["results"][0]["sourceUrl"],
                                    ))
                                else:
                                    print("PT: %s" %
                                          pt_osint["results"][0]["sourceUrl"])
                            else:
                                print("PT:")
                                for r in pt_osint["results"]:
                                    if "name" in r:
                                        print("-%s %s" %
                                              (r["name"], r["sourceUrl"]))
                                    else:
                                        print("-%s" % r["sourceUrl"])
                        else:
                            print("PT: Nothing found!")
                    else:
                        print("PT: Nothing found!")
                # ThreatMiner
                if len(tmm) > 0:
                    print("ThreatMiner:")
                    for r in tmm:
                        print("- {} {} - {}".format(r["year"], r["filename"],
                                                    r["URL"]))

                if len(malware) > 0:
                    print("----------------- Malware")
                    for r in malware:
                        print("[%s] %s %s" % (
                            r["source"],
                            r["hash"],
                            r["date"].strftime("%Y-%m-%d")
                            if r["date"] else "",
                        ))
                if len(files) > 0:
                    print("----------------- Files")
                    for r in sorted(files, key=lambda x: x["date"]):
                        print("[%s] %s %s" % (r["source"], r["hash"],
                                              r["date"].strftime("%Y-%m-%d")))
                if len(passive_dns) > 0:
                    print("----------------- Passive DNS")
                    for r in sorted(passive_dns,
                                    key=lambda x: x["first"],
                                    reverse=True):
                        print("[+] %-40s (%s -> %s)(%s)" % (
                            r["domain"],
                            r["first"].strftime("%Y-%m-%d"),
                            r["last"].strftime("%Y-%m-%d"),
                            r["source"],
                        ))
                if len(urls) > 0:
                    print("----------------- Urls")
                    for r in sorted(urls,
                                    key=lambda x: x["date"],
                                    reverse=True):
                        print("[%s] %s - %s" %
                              (r["source"], r["url"],
                               r["date"].strftime("%Y-%m-%d")))
            else:
                self.parser.print_help()
        else:
            self.parser.print_help()
예제 #26
0
from greynoise import GreyNoise
api_client = GreyNoise(api_key=<api_key>, timeout=<timeout_in_seconds>)

api_client.ip('58.220.219.247')
{
  "ip": "58.220.219.247",
  "seen": true,
  "classification": "malicious",
  "first_seen": "2019-04-04",
  "last_seen": "2019-08-21",
  "actor": "unknown",
  "tags": [
    "MSSQL Bruteforcer",
    "MSSQL Scanner",
    "RDP Scanner"
  ],
  "metadata": {
    "country": "China",
    "country_code": "CN",
    "city": "Kunshan",
    "organization": "CHINANET jiangsu province network",
    "asn": "AS4134",
    "tor": false,
    "os": "Windows 7/8",
    "category": "isp"
  },
  "raw_data": {
    "scan": [
      {
        "port": 1433,
        "protocol": "TCP"
    def transform(self, records):
        """Enrich Splunk event records with GreyNoise context information.

        Validates the ``ip_field`` parameter and the configured API key,
        batches the incoming records, queries the GreyNoise API for each
        chunk, and yields the enriched events back to the Splunk pipeline.

        :param records: iterable of Splunk event records to enrich
        :yields: event records augmented with GreyNoise context for the
            IP addresses found in ``ip_field``
        """
        # Setup logger
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        # Enter the mechanism only when the search is complete and all the events are available
        if self.search_results_info and not self.metadata.preview:

            EVENTS_PER_CHUNK = 1
            THREADS = 3
            USE_CACHE = False
            ip_field = self.ip_field

            logger.info(
                "Started retrieving context information for the IP addresses present in field: {}"
                .format(str(ip_field)))

            try:
                # Strip the spaces from the parameter value if given
                if ip_field:
                    ip_field = ip_field.strip()
                # Validating the given parameters
                try:
                    ip_field = validator.Fieldname(
                        option_name='ip_field').validate(ip_field)
                except ValueError as e:
                    # Validator will throw ValueError with error message when the parameters are not proper
                    logger.error(str(e))
                    self.write_error(str(e))
                    exit(1)

                try:
                    message = ''
                    api_key = utility.get_api_key(
                        self._metadata.searchinfo.session_key, logger=logger)
                except APIKeyNotFoundError as e:
                    message = str(e)
                except HTTPError as e:
                    message = str(e)

                if message:
                    self.write_error(message)
                    logger.error(
                        "Error occurred while retrieving API key, Error: {}".
                        format(message))
                    exit(1)

                # API key validation (performed at most once per command invocation)
                if not self.api_validation_flag:
                    api_key_validation, message = utility.validate_api_key(
                        api_key, logger)
                    logger.debug(
                        "API validation status: {}, message: {}".format(
                            api_key_validation, str(message)))
                    self.api_validation_flag = True
                    if not api_key_validation:
                        logger.info(message)
                        self.write_error(message)
                        exit(1)

                # divide the records in the form of dict of tuples having chunk_index as key
                # {<index>: (<records>, <All the ips in records>)}
                chunk_dict = event_generator.batch(records,
                                                   ip_field,
                                                   EVENTS_PER_CHUNK,
                                                   logger,
                                                   optimize_requests=False)
                logger.debug("Successfully divided events into chunks")

                # This means there are only 1000 or below IPs to call in the entire bunch of records
                # Use a single thread with the caching mechanism enabled for the chunk
                if len(chunk_dict) == 1:
                    logger.debug(
                        "Less than 1000 distinct IPs are present, optimizing the IP requests call to GreyNoise API..."
                    )
                    THREADS = 1
                    USE_CACHE = True

                # Opting timeout of 120 seconds for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       use_cache=USE_CACHE,
                                       integration_name=INTEGRATION_NAME)

                if len(chunk_dict) > 0:
                    for event in event_generator.get_all_events(
                            self._metadata.searchinfo.session_key,
                            api_client,
                            'enrich',
                            ip_field,
                            chunk_dict,
                            logger,
                            threads=THREADS):
                        yield event

                    logger.info(
                        "Successfully sent all the results to the Splunk")
                else:
                    logger.info(
                        "No events found, please increase the search timespan to have more search results."
                    )

            except Exception:
                # Log the full traceback at error level; surface a short message to the user
                logger.error(
                    "Exception occurred while getting context information for events, Error: {}"
                    .format(traceback.format_exc()))
                self.write_error(
                    "Exception occurred while enriching events with the context information of IP addresses. "
                    "See greynoise_main.log for more details.")
    def create_entities(cls, request: MaltegoMsg, response):
        """Look up the input IP in the GreyNoise Community API and add the
        resulting Maltego entities/properties to *response*.

        :param request: incoming Maltego transform request (``request.Value``
            holds the IP address to look up)
        :param response: Maltego response object that collects entities and
            UI messages
        """
        api_key = request.TransformSettings["GNApiKey"]
        api_client = GreyNoise(
            api_key=api_key,
            integration_name="maltego-community-v1.0.0",
            offering="community",
        )

        # make a precise copy of the input to avoid creating a new graph entity
        type_name = "maltego.IPv4Address"
        extra_props = {}
        if request.Genealogy:
            type_name = request.Genealogy[0]["Name"]
            extra_props = request.Properties
        input_ip = response.addEntity(type_name, request.Value)
        for k, v in extra_props.items():
            input_ip.addProperty(fieldName=k, value=v, matchingRule="loose")

        try:
            resp = api_client.ip(request.Value)
            if resp["noise"] or resp["riot"]:
                if resp["noise"]:
                    response.addEntity("greynoise.noise", "Noise Detected")
                if resp["riot"]:
                    response.addEntity("greynoise.noise",
                                       "Benign Service Detected")

                if resp["name"] != "unknown":
                    response.addEntity("maltego.Organization", resp["name"])

                response.addEntity("greynoise.classification",
                                   resp["classification"])

                # add dynamic properties instead of returning more to the graph
                input_ip.addProperty(
                    fieldName="gn_url",
                    displayName="GreyNoise URL",
                    value=resp["link"],
                    matchingRule="loose",
                )
                input_ip.addProperty(
                    fieldName="gn_last_seen",
                    displayName="GreyNoise last seen",
                    value=resp["last_seen"],
                    matchingRule="loose",
                )
            else:
                response.addEntity("greynoise.noise", "No Noise Detected")
                response.addUIMessage(
                    f"The IP address {request.Value} hasn't been seen by GreyNoise."
                )

            add_display_info(
                input_ip,
                resp.get("classification"),
                resp.get("last_seen"),
                resp.get("link"),
                resp.get("name"),
            )
        except Exception as e:
            # Pass a string, not the exception object, to the Maltego UI
            response.addUIMessage(str(e))
예제 #29
0
    def transform(self, records):
        """Filter Splunk event records based on their GreyNoise noise status.

        Validates the ``ip_field`` and ``noise_events`` parameters and the
        configured API key, batches the incoming records, queries the
        GreyNoise API for each chunk, and yields only the events matching
        the requested noise status back to the Splunk pipeline.

        :param records: iterable of Splunk event records to filter
        :yields: event records that match the requested noise status
        """
        method = 'filter'

        # Setup logger
        logger = utility.setup_logger(
            session_key=self._metadata.searchinfo.session_key,
            log_context=self._metadata.searchinfo.command)

        # Enter the mechanism only when the Search is complete and all the events are available
        if self.search_results_info and not self.metadata.preview:

            EVENTS_PER_CHUNK = 1000
            THREADS = 3
            USE_CACHE = False
            ip_field = self.ip_field
            noise_events = self.noise_events

            logger.info(
                "Started filtering the IP address(es) present in field: {}, with noise_status: {}"
                .format(str(ip_field), str(noise_events)))

            try:
                # Strip the spaces from the parameter values if given
                if ip_field:
                    ip_field = ip_field.strip()
                if noise_events:
                    noise_events = noise_events.strip()
                # Validating the given parameters
                try:
                    ip_field = validator.Fieldname(
                        option_name='ip_field').validate(ip_field)
                    noise_events = validator.Boolean(
                        option_name='noise_events').validate(noise_events)
                except ValueError as e:
                    # Validator will throw ValueError with error message when the parameters are not proper
                    logger.error(str(e))
                    self.write_error(str(e))
                    exit(1)

                try:
                    message = ''
                    api_key = utility.get_api_key(
                        self._metadata.searchinfo.session_key, logger=logger)
                except APIKeyNotFoundError as e:
                    message = str(e)
                except HTTPError as e:
                    message = str(e)

                if message:
                    self.write_error(message)
                    logger.error(
                        "Error occurred while retrieving API key, Error: {}".
                        format(message))
                    exit(1)

                # API key validation
                api_key_validation, message = utility.validate_api_key(
                    api_key, logger)
                logger.debug("API validation status: {}, message: {}".format(
                    api_key_validation, str(message)))
                if not api_key_validation:
                    logger.info(message)
                    self.write_error(message)
                    exit(1)

                # divide the records in the form of dict of tuples having chunk_index as key
                # {<index>: (<records>, <All the ips in records>)}
                chunk_dict = event_generator.batch(records, ip_field,
                                                   EVENTS_PER_CHUNK, logger)
                logger.debug("Successfully divided events into chunks")

                # This means there are only 1000 or below IPs to call in the entire bunch of records
                # Use a single thread with the caching mechanism enabled for the chunk
                if len(chunk_dict) == 1:
                    logger.info(
                        "Less than 1000 distinct IPs are present, optimizing the IP requests call to GreyNoise API..."
                    )
                    THREADS = 1
                    USE_CACHE = True

                # Opting timeout of 120 seconds for the requests
                api_client = GreyNoise(api_key=api_key,
                                       timeout=120,
                                       use_cache=USE_CACHE,
                                       integration_name="Splunk")

                # When no records found, batch will return {0:([],[])}
                if len(list(chunk_dict.values())[0][0]) >= 1:
                    for chunk_index, result in event_generator.get_all_events(
                            api_client,
                            method,
                            ip_field,
                            chunk_dict,
                            logger,
                            threads=THREADS):
                        # Pass the collected data to the event filter method
                        for event in event_filter(chunk_index, result,
                                                  chunk_dict[chunk_index],
                                                  ip_field, noise_events,
                                                  method):
                            yield event

                        # Deleting the chunk with the events that are already indexed
                        del chunk_dict[chunk_index]

                    logger.info(
                        "Successfully sent all the results to the Splunk")
                else:
                    logger.info(
                        "No events found, please increase the search timespan to have more search results."
                    )

            except Exception:
                # Log the full traceback at error level; surface a short message to the user
                logger.error(
                    "Exception occurred while filtering events, Error: {}".
                    format(traceback.format_exc()))
                self.write_error(
                    "Exception occurred while filtering the events based on noise status. See greynoise_main.log for more details."
                )
 def __init__(self, context):
     """Store the configured API key and open a GreyNoise API session.

     :param context: Swimlane plugin context whose ``asset`` mapping
         provides the ``api_key`` credential
     """
     key = context.asset["api_key"]
     self.api_key = key
     # Tag requests with the plugin name/version for GreyNoise analytics
     self.session = GreyNoise(
         api_key=key,
         integration_name="greynoise-swimlane-" + PLUGIN_VERSION,
     )