def run(self, params={}):
    """Search vulnerability checks by CVE ID and return full details for each unique vulnerability."""
    helper = ResourceRequests(self.connection.session, self.logger)
    cve_id = params.get("cve_id")

    endpoint = endpoints.Vulnerability.vulnerability_checks(
        self.connection.console_url)
    self.logger.info(f"Using {endpoint}...")

    # Search checks by CVE, then collect the distinct vulnerability IDs they reference
    results = helper.paged_resource_request(
        endpoint=endpoint, method="get", params={"search": cve_id})
    vuln_ids = {result["vulnerability"] for result in results}

    self.logger.info(
        f"Received {len(vuln_ids)} vulnerability IDs from search, getting details..."
    )

    # Fetch the complete record for every unique vulnerability ID
    vulns = [
        helper.resource_request(
            endpoint=endpoints.Vulnerability.vulnerability(
                self.connection.console_url, vuln_id))
        for vuln_id in vuln_ids
    ]
    return {"vulnerabilities": vulns}
def run(self, params={}):
    """Return all tags, optionally filtered by name (case-insensitive regex) and/or type.

    Bug fix: previously `re.compile(tag_name, ...)` was executed whenever
    *either* filter was present, so filtering by type alone passed None to
    re.compile and raised TypeError. The regex is now compiled only when a
    name filter was actually supplied.
    """
    resource_helper = ResourceRequests(self.connection.session, self.logger)
    tag_name = params.get("name")
    tag_type = params.get("type")

    endpoint = endpoints.Tag.tags(self.connection.console_url)
    self.logger.info("Using %s ..." % endpoint)
    tags = resource_helper.paged_resource_request(endpoint=endpoint)

    # Normalize empty strings to None so they do not count as filters
    if tag_name == '':
        tag_name = None
    if tag_type == '':
        tag_type = None

    if tag_name or tag_type:
        regex = re.compile(tag_name, re.IGNORECASE) if tag_name else None
        filtered_tags = []
        for t in tags:
            if tag_name and tag_type:
                # Both filters supplied: require both to match
                if regex.match(t['name']) and (t['type'] == tag_type):
                    filtered_tags.append(t)
            elif tag_name:
                # Name-only filter (matches original behavior: with no type
                # filter, the type comparison could never be True)
                if regex.match(t['name']):
                    filtered_tags.append(t)
            else:
                # Type-only filter — this path previously crashed
                if t['type'] == tag_type:
                    filtered_tags.append(t)
        self.logger.info("Returning %d tags based on filters..." % (len(filtered_tags)))
        tags = filtered_tags

    return {"tags": tags}
def run(self, params={}):
    """List asset groups, optionally filtered by a case-insensitive name regex."""
    resource_helper = ResourceRequests(self.connection.session, self.logger)
    name = params.get("name")

    endpoint = endpoints.AssetGroup.asset_groups(
        self.connection.console_url)
    self.logger.info("Using %s ..." % endpoint)
    groups = resource_helper.paged_resource_request(endpoint=endpoint)

    # An empty string is treated the same as no filter
    if name == '':
        name = None

    if name:
        pattern = re.compile(name, re.IGNORECASE)
        filtered_groups = [group for group in groups if pattern.match(group['name'])]
        self.logger.info("Returning %d asset groups based on filters..." % (len(filtered_groups)))
        groups = filtered_groups

    return {"asset_groups": groups}
def run(self, params={}):
    """List scans, optionally scoped to one site and filtered by active state."""
    resource_helper = ResourceRequests(self.connection.session, self.logger)
    site_id = params.get("id")
    query_params = {"active": params.get("active")}

    # If a site filter was provided, first get the site name because it's not
    # provided in the results from the site scans endpoint and this action
    # should be consistent even when the API is not.
    site_name = None
    if site_id:
        site_endpoint = endpoints.Site.sites(self.connection.console_url, site_id)
        site_name = resource_helper.resource_request(site_endpoint)["name"]
        endpoint = endpoints.Scan.site_scans(self.connection.console_url, site_id)
    else:
        endpoint = endpoints.Scan.scans(self.connection.console_url)

    response = resource_helper.paged_resource_request(endpoint=endpoint,
                                                      params=query_params)

    # Stamp the site ID and name onto each scan when scoped to a site
    if site_id:
        for scan in response:
            scan["siteId"] = site_id
            scan["siteName"] = site_name

    return {"scans": response}
def run(self, params={}):
    """Get the assets that were part of a scan, via an ad hoc SQL report.

    Bug fix: the original try block wrapped only the DictReader constructor,
    but CSV parsing and int() conversion errors are raised during row
    iteration — the whole read loop now sits inside the handler so malformed
    report output is reported as a PluginException as intended.
    """
    # Generate unique identifier for report names so concurrent runs don't collide
    identifier = uuid.uuid4()
    scan_id = params.get(Input.SCAN_ID)

    # Report to collect site ID and asset IDs of scan
    report_payload = {
        'name': f"Rapid7-ScanAssets-InsightConnect-{identifier}",
        'format': 'sql-query',
        'query': 'SELECT site_id, asset_id '
                 'FROM dim_site_scan AS dss '
                 'JOIN dim_asset_scan AS das ON das.scan_id = dss.scan_id',
        'version': '2.3.0',
        'scope': {'scan': scan_id}
    }
    report_contents = util.adhoc_sql_report(self.connection, self.logger, report_payload)

    self.logger.info(f"Processing Assets of Scan ID {scan_id}")

    # Extract site ID and asset IDs
    scan_asset_ids = set()
    scan_site_id = None
    try:
        csv_report = csv.DictReader(io.StringIO(report_contents['raw']))
        for row in csv_report:
            scan_asset_ids.add(int(row["asset_id"]))
            # Assign site ID for scan (identical on every row, so take the first)
            if scan_site_id is None:
                scan_site_id = row["site_id"]
    except Exception as e:
        raise PluginException(cause=f"Error: Failed to process query response for assets returned for "
                                    f"scan ID {scan_id}.",
                              assistance=f"Exception returned was {e}")

    # Get assets of site of scan
    resource_helper = ResourceRequests(self.connection.session, self.logger)
    search_criteria = {
        "filters": [
            {
                "field": "site-id",
                "operator": "in",
                "values": [scan_site_id]
            }
        ],
        "match": "all"
    }
    self.logger.info("Performing filtered asset search with criteria %s" % search_criteria)
    endpoint = endpoints.Asset.search(self.connection.console_url)
    site_assets = resource_helper.paged_resource_request(endpoint=endpoint,
                                                         method='post',
                                                         payload=search_criteria)

    # Filter the site's assets down to those seen in this specific scan
    filtered_assets = [asset for asset in site_assets if asset["id"] in scan_asset_ids]
    return {Output.ASSETS: filtered_assets}
def run(self, params={}):
    """Search assets with optional sort criteria and a result-count limit."""
    resource_helper = ResourceRequests(self.connection.session, self.logger)
    search_criteria = params.get(Input.SEARCHCRITERIA)
    size = params.get(Input.SIZE, 0)
    sort_criteria = params.get(Input.SORT_CRITERIA, dict())

    self.logger.info(
        f"Performing filtered asset search with criteria {search_criteria}"
    )
    endpoint = endpoints.Asset.search(self.connection.console_url)

    # Sort fields are sent as repeated "sort" query parameters: field,direction
    parameters = [("sort", f"{field},{direction}")
                  for field, direction in sort_criteria.items()]

    if size == 0:
        # No limit requested: page through all results, 100 per page
        parameters.append(("size", 100))
        resources = resource_helper.paged_resource_request(
            endpoint=endpoint, method="post", params=parameters,
            payload=search_criteria)
    elif size <= 100:
        # Small enough for a single request/page
        parameters.append(("size", size))
        resources = resource_helper.resource_request(
            endpoint=endpoint, method="post", params=parameters,
            payload=search_criteria)
        resources = resources["resources"]
    else:
        # Page through until the requested number of results is collected
        parameters.append(("size", 100))
        resources = resource_helper.paged_resource_request(
            endpoint=endpoint,
            method="post",
            params=parameters,
            payload=search_criteria,
            number_of_results=size,
        )

    return {Output.ASSETS: resources}
def run(self, params={}):
    """Return vulnerability findings for an asset, optionally enriched with risk scores."""
    resource_helper = ResourceRequests(self.connection.session, self.logger)
    asset_id = params.get(Input.ASSET_ID)
    include_risk_score = params.get(Input.GET_RISK_SCORE, False)

    endpoint = endpoints.VulnerabilityResult.vulnerabilities_for_asset(self.connection.console_url, asset_id)
    resources = resource_helper.paged_resource_request(endpoint=endpoint, method="get")

    # Only perform the extra per-vulnerability enrichment when requested
    if include_risk_score:
        resources = self.get_vulnerabilities(resources)
    return {Output.VULNERABILITIES: resources}
def get_sites_within_scope(self, site_regex):
    """Return IDs of sites whose names match the given case-insensitive regex."""
    resource_helper = ResourceRequests(self.connection.session, self.logger)
    endpoint = endpoints.Site.sites(self.connection.console_url)
    sites = resource_helper.paged_resource_request(endpoint=endpoint)

    # Keep only the sites whose name matches the scope filter
    pattern = re.compile(site_regex, re.IGNORECASE)
    site_ids = [site['id'] for site in sites if pattern.match(site['name'])]

    self.logger.info(
        f"Identified {len(site_ids)} sites within trigger scope based on regular expression filter"
    )
    return site_ids
def run(self, params={}):
    """Run the trigger: poll for new vulnerability exceptions and emit matching ones.

    Vulnerability exception IDs are sequential, so the trigger first finds the
    highest existing ID, then repeatedly probes for higher IDs each cycle.

    Consistency fix: the original read params["frequency"] with a bare key
    access (KeyError if omitted) while the sleep below used
    params.get(Input.FREQUENCY, 5); both now use the keyed lookup with default.
    """
    # get most recent vulnerability exception request - since they're
    # sequential, find highest id
    resource_helper = ResourceRequests(self.connection.session, self.logger)
    endpoint = endpoints.VulnerabilityException.vulnerability_exceptions(
        self.connection.console_url)
    std_params = {"sort": "id,desc"}
    response = resource_helper.paged_resource_request(endpoint=endpoint,
                                                      method="get",
                                                      params=std_params)
    last_id = 0
    for r in response:
        if r["id"] > last_id:
            last_id = r["id"]

    params["interval"] = params.get(Input.FREQUENCY, 5)

    # Pre-lowercase the status filter once for case-insensitive comparisons
    status_filter = [status.lower() for status in params.get("status_filter", [])]

    while True:
        # process all new exceptions. The inner loop is to handle grabbing
        # multiple exceptions since last cycle. It is broken when we run out
        # of new vulnerability exceptions to process returning us to the outer
        # loop where we sleep for the configured amount of time.
        # We detect that we're out of work to do when we try and grab the
        # next higher exception id and we get an exception back instead of a
        # response containing a vulnerability exception.
        while True:
            endpoint = endpoints.VulnerabilityException.vulnerability_exception(
                self.connection.console_url, last_id + 1)

            # check if there is a new vulnerability exception
            try:
                response = resource_helper.resource_request(
                    endpoint=endpoint, method="get")
            except Exception:
                # Expected: no exception with this ID exists yet — done for now
                break

            last_id += 1

            # Skip exceptions whose state isn't in the configured filter
            if response.get("state").lower() not in status_filter:
                continue

            # send it on its way
            self.send({Output.EXCEPTION: response})

        # Sleep for configured frequency in minutes
        time.sleep(params.get(Input.FREQUENCY, 5) * 60)
def run(self, params={}):
    """List users, optionally filtered by name and/or login case-insensitive regexes."""
    resource_helper = ResourceRequests(self.connection.session, self.logger)
    endpoint = endpoints.User.users(self.connection.console_url)
    self.logger.info("Using %s ..." % endpoint)
    response = resource_helper.paged_resource_request(endpoint)

    # Apply each filter only when supplied and non-empty
    name = params.get('name')
    login = params.get('login')
    if name:
        pattern = re.compile(name, re.IGNORECASE)
        response = [user for user in response if pattern.match(user['name'])]
    if login:
        pattern = re.compile(login, re.IGNORECASE)
        response = [user for user in response if pattern.match(user['login'])]

    self.logger.info(
        f"Returning {len(response)} results based on filter...")
    return {"users": response}
def run(self, params={}):
    """List sites, optionally filtered by a case-insensitive name regex."""
    resource_helper = ResourceRequests(self.connection.session, self.logger)
    name = params.get("name")

    endpoint = endpoints.Site.sites(self.connection.console_url)
    self.logger.info("Using %s ..." % endpoint)
    sites = resource_helper.paged_resource_request(endpoint=endpoint)

    # An empty string means "no filter"; only a non-empty name activates it
    if name:
        pattern = re.compile(name, re.IGNORECASE)
        matched = [site for site in sites if pattern.match(site["name"])]
        self.logger.info("Returning %d sites based on filters..." % (len(matched)))
        sites = matched

    return {"sites": sites}