def run(self, params={}):
    """
    Return the assets that were included in a completed scan.

    Generates an ad-hoc SQL report scoped to the scan to obtain the
    scan's site ID and asset IDs, then performs a filtered asset search
    against that site and keeps only the assets seen in the scan.

    :param params: Action input containing Input.SCAN_ID.
    :return: Dict mapping Output.ASSETS to the scan's assets.
    :raises PluginException: If the report contents cannot be parsed.
    """
    # Unique suffix so concurrent runs don't collide on report names
    identifier = uuid.uuid4()
    scan_id = params.get(Input.SCAN_ID)

    # Report to collect site ID and asset IDs of scan
    report_payload = {
        'name': f"Rapid7-ScanAssets-InsightConnect-{identifier}",
        'format': 'sql-query',
        'query': 'SELECT site_id, asset_id '
                 'FROM dim_site_scan AS dss '
                 'JOIN dim_asset_scan AS das ON das.scan_id = dss.scan_id',
        'version': '2.3.0',
        'scope': {'scan': scan_id}
    }

    report_contents = util.adhoc_sql_report(self.connection, self.logger, report_payload)
    self.logger.info(f"Processing Assets of Scan ID {scan_id}")

    # Extract site ID and asset IDs.
    # Bug fix: row iteration and int() conversion previously ran outside the
    # try block, so malformed report contents surfaced as raw exceptions
    # instead of the PluginException this handler was written to raise.
    scan_asset_ids = set()
    scan_site_id = None
    try:
        csv_report = csv.DictReader(io.StringIO(report_contents['raw'].decode('utf-8')))
        for row in csv_report:
            scan_asset_ids.add(int(row["asset_id"]))
            # All rows belong to the same scan; capture the site ID once
            if scan_site_id is None:
                scan_site_id = row["site_id"]
    except Exception as e:
        raise PluginException(cause=f"Error: Failed to process query response for assets returned for "
                                    f"scan ID {scan_id}.",
                              assistance=f"Exception returned was {e}")

    # Get assets of the site the scan ran against
    resource_helper = ResourceHelper(self.connection.session, self.logger)
    search_criteria = {
        "filters": [
            {
                "field": "site-id",
                "operator": "in",
                "values": [scan_site_id]
            }
        ],
        "match": "all"
    }
    self.logger.info("Performing filtered asset search with criteria %s" % search_criteria)
    endpoint = endpoints.Asset.search(self.connection.console_url)
    site_assets = resource_helper.paged_resource_request(endpoint=endpoint,
                                                         method='post',
                                                         payload=search_criteria)

    # Keep only the site assets that were part of this specific scan
    filtered_assets = [asset for asset in site_assets if asset["id"] in scan_asset_ids]
    return {Output.ASSETS: filtered_assets}
def run(self, params={}):
    """
    Run a user-supplied ad-hoc SQL query report and return its contents.

    :param params: Action input containing Input.QUERY, Input.FILTERS
                   (JSON string), Input.SCOPE, and Input.SCOPE_IDS.
    :return: Dict mapping Output.REPORT to a base64-encoded CSV file.
    :raises PluginException: If the filters input is not valid JSON or the
                             report contents cannot be base64 encoded.
    """
    query = params.get(Input.QUERY)

    # Unique suffix so concurrent runs don't collide on report names
    identifier = uuid.uuid4()

    # Bug fix: invalid JSON in the filters input previously escaped as a raw
    # ValueError; surface it as a PluginException like the other failures here.
    try:
        filters = json.loads(params.get(Input.FILTERS))
    except (json.JSONDecodeError, TypeError) as e:
        raise PluginException(cause="Error: Filters input could not be parsed as JSON.",
                              assistance=f"Exception returned was {e}")

    # Configure payload for SQL report generation
    report_payload = {
        'name': f"Rapid7-InsightConnect-AdhocReport-{identifier}",
        'format': 'sql-query',
        'query': query,
        'version': '2.3.0',
        'filters': filters
    }

    # Add scope if set in action
    if params.get(Input.SCOPE) != "none" and len(params.get(Input.SCOPE_IDS)) > 0:
        if params.get(Input.SCOPE) == "scan":
            # Scan scope takes a single scan ID, not a list
            report_payload["scope"] = {params.get(Input.SCOPE): params.get(Input.SCOPE_IDS)[0]}
        else:
            report_payload["scope"] = {params.get(Input.SCOPE): params.get(Input.SCOPE_IDS)}

    report_contents = util.adhoc_sql_report(self.connection, self.logger, report_payload)

    try:
        base_64_report = base64.b64encode(report_contents['raw'])
    except base64.binascii.Error as e:
        raise PluginException(cause="Error: Failed to base64 encode report contents due to incorrect padding.",
                              assistance=f"Exception returned was {e}")

    return {
        Output.REPORT: {
            "content": base_64_report.decode("utf-8"),
            "filename": "adhoc_sql_report.csv"
        }
    }
def get_site_scans(self, params):
    """
    Collect scans for every site whose name matches the configured filter.

    Builds and runs an ad-hoc SQL report — preferred over the API
    endpoints, which return all scans for the agent site — and groups
    the resulting scan rows by their site ID.

    :param params: Trigger input with Input.SITE_NAME_FILTER and
                   Input.STATUS_FILTER.
    :return: defaultdict mapping site ID (str) to a list of scan dicts.
    :raises PluginException: If the report response cannot be parsed.
    """
    # Unique suffix keeps report names from colliding across runs
    report_suffix = uuid.uuid4()

    # Resolve the site IDs matching the site-name regular expression
    matched_site_ids = NewScans.get_sites_within_scope(
        self, params.get(Input.SITE_NAME_FILTER))

    # Single-quote each status for use inside the SQL query
    quoted_statuses = (f"'{status}'" for status in params.get(Input.STATUS_FILTER))
    report_payload = {
        "name": f"Rapid7-InsightConnect-NewScans-{report_suffix}",
        "format": "sql-query",
        "query": NewScans.scans_query(
            quoted_statuses,
            [str(matched_id) for matched_id in matched_site_ids],
        ),
        "version": "2.3.0",
    }

    # Run the report to get scans scoped to the matched sites
    self.logger.info("Pulling scans")
    report_contents = util.adhoc_sql_report(self.connection, self.logger, report_payload)

    scans_by_site = defaultdict(list)
    try:
        rows = csv.DictReader(io.StringIO(report_contents["raw"]))
    except Exception as e:
        raise PluginException(
            cause="Error: Failed to process query response while fetching site scans.",
            assistance=f"Exception returned was {e}",
        )

    # Group every scan matching the status filter under its site ID
    for row in rows:
        scans_by_site[row["site_id"]].append({
            "scan_id": int(row["scan_id"]),
            "status": row["status"],
            "site_id": int(row["site_id"]),
            "site_name": row["site_name"],
        })

    return scans_by_site
def run(self, params={}):
    """
    Generate the top remediations, with their affected assets and
    vulnerabilities, for the configured scope.

    Runs three ad-hoc SQL reports (remediations, assets, vulnerabilities),
    attaches asset and vulnerability rows to their parent remediation by
    solution ID, and returns the combined list.

    :param params: Action input with Input.LIMIT, Input.SCOPE,
                   Input.SCOPE_IDS, Input.ASSET_LIMIT, and
                   Input.VULNERABILITY_LIMIT.
    :return: Dict mapping Output.REMEDIATIONS to the remediation list.
    :raises PluginException: If any report response cannot be parsed.
    """
    remediations_limit = params.get(Input.LIMIT)
    # Unique suffix so concurrent runs don't collide on report names
    identifier = uuid.uuid4()

    # Report: Top Remediations
    report_payload = {
        "name": f"Rapid7-InsightConnect-TopRemediation-{identifier}",
        "format": "sql-query",
        "query": TopRemediations.remediations_query(remediations_limit),
        "version": "2.3.0",
    }
    self._apply_scope(report_payload, params)

    self.logger.info("Generating top remediations for InsightVM and scope")
    report_contents = util.adhoc_sql_report(self.connection, self.logger, report_payload)

    # Structure returned remediations, keyed by solution ID so asset and
    # vulnerability rows below can be attached to their parent remediation
    remediations = {}
    csv_report = self._parse_report_csv(
        report_contents["raw"],
        "Error: Failed to process query response for top remediations.")
    for row in csv_report:
        remediations[row["solution_id"]] = {
            "solutionId": int(row["solution_id"]),
            "nexposeId": row["nexpose_id"],
            "summary": row["summary"],
            "fix": row["fix"],
            "assetCount": int(row["assets"]),
            "vulnerabilityCount": int(row["vulnerabilities"]),
            "riskScore": int(float(row["riskscore"])),
            "assets": [],
            "vulnerabilities": [],
        }

    # Report: Gather Asset Details
    asset_report_payload = {
        "name": f"Rapid7-InsightConnect-TopRemediation-Asset-{identifier}",
        "format": "sql-query",
        "query": TopRemediations.assets_query(remediations_limit),
        "version": "2.3.0",
    }
    self._apply_scope(asset_report_payload, params)

    # Structure and add remediation assets to remediations
    self.logger.info("Processing assets of top remediations")
    asset_report_contents = util.adhoc_sql_report(self.connection, self.logger,
                                                  asset_report_payload)
    csv_report = self._parse_report_csv(
        asset_report_contents["raw"],
        "Error: Failed to process query response for remediation assets.")
    # Hoisted out of the loop: the limit does not change between rows
    asset_limit = params.get(Input.ASSET_LIMIT)
    for row in csv_report:
        # Only track assets up to the asset limit (0 means unlimited)
        if (asset_limit == 0) or (len(remediations[row["solution_id"]]["assets"]) < asset_limit):
            asset = {
                "id": int(row["asset_id"]),
                "hostName": row["host_name"],
                "ip": row["ip_address"],
                "mac": row["mac_address"],
                "os": row["name"],
                "riskScore": int(float(row["riskscore"])),
                "criticalityTag": TopRemediations.highest_criticality(
                    row["criticality_tag"].split(",")),
            }
            remediations[row["solution_id"]]["assets"].append(asset)

    # Report: Gather Vulnerability Details
    vulnerability_report_payload = {
        "name": f"Rapid7-InsightConnect-TopRemediation-Vulnerability-{identifier}",
        "format": "sql-query",
        "query": TopRemediations.vulnerabilities_query(remediations_limit),
        "version": "2.3.0",
    }
    self._apply_scope(vulnerability_report_payload, params)

    # Structure and add remediation vulnerabilities to remediations
    self.logger.info("Processing vulnerabilities of top remediations")
    vulnerability_report_contents = util.adhoc_sql_report(
        self.connection, self.logger, vulnerability_report_payload)
    csv_report = self._parse_report_csv(
        vulnerability_report_contents["raw"],
        "Error: Failed to process query response for remediation vulnerabilities.")
    # Hoisted out of the loop: the limit does not change between rows
    vuln_limit = params.get(Input.VULNERABILITY_LIMIT)
    for row in csv_report:
        # Only track vulnerabilities up to the vulnerability limit (0 means unlimited)
        if (vuln_limit == 0) or (len(remediations[row["solution_id"]]["vulnerabilities"]) < vuln_limit):
            vulnerability = {
                "id": int(row["vulnerability_id"]),
                "title": row["title"],
                "description": row["description"],
                "cvssScore": row["cvss_score"],
                "severity": int(row["severity_score"]),
                "riskScore": int(float(row["riskscore"])),
            }
            remediations[row["solution_id"]]["vulnerabilities"].append(vulnerability)

    self.logger.info(
        f"Top remediations processed, generated {len(remediations)} remediations"
    )

    return {Output.REMEDIATIONS: list(remediations.values())}

def _apply_scope(self, report_payload, params):
    """
    Add the user-selected scope to a report payload in place.

    Mirrors the scope handling previously repeated before each of the
    three reports: scope is only applied when a scope type other than
    "none" is selected and at least one scope ID is provided.
    """
    if (params.get(Input.SCOPE) != "none") and (len(params.get(Input.SCOPE_IDS)) > 0):
        report_payload["scope"] = {
            params.get(Input.SCOPE): params.get(Input.SCOPE_IDS)
        }

def _parse_report_csv(self, raw_report, cause):
    """
    Build a csv.DictReader over raw ad-hoc SQL report contents.

    :param raw_report: Raw CSV text of an ad-hoc SQL report.
    :param cause: PluginException cause to use if the reader cannot be built.
    :return: csv.DictReader over the report rows.
    :raises PluginException: If the report contents cannot be parsed.
    """
    try:
        return csv.DictReader(io.StringIO(raw_report))
    except Exception as e:
        raise PluginException(cause=cause, assistance=f"Exception returned was {e}")