def _enable_loki_logging(self):
    """ Enable Loki logging via CarrierLokiLogHandler and announce it """
    # NOTE(review): earlier revisions built logging_loki sync/async handlers
    # inline here (left commented out); handler creation is now delegated
    # to CarrierLokiLogHandler, which is configured from self.config.
    # Define mode unconditionally so the log call below cannot NameError.
    mode = "sync"
    handler = CarrierLokiLogHandler(self.config)
    # NOTE(review): the handler is constructed but not attached here
    # (addHandler was commented out) — presumably CarrierLokiLogHandler
    # registers itself; confirm before removing this note.
    # logging.getLogger("").addHandler(handler)
    # Use consistent lazy %-style logging instead of mixing .format() with %s
    log.info(
        "Enabled Loki logging in %s mode for Dusty %s",
        mode,
        pkg_resources.require("dusty")[0].version
    )
def prepare(self):
    """ Prepare for action """
    log.info("Preparing")
    # Shared defaults for all processors, if present
    common = self.context.config["general"].get("processing", dict())
    section = self.context.config["processing"]
    if not isinstance(section, dict):
        section = dict()
    for name in section:
        # Overlay per-processor settings on top of the shared defaults
        combined = common.copy()
        combined.update(section[name])
        section[name] = combined
        try:
            # Resolve processor class, validate its config, register instance
            module = importlib.import_module(f"dusty.processing.{name}.processor")
            processor = module.Processor
            processor.validate_config(section[name])
            self.context.processing[processor.get_name()] = processor(self.context)
        except:  # pylint: disable=W0702
            log.exception("Failed to prepare processor %s", name)
def execute(self, args):
    """ Run the command """
    log.info("Starting")
    if args.call_from_legacy:
        log.warning("Called from legacy entry point")
    # Build the shared context and the three pipeline performers
    context = RunContext(args)
    config = ConfigHelper(context)
    scanning = ScanningPerformer(context)
    processing = ProcessingPerformer(context)
    reporting = ReportingPerformer(context)
    # Register performers in the context so they can reach each other
    context.performers.update(
        scanning=scanning, processing=processing, reporting=reporting
    )
    # Load configuration, then validate it for every stage
    config.load(args.config_variable, args.config_file, args.suite)
    pipeline = (scanning, processing, reporting)
    for stage in pipeline:
        stage.validate_config(context.config)
    # Prepare all stages, then run them in fixed order
    for stage in pipeline:
        stage.prepare()
    for stage in pipeline:
        stage.perform()
    log.info("Done")
def prepare(self):
    """ Prepare scanner """
    # Built-in per-scanner defaults, overridable by user config
    defaults = {
        "semgrep": {
            "ruleset": "/opt/semgrep/rulesets/findsecbugs.yml",
        }
    }
    # Artifact (binary) analysis swaps semgrep for spotbugs
    if self.config.get("artifact_analysis", False):
        selected = ["spotbugs"]
    else:
        selected = ["semgrep"]
    if self.config.get("composition_analysis", False):
        selected.append("dependencycheck")
        # Composition scan path/options fall back to the code scan settings
        self.config["comp_path"] = self.config.get("scan_path", self.config.get("code"))
        self.config["comp_opts"] = self.config.get("scan_opts", "")
    for name in selected:
        log.info("Adding %s scanner", name)
        if name in defaults:
            merged = defaults[name].copy()
            merged.update(self.config)
            effective = merged
        else:
            effective = self.config
        self.context.performers["scanning"].schedule_scanner("sast", name, effective)
def _active_scan(self):
    """ Run ZAP active scan against the configured target """
    target = self.config.get("target")
    log.info("Active scan against target %s", target)
    # Authenticated scans run as the prepared ZAP user
    if self.config.get("auth_script", None):
        scan_id = self._zap_api.ascan.scan_as_user(
            target, self._zap_context, self._zap_user,
            recurse=True, scanpolicyname=self._scan_policy_name
        )
    else:
        scan_id = self._zap_api.ascan.scan(
            target, scanpolicyname=self._scan_policy_name
        )
    # ZAP returns a numeric scan ID on success; anything else means failure
    try:
        int(scan_id)
    except:  # pylint: disable=W0702
        log.warning(
            "ZAP failed to return scan ID (scan_id=%s). Please check that target URL is accessible from Carrier DAST container", scan_id)  # pylint: disable=C0301
        return
    # Poll scan status until it reports 100%
    status.wait_for_completion(
        lambda: int(self._zap_api.ascan.status(scan_id)) < 100,
        lambda: int(self._zap_api.ascan.status(scan_id)),
        "Active scan progress: %d%%"
    )
def _depot_read_config_object(self, obj):
    """ Read and parse a config object from depots.

    Returns the parsed config dict, or an empty dict when the object is
    missing or cannot be parsed (best-effort behavior preserved).
    """
    result = dict()
    if obj is None:
        return result
    data = depots.get_object(self.context, obj)
    if data is None:
        return result
    try:
        self.context.set_meta("depots_resolved_secrets", 0)
        # Expand environment variables, parse YAML, then resolve
        # variable and depot-backed secret substitutions
        # NOTE(review): yaml.FullLoader on depot data — acceptable only if
        # depot contents are trusted; confirm.
        result = self._depot_substitution(
            self._variable_substitution(
                yaml.load(
                    os.path.expandvars(data),
                    Loader=yaml.FullLoader
                )
            )
        )
        log.info("Loaded %s from depots", obj)
        log.debug(
            "Resolved %d object secrets from depots",
            self.context.get_meta("depots_resolved_secrets", 0)
        )
        return result
    except:  # pylint: disable=W0702
        # Previously errors were swallowed silently; still return the
        # empty-dict fallback, but leave a trace for debugging
        log.exception("Failed to load %s from depots", obj)
        return result
def _enable_loki_logging(self):
    """ Attach a Loki handler (sync or async) to the root logger """
    loki_username = self.config.get("username", None)
    loki_password = self.config.get("password", None)
    # Basic auth is used only when both credentials are configured
    auth = None
    if loki_username and loki_password:
        auth = (loki_username, loki_password)
    if self.config.get("async", False):
        # Async mode: log records go through an unbounded queue
        mode = "async"
        handler = logging_loki.LokiQueueHandler(
            Queue(-1),
            url=self.config.get("url"),
            tags={"project": self.context.get_meta("project_name", "Unnamed Project")},
            auth=auth,
        )
    else:
        # Sync mode: records are pushed to Loki inline
        mode = "sync"
        handler = logging_loki.LokiHandler(
            url=self.config.get("url"),
            tags={"project": self.context.get_meta("project_name", "Unnamed Project")},
            auth=auth,
        )
    logging.getLogger("").addHandler(handler)
    # Use consistent lazy %-style logging instead of mixing .format() with %s
    log.info(
        "Enabled Loki logging in %s mode for Dusty %s",
        mode,
        pkg_resources.require("dusty")[0].version
    )
def execute(self):
    """ Run the processor """
    log.info("Injecting issue hashes")
    for item in self.context.findings:
        # Legacy code: build the string the hash is derived from
        hash_source = None
        if isinstance(item, DastFinding):
            title = re.sub('[^A-Za-zА-Яа-я0-9//\\\.\- _]+', '', item.title)  # pylint: disable=W1401
            hash_source = f'{title}_None_None__'
        elif isinstance(item, SastFinding):
            title = re.sub('[^A-Za-zА-Яа-я0-9//\\\.\- _]+', '', item.title)  # pylint: disable=W1401
            cwe = item.get_meta("legacy.cwe", "None")
            line = item.get_meta("legacy.line", "None")
            file = item.get_meta("legacy.file", "")
            hash_source = f'{title}_{cwe}_{line}_{file}_'
        if hash_source is None:
            continue
        issue_hash = hashlib.sha256(
            hash_source.strip().encode('utf-8')).hexdigest()
        # Inject the hash into meta and append it to the description
        item.set_meta("issue_hash", issue_hash)
        suffix = f"\n\n**Issue Hash:** {issue_hash}"
        if isinstance(item, DastFinding):
            item.description += suffix
        if isinstance(item, SastFinding):
            item.description[0] += suffix
def _start_zap(self):
    """ Start ZAP daemon, create API client """
    log.info("Starting ZAP daemon")
    # NOTE(review): default is bind-all ("0.0.0.0"); set
    # bind_all_interfaces=False to keep the daemon on localhost only —
    # confirm this default is intended
    bind_host = "127.0.0.1"
    if self.config.get("bind_all_interfaces", True):
        bind_host = "0.0.0.0"
    # Daemon output is discarded unless daemon_debug is enabled
    daemon_out = subprocess.DEVNULL
    if self.config.get("daemon_debug", False):
        daemon_out = sys.stdout
    # Fresh throwaway home directory for this ZAP run
    zap_home_dir = tempfile.mkdtemp()
    log.debug("ZAP home directory: %s", zap_home_dir)
    # NOTE(review): java_options is passed as a single argv element, so a
    # value containing spaces is NOT split into multiple JVM options
    self._zap_daemon = subprocess.Popen([
        "/usr/bin/java", self.config.get("java_options", "-Xmx499m"),
        "-jar", constants.ZAP_PATH,
        "-dir", zap_home_dir, "-daemon", "-port", "8091", "-host", bind_host,
        "-config", "api.key=dusty",
        "-config", "api.addrs.addr.regex=true",
        "-config", "api.addrs.addr.name=.*",
        "-config", "ajaxSpider.browserId=htmlunit"
    ], stdout=daemon_out, stderr=daemon_out)
    # API client reaches the daemon through its local proxy port
    self._zap_api = ZAPv2(
        apikey="dusty",
        proxies={
            "http": "http://127.0.0.1:8091",
            "https": "http://127.0.0.1:8091"
        }
    )
def report(self):
    """ Render the HTML report and send it by email """
    log.info("Sending mail to %s", self.config.get("mail_to"))
    presenter = EMailPresenter(self.context, self.config)
    # Prepare email body from the packaged HTML template
    environment = Environment(
        loader=PackageLoader(
            "dusty", f"{'/'.join(__name__.split('.')[1:-1])}/data"),
        autoescape=select_autoescape(["html", "xml"])
    )
    template = environment.get_template("email.html")
    html_body = template.render(presenter=presenter)
    # Send email
    helper = EmailHelper(
        self.context,
        self.config.get("server"),
        self.config.get("login"),
        self.config.get("password"),
        int(self.config.get("port", constants.DEFAULT_SERVER_PORT)))
    # Drop empty entries: "".split(",") yields [""], which previously made
    # the CC branch trigger (with an empty address) even when mail_cc was
    # not configured at all
    mail_to = [
        item.strip()
        for item in self.config.get("mail_to").split(",")
        if item.strip()
    ]
    mail_cc = [
        item.strip()
        for item in self.config.get("mail_cc", "").split(",")
        if item.strip()
    ]
    if mail_cc:
        helper.send_with_cc(mail_to, mail_cc, presenter.subject,
                            html_body=html_body,
                            attachments=presenter.attachments)
    else:
        helper.send(mail_to, presenter.subject,
                    html_body=html_body,
                    attachments=presenter.attachments)
def perform(self):
    """ Perform action """
    log.info("Starting processing")
    # Processors may register new processors while running, so keep
    # sweeping the registry until a full pass finds nothing new
    done = set()
    changed = True
    while changed:
        changed = False
        for name in list(self.context.processors):
            if name in done:
                continue
            done.add(name)
            changed = True
            processor = self.context.processors[name]
            try:
                processor.execute()
            except:  # pylint: disable=W0702
                log.exception("Processor %s failed", name)
                self.context.errors.append(Error(
                    tool=name,
                    error=f"Processor {name} failed",
                    details=f"```\n{traceback.format_exc()}\n```"
                ))
            # Collect non-fatal errors reported by the processor itself
            self.context.errors.extend(processor.get_errors())
def prepare(self):
    """ Prepare for action """
    log.info("Preparing")
    # Shared per-type defaults for scanners, if present
    general_config = self.context.config["general"].get("scanners", dict())
    config = self.context.config["scanners"]
    for scanner_type in config:
        for scanner_name in config[scanner_type]:
            if not isinstance(config[scanner_type][scanner_name], dict):
                config[scanner_type][scanner_name] = dict()
            # Overlay scanner settings on top of the type-level defaults
            if scanner_type in general_config:
                merged = general_config[scanner_type].copy()
                merged.update(config[scanner_type][scanner_name])
                config[scanner_type][scanner_name] = merged
            try:
                # Resolve scanner class, validate config, register instance
                scanner = importlib.import_module(
                    f"dusty.scanners.{scanner_type}.{scanner_name}.scanner"
                ).Scanner
                scanner.validate_config(config[scanner_type][scanner_name])
                self.context.scanners[scanner.get_name()] = scanner(self.context)
            except:  # pylint: disable=W0702
                log.exception(
                    "Failed to prepare %s scanner %s",
                    scanner_type, scanner_name
                )
def prepare(self):
    """ Prepare for action """
    log.info("Preparing")
    # Shared defaults for all reporters, if present
    common = self.context.config["general"].get("reporters", dict())
    section = self.context.config["reporters"]
    if not isinstance(section, dict):
        section = dict()
    for name in section:
        # Overlay per-reporter settings on top of the shared defaults
        combined = common.copy()
        combined.update(section[name])
        section[name] = combined
        try:
            # Resolve reporter class, validate its config, register instance
            module = importlib.import_module(f"dusty.reporters.{name}.reporter")
            reporter = module.Reporter
            reporter.validate_config(section[name])
            self.context.reporters[reporter.get_name()] = reporter(self.context)
        except:  # pylint: disable=W0702
            log.exception("Failed to prepare reporter %s", name)
def execute(self, args):
    """ Run the command """
    log.info("Starting")
    # fill_config hooks are invoked on the classes themselves —
    # no instances are needed to generate the config skeleton
    config = ConfigHelper
    scanning = ScanningPerformer
    processing = ProcessingPerformer
    reporting = ReportingPerformer
    # Build the commented config structure
    data = CommentedMap()
    config.fill_config(data)
    suites = data["suites"]
    suites.insert(len(suites), "example", CommentedMap(),
                  comment="Example test suite")
    example = suites["example"]
    example.insert(0, "general", CommentedMap(), comment="General config")
    for section_filler in (scanning, processing, reporting):
        section_filler.fill_config(example)
    # Save to file
    dumper = ruamel.yaml.YAML()
    with open(args.output_file, "wb") as output:
        dumper.dump(data, output)
    log.info("Done")
def prepare(self):
    """ Prepare scanner """
    # Single scanner to schedule for this language
    for scanner in ("brakeman",):
        log.info("Adding %s scanner", scanner)
        self.context.performers["scanning"].schedule_scanner(
            "sast", scanner, self.config
        )
def report(self):
    """ Report """
    log.info("Sending tool reports to Galloper")
    # Get options
    bucket = self.config.get("bucket")
    tgtobj = self.config.get("object")
    source = self.config.get("source")
    # Zip the whole source tree into a temporary file, then upload it
    with tempfile.TemporaryFile() as payload:
        with zipfile.ZipFile(payload, "w", zipfile.ZIP_DEFLATED) as archive:
            root = os.path.abspath(source)
            for current, _, files in os.walk(root):
                # The root itself maps to an empty archive path
                relative = "" if current == root else os.path.relpath(current, root)
                # Record the directory entry, then every file inside it
                archive.write(current, arcname=relative)
                for entry in files:
                    archive.write(
                        os.path.join(current, entry),
                        arcname=os.path.join(relative, entry)
                    )
        payload.seek(0)
        # Optional bearer-token auth from the environment
        headers = dict()
        if os.environ.get("token"):
            headers["Authorization"] = f"Bearer {os.environ.get('token')}"
        url = f"{os.environ.get('galloper_url')}/api/v1/artifacts/" \
              f"{os.environ.get('project_id')}/{bucket}/{tgtobj}"
        requests.post(url, headers=headers, files={"file": (f"{tgtobj}", payload)})
def save_intermediates(self, output_file, config_file, task):
    """ Save scanner intermediates """
    target = self.config.get("save_intermediates_to", None)
    if not target:
        return
    log.info("Saving intermediates")
    base = os.path.join(target, __name__.split(".")[-2])
    try:
        # Make directory for artifacts
        os.makedirs(base, mode=0o755, exist_ok=True)
        # Keep the raw report and the generated scanner config
        shutil.copyfile(output_file, os.path.join(base, "report.xml"))
        shutil.copyfile(config_file, os.path.join(base, "config.w3af"))
        # Keep both output streams of the scanner process
        with open(os.path.join(base, "output.stdout"), "w") as stream:
            stream.write(task.stdout.decode("utf-8", errors="ignore"))
        with open(os.path.join(base, "output.stderr"), "w") as stream:
            stream.write(task.stderr.decode("utf-8", errors="ignore"))
    except:  # pylint: disable=W0702
        log.exception("Failed to save intermediates")
def _wait_for_zap_start(self):
    """ Poll the ZAP API until the daemon responds; give up after 600 tries """
    attempts = 600
    while attempts:
        attempts -= 1
        try:
            # A successful version query means the daemon is up
            log.info("Started ZAP %s", self._zap_api.core.version)
            return True
        except IOError:
            time.sleep(1)
    return False
def report(self):
    """ Report """
    # dynamic_jira config switches between multi-instance and normal mode
    if self.config.get("dynamic_jira", None):
        log.info("Using MultiJira reporting")
        self.report_multi()
        return
    log.info("Using normal reporting")
    self.report_normal()
def execute(self):
    """ Run the processor """
    log.info("Rewriting finding titles")
    for finding in self.context.findings:
        if not isinstance(finding, (DastFinding, SastFinding)):
            continue
        new_title = finding.get_meta("rewrite_title_to", None)
        if new_title:
            # Keep the original title around for traceability
            finding.set_meta("original_title", finding.title)
            finding.title = new_title
def execute(self):
    """ Run the processor """
    severity = self.config.get("severity", constants.DEFAULT_SEVERITY)
    log.info("Filtering findings below %s level", severity)
    # Hoist the invariant threshold lookup out of the loop
    threshold = SEVERITIES.index(severity)
    for finding in self.context.findings:
        rank = SEVERITIES.index(finding.get_meta("severity", SEVERITIES[-1]))
        # Anything less severe than the threshold is marked informational
        if rank > threshold:
            finding.set_meta("information_finding", True)
def report(self):
    """ Report """
    # Bail out early when the ReportPortal client failed to initialize
    if not self._rp_client:
        log.warning(
            "ReportPortal configuration/connection is invalid. Skipping RP reporting"
        )
        return
    log.info("Reporting to ReportPortal")
    for item in self.context.findings:
        # Skip findings excluded by earlier processing stages
        if item.get_meta("information_finding", False) or \
                item.get_meta("false_positive_finding", False) or \
                item.get_meta("excluded_finding", False):
            continue
        if isinstance(item, DastFinding):
            item_details = markdown.markdown_unescape(item.description)
            item_description = item_details
            tags = [
                f'Tool: {item.get_meta("tool", "")}',
                f'TestType: {self.context.get_meta("testing_type", "DAST")}',
                f'Severity: {item.get_meta("severity", SEVERITIES[-1])}'
            ]
            if item.get_meta("confidence", None):
                tags.append(f'Confidence: {item.get_meta("confidence")}')
            self._rp_client.start_test_item(
                item.title, description=item_description, tags=tags)
            # Attach images collected by legacy scanners, if any
            if item.get_meta("legacy.images", None):
                for attachment in item.get_meta("legacy.images"):
                    self._rp_client.test_item_message(
                        attachment["name"], "INFO", attachment)
            self._rp_client.test_item_message(
                "!!!MARKDOWN_MODE!!! %s " % item_details, "INFO")
            self._rp_client.test_item_message(
                item.get_meta("issue_hash", "<no_hash>"), "ERROR")
            self._rp_client.finish_test_item()
        elif isinstance(item, SastFinding):
            # SAST descriptions are stored as a list of markdown chunks
            item_details = markdown.markdown_unescape("\n\n".join(
                item.description))
            item_description = item_details
            tags = [
                f'Tool: {item.get_meta("tool", "")}',
                f'TestType: {self.context.get_meta("testing_type", "SAST")}',
                f'Severity: {item.get_meta("severity", SEVERITIES[-1])}'
            ]
            if item.get_meta("confidence", None):
                tags.append(f'Confidence: {item.get_meta("confidence")}')
            self._rp_client.start_test_item(
                item.title, description=item_description, tags=tags)
            self._rp_client.test_item_message(
                "!!!MARKDOWN_MODE!!! %s " % item_details, "INFO")
            self._rp_client.test_item_message(
                item.get_meta("issue_hash", "<no_hash>"), "ERROR")
            self._rp_client.finish_test_item()
        else:
            log.warning("Unsupported finding type")
            continue  # raise ValueError("Unsupported item type")
    self._rp_client.finish_test()
def prepare(self):
    """ Prepare scanner """
    selected = ["bandit"]
    # Composition analysis adds dependency checking via safety
    if self.config.get("composition_analysis", False):
        selected.append("safety")
    for name in selected:
        log.info("Adding %s scanner", name)
        self.context.performers["scanning"].schedule_scanner(
            "sast", name, self.config
        )
def parse_findings(filename, scanner):
    """ Parse findings from a JSON report and append them to the scanner """
    # Load JSON
    try:
        with open(filename, "r") as file:
            data = json.load(file)
    except:  # pylint: disable=W0702
        log.exception("Failed to load report JSON")
        return
    # Load CWE map (packaged data file)
    cwe_map = json.loads(
        pkg_resources.resource_string(
            "dusty",
            f"{'/'.join(__name__.split('.')[1:-1])}/data/cwe_map_v4.2.json"
        ))
    # Parse JSON
    if not isinstance(data, dict) or "vulnerabilities" not in data:
        log.info("No data in report")
        return
    # Make finding instances
    for item in data["vulnerabilities"]:
        vuln_severity = cvss_to_severity(item.get("cvss", 0.0))
        vuln_cwe = item.get("cwe", "Vulnerability")
        # Title: human-readable CWE name (when mapped) plus location
        vuln_cwe_title = cwe_map.get(vuln_cwe, vuln_cwe)
        vuln_file_title = f" in {item.get('classMessage')}" if "classMessage" in item else ""
        vuln_title = f"{vuln_cwe_title}{vuln_file_title}"
        # File: classMessage without the trailing " (...)" suffix
        vuln_file = item.get("classMessage", "").rsplit(" (", 1)[0]
        # Description chunks
        vuln_info_chunks = list()
        if "longMessage" in item:
            vuln_info_chunks.append(
                markdown.markdown_escape(item["longMessage"]))
        if "shortMessage" in item:
            vuln_info_chunks.append(
                markdown.markdown_escape(item["shortMessage"]))
        # Guard optional keys: entries without classMessage/method used to
        # raise KeyError here and abort the whole parse
        if "classMessage" in item:
            vuln_info_chunks.append(
                f"**Class:** {markdown.markdown_escape(item['classMessage'])}")
        if "method" in item:
            vuln_info_chunks.append(
                f"**Method:** {markdown.markdown_escape(item['method'])}")
        if "affectedFiles" in item:
            vuln_info_chunks.append(
                f"**Files:** {markdown.markdown_escape(', '.join(item['affectedFiles']))}"
            )
        # Create finding object
        finding = SastFinding(
            title=vuln_title, description=["\n\n".join(vuln_info_chunks)])
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", vuln_severity)
        finding.set_meta("legacy.file", vuln_file)
        endpoints = list()
        if vuln_file:
            endpoints.append(namedtuple("Endpoint", ["raw"])(raw=vuln_file))
        finding.set_meta("endpoints", endpoints)
        # Lazy %-style logging instead of an eagerly-built f-string
        log.debug("Endpoints: %s", finding.get_meta("endpoints"))
        scanner.findings.append(finding)
def report(self):
    """ Report """
    log.info("Starting reporting")
    # Run every registered reporter; one failure must not stop the rest
    for name, reporter in self.context.reporters.items():
        try:
            reporter.report()
        except:  # pylint: disable=W0702
            log.exception("Reporter %s failed", name)
def load(self, config_seed, config_variable, config_file, suite):
    """ Load and parse config """
    parsed = self._load_config(config_seed, config_variable, config_file)
    if not self._validate_config_base(parsed, suite):
        raise ValueError("Invalid config")
    # Prepare the suite-specific view first, then publish it on the context
    prepared = self._prepare_context_config(parsed, suite)
    self.context.suite = suite
    self.context.config = prepared
    log.debug("Resulting context config: %s", self.context.config)
    log.info("Loaded %s suite configuration", self.context.suite)
def save(self, state_key=None):
    """ Save state """
    # Fall back to the default state key when none is given
    key = state_key if state_key is not None else self.get_state_key()
    try:
        if depots.save_state(self.context, key, self.storage) is True:
            log.info("Saved state for %s", key)
    except:  # pylint: disable=W0702
        log.exception("Failed to save state")
def on_start(self):
    """ Called when testing starts """
    log.info("Testing started")
    self.testing_start_time = time.time()
    # Notify every reporter; one failure must not stop the rest
    for name, reporter in self.context.reporters.items():
        try:
            reporter.on_start()
        except:  # pylint: disable=W0702
            log.exception("Reporter %s failed", name)
def on_scanner_start(self, scanner):
    """ Called when scanner starts """
    log.info("Started scanning with %s", scanner)
    self.scanner_start_time[scanner] = time.time()
    # Notify every reporter; one failure must not stop the rest
    for name, reporter in self.context.reporters.items():
        try:
            reporter.on_scanner_start(scanner)
        except:  # pylint: disable=W0702
            log.exception("Reporter %s failed", name)
def load(self, state_key=None):
    """ Load state """
    # Fall back to the default state key when none is given
    key = state_key if state_key is not None else self.get_state_key()
    try:
        data = depots.load_state(self.context, key)
        # Only accept well-formed (dict) state payloads
        if isinstance(data, dict):
            self.storage = data
            log.info("Loaded state for %s", key)
    except:  # pylint: disable=W0702
        log.exception("Failed to load state")