def prepare(self):
    """ Prepare for action """
    log.info("Preparing")
    general_config = dict()
    if "scanners" in self.context.config["general"]:
        general_config = self.context.config["general"]["scanners"]
    config = self.context.config["scanners"]
    for scanner_type in config:
        for scanner_name in config[scanner_type]:
            if not isinstance(config[scanner_type][scanner_name], dict):
                config[scanner_type][scanner_name] = dict()
            # Merge general config
            if scanner_type in general_config:
                merged_config = general_config[scanner_type].copy()
                merged_config.update(config[scanner_type][scanner_name])
                config[scanner_type][scanner_name] = merged_config
            try:
                # Init scanner instance
                scanner = importlib.import_module(
                    f"dusty.scanners.{scanner_type}.{scanner_name}.scanner"
                ).Scanner
                # Validate config
                scanner.validate_config(config[scanner_type][scanner_name])
                # Add to context
                self.context.scanners[scanner.get_name()] = scanner(self.context)
            except:
                log.exception("Failed to prepare %s scanner %s", scanner_type, scanner_name)
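# Merge sketch (hypothetical config values): given
#   general:  {"scanners": {"sast": {"code": "/code"}}}
#   scanners: {"sast": {"bandit": {"code": "/app/src"}}}
# the bandit scanner ends up configured as {"code": "/app/src"}:
# per-scanner keys override the general per-type defaults.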
def parse_findings(data, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser
    try:
        findings = BrakemanParser(data).items
    except:  # pylint: disable=W0702
        log.exception("Failed to parse brakeman report")
        log.warning("Possibly the Ruby code path is invalid or this is not a Ruby-on-Rails application")
        findings = list()
    # Make finding instances
    for item in findings:
        finding = SastFinding(
            title=item["title"],
            description=[
                "\n\n".join([
                    markdown.markdown_escape(item["description"]),
                    f"**References:** {markdown.markdown_escape(item['references'])}",
                    f"**File to review:** {markdown.markdown_escape(item['file_path'])}"
                    f":{item['line']}"
                ])
            ]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", constants.BRAKEMAN_SEVERITY_MAPPING[item["severity"]])
        finding.set_meta("legacy.file", item["file_path"])
        finding.set_meta("legacy.line", item["line"])
        finding.set_meta("endpoints", [namedtuple("Endpoint", ["raw"])(raw=item["file_path"])])
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)
def get_project_priorities(jira_client, project, issue_type="Bug"):
    """ Returns list of Jira priorities in project """
    try:
        meta = jira_client.createmeta(
            projectKeys=project,
            issuetypeNames=issue_type,
            expand="projects.issuetypes.fields"
        )
        logging.debug("Got metadata for %d projects", len(meta["projects"]))
        if not meta["projects"]:
            logging.error("No meta returned for %s with type %s", project, issue_type)
            return []
        project_meta = meta["projects"][0]
        logging.debug("Got metadata for %d issuetypes", len(project_meta["issuetypes"]))
        if not project_meta["issuetypes"]:
            logging.error("No %s in %s", issue_type, project)
            return []
        issue_types = project_meta["issuetypes"][0]
        if "priority" not in issue_types["fields"]:
            logging.error("No priority field in %s", project)
            return []
        priorities = [
            priority["name"]
            for priority in issue_types["fields"]["priority"]["allowedValues"]
        ]
        return priorities
    except:  # pylint: disable=W0702
        logging.exception("Failed to get meta for %s", project)
        return []
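# Usage sketch (hypothetical server and credentials, assuming the python
# "jira" client whose JIRA.createmeta() this helper calls):
#
#   client = jira.JIRA("https://jira.example.com", basic_auth=("user", "password"))
#   priorities = get_project_priorities(client, "PROJ", issue_type="Bug")
#   # e.g. ["Highest", "High", "Medium", "Low", "Lowest"] on a default setup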
def connect(self):
    """ Establish connection to SMTP server """
    try:
        self.connection = smtplib.SMTP(self.server, self.port, timeout=self.timeout)
        self.connection.ehlo()
        self.connection.starttls(context=ssl.create_default_context())
        self.connection.ehlo()
        self.connection.login(self.login, self.password)
    except ssl.SSLError:
        log.warning("SSL error, retrying with unverified SSL context")
        self.connection = smtplib.SMTP(self.server, self.port, timeout=self.timeout)
        self.connection.ehlo()
        self.connection.starttls(context=ssl._create_unverified_context())  # pylint: disable=W0212
        self.connection.ehlo()
        self.connection.login(self.login, self.password)
    except smtplib.SMTPServerDisconnected:
        log.warning("Seems like SMTP with TLS didn't work, trying with SMTP_SSL")
        self.connection = smtplib.SMTP_SSL(host=self.server, port=self.port, timeout=self.timeout)
        self.connection.ehlo()
        self.connection.login(self.login, self.password)
    except:  # pylint: disable=W0702
        log.exception("Failed to connect to SMTP server")
        error = Error(
            tool="EMail",
            error="Failed to connect to SMTP server",
            details=f"```\n{traceback.format_exc()}\n```"
        )
        self.context.errors.append(error)
        if self.connection:
            self.connection.quit()
def save_intermediates(self, output_file, config_file, task):
    """ Save scanner intermediates """
    if self.config.get("save_intermediates_to", None):
        log.info("Saving intermediates")
        base = os.path.join(self.config.get("save_intermediates_to"), __name__.split(".")[-2])
        try:
            # Make directory for artifacts
            os.makedirs(base, mode=0o755, exist_ok=True)
            # Save report
            shutil.copyfile(output_file, os.path.join(base, "report.xml"))
            # Save config
            shutil.copyfile(config_file, os.path.join(base, "config.w3af"))
            # Save output
            with open(os.path.join(base, "output.stdout"), "w") as output:
                output.write(task.stdout.decode("utf-8", errors="ignore"))
            with open(os.path.join(base, "output.stderr"), "w") as output:
                output.write(task.stderr.decode("utf-8", errors="ignore"))
        except:
            log.exception("Failed to save intermediates")
def execute(self):
    """ Run the scanner """
    path = self.config.get("code")
    # Collect reports to parse
    reports = list()
    if os.path.isdir(path):
        for root, _, files in os.walk(path):
            for name in files:
                reports.append(os.path.join(root, name))
    else:
        reports.append(path)
    if self.config.get("mail_report", True):
        if self.config.get("rename_mail_attachment", True):
            filename = self.config.get(
                "rename_pattern",
                "PTAI_{project_name}_{testing_type}_{scan_type}_{build_id}.html"
            ).format(**self.context.meta)
            attachment = (path, filename)
            self.set_meta("report_file", attachment)
        else:
            self.set_meta("report_file", path)
    # Parse reports
    for report in reports:
        try:
            parse_findings(report, self)
        except:
            error = f"Failed to parse PT AI report {report}"
            log.exception(error)
            self.errors.append(
                Error(
                    tool=self.get_name(),
                    error=error,
                    details=f"```\n{traceback.format_exc()}\n```"
                )
            )
def prepare(self):
    """ Prepare for action """
    log.info("Preparing")
    general_config = dict()
    if "reporters" in self.context.config["general"]:
        general_config = self.context.config["general"]["reporters"]
    config = self.context.config["reporters"]
    if not isinstance(config, dict):
        config = dict()
    for reporter_name in config:
        # Ensure item config is a dict (empty YAML values come through as None)
        if not isinstance(config[reporter_name], dict):
            config[reporter_name] = dict()
        # Merge general config
        merged_config = general_config.copy()
        merged_config.update(config[reporter_name])
        config[reporter_name] = merged_config
        try:
            # Init reporter instance
            reporter = importlib.import_module(
                f"dusty.reporters.{reporter_name}.reporter"
            ).Reporter
            # Validate config
            reporter.validate_config(config[reporter_name])
            # Add to context
            self.context.reporters[reporter.get_name()] = reporter(self.context)
        except:
            log.exception("Failed to prepare reporter %s", reporter_name)
def prepare(self):
    """ Prepare for action """
    log.debug("Preparing")
    config = self.context.config.get("processing")
    config_items = [
        item for item in list(config)
        if not isinstance(config[item], bool) or config[item]
    ]
    disabled_items = [
        item for item in list(config)
        if isinstance(config[item], bool) and not config[item]
    ]
    # Schedule processors
    try:
        all_processors = dependency.resolve_name_order(
            config_items + [
                item for item in constants.DEFAULT_PROCESSORS
                if item not in disabled_items
            ],
            "dusty.processors.{}.processor", "Processor"
        )
    except:
        all_processors = [
            item for item in constants.DEFAULT_PROCESSORS
            if item not in disabled_items
        ] + config_items
    for processor_name in all_processors:
        try:
            self.schedule_processor(processor_name, dict())
        except:
            log.exception("Failed to prepare processor %s", processor_name)
            error = Error(
                tool=processor_name,
                error=f"Failed to prepare processor {processor_name}",
                details=f"```\n{traceback.format_exc()}\n```"
            )
            self.context.errors.append(error)
    # Resolve dependencies once again
    dependency.resolve_depencies(self.context.processors)
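# Config sketch (hypothetical processor names): a value of false disables a
# default processor; anything else (true, or a config mapping) schedules it:
#
#   processing:
#     min_severity_filter:
#       severity: Medium          # scheduled with this config
#     false_positive: false       # default processor, explicitly disabled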
def parse_findings(data, scanner):
    """ Parse findings """
    try:
        findings = GitleaksScanParser(data, scanner).items
        # Make finding instances
        for item in findings:
            description = item["description"]
            if scanner.config.get("additional_text", None):
                description = scanner.config.get("additional_text") + "\n\n" + description
            finding = SastFinding(
                title=item["title"],
                description=[
                    "\n\n".join([
                        description,
                        f"**File to review:** {markdown.markdown_escape(item['file_path'])}"
                    ])
                ]
            )
            finding.set_meta("tool", scanner.get_name())
            finding.set_meta("severity", item["severity"])
            finding.set_meta("legacy.file", item["file_path"])
            endpoints = list()
            if item["file_path"]:
                endpoints.append(Endpoint(raw=item["file_path"]))
            finding.set_meta("endpoints", endpoints)
            log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
            scanner.findings.append(finding)
    except:  # pylint: disable=W0702
        log.exception("Failed to parse findings")
def perform(self):
    """ Perform action """
    log.info("Starting processing")
    # Run processors; a processor may schedule additional processors while
    # executing, so iterate until a full pass finds nothing left to run
    performed = set()
    perform_processing_iteration = True
    while perform_processing_iteration:
        perform_processing_iteration = False
        for processor_module_name in list(self.context.processors):
            if processor_module_name in performed:
                continue
            performed.add(processor_module_name)
            perform_processing_iteration = True
            processor = self.context.processors[processor_module_name]
            try:
                processor.execute()
            except:
                log.exception("Processor %s failed", processor_module_name)
                error = Error(
                    tool=processor_module_name,
                    error=f"Processor {processor_module_name} failed",
                    details=f"```\n{traceback.format_exc()}\n```"
                )
                self.context.errors.append(error)
            self.context.errors.extend(processor.get_errors())
def schedule_reporter(self, reporter_name, reporter_config):
    """ Schedule reporter run in current context after all already configured reporters """
    try:
        # Init reporter instance
        reporter = importlib.import_module(
            f"dusty.reporters.{reporter_name}.reporter"
        ).Reporter
        if reporter.get_name() in self.context.reporters:
            log.debug("Reporter %s already scheduled", reporter_name)
            return
        # Prepare config
        config = self.context.config["reporters"]
        if reporter_name not in config or not isinstance(config[reporter_name], dict):
            config[reporter_name] = dict()
        if "reporters" in self.context.config["settings"]:
            general_config = self.context.config["settings"]["reporters"]
            merged_config = general_config.copy()
            merged_config.update(config[reporter_name])
            config[reporter_name] = merged_config
        config[reporter_name].update(reporter_config)
        # Validate config
        reporter.validate_config(config[reporter_name])
        # Add to context
        self.context.reporters[reporter.get_name()] = reporter(self.context)
        # Resolve dependencies
        dependency.resolve_depencies(self.context.reporters)
        # Done
        log.debug("Scheduled reporter %s", reporter_name)
    except:
        log.exception("Failed to schedule reporter %s", reporter_name)
        error = Error(
            tool=reporter_name,
            error=f"Failed to schedule reporter {reporter_name}",
            details=f"```\n{traceback.format_exc()}\n```"
        )
        self.context.errors.append(error)
def prepare(self):
    """ Prepare for action """
    log.info("Preparing")
    general_config = dict()
    if "processing" in self.context.config["general"]:
        general_config = self.context.config["general"]["processing"]
    config = self.context.config["processing"]
    if not isinstance(config, dict):
        config = dict()
    for processor_name in config:
        # Ensure item config is a dict (empty YAML values come through as None)
        if not isinstance(config[processor_name], dict):
            config[processor_name] = dict()
        # Merge general config
        merged_config = general_config.copy()
        merged_config.update(config[processor_name])
        config[processor_name] = merged_config
        try:
            # Init processor instance
            processor = importlib.import_module(
                f"dusty.processing.{processor_name}.processor"
            ).Processor
            # Validate config
            processor.validate_config(config[processor_name])
            # Add to context
            self.context.processing[processor.get_name()] = processor(self.context)
        except:
            log.exception("Failed to prepare processor %s", processor_name)
def parse_findings(filename, scanner):
    """ Parse findings """
    # Load JSON
    try:
        with open(filename, "r") as file:
            data = json.load(file)
    except:  # pylint: disable=W0702
        log.exception("Failed to load report JSON")
        return
    # Load CWE map
    cwe_map = json.loads(
        pkg_resources.resource_string(
            "dusty",
            f"{'/'.join(__name__.split('.')[1:-1])}/data/cwe_map_v4.2.json"
        )
    )
    # Parse JSON
    if not isinstance(data, dict) or "vulnerabilities" not in data:
        log.info("No data in report")
        return
    # Make finding instances
    for item in data["vulnerabilities"]:
        vuln_severity = cvss_to_severity(item.get("cvss", 0.0))
        vuln_cwe = item.get("cwe", "Vulnerability")
        #
        vuln_cwe_title = cwe_map[vuln_cwe] if vuln_cwe in cwe_map else vuln_cwe
        vuln_file_title = f" in {item.get('classMessage')}" if "classMessage" in item else ""
        vuln_title = f"{vuln_cwe_title}{vuln_file_title}"
        #
        vuln_file = item.get("classMessage", "").rsplit(" (", 1)[0]
        #
        vuln_info_chunks = list()
        if "longMessage" in item:
            vuln_info_chunks.append(markdown.markdown_escape(item["longMessage"]))
        if "shortMessage" in item:
            vuln_info_chunks.append(markdown.markdown_escape(item["shortMessage"]))
        vuln_info_chunks.append(f"**Class:** {markdown.markdown_escape(item['classMessage'])}")
        vuln_info_chunks.append(f"**Method:** {markdown.markdown_escape(item['method'])}")
        if "affectedFiles" in item:
            vuln_info_chunks.append(
                f"**Files:** {markdown.markdown_escape(', '.join(item['affectedFiles']))}"
            )
        #
        finding = SastFinding(
            title=vuln_title,
            description=["\n\n".join(vuln_info_chunks)]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", vuln_severity)
        finding.set_meta("legacy.file", vuln_file)
        endpoints = list()
        if vuln_file:
            endpoints.append(namedtuple("Endpoint", ["raw"])(raw=vuln_file))
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)
def save(self, state_key=None):
    """ Save state """
    if state_key is None:
        state_key = self.get_state_key()
    try:
        result = depots.save_state(self.context, state_key, self.storage)
        if result is True:
            log.info("Saved state for %s", state_key)
    except:  # pylint: disable=W0702
        log.exception("Failed to save state")
def report(self):
    """ Report """
    log.info("Starting reporting")
    # Run reporters
    for reporter_module_name in self.context.reporters:
        reporter = self.context.reporters[reporter_module_name]
        try:
            reporter.report()
        except:
            log.exception("Reporter %s failed", reporter_module_name)
def load(self, state_key=None):
    """ Load state """
    if state_key is None:
        state_key = self.get_state_key()
    try:
        state_data = depots.load_state(self.context, state_key)
        if isinstance(state_data, dict):
            self.storage = state_data
            log.info("Loaded state for %s", state_key)
    except:  # pylint: disable=W0702
        log.exception("Failed to load state")
def on_scanner_start(self, scanner):
    """ Called when scanner starts """
    log.info("Started scanning with %s", scanner)
    self.scanner_start_time[scanner] = time.time()
    # Run reporters
    for reporter_module_name in self.context.reporters:
        reporter = self.context.reporters[reporter_module_name]
        try:
            reporter.on_scanner_start(scanner)
        except:
            log.exception("Reporter %s failed", reporter_module_name)
def on_start(self):
    """ Called when testing starts """
    log.info("Testing started")
    self.testing_start_time = time.time()
    # Run reporters
    for reporter_module_name in self.context.reporters:
        reporter = self.context.reporters[reporter_module_name]
        try:
            reporter.on_start()
        except:
            log.exception("Reporter %s failed", reporter_module_name)
def put_object(self, key, data):
    """ Put object by key """
    try:
        if isinstance(data, str):
            data = data.encode("utf-8")
        with open(os.path.join(self.config.get("path"), key), "wb") as file:
            file.write(data)
        return True
    except:  # pylint: disable=W0702
        log.exception("Failed to put object")
        return False
def put_object(self, key, data):
    """ Put object by key """
    try:
        if isinstance(data, str):
            data = data.encode("utf-8")
        data_obj = io.BytesIO(data)
        self.client.put_object(self.config.get("bucket", "carrier"), key, data_obj, len(data))
        return True
    except:  # pylint: disable=W0702
        log.exception("Failed to put object")
        return False
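# Usage sketch (hypothetical key and payload; self.client is presumably a
# MinIO-style client whose put_object() takes bucket, key, stream, length):
#
#   depot.put_object("reports/report.html", "<html>...</html>")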
def on_finish(self):
    """ Called when testing ends """
    self.testing_finish_time = time.time()
    log.info(
        "Testing finished (%d seconds)",
        int(self.testing_finish_time - self.testing_start_time)
    )
    # Run reporters
    for reporter_module_name in self.context.reporters:
        reporter = self.context.reporters[reporter_module_name]
        try:
            reporter.on_finish()
        except:
            log.exception("Reporter %s failed", reporter_module_name)
def main():
    """ Main """
    # Initialize argument parser
    parent = argparse.ArgumentParser(add_help=False)
    parent.add_argument(
        "-d", "--debug", dest="log_level",
        help="enable debug output",
        action="store_const", const=DEBUG, default=INFO
    )
    parent.add_argument(
        "--call-from-legacy", dest="call_from_legacy",
        help=argparse.SUPPRESS,
        action="store_true", default=False
    )
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    subparsers = parser.add_subparsers(
        dest="command", title="commands",
        help="command to execute, use <command> -h to get command help"
    )
    subparsers.required = True
    # Load commands
    commands = dict()
    for _, name, pkg in pkgutil.iter_modules(dusty.commands.__path__):
        if pkg:
            continue
        module = importlib.import_module("dusty.commands.{}".format(name))
        argparser = subparsers.add_parser(
            module.Command.get_name(),
            parents=[parent],
            help=module.Command.get_description(),
            formatter_class=argparse.ArgumentDefaultsHelpFormatter
        )
        commands[module.Command.get_name()] = module.Command(argparser)
    # Parse arguments
    args = parser.parse_args()
    # Setup logging
    log.init(args.log_level)
    # Display welcome message
    log.info(f"Dusty {pkg_resources.require('dusty')[0].version} is starting")
    log.debug("Loaded commands: {}".format(", ".join(list(commands.keys()))))
    # Install interrupt signal handler
    signal.signal(signal.SIGINT, interrupt_handler)
    # Run selected command
    try:
        commands[args.command].execute(args)
    except:
        log.exception("Command failed")
        os._exit(1)  # pylint: disable=W0212
    # Display bye-bye message
    log.info("All done. Have a nice day")
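# Invocation sketch: the command set is discovered at runtime from modules
# under dusty.commands, so the exact names depend on the installed package:
#
#   dusty <command> -h    # per-command help
#   dusty <command> -d    # run with debug output enabled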
def on_scanner_finish(self, scanner):
    """ Called when scanner ends """
    # Run reporters
    for reporter_module_name in self.context.reporters:
        reporter = self.context.reporters[reporter_module_name]
        try:
            reporter.on_scanner_finish(scanner)
        except:
            log.exception("Reporter %s failed", reporter_module_name)
            error = Error(
                tool=reporter_module_name,
                error=f"Reporter {reporter_module_name} failed",
                details=f"```\n{traceback.format_exc()}\n```"
            )
            self.context.errors.append(error)
def flush(self):
    """ Flush """
    # Run reporters
    for reporter_module_name in self.context.reporters:
        reporter = self.context.reporters[reporter_module_name]
        try:
            reporter.flush()
        except:
            log.exception("Reporter %s failed", reporter_module_name)
            error = Error(
                tool=reporter_module_name,
                error=f"Reporter {reporter_module_name} failed",
                details=f"```\n{traceback.format_exc()}\n```"
            )
            self.context.errors.append(error)
def save_intermediates(self, result):
    """ Save scanner intermediates """
    if self.config.get("save_intermediates_to", None):
        log.info("Saving intermediates")
        base = os.path.join(self.config.get("save_intermediates_to"), __name__.split(".")[-2])
        try:
            # Make directory for artifacts
            os.makedirs(base, mode=0o755, exist_ok=True)
            # Save report
            with open(os.path.join(base, "report.json"), "w") as output:
                json.dump(result, output)
        except:
            log.exception("Failed to save intermediates")
def save_intermediates(self, report_xml):
    """ Save scanner intermediates """
    if self.config.get("save_intermediates_to", None):
        log.info("Saving intermediates")
        base = os.path.join(self.config.get("save_intermediates_to"), __name__.split(".")[-2])
        try:
            # Make directory for artifacts
            os.makedirs(base, mode=0o755, exist_ok=True)
            # Save report
            with open(os.path.join(base, "report.xml"), "w") as report:
                report.write(report_xml.decode("utf-8", errors="ignore"))
        except:
            log.exception("Failed to save intermediates")
def unseed(config_seed):
    """ Get config from config seed """
    if ":" not in config_seed:
        log.info("Config seed is empty or invalid, skipping")
        return None
    config_seed_tag = config_seed[:config_seed.find(":")]
    config_seed_data = config_seed[len(config_seed_tag) + 1:]
    try:
        seed = importlib.import_module(f"dusty.tools.seeds.{config_seed_tag}.seed")
        return seed.Seed().handle(config_seed_data)
    except:  # pylint: disable=W0702
        log.exception("Failed to unseed config, skipping seed")
        return None
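# Format sketch: a config seed is "<tag>:<data>", dispatched to
# dusty.tools.seeds.<tag>.seed. Hypothetical example (the available tags
# depend on the modules shipped under dusty.tools.seeds):
#
#   config = unseed("file:/path/to/config.yaml")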
def execute(self):
    """ Run the scanner """
    try:
        self._start_zap()
        if not self._wait_for_zap_start():
            log.error("ZAP failed to start")
            error = Error(
                tool=self.get_name(),
                error="ZAP failed to start",
                details="ZAP daemon failed to start"
            )
            self.errors.append(error)
            return
        log.info("Target: %s", self.config.get("target"))
        self._prepare_context()
        self._setup_scan_policy()
        self._spider()
        self._wait_for_passive_scan()
        self._ajax_spider()
        self._wait_for_passive_scan()
        self._active_scan()
        self._wait_for_passive_scan()
    except:
        log.exception("Exception during ZAP scanning")
        error = Error(
            tool=self.get_name(),
            error="Exception during ZAP scanning",
            details=f"```\n{traceback.format_exc()}\n```"
        )
        self.errors.append(error)
    finally:
        try:
            # Get report
            log.info("Getting ZAP report")
            zap_report = self._zap_api.core.jsonreport()
            # Parse JSON
            log.info("Processing findings")
            parse_findings(zap_report, self)
        except:
            log.exception("Exception during ZAP findings processing")
            error = Error(
                tool=self.get_name(),
                error="Exception during ZAP findings processing",
                details=f"```\n{traceback.format_exc()}\n```"
            )
            self.errors.append(error)
        self._save_intermediates()
        pkg_resources.cleanup_resources()
        self._stop_zap()
def execute(self):
    """ Run the scanner """
    log.debug(f"Config: {self.config}")
    try:
        self._start_zap()
        if not self._wait_for_zap_start():
            log.error("ZAP failed to start")
            self.errors.append("ZAP daemon failed to start")
            return
        log.info("Target: %s", self.config.get("target"))
    except BaseException as exception:
        log.exception("Exception during ZAP scanning")
        self.errors.append(str(exception))
    finally:
        self._stop_zap()
def perform(self):
    """ Perform action """
    log.info("Starting result processing")
    # Collect all scanner results and errors
    for scanner_module_name in self.context.scanners:
        scanner = self.context.scanners[scanner_module_name]
        self.context.results.extend(scanner.get_results())
        self.context.errors[scanner_module_name] = scanner.get_errors()
    # Run processors
    for processor_module_name in self.context.processing:
        processor = self.context.processing[processor_module_name]
        try:
            processor.execute()
        except:
            log.exception("Processor %s failed", processor_module_name)