Exemplo n.º 1
0
 def prepare(self):
     """ Prepare for action: schedule processors from config and defaults """
     log.debug("Preparing")
     config = self.context.config.get("processing")
     # Items with a real (non-bool or truthy) config section
     config_items = [
         item for item in list(config) if not isinstance(config[item], bool) or config[item]
     ]
     # Items explicitly disabled via `item: false` in config
     disabled_items = [
         item for item in list(config) if isinstance(config[item], bool) and not config[item]
     ]
     # Schedule processors: try dependency ordering, fall back to plain order
     try:
         all_processors = dependency.resolve_name_order(
             config_items + [
                 item for item in constants.DEFAULT_PROCESSORS if item not in disabled_items
             ], "dusty.processors.{}.processor", "Processor"
         )
     except Exception:  # ordering is best-effort; was a bare except before
         all_processors = [
             item for item in constants.DEFAULT_PROCESSORS if item not in disabled_items
         ] + config_items
     for processor_name in all_processors:
         try:
             self.schedule_processor(processor_name, dict())
         except Exception:  # one broken processor must not stop the rest
             log.exception("Failed to prepare processor %s", processor_name)
             error = Error(
                 tool=processor_name,
                 error=f"Failed to prepare processor {processor_name}",
                 details=f"```\n{traceback.format_exc()}\n```"
             )
             self.context.errors.append(error)
     # Resolve dependencies once again
     dependency.resolve_depencies(self.context.processors)
Exemplo n.º 2
0
def parse_findings(data, scanner):
    """ Parse findings """
    # Resolve project dependencies (optionally including dev dependencies)
    deps = get_dependencies(
        scanner.config.get("code"), scanner.config.get("add_devdep", False)
    )
    # Run the legacy JSON parser and convert each item into a SastFinding
    for entry in NpmScanParser(data, deps).items:
        details = "\n\n".join([
            entry["description"],
            f"**URL:** {entry['url']}",
            f"**CWE:** {markdown.markdown_escape(entry['cwe'])}",
            f"**References:** {entry['references']}",
            f"**File to review:** {entry['file_path']}"
        ])
        finding = SastFinding(title=entry["title"], description=[details])
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", constants.NPM_SEVERITY_MAPPING[entry["severity"]])
        finding.set_meta("legacy.file", entry["file_path"])
        # Endpoints meta is present but left empty for this scanner
        finding.set_meta("endpoints", list())
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)
Exemplo n.º 3
0
def parse_findings(data, scanner):
    """ Parse findings """
    log.debug("Parsing findings")
    # FIX: use raw strings so "\s" / "\S" reach the regex engine verbatim;
    # non-raw unknown escapes are deprecated (SyntaxWarning/DeprecationWarning)
    item_regex = re.compile(
        "".join([
            r"^(\[\+\] New Finding!!!)$", r"\s*Name: (?P<name>.*)$",
            r"\s*Url: (?P<url>.*)$",
            r"\s*Description: (?P<description>[\s\S]*?)\n\n"
        ]), re.MULTILINE)
    for item in item_regex.finditer(data):
        # Make finding object
        description = list()
        description.append(markdown.markdown_escape(item.group("description")))
        description.append(
            f'\n**URL:** {markdown.markdown_escape(item.group("url"))}')
        description = "\n".join(description)
        finding = DastFinding(title=item.group("name"),
                              description=description)
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", "Info")
        # Endpoints (for backwards compatibility)
        endpoints = list()
        endpoint = url.parse_url(item.group("url"))
        endpoints.append(endpoint)
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        # Done
        scanner.findings.append(finding)
Exemplo n.º 4
0
 def schedule_reporter(self, reporter_name, reporter_config):
     """ Schedule reporter run in current context after all already configured reporters """
     try:
         # Init reporter instance
         reporter = importlib.import_module(
             f"dusty.reporters.{reporter_name}.reporter").Reporter
         if reporter.get_name() in self.context.reporters:
             log.debug("Reporter %s already scheduled", reporter_name)
             return
         # Prepare config: ensure a dict, merge general settings, apply overrides
         config = self.context.config["reporters"]
         if reporter_name not in config or not isinstance(
                 config[reporter_name], dict):
             config[reporter_name] = dict()
         if "reporters" in self.context.config["settings"]:
             general_config = self.context.config["settings"]["reporters"]
             merged_config = general_config.copy()
             merged_config.update(config[reporter_name])
             config[reporter_name] = merged_config
         config[reporter_name].update(reporter_config)
         # Validate config
         reporter.validate_config(config[reporter_name])
         # Add to context
         self.context.reporters[reporter.get_name()] = reporter(
             self.context)
         # Resolve dependencies
         dependency.resolve_depencies(self.context.reporters)
         # Done
         log.debug("Scheduled reporter %s", reporter_name)
     except Exception:  # record the error instead of propagating; was bare except
         log.exception("Failed to schedule reporter %s", reporter_name)
         error = Error(tool=reporter_name,
                       error=f"Failed to schedule reporter {reporter_name}",
                       details=f"```\n{traceback.format_exc()}\n```")
         self.context.errors.append(error)
Exemplo n.º 5
0
 def _start_zap(self):
     """ Start ZAP daemon, create API client """
     log.info("Starting ZAP daemon")
     # Bind to all interfaces unless explicitly disabled
     bind_host = "0.0.0.0" if self.config.get("bind_all_interfaces", True) else "127.0.0.1"
     # Daemon output is discarded unless daemon_debug is set
     daemon_out = sys.stdout if self.config.get("daemon_debug", False) else subprocess.DEVNULL
     zap_home_dir = tempfile.mkdtemp()
     log.debug("ZAP home directory: %s", zap_home_dir)
     zap_command = [
         "/usr/bin/java", self.config.get("java_options", "-Xmx499m"),
         "-jar", constants.ZAP_PATH,
         "-dir", zap_home_dir,
         "-daemon", "-port", "8091", "-host", bind_host,
         "-config", "api.key=dusty",
         "-config", "api.addrs.addr.regex=true",
         "-config", "api.addrs.addr.name=.*",
         "-config", "ajaxSpider.browserId=htmlunit"
     ]
     self._zap_daemon = subprocess.Popen(
         zap_command, stdout=daemon_out, stderr=daemon_out
     )
     # API client talking to the local daemon
     self._zap_api = ZAPv2(
         apikey="dusty",
         proxies={
             "http": "http://127.0.0.1:8091",
             "https": "http://127.0.0.1:8091"
         }
     )
Exemplo n.º 6
0
 def execute(self):
     """ Run the scanner """
     # Discover open ports with a fast pre-scan
     include_ports = list()
     if self.config.get("include_ports", "0-65535"):
         include_ports.append(
             f'-p{self.config.get("include_ports", "0-65535")}')
     exclude_ports = list()
     if self.config.get("exclude_ports", None):
         exclude_ports.append("--exclude-ports")
         exclude_ports.append(f'{self.config.get("exclude_ports")}')
     target_url = url.parse_url(self.config.get("target"))
     task = subprocess.run(
         ["nmap", "-PN"] + include_ports + exclude_ports + [
             "--min-rate", "1000", "--max-retries", "0",
             "--max-rtt-timeout", "200ms", target_url.hostname
         ],
         stdout=subprocess.PIPE,
         stderr=subprocess.PIPE)
     log.log_subprocess_result(task)
     # Use discovered ports.
     # FIX: previous pattern r'([0-9]*/[tcp|udp])' used a character class,
     # so it only matched a single protocol letter (and allowed zero digits);
     # match full "NNN/tcp" / "NNN/udp" pairs instead.
     ports = list()
     tcp_ports = ""
     udp_ports = ""
     for port, proto in re.findall(r"(\d+)/(tcp|udp)", str(task.stdout)):
         if proto == "tcp":
             tcp_ports += f"{port},"
         else:
             udp_ports += f"{port},"
     if tcp_ports:
         ports.append(f"-pT:{tcp_ports[:-1]}")
     if udp_ports:
         ports.append(f"-pU:{udp_ports[:-1]}")
     if not ports:
         log.warning("No open ports found. Exiting")
         return
     # Make temporary files
     output_file_fd, output_file = tempfile.mkstemp()
     log.debug("Output file: %s", output_file)
     os.close(output_file_fd)
     # Scan target with user parameters and NSE scripts
     nmap_parameters = shlex.split(
         self.config.get("nmap_parameters", "-v -sVA"))
     nse_scripts = self.config.get(
         "nse_scripts",
         "ssl-date,http-mobileversion-checker,http-robots.txt,http-title,http-waf-detect,"
         "http-chrono,http-headers,http-comments-displayer,http-date")
     task = subprocess.run(["nmap"] + nmap_parameters + ports + [
         "--min-rate", "1000", "--max-retries", "0",
         f'--script={nse_scripts}', target_url.hostname, "-oX", output_file
     ],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
     log.log_subprocess_result(task)
     # Parse findings
     parse_findings(output_file, self)
     # Save intermediates
     self.save_intermediates(output_file, task)
     # Remove temporary files
     os.remove(output_file)
Exemplo n.º 7
0
def parse_findings(data, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser; report may be invalid for non-Rails code
    try:
        findings = BrakemanParser(data).items
    except:  # pylint: disable=W0702
        log.exception("Failed to parse brakeman report")
        log.warning(
            "Possibly ruby code path is invalid or not Ruby-on-Rails application"
        )
        findings = list()
    # Make finding instances
    for entry in findings:
        location = (
            f"**File to review:** {markdown.markdown_escape(entry['file_path'])}"
            f":{entry['line']}"
        )
        description = "\n\n".join([
            markdown.markdown_escape(entry['description']),
            f"**References:** {markdown.markdown_escape(entry['references'])}",
            location
        ])
        finding = SastFinding(title=entry["title"], description=[description])
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta(
            "severity", constants.BRAKEMAN_SEVERITY_MAPPING[entry["severity"]])
        finding.set_meta("legacy.file", entry["file_path"])
        finding.set_meta("legacy.line", entry["line"])
        finding.set_meta(
            "endpoints",
            [namedtuple("Endpoint", ["raw"])(raw=entry["file_path"])])
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)
Exemplo n.º 8
0
 def _depot_read_config_object(self, obj):
     result = dict()
     if obj is None:
         return result
     data = depots.get_object(self.context, obj)
     if data is None:
         return result
     try:
         self.context.set_meta("depots_resolved_secrets", 0)
         result = self._depot_substitution(
             self._variable_substitution(
                 yaml.load(
                     os.path.expandvars(data),
                     Loader=yaml.FullLoader
                 )
             )
         )
         log.info("Loaded %s from depots", obj)
         log.debug(
             "Resolved %d object secrets from depots",
             self.context.get_meta("depots_resolved_secrets", 0)
         )
         return result
     except:
         return result
Exemplo n.º 9
0
def parse_findings(result, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser and build SastFinding instances
    for entry in NodeJsScanParser(result).items:
        location = (
            f"**File to review:** {markdown.markdown_escape(entry['file_path'])}"
            f":{entry['line']}"
        )
        finding = SastFinding(
            title=entry["title"],
            description=[
                "\n\n".join([
                    markdown.markdown_escape(entry['description']),
                    location
                ])
            ] + entry["steps_to_reproduce"]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", entry["severity"])
        finding.set_meta("legacy.file", entry["file_path"])
        finding.set_meta("legacy.line", entry["line"])
        endpoints = list()
        if entry["file_path"]:
            endpoints.append(
                namedtuple("Endpoint", ["raw"])(raw=entry["file_path"]))
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)
Exemplo n.º 10
0
def parse_findings(data, scanner):
    """ Parse findings """
    try:
        # Convert each parsed gitleaks item into a SastFinding
        for entry in GitleaksScanParser(data, scanner).items:
            description = entry["description"]
            # Prepend optional user-configured text
            extra_text = scanner.config.get("additional_text", None)
            if extra_text:
                description = extra_text + "\n\n" + description
            body = "\n\n".join([
                description,
                f"**File to review:** {markdown.markdown_escape(entry['file_path'])}"
            ])
            finding = SastFinding(title=entry["title"], description=[body])
            finding.set_meta("tool", scanner.get_name())
            finding.set_meta("severity", entry["severity"])
            finding.set_meta("legacy.file", entry["file_path"])
            endpoints = list()
            if entry["file_path"]:
                endpoints.append(Endpoint(raw=entry["file_path"]))
            finding.set_meta("endpoints", endpoints)
            log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
            scanner.findings.append(finding)
    except:  # pylint: disable=W0702
        log.exception("Failed to parse findings")
Exemplo n.º 11
0
def parse_findings(filename, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser and build SastFinding instances
    for entry in DependencyCheckParser(filename).items:
        summary = "\n\n".join([
            "Vulnerable dependency was found. Please upgrade component or check that vulnerable functionality is not used.",  # pylint: disable=C0301
            markdown.markdown_escape(entry["description"]),
            f"**File to review:** {markdown.markdown_escape(entry['file_path'])}"
        ])
        finding = SastFinding(
            title=entry["title"],
            description=[summary] + entry["steps_to_reproduce"])
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", entry["severity"])
        finding.set_meta("legacy.file", entry["file_path"])
        endpoints = list()
        if entry["file_path"]:
            endpoints.append(
                namedtuple("Endpoint", ["raw"])(raw=entry["file_path"]))
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)
Exemplo n.º 12
0
def get_project_priorities(jira_client, project, issue_type="Bug"):
    """ Returns list of Jira priorities in project """
    try:
        meta = jira_client.createmeta(projectKeys=project,
                                      issuetypeNames=issue_type,
                                      expand="projects.issuetypes.fields")
        logging.debug("Got metadata for %d projects", len(meta["projects"]))
        projects = meta["projects"]
        if not projects:
            logging.error("No meta returned for %s with type %s", project,
                          issue_type)
            return []
        issuetypes = projects[0]["issuetypes"]
        logging.debug("Got metadata for %d issuetypes", len(issuetypes))
        if not issuetypes:
            logging.error("No %s in %s", issue_type, project)
            return []
        fields = issuetypes[0]["fields"]
        if "priority" not in fields:
            logging.error("No priority field in %s", project)
            return []
        # Collect the names of all priorities allowed for this issue type
        return [
            priority["name"]
            for priority in fields["priority"]["allowedValues"]
        ]
    except:  # pylint: disable=W0702
        logging.exception("Failed to get meta for %s", project)
        return []
Exemplo n.º 13
0
 def execute(self):
     """ Run the scanner """
     # Prepare parameters
     target_url = url.parse_url(self.config.get("target"))
     nikto_parameters = shlex.split(self.config.get("nikto_parameters", ""))
     # Make temporary files
     output_file_fd, output_file = tempfile.mkstemp()
     log.debug("Output file: %s", output_file)
     os.close(output_file_fd)
     # Prepare -Save option if needed
     save_findings = list()
     if self.config.get("save_intermediates_to", None):
         base = os.path.join(self.config.get("save_intermediates_to"), __name__.split(".")[-2])
         try:
             os.makedirs(base, mode=0o755, exist_ok=True)
             save_findings.append("-Save")
             save_findings.append(base)
         except OSError:  # best-effort: scan proceeds without -Save (was bare except)
             log.debug("Failed to create intermediates directory %s", base)
     # Run scanner
     task = subprocess.run(["perl", "nikto.pl"] + nikto_parameters + [
         "-h", target_url.hostname, "-p", url.get_port(target_url),
         "-Format", "xml", "-output", output_file
     ] + save_findings, cwd="/opt/nikto/program", stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     log.log_subprocess_result(task)
     # Parse findings
     parse_findings(output_file, self)
     # Save intermediates
     self.save_intermediates(output_file, task)
     # Remove temporary files
     os.remove(output_file)
Exemplo n.º 14
0
def prepare_jira_mapping(jira_service):
    """ Make Jira mapping (for projects that are using custom values) """
    if not jira_service or not jira_service.valid:
        return dict()
    jira_service.connect()
    # Use configured issue type, falling back to "Bug" for the placeholder
    issue_type = jira_service.fields["issuetype"]["name"]
    if issue_type == "!default_issuetype":
        issue_type = "Bug"
    project_priorities = get_project_priorities(jira_service.client,
                                                jira_service.project,
                                                issue_type)
    if not project_priorities:
        jira_service.client.close()
        return dict()
    logging.debug("%s %s priorities: %s", jira_service.project, issue_type,
                  str(project_priorities))
    mapping = dict()
    for severity in const.JIRA_SEVERITIES:
        # Severities already present in the project need no mapping
        if severity in project_priorities:
            continue
        # Try known alternatives, in order of preference
        for alternative in const.JIRA_ALTERNATIVES[severity]:
            if alternative in project_priorities:
                logging.warning("Mapping %s %s Jira priority: %s -> %s",
                                jira_service.project, issue_type, severity,
                                alternative)
                mapping[severity] = alternative
                break
        if severity not in mapping:
            # No alternative matched: fall back to the first project priority
            mapping[severity] = project_priorities[0]
            logging.error(
                "Failed to find Jira mapping for %s, using %s as a fallback",
                severity, mapping[severity])
    jira_service.client.close()
    return mapping
Exemplo n.º 15
0
def parse_findings(filename, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser and build SastFinding instances
    for entry in SpotbugsParser(filename).items:
        location = (
            f"**File to review:** {markdown.markdown_escape(entry['file_path'])}"
            f":{entry['line']}"
        )
        description = "\n\n".join([
            entry["description"],
            f"**Category:** {markdown.markdown_escape(entry['category'])}",
            location
        ])
        finding = SastFinding(
            title=entry["title"],
            description=[description] + entry["steps_to_reproduce"]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", constants.SPOTBUGS_SEVERITIES[entry["severity"]])
        finding.set_meta("legacy.file", entry["file_path"])
        finding.set_meta("legacy.line", entry["line"])
        endpoints = list()
        if entry["file_path"]:
            endpoints.append(namedtuple("Endpoint", ["raw"])(raw=entry["file_path"]))
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)
Exemplo n.º 16
0
def parse_findings(output_file, scanner):  # pylint: disable=E,W,R,C
    """ Parse findings (code from dusty 1.0) """
    # Parse HTML report using legacy parser
    filtered_statuses = scanner.config.get(
        "filtered_statuses", constants.PTAI_DEFAULT_FILTERED_STATUSES)
    if isinstance(filtered_statuses, str):
        filtered_statuses = [
            part.strip() for part in filtered_statuses.split(",")
        ]
    for entry in PTAIScanParser(output_file, filtered_statuses).items:
        # Collapse the report's deep indentation and escape for markdown + HTML
        cleaned = entry["description"].replace(
            "                        ", "")
        main_chunk = (
            html.escape(markdown.markdown_escape(cleaned)) +
            f"\n\n**File to review:** {markdown.markdown_escape(entry['file_path'])}"
        )
        steps = [html.escape(step) for step in entry["steps_to_reproduce"]]
        finding = SastFinding(
            title=entry["title"], description=[main_chunk] + steps)
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", constants.PTAI_SEVERITIES[entry["severity"]])
        finding.set_meta("legacy.file", entry["file_path"])
        finding.set_meta(
            "endpoints",
            [namedtuple("Endpoint", ["raw"])(raw=entry["file_path"])])
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)
Exemplo n.º 17
0
 def _setup_scan_policy(self):
     """ Configure the ZAP scan policy.

     Uses a user-provided policy (inline data or file path) when configured;
     otherwise builds a 'simple' policy from the comma-separated "scan_types"
     option and disables globally blacklisted rules.
     """
     self._scan_policy_name = "Default Policy"
     # Use user-provided policy (if any)
     if self.config.get("scan_policy_data", None) or self.config.get(
             "scan_policy_from", None):
         log.info("Using user-provided scan policy")
         # Write to temp file if needed (inline data takes precedence)
         if self.config.get("scan_policy_data", None):
             policy_file_fd, policy_file = tempfile.mkstemp()
             os.close(policy_file_fd)
             with open(policy_file, "w") as policy:
                 log.debug("Scan policy data: '%s'",
                           self.config.get("scan_policy_data"))
                 policy.write(self.config.get("scan_policy_data"))
         else:
             policy_file = self.config.get("scan_policy_from")
         # Load policy into ZAP; snapshot policy names before import so the
         # newly added name can be identified by set difference below
         default_policies = self._zap_api.ascan.scan_policy_names
         log.info("Importing scan policy from %s", policy_file)
         self._zap_api.ascan.import_scan_policy(policy_file)
         current_policies = self._zap_api.ascan.scan_policy_names
         log.debug("Policies after load: %s", current_policies)
         # Remove temporary file
         if self.config.get("scan_policy_data", None):
             os.remove(policy_file)
         # Set name: whatever policy appeared after loading
         loaded_policy_names = list(
             set(current_policies) - set(default_policies))
         if loaded_policy_names:
             self._scan_policy_name = loaded_policy_names[0]
             log.info("Scan policy set to '%s'", self._scan_policy_name)
         return
     # Setup 'simple' scan policy
     self._scan_policies = [
         item.strip()
         for item in self.config.get("scan_types", "all").split(",")
     ]
     # Disable globally blacklisted rules (both active and passive scanners)
     for item in constants.ZAP_BLACKLISTED_RULES:
         self._zap_api.ascan.set_scanner_alert_threshold(
             id=item,
             alertthreshold="OFF",
             scanpolicyname=self._scan_policy_name)
         self._zap_api.pscan.set_scanner_alert_threshold(
             id=item, alertthreshold="OFF")
     if "all" not in self._scan_policies:
         # Disable all scanners first
         for item in self._zap_api.ascan.scanners(self._scan_policy_name):
             self._zap_api.ascan.set_scanner_alert_threshold(
                 id=item["id"],
                 alertthreshold="OFF",
                 scanpolicyname=self._scan_policy_name)
         # Enable scanners from suite
         # NOTE(review): constant name "ZAP_SCAN_POCILICES" looks misspelled
         # ("POLICIES") -- verify against the constants module before renaming
         for policy in self._scan_policies:
             for item in constants.ZAP_SCAN_POCILICES.get(policy, []):
                 self._zap_api.ascan.set_scanner_alert_threshold(
                     id=item,
                     alertthreshold="DEFAULT",
                     scanpolicyname=self._scan_policy_name)
Exemplo n.º 18
0
def parse_findings(filename, scanner):
    """ Parse findings """
    # Load JSON (explicit encoding; was platform-default)
    try:
        with open(filename, "r", encoding="utf-8") as file:
            data = json.load(file)
    except:  # pylint: disable=W0702
        log.exception("Failed to load report JSON")
        return
    # Load CWE id -> title map shipped with the scanner module
    cwe_map = json.loads(
        pkg_resources.resource_string(
            "dusty",
            f"{'/'.join(__name__.split('.')[1:-1])}/data/cwe_map_v4.2.json"))
    # Parse JSON
    if not isinstance(data, dict) or "vulnerabilities" not in data:
        log.info("No data in report")
        return
    # Make finding instances
    for item in data["vulnerabilities"]:
        vuln_severity = cvss_to_severity(item.get("cvss", 0.0))
        vuln_cwe = item.get("cwe", "Vulnerability")
        # Title: CWE name (falling back to the raw CWE id) plus location
        vuln_cwe_title = cwe_map.get(vuln_cwe, vuln_cwe)
        vuln_file_title = f" in {item.get('classMessage')}" if "classMessage" in item else ""
        vuln_title = f"{vuln_cwe_title}{vuln_file_title}"
        # "classMessage" looks like "path (extra)"; keep only the path part
        vuln_file = item.get("classMessage", "").rsplit(" (", 1)[0]
        #
        vuln_info_chunks = list()
        if "longMessage" in item:
            vuln_info_chunks.append(
                markdown.markdown_escape(item["longMessage"]))
        if "shortMessage" in item:
            vuln_info_chunks.append(
                markdown.markdown_escape(item["shortMessage"]))
        # FIX: guard optional keys; previously a missing classMessage/method
        # raised KeyError mid-report
        if "classMessage" in item:
            vuln_info_chunks.append(
                f"**Class:** {markdown.markdown_escape(item['classMessage'])}")
        if "method" in item:
            vuln_info_chunks.append(
                f"**Method:** {markdown.markdown_escape(item['method'])}")
        if "affectedFiles" in item:
            vuln_info_chunks.append(
                f"**Files:** {markdown.markdown_escape(', '.join(item['affectedFiles']))}"
            )
        #
        finding = SastFinding(title=vuln_title,
                              description=["\n\n".join(vuln_info_chunks)])
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", vuln_severity)
        finding.set_meta("legacy.file", vuln_file)
        endpoints = list()
        if vuln_file:
            endpoints.append(namedtuple("Endpoint", ["raw"])(raw=vuln_file))
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)
Exemplo n.º 19
0
 def load(self, config_seed, config_variable, config_file, suite):
     """ Load and parse config """
     # Read raw config from seed/variable/file sources
     raw_config = self._load_config(config_seed, config_variable, config_file)
     if not self._validate_config_base(raw_config, suite):
         raise ValueError("Invalid config")
     # Narrow config down to the requested suite, then publish into context
     context_config = self._prepare_context_config(raw_config, suite)
     self.context.suite = suite
     self.context.config = context_config
     log.debug("Resulting context config: %s", self.context.config)
     log.info("Loaded %s suite configuration", self.context.suite)
Exemplo n.º 20
0
def html_to_text(html, escape=True):
    """ Convert HTML to markdown """
    # Extract plain text; fall back to empty string on any parser failure
    try:
        text = inscriptis.get_text(html, display_links=True)
    except:  # pylint: disable=W0702
        log.debug("Exception during HTML to text conversion\n%s",
                  traceback.format_exc())
        text = ""
    # Optionally escape the result for markdown output
    return markdown_escape(text) if escape else text
Exemplo n.º 21
0
 def execute(self, args):
     """ Run the command.

     Orchestrates a full run: builds context and config, prepares
     reporters/scanners/processors, performs them in order, then saves
     state and applies the quality gate.
     """
     log.debug("Starting")
     if args.call_from_legacy:
         log.warning("Called from legacy entry point")
     # Init context
     context = RunContext(args)
     config = ConfigModel(context)
     # Listing mode: show available suites and exit without running anything
     if args.list_suites or not args.suite:
         suites = config.list_suites(args.config_seed, args.config_variable,
                                     args.config_file)
         if not args.suite:
             log.error("Suite is not defined. Use --help to get help")
         log.info("Available suites: %s", ", ".join(suites))
         return
     # Make instances
     scanning = ScanningPerformer(context)
     processing = ProcessingPerformer(context)
     reporting = ReportingPerformer(context)
     # Add to context
     context.performers["scanning"] = scanning
     context.performers["processing"] = processing
     context.performers["reporting"] = reporting
     # Init config
     config.load(args.config_seed, args.config_variable, args.config_file,
                 args.suite)
     scanning.validate_config(context.config)
     processing.validate_config(context.config)
     reporting.validate_config(context.config)
     # Add meta to context
     self._fill_context_meta(context)
     # Load state
     context.state.load()
     # Prepare reporters first (before actions and other performers)
     reporting.prepare()
     # Run actions
     actions.run(context)
     # Prepare scanning and processing
     scanning.prepare()
     processing.prepare()
     # Perform: scanning -> processing -> reporting, in that order
     scanning.perform()
     processing.perform()
     reporting.perform()
     # Done
     context.state.save()
     reporting.flush()
     log.debug("Done")
     # Show quality gate statistics if any
     for line in context.get_meta("quality_gate_stats", list()):
         log.info(line)
     # Fail quality gate if needed; os._exit skips cleanup handlers on purpose
     if context.get_meta("fail_quality_gate", False):
         os._exit(1)  # pylint: disable=W0212
Exemplo n.º 22
0
 def get_object(self, key):
     """ Get object by key """
     # Read the object from the configured base path; any failure yields None
     try:
         base_path = self.config.get("path")
         log.debug("Trying to get object '%s' (path = '%s')", key, base_path)
         with open(os.path.join(base_path, key), "rb") as file:
             return file.read()
     except:  # pylint: disable=W0702
         log.debug("Got exception: %s", traceback.format_exc())
         return None
Exemplo n.º 23
0
def main():
    """ Main """
    # Shared options available to every sub-command
    common = argparse.ArgumentParser(add_help=False)
    common.add_argument(
        "-d", "--debug",
        dest="log_level",
        help="enable debug output",
        action="store_const",
        const=DEBUG,
        default=INFO)
    common.add_argument(
        "--call-from-legacy",
        dest="call_from_legacy",
        help=argparse.SUPPRESS,
        action="store_true",
        default=False)
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    subparsers = parser.add_subparsers(
        dest="command",
        title="commands",
        help="command to execute, use <command> -h to get command help")
    subparsers.required = True
    # Discover command modules and register a sub-parser for each
    commands = dict()
    for _, name, pkg in pkgutil.iter_modules(dusty.commands.__path__):
        if pkg:
            continue
        module = importlib.import_module("dusty.commands.{}".format(name))
        argparser = subparsers.add_parser(
            module.Command.get_name(),
            parents=[common],
            help=module.Command.get_description(),
            formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        commands[module.Command.get_name()] = module.Command(argparser)
    # Parse arguments
    args = parser.parse_args()
    # Setup logging
    log.init(args.log_level)
    # Display welcome message
    log.info(f"Dusty {pkg_resources.require('dusty')[0].version} is starting")
    log.debug("Loaded commands: {}".format(", ".join(list(commands.keys()))))
    # Install interrupt signal handler
    signal.signal(signal.SIGINT, interrupt_handler)
    # Run selected command; any failure is logged and exits with status 1
    try:
        commands[args.command].execute(args)
    except:
        log.exception("Command failed")
        os._exit(1)  # pylint: disable=W0212
    # Display bye-bye message
    log.info("All done. Have a nice day")
Exemplo n.º 24
0
 def _request_raw(self, endpoint, json=None):
     """ Perform API request (directly) """
     api = self._connection
     target = f"{self.server}{endpoint}"
     # GET when there is no payload, POST otherwise
     if json is None:
         response = api.get(target, timeout=self.timeout)
     else:
         response = api.post(target, json=json, timeout=self.timeout)
     log.debug("API response: %d [%s] %s", response.status_code,
               response.headers, response.text)
     return response
Exemplo n.º 25
0
def parse_findings(output_file, scanner):
    """ Parse findings (code from dusty 1.0) """
    log.debug("Parsing findings")
    # Load the tool's JSON report
    with open(output_file, "rb") as json_file:
        results = json.load(json_file)
    tool_name = scanner.get_name()
    lowest_severity = SEVERITIES[-1]
    # One DAST finding per reported open port
    for issue in results:
        title = f'Open port {issue["ports"][0]["port"]} found on {issue["ip"]}'
        finding = DastFinding(
            title=title,
            description=markdown.markdown_escape(title),
        )
        finding.set_meta("tool", tool_name)
        finding.set_meta("severity", lowest_severity)
        scanner.findings.append(finding)
Exemplo n.º 26
0
 def execute(self):
     """ Run the scanner: start the ZAP daemon, wait for it, always stop it.

     Failures are recorded in self.errors instead of being propagated.
     """
     log.debug(f"Config: {self.config}")
     try:
         self._start_zap()
         if not self._wait_for_zap_start():
             # Daemon never came up: record the error and bail out
             # (finally still stops whatever was started)
             log.error("ZAP failed to start")
             self.errors.append("ZAP daemon failed to start")
             return
         log.info("Target: %s", self.config.get("target"))
     except BaseException as exception:
         # BaseException (not Exception) — presumably deliberate so even
         # KeyboardInterrupt/SystemExit are recorded before cleanup; confirm
         log.exception("Exception during ZAP scanning")
         self.errors.append(str(exception))
     finally:
         # Daemon must be stopped on every path, including errors
         self._stop_zap()
Exemplo n.º 27
0
 def execute(self):
     """ Run the scanner: invoke insider and collect its report.

     The tool is executed in a scratch directory; its single output file
     is parsed into findings and saved as an intermediate artifact.
     """
     # Scratch directory for the tool to write its report into
     output_dir = tempfile.mkdtemp()
     log.debug("Output directory: %s", output_dir)
     # Invoke the tool (shell=False list form)
     command = [
         "insider", "-force", "-no-html",
         "-target", self.config.get("code"),
         "-tech", self.config.get("tech"),
     ]
     task = subprocess.run(
         command, cwd=output_dir,
         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
     )
     log.log_subprocess_result(task)
     # NOTE(review): assumes insider left exactly one file in output_dir;
     # an empty directory would raise IndexError here — confirm tool behavior
     output_file = os.path.join(output_dir, os.listdir(output_dir)[0])
     parse_findings(output_file, self)
     # Save intermediates
     self.save_intermediates(output_file, task)
Exemplo n.º 28
0
 def execute(self):
     """ Run the scanner: invoke retire.js and collect its JSON report. """
     # Scratch directory and the report path we tell the tool to write to
     output_dir = tempfile.mkdtemp()
     log.debug("Output directory: %s", output_dir)
     output_file = os.path.join(output_dir, "retirejs.json")
     # Invoke retire.js (--exitwith=0 keeps the exit code clean on findings)
     command = [
         "retire",
         f"--jspath={self.config.get('code')}",
         "--outputformat=json",
         f"--outputpath={output_file}",
         "--includemeta",
         "--exitwith=0",
     ]
     task = subprocess.run(
         command, cwd=output_dir,
         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
     )
     log.log_subprocess_result(task)
     # Parse the report and keep it as an intermediate artifact
     parse_findings(output_file, self)
     self.save_intermediates(output_file, task)
Exemplo n.º 29
0
 def report(self):
     """ Report: push every non-filtered finding to Azure DevOps.

     Skips findings that a processor marked as false-positive,
     informational, or excluded.
     """
     # Fix: announce BEFORE creating the findings — the original logged
     # "Creating findings" only after the loop had already finished
     log.info("Creating findings")
     for item in self.context.findings:
         # Skip findings filtered out by processing steps
         if item.get_meta("false_positive_finding", False) or \
                 item.get_meta("information_finding", False) or \
                 item.get_meta("excluded_finding", False):
             continue
         # Render the description to HTML depending on the finding kind
         details = ''
         if isinstance(item, DastFinding):
             details = markdown.markdown_to_html(item.description)
         elif isinstance(item, SastFinding):
             details = markdown.markdown_to_html("<br/>".join(item.description))
         # create_finding returns the created item payload; log it for tracing
         log.debug(self.ado.create_finding(
             item.title, details,
             item.get_meta("severity", SEVERITIES[-1]),
             assignee=self.assignee,
             issue_hash=item.get_meta("issue_hash", ""),
         ))
Exemplo n.º 30
0
 def execute(self):
     """ Run the scanner: resolve the target to an IP, scan it with
     masscan, parse the JSON report into findings, and clean up.
     """
     # Get target host IP
     target_url = url.parse_url(self.config.get("target"))
     host = target_url.hostname
     if not url.find_ip(host):
         # Hostname is not an IP literal: resolve it via getent and pull
         # the first IP out of its stdout
         task = subprocess.run(["getent", "hosts", host],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
         log.log_subprocess_result(task)
         host = url.find_ip(task.stdout.decode("utf-8", errors="ignore"))
         if host:
             # find_ip returned matches; keep the first one
             host = host[0].strip()
     if not host:
         # Resolution failed (no matches or empty getent output): give up
         log.warning("No target IP found. Exiting")
         return
     # Prepare config
     # NOTE(review): because of the "0-65535" default, this branch is taken
     # unless include_ports is explicitly configured falsy — confirm intent
     include_ports = list()
     if self.config.get("include_ports", "0-65535"):
         # Scan both TCP (-p) and UDP (-pU:) over the configured range
         include_ports.append("-p")
         include_ports.append(
             f'{self.config.get("include_ports", "0-65535")}')
         include_ports.append(
             f'-pU:{self.config.get("include_ports", "0-65535")}')
     exclude_ports = list()
     if self.config.get("exclude_ports", None):
         exclude_ports.append("--exclude-ports")
         exclude_ports.append(f'{self.config.get("exclude_ports")}')
     # Make temporary files
     output_file_fd, output_file = tempfile.mkstemp()
     log.debug("Output file: %s", output_file)
     # Close our fd immediately; masscan opens the path itself via -oJ
     os.close(output_file_fd)
     # Scan target (-oJ = JSON output, fixed rate of 1000 packets/sec)
     task = subprocess.run(["masscan", host] + include_ports + [
         "--rate",
         "1000",
         "-oJ",
         output_file,
     ] + exclude_ports,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
     log.log_subprocess_result(task)
     # Parse findings
     parse_findings(output_file, self)
     # Save intermediates
     self.save_intermediates(output_file, task)
     # Remove temporary files
     os.remove(output_file)