Example #1
 def execute(self):
     """ Run the scanner """
     try:
         self._start_zap()
         if not self._wait_for_zap_start():
             log.error("ZAP failed to start")
             error = Error(
                 tool=self.get_name(),
                 error="ZAP failed to start",
                 details="ZAP daemon failed to start"
             )
             self.errors.append(error)
             return
         log.info("Target: %s", self.config.get("target"))
         self._prepare_context()
         self._setup_scan_policy()
         self._spider()
         self._wait_for_passive_scan()
         self._ajax_spider()
         self._wait_for_passive_scan()
         self._active_scan()
         self._wait_for_passive_scan()
     except:
         log.exception("Exception during ZAP scanning")
         error = Error(
             tool=self.get_name(),
             error=f"Exception during ZAP scanning",
             details=f"```\n{traceback.format_exc()}\n```"
         )
         self.errors.append(error)
     finally:
         try:
             # Get report
             log.info("Getting ZAP report")
             zap_report = self._zap_api.core.jsonreport()
             # Parse JSON
             log.info("Processing findings")
             parse_findings(zap_report, self)
         except:
             log.exception("Exception during ZAP findings processing")
             error = Error(
                 tool=self.get_name(),
                 error=f"Exception during ZAP findings processing",
                 details=f"```\n{traceback.format_exc()}\n```"
             )
             self.errors.append(error)
         self._save_intermediates()
         pkg_resources.cleanup_resources()
         self._stop_zap()
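A note on the pattern above: failures are recorded rather than raised, so one broken stage cannot abort the whole run. A minimal, runnable sketch of that error-collection idiom, assuming Error is a simple record type (here a hypothetical namedtuple stand-in for the project's own error model):

import traceback
from collections import namedtuple

Error = namedtuple("Error", ["tool", "error", "details"])

class ScannerSketch:
    """ Collects failures instead of raising them """

    def __init__(self):
        self.errors = list()

    def execute(self):
        try:
            raise RuntimeError("scan failed")  # placeholder for real scan stages
        except:  # pylint: disable=W0702
            self.errors.append(Error(
                tool="sketch",
                error="Exception during scanning",
                details=f"```\n{traceback.format_exc()}\n```",
            ))
        finally:
            pass  # report retrieval and cleanup would go here

scanner = ScannerSketch()
scanner.execute()
print(scanner.errors[0].error)  # -> Exception during scanning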
Example #2
 def connect(self):
     """ Establish connection to SMTP server """
     try:
         self.connection = smtplib.SMTP(self.server,
                                        self.port,
                                        timeout=self.timeout)
         self.connection.ehlo()
         self.connection.starttls(context=ssl.create_default_context())
         self.connection.ehlo()
         self.connection.login(self.login, self.password)
     except ssl.SSLError:
         log.warning("SSL error, retrying with unverified SSL context")
         self.connection = smtplib.SMTP(self.server,
                                        self.port,
                                        timeout=self.timeout)
         self.connection.ehlo()
         self.connection.starttls(context=ssl._create_unverified_context())  # pylint: disable=W0212
         self.connection.ehlo()
         self.connection.login(self.login, self.password)
     except smtplib.SMTPServerDisconnected:
         log.warning(
             "Seems like SMTP with TSL didn't work, trying with SMTP_SSL")
         self.connection = smtplib.SMTP_SSL(host=self.server,
                                            port=self.port,
                                            timeout=self.timeout)
         self.connection.ehlo()
         self.connection.login(self.login, self.password)
     except:  # pylint: disable=W0702
         log.exception("Failed to connect to SMTP server")
         error = Error(tool="EMail",
                       error="Failed to connect to SMTP server",
                       details=f"```\n{traceback.format_exc()}\n```")
         self.context.errors.append(error)
         if self.connection:
             self.connection.quit()
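The same fallback chain, condensed into a standalone helper. This is a sketch, not the project's API: server, port, and credentials are whatever your SMTP endpoint needs, and the explicit ehlo() calls are dropped because smtplib's starttls() and login() perform EHLO automatically when needed.

import smtplib
import ssl

def connect_with_fallback(server, port, login, password, timeout=30):
    """ Try STARTTLS, then unverified STARTTLS, then implicit SSL """
    try:
        connection = smtplib.SMTP(server, port, timeout=timeout)
        connection.starttls(context=ssl.create_default_context())
    except ssl.SSLError:
        # Certificate validation failed: retry without verification
        connection = smtplib.SMTP(server, port, timeout=timeout)
        connection.starttls(context=ssl._create_unverified_context())  # pylint: disable=W0212
    except smtplib.SMTPServerDisconnected:
        # Server likely expects implicit SSL (e.g. port 465), not STARTTLS
        connection = smtplib.SMTP_SSL(host=server, port=port, timeout=timeout)
    connection.login(login, password)
    return connection

# connection = connect_with_fallback("smtp.example.com", 587, "user", "secret")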
Example #3
 def execute(self):
     """ Run the scanner """
     path = self.config.get("code")
     # Collect reports to parse
     reports = list()
     if os.path.isdir(path):
         for root, _, files in os.walk(path):
             for name in files:
                 reports.append(os.path.join(root, name))
     else:
         reports.append(path)
         if self.config.get("mail_report", True):
             if self.config.get("rename_mail_attachment", True):
                 filename = self.config.get(
                     "rename_pattern",
                     "PTAI_{project_name}_{testing_type}_{scan_type}_{build_id}.html"
                 ).format(**self.context.meta)
                 attachment = (path, filename)
                 self.set_meta("report_file", attachment)
             else:
                 self.set_meta("report_file", path)
     # Parse reports
     for report in reports:
         try:
             parse_findings(report, self)
         except:
             error = f"Failed to parse PT AI report {report}"
             log.exception(error)
             self.errors.append(
                 Error(tool=self.get_name(),
                       error=error,
                       details=f"```\n{traceback.format_exc()}\n```"))
Example #4
 def schedule_reporter(self, reporter_name, reporter_config):
     """ Schedule reporter run in current context after all already configured reporters """
     try:
         # Init reporter instance
         reporter = importlib.import_module(
             f"dusty.reporters.{reporter_name}.reporter").Reporter
         if reporter.get_name() in self.context.reporters:
             log.debug("Reporter %s already scheduled", reporter_name)
             return
         # Prepare config
         config = self.context.config["reporters"]
         if reporter_name not in config or not isinstance(
                 config[reporter_name], dict):
             config[reporter_name] = dict()
         if "reporters" in self.context.config["settings"]:
             general_config = self.context.config["settings"]["reporters"]
             merged_config = general_config.copy()
             merged_config.update(config[reporter_name])
             config[reporter_name] = merged_config
         config[reporter_name].update(reporter_config)
         # Validate config
         reporter.validate_config(config[reporter_name])
         # Add to context
         self.context.reporters[reporter.get_name()] = reporter(
             self.context)
         # Resolve dependencies
         dependency.resolve_depencies(self.context.reporters)
         # Done
         log.debug("Scheduled reporter %s", reporter_name)
     except:
         log.exception("Failed to schedule reporter %s", reporter_name)
         error = Error(tool=reporter_name,
                       error=f"Failed to schedule reporter {reporter_name}",
                       details=f"```\n{traceback.format_exc()}\n```")
         self.context.errors.append(error)
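The reporter class is located dynamically: importlib.import_module imports the module by its dotted name, and the class is read off it as an attribute. The same idiom in isolation, using a stdlib module in place of a real reporter package:

import importlib

def load_class(module_name, class_name):
    """ Import a module by dotted name and return one attribute from it """
    module = importlib.import_module(module_name)
    return getattr(module, class_name)

decoder_class = load_class("json", "JSONDecoder")
print(decoder_class)  # -> <class 'json.decoder.JSONDecoder'>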
Example #5
 def perform(self):
     """ Perform action """
     log.info("Starting processing")
     # Run processors
     performed = set()
     perform_processing_iteration = True
     while perform_processing_iteration:
         perform_processing_iteration = False
         for processor_module_name in list(self.context.processors):
             if processor_module_name in performed:
                 continue
             performed.add(processor_module_name)
             perform_processing_iteration = True
             processor = self.context.processors[processor_module_name]
             try:
                 processor.execute()
             except:
                 log.exception("Processor %s failed", processor_module_name)
                 error = Error(
                     tool=processor_module_name,
                     error=f"Processor {processor_module_name} failed",
                     details=f"```\n{traceback.format_exc()}\n```"
                 )
                 self.context.errors.append(error)
             self.context.errors.extend(processor.get_errors())
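Processors may schedule new processors while executing, which is why the loop above repeats until a full pass performs nothing new. The same fixed-point pattern in miniature, with plain callables standing in for processor objects:

# "first" schedules "second" while running, so a second pass is needed
processors = {"first": lambda: processors.setdefault("second", lambda: None)}

performed = set()
keep_going = True
while keep_going:
    keep_going = False
    for name in list(processors):  # list() snapshots keys before mutation
        if name in performed:
            continue
        performed.add(name)
        keep_going = True
        processors[name]()

print(sorted(performed))  # -> ['first', 'second']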
Example #6
 def prepare(self):
     """ Prepare for action """
     log.debug("Preparing")
     config = self.context.config.get("processing")
     config_items = [
         item for item in list(config) if not isinstance(config[item], bool) or config[item]
     ]
     disabled_items = [
         item for item in list(config) if isinstance(config[item], bool) and not config[item]
     ]
     # Schedule processors
     try:
         all_processors = dependency.resolve_name_order(
             config_items + [
                 item for item in constants.DEFAULT_PROCESSORS if item not in disabled_items
             ], "dusty.processors.{}.processor", "Processor"
         )
     except:
         all_processors = [
             item for item in constants.DEFAULT_PROCESSORS if item not in disabled_items
         ] + config_items
     for processor_name in all_processors:
         try:
             self.schedule_processor(processor_name, dict())
         except:
             log.exception("Failed to prepare processor %s", processor_name)
             error = Error(
                 tool=processor_name,
                 error=f"Failed to prepare processor {processor_name}",
                 details=f"```\n{traceback.format_exc()}\n```"
             )
             self.context.errors.append(error)
     # Resolve dependencies once again
     dependency.resolve_depencies(self.context.processors)
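The filtering at the top of prepare() treats a boolean False as "disable this item" and anything else (True or a config dict) as "enable it". The same comprehensions on a plain dict (item names are illustrative):

config = {
    "min_severity_filter": {"severity": "Medium"},
    "false_positive": False,
    "quality_gate": True,
}
config_items = [
    item for item in list(config)
    if not isinstance(config[item], bool) or config[item]
]
disabled_items = [
    item for item in list(config)
    if isinstance(config[item], bool) and not config[item]
]
print(config_items)    # -> ['min_severity_filter', 'quality_gate']
print(disabled_items)  # -> ['false_positive']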
Example #7
 def flush(self):
     """ Flush """
     # Run reporters
     for reporter_module_name in self.context.reporters:
         reporter = self.context.reporters[reporter_module_name]
         try:
             reporter.flush()
         except:
             log.exception("Reporter %s failed", reporter_module_name)
             error = Error(tool=reporter_module_name,
                           error=f"Reporter {reporter_module_name} failed",
                           details=f"```\n{traceback.format_exc()}\n```")
             self.context.errors.append(error)
Example #8
 def on_scanner_finish(self, scanner):
     """ Called when scanner ends """
     # Run reporters
     for reporter_module_name in self.context.reporters:
         reporter = self.context.reporters[reporter_module_name]
         try:
             reporter.on_scanner_finish(scanner)
         except:
             log.exception("Reporter %s failed", reporter_module_name)
             error = Error(tool=reporter_module_name,
                           error=f"Reporter {reporter_module_name} failed",
                           details=f"```\n{traceback.format_exc()}\n```")
             self.context.errors.append(error)
Example #9
 def schedule_scanner(self, scanner_type, scanner_name, scanner_config):
     """ Schedule scanner run in current context after all already configured scanners """
     try:
         # Init scanner instance
         scanner = importlib.import_module(
             f"dusty.scanners.{scanner_type}.{scanner_name}.scanner"
         ).Scanner
         if scanner.get_name() in self.context.scanners:
             log.debug("Scanner %s.%s already scheduled", scanner_type,
                       scanner_name)
             return
         # Prepare config
         config = self.context.config["scanners"]
         if scanner_type not in config:
             config[scanner_type] = dict()
         if scanner_name not in config[scanner_type] or \
                 not isinstance(config[scanner_type][scanner_name], dict):
             config[scanner_type][scanner_name] = dict()
         general_config = dict()
         if "settings" in self.context.config:
             general_config = self.context.config["settings"]
         if scanner_type in general_config:
             merged_config = general_config[scanner_type].copy()
             merged_config.update(config[scanner_type][scanner_name])
             config[scanner_type][scanner_name] = merged_config
         config[scanner_type][scanner_name].update(scanner_config)
         # Validate config
         scanner.validate_config(config[scanner_type][scanner_name])
         # Add to context
         scanner = scanner(self.context)
         self.context.scanners[scanner.get_name()] = scanner
         # Resolve dependencies
         dependency.resolve_depencies(self.context.scanners)
         # Prepare scanner
         scanner.prepare()
         # Done
         log.debug("Scheduled scanner %s.%s", scanner_type, scanner_name)
     except:
         log.exception("Failed to schedule %s scanner %s", scanner_type,
                       scanner_name)
         error = Error(
             tool=f"{scanner_type}.{scanner_name}",
             error=f"Failed to schedule {scanner_type} scanner {scanner_name}",
             details=f"```\n{traceback.format_exc()}\n```")
         self.context.errors.append(error)
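Config precedence in schedule_scanner, shown on plain dicts: type-wide settings are the base, the per-scanner section overrides them, and the scanner_config argument wins last. Values are invented:

general_config = {"target": "http://global", "timeout": 30}
scanner_section = {"timeout": 60}
scanner_config = {"target": "http://local"}

merged = general_config.copy()
merged.update(scanner_section)
merged.update(scanner_config)
print(merged)  # -> {'target': 'http://local', 'timeout': 60}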
Example #10
 def send_with_cc(self,
                  mail_to,
                  mail_cc,
                  subject,
                  html_body="",
                  attachments=None):  # pylint: disable=R0913
     """ Send mail """
     message = MIMEMultipart("alternative")
     message["From"] = self.login
     message["To"] = ", ".join(mail_to)
     message["Cc"] = ", ".join(mail_cc)
     message["Subject"] = subject
     message.attach(MIMEText(html_body, "html"))
     if attachments:
         if isinstance(attachments, str):
             attachments = [attachments]
         for item in attachments:
             if isinstance(item, tuple):
                 filepath, filename = item
             else:
                 filepath = item
                 filename = item.split('/')[-1]
             with open(filepath, "rb") as file:
                 part = MIMEBase("application", "octet-stream")
                 part.set_payload(file.read())
             encoders.encode_base64(part)
             part.add_header("Content-Disposition",
                             f"attachment; filename= {filename}")
             message.attach(part)
     try:
         self.connect()
         self.connection.sendmail(message["From"], mail_to + mail_cc,
                                  message.as_string())
     except:  # pylint: disable=W0702
         log.exception("Failed to send email")
         error = Error(tool="EMail",
                       error="Failed to send email",
                       details=f"```\n{traceback.format_exc()}\n```")
         self.context.errors.append(error)
     finally:
         if self.connection:
             self.connection.quit()
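The attachment handling above, exercised without an SMTP server: build the multipart message, base64-encode one file part, and attach it. Addresses and file content are placeholders:

import os
import tempfile
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

message = MIMEMultipart("alternative")
message["From"] = "dusty@example.com"
message["To"] = "team@example.com"
message["Subject"] = "Scan report"
message.attach(MIMEText("<p>Report attached</p>", "html"))

with tempfile.NamedTemporaryFile(suffix=".html", delete=False) as report:
    report.write(b"<html>findings</html>")
    filepath = report.name

with open(filepath, "rb") as file:
    part = MIMEBase("application", "octet-stream")
    part.set_payload(file.read())
encoders.encode_base64(part)
part.add_header("Content-Disposition", "attachment; filename=report.html")
message.attach(part)
os.remove(filepath)
print(len(message.as_string()), "bytes ready to send")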
Example #11
 def prepare(self):
     """ Prepare for action """
     log.debug("Preparing")
     config = self.context.config["scanners"]
     # Schedule scanners
     for scanner_type in list(config):
         for scanner_name in list(config[scanner_type]):
             if isinstance(config[scanner_type][scanner_name], bool) and \
                     not config[scanner_type][scanner_name]:
                 continue
             try:
                 self.schedule_scanner(scanner_type, scanner_name, dict())
             except:
                 log.exception("Failed to prepare %s scanner %s",
                               scanner_type, scanner_name)
                 error = Error(
                     tool=f"{scanner_type}.{scanner_name}",
                     error=f"Failed to prepare {scanner_type} scanner {scanner_name}",
                     details=f"```\n{traceback.format_exc()}\n```")
                 self.context.errors.append(error)
     # Resolve dependencies once again
     dependency.resolve_depencies(self.context.scanners)
Example #12
 def prepare(self):
     """ Prepare for action """
     log.debug("Preparing")
     config = self.context.config["reporters"]
     config_items = [
         item for item in list(config)
         if not isinstance(config[item], bool) or config[item]
     ]
     disabled_items = [
         item for item in list(config)
         if isinstance(config[item], bool) and not config[item]
     ]
     # Schedule reporters
     try:
         all_reporters = dependency.resolve_name_order(
             config_items + [
                 item for item in constants.DEFAULT_REPORTERS
                 if item not in disabled_items
             ], "dusty.reporters.{}.reporter", "Reporter")
     except:
         all_reporters = [
             item for item in constants.DEFAULT_REPORTERS
             if item not in disabled_items
         ] + config_items
     for reporter_name in all_reporters:
         try:
             self.schedule_reporter(reporter_name, dict())
         except:
             log.exception("Failed to prepare reporter %s", reporter_name)
             error = Error(
                 tool=reporter_name,
                 error=f"Failed to prepare reporter {reporter_name}",
                 details=f"```\n{traceback.format_exc()}\n```")
             self.context.errors.append(error)
     # Resolve dependencies once again
     dependency.resolve_depencies(self.context.reporters)
Example #13
 def report(self):
     """ Report """
     log.info("Starting reporting")
     # Run reporters
     performed = set()
     perform_report_iteration = True
     while perform_report_iteration:
         perform_report_iteration = False
         for reporter_module_name in list(self.context.reporters):
             if reporter_module_name in performed:
                 continue
             performed.add(reporter_module_name)
             perform_report_iteration = True
             reporter = self.context.reporters[reporter_module_name]
             try:
                 reporter.report()
             except:
                 log.exception("Reporter %s failed", reporter_module_name)
                 error = Error(
                     tool=reporter_module_name,
                     error=f"Reporter {reporter_module_name} failed",
                     details=f"```\n{traceback.format_exc()}\n```")
                 self.context.errors.append(error)
             self.context.errors.extend(reporter.get_errors())
Example #14
def parse_findings(output_file, scanner):
    """ Parse findings (code from dusty 1.0) """
    log.debug("Parsing findings")
    nscan = parse(output_file)
    root = nscan.getroot()
    # Check validity
    if "nmaprun" not in root.tag:
        log.error(
            "Exception during Nmap findings processing: invalid XML file")
        error = Error(
            tool=scanner.get_name(),
            error=f"Exception during Nmap findings processing",
            details=f"Output file doesn't seem to be a valid Nmap xml file.")
        scanner.errors.append(error)
        return
    dupes = dict()
    hostInfo = ""
    for host in root.iter("host"):
        ip = host.find("address[@addrtype='ipv4']").attrib["addr"]
        fqdn = None
        if host.find("hostnames/hostname[@type='PTR']") is not None:
            fqdn = host.find("hostnames/hostname[@type='PTR']").attrib["name"]
        #
        for os in root.iter("os"):
            if ip is not None:
                hostInfo += "IP Address: %s\n" % ip
            if fqdn is not None:
                fqdn += "FQDN: %s\n" % ip
            for osv in os.iter("osmatch"):
                if "name" in osv.attrib:
                    hostInfo += "Host OS: %s\n" % osv.attrib["name"]
                if "accuracy" in osv.attrib:
                    hostInfo += "Accuracy: {0}%\n".format(
                        osv.attrib["accuracy"])
            hostInfo += "\n"
        #
        xpath_port_selector = "ports/port[state/@state='open']"
        if scanner.config.get("include_unfiltered", False):
            xpath_port_selector = "ports/port[state/@state=('open','unfiltered')]"
        #
        for portelem in elementpath.select(host, xpath_port_selector):
            port = portelem.attrib["portid"]
            protocol = portelem.attrib["protocol"]
            #
            title = f"Open port: {ip}:{port}/{protocol}"
            description = hostInfo
            description += f"Port: {port}\n"
            serviceinfo = ""
            #
            if portelem.find("service") is not None:
                if "product" in portelem.find("service").attrib:
                    serviceinfo += "Product: %s\n" % portelem.find(
                        "service").attrib["product"]
                #
                if "version" in portelem.find("service").attrib:
                    serviceinfo += "Version: %s\n" % portelem.find(
                        "service").attrib["version"]
                #
                if "extrainfo" in portelem.find("service").attrib:
                    serviceinfo += "Extra Info: %s\n" % portelem.find(
                        "service").attrib["extrainfo"]
                #
                description += serviceinfo
            #
            description += "\n\n"
            #
            dupe_key = f"{port}_{protocol}_{ip}"
            if dupe_key in dupes:
                find = dupes[dupe_key]
                if description is not None:
                    find["description"] += description
            else:
                find = {
                    "title": title,
                    "description": description,
                    "endpoints": list()
                }
                find["endpoints"].append(f"{ip}:{port}/{protocol}")
                dupes[dupe_key] = find
    # Create finding objects
    for item in dupes.values():
        finding = DastFinding(title=item["title"],
                              description=markdown.markdown_escape(
                                  item["description"]))
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", SEVERITIES[-1])
        # Endpoints (for backwards compatibility)
        endpoints = list()
        for entry in item["endpoints"]:
            endpoint = url.parse_url(entry)
            if endpoint in endpoints:
                continue
            endpoints.append(endpoint)
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        # Done
        scanner.findings.append(finding)
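The dedup logic in the middle of the parser, reduced to its core: observations sharing a port/protocol/ip key merge by concatenating descriptions; everything else starts a new finding. Sample data is invented:

dupes = dict()
observations = [
    ("10.0.0.1", "80", "tcp", "Product: nginx\n"),
    ("10.0.0.1", "80", "tcp", "Version: 1.18\n"),
    ("10.0.0.1", "443", "tcp", "Product: nginx\n"),
]
for ip, port, protocol, description in observations:
    dupe_key = f"{port}_{protocol}_{ip}"
    if dupe_key in dupes:
        dupes[dupe_key]["description"] += description
    else:
        dupes[dupe_key] = {
            "title": f"Open port: {ip}:{port}/{protocol}",
            "description": description,
            "endpoints": [f"{ip}:{port}/{protocol}"],
        }
print(len(dupes))  # -> 2, the first finding carrying a merged description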
Example #15
 def report_multi(self):
     """ Report """
     wrappers_config = dict()
     wrappers_config[None] = self.config
     for key, value in self.config.get("dynamic_jira").items():
         wrappers_config[re.compile(key)] = value
     #
     wrappers = dict()
     for wrapper_key, wrapper_config in wrappers_config.items():
         wrapper = JiraWrapper(
             wrapper_config.get("url"),
             wrapper_config.get("username"),
             wrapper_config.get("password"),
             wrapper_config.get("project"),
             wrapper_config.get("fields"),
         )
         #
         if not wrapper.valid:
             # Save default mapping to meta as a fallback
             default_mapping = constants.JIRA_SEVERITY_MAPPING
             default_mapping.update(
                 self.config.get("custom_mapping", dict()))
             self.set_meta("mapping", default_mapping)
             # Report error
             log.error(
                 "Jira configuration is invalid. Skipping Jira reporting")
             raise RuntimeError("Jira configuration is invalid")
         #
         wrappers[wrapper_key] = dict()
         wrappers[wrapper_key]["wrapper"] = wrapper
         wrappers[wrapper_key]["config"] = wrapper_config
         #
         if wrapper_config.get("separate_epic_linkage", False) and \
                 "Epic Link" in wrapper_config.get("fields"):
             wrappers[wrapper_key]["epic_link"] = wrapper_config.get(
                 "fields").pop("Epic Link")
         #
         wrappers[wrapper_key]["raw_epic_link"] = None
         if wrapper_config.get("separate_epic_linkage", False):
             wrappers[wrapper_key]["raw_epic_link"] = wrappers[wrapper_key][
                 "epic_link"]
         elif "Epic Link" in wrapper_config.get("fields"):
             wrappers[wrapper_key]["raw_epic_link"] = wrapper_config.get(
                 "fields")["Epic Link"]
         #
         wrappers[wrapper_key]["priority_mapping"] = wrapper_config.get(
             "custom_mapping", prepare_jira_mapping(wrapper))
         wrappers[wrapper_key]["mapping_meta"] = dict(
             wrappers[wrapper_key]["priority_mapping"])
         #
     self.set_meta("wrapper", wrappers[None]["wrapper"])
     self.set_meta("raw_epic_link", wrappers[None]["raw_epic_link"])
     #
     dynamic_label_mapping = dict()
     if self.config.get("dynamic_labels", None):
         try:
             for key, value in self.config.get("dynamic_labels").items():
                 dynamic_label_mapping[re.compile(key)] = value
         except:  # pylint: disable=W0702
             log.exception("Failed to add dynamic label mapping")
     #
     dynamic_field_mapping = dict()
     if self.config.get("dynamic_fields", None):
         try:
             for key, value in self.config.get("dynamic_fields").items():
                 dynamic_field_mapping[re.compile(key)] = value
         except:  # pylint: disable=W0702
             log.exception("Failed to add dynamic field mapping")
     #
     findings = list()
     for item in self.context.findings:  # pylint: disable=R1702
         #
         if item.get_meta("information_finding", False) or \
                 item.get_meta("false_positive_finding", False) or \
                 item.get_meta("excluded_finding", False):
             continue
         #
         if isinstance(item, (DastFinding, SastFinding)):
             #
             dynamic_labels = list()
             dynamic_fields = list()
             dynamic_wrapper = wrappers[None]
             #
             for endpoint in item.get_meta("endpoints", list()):
                 #
                 for pattern, addon_label in dynamic_label_mapping.items():
                     try:
                         if pattern.match(endpoint.raw):
                             dynamic_labels.append(addon_label)
                     except:  # pylint: disable=W0702
                         log.exception("Failed to add dynamic label")
                 #
                 for pattern, addon_fields in dynamic_field_mapping.items():
                     try:
                         if pattern.match(endpoint.raw):
                             dynamic_fields.append(addon_fields)
                     except:  # pylint: disable=W0702
                         log.exception("Failed to add dynamic field")
                 #
                 for pattern, addon_jira in wrappers.items():
                     if pattern is None:
                         continue
                     try:
                         if pattern.match(endpoint.raw):
                             dynamic_wrapper = addon_jira
                     except:  # pylint: disable=W0702
                         log.exception("Failed to add dynamic JIRA")
             #
             severity = item.get_meta("severity", SEVERITIES[-1])
             priority = constants.JIRA_SEVERITY_MAPPING[severity]
             if dynamic_wrapper["priority_mapping"] and \
                     priority in dynamic_wrapper["priority_mapping"]:
                 priority = dynamic_wrapper["priority_mapping"][priority]
             dynamic_wrapper["mapping_meta"][severity] = priority
             #
             if isinstance(item, DastFinding):
                 findings.append({
                     "title": item.title,
                     "priority": priority,
                     "description": item.description.replace("\\.", "."),
                     "issue_hash": item.get_meta("issue_hash", "<no_hash>"),
                     "additional_labels": [
                         label.replace(" ", "_") for label in [
                             item.get_meta("tool", "scanner"),
                             self.context.get_meta("testing_type", "DAST"),
                             item.get_meta("severity", SEVERITIES[-1])
                         ]
                     ] + dynamic_labels,
                     "dynamic_fields": dynamic_fields,
                     "raw": item,
                     "wrapper": dynamic_wrapper,
                 })
             elif isinstance(item, SastFinding):
                 #
                 description_chunks = [
                     item.replace("\\.", ".").replace(
                         "<pre>", "{code:collapse=true}\n\n").replace(
                             "</pre>",
                             "\n\n{code}").replace("<br />", "\n")
                     for item in item.description
                 ]
                 #
                 if len("\n\n".join(description_chunks)
                        ) > constants.JIRA_DESCRIPTION_MAX_SIZE:
                     description = description_chunks[0]
                     chunks = description_chunks[1:]
                     comments = list()
                     new_line_str = '  \n  \n'
                     for chunk in chunks:
                         if not comments or (
                                 len(comments[-1]) + len(new_line_str) + len(chunk)
                             ) >= \
                                 constants.JIRA_COMMENT_MAX_SIZE:
                             comments.append(cut_jira_comment(chunk))
                         else:  # Last comment can handle one more chunk
                             comments[
                                 -1] += new_line_str + cut_jira_comment(
                                     chunk)
                 else:
                     description = "\n\n".join(description_chunks)
                     comments = list()
                 #
                 findings.append({
                     "title": item.title,
                     "priority": priority,
                     "description": description,
                     "issue_hash": item.get_meta("issue_hash", "<no_hash>"),
                     "additional_labels": [
                         label.replace(" ", "_") for label in [
                             item.get_meta("tool", "scanner"),
                             self.context.get_meta("testing_type", "SAST"),
                             item.get_meta("severity", SEVERITIES[-1])
                         ]
                     ] + dynamic_labels,
                     "dynamic_fields": dynamic_fields,
                     "comments": comments,
                     "raw": item,
                     "wrapper": dynamic_wrapper,
                 })
             #
         #
         else:
             log.warning("Unsupported finding type")
             continue  # raise ValueError("Unsupported item type")
     #
     self.set_meta("mapping", wrappers[None]["mapping_meta"])
     #
     for finding in findings:
         if finding["wrapper"]["config"].get("max_description_size", False):
             if len(finding["description"]) > \
                     int(finding["wrapper"]["config"].get("max_description_size")):
                 if "comments" not in finding:
                     finding["comments"] = list()
                 #
                 comment_chunks = list()
                 cut_line_len = len(constants.JIRA_DESCRIPTION_CUT)
                 cut_point = int(finding["wrapper"]["config"].get(
                     "max_description_size")) - cut_line_len
                 #
                 item_description = finding["description"]
                 finding["description"] = \
                     f"{item_description[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                 #
                 description_data = item_description[cut_point:]
                 comment_cut_threshold = min(
                     constants.JIRA_COMMENT_MAX_SIZE,
                     int(finding["wrapper"]["config"].get(
                         "max_description_size")))
                 cut_point = comment_cut_threshold - cut_line_len
                 #
                 while description_data:
                     if len(description_data) > comment_cut_threshold:
                         comment_chunks.append(
                             f"{description_data[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                         )
                         description_data = description_data[cut_point:]
                     else:
                         comment_chunks.append(description_data)
                         break
                 #
                 while comment_chunks:
                     finding["comments"].insert(0, comment_chunks.pop())
     #
     findings.sort(key=lambda item: (
         SEVERITIES.index(item["raw"].get_meta("severity", SEVERITIES[-1])),
         item["raw"].get_meta("tool", ""),
         item["raw"].title))
     #
     new_tickets = list()
     existing_tickets = list()
     #
     for _, local_wrapper in wrappers.items():
         local_wrapper["wrapper"].connect()
     #
     for finding in findings:
         try:
             config_labels = finding["wrapper"]["config"].get(
                 "additional_labels", None)
             if config_labels is None:
                 config_labels = list()
             if not isinstance(config_labels, list):
                 config_labels = [
                     item.strip() for item in config_labels.split(",")
                 ]
             #
             field_overrides = dict()
             for dynamic_field in finding["dynamic_fields"]:
                 field_overrides.update(dynamic_field)
             #
             issue, created = finding["wrapper"]["wrapper"].create_issue(
                 finding["title"],  # title
                 finding["priority"],  # priority
                 finding["description"],  # description
                 finding["issue_hash"],  # issue_hash, self.get_hash_code()
                 # attachments=None,
                 # get_or_create=True,
                 additional_labels=finding["additional_labels"] +
                 config_labels,  # additional_labels  # pylint: disable=C0301
                 field_overrides=field_overrides,
             )
             if created and "comments" in finding:
                 for comment in finding["comments"]:
                     finding["wrapper"]["wrapper"].add_comment_to_issue(
                         issue, comment)
             if created and finding["wrapper"]["config"].get(
                     "separate_epic_linkage", False):
                 try:
                     finding["wrapper"][
                         "wrapper"].client.add_issues_to_epic(
                             finding["wrapper"]["epic_link"],
                             [str(issue.key)])
                 except:  # pylint: disable=W0702
                     log.exception("Failed to add ticket %s to epic %s",
                                   str(issue.key),
                                   finding["wrapper"]["epic_link"])
             try:
                 result_priority = str(issue.fields.priority)
             except:  # pylint: disable=W0702
                 result_priority = "Default"
             #
             ticket_meta = {
                 "jira_id": issue.key,
                 "jira_url": f"{finding['wrapper']['config'].get('url')}/browse/{issue.key}",  # pylint: disable=C0301
                 "priority": result_priority,
                 "status": issue.fields.status.name,
                 "created": issue.fields.created,
                 "open_date": datetime.strptime(
                     issue.fields.created,
                     "%Y-%m-%dT%H:%M:%S.%f%z").strftime("%d %b %Y %H:%M"),
                 "description": issue.fields.summary,
                 "assignee": str(issue.fields.assignee),
                 "raw_created": str(issue.fields.created),
                 "raw_severity": finding["raw"].get_meta("severity", SEVERITIES[-1]),
                 "raw_jira_url": finding["wrapper"]["config"].get("url"),
                 "raw_jira_project": finding["wrapper"]["config"].get("project"),
                 "raw_jira_epic": finding["wrapper"]["raw_epic_link"],
                 "raw_jira_fields": finding["wrapper"]["config"].get("fields"),
                 "raw_addon_fields": field_overrides,
                 "raw_addon_labels": finding["additional_labels"] + config_labels,
             }
             if created:
                 if not self._ticket_in_list(ticket_meta, new_tickets):
                     new_tickets.append(ticket_meta)
             else:
                 if issue.fields.status.name in constants.JIRA_OPENED_STATUSES:
                     if not self._ticket_in_list(ticket_meta,
                                                 existing_tickets):
                         existing_tickets.append(ticket_meta)
         except:  # pylint: disable=W0702
             log.exception(
                 f"Failed to create ticket for {finding['title']}")
             error = Error(
                 tool=self.get_name(),
                 error=f"Failed to create ticket for {finding['title']}",
                 details=f"```\n{traceback.format_exc()}\n```")
             self.errors.append(error)
     #
     self.set_meta("new_tickets", new_tickets)
     self.set_meta("existing_tickets", existing_tickets)
Example #16
 def report(self):
     """ Report """
     # Remove "Epic Link" from fields if requested
     if self.config.get("separate_epic_linkage", False) and \
             "Epic Link" in self.config.get("fields"):
         epic_link = self.config.get("fields").pop("Epic Link")
     # Prepare wrapper
     log.info("Creating legacy wrapper instance")
     wrapper = JiraWrapper(self.config.get("url"),
                           self.config.get("username"),
                           self.config.get("password"),
                           self.config.get("project"),
                           self.config.get("fields"))
     if not wrapper.valid:
         # Save default mapping to meta as a fallback
         default_mapping = constants.JIRA_SEVERITY_MAPPING
         default_mapping.update(self.config.get("custom_mapping", dict()))
         self.set_meta("mapping", default_mapping)
         # Report error
         log.error("Jira configuration is invalid. Skipping Jira reporting")
         raise RuntimeError("Jira configuration is invalid")
     log.debug("Legacy wrapper is valid")
     # Prepare findings
     priority_mapping = self.config.get("custom_mapping",
                                        prepare_jira_mapping(wrapper))
     mapping_meta = dict(priority_mapping)
     findings = list()
     for item in self.context.findings:
         if item.get_meta("information_finding", False) or \
                 item.get_meta("false_positive_finding", False) or \
                 item.get_meta("excluded_finding", False):
             continue
         if isinstance(item, DastFinding):
             severity = item.get_meta("severity", SEVERITIES[-1])
             priority = constants.JIRA_SEVERITY_MAPPING[severity]
             if priority_mapping and priority in priority_mapping:
                 priority = priority_mapping[priority]
             # Update meta mapping to reflect actual results
             mapping_meta[severity] = priority
             findings.append({
                 "title": item.title,
                 "priority": priority,
                 "description": item.description.replace("\\.", "."),
                 "issue_hash": item.get_meta("issue_hash", "<no_hash>"),
                 "additional_labels": [
                     label.replace(" ", "_") for label in [
                         item.get_meta("tool", "scanner"),
                         self.context.get_meta("testing_type", "DAST"),
                         item.get_meta("severity", SEVERITIES[-1])
                     ]
                 ],
                 "raw": item
             })
         elif isinstance(item, SastFinding):
             severity = item.get_meta("severity", SEVERITIES[-1])
             priority = constants.JIRA_SEVERITY_MAPPING[severity]
             if priority_mapping and priority in priority_mapping:
                 priority = priority_mapping[priority]
             # Update meta mapping to reflect actual results
             mapping_meta[severity] = priority
             description_chunks = [
                 item.replace("\\.", ".").replace(
                     "<pre>", "{code:collapse=true}\n\n").replace(
                         "</pre>", "\n\n{code}").replace("<br />", "\n")
                 for item in item.description
             ]
             if len("\n\n".join(description_chunks)
                    ) > constants.JIRA_DESCRIPTION_MAX_SIZE:
                 description = description_chunks[0]
                 chunks = description_chunks[1:]
                 comments = list()
                 new_line_str = '  \n  \n'
                 for chunk in chunks:
                     if not comments or (len(comments[-1]) + len(new_line_str) + len(chunk)) >= \
                             constants.JIRA_COMMENT_MAX_SIZE:
                         comments.append(cut_jira_comment(chunk))
                     else:  # Last comment can handle one more chunk
                         comments[-1] += new_line_str + cut_jira_comment(
                             chunk)
             else:
                 description = "\n\n".join(description_chunks)
                 comments = list()
             findings.append({
                 "title": item.title,
                 "priority": priority,
                 "description": description,
                 "issue_hash": item.get_meta("issue_hash", "<no_hash>"),
                 "additional_labels": [
                     label.replace(" ", "_") for label in [
                         item.get_meta("tool", "scanner"),
                         self.context.get_meta("testing_type", "SAST"),
                         item.get_meta("severity", SEVERITIES[-1])
                     ]
                 ],
                 "comments": comments,
                 "raw": item
             })
         else:
             log.warning("Unsupported finding type")
             continue  # raise ValueError("Unsupported item type")
     # Cut description if length above configured limit
     if self.config.get("max_description_size", False):
         for finding in findings:
             if len(finding["description"]) > int(
                     self.config.get("max_description_size")):
                 if "comments" not in finding:
                     finding["comments"] = list()
                 #
                 comment_chunks = list()
                 cut_line_len = len(constants.JIRA_DESCRIPTION_CUT)
                 cut_point = int(
                     self.config.get("max_description_size")) - cut_line_len
                 #
                 item_description = finding["description"]
                 finding["description"] = \
                     f"{item_description[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                 #
                 description_data = item_description[cut_point:]
                 comment_cut_threshold = min(
                     constants.JIRA_COMMENT_MAX_SIZE,
                     int(self.config.get("max_description_size")))
                 cut_point = comment_cut_threshold - cut_line_len
                 #
                 while description_data:
                     if len(description_data) > comment_cut_threshold:
                         comment_chunks.append(
                             f"{description_data[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                         )
                         description_data = description_data[cut_point:]
                     else:
                         comment_chunks.append(description_data)
                         break
                 #
                 while comment_chunks:
                     finding["comments"].insert(0, comment_chunks.pop())
     # Sort findings by severity-tool-title
     findings.sort(key=lambda item: (
         SEVERITIES.index(item["raw"].get_meta("severity", SEVERITIES[-1])),
         item["raw"].get_meta("tool", ""),
         item["raw"].title))
     # Submit issues
     wrapper.connect()
     new_tickets = list()
     existing_tickets = list()
     for finding in findings:
         try:
             issue, created = wrapper.create_issue(
                 finding["title"],  # title
                 finding["priority"],  # priority
                 finding["description"],  # description
                 finding["issue_hash"],  # issue_hash, self.get_hash_code()
                 # attachments=None,
                 # get_or_create=True,
                 additional_labels=finding[
                     "additional_labels"]  # additional_labels
             )
             if created and "comments" in finding:
                 for comment in finding["comments"]:
                     wrapper.add_comment_to_issue(issue, comment)
             if created and self.config.get("separate_epic_linkage", False):
                 try:
                     wrapper.client.add_issues_to_epic(
                         epic_link, [str(issue.key)])
                 except:  # pylint: disable=W0702
                     log.exception("Failed to add ticket %s to epic %s",
                                   str(issue.key), epic_link)
             try:
                 result_priority = issue.fields.priority
             except:  # pylint: disable=W0702
                 result_priority = "Default"
             ticket_meta = {
                 "jira_id": issue.key,
                 "jira_url": f"{self.config.get('url')}/browse/{issue.key}",
                 "priority": result_priority,
                 "status": issue.fields.status.name,
                 "created": issue.fields.created,
                 "open_date": datetime.strptime(
                     issue.fields.created,
                     "%Y-%m-%dT%H:%M:%S.%f%z").strftime("%d %b %Y %H:%M"),
                 "description": issue.fields.summary,
                 "assignee": issue.fields.assignee
             }
             if created:
                 if not self._ticket_in_list(ticket_meta, new_tickets):
                     new_tickets.append(ticket_meta)
             else:
                 if issue.fields.status.name in constants.JIRA_OPENED_STATUSES:
                     if not self._ticket_in_list(ticket_meta,
                                                 existing_tickets):
                         existing_tickets.append(ticket_meta)
         except:  # pylint: disable=W0702
             log.exception(
                 f"Failed to create ticket for {finding['title']}")
             error = Error(
                 tool=self.get_name(),
                 error=f"Failed to create ticket for {finding['title']}",
                 details=f"```\n{traceback.format_exc()}\n```")
             self.errors.append(error)
     self.set_meta("new_tickets", new_tickets)
     self.set_meta("existing_tickets", existing_tickets)
     self.set_meta("mapping", mapping_meta)
Example #17
 def perform(self):
     """ Perform action """
     log.info("Starting scanning")
     reporting = self.context.performers.get("reporting", None)
     # Create executors
     executor = dict()
     settings = self.context.config["settings"]
     for scanner_type in self.context.config["scanners"]:
         max_workers = settings.get(scanner_type, dict()).get(
             "max_concurrent_scanners", 1)
         executor[scanner_type] = concurrent.futures.ThreadPoolExecutor(
             max_workers=max_workers)
         log.info("Made %s executor with %d workers", scanner_type.upper(),
                  max_workers)
     # Starting scanning
     if reporting:
         reporting.on_start()
     # Submit scanners
     futures = list()
     future_map = dict()
     future_dep_map = dict()
     for item in self.context.scanners:
         scanner = self.context.scanners[item]
         scanner_type = scanner.__class__.__module__.split(".")[-3]
         scanner_module = scanner.__class__.__module__.split(".")[-2]
         dependencies = list()
         for dep in scanner.depends_on() + scanner.run_after():
             if dep in future_dep_map:
                 dependencies.append(future_dep_map[dep])
         future = executor[scanner_type].submit(
             self._execute_scanner, scanner, dependencies)
         future_dep_map[scanner_module] = future
         future_map[future] = item
         futures.append(future)
     # Wait for executors to start and finish
     started = set()
     finished = set()
     while True:
         # Check for started executors
         for future in futures:
             if future not in started and (future.running()
                                           or future.done()):
                 item = future_map[future]
                 scanner = self.context.scanners[item]
                 if not scanner.get_meta("meta_scanner", False):
                     log.info(
                         f"Started {item} ({scanner.get_description()})")
                     if reporting:
                         reporting.on_scanner_start(item)
                 # Add to started set
                 started.add(future)
         # Check for finished executors
         for future in futures:
             if future not in finished and future.done():
                 item = future_map[future]
                 try:
                     future.result()
                 except:
                     log.exception("Scanner %s failed", item)
                     error = Error(
                         tool=item,
                         error=f"Scanner {item} failed",
                         details=f"```\n{traceback.format_exc()}\n```")
                     self.context.errors.append(error)
                 # Collect scanner findings and errors
                 scanner = self.context.scanners[item]
                 scanner_type = scanner.__class__.__module__.split(".")[-3]
                 for result in scanner.get_findings():
                     result.set_meta("scanner_type", scanner_type)
                     self.context.findings.append(result)
                 for error in scanner.get_errors():
                     error.set_meta("scanner_type", scanner_type)
                     self.context.errors.append(error)
                 if not scanner.get_meta("meta_scanner", False):
                     if reporting:
                         reporting.on_scanner_finish(item)
                 # Add to finished set
                 finished.add(future)
         # Exit if all executors done
         if self._all_futures_done(futures):
             break
         # Sleep for some short time
         time.sleep(constants.EXECUTOR_STATUS_CHECK_INTERVAL)
     # All scanners completed
     if reporting:
         reporting.on_finish()
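The executor setup above in stripped-down form: one thread pool per scanner type, scanners submitted as futures, and a polling loop that reports completion. Scanner names and intervals are illustrative:

import concurrent.futures
import time

def run_scanner(name):
    time.sleep(0.1)  # stands in for real scanning work
    return f"{name} done"

executor = {
    "dast": concurrent.futures.ThreadPoolExecutor(max_workers=2),
    "sast": concurrent.futures.ThreadPoolExecutor(max_workers=1),
}
future_map = dict()
for scanner_type, name in [("dast", "zap"), ("sast", "bandit")]:
    future = executor[scanner_type].submit(run_scanner, name)
    future_map[future] = name

finished = set()
while len(finished) < len(future_map):
    for future in future_map:
        if future not in finished and future.done():
            print(future_map[future], "->", future.result())
            finished.add(future)
    time.sleep(0.05)

for pool in executor.values():
    pool.shutdown()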
Example #18
 def execute(self):  # pylint: disable=R0912
     """ Run the scanner """
     helper = QualysHelper(self.context,
                           self.config.get("qualys_api_server"),
                           self.config.get("qualys_login"),
                           self.config.get("qualys_password"),
                           retries=self.config.get("retries", 20),
                           retry_delay=self.config.get("retry_delay", 60.0),
                           timeout=self.config.get("timeout", 240))
     log.info("Qualys WAS version: %s", helper.get_version())
     timestamp = datetime.utcfromtimestamp(int(
         time())).strftime("%Y-%m-%d %H:%M:%S")
     sleep_interval = self.config.get("sleep_interval", 15.0)
     status_check_interval = self.config.get("status_check_interval", 90.0)
     # Create/get project
     project_name = "{}_{}".format(
         self.context.get_meta("project_name", "UnnamedProject"),
         self.context.get_meta("project_description",
                               "Undescribed Project"))
     if self.config.get("random_name", False):
         project_name = f"{project_name}_{self.id_generator(8)}"
     log.info("Searching for existing webapp")
     webapp_id = helper.search_for_webapp(project_name)
     if webapp_id is None:
         log.info("Creating webapp")
         webapp_id = helper.create_webapp(
             project_name,
             self.config.get("target"),
             self.config.get("qualys_option_profile_id"),
             excludes=self.config.get("exclude", None))
         sleep(sleep_interval)
     # Create auth record if needed
     auth_id = None
     if self.config.get("auth_script", None):
         log.info("Creating auth record")
         auth_name = f"{project_name} SeleniumAuthScript {timestamp}"
         auth_data = self.render_selenium_script(
             self.config.get("auth_script"),
             self.config.get("auth_login", ""),
             self.config.get("auth_password", ""),
             self.config.get("target"))
         auth_id = helper.create_selenium_auth_record(
             auth_name, auth_data,
             self.config.get("logged_in_indicator", "selenium"))
         sleep(sleep_interval)
         helper.add_auth_record_to_webapp(webapp_id, project_name, auth_id)
     # Start scan
     log.info("Starting scan")
     scan_name = f"{project_name} WAS {timestamp}"
     scan_auth = {"isDefault": True}
     if auth_id is not None:
         scan_auth = {"id": auth_id}
     scan_scanner = {"type": "EXTERNAL"}
     if self.config.get("qualys_scanner_type", "EXTERNAL") == "INTERNAL" and \
             self.config.get("qualys_scanner_pool", None):
         scanner_pool = self.config.get("qualys_scanner_pool")
         if isinstance(scanner_pool, str):
             scanner_pool = [
                 item.strip() for item in scanner_pool.split(",")
             ]
         scan_scanner = {
             "type": "INTERNAL",
             "friendlyName": random.choice(scanner_pool)
         }
     scan_id = helper.start_scan(
         scan_name, webapp_id, self.config.get("qualys_option_profile_id"),
         scan_scanner, scan_auth)
     sleep(sleep_interval)
     # Wait for scan to finish
     while helper.get_scan_status(scan_id) in ["SUBMITTED", "RUNNING"]:
         log.info("Waiting for scan to finish")
         sleep(status_check_interval)
     # Wait for results to finish processing
     while helper.get_scan_results_status(scan_id) in [
             "UNKNOWN", "TO_BE_PROCESSED", "PROCESSING"
     ]:
         log.info("Waiting for scan results to finish processing")
         sleep(status_check_interval)
     scan_result = helper.get_scan_results_status(scan_id)
     if scan_result in ["NO_HOST_ALIVE", "NO_WEB_SERVICE"]:
         error = Error(
             tool=self.get_name(),
             error=f"Qualys failed to access target",
             details="Qualys failed to access target " \
                     "(e.g. connection failed or target is not accessible). " \
                     "Please check scanner type/pool and target URL."
         )
         self.errors.append(error)
     if scan_result in [
             "SCAN_RESULTS_INVALID", "SERVICE_ERROR", "SCAN_INTERNAL_ERROR"
     ]:
         error = Error(
             tool=self.get_name(),
             error=f"Qualys internal error occured",
             details="Qualys failed to perform scan (internal scan error occured). " \
                     "Please re-run the scan and check config if error persists."
         )
         self.errors.append(error)
     # Request report
     log.info("Requesting report")
     report_name = f"{project_name} WAS {timestamp} FOR Scan {scan_id}"
     report_id = helper.create_report(
         report_name, webapp_id,
         self.config.get("qualys_report_template_id"))
     sleep(sleep_interval)
     # Wait for report to be created
     while helper.get_report_status(report_id) in ["RUNNING"]:
         log.info("Waiting for report to be created")
         sleep(status_check_interval)
     # Download report
     log.info("Downloading report XML")
     report_xml = helper.download_report(report_id)
     # Delete assets
     log.info("Deleting assets")
     helper.delete_asset("report", report_id)
     helper.delete_asset("wasscan", scan_id)
     if auth_id is not None:
         helper.delete_asset("webappauthrecord", auth_id)
     helper.delete_asset("webapp", webapp_id)
     # Parse findings
     parse_findings(report_xml, self)
     # Save intermediates
     self.save_intermediates(report_xml)
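All three wait loops in this scanner share one shape: poll a status getter until it leaves a set of busy states. A generic helper, with a canned status sequence standing in for QualysHelper:

import time

def wait_until_done(get_status, busy_states, check_interval=0.1):
    """ Block until get_status() returns something outside busy_states """
    while get_status() in busy_states:
        time.sleep(check_interval)
    return get_status()

statuses = iter(["SUBMITTED", "RUNNING", "RUNNING", "FINISHED", "FINISHED"])
result = wait_until_done(lambda: next(statuses), ["SUBMITTED", "RUNNING"])
print(result)  # -> FINISHED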