Beispiel #1
0
 def _active_scan(self):
     """ Start ZAP active scan against the configured target and wait for completion """
     log.info("Active scan against target %s", self.config.get("target"))
     if self.config.get("auth_script", None):
         # Authenticated scan: run as the previously configured ZAP user
         scan_id = self._zap_api.ascan.scan_as_user(
             self.config.get("target"),
             self._zap_context,
             self._zap_user,
             recurse=True,
             scanpolicyname=self._scan_policy_name)
     else:
         scan_id = self._zap_api.ascan.scan(
             self.config.get("target"),
             scanpolicyname=self._scan_policy_name)
     #
     # ZAP returns a numeric scan ID on success and an error string on
     # failure; int() raises ValueError for such strings (TypeError for None)
     try:
         int(scan_id)
     except (ValueError, TypeError):
         log.warning(
             "ZAP failed to return scan ID (scan_id=%s). Please check that target URL is accessible from Carrier DAST container",
             scan_id)  # pylint: disable=C0301
         return
     #
     status.wait_for_completion(
         lambda: int(self._zap_api.ascan.status(scan_id)) < 100,
         lambda: int(self._zap_api.ascan.status(scan_id)),
         "Active scan progress: %d%%")
Beispiel #2
0
def prepare_jira_mapping(jira_service):
    """ Make Jira mapping (for projects that are using custom values) """
    if not jira_service or not jira_service.valid:
        return dict()
    jira_service.connect()
    configured_type = jira_service.fields["issuetype"]["name"]
    # "!default_issuetype" is the sentinel for "not configured" -> use Bug
    issue_type = "Bug" if configured_type == "!default_issuetype" \
        else configured_type
    project_priorities = get_project_priorities(jira_service.client,
                                                jira_service.project,
                                                issue_type)
    if not project_priorities:
        jira_service.client.close()
        return dict()
    logging.debug("%s %s priorities: %s", jira_service.project, issue_type,
                  str(project_priorities))
    mapping = dict()
    for severity in const.JIRA_SEVERITIES:
        if severity in project_priorities:
            continue
        # Severity has no direct Jira priority: try known alternatives first
        for alternative in const.JIRA_ALTERNATIVES[severity]:
            if alternative in project_priorities:
                logging.warning("Mapping %s %s Jira priority: %s -> %s",
                                jira_service.project, issue_type, severity,
                                alternative)
                mapping[severity] = alternative
                break
        if severity not in mapping:
            # No alternative matched: fall back to the first project priority
            mapping[severity] = project_priorities[0]
            logging.error(
                "Failed to find Jira mapping for %s, using %s as a fallback",
                severity, mapping[severity])
    jira_service.client.close()
    return mapping
Beispiel #3
0
 def execute(self):
     """ Run the scanner: discover open ports with a fast pre-scan,
     then run a detailed nmap scan (with NSE scripts) on them """
     # Discover open ports
     include_ports = list()
     if self.config.get("include_ports", "0-65535"):
         include_ports.append(
             f'-p{self.config.get("include_ports", "0-65535")}')
     exclude_ports = list()
     if self.config.get("exclude_ports", None):
         exclude_ports.append("--exclude-ports")
         exclude_ports.append(f'{self.config.get("exclude_ports")}')
     target_url = url.parse_url(self.config.get("target"))
     task = subprocess.run(
         ["nmap", "-PN"] + include_ports + exclude_ports + [
             "--min-rate", "1000", "--max-retries", "0",
             "--max-rtt-timeout", "200ms", target_url.hostname
         ],
         stdout=subprocess.PIPE,
         stderr=subprocess.PIPE)
     log.log_subprocess_result(task)
     # Use discovered ports
     # BUGFIX: the original pattern r'([0-9]*/[tcp|udp])' used a character
     # class (matching a single letter from "tcp|udp"), not alternation --
     # it only worked by accident. Capture port and protocol explicitly.
     ports = list()
     tcp_ports = ""
     udp_ports = ""
     for port, proto in re.findall(r'(\d+)/(tcp|udp)', str(task.stdout)):
         if proto == "tcp":
             tcp_ports += f'{port},'
         else:
             udp_ports += f'{port},'
     if tcp_ports:
         ports.append(f"-pT:{tcp_ports[:-1]}")
     if udp_ports:
         ports.append(f"-pU:{udp_ports[:-1]}")
     if not ports:
         log.warning("No open ports found. Exiting")
         return
     # Make temporary files
     output_file_fd, output_file = tempfile.mkstemp()
     log.debug("Output file: %s", output_file)
     os.close(output_file_fd)
     # Scan target
     nmap_parameters = shlex.split(
         self.config.get("nmap_parameters", "-v -sVA"))
     nse_scripts = self.config.get(
         "nse_scripts",
         "ssl-date,http-mobileversion-checker,http-robots.txt,http-title,http-waf-detect,"
         "http-chrono,http-headers,http-comments-displayer,http-date")
     task = subprocess.run(["nmap"] + nmap_parameters + ports + [
         "--min-rate", "1000", "--max-retries", "0",
         f'--script={nse_scripts}', target_url.hostname, "-oX", output_file
     ],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
     log.log_subprocess_result(task)
     # Parse findings
     parse_findings(output_file, self)
     # Save intermediates
     self.save_intermediates(output_file, task)
     # Remove temporary files
     os.remove(output_file)
Beispiel #4
0
def parse_findings(data, scanner):
    """ Parse findings """
    # Parse JSON using legacy parser
    try:
        findings = BrakemanParser(data).items
    except:  # pylint: disable=W0702
        log.exception("Failed to parse brakeman report")
        log.warning(
            "Possibly ruby code path is invalid or not Ruby-on-Rails application"
        )
        findings = list()
    # Make finding instances
    for item in findings:
        # Assemble markdown description: details, references, file location
        description_text = "\n\n".join([
            markdown.markdown_escape(item['description']),
            f"**References:** {markdown.markdown_escape(item['references'])}",
            f"**File to review:** {markdown.markdown_escape(item['file_path'])}"
            f":{item['line']}"
        ])
        finding = SastFinding(
            title=item["title"],
            description=[description_text]
        )
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity",
                         constants.BRAKEMAN_SEVERITY_MAPPING[item["severity"]])
        finding.set_meta("legacy.file", item["file_path"])
        finding.set_meta("legacy.line", item["line"])
        finding.set_meta(
            "endpoints",
            [namedtuple("Endpoint", ["raw"])(raw=item["file_path"])])
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        scanner.findings.append(finding)
Beispiel #5
0
 def execute(self, args):
     """ Run the command """
     log.info("Starting")
     if args.call_from_legacy:
         log.warning("Called from legacy entry point")
     # Build context, config helper and the three pipeline performers
     context = RunContext(args)
     config = ConfigHelper(context)
     performers = (
         ("scanning", ScanningPerformer(context)),
         ("processing", ProcessingPerformer(context)),
         ("reporting", ReportingPerformer(context)),
     )
     # Register performers in the context
     for name, performer in performers:
         context.performers[name] = performer
     # Load config and let each performer validate its section
     config.load(args.config_variable, args.config_file, args.suite)
     for _, performer in performers:
         performer.validate_config(context.config)
     # Prepare
     for _, performer in performers:
         performer.prepare()
     # Perform
     for _, performer in performers:
         performer.perform()
     # Done
     log.info("Done")
Beispiel #6
0
 def connect(self):
     """ Establish connection to SMTP server

     Fallback chain:
       1. STARTTLS with a verified SSL context
       2. on ssl.SSLError: STARTTLS with an unverified SSL context
       3. on smtplib.SMTPServerDisconnected: implicit TLS via SMTP_SSL
     Any other failure is logged and recorded as an Error in
     self.context.errors.

     NOTE(review): an exception raised inside one of the fallback handlers
     propagates out of this method uncaught (sibling except clauses do not
     apply to exceptions raised in handlers) -- confirm this matches the
     intended retry semantics.
     """
     try:
         # Primary attempt: STARTTLS with certificate verification
         self.connection = smtplib.SMTP(self.server,
                                        self.port,
                                        timeout=self.timeout)
         self.connection.ehlo()
         self.connection.starttls(context=ssl.create_default_context())
         self.connection.ehlo()
         self.connection.login(self.login, self.password)
     except ssl.SSLError:
         # Certificate verification failed -- retry without verification
         log.warning("SSL error, retrying with unverified SSL context")
         self.connection = smtplib.SMTP(self.server,
                                        self.port,
                                        timeout=self.timeout)
         self.connection.ehlo()
         self.connection.starttls(context=ssl._create_unverified_context())  # pylint: disable=W0212
         self.connection.ehlo()
         self.connection.login(self.login, self.password)
     except smtplib.SMTPServerDisconnected:
         # Server dropped the STARTTLS attempt -- use implicit TLS instead
         log.warning(
             "Seems like SMTP with TSL didn't work, trying with SMTP_SSL")
         self.connection = smtplib.SMTP_SSL(host=self.server,
                                            port=self.port,
                                            timeout=self.timeout)
         self.connection.ehlo()
         self.connection.login(self.login, self.password)
     except:  # pylint: disable=W0702
         # Anything else: record the error and close a half-open connection
         log.exception("Failed to connect to SMTP server")
         error = Error(tool="EMail",
                       error="Failed to connect to SMTP server",
                       details=f"```\n{traceback.format_exc()}\n```")
         self.context.errors.append(error)
         if self.connection:
             self.connection.quit()
Beispiel #7
0
 def report(self):
     """ Report findings to ReportPortal

     DAST and SAST findings shared ~15 duplicated lines; the common flow is
     extracted into a local helper. Image attachments are only emitted for
     DAST findings, exactly as before.
     """
     if not self._rp_client:
         log.warning(
             "ReportPortal configuration/connection is invalid. Skipping RP reporting"
         )
         return
     log.info("Reporting to ReportPortal")
     #
     def _report_item(item, item_details, default_testing_type, with_images):
         """ Create one RP test item for a finding and attach its messages """
         tags = [
             f'Tool: {item.get_meta("tool", "")}',
             f'TestType: {self.context.get_meta("testing_type", default_testing_type)}',
             f'Severity: {item.get_meta("severity", SEVERITIES[-1])}'
         ]
         if item.get_meta("confidence", None):
             tags.append(f'Confidence: {item.get_meta("confidence")}')
         self._rp_client.start_test_item(item.title,
                                         description=item_details,
                                         tags=tags)
         if with_images and item.get_meta("legacy.images", None):
             for attachment in item.get_meta("legacy.images"):
                 self._rp_client.test_item_message(
                     attachment["name"], "INFO", attachment)
         self._rp_client.test_item_message(
             "!!!MARKDOWN_MODE!!! %s " % item_details, "INFO")
         self._rp_client.test_item_message(
             item.get_meta("issue_hash", "<no_hash>"), "ERROR")
         self._rp_client.finish_test_item()
     #
     for item in self.context.findings:
         # Skip findings excluded from reporting
         if item.get_meta("information_finding", False) or \
                 item.get_meta("false_positive_finding", False) or \
                 item.get_meta("excluded_finding", False):
             continue
         if isinstance(item, DastFinding):
             _report_item(item,
                          markdown.markdown_unescape(item.description),
                          "DAST", with_images=True)
         elif isinstance(item, SastFinding):
             _report_item(item,
                          markdown.markdown_unescape(
                              "\n\n".join(item.description)),
                          "SAST", with_images=False)
         else:
             log.warning("Unsupported finding type")
             continue  # raise ValueError("Unsupported item type")
     self._rp_client.finish_test()
Beispiel #8
0
 def __init__(self, url, user, password, project, fields=None):
     """ Connect to Jira and build the issue-field mapping

     :param url: Jira base URL
     :param user: Jira login
     :param password: Jira password/token
     :param project: Jira project key (upper-cased before use)
     :param fields: optional dict of issue field overrides; the special
         'watchers' key holds a comma-separated list of watcher names and
         is handled separately from regular issue fields
     """
     self.valid = True
     self.url = url
     self.password = password
     self.user = user
     try:
         self.connect()
     except:
         # Connection failure makes the wrapper invalid but does not raise
         logging.error("Failed to connect to Jira")
         self.valid = False
         return
     self.projects = [project.key for project in self.client.projects()]
     self.project = project.upper()
     if self.project not in self.projects:
         # Deliberately only warn: processing continues even when the
         # project key is not visible (strict handling is commented out)
         # self.client.close()
         logging.warning("Requested project not found in Jira projects")
         # self.valid = False
         # return
     self.fields = {}
     self.watchers = []
     if isinstance(fields, dict):
         if 'watchers' in fields.keys():
             self.watchers = [item.strip() for item in fields.pop('watchers').split(",")]
         all_jira_fields = self.client.fields()
         for key, value in fields.items():
             if value:
                 # Pass-through marker: keep the raw value, resolve later
                 if isinstance(value, str) and const.JIRA_FIELD_DO_NOT_USE_VALUE in value:
                     self.fields[key] = value
                     continue
                 # Resolve the field by exact id first, then by display
                 # name ("epic_link" matches "Epic Link")
                 jira_keys = [item for item in all_jira_fields if item["id"] == key]
                 if not jira_keys:
                     jira_keys = [item for item in all_jira_fields
                                  if item["name"].lower() == key.lower().replace('_', ' ')]
                 if len(jira_keys) == 1:
                     jira_key = jira_keys[0]
                     key_type = jira_key['schema']['type']
                 else:
                     logging.warning(f'Cannot recognize field {key}. This field will not be used.')
                     continue
                 # Coerce the configured value to the shape Jira expects
                 # for this field's schema type
                 if key_type in ['string', 'number', 'any'] or isinstance(value, dict):
                     _value = value
                 elif key_type == 'array':
                     if isinstance(value, str):
                         _value = [item.strip() for item in value.split(",")]
                     elif isinstance(value, int):
                         _value = [value]
                     else:
                         _value = value
                 else:
                     # Named Jira objects (priority, user, ...) go as {'name': value}
                     _value = {'name': value}
                 self.fields[jira_key['id']] = _value
     if not self.fields.get('issuetype', None):
         # Sentinel value consumed by prepare_jira_mapping()
         self.fields['issuetype'] = {'name': '!default_issuetype'}
     self.client.close()
     self.created_jira_tickets = list()
Beispiel #9
0
 def execute(self, args):
     """ Run the command """
     log.debug("Starting")
     if args.call_from_legacy:
         log.warning("Called from legacy entry point")
     # Init context
     context = RunContext(args)
     config = ConfigModel(context)
     # Suite listing mode (also entered when no suite was requested)
     if args.list_suites or not args.suite:
         suites = config.list_suites(args.config_seed, args.config_variable,
                                     args.config_file)
         if not args.suite:
             log.error("Suite is not defined. Use --help to get help")
         log.info("Available suites: %s", ", ".join(suites))
         return
     # Make performer instances and register them in the context
     scanning = ScanningPerformer(context)
     processing = ProcessingPerformer(context)
     reporting = ReportingPerformer(context)
     context.performers["scanning"] = scanning
     context.performers["processing"] = processing
     context.performers["reporting"] = reporting
     # Load config and let every performer validate its section
     config.load(args.config_seed, args.config_variable, args.config_file,
                 args.suite)
     for performer in (scanning, processing, reporting):
         performer.validate_config(context.config)
     # Add meta to context
     self._fill_context_meta(context)
     # Load state
     context.state.load()
     # Reporters are prepared first so actions can already report
     reporting.prepare()
     # Run actions
     actions.run(context)
     # Prepare scanning and processing
     scanning.prepare()
     processing.prepare()
     # Perform the pipeline
     for performer in (scanning, processing, reporting):
         performer.perform()
     # Done
     context.state.save()
     reporting.flush()
     log.debug("Done")
     # Show quality gate statistics if any
     for line in context.get_meta("quality_gate_stats", list()):
         log.info(line)
     # Fail quality gate if needed
     if context.get_meta("fail_quality_gate", False):
         os._exit(1)  # pylint: disable=W0212
Beispiel #10
0
 def execute(self):
     """ Run the processor: check finding counts against quality gate thresholds

     Counts non-excluded findings per severity, compares them against the
     configured thresholds, marks the gate as failed in context meta when a
     threshold is exceeded, and stores a formatted stats table in meta.
     """
     log.info("Checking quality gate status")
     thresholds = self.config.get("thresholds", dict())
     # Count issues by severity, skipping info/false-positive/excluded ones
     results_by_severity = dict()
     for item in self.context.findings:
         if item.get_meta("information_finding", False) or \
                 item.get_meta("false_positive_finding", False) or \
                 item.get_meta("excluded_finding", False):
             continue
         severity = item.get_meta("severity", SEVERITIES[-1])
         results_by_severity[severity] = \
             results_by_severity.get(severity, 0) + 1
     # Prepare stats data
     stats_data = dict()
     # Hoisted: the overall status was redundantly re-set on every loop pass
     stats_data["total"] = "OK"
     for severity in SEVERITIES:
         stats_data[severity] = {
             "findings": results_by_severity.get(severity, "-"),
             "threshold": thresholds.get(severity, "-"),
             "status": "OK"
         }
     # Check quality gate
     for severity, severity_results in results_by_severity.items():
         if severity not in thresholds:
             continue
         policy_threshold = thresholds[severity]
         if severity_results > policy_threshold:
             log.warning("Quality gate failed: %s -> %d > %d", severity,
                         severity_results, policy_threshold)
             self.context.set_meta("fail_quality_gate", True)
             stats_data[severity]["status"] = "FAIL"
             stats_data["total"] = "FAIL"
     # Prepare stats table for later display
     stats = list()
     stats.append("============= Quality gate stats ============")
     stats.append(
         "Severity  : {:<9} {:<5} {:<7} {:<4} {:<4}".format(*SEVERITIES))
     stats.append("Findings  : {:<9} {:<5} {:<7} {:<4} {:<4}".format(
         *[stats_data[severity]["findings"] for severity in SEVERITIES]))
     stats.append("Threshold : {:<9} {:<5} {:<7} {:<4} {:<4}".format(
         *[stats_data[severity]["threshold"] for severity in SEVERITIES]))
     stats.append("Status    : {:<9} {:<5} {:<7} {:<4} {:<4}".format(
         *[stats_data[severity]["status"] for severity in SEVERITIES]))
     stats.append("============= Quality gate: {:<4} ============".format(
         stats_data["total"]))
     self.context.set_meta("quality_gate_stats", stats)
Beispiel #11
0
 def execute(self):
     """ Run the scanner """
     # Resolve the target hostname to an IP address (masscan needs an IP)
     target_url = url.parse_url(self.config.get("target"))
     host = target_url.hostname
     if not url.find_ip(host):
         task = subprocess.run(["getent", "hosts", host],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
         log.log_subprocess_result(task)
         host = url.find_ip(task.stdout.decode("utf-8", errors="ignore"))
         if host:
             host = host[0].strip()
     if not host:
         log.warning("No target IP found. Exiting")
         return
     # Build port-selection arguments
     port_args = list()
     if self.config.get("include_ports", "0-65535"):
         port_args.append("-p")
         port_args.append(
             f'{self.config.get("include_ports", "0-65535")}')
         port_args.append(
             f'-pU:{self.config.get("include_ports", "0-65535")}')
     exclusion_args = list()
     if self.config.get("exclude_ports", None):
         exclusion_args.append("--exclude-ports")
         exclusion_args.append(f'{self.config.get("exclude_ports")}')
     # Make temporary output file
     output_file_fd, output_file = tempfile.mkstemp()
     log.debug("Output file: %s", output_file)
     os.close(output_file_fd)
     # Scan target
     command = ["masscan", host] + port_args + [
         "--rate",
         "1000",
         "-oJ",
         output_file,
     ] + exclusion_args
     task = subprocess.run(command,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
     log.log_subprocess_result(task)
     # Parse findings
     parse_findings(output_file, self)
     # Save intermediates
     self.save_intermediates(output_file, task)
     # Remove temporary files
     os.remove(output_file)
Beispiel #12
0
 def _spider(self):
     """ Start ZAP spider against the configured target and wait for completion """
     log.info("Spidering target: %s", self.config.get("target"))
     if self.config.get("auth_script", None):
         # Authenticated spidering: run as the previously configured ZAP user
         scan_id = self._zap_api.spider.scan_as_user(
             self._zap_context,
             self._zap_user,
             self.config.get("target"),
             recurse=True,
             subtreeonly=True)
     else:
         scan_id = self._zap_api.spider.scan(self.config.get("target"))
     #
     # ZAP returns a numeric scan ID on success and an error string on
     # failure; int() raises ValueError for such strings (TypeError for None)
     try:
         int(scan_id)
     except (ValueError, TypeError):
         log.warning(
             "ZAP failed to return scan ID (scan_id=%s). Please check that target URL is accessible from Carrier DAST container",
             scan_id)  # pylint: disable=C0301
         return
     #
     status.wait_for_completion(
         lambda: int(self._zap_api.spider.status(scan_id)) < 100,
         lambda: int(self._zap_api.spider.status(scan_id)),
         "Spidering progress: %d%%")
Beispiel #13
0
 def run(self):
     """ Run action: clone a git repository and check out the requested branch

     Patches dulwich/paramiko for containerized use (no UID/GID, no host key
     verification), clones the source into the target directory, checks out
     the requested branch (falling back to the remote default branch), and
     optionally removes the .git directory afterwards.
     """
     # Patch dulwich to work without valid UID/GID
     dulwich.repo.__original__get_default_identity = dulwich.repo._get_default_identity  # pylint: disable=W0212
     dulwich.repo._get_default_identity = _dulwich_repo_get_default_identity  # pylint: disable=W0212
     # Patch dulwich to use paramiko SSH client
     dulwich.client.get_ssh_vendor = ParamikoSSHVendor
     # Patch paramiko to skip key verification
     paramiko.transport.Transport._verify_key = _paramiko_transport_verify_key  # pylint: disable=W0212
     # Set USERNAME if needed (getpass fails when the UID has no passwd entry)
     try:
         getpass.getuser()
     except:  # pylint: disable=W0702
         os.environ["USERNAME"] = "******"
     # Get options
     source = self.config.get("source")
     target = self.config.get("target")
     branch = self.config.get("branch", "master")
     depth = self.config.get("depth", None)
     # Prepare auth
     auth_args = dict()
     if self.config.get("username", None) is not None:
         auth_args["username"] = self.config.get("username")
     if self.config.get("password", None) is not None:
         auth_args["password"] = self.config.get("password")
     if self.config.get("key", None) is not None:
         auth_args["key_filename"] = self.config.get("key")
     if self.config.get("key_data", None) is not None:
         # Key data is passed with "|" standing in for newlines
         key_obj = io.StringIO(
             self.config.get("key_data").replace("|", "\n"))
         pkey = paramiko.RSAKey.from_private_key(key_obj)
         # Patch paramiko to use our key
         paramiko.client.SSHClient._auth = _paramiko_client_SSHClient_auth(  # pylint: disable=W0212
             paramiko.client.SSHClient._auth,
             pkey  # pylint: disable=W0212
         )
     # Clone repository
     log.info("Cloning repository %s into %s", source, target)
     repository = porcelain.clone(source,
                                  target,
                                  checkout=False,
                                  depth=depth,
                                  errstream=log.DebugLogStream(),
                                  **auth_args)
     # Get current HEAD tree (default branch)
     try:
         head_tree = repository[b"HEAD"]
     except:  # pylint: disable=W0702
         head_tree = None
     # Get target tree (requested branch)
     branch_b = branch.encode("utf-8")
     try:
         target_tree = repository[b"refs/remotes/origin/" + branch_b]
     except:  # pylint: disable=W0702
         target_tree = None
     # Checkout branch
     if target_tree is not None:
         log.info("Checking out branch %s", branch)
         repository[b"refs/heads/" +
                    branch_b] = repository[b"refs/remotes/origin/" +
                                           branch_b]
         repository.refs.set_symbolic_ref(b"HEAD",
                                          b"refs/heads/" + branch_b)
         repository.reset_index(repository[b"HEAD"].tree)
     elif head_tree is not None:
         # Requested branch missing: fall back to the remote default branch
         try:
             default_branch_name = repository.refs.follow(b"HEAD")[0][1]
             if default_branch_name.startswith(refs.LOCAL_BRANCH_PREFIX):
                 default_branch_name = default_branch_name[
                     len(refs.LOCAL_BRANCH_PREFIX):]
             default_branch_name = default_branch_name.decode("utf-8")
             log.warning(
                 "Branch %s was not found. Checking out default branch %s",
                 branch, default_branch_name)
         except:  # pylint: disable=W0702
             log.warning(
                 "Branch %s was not found. Trying to check out default branch",
                 branch)
         try:
             repository.reset_index(repository[b"HEAD"].tree)
         except:  # pylint: disable=W0702
             log.exception("Failed to checkout default branch")
     else:
         # BUGFIX: the %s placeholder previously had no argument, which
         # itself produced a logging error instead of this message
         log.error(
             "Branch %s was not found and default branch is not set. Skipping checkout",
             branch)
     # Delete .git if requested
     if self.config.get("delete_git_dir", False):
         log.info("Deleting .git directory")
         shutil.rmtree(os.path.join(target, ".git"))
Beispiel #14
0
 def report_multi(self):
     """ Report """
     wrappers_config = dict()
     wrappers_config[None] = self.config
     for key, value in self.config.get("dynamic_jira").items():
         wrappers_config[re.compile(key)] = value
     #
     wrappers = dict()
     for wrapper_key, wrapper_config in wrappers_config.items():
         wrapper = JiraWrapper(
             wrapper_config.get("url"),
             wrapper_config.get("username"),
             wrapper_config.get("password"),
             wrapper_config.get("project"),
             wrapper_config.get("fields"),
         )
         #
         if not wrapper.valid:
             # Save default mapping to meta as a fallback
             default_mapping = constants.JIRA_SEVERITY_MAPPING
             default_mapping.update(
                 self.config.get("custom_mapping", dict()))
             self.set_meta("mapping", default_mapping)
             # Report error
             log.error(
                 "Jira configuration is invalid. Skipping Jira reporting")
             raise RuntimeError("Jira configuration is invalid")
         #
         wrappers[wrapper_key] = dict()
         wrappers[wrapper_key]["wrapper"] = wrapper
         wrappers[wrapper_key]["config"] = wrapper_config
         #
         if wrapper_config.get("separate_epic_linkage", False) and \
                 "Epic Link" in wrapper_config.get("fields"):
             wrappers[wrapper_key]["epic_link"] = wrapper_config.get(
                 "fields").pop("Epic Link")
         #
         wrappers[wrapper_key]["raw_epic_link"] = None
         if wrapper_config.get("separate_epic_linkage", False):
             wrappers[wrapper_key]["raw_epic_link"] = wrappers[wrapper_key][
                 "epic_link"]
         elif "Epic Link" in wrapper_config.get("fields"):
             wrappers[wrapper_key]["raw_epic_link"] = wrapper_config.get(
                 "fields")["Epic Link"]
         #
         wrappers[wrapper_key]["priority_mapping"] = wrapper_config.get(
             "custom_mapping", prepare_jira_mapping(wrapper))
         wrappers[wrapper_key]["mapping_meta"] = dict(
             wrappers[wrapper_key]["priority_mapping"])
         #
     self.set_meta("wrapper", wrappers[None]["wrapper"])
     self.set_meta("raw_epic_link", wrappers[None]["raw_epic_link"])
     #
     dynamic_label_mapping = dict()
     if self.config.get("dynamic_labels", None):
         try:
             for key, value in self.config.get("dynamic_labels").items():
                 dynamic_label_mapping[re.compile(key)] = value
         except:  # pylint: disable=W0702
             log.exception("Failed to add dynamic label mapping")
     #
     dynamic_field_mapping = dict()
     if self.config.get("dynamic_fields", None):
         try:
             for key, value in self.config.get("dynamic_fields").items():
                 dynamic_field_mapping[re.compile(key)] = value
         except:  # pylint: disable=W0702
             log.exception("Failed to add dynamic field mapping")
     #
     findings = list()
     for item in self.context.findings:  # pylint: disable=R1702
         #
         if item.get_meta("information_finding", False) or \
                 item.get_meta("false_positive_finding", False) or \
                 item.get_meta("excluded_finding", False):
             continue
         #
         if isinstance(item, (DastFinding, SastFinding)):
             #
             dynamic_labels = list()
             dynamic_fields = list()
             dynamic_wrapper = wrappers[None]
             #
             for endpoint in item.get_meta("endpoints", list()):
                 #
                 for pattern, addon_label in dynamic_label_mapping.items():
                     try:
                         if pattern.match(endpoint.raw):
                             dynamic_labels.append(addon_label)
                     except:  # pylint: disable=W0702
                         log.exception("Failed to add dynamic label")
                 #
                 for pattern, addon_fields in dynamic_field_mapping.items():
                     try:
                         if pattern.match(endpoint.raw):
                             dynamic_fields.append(addon_fields)
                     except:  # pylint: disable=W0702
                         log.exception("Failed to add dynamic field")
                 #
                 for pattern, addon_jira in wrappers.items():
                     if pattern is None:
                         continue
                     try:
                         if pattern.match(endpoint.raw):
                             dynamic_wrapper = addon_jira
                     except:  # pylint: disable=W0702
                         log.exception("Failed to add dynamic JIRA")
             #
             severity = item.get_meta("severity", SEVERITIES[-1])
             priority = constants.JIRA_SEVERITY_MAPPING[severity]
             if dynamic_wrapper["priority_mapping"] and \
                     priority in dynamic_wrapper["priority_mapping"]:
                 priority = dynamic_wrapper["priority_mapping"][priority]
             dynamic_wrapper["mapping_meta"][severity] = priority
             #
             if isinstance(item, DastFinding):
                 findings.append({
                     "title":
                     item.title,
                     "priority":
                     priority,
                     "description":
                     item.description.replace("\\.", "."),
                     "issue_hash":
                     item.get_meta("issue_hash", "<no_hash>"),
                     "additional_labels": [
                         label.replace(" ", "_") for label in [
                             item.get_meta("tool", "scanner"),
                             self.context.get_meta("testing_type", "DAST"),
                             item.get_meta("severity", SEVERITIES[-1])
                         ]
                     ] + dynamic_labels,
                     "dynamic_fields":
                     dynamic_fields,
                     "raw":
                     item,
                     "wrapper":
                     dynamic_wrapper,
                 })
             elif isinstance(item, SastFinding):
                 #
                 description_chunks = [
                     item.replace("\\.", ".").replace(
                         "<pre>", "{code:collapse=true}\n\n").replace(
                             "</pre>",
                             "\n\n{code}").replace("<br />", "\n")
                     for item in item.description
                 ]
                 #
                 if len("\n\n".join(description_chunks)
                        ) > constants.JIRA_DESCRIPTION_MAX_SIZE:
                     description = description_chunks[0]
                     chunks = description_chunks[1:]
                     comments = list()
                     new_line_str = '  \n  \n'
                     for chunk in chunks:
                         if not comments or (
                                 len(comments[-1]) + len(new_line_str) + len(chunk)
                             ) >= \
                                 constants.JIRA_COMMENT_MAX_SIZE:
                             comments.append(cut_jira_comment(chunk))
                         else:  # Last comment can handle one more chunk
                             comments[
                                 -1] += new_line_str + cut_jira_comment(
                                     chunk)
                 else:
                     description = "\n\n".join(description_chunks)
                     comments = list()
                 #
                 findings.append({
                     "title":
                     item.title,
                     "priority":
                     priority,
                     "description":
                     description,
                     "issue_hash":
                     item.get_meta("issue_hash", "<no_hash>"),
                     "additional_labels": [
                         label.replace(" ", "_") for label in [
                             item.get_meta("tool", "scanner"),
                             self.context.get_meta("testing_type", "SAST"),
                             item.get_meta("severity", SEVERITIES[-1])
                         ]
                     ] + dynamic_labels,
                     "dynamic_fields":
                     dynamic_fields,
                     "comments":
                     comments,
                     "raw":
                     item,
                     "wrapper":
                     dynamic_wrapper,
                 })
             #
         #
         else:
             log.warning("Unsupported finding type")
             continue  # raise ValueError("Unsupported item type")
     #
     self.set_meta("mapping", wrappers[None]["mapping_meta"])
     #
     for finding in findings:
         if finding["wrapper"]["config"].get("max_description_size", False):
             if len(finding["description"]) > \
                     int(finding["wrapper"]["config"].get("max_description_size")):
                 if "comments" not in finding:
                     finding["comments"] = list()
                 #
                 comment_chunks = list()
                 cut_line_len = len(constants.JIRA_DESCRIPTION_CUT)
                 cut_point = int(finding["wrapper"]["config"].get(
                     "max_description_size")) - cut_line_len
                 #
                 item_description = finding["description"]
                 finding["description"] = \
                     f"{item_description[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                 #
                 description_data = item_description[cut_point:]
                 comment_cut_threshold = min(
                     constants.JIRA_COMMENT_MAX_SIZE,
                     int(finding["wrapper"]["config"].get(
                         "max_description_size")))
                 cut_point = comment_cut_threshold - cut_line_len
                 #
                 while description_data:
                     if len(description_data) > comment_cut_threshold:
                         comment_chunks.append(
                             f"{description_data[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                         )
                         description_data = description_data[cut_point:]
                     else:
                         comment_chunks.append(description_data)
                         break
                 #
                 while comment_chunks:
                     finding["comments"].insert(0, comment_chunks.pop())
     #
     findings.sort(key=lambda item: (SEVERITIES.index(item["raw"].get_meta(
         "severity", SEVERITIES[-1])), item["raw"].get_meta("tool", ""),
                                     item["raw"].title))
     #
     new_tickets = list()
     existing_tickets = list()
     #
     for _, local_wrapper in wrappers.items():
         local_wrapper["wrapper"].connect()
     #
     for finding in findings:
         try:
             config_labels = finding["wrapper"]["config"].get(
                 "additional_labels", None)
             if config_labels is None:
                 config_labels = list()
             if not isinstance(config_labels, list):
                 config_labels = [
                     item.strip() for item in config_labels.split(",")
                 ]
             #
             field_overrides = dict()
             for dynamic_field in finding["dynamic_fields"]:
                 field_overrides.update(dynamic_field)
             #
             issue, created = finding["wrapper"]["wrapper"].create_issue(
                 finding["title"],  # title
                 finding["priority"],  # priority
                 finding["description"],  # description
                 finding["issue_hash"],  # issue_hash, self.get_hash_code()
                 # attachments=None,
                 # get_or_create=True,
                 additional_labels=finding["additional_labels"] +
                 config_labels,  # additional_labels  # pylint: disable=C0301
                 field_overrides=field_overrides,
             )
             if created and "comments" in finding:
                 for comment in finding["comments"]:
                     finding["wrapper"]["wrapper"].add_comment_to_issue(
                         issue, comment)
             if created and finding["wrapper"]["config"].get(
                     "separate_epic_linkage", False):
                 try:
                     finding["wrapper"][
                         "wrapper"].client.add_issues_to_epic(
                             finding["wrapper"]["epic_link"],
                             [str(issue.key)])
                 except:  # pylint: disable=W0702
                     log.exception("Failed to add ticket %s to epic %s",
                                   str(issue.key),
                                   finding["wrapper"]["epic_link"])
             try:
                 result_priority = str(issue.fields.priority)
             except:  # pylint: disable=W0702
                 result_priority = "Default"
             #
             ticket_meta = {
                 "jira_id":
                 issue.key,
                 "jira_url":
                 f"{finding['wrapper']['config'].get('url')}/browse/{issue.key}",  # pylint: disable=C0301
                 "priority":
                 result_priority,
                 "status":
                 issue.fields.status.name,
                 "created":
                 issue.fields.created,
                 "open_date":
                 datetime.strptime(
                     issue.fields.created,
                     "%Y-%m-%dT%H:%M:%S.%f%z").strftime("%d %b %Y %H:%M"),
                 "description":
                 issue.fields.summary,
                 "assignee":
                 str(issue.fields.assignee),
                 "raw_created":
                 str(issue.fields.created),
                 "raw_severity":
                 finding["raw"].get_meta("severity", SEVERITIES[-1]),
                 "raw_jira_url":
                 finding["wrapper"]["config"].get("url"),
                 "raw_jira_project":
                 finding["wrapper"]["config"].get("project"),
                 "raw_jira_epic":
                 finding["wrapper"]["raw_epic_link"],
                 "raw_jira_fields":
                 finding["wrapper"]["config"].get("fields"),
                 "raw_addon_fields":
                 field_overrides,
                 "raw_addon_labels":
                 finding["additional_labels"] + config_labels,
             }
             if created:
                 if not self._ticket_in_list(ticket_meta, new_tickets):
                     new_tickets.append(ticket_meta)
             else:
                 if issue.fields.status.name in constants.JIRA_OPENED_STATUSES:
                     if not self._ticket_in_list(ticket_meta,
                                                 existing_tickets):
                         existing_tickets.append(ticket_meta)
         except:  # pylint: disable=W0702
             log.exception(
                 f"Failed to create ticket for {finding['title']}")
             error = Error(
                 tool=self.get_name(),
                 error=f"Failed to create ticket for {finding['title']}",
                 details=f"```\n{traceback.format_exc()}\n```")
             self.errors.append(error)
     #
     self.set_meta("new_tickets", new_tickets)
     self.set_meta("existing_tickets", existing_tickets)
Beispiel #15
0
 def validate_config(config):
     """ Check that the config defines a processing section and warn if not. """
     has_processing = "processing" in config
     if not has_processing:
         log.warning("No processing defined in config")
Beispiel #16
0
 def _process_depots(self, current_context_config):  # pylint: disable=R0912
     """
     Initialize depot instances declared in the context config.

     Ensures the 'depots' settings tree has the three known sections
     (secret/object/state), migrates legacy top-level depot entries into
     the section matching their model class, then instantiates each
     configured depot and registers it in self.context.depots.

     :param current_context_config: full context config dict; must contain
         a "settings" key (possibly without "depots")
     :return: the (possibly substituted) context config — when a secret
         depot is enabled, config values are re-resolved via
         self._depot_substitution and the new dict is returned
     """
     context_config = current_context_config
     # Check depot config section sanity: create missing section dicts
     if "depots" not in context_config["settings"]:
         context_config["settings"]["depots"] = dict()
     depot_sections = ["secret", "object", "state"]
     for depot_section in depot_sections:
         if depot_section not in context_config["settings"]["depots"]:
             context_config["settings"]["depots"][depot_section] = dict()
     # Support legacy depot configuration: depots declared directly under
     # "depots" (not inside a section) are moved into the proper section
     legacy_depot_names = [
         item
         for item in list(context_config["settings"].get("depots", dict()))
         if item not in depot_sections
     ]
     # Map depot model base classes to their target section names
     legacy_section_map = {
         SecretDepotModel: "secret",
         ObjectDepotModel: "object",
         StateDepotModel: "state"
     }
     for depot_name in legacy_depot_names:
         try:
             # Depot implementation module is located by naming convention
             depot_class = importlib.import_module(
                 f"dusty.tools.depots.{depot_name}.depot").Depot
             # A depot class implementing several models is copied into
             # every matching section
             for depot_type in legacy_section_map:
                 if issubclass(depot_class, depot_type):
                     depot_section = legacy_section_map[depot_type]
                     context_config["settings"]["depots"][depot_section][depot_name] = \
                         context_config["settings"]["depots"][depot_name]
                     log.info("Legacy depot %s added to section %s",
                              depot_name, depot_section)
             # Remove the legacy top-level entry after migration
             context_config["settings"]["depots"].pop(depot_name)
         except:  # pylint: disable=W0702
             # Best-effort migration: a broken legacy entry must not stop
             # processing of the remaining depots
             log.exception("Failed to process legacy depot %s", depot_name)
     # Make depot instances
     for depot_section in depot_sections:
         for depot_name in list(
                 context_config["settings"]["depots"][depot_section]):
             try:
                 depot_class = importlib.import_module(
                     f"dusty.tools.depots.{depot_name}.depot").Depot
                 if depot_class.get_name(
                 ) in self.context.depots[depot_section]:
                     continue  # Depot already enabled in first iteration
                 depot = depot_class(
                     self.context, context_config["settings"]["depots"]
                     [depot_section][depot_name])
                 self.context.depots[depot_section][
                     depot.get_name()] = depot
                 log.info("Enabled %s depot %s", depot_section, depot_name)
                 # Secret depots trigger immediate substitution of secret
                 # placeholders in the config; the resolved-counter meta is
                 # reset first so the log below reports this depot's count
                 if isinstance(depot, SecretDepotModel):
                     self.context.set_meta("depots_resolved_secrets", 0)
                     context_config = self._depot_substitution(
                         context_config)
                     log.info(
                         "Resolved %d secrets from depots",
                         self.context.get_meta("depots_resolved_secrets",
                                               0))
             except ModuleNotFoundError:
                 # Module may be provided later (e.g. by a depot-delivered
                 # plugin); this method is presumably re-run afterwards —
                 # see the "first iteration" note above. TODO confirm
                 log.warning(
                     "Depot %s is not enabled, probably needed module is not loaded yet",
                     depot_name)
             except:  # pylint: disable=W0702
                 # One broken depot must not prevent enabling the others
                 log.exception("Failed to enable depot %s", depot_name)
     #
     return context_config
Beispiel #17
0
 def report(self):
     """
     Report findings to Jira (legacy single-wrapper flow).

     Builds one issue payload per non-excluded finding, cuts oversized
     descriptions into comments, creates/updates tickets via JiraWrapper
     and records new/existing ticket metadata and the severity->priority
     mapping into meta.

     :raises RuntimeError: when the Jira configuration is invalid
     """
     # Remove "Epic Link" from fields if requested: the linkage is then
     # performed as a separate call after each issue is created.
     # FIX: bind epic_link up front — previously it was only assigned when
     # "Epic Link" was present in fields, so with separate_epic_linkage set
     # but the field absent, the log.exception call below raised NameError.
     epic_link = None
     if self.config.get("separate_epic_linkage", False) and \
             "Epic Link" in self.config.get("fields"):
         epic_link = self.config.get("fields").pop("Epic Link")
     # Prepare wrapper
     log.info("Creating legacy wrapper instance")
     wrapper = JiraWrapper(self.config.get("url"),
                           self.config.get("username"),
                           self.config.get("password"),
                           self.config.get("project"),
                           self.config.get("fields"))
     if not wrapper.valid:
         # Save default mapping to meta as a fallback
         default_mapping = constants.JIRA_SEVERITY_MAPPING
         default_mapping.update(self.config.get("custom_mapping", dict()))
         self.set_meta("mapping", default_mapping)
         # Report error
         log.error("Jira configuration is invalid. Skipping Jira reporting")
         raise RuntimeError("Jira configuration is invalid")
     log.debug("Legacy wrapper is valid")
     # Prepare findings: custom_mapping overrides the mapping derived from
     # the Jira project's own priority scheme
     priority_mapping = self.config.get("custom_mapping",
                                        prepare_jira_mapping(wrapper))
     mapping_meta = dict(priority_mapping)
     findings = list()
     for item in self.context.findings:
         # Skip findings excluded from reporting
         if item.get_meta("information_finding", False) or \
                 item.get_meta("false_positive_finding", False) or \
                 item.get_meta("excluded_finding", False):
             continue
         if isinstance(item, DastFinding):
             severity = item.get_meta("severity", SEVERITIES[-1])
             priority = constants.JIRA_SEVERITY_MAPPING[severity]
             if priority_mapping and priority in priority_mapping:
                 priority = priority_mapping[priority]
             # Update meta mapping to reflect actual results
             mapping_meta[severity] = priority
             findings.append({
                 "title": item.title,
                 "priority": priority,
                 "description": item.description.replace("\\.", "."),
                 "issue_hash": item.get_meta("issue_hash", "<no_hash>"),
                 "additional_labels": [
                     label.replace(" ", "_") for label in [
                         item.get_meta("tool", "scanner"),
                         self.context.get_meta("testing_type", "DAST"),
                         item.get_meta("severity", SEVERITIES[-1])
                     ]
                 ],
                 "raw": item
             })
         elif isinstance(item, SastFinding):
             severity = item.get_meta("severity", SEVERITIES[-1])
             priority = constants.JIRA_SEVERITY_MAPPING[severity]
             if priority_mapping and priority in priority_mapping:
                 priority = priority_mapping[priority]
             # Update meta mapping to reflect actual results
             mapping_meta[severity] = priority
             # Convert HTML-ish markup in each description chunk to Jira
             # wiki markup (renamed loop var: original shadowed `item`)
             description_chunks = [
                 chunk.replace("\\.", ".").replace(
                     "<pre>", "{code:collapse=true}\n\n").replace(
                         "</pre>", "\n\n{code}").replace("<br />", "\n")
                 for chunk in item.description
             ]
             if len("\n\n".join(description_chunks)
                    ) > constants.JIRA_DESCRIPTION_MAX_SIZE:
                 # Too big for one description: first chunk becomes the
                 # description, the rest are packed into comments
                 description = description_chunks[0]
                 chunks = description_chunks[1:]
                 comments = list()
                 new_line_str = '  \n  \n'
                 for chunk in chunks:
                     if not comments or (len(comments[-1]) + len(new_line_str) + len(chunk)) >= \
                             constants.JIRA_COMMENT_MAX_SIZE:
                         comments.append(cut_jira_comment(chunk))
                     else:  # Last comment can handle one more chunk
                         comments[-1] += new_line_str + cut_jira_comment(
                             chunk)
             else:
                 description = "\n\n".join(description_chunks)
                 comments = list()
             findings.append({
                 "title": item.title,
                 "priority": priority,
                 "description": description,
                 "issue_hash": item.get_meta("issue_hash", "<no_hash>"),
                 "additional_labels": [
                     label.replace(" ", "_") for label in [
                         item.get_meta("tool", "scanner"),
                         self.context.get_meta("testing_type", "SAST"),
                         item.get_meta("severity", SEVERITIES[-1])
                     ]
                 ],
                 "comments": comments,
                 "raw": item
             })
         else:
             log.warning("Unsupported finding type")
             continue  # raise ValueError("Unsupported item type")
     # Cut description if length above configured limit
     if self.config.get("max_description_size", False):
         for finding in findings:
             if len(finding["description"]) > int(
                     self.config.get("max_description_size")):
                 if "comments" not in finding:
                     finding["comments"] = list()
                 #
                 comment_chunks = list()
                 cut_line_len = len(constants.JIRA_DESCRIPTION_CUT)
                 cut_point = int(
                     self.config.get("max_description_size")) - cut_line_len
                 #
                 item_description = finding["description"]
                 finding["description"] = \
                     f"{item_description[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                 # Overflow is split into comment-sized chunks, each ending
                 # with the cut marker except the last one
                 description_data = item_description[cut_point:]
                 comment_cut_threshold = min(
                     constants.JIRA_COMMENT_MAX_SIZE,
                     int(self.config.get("max_description_size")))
                 cut_point = comment_cut_threshold - cut_line_len
                 #
                 while description_data:
                     if len(description_data) > comment_cut_threshold:
                         comment_chunks.append(
                             f"{description_data[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                         )
                         description_data = description_data[cut_point:]
                     else:
                         comment_chunks.append(description_data)
                         break
                 # Prepend overflow chunks (in order) before existing comments
                 while comment_chunks:
                     finding["comments"].insert(0, comment_chunks.pop())
     # Sort findings by severity-tool-title
     findings.sort(key=lambda item: (SEVERITIES.index(item["raw"].get_meta(
         "severity", SEVERITIES[-1])), item["raw"].get_meta("tool", ""),
                                     item["raw"].title))
     # Submit issues
     wrapper.connect()
     new_tickets = list()
     existing_tickets = list()
     for finding in findings:
         try:
             issue, created = wrapper.create_issue(
                 finding["title"],  # title
                 finding["priority"],  # priority
                 finding["description"],  # description
                 finding["issue_hash"],  # issue_hash, self.get_hash_code()
                 # attachments=None,
                 # get_or_create=True,
                 additional_labels=finding[
                     "additional_labels"]  # additional_labels
             )
             if created and "comments" in finding:
                 for comment in finding["comments"]:
                     wrapper.add_comment_to_issue(issue, comment)
             if created and self.config.get("separate_epic_linkage", False):
                 try:
                     wrapper.client.add_issues_to_epic(
                         epic_link, [str(issue.key)])
                 except:  # pylint: disable=W0702
                     log.exception("Failed to add ticket %s to epic %s",
                                   str(issue.key), epic_link)
             try:
                 # str() for consistency with the wrapper-based reporter
                 result_priority = str(issue.fields.priority)
             except:  # pylint: disable=W0702
                 result_priority = "Default"
             ticket_meta = {
                 "jira_id": issue.key,
                 "jira_url": f"{self.config.get('url')}/browse/{issue.key}",
                 "priority": result_priority,
                 "status": issue.fields.status.name,
                 "created": issue.fields.created,
                 "open_date": datetime.strptime(
                     issue.fields.created,
                     "%Y-%m-%dT%H:%M:%S.%f%z").strftime("%d %b %Y %H:%M"),
                 "description": issue.fields.summary,
                 "assignee": str(issue.fields.assignee)
             }
             if created:
                 if not self._ticket_in_list(ticket_meta, new_tickets):
                     new_tickets.append(ticket_meta)
             else:
                 if issue.fields.status.name in constants.JIRA_OPENED_STATUSES:
                     if not self._ticket_in_list(ticket_meta,
                                                 existing_tickets):
                         existing_tickets.append(ticket_meta)
         except:  # pylint: disable=W0702
             # One failed ticket must not stop reporting of the rest
             log.exception(
                 f"Failed to create ticket for {finding['title']}")
             error = Error(
                 tool=self.get_name(),
                 error=f"Failed to create ticket for {finding['title']}",
                 details=f"```\n{traceback.format_exc()}\n```")
             self.errors.append(error)
     self.set_meta("new_tickets", new_tickets)
     self.set_meta("existing_tickets", existing_tickets)
     self.set_meta("mapping", mapping_meta)
Beispiel #18
0
 def validate_config(config):
     """ Ensure the config contains a scanners section, adding an empty one if absent. """
     if "scanners" in config:
         return
     log.warning("No scanners defined in config")
     config["scanners"] = dict()
Beispiel #19
0
    def create_issue(self,
                     title,
                     priority,
                     description,
                     issue_hash,
                     attachments=None,
                     get_or_create=True,
                     additional_labels=None):
        """
        Create a Jira issue (or fetch the existing one matching issue_hash).

        :param title: issue summary
        :param priority: Jira priority name
        :param description: issue body, markdown (converted to Jira markup)
        :param issue_hash: dedup key embedded in the JQL search request
        :param attachments: optional list of dicts with 'binary_content'
            and 'message' keys to attach to the issue
        :param get_or_create: reuse an existing issue when the hash matches
        :param additional_labels: optional list of labels to add
        :return: (issue, created) tuple; created is True for a new issue
        """
        def replace_defaults(value):
            # Substitute '!default_*' placeholders inside string field values
            if isinstance(value,
                          str) and const.JIRA_FIELD_USE_DEFAULT_VALUE in value:
                for default_key in default_fields.keys():
                    if default_key in value:
                        value = value.replace(default_key,
                                              default_fields[default_key])
            return value

        default_fields = {
            '!default_issuetype': 'Bug',
            '!default_summary': title,
            '!default_description': description,
            '!default_priority': priority
        }
        description = self.markdown_to_jira_markdown(description)
        issue_data = {
            'project': {
                'key': self.project
            },
            'issuetype': 'Bug',
            'summary': title,
            'description': description,
            'priority': {
                'name': priority
            }
        }
        # Apply configured field overrides on top of the base payload
        fields = deepcopy(self.fields)
        for key, value in fields.items():
            if isinstance(value, str):
                if const.JIRA_FIELD_DO_NOT_USE_VALUE in value:
                    # FIX: tolerate keys absent from the base payload —
                    # plain pop(key) raised KeyError for custom fields
                    issue_data.pop(key, None)
                else:
                    issue_data[key] = replace_defaults(value)
            elif isinstance(value, list):
                # FIX: build a new list instead of index-based in-place
                # updates, which mis-handled duplicate list items
                value = [replace_defaults(item) for item in value]
                if issue_data.get(key):
                    issue_data[key].extend(value)
                else:
                    issue_data[key] = value
            elif isinstance(value, dict):
                for _key, _value in value.items():
                    value[_key] = replace_defaults(_value)
                issue_data[key] = value
            elif key not in issue_data:
                issue_data[key] = value
            else:
                logging.warning(
                    'field {} is already set and has \'{}\' value'.format(
                        key, issue_data[key]))
        # Merge configured labels with per-call additional labels
        _labels = []
        if additional_labels and isinstance(additional_labels, list):
            _labels.extend(additional_labels)
        if issue_data.get('labels', None):
            issue_data['labels'].extend(_labels)
        else:
            issue_data['labels'] = _labels
        jira_request = self.JIRA_REQUEST.format(issue_data["project"]["key"],
                                                issue_hash, issue_hash)
        if get_or_create:
            issue, created = self.get_or_create_issue(jira_request, issue_data)
        else:
            issue = self.post_issue(issue_data)
            created = True
        try:
            # Attachments and watchers are best-effort: failure here must
            # not lose the already-created issue
            if attachments:
                for attachment in attachments:
                    if 'binary_content' in attachment:
                        self.add_attachment(
                            issue.key,
                            attachment=attachment['binary_content'],
                            filename=attachment['message'])
            for watcher in self.watchers:
                self.client.add_watcher(issue.id, watcher)
        except:  # pylint: disable=W0702
            if os.environ.get("debug", False):
                logging.error(format_exc())
        finally:
            try:
                result_priority = issue.fields.priority
            except:  # pylint: disable=W0702
                result_priority = "Default"
            # Record the ticket for later reporting regardless of
            # attachment/watcher outcome
            self.created_jira_tickets.append({
                'description': issue.fields.summary,
                'priority': result_priority,
                'key': issue.key,
                'link': self.url + '/browse/' + issue.key,
                'new': created,
                'assignee': issue.fields.assignee,
                'status': issue.fields.status.name,
                'open_date': issue.fields.created
            })
        return issue, created
Beispiel #20
0
 def execute(self):  # pylint: disable=R0912
     """ Run the scanner """
     # Qualys WAS API helper: wraps auth, retries and timeouts
     helper = QualysHelper(
         self.context,
         self.config.get("qualys_api_server"),
         self.config.get("qualys_login"),
         self.config.get("qualys_password"),
         retries=self.config.get("retries", 10),
         retry_delay=self.config.get("retry_delay", 30.0),
         timeout=self.config.get("timeout", 120)
     )
     log.info("Qualys WAS version: %s", helper.get_version())
     # Timestamp is embedded in auth/scan/report names to keep them unique
     timestamp = datetime.utcfromtimestamp(int(time())).strftime("%Y-%m-%d %H:%M:%S")
     sleep_interval = self.config.get("sleep_interval", 10.0)
     status_check_interval = self.config.get("status_check_interval", 60.0)
     # Create/get project
     project_name = "{}_{}".format(
         self.context.get_meta("project_name", "UnnamedProject"),
         self.context.get_meta("project_description", "Undescribed Project")
     )
     if self.config.get("random_name", False):
         project_name = f"{project_name}_{self.id_generator(8)}"
     log.info("Searching for existing webapp")
     webapp_id = helper.search_for_webapp(project_name)
     if webapp_id is None:
         log.info("Creating webapp")
         webapp_id = helper.create_webapp(
             project_name,
             self.config.get("target"),
             self.config.get("qualys_option_profile_id"),
             excludes=self.config.get("exclude", None)
         )
         # Give Qualys time to register the new asset before using it
         sleep(sleep_interval)
     # Create auth record if needed (Selenium-script based authentication)
     auth_id = None
     if self.config.get("auth_script", None):
         log.info("Creating auth record")
         auth_name = f"{project_name} SeleniumAuthScript {timestamp}"
         auth_data = self.render_selenium_script(
             self.config.get("auth_script"),
             self.config.get("auth_login", ""),
             self.config.get("auth_password", ""),
             self.config.get("target")
         )
         auth_id = helper.create_selenium_auth_record(
             auth_name, auth_data,
             self.config.get("logged_in_indicator", "selenium")
         )
         sleep(sleep_interval)
         helper.add_auth_record_to_webapp(webapp_id, project_name, auth_id)
     # Start scan
     log.info("Starting scan")
     scan_name = f"{project_name} WAS {timestamp}"
     scan_auth = {"isDefault": True}
     if auth_id is not None:
         scan_auth = {"id": auth_id}
     # Scanner appliance: EXTERNAL by default; INTERNAL picks a random
     # appliance from the configured pool
     scan_scanner = {"type": "EXTERNAL"}
     if self.config.get("qualys_scanner_type", "EXTERNAL") == "INTERNAL" and \
             self.config.get("qualys_scanner_pool", None):
         scanner_pool = self.config.get("qualys_scanner_pool")
         # Pool may be a comma-separated string or a list
         if isinstance(scanner_pool, str):
             scanner_pool = [item.strip() for item in scanner_pool.split(",")]
         scan_scanner = {
             "type": "INTERNAL",
             "friendlyName": random.choice(scanner_pool)
         }
     scan_id = helper.start_scan(
         scan_name, webapp_id,
         self.config.get("qualys_option_profile_id"),
         scan_scanner, scan_auth
     )
     sleep(sleep_interval)
     # Wait for scan to finish
     while helper.get_scan_status(scan_id) in ["SUBMITTED", "RUNNING"]:
         log.info("Waiting for scan to finish")
         sleep(status_check_interval)
     # Wait for results to finish processing
     if helper.get_scan_results_status(scan_id) == "UNKNOWN":
         log.warning(
             "Unable to find scan results status. Scan status: %s",
             helper.get_scan_status(scan_id)
         )
     while helper.get_scan_results_status(scan_id) in ["TO_BE_PROCESSED", "PROCESSING"]:
         log.info("Waiting for scan results to finish processing")
         sleep(status_check_interval)
     # Record scan-level failures as errors but still try to get a report
     scan_result = helper.get_scan_results_status(scan_id)
     if scan_result in ["NO_HOST_ALIVE", "NO_WEB_SERVICE"]:
         error = Error(
             tool=self.get_name(),
             error=f"Qualys failed to access target",
             details="Qualys failed to access target " \
                     "(e.g. connection failed or target is not accessible). " \
                     "Please check scanner type/pool and target URL."
         )
         self.errors.append(error)
     if scan_result in ["SCAN_RESULTS_INVALID", "SERVICE_ERROR", "SCAN_INTERNAL_ERROR"]:
         error = Error(
             tool=self.get_name(),
             error=f"Qualys internal error occured",
             details="Qualys failed to perform scan (internal scan error occured). " \
                     "Please re-run the scan and check config if error persists."
         )
         self.errors.append(error)
     # Request report
     log.info("Requesting report")
     report_name = f"{project_name} WAS {timestamp} FOR Scan {scan_id}"
     report_id = helper.create_report(
         report_name, webapp_id,
         self.config.get("qualys_report_template_id")
     )
     sleep(sleep_interval)
     # Wait for report to be created
     while helper.get_report_status(report_id) in ["RUNNING"]:
         log.info("Waiting for report to be created")
         sleep(status_check_interval)
     # Download report
     log.info("Downloading report XML")
     report_xml = helper.download_report(report_id)
     # Delete assets created during this run to keep the subscription clean
     log.info("Deleting assets")
     helper.delete_asset("report", report_id)
     helper.delete_asset("wasscan", scan_id)
     if auth_id is not None:
         helper.delete_asset("webappauthrecord", auth_id)
     helper.delete_asset("webapp", webapp_id)
     # Parse findings from the downloaded XML into this scanner's results
     parse_findings(report_xml, self)
     # Save intermediates
     self.save_intermediates(report_xml)
Beispiel #21
0
 def validate_config(config):
     """ Ensure the config has a reporters section; create an empty one when missing. """
     has_reporters = "reporters" in config
     if not has_reporters:
         log.warning("No reporters defined in config")
         config["reporters"] = dict()