Exemple #1
0
def resolve_depencies(modules_ordered_dict):
    """ Resolve module dependencies and re-order modules accordingly.

    Builds a map from each module's package name (second-to-last component
    of its dotted module path) to the module instance, verifies that every
    hard dependency is available, computes a dependency-respecting order via
    _walk_module_depencies(), then re-orders modules_ordered_dict in place
    using OrderedDict.move_to_end().

    :param modules_ordered_dict: OrderedDict of module instances keyed by
        module name (as returned by get_name()); mutated in place
    :raises RuntimeError: if a required dependency is missing, or if a
        circular dependency is detected during the walk
    """
    # Prepare module name map
    module_name_map = dict()
    for item in modules_ordered_dict:
        try:
            # Key by the package name derived from the instance's class module path
            module_name_map[modules_ordered_dict[item].__class__.__module__.split(".")[-2]] = \
                modules_ordered_dict[item]
        except IndexError:
            # Fallback when the class module path has fewer than two dotted
            # components: use the object's own __module__ instead
            # NOTE(review): assumes one of the two paths always yields a key — confirm
            module_name_map[modules_ordered_dict[item].__module__.split(".")[-2]] = \
                modules_ordered_dict[item]
    # Check required depencies
    for module_name in module_name_map:
        for dependency in module_name_map[module_name].depends_on():
            if dependency not in module_name_map:
                log.error("Dependency %s not present (required by %s)", dependency, module_name)
                raise RuntimeError("Required dependency not present")
    # Walk modules
    module_order = list()
    visited_modules = set()
    for module_name in module_name_map:
        if module_name not in module_order:
            _walk_module_depencies(module_name, module_name_map, module_order, visited_modules)
    # Re-order modules
    for module_name in module_order:
        modules_ordered_dict.move_to_end(module_name_map[module_name].get_name())
Exemple #2
0
 def list_suites(self, config_seed, config_variable, config_file):
     """ Return names of suites available in the loaded config """
     loaded = self._load_config(config_seed, config_variable, config_file)
     if "suites" in loaded:
         return list(loaded["suites"])
     log.error("Suites are not defined")
     return list()
Exemple #3
0
def prepare_jira_mapping(jira_service):
    """ Make Jira mapping (for projects that are using custom values) """
    # Nothing to do without a valid Jira service
    if not jira_service or not jira_service.valid:
        return dict()
    jira_service.connect()
    # Use the configured issue type unless it is still the placeholder default
    configured_type = jira_service.fields["issuetype"]["name"]
    issue_type = "Bug" if configured_type == "!default_issuetype" else configured_type
    project_priorities = get_project_priorities(
        jira_service.client, jira_service.project, issue_type)
    if not project_priorities:
        jira_service.client.close()
        return dict()
    logging.debug("%s %s priorities: %s", jira_service.project, issue_type,
                  str(project_priorities))
    mapping = dict()
    for severity in const.JIRA_SEVERITIES:
        # Severities present in the project need no mapping
        if severity in project_priorities:
            continue
        for alternative in const.JIRA_ALTERNATIVES[severity]:
            if alternative in project_priorities:
                logging.warning("Mapping %s %s Jira priority: %s -> %s",
                                jira_service.project, issue_type, severity,
                                alternative)
                mapping[severity] = alternative
                break
        if severity not in mapping:
            # No known alternative: fall back to the first project priority
            mapping[severity] = project_priorities[0]
            logging.error(
                "Failed to find Jira mapping for %s, using %s as a fallback",
                severity, mapping[severity])
    jira_service.client.close()
    return mapping
Exemple #4
0
 def validate_config(config):
     """ Validate config: all mandatory options must be present """
     not_set = []
     for option in ("server", "login", "password", "mail_to"):
         if option not in config:
             not_set.append(option)
     if not not_set:
         return
     error = f"Required configuration options not set: {', '.join(not_set)}"
     log.error(error)
     raise ValueError(error)
Exemple #5
0
 def validate_config(config):
     """ Validate config: check that mandatory options are set """
     required = ("rp_project_name", "rp_launch_name", "rp_host", "rp_token")
     missing = [option for option in required if option not in config]
     if missing:
         message = f"Required configuration options not set: {', '.join(missing)}"
         log.error(message)
         raise ValueError(message)
Exemple #6
0
 def validate_config(config):
     """ Validate config: the 'code' option is mandatory """
     if "code" in config:
         return
     error = "Required configuration options not set: code"
     log.error(error)
     raise ValueError(error)
Exemple #7
0
 def validate_config(config):
     """ Validate config: ensure required connection options are present """
     missing = [
         option
         for option in ("url", "username", "password", "project")
         if option not in config
     ]
     if missing:
         error = f"Required configuration options not set: {', '.join(missing)}"
         log.error(error)
         raise ValueError(error)
Exemple #8
0
 def __init__(self, url, user, password, project, fields=None):
     """ Initialize Jira wrapper and pre-resolve issue fields.

     :param url: Jira base URL
     :param user: Jira user name
     :param password: Jira password or API token
     :param project: Jira project key (upper-cased before use)
     :param fields: optional dict of default issue fields; a 'watchers'
         entry (comma-separated string) is extracted into self.watchers,
         the remaining entries are matched against Jira field metadata
         and normalized according to the field schema type
     """
     self.valid = True
     self.url = url
     self.password = password
     self.user = user
     try:
         self.connect()
     except:
         # NOTE(review): bare except — any failure just flags the wrapper
         # invalid instead of propagating; callers check self.valid
         logging.error("Failed to connect to Jira")
         self.valid = False
         return
     self.projects = [project.key for project in self.client.projects()]
     self.project = project.upper()
     if self.project not in self.projects:
         # self.client.close()
         # Missing project is only warned about; processing continues
         logging.warning("Requested project not found in Jira projects")
         # self.valid = False
         # return
     # Resolved issue fields (keyed by Jira field id)
     self.fields = {}
     # Watcher user names (kept separately, not a regular issue field)
     self.watchers = []
     if isinstance(fields, dict):
         if 'watchers' in fields.keys():
             self.watchers = [item.strip() for item in fields.pop('watchers').split(",")]
         all_jira_fields = self.client.fields()
         for key, value in fields.items():
             if value:
                 if isinstance(value, str) and const.JIRA_FIELD_DO_NOT_USE_VALUE in value:
                     # Marker value: keep as-is, skip metadata resolution
                     self.fields[key] = value
                     continue
                 # Match by field id first, then by human-readable name
                 jira_keys = [item for item in all_jira_fields if item["id"] == key]
                 if not jira_keys:
                     jira_keys = [item for item in all_jira_fields
                                  if item["name"].lower() == key.lower().replace('_', ' ')]
                 if len(jira_keys) == 1:
                     jira_key = jira_keys[0]
                     key_type = jira_key['schema']['type']
                 else:
                     # Ambiguous or unknown field: drop it rather than guess
                     logging.warning(f'Cannot recognize field {key}. This field will not be used.')
                     continue
                 # Normalize the value according to the field schema type
                 if key_type in ['string', 'number', 'any'] or isinstance(value, dict):
                     _value = value
                 elif key_type == 'array':
                     if isinstance(value, str):
                         _value = [item.strip() for item in value.split(",")]
                     elif isinstance(value, int):
                         _value = [value]
                     else:
                         _value = value
                 else:
                     # Other schema types are wrapped as named objects
                     # (presumably option-like Jira fields — confirm)
                     _value = {'name': value}
                 self.fields[jira_key['id']] = _value
     if not self.fields.get('issuetype', None):
         # Placeholder issue type; recognized and replaced downstream
         self.fields['issuetype'] = {'name': '!default_issuetype'}
     self.client.close()
     self.created_jira_tickets = list()
Exemple #9
0
 def execute(self, args):
     """ Run the command.

     Orchestrates a full run: loads config for the selected suite, wires up
     scanning/processing/reporting performers, validates configs, runs all
     stages in order, then saves state and flushes reports.

     :param args: parsed command-line arguments namespace
     """
     log.debug("Starting")
     if args.call_from_legacy:
         log.warning("Called from legacy entry point")
     # Init context
     context = RunContext(args)
     config = ConfigModel(context)
     # With --list-suites (or when no suite is selected) only list suites
     if args.list_suites or not args.suite:
         suites = config.list_suites(args.config_seed, args.config_variable,
                                     args.config_file)
         if not args.suite:
             log.error("Suite is not defined. Use --help to get help")
         log.info("Available suites: %s", ", ".join(suites))
         return
     # Make instances
     scanning = ScanningPerformer(context)
     processing = ProcessingPerformer(context)
     reporting = ReportingPerformer(context)
     # Add to context
     context.performers["scanning"] = scanning
     context.performers["processing"] = processing
     context.performers["reporting"] = reporting
     # Init config
     config.load(args.config_seed, args.config_variable, args.config_file,
                 args.suite)
     # Each performer validates its own slice of the config before anything runs
     scanning.validate_config(context.config)
     processing.validate_config(context.config)
     reporting.validate_config(context.config)
     # Add meta to context
     self._fill_context_meta(context)
     # Load state
     context.state.load()
     # Prepare reporters first
     # (presumably so reporters can observe later stages — confirm)
     reporting.prepare()
     # Run actions
     actions.run(context)
     # Prepare scanning and processing
     scanning.prepare()
     processing.prepare()
     # Perform
     scanning.perform()
     processing.perform()
     reporting.perform()
     # Done
     context.state.save()
     reporting.flush()
     log.debug("Done")
     # Show quality gate statistics if any
     for line in context.get_meta("quality_gate_stats", list()):
         log.info(line)
     # Fail quality gate if needed
     if context.get_meta("fail_quality_gate", False):
         # os._exit skips cleanup/atexit handlers and forces exit code 1
         os._exit(1)  # pylint: disable=W0212
Exemple #10
0
 def validate_config(config):
     """ Validate config: all Qualys connection options must be set """
     missing = []
     for option in (
             "qualys_api_server", "qualys_login", "qualys_password",
             "qualys_option_profile_id", "qualys_report_template_id",
             "qualys_scanner_type", "target"
     ):
         if option not in config:
             missing.append(option)
     if missing:
         error = f"Required configuration options not set: {', '.join(missing)}"
         log.error(error)
         raise ValueError(error)
Exemple #11
0
 def execute(self):
     """ Run the scanner.

     Walks the configured code directory, applies exclude rules and feeds
     the remaining files to NodeJsScan (njsscan), then parses and stores
     the results.
     """
     # Check if we are running inside SAST container
     if njsscan is None:
         log.error("NodeJsScan is not installed in this environment")
         return
     # Replace print function to hide njsscan print()s
     original_print = print
     builtins.print = lambda *args, **kwargs: log.debug(" ".join(
         [str(item) for item in args]))
     try:
         # Prepare excludes (may be a list or a comma-separated string)
         excludes = self.config.get("excludes", list())
         if not isinstance(excludes, list):
             excludes = [item.strip() for item in excludes.split(",")]
         log.debug("Excludes: %s", excludes)
         # Collect files to scan
         scan_target = list()
         base = os.path.normpath(self.config.get("code"))
         for root, _, files in os.walk(base):
             # Normalize relative dir path
             subpath = os.path.normpath(root)[len(base):]
             if subpath.startswith(os.sep):
                 subpath = subpath[len(os.sep):]
             # Check if dir (or any parent) is in excludes
             # (directory excludes are entries ending with the path separator)
             skip_dir = False
             for item in excludes:
                 if item.endswith(os.sep) and subpath.startswith(item):
                     skip_dir = True
             # Skip dir if needed
             if subpath + os.sep in excludes or skip_dir:
                 log.debug("Skipping dir %s", root)
                 continue
             # Iterate files
             for name in files:
                 target = os.path.join(root, name)
                 # Skip file if in excludes (direct match)
                 if os.path.join(subpath, name) in excludes:
                     log.debug("Skipping file %s", target)
                     continue
                 # Add to files to scan
                 scan_target.append(target)
         # Run scanner
         result = njsscan.scan_file(scan_target)
     finally:
         # Restore print function even on failure so other modules keep
         # the normal print() behavior
         builtins.print = original_print
     # Parse result
     parse_findings(result, self)
     # Save intermediates
     self.save_intermediates(result)
Exemple #12
0
 def execute(self):
     """ Run the scanner.

     Starts the ZAP daemon, runs spider/AJAX spider/active scan phases
     (waiting for the passive scanner between phases), then always
     collects the report, saves intermediates and stops ZAP.
     """
     try:
         self._start_zap()
         if not self._wait_for_zap_start():
             log.error("ZAP failed to start")
             error = Error(
                 tool=self.get_name(),
                 error="ZAP failed to start",
                 details="ZAP daemon failed to start"
             )
             self.errors.append(error)
             return
         log.info("Target: %s", self.config.get("target"))
         self._prepare_context()
         self._setup_scan_policy()
         self._spider()
         self._wait_for_passive_scan()
         self._ajax_spider()
         self._wait_for_passive_scan()
         self._active_scan()
         self._wait_for_passive_scan()
     except BaseException:  # pylint: disable=W0703
         # Scan failures are collected, not raised: report is still processed
         log.exception("Exception during ZAP scanning")
         error = Error(
             tool=self.get_name(),
             error="Exception during ZAP scanning",
             details=f"```\n{traceback.format_exc()}\n```"
         )
         self.errors.append(error)
     finally:
         try:
             # Get report
             log.info("Getting ZAP report")
             zap_report = self._zap_api.core.jsonreport()
             # Parse JSON
             log.info("Processing findings")
             parse_findings(zap_report, self)
         except BaseException:  # pylint: disable=W0703
             log.exception("Exception during ZAP findings processing")
             error = Error(
                 tool=self.get_name(),
                 error="Exception during ZAP findings processing",
                 details=f"```\n{traceback.format_exc()}\n```"
             )
             self.errors.append(error)
         # Cleanup happens regardless of scan/report outcome
         self._save_intermediates()
         pkg_resources.cleanup_resources()
         self._stop_zap()
Exemple #13
0
 def execute(self):
     """ Run the scanner.

     Starts the ZAP daemon, verifies it came up, and always stops it.
     Errors are collected into self.errors instead of being raised.
     """
     # Lazy %-args: config is only rendered when DEBUG logging is enabled
     log.debug("Config: %s", self.config)
     try:
         self._start_zap()
         if not self._wait_for_zap_start():
             log.error("ZAP failed to start")
             self.errors.append("ZAP daemon failed to start")
             return
         log.info("Target: %s", self.config.get("target"))
     except BaseException as exception:
         log.exception("Exception during ZAP scanning")
         self.errors.append(str(exception))
     finally:
         self._stop_zap()
Exemple #14
0
def _walk_module_depencies(module_name, module_name_map, module_order, visited_modules):
    # Collect depencies
    depencies = list()
    depencies.extend(module_name_map[module_name].depends_on())
    for optional_dependency in module_name_map[module_name].run_after():
        if optional_dependency in module_name_map:
            depencies.append(optional_dependency)
    # Resolve
    visited_modules.add(module_name)
    for dependency in depencies:
        if dependency not in module_order:
            if dependency in visited_modules:
                log.error("Circular dependency (%s <-> %s)", dependency, module_name)
                raise RuntimeError("Circular dependency present")
            _walk_module_depencies(dependency, module_name_map, module_order, visited_modules)
    # Add to resolved order
    module_order.append(module_name)
Exemple #15
0
 def _create_vault_client(self):
     """ Create and authenticate an hvac client from self.config """
     cfg = self.config
     client = hvac.Client(
         url=cfg["url"],
         verify=cfg.get("ssl_verify", False),
         namespace=cfg.get("namespace", None),
     )
     # Auth methods are not mutually exclusive: apply every configured one
     if "auth_token" in cfg:
         client.token = cfg["auth_token"]
     if "auth_username" in cfg:
         client.auth_userpass(cfg.get("auth_username"),
                              cfg.get("auth_password", ""))
     if "auth_role_id" in cfg:
         client.auth_approle(cfg.get("auth_role_id"),
                             cfg.get("auth_secret_id", ""))
     if client.is_authenticated():
         return client
     error = "Vault authentication failed"
     log.error(error)
     raise ValueError(error)
Exemple #16
0
 def get_or_create_issue(self, search_string, issue_data):
     """ Find an existing Jira issue with a matching summary or create one.

     :param search_string: JQL query used to pre-filter candidate issues
     :param issue_data: issue fields; 'summary' is used for exact matching
     :return: tuple (issue, created) — created is True only for new issues
     """
     issuetype = issue_data['issuetype']
     created = False
     jira_results = self.client.search_issues(search_string)
     issues = []
     for each in jira_results:
         if each.fields.summary == issue_data.get('summary', None):
             issues.append(each)
     if issues:
         # BUGFIX: was `if len(issues) == 1:`, which made the duplicate
         # branch below unreachable and created a new (duplicate) issue
         # whenever more than one match already existed
         issue = issues[0]
         if len(issues) > 1:
             logging.error('  more then 1 issue with the same summary')
         else:
             logging.info(f'  {issuetype["name"]} already exists: {issue.key}')
     else:
         issue = self.post_issue(issue_data)
         created = True
     return issue, created
Exemple #17
0
def get_project_priorities(jira_client, project, issue_type="Bug"):
    """ Returns list of Jira priorities in project """
    try:
        meta = jira_client.createmeta(
            projectKeys=project,
            issuetypeNames=issue_type,
            expand="projects.issuetypes.fields",
        )
        logging.debug("Got metadata for %d projects", len(meta["projects"]))
        if not meta["projects"]:
            logging.error("No meta returned for %s with type %s", project,
                          issue_type)
            return []
        project_meta = meta["projects"][0]
        logging.debug("Got metadata for %d issuetypes",
                      len(project_meta["issuetypes"]))
        if not project_meta["issuetypes"]:
            logging.error("No %s in %s", issue_type, project)
            return []
        first_issuetype = project_meta["issuetypes"][0]
        if "priority" not in first_issuetype["fields"]:
            logging.error("No priority field in %s", project)
            return []
        return [
            value["name"]
            for value in first_issuetype["fields"]["priority"]["allowedValues"]
        ]
    except:  # pylint: disable=W0702
        # Any metadata failure degrades to "no priorities known"
        logging.exception("Failed to get meta for %s", project)
        return []
Exemple #18
0
 def report(self):
     """ Upload the jUnit report artifact to Galloper """
     log.info("Sending jUnit report to Galloper")
     # Options
     bucket = self.config.get("bucket")
     tgtobj = self.config.get("object")
     # Locate the report produced by the jUnit reporting module
     reporting = self.context.performers["reporting"]
     junit_report_file = reporting.get_module_meta("junit", "report_file", None)
     if not junit_report_file:
         log.error("jUnit report not present")
         return
     # Build request data for the Galloper artifact API
     headers = dict()
     token = os.environ.get("token")
     if token:
         headers["Authorization"] = f"Bearer {token}"
     url = (
         f"{os.environ.get('galloper_url')}/api/v1/artifacts/"
         f"{os.environ.get('project_id')}/{bucket}/{tgtobj}"
     )
     # Send to Galloper
     with open(junit_report_file, "rb") as tgt_file:
         requests.post(
             url, headers=headers, files={
                 "file": (f"{tgtobj}", tgt_file)
             }
         )
Exemple #19
0
 def _validate_config_base(self, config):
     """ Check config version, suites section and the selected suite """
     version = config.get(constants.CONFIG_VERSION_KEY, 0)
     if version != constants.CURRENT_CONFIG_VERSION:
         log.error("Invalid config version")
         return False
     if "suites" not in config:
         log.error("Suites are not defined")
         return False
     if not config["suites"].get(self.context.suite, None):
         log.error("Suite is not defined: %s", self.context.suite)
         return False
     return True
Exemple #20
0
 def _validate_config_base(self, config, suite):
     """ Validate config version and suite, filling in default sections """
     if config.get(constants.CONFIG_VERSION_KEY, 0) != constants.CURRENT_CONFIG_VERSION:
         log.error("Invalid config version")
         return False
     # Make sure the global section exists
     config.setdefault("global", dict())
     if "suites" not in config:
         log.error("Suites are not defined")
         return False
     if not config["suites"].get(suite, None):
         log.error("Suite is not defined: %s", suite)
         log.info("Available suites: %s", ", ".join(list(config["suites"])))
         return False
     # Make sure the suite has a settings section
     config["suites"][suite].setdefault("settings", dict())
     return True
Exemple #21
0
 def validate_config(config):
     """ Validate config: at least the reporters section must exist """
     if "reporters" in config:
         return
     log.error("No reporters defined in config")
     raise ValueError("No reporters configuration present")
Exemple #22
0
 def validate_config(config):
     """ Validate config: a scan target must be configured """
     log.debug(f"Config: {config}")
     if "target" in config:
         return
     log.error("No target defined in config")
     raise ValueError("No target configuration present")
Exemple #23
0
def parse_findings(output_file, scanner):
    """ Parse findings (code from dusty 1.0).

    Reads an Nmap XML report, builds one finding per open (or, when
    include_unfiltered is set, open/unfiltered) port, and appends
    DastFinding objects to scanner.findings.

    :param output_file: path to the Nmap XML output file
    :param scanner: scanner module instance (provides config/errors/findings)
    """
    log.debug("Parsing findings")
    nscan = parse(output_file)
    root = nscan.getroot()
    # Check validity
    if "nmaprun" not in root.tag:
        log.error(
            "Exception during Nmap findings processing: invalid XML file")
        error = Error(
            tool=scanner.get_name(),
            error="Exception during Nmap findings processing",
            details="Output file doesn't seem to be a valid Nmap xml file.")
        scanner.errors.append(error)
        return
    dupes = dict()
    # NOTE(review): host_info accumulates across hosts (kept from dusty 1.0);
    # descriptions of later hosts repeat data of earlier ones — confirm intent
    host_info = ""
    for host in root.iter("host"):
        ip = host.find("address[@addrtype='ipv4']").attrib["addr"]
        fqdn = None
        if host.find("hostnames/hostname[@type='PTR']") is not None:
            fqdn = host.find("hostnames/hostname[@type='PTR']").attrib["name"]
        #
        # NOTE(review): iterates os elements of the whole report (root),
        # not only this host — kept as-is from dusty 1.0
        for os_element in root.iter("os"):
            if ip is not None:
                host_info += "IP Address: %s\n" % ip
            if fqdn is not None:
                # BUGFIX: was `fqdn += "FQDN: %s\n" % ip`, which appended the
                # IP to the unused fqdn variable and discarded the result;
                # the FQDN now goes into the description as intended
                host_info += "FQDN: %s\n" % fqdn
            for osv in os_element.iter("osmatch"):
                if "name" in osv.attrib:
                    host_info += "Host OS: %s\n" % osv.attrib["name"]
                if "accuracy" in osv.attrib:
                    host_info += "Accuracy: {0}%\n".format(
                        osv.attrib["accuracy"])
            host_info += "\n"
        #
        xpath_port_selector = "ports/port[state/@state='open']"
        if scanner.config.get("include_unfiltered", False):
            xpath_port_selector = "ports/port[state/@state=('open','unfiltered')]"
        #
        for portelem in elementpath.select(host, xpath_port_selector):
            port = portelem.attrib["portid"]
            protocol = portelem.attrib["protocol"]
            #
            title = f"Open port: {ip}:{port}/{protocol}"
            description = host_info
            description += f"Port: {port}\n"
            serviceinfo = ""
            #
            service = portelem.find("service")
            if service is not None:
                # Collect optional service attributes in a fixed order
                for attrib_key, label in (("product", "Product"),
                                          ("version", "Version"),
                                          ("extrainfo", "Extra Info")):
                    if attrib_key in service.attrib:
                        serviceinfo += "%s: %s\n" % (label, service.attrib[attrib_key])
                description += serviceinfo
            #
            description += "\n\n"
            #
            # Deduplicate per ip/port/protocol, merging descriptions
            dupe_key = f"{port}_{protocol}_{ip}"
            if dupe_key in dupes:
                find = dupes[dupe_key]
                if description is not None:
                    find["description"] += description
            else:
                find = {
                    "title": title,
                    "description": description,
                    "endpoints": list()
                }
                find["endpoints"].append(f"{ip}:{port}/{protocol}")
                dupes[dupe_key] = find
    # Create finding objects
    for item in dupes.values():
        finding = DastFinding(title=item["title"],
                              description=markdown.markdown_escape(
                                  item["description"]))
        finding.set_meta("tool", scanner.get_name())
        finding.set_meta("severity", SEVERITIES[-1])
        # Endpoints (for backwards compatibility)
        endpoints = list()
        for entry in item["endpoints"]:
            endpoint = url.parse_url(entry)
            if endpoint in endpoints:
                continue
            endpoints.append(endpoint)
        finding.set_meta("endpoints", endpoints)
        log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
        # Done
        scanner.findings.append(finding)
0
 def report_multi(self):
     """ Report """
     wrappers_config = dict()
     wrappers_config[None] = self.config
     for key, value in self.config.get("dynamic_jira").items():
         wrappers_config[re.compile(key)] = value
     #
     wrappers = dict()
     for wrapper_key, wrapper_config in wrappers_config.items():
         wrapper = JiraWrapper(
             wrapper_config.get("url"),
             wrapper_config.get("username"),
             wrapper_config.get("password"),
             wrapper_config.get("project"),
             wrapper_config.get("fields"),
         )
         #
         if not wrapper.valid:
             # Save default mapping to meta as a fallback
             default_mapping = constants.JIRA_SEVERITY_MAPPING
             default_mapping.update(
                 self.config.get("custom_mapping", dict()))
             self.set_meta("mapping", default_mapping)
             # Report error
             log.error(
                 "Jira configuration is invalid. Skipping Jira reporting")
             raise RuntimeError("Jira configuration is invalid")
         #
         wrappers[wrapper_key] = dict()
         wrappers[wrapper_key]["wrapper"] = wrapper
         wrappers[wrapper_key]["config"] = wrapper_config
         #
         if wrapper_config.get("separate_epic_linkage", False) and \
                 "Epic Link" in wrapper_config.get("fields"):
             wrappers[wrapper_key]["epic_link"] = wrapper_config.get(
                 "fields").pop("Epic Link")
         #
         wrappers[wrapper_key]["raw_epic_link"] = None
         if wrapper_config.get("separate_epic_linkage", False):
             wrappers[wrapper_key]["raw_epic_link"] = wrappers[wrapper_key][
                 "epic_link"]
         elif "Epic Link" in wrapper_config.get("fields"):
             wrappers[wrapper_key]["raw_epic_link"] = wrapper_config.get(
                 "fields")["Epic Link"]
         #
         wrappers[wrapper_key]["priority_mapping"] = wrapper_config.get(
             "custom_mapping", prepare_jira_mapping(wrapper))
         wrappers[wrapper_key]["mapping_meta"] = dict(
             wrappers[wrapper_key]["priority_mapping"])
         #
     self.set_meta("wrapper", wrappers[None]["wrapper"])
     self.set_meta("raw_epic_link", wrappers[None]["raw_epic_link"])
     #
     dynamic_label_mapping = dict()
     if self.config.get("dynamic_labels", None):
         try:
             for key, value in self.config.get("dynamic_labels").items():
                 dynamic_label_mapping[re.compile(key)] = value
         except:  # pylint: disable=W0702
             log.exception("Failed to add dynamic label mapping")
     #
     dynamic_field_mapping = dict()
     if self.config.get("dynamic_fields", None):
         try:
             for key, value in self.config.get("dynamic_fields").items():
                 dynamic_field_mapping[re.compile(key)] = value
         except:  # pylint: disable=W0702
             log.exception("Failed to add dynamic field mapping")
     #
     findings = list()
     for item in self.context.findings:  # pylint: disable=R1702
         #
         if item.get_meta("information_finding", False) or \
                 item.get_meta("false_positive_finding", False) or \
                 item.get_meta("excluded_finding", False):
             continue
         #
         if isinstance(item, (DastFinding, SastFinding)):
             #
             dynamic_labels = list()
             dynamic_fields = list()
             dynamic_wrapper = wrappers[None]
             #
             for endpoint in item.get_meta("endpoints", list()):
                 #
                 for pattern, addon_label in dynamic_label_mapping.items():
                     try:
                         if pattern.match(endpoint.raw):
                             dynamic_labels.append(addon_label)
                     except:  # pylint: disable=W0702
                         log.exception("Failed to add dynamic label")
                 #
                 for pattern, addon_fields in dynamic_field_mapping.items():
                     try:
                         if pattern.match(endpoint.raw):
                             dynamic_fields.append(addon_fields)
                     except:  # pylint: disable=W0702
                         log.exception("Failed to add dynamic field")
                 #
                 for pattern, addon_jira in wrappers.items():
                     if pattern is None:
                         continue
                     try:
                         if pattern.match(endpoint.raw):
                             dynamic_wrapper = addon_jira
                     except:  # pylint: disable=W0702
                         log.exception("Failed to add dynamic JIRA")
             #
             severity = item.get_meta("severity", SEVERITIES[-1])
             priority = constants.JIRA_SEVERITY_MAPPING[severity]
             if dynamic_wrapper["priority_mapping"] and \
                     priority in dynamic_wrapper["priority_mapping"]:
                 priority = dynamic_wrapper["priority_mapping"][priority]
             dynamic_wrapper["mapping_meta"][severity] = priority
             #
             if isinstance(item, DastFinding):
                 findings.append({
                     "title":
                     item.title,
                     "priority":
                     priority,
                     "description":
                     item.description.replace("\\.", "."),
                     "issue_hash":
                     item.get_meta("issue_hash", "<no_hash>"),
                     "additional_labels": [
                         label.replace(" ", "_") for label in [
                             item.get_meta("tool", "scanner"),
                             self.context.get_meta("testing_type", "DAST"),
                             item.get_meta("severity", SEVERITIES[-1])
                         ]
                     ] + dynamic_labels,
                     "dynamic_fields":
                     dynamic_fields,
                     "raw":
                     item,
                     "wrapper":
                     dynamic_wrapper,
                 })
             elif isinstance(item, SastFinding):
                 #
                 description_chunks = [
                     item.replace("\\.", ".").replace(
                         "<pre>", "{code:collapse=true}\n\n").replace(
                             "</pre>",
                             "\n\n{code}").replace("<br />", "\n")
                     for item in item.description
                 ]
                 #
                 if len("\n\n".join(description_chunks)
                        ) > constants.JIRA_DESCRIPTION_MAX_SIZE:
                     description = description_chunks[0]
                     chunks = description_chunks[1:]
                     comments = list()
                     new_line_str = '  \n  \n'
                     for chunk in chunks:
                         if not comments or (
                                 len(comments[-1]) + len(new_line_str) + len(chunk)
                             ) >= \
                                 constants.JIRA_COMMENT_MAX_SIZE:
                             comments.append(cut_jira_comment(chunk))
                         else:  # Last comment can handle one more chunk
                             comments[
                                 -1] += new_line_str + cut_jira_comment(
                                     chunk)
                 else:
                     description = "\n\n".join(description_chunks)
                     comments = list()
                 #
                 findings.append({
                     "title":
                     item.title,
                     "priority":
                     priority,
                     "description":
                     description,
                     "issue_hash":
                     item.get_meta("issue_hash", "<no_hash>"),
                     "additional_labels": [
                         label.replace(" ", "_") for label in [
                             item.get_meta("tool", "scanner"),
                             self.context.get_meta("testing_type", "SAST"),
                             item.get_meta("severity", SEVERITIES[-1])
                         ]
                     ] + dynamic_labels,
                     "dynamic_fields":
                     dynamic_fields,
                     "comments":
                     comments,
                     "raw":
                     item,
                     "wrapper":
                     dynamic_wrapper,
                 })
             #
         #
         else:
             log.warning("Unsupported finding type")
             continue  # raise ValueError("Unsupported item type")
     #
     self.set_meta("mapping", wrappers[None]["mapping_meta"])
     #
     for finding in findings:
         if finding["wrapper"]["config"].get("max_description_size", False):
             if len(finding["description"]) > \
                     int(finding["wrapper"]["config"].get("max_description_size")):
                 if "comments" not in finding:
                     finding["comments"] = list()
                 #
                 comment_chunks = list()
                 cut_line_len = len(constants.JIRA_DESCRIPTION_CUT)
                 cut_point = int(finding["wrapper"]["config"].get(
                     "max_description_size")) - cut_line_len
                 #
                 item_description = finding["description"]
                 finding["description"] = \
                     f"{item_description[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                 #
                 description_data = item_description[cut_point:]
                 comment_cut_threshold = min(
                     constants.JIRA_COMMENT_MAX_SIZE,
                     int(finding["wrapper"]["config"].get(
                         "max_description_size")))
                 cut_point = comment_cut_threshold - cut_line_len
                 #
                 while description_data:
                     if len(description_data) > comment_cut_threshold:
                         comment_chunks.append(
                             f"{description_data[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                         )
                         description_data = description_data[cut_point:]
                     else:
                         comment_chunks.append(description_data)
                         break
                 #
                 while comment_chunks:
                     finding["comments"].insert(0, comment_chunks.pop())
     #
     findings.sort(key=lambda item: (SEVERITIES.index(item["raw"].get_meta(
         "severity", SEVERITIES[-1])), item["raw"].get_meta("tool", ""),
                                     item["raw"].title))
     #
     new_tickets = list()
     existing_tickets = list()
     #
     for _, local_wrapper in wrappers.items():
         local_wrapper["wrapper"].connect()
     #
     for finding in findings:
         try:
             config_labels = finding["wrapper"]["config"].get(
                 "additional_labels", None)
             if config_labels is None:
                 config_labels = list()
             if not isinstance(config_labels, list):
                 config_labels = [
                     item.strip() for item in config_labels.split(",")
                 ]
             #
             field_overrides = dict()
             for dynamic_field in finding["dynamic_fields"]:
                 field_overrides.update(dynamic_field)
             #
             issue, created = finding["wrapper"]["wrapper"].create_issue(
                 finding["title"],  # title
                 finding["priority"],  # priority
                 finding["description"],  # description
                 finding["issue_hash"],  # issue_hash, self.get_hash_code()
                 # attachments=None,
                 # get_or_create=True,
                 additional_labels=finding["additional_labels"] +
                 config_labels,  # additional_labels  # pylint: disable=C0301
                 field_overrides=field_overrides,
             )
             if created and "comments" in finding:
                 for comment in finding["comments"]:
                     finding["wrapper"]["wrapper"].add_comment_to_issue(
                         issue, comment)
             if created and finding["wrapper"]["config"].get(
                     "separate_epic_linkage", False):
                 try:
                     finding["wrapper"][
                         "wrapper"].client.add_issues_to_epic(
                             finding["wrapper"]["epic_link"],
                             [str(issue.key)])
                 except:  # pylint: disable=W0702
                     log.exception("Failed to add ticket %s to epic %s",
                                   str(issue.key),
                                   finding["wrapper"]["epic_link"])
             try:
                 result_priority = str(issue.fields.priority)
             except:  # pylint: disable=W0702
                 result_priority = "Default"
             #
             ticket_meta = {
                 "jira_id":
                 issue.key,
                 "jira_url":
                 f"{finding['wrapper']['config'].get('url')}/browse/{issue.key}",  # pylint: disable=C0301
                 "priority":
                 result_priority,
                 "status":
                 issue.fields.status.name,
                 "created":
                 issue.fields.created,
                 "open_date":
                 datetime.strptime(
                     issue.fields.created,
                     "%Y-%m-%dT%H:%M:%S.%f%z").strftime("%d %b %Y %H:%M"),
                 "description":
                 issue.fields.summary,
                 "assignee":
                 str(issue.fields.assignee),
                 "raw_created":
                 str(issue.fields.created),
                 "raw_severity":
                 finding["raw"].get_meta("severity", SEVERITIES[-1]),
                 "raw_jira_url":
                 finding["wrapper"]["config"].get("url"),
                 "raw_jira_project":
                 finding["wrapper"]["config"].get("project"),
                 "raw_jira_epic":
                 finding["wrapper"]["raw_epic_link"],
                 "raw_jira_fields":
                 finding["wrapper"]["config"].get("fields"),
                 "raw_addon_fields":
                 field_overrides,
                 "raw_addon_labels":
                 finding["additional_labels"] + config_labels,
             }
             if created:
                 if not self._ticket_in_list(ticket_meta, new_tickets):
                     new_tickets.append(ticket_meta)
             else:
                 if issue.fields.status.name in constants.JIRA_OPENED_STATUSES:
                     if not self._ticket_in_list(ticket_meta,
                                                 existing_tickets):
                         existing_tickets.append(ticket_meta)
         except:  # pylint: disable=W0702
             log.exception(
                 f"Failed to create ticket for {finding['title']}")
             error = Error(
                 tool=self.get_name(),
                 error=f"Failed to create ticket for {finding['title']}",
                 details=f"```\n{traceback.format_exc()}\n```")
             self.errors.append(error)
     #
     self.set_meta("new_tickets", new_tickets)
     self.set_meta("existing_tickets", existing_tickets)
Exemple #25
0
def interrupt_handler(signal_, frame):  # pylint: disable=W0613
    """ Handle interrupt signals

    Logs the event and terminates the whole process immediately via
    os._exit(), which skips atexit handlers, finally blocks and
    interpreter cleanup — intentional here so a stuck run cannot block
    termination. Exit code 1 signals abnormal shutdown.
    """
    log.error("Got interrupt signal. Terminating")
    os._exit(1)  # pylint: disable=W0212
Exemple #26
0
 def run(self):
     """ Run action: clone a git repository and check out the requested branch

     Reads options from self.config:
         source, target         -- clone source URL and destination path
         branch                 -- branch to check out (default "master")
         depth                  -- optional shallow-clone depth
         username/password/key/key_data -- optional auth material
         delete_git_dir         -- remove target/.git after checkout
     Falls back to the repository default branch when the requested
     branch does not exist.
     """
     # Patch dulwich to work without valid UID/GID
     dulwich.repo.__original__get_default_identity = dulwich.repo._get_default_identity  # pylint: disable=W0212
     dulwich.repo._get_default_identity = _dulwich_repo_get_default_identity  # pylint: disable=W0212
     # Patch dulwich to use paramiko SSH client
     dulwich.client.get_ssh_vendor = ParamikoSSHVendor
     # Patch paramiko to skip key verification
     paramiko.transport.Transport._verify_key = _paramiko_transport_verify_key  # pylint: disable=W0212
     # Set USERNAME if needed: getpass.getuser() raises when the current
     # UID has no passwd entry (e.g. inside minimal containers)
     try:
         getpass.getuser()
     except:  # pylint: disable=W0702
         os.environ["USERNAME"] = "******"
     # Get options
     source = self.config.get("source")
     target = self.config.get("target")
     branch = self.config.get("branch", "master")
     depth = self.config.get("depth", None)
     # Prepare auth
     auth_args = dict()
     if self.config.get("username", None) is not None:
         auth_args["username"] = self.config.get("username")
     if self.config.get("password", None) is not None:
         auth_args["password"] = self.config.get("password")
     if self.config.get("key", None) is not None:
         auth_args["key_filename"] = self.config.get("key")
     if self.config.get("key_data", None) is not None:
         # Inline key data uses "|" as a newline placeholder
         key_obj = io.StringIO(
             self.config.get("key_data").replace("|", "\n"))
         pkey = paramiko.RSAKey.from_private_key(key_obj)
         # Patch paramiko to use our key
         paramiko.client.SSHClient._auth = _paramiko_client_SSHClient_auth(  # pylint: disable=W0212
             paramiko.client.SSHClient._auth,
             pkey  # pylint: disable=W0212
         )
     # Clone repository (checkout deferred until the right ref is known)
     log.info("Cloning repository %s into %s", source, target)
     repository = porcelain.clone(source,
                                  target,
                                  checkout=False,
                                  depth=depth,
                                  errstream=log.DebugLogStream(),
                                  **auth_args)
     # Get current HEAD tree (default branch)
     try:
         head_tree = repository[b"HEAD"]
     except:  # pylint: disable=W0702
         head_tree = None
     # Get target tree (requested branch)
     branch_b = branch.encode("utf-8")
     try:
         target_tree = repository[b"refs/remotes/origin/" + branch_b]
     except:  # pylint: disable=W0702
         target_tree = None
     # Checkout branch
     if target_tree is not None:
         log.info("Checking out branch %s", branch)
         repository[b"refs/heads/" +
                    branch_b] = repository[b"refs/remotes/origin/" +
                                           branch_b]
         repository.refs.set_symbolic_ref(b"HEAD",
                                          b"refs/heads/" + branch_b)
         repository.reset_index(repository[b"HEAD"].tree)
     elif head_tree is not None:
         # Requested branch missing: fall back to the default branch
         try:
             default_branch_name = repository.refs.follow(b"HEAD")[0][1]
             if default_branch_name.startswith(refs.LOCAL_BRANCH_PREFIX):
                 default_branch_name = default_branch_name[
                     len(refs.LOCAL_BRANCH_PREFIX):]
             default_branch_name = default_branch_name.decode("utf-8")
             log.warning(
                 "Branch %s was not found. Checking out default branch %s",
                 branch, default_branch_name)
         except:  # pylint: disable=W0702
             log.warning(
                 "Branch %s was not found. Trying to check out default branch",
                 branch)
         try:
             repository.reset_index(repository[b"HEAD"].tree)
         except:  # pylint: disable=W0702
             log.exception("Failed to checkout default branch")
     else:
         # BUGFIX: the original call had a "%s" placeholder but passed no
         # argument, producing a malformed log message
         log.error(
             "Branch %s was not found and default branch is not set. Skipping checkout",
             branch)
     # Delete .git if requested
     if self.config.get("delete_git_dir", False):
         log.info("Deleting .git directory")
         shutil.rmtree(os.path.join(target, ".git"))
Exemple #27
0
 def execute(self, args):
     """ Run the command

     Clones args.source into args.target and checks out args.branch.
     Auth material is taken first from environment variables named by
     args.*_variable, then overridden by direct commandline parameters.
     Exits early (with an error log) when source/target are missing.
     """
     log.debug("Starting")
     # Check args
     if not args.source or not args.target:
         log.error("Please specify source and target.")
         return
     # Patch dulwich to work without valid UID/GID
     dulwich.repo.__original__get_default_identity = dulwich.repo._get_default_identity  # pylint: disable=W0212
     dulwich.repo._get_default_identity = _dulwich_repo_get_default_identity  # pylint: disable=W0212
     # Patch dulwich to use paramiko SSH client
     dulwich.client.get_ssh_vendor = ParamikoSSHVendor
     # Patch paramiko to skip key verification
     paramiko.transport.Transport._verify_key = _paramiko_transport_verify_key  # pylint: disable=W0212
     # Set USERNAME if needed: getpass.getuser() raises when the current
     # UID has no passwd entry (e.g. inside minimal containers)
     try:
         getpass.getuser()
     except:  # pylint: disable=W0702
         os.environ["USERNAME"] = "******"
     # Fill args
     depth = None
     if args.depth:
         depth = args.depth
     # Prepare auth
     auth_args = dict()
     # Take from env variables
     if args.username_variable and args.username_variable in os.environ:
         auth_args["username"] = os.environ[args.username_variable]
         os.environ["USERNAME"] = os.environ[args.username_variable]
     if args.password_variable and args.password_variable in os.environ:
         auth_args["password"] = os.environ[args.password_variable]
     if args.key_variable and args.key_variable in os.environ:
         auth_args["key_filename"] = os.environ[args.key_variable]
     if args.key_data_variable and args.key_data_variable in os.environ:
         # Inline key data uses "|" as a newline placeholder
         key_obj = io.StringIO(os.environ[args.key_data_variable].replace(
             "|", "\n"))
         pkey = paramiko.RSAKey.from_private_key(key_obj)
         # Patch paramiko to use our key
         paramiko.client.SSHClient._auth = _paramiko_client_SSHClient_auth(  # pylint: disable=W0212
             paramiko.client.SSHClient._auth,
             pkey  # pylint: disable=W0212
         )
     # Take from commandline parameters (these override env variables)
     if args.username:
         auth_args["username"] = args.username
         os.environ["USERNAME"] = args.username
     if args.password:
         auth_args["password"] = args.password
     if args.key:
         auth_args["key_filename"] = args.key
     if args.key_data:
         key_obj = io.StringIO(args.key_data.replace("|", "\n"))
         pkey = paramiko.RSAKey.from_private_key(key_obj)
         # Patch paramiko to use our key
         paramiko.client.SSHClient._auth = _paramiko_client_SSHClient_auth(  # pylint: disable=W0212
             paramiko.client.SSHClient._auth,
             pkey  # pylint: disable=W0212
         )
     # Clone repository (checkout deferred until the branch ref is set up)
     log.info("Cloning repository %s into %s", args.source, args.target)
     repository = porcelain.clone(args.source,
                                  args.target,
                                  checkout=False,
                                  depth=depth,
                                  errstream=log.DebugLogStream(),
                                  **auth_args)
     # Checkout branch
     # NOTE(review): unlike the sibling run() action, there is no fallback
     # to the default branch — a missing branch raises KeyError here
     log.info("Checking out branch %s", args.branch)
     branch = args.branch.encode("utf-8")
     repository[b"refs/heads/" +
                branch] = repository[b"refs/remotes/origin/" + branch]
     repository.refs.set_symbolic_ref(b"HEAD", b"refs/heads/" + branch)
     repository.reset_index(repository[b"HEAD"].tree)
Exemple #28
0
def parse_findings(data, scanner):
    """ Parse findings from a Qualys WAS XML report

    Walks the QID glossary, collects matching information-gathered or
    vulnerability records, renders one DastFinding per reference and
    appends it to scanner.findings.

    :param data: raw XML report bytes/string
    :param scanner: scanner object providing get_name() and a findings list
    """
    log.debug("Parsing findings")
    parser = etree.XMLParser(remove_blank_text=True,
                             no_network=True,
                             recover=True)
    obj = etree.fromstring(data, parser)
    qids = obj.xpath("/WAS_WEBAPP_REPORT/GLOSSARY/QID_LIST/QID")
    disabled_titles = constants.QUALYS_DISABLED_TITLES
    for qid in qids:
        qid_title = qid.findtext("TITLE")
        if qid_title not in disabled_titles:
            _qid = qid.findtext("QID")
            qid_solution = qid.findtext("SOLUTION")
            qid_description = qid.findtext("DESCRIPTION")
            qid_impact = qid.findtext("IMPACT")
            qid_category = qid.findtext("CATEGORY")
            qid_severity = "Info"
            owasp = qid.findtext("OWASP") if qid.findtext("OWASP") else ""
            wasc = qid.findtext("WASC") if qid.findtext("WASC") else ""
            cwe = qid.findtext("CWE") if qid.findtext("CWE") else ""
            cvss_base = qid.findtext("CVSS_BASE") if qid.findtext(
                "CVSS_BASE") else ""
            if qid.xpath("SEVERITY"):
                qid_severity = constants.QUALYS_SEVERITIES[int(
                    qid.findtext("SEVERITY"))]
            references = []
            entrypoints = []
            if "Information Gathered" in qid_category:
                # Informational QIDs carry base64-encoded data blobs
                qid_severity = "Info"
                records = obj.xpath(
                    f'//INFORMATION_GATHERED_LIST/INFORMATION_GATHERED/QID[text()="{_qid}"]/..'
                )
                for record in records:
                    references.append(
                        html.escape(
                            base64.b64decode(record.findtext("DATA")).decode(
                                "utf-8", errors="ignore")))
            else:
                records = obj.xpath(
                    f'//VULNERABILITY_LIST/VULNERABILITY/QID[text()="{_qid}"]/..'
                )
                for record in records:
                    record_url = record.findtext('URL')
                    # BUGFIX: collect the access path of the CURRENT record;
                    # the original always read records[0], so every
                    # vulnerability reused the first record's access path
                    access_pass = [
                        a.text for a in record.xpath('ACCESS_PATH/URL')
                    ]
                    method = record.findtext('PAYLOADS/PAYLOAD/REQUEST/METHOD')
                    if not method:
                        log.error("Bad record: %s", str(record))
                        method = ""
                    # Guard missing nodes: findtext() returns None, which
                    # would crash html.escape()/b64decode below
                    request = record.findtext('PAYLOADS/PAYLOAD/REQUEST/URL')
                    request = html.escape(request) if request else ""
                    response = record.findtext(
                        'PAYLOADS/PAYLOAD/RESPONSE/CONTENTS')
                    if response:
                        response = html.escape(
                            base64.b64decode(response).decode(
                                "utf-8", errors="ignore"))
                    else:
                        response = ""
                    entrypoints.append(record_url)
                    entrypoints.extend(access_pass)
                    references.append(
                        f"{method.upper()}: {request}\n\nResponse: {response}\n\n"
                    )
            # One finding per collected reference
            for reference in references:
                description = f"{markdown.html_to_text(qid_description)}\n\n"
                if qid_impact:
                    description += f"**Impact:**\n {markdown.html_to_text(qid_impact)}\n\n"
                if qid_solution:
                    description += f"**Mitigation:**\n {markdown.html_to_text(qid_solution)}\n\n"
                if reference:
                    description += f"**References:**\n {markdown.markdown_escape(reference)}\n\n"
                if cwe:
                    description += f"**CWE:** {markdown.markdown_escape(cwe)}\n\n"
                if owasp:
                    description += f"**OWASP:** {markdown.markdown_escape(owasp)}\n\n"
                if wasc:
                    description += f"**WASC:** {markdown.markdown_escape(wasc)}\n\n"
                if cvss_base:
                    description += f"**CVSS_BASE:** {markdown.markdown_escape(cvss_base)}\n\n"
                # Make finding object
                finding = DastFinding(title=f"{qid_title} - {qid_category}",
                                      description=description)
                finding.set_meta("tool", scanner.get_name())
                finding.set_meta("severity", qid_severity)
                # Endpoints (for backwards compatibility), deduplicated
                endpoints = list()
                for item in entrypoints:
                    endpoint = url.parse_url(item)
                    if endpoint in endpoints:
                        continue
                    endpoints.append(endpoint)
                finding.set_meta("endpoints", endpoints)
                log.debug(f"Endpoints: {finding.get_meta('endpoints')}")
                # Done
                scanner.findings.append(finding)
Exemple #29
0
 def report(self):
     """ Report findings to Jira via the legacy wrapper

     Builds one ticket candidate per DAST/SAST finding (skipping
     informational/false-positive/excluded ones), splits oversized
     descriptions into comments, creates/deduplicates Jira issues and
     stores new/existing ticket metadata plus the effective
     severity-to-priority mapping in module meta.

     :raises RuntimeError: when the Jira configuration is invalid
     """
     # Remove "Epic Link" from fields if requested.
     # BUGFIX: keep epic_link bound even when "Epic Link" is not in the
     # configured fields — the epic-linkage path below referenced it and
     # could previously hit NameError (silently swallowed by bare except)
     epic_link = None
     if self.config.get("separate_epic_linkage", False) and \
             "Epic Link" in self.config.get("fields"):
         epic_link = self.config.get("fields").pop("Epic Link")
     # Prepare wrapper
     log.info("Creating legacy wrapper instance")
     wrapper = JiraWrapper(self.config.get("url"),
                           self.config.get("username"),
                           self.config.get("password"),
                           self.config.get("project"),
                           self.config.get("fields"))
     if not wrapper.valid:
         # Save default mapping to meta as a fallback
         default_mapping = constants.JIRA_SEVERITY_MAPPING
         default_mapping.update(self.config.get("custom_mapping", dict()))
         self.set_meta("mapping", default_mapping)
         # Report error
         log.error("Jira configuration is invalid. Skipping Jira reporting")
         raise RuntimeError("Jira configuration is invalid")
     log.debug("Legacy wrapper is valid")
     # Prepare findings: severity -> Jira priority (custom mapping wins)
     priority_mapping = self.config.get("custom_mapping",
                                        prepare_jira_mapping(wrapper))
     mapping_meta = dict(priority_mapping)
     findings = list()
     for item in self.context.findings:
         # Skip findings explicitly marked as non-reportable
         if item.get_meta("information_finding", False) or \
                 item.get_meta("false_positive_finding", False) or \
                 item.get_meta("excluded_finding", False):
             continue
         if isinstance(item, DastFinding):
             severity = item.get_meta("severity", SEVERITIES[-1])
             priority = constants.JIRA_SEVERITY_MAPPING[severity]
             if priority_mapping and priority in priority_mapping:
                 priority = priority_mapping[priority]
             mapping_meta[
                 severity] = priority  # Update meta mapping to reflect actual results
             findings.append({
                 "title": item.title,
                 "priority": priority,
                 "description": item.description.replace("\\.", "."),
                 "issue_hash": item.get_meta("issue_hash", "<no_hash>"),
                 "additional_labels": [
                     label.replace(" ", "_") for label in [
                         item.get_meta("tool", "scanner"),
                         self.context.get_meta("testing_type", "DAST"),
                         item.get_meta("severity", SEVERITIES[-1])
                     ]
                 ],
                 "raw": item
             })
         elif isinstance(item, SastFinding):
             severity = item.get_meta("severity", SEVERITIES[-1])
             priority = constants.JIRA_SEVERITY_MAPPING[severity]
             if priority_mapping and priority in priority_mapping:
                 priority = priority_mapping[priority]
             mapping_meta[
                 severity] = priority  # Update meta mapping to reflect actual results
             # Convert description chunks to Jira markup
             # (comprehension variable renamed: the original shadowed `item`)
             description_chunks = [
                 chunk_text.replace("\\.", ".").replace(
                     "<pre>", "{code:collapse=true}\n\n").replace(
                         "</pre>", "\n\n{code}").replace("<br />", "\n")
                 for chunk_text in item.description
             ]
             if len("\n\n".join(description_chunks)
                    ) > constants.JIRA_DESCRIPTION_MAX_SIZE:
                 # Too big for one description: first chunk becomes the
                 # description, the rest are packed into comments
                 description = description_chunks[0]
                 chunks = description_chunks[1:]
                 comments = list()
                 new_line_str = '  \n  \n'
                 for chunk in chunks:
                     if not comments or (len(comments[-1]) + len(new_line_str) + len(chunk)) >= \
                             constants.JIRA_COMMENT_MAX_SIZE:
                         comments.append(cut_jira_comment(chunk))
                     else:  # Last comment can handle one more chunk
                         comments[-1] += new_line_str + cut_jira_comment(
                             chunk)
             else:
                 description = "\n\n".join(description_chunks)
                 comments = list()
             findings.append({
                 "title": item.title,
                 "priority": priority,
                 "description": description,
                 "issue_hash": item.get_meta("issue_hash", "<no_hash>"),
                 "additional_labels": [
                     label.replace(" ", "_") for label in [
                         item.get_meta("tool", "scanner"),
                         self.context.get_meta("testing_type", "SAST"),
                         item.get_meta("severity", SEVERITIES[-1])
                     ]
                 ],
                 "comments": comments,
                 "raw": item
             })
         else:
             log.warning("Unsupported finding type")
             continue  # raise ValueError("Unsupported item type")
     # Cut description if length above configured limit
     if self.config.get("max_description_size", False):
         for finding in findings:
             if len(finding["description"]) > int(
                     self.config.get("max_description_size")):
                 if "comments" not in finding:
                     finding["comments"] = list()
                 #
                 comment_chunks = list()
                 cut_line_len = len(constants.JIRA_DESCRIPTION_CUT)
                 cut_point = int(
                     self.config.get("max_description_size")) - cut_line_len
                 #
                 item_description = finding["description"]
                 finding["description"] = \
                     f"{item_description[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                 # Spill the cut tail into comments, each kept below both
                 # the comment size limit and the configured description size
                 description_data = item_description[cut_point:]
                 comment_cut_threshold = min(
                     constants.JIRA_COMMENT_MAX_SIZE,
                     int(self.config.get("max_description_size")))
                 cut_point = comment_cut_threshold - cut_line_len
                 #
                 while description_data:
                     if len(description_data) > comment_cut_threshold:
                         comment_chunks.append(
                             f"{description_data[:cut_point]}{constants.JIRA_DESCRIPTION_CUT}"
                         )
                         description_data = description_data[cut_point:]
                     else:
                         comment_chunks.append(description_data)
                         break
                 # Prepend spilled chunks before any pre-existing comments
                 while comment_chunks:
                     finding["comments"].insert(0, comment_chunks.pop())
     # Sort findings by severity-tool-title
     findings.sort(key=lambda item: (SEVERITIES.index(item["raw"].get_meta(
         "severity", SEVERITIES[-1])), item["raw"].get_meta("tool", ""),
                                     item["raw"].title))
     # Submit issues
     wrapper.connect()
     new_tickets = list()
     existing_tickets = list()
     for finding in findings:
         try:
             issue, created = wrapper.create_issue(
                 finding["title"],  # title
                 finding["priority"],  # priority
                 finding["description"],  # description
                 finding["issue_hash"],  # issue_hash, self.get_hash_code()
                 # attachments=None,
                 # get_or_create=True,
                 additional_labels=finding[
                     "additional_labels"]  # additional_labels
             )
             if created and "comments" in finding:
                 for comment in finding["comments"]:
                     wrapper.add_comment_to_issue(issue, comment)
             if created and self.config.get("separate_epic_linkage", False):
                 try:
                     wrapper.client.add_issues_to_epic(
                         epic_link, [str(issue.key)])
                 except:  # pylint: disable=W0702
                     log.exception("Failed to add ticket %s to epic %s",
                                   str(issue.key), epic_link)
             try:
                 # Cast to str for stable meta values (consistent with the
                 # newer reporter variant in this module)
                 result_priority = str(issue.fields.priority)
             except:  # pylint: disable=W0702
                 result_priority = "Default"
             ticket_meta = {
                 "jira_id": issue.key,
                 "jira_url": f"{self.config.get('url')}/browse/{issue.key}",
                 "priority": result_priority,
                 "status": issue.fields.status.name,
                 "created": issue.fields.created,
                 "open_date": datetime.strptime(
                     issue.fields.created,
                     "%Y-%m-%dT%H:%M:%S.%f%z").strftime("%d %b %Y %H:%M"),
                 "description": issue.fields.summary,
                 "assignee": str(issue.fields.assignee)
             }
             if created:
                 if not self._ticket_in_list(ticket_meta, new_tickets):
                     new_tickets.append(ticket_meta)
             else:
                 # Only track still-open pre-existing tickets
                 if issue.fields.status.name in constants.JIRA_OPENED_STATUSES:
                     if not self._ticket_in_list(ticket_meta,
                                                 existing_tickets):
                         existing_tickets.append(ticket_meta)
         except:  # pylint: disable=W0702
             # Best-effort: record the failure and continue with next finding
             log.exception(
                 f"Failed to create ticket for {finding['title']}")
             error = Error(
                 tool=self.get_name(),
                 error=f"Failed to create ticket for {finding['title']}",
                 details=f"```\n{traceback.format_exc()}\n```")
             self.errors.append(error)
     self.set_meta("new_tickets", new_tickets)
     self.set_meta("existing_tickets", existing_tickets)
     self.set_meta("mapping", mapping_meta)
Exemple #30
0
    def create_issue(self,
                     title,
                     priority,
                     description,
                     issue_hash,
                     attachments=None,
                     get_or_create=True,
                     additional_labels=None):
        """ Create a Jira issue (or re-use an existing one found by hash)

        :param title: issue summary
        :param priority: Jira priority name used for the new issue
        :param description: issue body in markdown; converted to Jira markup
        :param attachments: optional list of dicts; items holding a
            'binary_content' key are attached under the 'message' filename
        :param issue_hash: unique hash used to de-duplicate issues
        :param get_or_create: when True, search for an existing issue by
            hash first and only create one when none is found
        :param additional_labels: optional list of extra labels to set
        :return: tuple ``(issue, created)``; ``created`` is True when a
            new issue was actually posted
        """
        def replace_defaults(value):
            # Substitute '!default_*' placeholders in configured field
            # templates with the concrete values for this issue
            if isinstance(value,
                          str) and const.JIRA_FIELD_USE_DEFAULT_VALUE in value:
                for default_key in default_fields:
                    if default_key in value:
                        value = value.replace(default_key,
                                              default_fields[default_key])
            return value

        default_fields = {
            '!default_issuetype': 'Bug',
            '!default_summary': title,
            '!default_description': description,
            '!default_priority': priority
        }
        description = self.markdown_to_jira_markdown(description)
        issue_data = {
            'project': {
                'key': self.project
            },
            'issuetype': 'Bug',
            'summary': title,
            'description': description,
            'priority': {
                'name': priority
            }
        }
        # Overlay configured custom fields on top of the defaults above
        fields = deepcopy(self.fields)
        for key, value in fields.items():
            if isinstance(value, str):
                if const.JIRA_FIELD_DO_NOT_USE_VALUE in value:
                    # Default to None: removing a field that is not part of
                    # the defaults must not raise KeyError
                    issue_data.pop(key, None)
                else:
                    issue_data[key] = replace_defaults(value)
            elif isinstance(value, list):
                # Rebuild the list instead of value[value.index(item)] = ...,
                # which is O(n^2) and breaks on duplicate items
                value = [replace_defaults(item) for item in value]
                if issue_data.get(key):
                    issue_data[key].extend(value)
                else:
                    issue_data[key] = value
            elif isinstance(value, dict):
                issue_data[key] = {
                    _key: replace_defaults(_value)
                    for _key, _value in value.items()
                }
            elif key not in issue_data:
                issue_data[key] = value
            else:
                logging.warning(
                    "field %s is already set and has '%s' value",
                    key, issue_data[key])
        _labels = []
        if additional_labels and isinstance(additional_labels, list):
            _labels.extend(additional_labels)
        if issue_data.get('labels', None):
            issue_data['labels'].extend(_labels)
        else:
            issue_data['labels'] = _labels
        jira_request = self.JIRA_REQUEST.format(issue_data["project"]["key"],
                                                issue_hash, issue_hash)
        if get_or_create:
            issue, created = self.get_or_create_issue(jira_request, issue_data)
        else:
            issue = self.post_issue(issue_data)
            created = True
        try:
            # Attachments and watchers are best-effort: a failure here must
            # not fail the ticket creation itself
            if attachments:
                for attachment in attachments:
                    if 'binary_content' in attachment:
                        self.add_attachment(
                            issue.key,
                            attachment=attachment['binary_content'],
                            filename=attachment['message'])
            for watcher in self.watchers:
                self.client.add_watcher(issue.id, watcher)
        except Exception:  # pylint: disable=W0703
            if os.environ.get("debug", False):
                logging.error(format_exc())
        finally:
            try:
                result_priority = issue.fields.priority
            except Exception:  # pylint: disable=W0703
                # Some projects do not expose a priority field on the issue
                result_priority = "Default"
            self.created_jira_tickets.append({
                'description': issue.fields.summary,
                'priority': result_priority,
                'key': issue.key,
                'link': self.url + '/browse/' + issue.key,
                'new': created,
                'assignee': issue.fields.assignee,
                'status': issue.fields.status.name,
                'open_date': issue.fields.created
            })
        return issue, created