def updated_nessus_parser(root, project_id, scan_id):
    """Parse a Nessus XML export and store findings in the database.

    Walks the ``Report``/``ReportHost``/``ReportItem`` tree, saves one
    NetworkScanDb row per host and one NetworkScanResultsDb row per finding
    (de-duplicated by a sha256 of target+plugin+severity+port), then updates
    per-host vulnerability counters and sends a notification e-mail.

    :param root: ElementTree root element of the .nessus XML document.
    :param project_id: project the scan belongs to.
    :param scan_id: incoming scan id; NOTE it is replaced by a fresh uuid
        for every host attribute processed below.
    :return: None (all output goes to the DB / e-mail).
    """
    # Parsed fields are shared through module-level globals. NOTE(review):
    # total_vul/total_high/total_medium/total_low are only *read* in the
    # summary e-mail at the bottom — they must be assigned elsewhere in the
    # module or that line raises NameError; confirm against the caller.
    global agent, description, fname, plugin_modification_date, plugin_name, plugin_publication_date, plugin_type, risk_factor, script_version, solution, synopsis, plugin_output, see_also, scan_ip, pluginName, pluginID, protocol, severity, svc_name, pluginFamily, port, vuln_color, total_vul, total_high, total_medium, total_low, target, report_name

    # One timestamp shared by every row written during this parse run.
    date_time = datetime.datetime.now()

    for data in root:
        if data.tag == "Report":
            report_name = data.attrib["name"]

        for reportHost in data.iter("ReportHost"):
            try:
                for key, value in reportHost.items():
                    # Every ReportHost attribute value is treated as the
                    # target host and gets its own scan record + uuid.
                    target = value
                    scan_id = uuid.uuid4()
                    scan_status = "100"
                    scan_dump = NetworkScanDb(
                        ip=target,
                        scan_id=scan_id,
                        date_time=date_time,
                        project_id=project_id,
                        scan_status=scan_status,
                        scanner="Nessus",
                    )
                    scan_dump.save()
                    for ReportItem in reportHost.iter("ReportItem"):
                        # Finding metadata carried as XML attributes.
                        for key, value in ReportItem.attrib.items():
                            if key == "pluginName":
                                pluginName = value

                            if key == "pluginID":
                                pluginID = value

                            if key == "protocol":
                                protocol = value

                            if key == "severity":
                                severity = value

                            if key == "svc_name":
                                svc_name = value

                            if key == "pluginFamily":
                                pluginFamily = value

                            if key == "port":
                                port = value
                        # Finding details carried as child elements; a
                        # missing element (find() returns None, so .text
                        # raises AttributeError) falls back to "NA".
                        try:
                            agent = ReportItem.find("agent").text
                        except:
                            agent = "NA"
                        try:
                            description = ReportItem.find("description").text
                        except:
                            description = "NA"
                        try:
                            fname = ReportItem.find("fname").text
                        except:
                            fname = "NA"
                        try:
                            plugin_modification_date = ReportItem.find(
                                "plugin_modification_date").text
                        except:
                            plugin_modification_date = "NA"
                        try:
                            plugin_name = ReportItem.find("plugin_name").text
                        except:
                            plugin_name = "NA"
                        try:
                            plugin_publication_date = ReportItem.find(
                                "plugin_publication_date").text
                        except:
                            plugin_publication_date = "NA"
                        try:
                            plugin_type = ReportItem.find("plugin_type").text
                        except:
                            plugin_type = "NA"
                        try:
                            risk_factor = ReportItem.find("risk_factor").text
                        except:
                            risk_factor = "NA"
                        try:
                            script_version = ReportItem.find(
                                "script_version").text
                        except:
                            script_version = "NA"
                        try:
                            see_also = ReportItem.find("see_also").text
                        except:
                            see_also = "NA"
                        try:
                            solution = ReportItem.find("solution").text
                        except:
                            solution = "NA"
                        try:
                            synopsis = ReportItem.find("synopsis").text
                        except:
                            synopsis = "NA"
                        try:
                            plugin_output = ReportItem.find(
                                "plugin_output").text
                        except:
                            plugin_output = "NA"
                        vuln_id = uuid.uuid4()

                        # Map Nessus risk factors onto the UI's severity
                        # buckets + bootstrap colour classes; anything
                        # unrecognised (including "NA") becomes Low/info.
                        if risk_factor == "Critical":
                            vuln_color = "danger"
                            risk_factor = "High"
                        elif risk_factor == "High":
                            vuln_color = "danger"
                            risk_factor = "High"
                        elif risk_factor == "Medium":
                            vuln_color = "warning"
                            risk_factor = "Medium"
                        elif risk_factor == "Low":
                            vuln_color = "info"
                            risk_factor = "Low"
                        else:
                            risk_factor = "Low"
                            vuln_color = "info"

                        # Duplicate detection hash. NOTE(review): this uses
                        # plugin_name (child element, frequently "NA"), not
                        # the pluginName attribute parsed above — confirm
                        # whether pluginName was intended.
                        dup_data = target + plugin_name + severity + port
                        duplicate_hash = hashlib.sha256(
                            dup_data.encode("utf-8")).hexdigest()
                        match_dup = (NetworkScanResultsDb.objects.filter(
                            dup_hash=duplicate_hash).values(
                                "dup_hash").distinct())
                        lenth_match = len(match_dup)

                        if lenth_match == 0:
                            duplicate_vuln = "No"

                            # A finding previously marked false-positive
                            # (matched via false_positive_hash) keeps that
                            # flag on the new row.
                            global false_positive
                            false_p = NetworkScanResultsDb.objects.filter(
                                false_positive_hash=duplicate_hash)
                            fp_lenth_match = len(false_p)
                            if fp_lenth_match == 1:
                                false_positive = "Yes"
                            else:
                                false_positive = "No"
                            if risk_factor == "None":
                                risk_factor = "Low"

                            all_data_save = NetworkScanResultsDb(
                                project_id=project_id,
                                scan_id=scan_id,
                                date_time=date_time,
                                title=pluginName,
                                ip=target,
                                vuln_id=vuln_id,
                                description=description,
                                solution=solution,
                                severity=risk_factor,
                                port=port,
                                false_positive=false_positive,
                                vuln_status="Open",
                                dup_hash=duplicate_hash,
                                vuln_duplicate=duplicate_vuln,
                                severity_color=vuln_color,
                                scanner="Nessus",
                            )
                            all_data_save.save()

                        else:
                            # Seen before: still stored, but flagged as a
                            # duplicate so it is excluded from open counts.
                            duplicate_vuln = "Yes"

                            all_data_save = NetworkScanResultsDb(
                                project_id=project_id,
                                scan_id=scan_id,
                                date_time=date_time,
                                title=pluginName,
                                ip=target,
                                vuln_id=vuln_id,
                                description=description,
                                solution=solution,
                                severity=risk_factor,
                                port=port,
                                false_positive="Duplicate",
                                vuln_status="Duplicate",
                                dup_hash=duplicate_hash,
                                vuln_duplicate=duplicate_vuln,
                                severity_color=vuln_color,
                                scanner="Nessus",
                            )
                            all_data_save.save()
            # NOTE(review): bare except silently drops every parsing/DB
            # error for the whole host.
            except:
                continue
        # Second pass: recompute per-target counters from the DB and write
        # them back onto every NetworkScanDb row for that IP.
        for reportHost in data.iter("ReportHost"):
            try:
                for key, value in reportHost.items():
                    target = value
                    target_filter = NetworkScanResultsDb.objects.filter(
                        ip=target,
                        vuln_status="Open",
                        vuln_duplicate="No",
                    )

                    duplicate_count = NetworkScanResultsDb.objects.filter(
                        ip=target, vuln_duplicate="Yes")

                    target_total_vuln = len(target_filter)
                    target_total_high = len(
                        target_filter.filter(severity="High"))
                    target_total_medium = len(
                        target_filter.filter(severity="Medium"))
                    target_total_low = len(
                        target_filter.filter(severity="Low"))
                    target_total_duplicate = len(
                        duplicate_count.filter(vuln_duplicate="Yes"))
                    NetworkScanDb.objects.filter(ip=target).update(
                        date_time=date_time,
                        total_vul=target_total_vuln,
                        high_vul=target_total_high,
                        medium_vul=target_total_medium,
                        low_vul=target_total_low,
                        total_dup=target_total_duplicate,
                    )
            except:
                continue
    trend_update()
    subject = "Archery Tool Scan Status - Nessus Report Uploaded"
    # NOTE(review): total_vul/total_high/total_medium/total_low are module
    # globals this function never assigns — NameError if unset elsewhere.
    message = ("Nessus Scanner has completed the scan "
               "  %s <br> Total: %s <br>High: %s <br>"
               "Medium: %s <br>Low %s" %
               (scan_id, total_vul, total_high, total_medium, total_low))

    email_sch_notify(subject=subject, message=message)
def npmaudit_report_json(data, project_id, scan_id, username):
    """Parse an ``npm audit --json`` report and store findings.

    Iterates ``data['advisories']``, normalises npm severities to the UI's
    High/Medium/Low buckets, de-duplicates findings by a sha256 of
    title+severity+module, saves each row to npmaudit_scan_results_db,
    updates the summary counters on npmaudit_scan_db and sends a
    notification e-mail.

    :param data: parsed npm-audit JSON (dict with an ``advisories`` map).
    :param project_id: project the scan belongs to.
    :param scan_id: id of this scan run.
    :param username: owner used to scope all DB queries.
    :return: None (results are written to the DB / e-mail).
    """
    date_time = datetime.now()
    global vul_col
    # Bug fix: give the colour a default so an unmapped severity string no
    # longer leaves a stale (or never-initialised) module-global behind.
    vul_col = "info"
    for vuln in data['advisories']:
        advisory = data['advisories'][vuln]
        # ``x or "not found"`` preserves the original falsy-value fallback.
        title = advisory['title'] or "not found"
        found_by = advisory['found_by'] or "not found"
        reported_by = advisory['reported_by'] or "not found"
        module_name = advisory['module_name'] or "not found"
        cves = advisory['cves'] or "not found"
        vulnerable_versions = advisory['vulnerable_versions'] or "not found"
        patched_versions = advisory['patched_versions'] or "not found"
        overview = advisory['overview'] or "not found"
        recommendation = advisory['recommendation'] or "not found"
        references = advisory['references'] or "not found"
        access = advisory['access'] or "not found"
        severity = advisory['severity'] or "not found"
        cwe = advisory['cwe'] or "not found"
        url = advisory['url'] or "not found"

        # version -> affected paths, stored on the result row.
        vuln_versions = {}
        for find in advisory['findings']:
            vuln_versions[find['version']] = [find['paths']]

        # Normalise npm severities to High/Medium/Low. Bug fix: "critical"
        # was a separate ``if`` breaking the chain; one if/elif chain now.
        if severity == "critical":
            severity = 'High'
            vul_col = "danger"
        elif severity == "high":
            severity = 'High'
            vul_col = "danger"
        elif severity == 'moderate':
            severity = 'Medium'
            vul_col = "warning"
        elif severity == 'low':
            severity = 'Low'
            vul_col = "info"
        elif severity == 'info':
            severity = 'Low'
            vul_col = "info"

        vul_id = uuid.uuid4()

        # Duplicate detection: same title/severity/module seen before.
        dup_data = str(title) + str(severity) + str(module_name)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()

        match_dup = npmaudit_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')

        if len(match_dup) == 0:
            duplicate_vuln = 'No'
            vuln_status = 'Open'

            # A finding previously flagged false-positive keeps that flag.
            false_p = npmaudit_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            false_positive = 'Yes' if len(false_p) == 1 else 'No'
        else:
            duplicate_vuln = 'Yes'
            vuln_status = 'Duplicate'
            false_positive = 'Duplicate'

        save_all = npmaudit_scan_results_db(
            vuln_id=vul_id,
            date_time=date_time,
            scan_id=scan_id,
            project_id=project_id,
            vul_col=vul_col,
            vuln_status=vuln_status,
            dup_hash=duplicate_hash,
            vuln_duplicate=duplicate_vuln,
            false_positive=false_positive,
            version=vuln_versions,
            title=title,
            found_by=found_by,
            reported_by=reported_by,
            module_name=module_name,
            cves=cves,
            vulnerable_versions=vulnerable_versions,
            patched_versions=patched_versions,
            overview=overview,
            recommendation=recommendation,
            references=references,
            access=access,
            severity=severity,
            cwe=cwe,
            url=url,
            username=username,
        )
        save_all.save()

    # Per-scan summary counters written back onto the scan record.
    all_results = npmaudit_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')

    duplicate_count = npmaudit_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_vul = len(all_results)
    total_high = len(all_results.filter(severity="High"))
    total_medium = len(all_results.filter(severity="Medium"))
    total_low = len(all_results.filter(severity="Low"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))

    npmaudit_scan_db.objects.filter(username=username, scan_id=scan_id).update(
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    # Bug fix: the notification previously said "Trivy" — a copy-paste
    # error from trivy_report_json.
    subject = 'Archery Tool Scan Status - npm-audit Report Uploaded'
    message = 'npm-audit Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("npm-audit", total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
def trivy_report_json(data, project_id, scan_id, username):
    """Parse a Trivy JSON report and store findings.

    Iterates each target entry's ``Vulnerabilities`` list, normalises Trivy
    severities to High/Medium/Low, de-duplicates findings by a sha256 of
    CVE-id+severity+package, saves rows to trivy_scan_results_db, updates
    the summary counters on trivy_scan_db and sends a notification e-mail.

    :param data: list of Trivy target entries, each carrying a
                 ``Vulnerabilities`` list (or None when the target is clean).
    :param project_id: project the scan belongs to.
    :param scan_id: id of this scan run.
    :param username: owner used to scope all DB queries.
    :return: None (results are written to the DB / e-mail).
    """
    date_time = datetime.now()
    vul_col = ''
    for vuln_data in data:
        vuln = vuln_data['Vulnerabilities']
        if vuln is None:
            continue
        for issue in vuln:
            # Trivy omits fields it has no data for; tolerate missing keys.
            VulnerabilityID = issue.get('VulnerabilityID', "Not Found")
            PkgName = issue.get('PkgName', "Not Found")
            InstalledVersion = issue.get('InstalledVersion', "Not Found")
            FixedVersion = issue.get('FixedVersion', "Not Found")
            Title = issue.get('Title', "Not Found")
            Description = issue.get('Description', "Not Found")
            Severity = issue.get('Severity', "Not Found")
            References = issue.get('References', "Not Found")

            # Normalise Trivy severities to High/Medium/Low buckets.
            if Severity == "CRITICAL":
                Severity = 'High'
                vul_col = "danger"
            elif Severity == "HIGH":
                Severity = 'High'
                vul_col = "danger"
            elif Severity == 'MEDIUM':
                Severity = 'Medium'
                vul_col = "warning"
            elif Severity == 'LOW':
                Severity = 'Low'
                vul_col = "info"
            elif Severity == 'UNKNOWN':
                Severity = 'Low'
                vul_col = "info"

            vul_id = uuid.uuid4()

            # Duplicate detection: same CVE/severity/package seen before.
            dup_data = str(VulnerabilityID) + str(Severity) + str(PkgName)
            duplicate_hash = hashlib.sha256(
                dup_data.encode('utf-8')).hexdigest()

            match_dup = trivy_scan_results_db.objects.filter(
                username=username, dup_hash=duplicate_hash).values('dup_hash')

            if len(match_dup) == 0:
                duplicate_vuln = 'No'
                vuln_status = 'Open'

                # A finding previously flagged false-positive keeps the flag.
                false_p = trivy_scan_results_db.objects.filter(
                    username=username, false_positive_hash=duplicate_hash)
                false_positive = 'Yes' if len(false_p) == 1 else 'No'
            else:
                duplicate_vuln = 'Yes'
                vuln_status = 'Duplicate'
                false_positive = 'Duplicate'

            save_all = trivy_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                VulnerabilityID=VulnerabilityID,
                PkgName=PkgName,
                InstalledVersion=InstalledVersion,
                FixedVersion=FixedVersion,
                Title=Title,
                Description=Description,
                Severity=Severity,
                References=References,
                vul_col=vul_col,
                vuln_status=vuln_status,
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                username=username,
            )
            save_all.save()

    # Summary counters: computed once after parsing. Bug fix: this block
    # previously ran inside the per-target loop (redundant DB work) and for
    # an empty report total_vul/... were never set, so the message below
    # raised NameError.
    all_results = trivy_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')

    duplicate_count = trivy_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_vul = len(all_results)
    total_high = len(all_results.filter(Severity="High"))
    total_medium = len(all_results.filter(Severity="Medium"))
    total_low = len(all_results.filter(Severity="Low"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))

    trivy_scan_db.objects.filter(scan_id=scan_id).update(
        username=username,
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate,
    )
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - Trivy Report Uploaded'
    # Bug fix: the original referenced an undefined name ``Target`` here
    # (NameError); report the scan id instead.
    message = 'Trivy Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
# Example 4
    def xml_parser(self):
        """Parse a Findbugs XML report (``self.root``) and store findings.

        Walks ``BugInstance`` elements, maps priority 1/2/3 onto the UI's
        High/Medium/Low buckets, de-duplicates findings by a sha256 of
        message+path+risk, saves StaticScanResultsDb rows, updates the
        counters on StaticScansDb and sends a notification e-mail.

        Reads ``self.root`` (ElementTree root), ``self.scan_id`` and
        ``self.project_id``; bug-pattern details come from
        ``self.find_bug_pattern``.

        :return: None (results are written to the DB / e-mail).
        """
        date_time = datetime.now()
        # Parsed fields are shared through module-level globals.
        global name, classname, risk, ShortMessage, LongMessage, sourcepath, vul_col, ShortDescription, Details, lenth_match, duplicate_hash, vul_id, total_vul, total_high, total_medium, total_low, details, message
        for bug in self.root:
            if bug.tag == "BugInstance":
                name = bug.attrib["type"]
                priority = bug.attrib["priority"]
                # Collect the child elements of this BugInstance; missing
                # attributes fall back to 'na'/'NA'.
                for BugInstance in bug:
                    if BugInstance.tag == "ShortMessage":
                        global ShortMessage
                        ShortMessage = BugInstance.text
                    if BugInstance.tag == "LongMessage":
                        global LongMessage
                        LongMessage = BugInstance.text
                    if BugInstance.tag == "Class":
                        global classname
                        try:
                            classname = BugInstance.attrib["classname"]
                        except:
                            classname = 'na'
                    if BugInstance.tag == "SourceLine":
                        global sourcepath, sourcefile
                        try:
                            sourcepath = BugInstance.attrib["sourcepath"]
                        except:
                            sourcepath = 'NA'
                        try:
                            sourcefile = BugInstance.attrib["sourcefile"]
                        except:
                            sourcefile = 'NA'

                        # Grab the human-readable Message text from any
                        # grandchild of this BugInstance.
                        for data in bug:
                            for message_data in data:
                                if message_data.tag == 'Message':
                                    message = message_data.text

                    # Findbugs priority 1/2/3 -> High/Medium/Low.
                    if priority == "1":
                        risk = "High"
                        vul_col = "danger"

                    elif priority == "2":
                        risk = "Medium"
                        vul_col = "warning"

                    elif priority == "3":
                        risk = "Low"
                        vul_col = "info"

                    vul_id = uuid.uuid4()

                    # Duplicate detection hash over the identifying fields.
                    dup_data = str(ShortMessage) + str(message) + str(
                        sourcepath) + str(risk)

                    duplicate_hash = hashlib.sha256(
                        dup_data.encode("utf-8")).hexdigest()

                    match_dup = StaticScanResultsDb.objects.filter(
                        dup_hash=duplicate_hash).values("dup_hash")
                    lenth_match = len(match_dup)

                    details = self.find_bug_pattern(name)
                # NOTE(review): the save below runs once per BugInstance,
                # using whatever values the *last* child-element iteration
                # left in the globals above.
                if lenth_match == 0:
                    duplicate_vuln = "No"

                    # A finding previously flagged false-positive keeps
                    # that flag on the new row.
                    false_p = StaticScanResultsDb.objects.filter(
                        false_positive_hash=duplicate_hash)
                    fp_lenth_match = len(false_p)

                    if fp_lenth_match == 1:
                        false_positive = "Yes"
                    else:
                        false_positive = "No"

                    save_all = StaticScanResultsDb(
                        vuln_id=vul_id,
                        date_time=date_time,
                        scan_id=self.scan_id,
                        project_id=self.project_id,
                        title=str(ShortMessage),
                        severity=risk,
                        description="<b>Finding Path & Line:</b> %s" %
                        str(message) + "<br><br>"
                        "<b>Finding Classes:</b> %s" % str(classname) +
                        "<br><br>"
                        "<b>Finding Source Path</b>: %s" % str(sourcepath) +
                        "<br><br>" + str(ShortMessage) + "<br><br>" +
                        str(LongMessage) + "<br><br>" + str(details),
                        # + "\n\n"
                        # + str(classname),
                        fileName=str(message),
                        severity_color=vul_col,
                        vuln_status="Open",
                        dup_hash=duplicate_hash,
                        vuln_duplicate=duplicate_vuln,
                        false_positive=false_positive,
                        scanner="Findbugs",
                    )
                    save_all.save()

                else:
                    # Seen before: stored but flagged duplicate so it is
                    # excluded from the open counts.
                    duplicate_vuln = "Yes"
                    save_all = StaticScanResultsDb(
                        vuln_id=vul_id,
                        date_time=date_time,
                        scan_id=self.scan_id,
                        project_id=self.project_id,
                        title=str(ShortMessage),
                        severity=risk,
                        description="<b>Finding Path & Line:</b> %s" %
                        str(message) + "<br><br>"
                        "<b>Finding Classes:</b> %s" % str(classname) +
                        "<br><br>"
                        "<b>Finding Source Path</b>: %s" % str(sourcepath) +
                        "<br><br>" + str(ShortMessage) + "<br><br>" +
                        str(LongMessage) + "<br><br>" + str(details),
                        # + "\n\n"
                        # + str(classname),
                        fileName=str(message),
                        severity_color=vul_col,
                        vuln_status="Duplicate",
                        dup_hash=duplicate_hash,
                        vuln_duplicate=duplicate_vuln,
                        false_positive="Duplicate",
                        scanner="Findbugs",
                    )
                    save_all.save()

            # if bug.tag == "BugPattern":
            #     for BugPattern in bug:
            #         name = bug.attrib["type"]
            #         if BugPattern.tag == "ShortDescription":
            #             ShortDescription = BugPattern.text
            #         if BugPattern.tag == "Details":
            #             global Details
            #             Details = BugPattern.text
            #         print(Details)
            #         StaticScanResultsDb.objects.filter(vuln_id=vul_id, title=name).update(
            #             description=str(Details)
            #                         + "\n\n"
            #                         + str(ShortMessage)
            #                         + "\n\n"
            #                         + str(LongMessage)
            #                         + "\n\n"
            #                         + str(classname),
            #         )

            # NOTE(review): summary counters are recomputed (and the scan
            # row re-updated) once per top-level element; only the final
            # pass matters.
            all_findbugs_data = StaticScanResultsDb.objects.filter(
                scan_id=self.scan_id, false_positive="No")

            duplicate_count = StaticScanResultsDb.objects.filter(
                scan_id=self.scan_id, vuln_duplicate="Yes")

            total_vul = len(all_findbugs_data)
            total_high = len(all_findbugs_data.filter(severity="High"))
            total_medium = len(all_findbugs_data.filter(severity="Medium"))
            total_low = len(all_findbugs_data.filter(severity="Low"))
            total_duplicate = len(duplicate_count.filter(vuln_duplicate="Yes"))

            StaticScansDb.objects.filter(scan_id=self.scan_id).update(
                total_vul=total_vul,
                date_time=date_time,
                high_vul=total_high,
                medium_vul=total_medium,
                low_vul=total_low,
                total_dup=total_duplicate,
                scanner="Findbugs",
            )
        trend_update()
        subject = "Archery Tool Scan Status - Findbugs Report Uploaded"
        message = (
            "Findbugs Scanner has completed the scan "
            "  %s <br> Total: %s <br>High: %s <br>"
            "Medium: %s <br>Low %s" %
            (self.scan_id, total_vul, total_high, total_medium, total_low))

        email_sch_notify(subject=subject, message=message)
def tfsec_report_json(data, project_id, scan_id, username):
    """Parse a tfsec JSON report and store its findings.

    :param data: parsed tfsec JSON output (expects a ``results`` list).
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of this scan run.
    :param username: owner used to scope all DB lookups/writes.
    :return: None; persists rows, updates scan totals and emails a summary.
    """
    date_time = datetime.now()
    global vul_col
    for vuln in data['results']:
        rule_id = vuln['rule_id']
        link = vuln['link']
        filename = vuln['location']['filename']
        start_line = vuln['location']['start_line']
        end_line = vuln['location']['end_line']
        description = vuln['description']
        severity = vuln['severity']

        # Map tfsec severities onto the dashboard's levels and colors.
        if severity == "ERROR":
            severity = 'High'
            vul_col = "danger"

        elif severity == 'WARNING':
            severity = 'Medium'
            vul_col = "warning"

        elif severity == 'INFO':
            severity = 'Info'
            vul_col = "info"

        else:
            # Fix: an unrecognised severity previously reused whatever value
            # the module-level ``vul_col`` happened to hold (or raised
            # NameError on first use); fall back to a neutral color.
            vul_col = "info"

        vul_id = uuid.uuid4()

        # Findings are deduplicated on (rule, severity, file).
        dup_data = str(rule_id) + str(severity) + str(filename)

        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()

        match_dup = tfsec_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')
        length_match = len(match_dup)

        if length_match == 0:
            duplicate_vuln = 'No'

            # A matching false-positive hash means this finding was
            # previously triaged as a false positive.
            false_p = tfsec_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            fp_length_match = len(false_p)

            if fp_length_match == 1:
                false_positive = 'Yes'
            else:
                false_positive = 'No'

            save_all = tfsec_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Open',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                rule_id=rule_id,
                filename=filename,
                severity=severity,
                description=description,
                link=link,
                start_line=start_line,
                end_line=end_line,
                username=username,
            )
            save_all.save()

        else:
            duplicate_vuln = 'Yes'

            save_all = tfsec_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Duplicate',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive='Duplicate',
                rule_id=rule_id,
                filename=filename,
                severity=severity,
                description=description,
                link=link,
                start_line=start_line,
                end_line=end_line,
                username=username,
            )
            save_all.save()

    # Recompute per-scan totals, excluding triaged false positives.
    all_results = tfsec_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')

    duplicate_count = tfsec_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_vul = len(all_results)
    total_high = len(all_results.filter(severity="High"))
    total_medium = len(all_results.filter(severity="Medium"))
    total_low = len(all_results.filter(severity="Low"))
    # Already filtered on vuln_duplicate='Yes'; no need to filter again.
    total_duplicate = len(duplicate_count)

    tfsec_scan_db.objects.filter(username=username, scan_id=scan_id).update(
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - tfsec Report Uploaded'
    message = 'tfsec Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("tfsec", total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
def whitesource_report_json(data, project_id, scan_id, username):
    """Parse a WhiteSource JSON report and store its findings.

    :param data: parsed WhiteSource JSON output (expects a
        ``vulnerabilities`` list).
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of this scan run.
    :param username: owner used to scope all DB lookups/writes.
    :return: None; persists rows, updates scan totals and emails a summary.
    """
    date_time = datetime.now()

    global vul_col, project
    vuln = data['vulnerabilities']

    for issues in vuln:
        name = issues['name']
        severity = issues['severity']
        score = issues['score']
        cvss3_severity = issues['cvss3_severity']
        cvss3_score = issues['cvss3_score']
        publishDate = issues['publishDate']
        lastUpdatedDate = issues['lastUpdatedDate']
        scoreMetadataVector = issues['scoreMetadataVector']
        url = issues['url']
        description = issues['description']
        project = issues['project']
        product = issues['product']
        cvss3Attributes = issues['cvss3Attributes']
        library = issues['library']
        topFix = issues['topFix']
        #allFixes = issues['allFixes']
        filename = issues['library']['filename']
        sha1 = issues['library']['sha1']
        version = issues['library']['version']
        groupId = issues['library']['groupId']

        # Map WhiteSource severities onto the dashboard's levels and colors.
        if severity == "high":
            severity = 'High'
            vul_col = "danger"
        elif severity == 'medium':
            severity = 'Medium'
            vul_col = "warning"
        elif severity == 'low':
            severity = 'Low'
            vul_col = "info"
        else:
            # Fix: an unrecognised severity previously reused whatever value
            # the module-level ``vul_col`` happened to hold (or raised
            # NameError on first use); fall back to a neutral color.
            vul_col = "info"

        vul_id = uuid.uuid4()
        # Findings are deduplicated on (name, severity, project).
        dup_data = str(name) + str(severity) + str(project)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = whitesource_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')
        length_match = len(match_dup)
        if length_match == 0:
            duplicate_vuln = 'No'

            # A matching false-positive hash means this finding was
            # previously triaged as a false positive.
            false_p = whitesource_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            fp_length_match = len(false_p)
            if fp_length_match == 1:
                false_positive = 'Yes'
            else:
                false_positive = 'No'
            save_all = whitesource_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Open',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                name=name,
                severity=severity,
                score=score,
                cvss3_severity=cvss3_severity,
                cvss3_score=cvss3_score,
                publishDate=publishDate,
                lastUpdatedDate=lastUpdatedDate,
                scoreMetadataVector=scoreMetadataVector,
                url=url,
                description=description,
                project=project,
                product=product,
                cvss3Attributes=cvss3Attributes,
                library=library,
                topFix=topFix,
                # allFixes=allFixes,
                filename=filename,
                sha1=sha1,
                version=version,
                groupId=groupId,
                username=username,
            )
            save_all.save()

        else:
            duplicate_vuln = 'Yes'

            save_all = whitesource_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status='Duplicate',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive='Duplicate',
                name=name,
                severity=severity,
                score=score,
                cvss3_severity=cvss3_severity,
                cvss3_score=cvss3_score,
                publishDate=publishDate,
                lastUpdatedDate=lastUpdatedDate,
                scoreMetadataVector=scoreMetadataVector,
                url=url,
                description=description,
                project=project,
                product=product,
                cvss3Attributes=cvss3Attributes,
                library=library,
                topFix=topFix,
                # allFixes=allFixes,
                filename=filename,
                sha1=sha1,
                version=version,
                groupId=groupId,
                username=username,
            )
            save_all.save()

    # Recompute per-scan totals, excluding triaged false positives.
    all_results = whitesource_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')

    duplicate_count = whitesource_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_vul = len(all_results)
    total_high = len(all_results.filter(severity="High"))
    total_medium = len(all_results.filter(severity="Medium"))
    total_low = len(all_results.filter(severity="Low"))
    # Already filtered on vuln_duplicate='Yes'; no need to filter again.
    total_duplicate = len(duplicate_count)

    # ``project`` holds the last project seen in the report, as before.
    whitesource_scan_db.objects.filter(
        username=username, scan_id=scan_id).update(project_name=project,
                                                   date_time=date_time,
                                                   total_vul=total_vul,
                                                   high_vul=total_high,
                                                   medium_vul=total_medium,
                                                   low_vul=total_low,
                                                   total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - whitesource Report Uploaded'
    message = 'whitesource Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("whitesource", total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
# Example #7
def updated_nessus_parser(root, project_id, scan_id, username):
    """Parse a Nessus XML report tree and persist its findings.

    Walks every ``ReportHost``/``ReportItem`` under *root*, stores one row
    per finding (deduplicated on target + plugin + severity + port), writes
    per-target and per-scan totals, then emails a summary.

    :param root: ElementTree root of a ``.nessus`` export.
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of this scan run.
    :param username: owner used to scope all DB lookups/writes.
    :return: None.
    """
    global agent, description, fname, \
        plugin_modification_date, plugin_name, \
        plugin_publication_date, plugin_type, \
        risk_factor, script_version, solution, \
        synopsis, plugin_output, see_also, scan_ip, \
        pluginName, pluginID, protocol, severity, \
        svc_name, pluginFamily, port, vuln_color, total_vul, total_high, total_medium, total_low, target, report_name

    def _child_text(report_item, tag):
        # Text of a child element, or "NA" when the element is missing.
        # Mirrors the original try/except behaviour: an element that exists
        # but has empty text still yields None, not "NA".
        element = report_item.find(tag)
        return element.text if element is not None else "NA"

    date_time = datetime.datetime.now()

    for data in root:
        if data.tag == 'Report':
            report_name = data.attrib['name']

            scan_status = "100"
            # NOTE(review): ``target`` is a module global at this point and
            # may still hold a value from a previous parse (or be unset on
            # the very first run) — confirm intended behaviour.
            scan_dump = nessus_scan_db(
                report_name=report_name,
                target=target,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                scan_status=scan_status,
                username=username
            )
            scan_dump.save()
        for reportHost in data.iter('ReportHost'):
            try:
                # The host element's attributes carry the scanned address;
                # the last attribute value wins, as in the original.
                for key, value in reportHost.items():
                    target = value
            except Exception:
                continue

            for ReportItem in reportHost.iter('ReportItem'):
                for key, value in ReportItem.attrib.items():
                    if key == 'pluginName':
                        pluginName = value
                    if key == 'pluginID':
                        pluginID = value
                    if key == 'protocol':
                        protocol = value
                    if key == 'severity':
                        severity = value
                    if key == 'svc_name':
                        svc_name = value
                    if key == 'pluginFamily':
                        pluginFamily = value
                    if key == 'port':
                        port = value

                # Optional child elements default to "NA" when absent
                # (replaces 13 copy-pasted bare try/except blocks).
                agent = _child_text(ReportItem, 'agent')
                description = _child_text(ReportItem, 'description')
                fname = _child_text(ReportItem, 'fname')
                plugin_modification_date = _child_text(ReportItem, 'plugin_modification_date')
                plugin_name = _child_text(ReportItem, 'plugin_name')
                plugin_publication_date = _child_text(ReportItem, 'plugin_publication_date')
                plugin_type = _child_text(ReportItem, 'plugin_type')
                risk_factor = _child_text(ReportItem, 'risk_factor')
                script_version = _child_text(ReportItem, 'script_version')
                see_also = _child_text(ReportItem, 'see_also')
                solution = _child_text(ReportItem, 'solution')
                synopsis = _child_text(ReportItem, 'synopsis')
                plugin_output = _child_text(ReportItem, 'plugin_output')
                vuln_id = uuid.uuid4()

                # Collapse Nessus risk factors to High/Medium/Low plus a
                # bootstrap color class; Critical is folded into High.
                if risk_factor == 'Critical':
                    vuln_color = 'danger'
                    risk_factor = 'High'
                elif risk_factor == 'High':
                    vuln_color = 'danger'
                    risk_factor = 'High'
                elif risk_factor == 'Medium':
                    vuln_color = 'warning'
                    risk_factor = 'Medium'
                elif risk_factor == 'Low':
                    vuln_color = 'info'
                    risk_factor = 'Low'
                else:
                    risk_factor = 'Low'
                    vuln_color = 'info'

                # Duplicate detection keys on target + plugin + severity + port.
                dup_data = target + plugin_name + severity + port
                duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
                match_dup = nessus_scan_results_db.objects.filter(username=username,
                                                                  dup_hash=duplicate_hash).values('dup_hash').distinct()
                length_match = len(match_dup)

                if length_match == 0:
                    duplicate_vuln = 'No'

                    global false_positive
                    # A matching false-positive hash means this finding was
                    # previously triaged as a false positive.
                    false_p = nessus_scan_results_db.objects.filter(username=username,
                                                                    false_positive_hash=duplicate_hash)
                    fp_length_match = len(false_p)
                    if fp_length_match == 1:
                        false_positive = 'Yes'
                    else:
                        false_positive = 'No'
                    if risk_factor == 'None':
                        risk_factor = 'Low'

                    all_data_save = nessus_scan_results_db(project_id=project_id,
                                                           report_name=report_name,
                                                           scan_id=scan_id,
                                                           date_time=date_time,
                                                           target=target,
                                                           vuln_id=vuln_id,
                                                           agent=agent,
                                                           description=description,
                                                           fname=fname,
                                                           plugin_modification_date=plugin_modification_date,
                                                           plugin_name=plugin_name,
                                                           plugin_publication_date=plugin_publication_date,
                                                           plugin_type=plugin_type,
                                                           risk_factor=risk_factor,
                                                           script_version=script_version,
                                                           see_also=see_also,
                                                           solution=solution,
                                                           synopsis=synopsis,
                                                           plugin_output=plugin_output,
                                                           pluginName=pluginName,
                                                           pluginID=pluginID,
                                                           protocol=protocol,
                                                           severity=severity,
                                                           svc_name=svc_name,
                                                           pluginFamily=pluginFamily,
                                                           port=port,
                                                           false_positive=false_positive,
                                                           vuln_status='Open',
                                                           dup_hash=duplicate_hash,
                                                           vuln_duplicate=duplicate_vuln,
                                                           severity_color=vuln_color,
                                                           username=username,
                                                           )
                    all_data_save.save()
                    # Drop placeholder rows whose plugin_name fell back to "NA".
                    del_na = nessus_scan_results_db.objects.filter(username=username, plugin_name='NA')
                    del_na.delete()

                else:
                    duplicate_vuln = 'Yes'

                    all_data_save = nessus_scan_results_db(project_id=project_id,
                                                           scan_id=scan_id,
                                                           target=target,
                                                           vuln_id=vuln_id,
                                                           date_time=date_time,
                                                           agent=agent,
                                                           description=description,
                                                           fname=fname,
                                                           plugin_modification_date=plugin_modification_date,
                                                           plugin_name=plugin_name,
                                                           plugin_publication_date=plugin_publication_date,
                                                           plugin_type=plugin_type,
                                                           risk_factor=risk_factor,
                                                           script_version=script_version,
                                                           see_also=see_also,
                                                           solution=solution,
                                                           synopsis=synopsis,
                                                           plugin_output=plugin_output,
                                                           pluginName=pluginName,
                                                           pluginID=pluginID,
                                                           protocol=protocol,
                                                           severity=severity,
                                                           svc_name=svc_name,
                                                           pluginFamily=pluginFamily,
                                                           port=port,
                                                           false_positive='Duplicate',
                                                           vuln_status='Duplicate',
                                                           dup_hash=duplicate_hash,
                                                           vuln_duplicate=duplicate_vuln,
                                                           severity_color=vuln_color,
                                                           username=username,
                                                           )
                    all_data_save.save()
                    del_na = nessus_scan_results_db.objects.filter(username=username, plugin_name='NA')
                    del_na.delete()
                    # Keep the running duplicate count on the scan record.
                    ov_all_vul = nessus_scan_results_db.objects.filter(username=username, scan_id=scan_id)
                    total_duplicate = len(ov_all_vul.filter(vuln_duplicate='Yes'))
                    nessus_scan_db.objects.filter(username=username, scan_id=scan_id) \
                        .update(
                        total_dup=total_duplicate,
                        target=target,
                    )

            # Per-target rollup for this ReportHost.
            target_filter = nessus_scan_results_db.objects.filter(username=username, scan_id=scan_id,
                                                                  target=target,
                                                                  vuln_status='Open',
                                                                  vuln_duplicate='No'
                                                                  )

            duplicate_count = nessus_scan_results_db.objects.filter(username=username,
                                                                    scan_id=scan_id,
                                                                    target=target,
                                                                    vuln_duplicate='Yes')

            target_total_vuln = len(target_filter)
            target_total_high = len(target_filter.filter(risk_factor="High"))
            target_total_medium = len(target_filter.filter(risk_factor="Medium"))
            target_total_low = len(target_filter.filter(risk_factor="Low"))
            target_total_duplicate = len(duplicate_count)
            target_scan_dump = nessus_targets_db(
                report_name=report_name,
                target=target,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                username=username,
                total_vuln=target_total_vuln,
                total_high=target_total_high,
                total_medium=target_total_medium,
                total_low=target_total_low,
                total_dup=target_total_duplicate,
            )
            target_scan_dump.save()

        # Per-scan rollup across all targets seen so far.
        ov_all_vul = nessus_scan_results_db.objects.filter(username=username, scan_id=scan_id,
                                                           vuln_status='Open',
                                                           vuln_duplicate='No'
                                                           )
        duplicate_count_report = nessus_scan_results_db.objects.filter(username=username,
                                                                       scan_id=scan_id,
                                                                       vuln_duplicate='Yes')
        total_vuln = len(ov_all_vul)
        total_high = len(ov_all_vul.filter(risk_factor="High"))
        total_medium = len(ov_all_vul.filter(risk_factor="Medium"))
        total_low = len(ov_all_vul.filter(risk_factor="Low"))
        total_duplicate = len(duplicate_count_report)

        nessus_scan_db.objects.filter(username=username, scan_id=scan_id) \
            .update(total_vuln=total_vuln,
                    total_high=total_high,
                    total_medium=total_medium,
                    total_low=total_low,
                    total_dup=total_duplicate,
                    target=target,
                    )
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - Nessus Report Uploaded'
    # Fix: the original interpolated ``total_vul`` — a module global this
    # function never assigns (the rollup above computes ``total_vuln``) —
    # so the email reported stale totals left over from other parsers.
    message = 'Nessus Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vuln, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
def xml_parser(root, project_id, scan_id):
    """Parse a Webinspect XML report tree and persist its findings.

    :param root: ElementTree root of a Webinspect XML export.
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of this scan run.
    :return: None; persists rows, updates scan totals and emails a summary.
    """
    global url, Scheme, Host, Port, AttackMethod, VulnerableSession, TriggerSession, VulnerabilityID, Severity, Name, ReportSection, HighlightSelections, RawResponse, SectionText, vuln_id, severity_name, vul_col, target
    date_time = datetime.now()
    for data in root:
        if data.tag == "Name":
            target = data.text
        for issues in data:
            for issue in issues:
                if issue.tag == "URL":
                    url = issue.text

                if issue.tag == "Host":
                    Host = issue.text

                if issue.tag == "Port":
                    Port = issue.text

                if issue.tag == "AttackMethod":
                    AttackMethod = issue.text

                if issue.tag == "VulnerableSession":
                    VulnerableSession = issue.text

                if issue.tag == "Severity":
                    Severity = issue.text

                if issue.tag == "Name":
                    Name = issue.text

                for d_issue in issue:
                    if d_issue.tag == "SectionText":
                        # Fix: originally read ``issue.text`` (the parent
                        # element) instead of the matched child element.
                        SectionText = d_issue.text

                vuln_id = uuid.uuid4()

            # Map Webinspect numeric severities (4..1) to levels and
            # bootstrap color classes; anything else defaults to Low.
            if Severity == "4":
                Severity = "High"
                vul_col = "danger"

            elif Severity == "3":
                Severity = "High"
                vul_col = "danger"

            elif Severity == "2":
                Severity = "Medium"
                vul_col = "warning"

            elif Severity == "1":
                Severity = "Low"
                vul_col = "info"

            else:
                Severity = "Low"
                vul_col = "info"

            # Findings are deduplicated on (name, url, severity).
            dup_data = Name + url + Severity
            duplicate_hash = hashlib.sha256(dup_data.encode("utf-8")).hexdigest()

            match_dup = (
                WebScanResultsDb.objects.filter(dup_hash=duplicate_hash)
                .values("dup_hash")
                .distinct()
            )
            length_match = len(match_dup)

            if length_match == 0:
                duplicate_vuln = "No"

                # A matching false-positive hash means this finding was
                # previously triaged as a false positive.
                false_p = WebScanResultsDb.objects.filter(
                    false_positive_hash=duplicate_hash
                )
                fp_length_match = len(false_p)

                global false_positive
                # (Dead "elif length_match == 0" branch removed: both arms
                # assigned "No".)
                if fp_length_match == 1:
                    false_positive = "Yes"
                else:
                    false_positive = "No"

                if Name is None:
                    continue
                else:
                    dump_data = WebScanResultsDb(
                        scan_id=scan_id,
                        vuln_id=vuln_id,
                        project_id=project_id,
                        url=url,
                        date_time=date_time,
                        title=Name,
                        severity=Severity,
                        severity_color=vul_col,
                        description=str(Host)
                        + str(Port)
                        + str(SectionText)
                        + str(AttackMethod),
                        instance=VulnerableSession,
                        false_positive=false_positive,
                        vuln_status="Open",
                        dup_hash=duplicate_hash,
                        vuln_duplicate=duplicate_vuln,
                        scanner="Webinspect",
                    )
                    dump_data.save()

            else:
                duplicate_vuln = "Yes"

                dump_data = WebScanResultsDb(
                    scan_id=scan_id,
                    vuln_id=vuln_id,
                    project_id=project_id,
                    url=url,
                    date_time=date_time,
                    title=Name,
                    severity=Severity,
                    severity_color=vul_col,
                    description=str(Host)
                    + str(Port)
                    + str(SectionText)
                    + str(AttackMethod),
                    instance=VulnerableSession,
                    false_positive="Duplicate",
                    vuln_status="Duplicate",
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    scanner="Webinspect",
                )
                dump_data.save()

        # Per-scan rollup, excluding triaged false positives.
        webinspect_all_vul = WebScanResultsDb.objects.filter(
            scan_id=scan_id, false_positive="No"
        )

        duplicate_count = WebScanResultsDb.objects.filter(
            scan_id=scan_id, vuln_duplicate="Yes"
        )

        total_high = len(webinspect_all_vul.filter(severity="High"))
        total_medium = len(webinspect_all_vul.filter(severity="Medium"))
        total_low = len(webinspect_all_vul.filter(severity="Low"))
        total_info = len(webinspect_all_vul.filter(severity="Information"))
        total_duplicate = len(duplicate_count)
        total_vul = total_high + total_medium + total_low + total_info

        WebScansDb.objects.filter(scan_id=scan_id).update(
            total_vul=total_vul,
            scan_url=target,
            date_time=date_time,
            high_vul=total_high,
            medium_vul=total_medium,
            low_vul=total_low,
            info_vul=total_info,
            total_dup=total_duplicate,
        )
    trend_update()

    subject = "Archery Tool Scan Status - Webinspect Report Uploaded"
    message = (
        "Webinspect Scanner has completed the scan "
        "  %s <br> Total: %s <br>High: %s <br>"
        "Medium: %s <br>Low %s" % (Host, total_vul, total_high, total_medium, total_low)
    )

    email_sch_notify(subject=subject, message=message)
# Example #9
def xml_parser(root, project_id, scan_id, target_url):
    """
    Arachni scanner XML report parser.

    Walks every <issue> element of the Arachni XML report, stores each
    finding in WebScanResultsDb (tagging duplicates and previously
    marked false positives), then refreshes the WebScansDb summary row
    and sends a notification email.

    :param root: ElementTree root element of the Arachni XML report.
    :param project_id: id of the project the scan belongs to.
    :param scan_id: scan identifier the results are grouped under.
    :param target_url: scanned URL, stored on the scan summary row.
    :return: None
    """
    date_time = datetime.now()
    # NOTE(review): these module-level globals are kept for backward
    # compatibility -- other code in this module may read them after parsing.
    global name, description, remedy_guidance, remedy_code, severity, check, digest, references, vector, remarks, page, signature, proof, trusted, platform_type, platform_name, url, action, body, vuln_id, vul_col, ref_key, ref_values, vector_input_key, vector_input_values, vector_source_key, vector_source_values, page_body_data, request_url, request_method, request_raw, response_ip, response_raw_headers

    for issue in root:
        for data in issue:
            if data.tag != "issue":
                continue
            for vuln in data:
                vuln_id = uuid.uuid4()

                # Simple text fields: missing text falls back to "NA".
                if vuln.tag == "name":
                    name = vuln.text if vuln.text is not None else "NA"
                if vuln.tag == "description":
                    description = vuln.text if vuln.text is not None else "NA"
                if vuln.tag == "remedy_guidance":
                    remedy_guidance = vuln.text if vuln.text is not None else "NA"
                if vuln.tag == "severity":
                    severity = vuln.text if vuln.text is not None else "NA"

                if vuln.tag == "references":
                    for ref_vuln in vuln:
                        for key, values in ref_vuln.attrib.items():
                            ref_key = key if key is not None else "NA"
                            ref_values = values if values is not None else "NA"

                if vuln.tag == "vector":
                    for vec_vuln in vuln:
                        if vec_vuln.tag == "inputs":
                            for vec_input in vec_vuln:
                                for key, values in vec_input.attrib.items():
                                    vector_input_key = key if key is not None else "NA"
                                    vector_input_values = (
                                        values if values is not None else "NA"
                                    )
                        if vec_vuln.tag == "source":
                            for vec_source in vec_vuln:
                                for key, values in vec_source.attrib.items():
                                    vector_source_key = key if key is not None else "NA"
                                    # BUG FIX: the original tested
                                    # "values in None", which raises a
                                    # TypeError whenever this branch runs.
                                    vector_source_values = (
                                        values if values is not None else "NA"
                                    )

                if vuln.tag == "page":
                    for page_body in vuln:
                        if page_body.tag == "body":
                            page_body_data = (
                                page_body.text if page_body.text is not None else "NA"
                            )
                    for req in vuln:
                        if req.tag == "request":
                            for req_dat in req:
                                if req_dat.tag == "url":
                                    request_url = (
                                        req_dat.text if req_dat.text is not None else "NA"
                                    )
                                if req_dat.tag == "method":
                                    request_method = (
                                        req_dat.text if req_dat.text is not None else "NA"
                                    )
                                if req_dat.tag == "raw":
                                    request_raw = (
                                        req_dat.text if req_dat.text is not None else "NA"
                                    )
                        if req.tag == "response":
                            for res_dat in req:
                                if res_dat.tag == "ip_address":
                                    response_ip = (
                                        res_dat.text if res_dat.text is not None else "NA"
                                    )
                                if res_dat.tag == "raw_headers":
                                    response_raw_headers = (
                                        res_dat.text if res_dat.text is not None else "NA"
                                    )

                if vuln.tag == "proof":
                    proof = vuln.text if vuln.text is not None else "NA"

                for extra_data in vuln:
                    for extra_vuln in extra_data:
                        if extra_vuln.tag == "url":
                            url = extra_vuln.text if extra_vuln.text is not None else "NA"
                        if extra_vuln.tag == "action":
                            action = (
                                extra_vuln.text if extra_vuln.text is not None else "NA"
                            )
                        if extra_vuln.tag == "body":
                            body = (
                                extra_vuln.text if extra_vuln.text is not None else "NA"
                            )

                details = (description + "\n\n" + str(proof) + "\n\n" +
                           str(ref_values) + "\n\n" + str(page_body_data))

                # A finding is identified by (title, url, severity).
                dup_data = name + url + severity
                duplicate_hash = hashlib.sha256(
                    dup_data.encode("utf-8")).hexdigest()

                # BUG FIX: the original filtered on vuln_duplicate (a
                # "Yes"/"No" flag) with the hash value, so duplicates were
                # never detected; filter on the stored hash field instead,
                # matching the other scanner parsers in this module.
                match_dup = (WebScanResultsDb.objects.filter(
                    dup_hash=duplicate_hash,
                    scanner="Arachni").values("dup_hash").distinct())
                lenth_match = len(match_dup)

                # Map Arachni's lower-case severities onto the UI values;
                # anything unrecognised is bucketed as Low (as before).
                if severity == "high":
                    vul_col = "danger"
                    severity = "High"
                elif severity == "medium":
                    vul_col = "warning"
                    severity = "Medium"
                else:
                    severity = "Low"
                    vul_col = "info"

                if lenth_match == 0:
                    duplicate_vuln = "No"

                    global false_positive
                    false_p = WebScanResultsDb.objects.filter(
                        false_positive_hash=duplicate_hash)
                    false_positive = "Yes" if len(false_p) == 1 else "No"

                    dump_data = WebScanResultsDb(
                        vuln_id=vuln_id,
                        scan_id=scan_id,
                        date_time=date_time,
                        severity_color=vul_col,
                        project_id=project_id,
                        title=name,
                        description=details,
                        solution=remedy_guidance,
                        severity=severity,
                        url=url,
                        false_positive=false_positive,
                        vuln_status="Open",
                        false_positive_hash=duplicate_hash,
                        # dup_hash is stored so the lookup above can match
                        # future occurrences of the same finding.
                        dup_hash=duplicate_hash,
                        vuln_duplicate=duplicate_vuln,
                        scanner="Arachni",
                    )
                    dump_data.save()

                else:
                    duplicate_vuln = "Yes"

                    dump_data = WebScanResultsDb(
                        vuln_id=vuln_id,
                        scan_id=scan_id,
                        date_time=date_time,
                        severity_color=vul_col,
                        project_id=project_id,
                        title=name,
                        description=details,
                        solution=remedy_guidance,
                        severity=severity,
                        url=url,
                        false_positive="Duplicate",
                        vuln_status="Duplicate",
                        false_positive_hash=duplicate_hash,
                        dup_hash=duplicate_hash,
                        vuln_duplicate=duplicate_vuln,
                        scanner="Arachni",
                    )
                    dump_data.save()

    # Recompute the summary from the stored rows (count() avoids
    # materialising whole querysets as len() did).
    arachni_all_vul = WebScanResultsDb.objects.filter(scan_id=scan_id,
                                                      false_positive="No",
                                                      scanner="Arachni")

    duplicate_count = WebScanResultsDb.objects.filter(scan_id=scan_id,
                                                      vuln_duplicate="Yes",
                                                      scanner="Arachni")

    total_high = arachni_all_vul.filter(severity="High").count()
    total_medium = arachni_all_vul.filter(severity="Medium").count()
    total_low = arachni_all_vul.filter(severity="Low").count()
    total_info = arachni_all_vul.filter(severity="Informational").count()
    total_duplicate = duplicate_count.count()
    total_vul = total_high + total_medium + total_low + total_info

    WebScansDb.objects.filter(scan_id=scan_id).update(
        scan_url=target_url,
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        info_vul=total_info,
        scan_status="100",
        total_dup=total_duplicate,
        scanner="Arachni",
    )
    trend_update()

    subject = "Archery Tool Scan Status - Arachni Report Uploaded"
    message = ("Arachni Scanner has completed the scan "
               "  %s <br> Total: %s <br>High: %s <br>"
               "Medium: %s <br>Low %s" %
               (url, total_vul, total_high, total_medium, total_low))

    email_sch_notify(subject=subject, message=message)
# Example #10
def nodejsscan_report_json(data, project_id, scan_id):
    """
    NodeJsScan JSON report parser.

    Stores every finding from the report's "sec_issues" section into
    StaticScanResultsDb (tagging duplicates and previously marked false
    positives), then refreshes the StaticScansDb summary row and sends a
    single notification email.

    :param data: parsed NodeJsScan JSON report (dict).
    :param project_id: id of the project the scan belongs to.
    :param scan_id: scan identifier the results are grouped under.
    :return: None
    """
    date_time = datetime.now()
    global vul_col, severity

    # PERF FIX: load the title->severity lookup table once; the original
    # re-opened and re-parsed this JSON file for every single finding.
    with open(
        "scanners/scanner_parser/staticscanner_parser/nodejsscan_vuln.json"
    ) as f:
        vuln_name = json.load(f)

    for vuln in data["sec_issues"]:
        for vuln_dat in data["sec_issues"][vuln]:
            # BUG FIX: reset per finding; the original carried the previous
            # finding's (global) severity over when a title had no mapping.
            severity = "Low"
            for v in vuln_name["vuln"]:
                if v["name"] == vuln_dat["title"]:
                    severity = v["severity"]

            title = vuln_dat["title"]
            filename = vuln_dat["filename"]
            path = vuln_dat["path"]
            description = vuln_dat["description"]
            line = vuln_dat["line"]
            lines = vuln_dat["lines"]

            if severity == "High":
                vul_col = "danger"
            elif severity == "Medium":
                vul_col = "warning"
            else:
                # Low and anything unrecognised share the "info" colour.
                vul_col = "info"

            vul_id = uuid.uuid4()

            # A finding is identified by (title, severity, file, line).
            dup_data = str(title) + str(severity) + str(filename) + str(line)
            duplicate_hash = hashlib.sha256(dup_data.encode("utf-8")).hexdigest()

            match_dup = StaticScanResultsDb.objects.filter(
                dup_hash=duplicate_hash
            ).values("dup_hash")

            if len(match_dup) == 0:
                duplicate_vuln = "No"

                false_p = StaticScanResultsDb.objects.filter(
                    false_positive_hash=duplicate_hash
                )
                false_positive = "Yes" if len(false_p) == 1 else "No"

                vuln_status = "Open"
                fp_value = false_positive
            else:
                duplicate_vuln = "Yes"
                vuln_status = "Duplicate"
                fp_value = "Duplicate"

            save_all = StaticScanResultsDb(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                severity_color=vul_col,
                vuln_status=vuln_status,
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=fp_value,
                title=title,
                fileName=filename,
                severity=severity,
                filePath=path,
                description=str(description)
                + "\n\n"
                + str(line)
                + "\n\n"
                + str(lines),
                scanner="Nodejsscan",
            )
            save_all.save()

    # BUG FIX: the summary update and notification ran INSIDE the outer
    # loop in the original, re-counting and e-mailing once per issue
    # category; they now run exactly once after all findings are stored.
    all_findbugs_data = StaticScanResultsDb.objects.filter(
        scan_id=scan_id, false_positive="No"
    )

    duplicate_count = StaticScanResultsDb.objects.filter(
        scan_id=scan_id, vuln_duplicate="Yes"
    )

    total_vul = all_findbugs_data.count()
    total_high = all_findbugs_data.filter(severity="High").count()
    total_medium = all_findbugs_data.filter(severity="Medium").count()
    total_low = all_findbugs_data.filter(severity="Low").count()
    total_duplicate = duplicate_count.count()

    StaticScansDb.objects.filter(scan_id=scan_id).update(
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate,
        scanner="Nodejsscan",
    )
    trend_update()
    subject = "Archery Tool Scan Status - Nodejsscan Report Uploaded"
    message = (
        "Nodejsscan Scanner has completed the scan "
        "  %s <br> Total: %s <br>High: %s <br>"
        "Medium: %s <br>Low %s"
        % ("Nodejsscan", total_vul, total_high, total_medium, total_low)
    )

    email_sch_notify(subject=subject, message=message)
def xml_parser(root, project_id, scan_id):
    """
    ZAP Proxy scanner xml report parser.

    Walks every <alertitem> of the ZAP XML report, stores each alert in
    WebScanResultsDb (marking duplicates and known false positives),
    then refreshes the WebScansDb summary row and sends a notification
    email.

    :param root: ElementTree root element of the ZAP XML report.
    :param project_id: id of the project the scan belongs to.
    :param scan_id: scan identifier the results are grouped under.
    :return: None
    """
    date_time = datetime.now()
    # NOTE(review): module-level globals kept for backward compatibility.
    global vul_col, risk, reference, url, solution, instance, alert, desc, riskcode, vuln_id, false_positive, duplicate_hash, duplicate_vuln, scan_url, title

    # The <site name="..."> attribute carries the scanned URL.
    for child in root:
        d = child.attrib
        scan_url = d["name"]

    for alert in root.iter("alertitem"):
        inst = []
        for vuln in alert:
            vuln_id = uuid.uuid4()
            if vuln.tag == "alert":
                alert = vuln.text
            if vuln.tag == "name":
                title = vuln.text
            if vuln.tag == "solution":
                solution = vuln.text
            if vuln.tag == "reference":
                reference = vuln.text
            if vuln.tag == "riskcode":
                riskcode = vuln.text
            # Collect every instance (uri/method/evidence/...) with HTML
            # tags stripped out of the text.
            for instances in vuln:
                for ii in instances:
                    instance = {}
                    dd = re.sub(r"<[^>]*>", " ", ii.text)
                    instance[ii.tag] = dd
                    inst.append(instance)

            if vuln.tag == "desc":
                desc = vuln.text
            # Map ZAP risk codes onto severity labels / colours;
            # anything other than 3 or 2 is treated as Low (as before).
            if riskcode == "3":
                vul_col = "danger"
                risk = "High"
            elif riskcode == "2":
                vul_col = "warning"
                risk = "Medium"
            else:
                vul_col = "info"
                risk = "Low"

        if title == "None":
            print(title)
        else:
            duplicate_hash = check_false_positive(title=title,
                                                  severity=risk,
                                                  scan_url=scan_url)
            match_dup = (WebScanResultsDb.objects.filter(
                dup_hash=duplicate_hash).values("dup_hash").distinct())

            if len(match_dup) == 0:
                duplicate_vuln = "No"
                vuln_status = "Open"
                # BUG FIX: resolve the false-positive flag BEFORE saving;
                # the original looked it up only after data_store.save(),
                # so the row was stored with a stale (or, on the very
                # first alert, undefined) false_positive value.
                false_p = WebScanResultsDb.objects.filter(
                    false_positive_hash=duplicate_hash)
                false_positive = "Yes" if len(false_p) == 1 else "No"
            else:
                duplicate_vuln = "Yes"
                false_positive = "Duplicate"
                vuln_status = "Duplicate"

            data_store = WebScanResultsDb(
                vuln_id=vuln_id,
                severity_color=vul_col,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                url=scan_url,
                title=title,
                solution=solution,
                instance=inst,
                reference=reference,
                description=desc,
                severity=risk,
                false_positive=false_positive,
                jira_ticket="NA",
                vuln_status=vuln_status,
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                scanner="Zap",
            )

            data_store.save()

    zap_all_vul = WebScanResultsDb.objects.filter(scan_id=scan_id,
                                                  false_positive="No")

    duplicate_count = WebScanResultsDb.objects.filter(scan_id=scan_id,
                                                      vuln_duplicate="Yes")

    total_high = zap_all_vul.filter(severity="High").count()
    total_medium = zap_all_vul.filter(severity="Medium").count()
    total_low = zap_all_vul.filter(severity="Low").count()
    total_info = zap_all_vul.filter(severity="Informational").count()
    total_duplicate = duplicate_count.count()
    total_vul = total_high + total_medium + total_low + total_info

    WebScansDb.objects.filter(scan_id=scan_id).update(
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        info_vul=total_info,
        total_dup=total_duplicate,
        scan_url=scan_url,
    )
    # NOTE(review): the original issued a second, redundant update with a
    # subset of the same values when total_vul == total_duplicate; it
    # never changed the stored row and has been removed.

    trend_update()

    subject = "Archery Tool Scan Status - ZAP Report Uploaded"
    message = ("ZAP Scanner has completed the scan "
               "  %s <br> Total: %s <br>High: %s <br>"
               "Medium: %s <br>Low %s" %
               (scan_url, total_vul, total_high, total_medium, total_low))

    email_sch_notify(subject=subject, message=message)
def twistlock_report_json(data, project_id, scan_id, username):
    """
    Twistlock JSON report parser.

    Stores every entry from data['results'][0]['vulnerabilities'] into
    twistlock_scan_results_db (tagging duplicates and previously marked
    false positives), then refreshes the twistlock_scan_db summary row
    and sends a notification email.

    Expected input shape (abridged):
        {"results": [{"vulnerabilities": [
            {"id": "...", "cvss": 9.8, "vector": "...",
             "description": "...", "severity": "critical",
             "packageName": "...", "packageVersion": "...",
             "link": "..."}]}]}

    :param data: parsed Twistlock JSON report (dict).
    :param project_id: id of the project the scan belongs to.
    :param scan_id: scan identifier the results are grouped under.
    :param username: owning user, stored on every row.
    :return: None
    """
    global false_positive
    date_time = datetime.now()
    vul_col = ''

    vuln = data['results'][0]['vulnerabilities']

    for vuln_data in vuln:
        # Missing keys fall back to "Not Found" -- the report schema does
        # not guarantee every field on every entry.
        name = vuln_data.get('id', "Not Found")
        cvss = vuln_data.get('cvss', "Not Found")
        vector = vuln_data.get('vector', "Not Found")
        description = vuln_data.get('description', "Not Found")
        packageName = vuln_data.get('packageName', "Not Found")
        packageVersion = vuln_data.get('packageVersion', "Not Found")
        link = vuln_data.get('link', "Not Found")

        # BUG FIX: Twistlock reports severities in lower case
        # ('critical', 'high', ...) while the colour mapping and the
        # summary counts below compare capitalised values, so normalise
        # here; the original only handled the literal 'critical' and left
        # vul_col empty for everything else.
        severity = str(vuln_data.get('severity', "Not Found")).capitalize()

        if severity == "Critical":
            severity = 'High'
            vul_col = "danger"
        elif severity == "High":
            vul_col = "danger"
        elif severity == 'Medium':
            vul_col = "warning"
        elif severity == 'Low':
            vul_col = "info"
        else:
            # 'Unknown' and anything unrecognised is bucketed as Low.
            severity = "Low"
            vul_col = "info"

        vul_id = uuid.uuid4()

        # A finding is identified by (CVE id, severity, package name).
        dup_data = str(name) + str(severity) + str(packageName)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()

        match_dup = twistlock_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')

        if len(match_dup) == 0:
            duplicate_vuln = 'No'
            vuln_status = 'Open'

            false_p = twistlock_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            false_positive = 'Yes' if len(false_p) == 1 else 'No'
            fp_value = false_positive
        else:
            duplicate_vuln = 'Yes'
            vuln_status = 'Duplicate'
            fp_value = 'Duplicate'

        save_all = twistlock_scan_results_db(
            vuln_id=vul_id,
            scan_id=scan_id,
            date_time=date_time,
            project_id=project_id,
            vul_col=vul_col,
            vuln_status=vuln_status,
            dup_hash=duplicate_hash,
            vuln_duplicate=duplicate_vuln,
            false_positive=fp_value,
            username=username,
            name=name,
            cvss=cvss,
            description=description,
            Severity=severity,
            packageName=packageName,
            packageVersion=packageVersion,
            link=link,
        )
        save_all.save()

    all_findbugs_data = twistlock_scan_results_db.objects.filter(
        username=username,
        scan_id=scan_id,
        false_positive='No',
        vuln_duplicate='No')

    duplicate_count = twistlock_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_vul = all_findbugs_data.count()
    total_high = all_findbugs_data.filter(Severity="High").count()
    total_medium = all_findbugs_data.filter(Severity="Medium").count()
    total_low = all_findbugs_data.filter(Severity="Low").count()
    total_duplicate = duplicate_count.count()

    twistlock_scan_db.objects.filter(scan_id=scan_id).update(
        username=username,
        date_time=date_time,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - twistlock Report Uploaded'
    # BUG FIX: the original interpolated the undefined name `Target`,
    # raising NameError before the mail could be sent; report the scan id
    # instead.
    message = 'twistlock Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
def xml_parser(root, project_id, scan_id, username):
    """Parse a FindBugs XML report and persist its findings.

    Each ``BugInstance`` element is mapped to a risk level from its
    ``priority`` attribute, de-duplicated by a SHA-256 hash of
    (name + classname + risk) and stored in ``findbugs_scan_results_db``.
    ``BugPattern`` elements back-fill the short description / details of
    the stored findings.  Per-scan totals are then written to
    ``findbugs_scan_db`` and a notification e-mail is sent.

    :param root: ElementTree root of the FindBugs XML report.
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of this scan run.
    :param username: scan owner; scopes every DB query.
    :return: None
    """
    date_time = datetime.now()
    # These names are module-level state shared with other parsers in this
    # file, so they must stay global.
    global name, classname, risk, ShortMessage, LongMessage, sourcepath, vul_col, \
        ShortDescription, Details, lenth_match, duplicate_hash, vul_id, sourcefile
    for bug in root:
        if bug.tag == 'BugInstance':
            name = bug.attrib['type']
            priority = bug.attrib['priority']
            for BugInstance in bug:
                if BugInstance.tag == 'ShortMessage':
                    ShortMessage = BugInstance.text
                if BugInstance.tag == 'LongMessage':
                    LongMessage = BugInstance.text
                if BugInstance.tag == 'Class':
                    classname = BugInstance.attrib['classname']
                if BugInstance.tag == 'SourceLine':
                    sourcepath = BugInstance.attrib['sourcepath']
                    sourcefile = BugInstance.attrib['sourcefile']

            # Severity mapping, hoisted out of the child loop: it depends
            # only on the BugInstance's own 'priority' attribute.
            if priority == "1":
                risk = 'High'
                vul_col = "danger"
            elif priority == '2':
                risk = 'Medium'
                vul_col = "warning"
            elif priority == '3':
                # Fix: priority 3 is FindBugs' lowest rank.  It used to be
                # labelled 'Medium' although the summary below counts
                # priority "3" findings as Low, so label and totals disagreed.
                risk = 'Low'
                vul_col = "info"

            vul_id = uuid.uuid4()

            dup_data = name + classname + risk
            duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()

            match_dup = findbugs_scan_results_db.objects.filter(
                username=username,
                dup_hash=duplicate_hash).values('dup_hash')
            lenth_match = len(match_dup)

            if lenth_match == 0:
                duplicate_vuln = 'No'

                # A stored false-positive hash marks this finding as a
                # previously triaged false positive.
                false_p = findbugs_scan_results_db.objects.filter(
                    username=username, false_positive_hash=duplicate_hash)
                false_positive = 'Yes' if len(false_p) == 1 else 'No'

                save_all = findbugs_scan_results_db(
                    vuln_id=vul_id,
                    date_time=date_time,
                    scan_id=scan_id,
                    project_id=project_id,
                    name=name,
                    priority=priority,
                    ShortMessage=ShortMessage,
                    LongMessage=LongMessage,
                    classname=classname,
                    sourcepath=sourcepath,
                    vul_col=vul_col,
                    vuln_status='Open',
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    false_positive=false_positive,
                    risk=risk,
                    username=username
                )
                save_all.save()

            else:
                duplicate_vuln = 'Yes'

                save_all = findbugs_scan_results_db(
                    vuln_id=vul_id,
                    scan_id=scan_id,
                    date_time=date_time,
                    project_id=project_id,
                    name=name,
                    priority=priority,
                    ShortMessage=ShortMessage,
                    LongMessage=LongMessage,
                    classname=classname,
                    sourcepath=sourcepath,
                    vul_col=vul_col,
                    vuln_status='Duplicate',
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    false_positive='Duplicate',
                    risk=risk,
                    username=username
                )
                save_all.save()

        if bug.tag == 'BugPattern':
            # 'type' identifies which stored findings this pattern describes.
            name = bug.attrib['type']
            for BugPattern in bug:
                if BugPattern.tag == 'ShortDescription':
                    ShortDescription = BugPattern.text
                if BugPattern.tag == 'Details':
                    Details = BugPattern.text
            if len(bug) != 0:
                # One UPDATE per pattern (previously re-run for every child
                # element); skipped when the pattern has no children so stale
                # module-level values are never written back.
                findbugs_scan_results_db.objects.filter(
                    username=username, scan_id=scan_id, name=name).update(
                    ShortDescription=ShortDescription,
                    Details=Details,
                )

    # Scan summary, hoisted out of the parse loop: it only needs to run once,
    # after every finding has been stored.
    all_findbugs_data = findbugs_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')

    duplicate_count = findbugs_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(priority="1"))
    total_medium = len(all_findbugs_data.filter(priority="2"))
    total_low = len(all_findbugs_data.filter(priority="3"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))

    findbugs_scan_db.objects.filter(username=username, scan_id=scan_id).update(
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate
    )
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - Findbugs Report Uploaded'
    message = 'Findbugs Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (scan_id, total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
# Example #14
def checkmarx_report_xml(data, project_id, scan_id):
    """Parse a Checkmarx XML report and persist its findings.

    ``data`` is the report root (its attributes hold the scan details);
    each child element is a query with ``name``/``Severity`` attributes
    whose children carry the affected ``FileName`` plus nested ``Code``
    snippets.  Findings are de-duplicated by a SHA-256 hash of
    (name + severity + file name) and stored in ``StaticScanResultsDb``;
    per-scan totals are then written to ``StaticScansDb`` and a
    notification e-mail is sent.

    :param data: ElementTree root of the Checkmarx XML report.
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of this scan run.
    :return: None
    """
    date_time = datetime.now()
    # Module-level state shared with other parsers in this file.
    global vul_col, project, result, result_data, file_name, inst, code_data
    project = data.attrib["ProjectName"]
    scan_details = data.attrib
    for dat in data:
        name = dat.attrib["name"]
        severity = dat.attrib["Severity"]
        code_data = []
        for dd in dat:
            result_data = dd.attrib
            file_name = dd.attrib["FileName"]
            for d in dd.findall(".//Code"):
                result = d.text
                # Map the offending file name to its code snippet.
                code_data.append({file_name: d.text})
        if severity == "High":
            vul_col = "danger"
        elif severity == "Medium":
            vul_col = "warning"
        elif severity == "Low":
            vul_col = "info"
        else:
            # Unrecognised severities are stored as Low.
            severity = "Low"
            vul_col = "info"
        vul_id = uuid.uuid4()

        dup_data = str(name) + str(severity) + str(file_name)
        duplicate_hash = hashlib.sha256(dup_data.encode("utf-8")).hexdigest()
        match_dup = StaticScanResultsDb.objects.filter(
            dup_hash=duplicate_hash).values("dup_hash")
        lenth_match = len(match_dup)
        if lenth_match == 0:
            duplicate_vuln = "No"

            # A stored false-positive hash marks this finding as a
            # previously triaged false positive.
            false_p = StaticScanResultsDb.objects.filter(
                false_positive_hash=duplicate_hash)
            false_positive = "Yes" if len(false_p) == 1 else "No"

            save_all = StaticScanResultsDb(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                severity_color=vul_col,
                vuln_status="Open",
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                title=name,
                severity=severity,
                description=str(scan_details),
                fileName=file_name,
                scanner="Checkmarx",
            )
            save_all.save()

        else:
            duplicate_vuln = "Yes"

            save_all = StaticScanResultsDb(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                severity_color=vul_col,
                vuln_status="Duplicate",
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive="Duplicate",
                title=name,
                severity=severity,
                description=str(scan_details),
                fileName=file_name,
                scanner="Checkmarx",
            )
            save_all.save()

    # Scan summary: totals exclude false positives; duplicates counted apart.
    all_findbugs_data = StaticScanResultsDb.objects.filter(scan_id=scan_id,
                                                           false_positive="No")

    duplicate_count = StaticScanResultsDb.objects.filter(scan_id=scan_id,
                                                         vuln_duplicate="Yes")

    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_vul = len(all_findbugs_data)
    total_duplicate = len(duplicate_count.filter(vuln_duplicate="Yes"))

    StaticScansDb.objects.filter(scan_id=scan_id).update(
        project_name=project,
        date_time=date_time,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate,
        scanner="Checkmarx",
    )
    trend_update()
    subject = "Archery Tool Scan Status - checkmarx Report Uploaded"
    message = ("checkmarx Scanner has completed the scan "
               "  %s <br> Total: %s <br>High: %s <br>"
               "Medium: %s <br>Low %s" %
               ("checkmarx", total_vul, total_high, total_medium, total_low))

    email_sch_notify(subject=subject, message=message)
# Example #15
def burp_scan_data(root, project_id, scan_id):
    """Parse a Burp Suite XML report and persist its findings.

    Each ``<issue>`` element's child tags (name, host, path, location,
    severity, requestresponse, issueBackground, remediationBackground,
    references, vulnerabilityClassifications) are read into module-level
    state.  Burp base64-encodes the raw request/response, so those are
    decoded before being embedded in the stored description.  Findings are
    de-duplicated by a SHA-256 hash of (name + host + location + details +
    severity) and stored in ``WebScanResultsDb``; per-scan totals are then
    written to ``WebScansDb`` and a notification e-mail is sent.

    :param root: ElementTree root of the Burp XML report.
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of this scan run.
    :return: None
    """
    date_time = datetime.now()
    # Module-level state shared with other parsers in this file.
    global vuln_id, burp_status, vul_col, issue_description, issue_remediation, \
        issue_reference, issue_vulnerability_classifications, severity, name, \
        path, host, location, confidence, types, serialNumber, request_datas, \
        response_datas, url, requestresponse, methods, false_positive

    def _text_or_na(node):
        # Burp omits text for some elements; store "NA" instead of None.
        return "NA" if node.text is None else node.text

    for issue in root:
        for data in issue:
            if data.tag == "name":
                name = _text_or_na(data)
            if data.tag == "host":
                host = _text_or_na(data)
            if data.tag == "path":
                path = _text_or_na(data)
            if data.tag == "location":
                location = _text_or_na(data)
            if data.tag == "severity":
                severity = _text_or_na(data)

            if data.tag == "requestresponse":
                requestresponse = _text_or_na(data)
                for d in data:
                    if d.tag == "request":
                        # Burp base64-encodes the raw HTTP request.
                        request_datas = base64.b64decode(d.text)
                    if d.tag == "response":
                        # ... and the raw HTTP response.
                        response_datas = base64.b64decode(d.text)
                    for key, items in d.attrib.items():
                        if key == "method":
                            methods = items

            if data.tag == "issueBackground":
                issue_description = _text_or_na(data)
            if data.tag == "remediationBackground":
                issue_remediation = _text_or_na(data)
            if data.tag == "references":
                issue_reference = _text_or_na(data)
            if data.tag == "vulnerabilityClassifications":
                issue_vulnerability_classifications = _text_or_na(data)

        details = (
            str(issue_description)
            + str("\n")
            + str(request_datas)
            + str("\n\n")
            + str(response_datas)
            + str("\n\n")
            + str("\n\n")
            + str(issue_description)
            + str("\n\n")
            + str(issue_vulnerability_classifications)
        )

        if severity == "High":
            vul_col = "danger"
        elif severity == "Medium":
            vul_col = "warning"
        elif severity == "Low":
            vul_col = "info"
        else:
            # NOTE(review): this also rewrites "Information" to Low, which is
            # why the info_vul counter below can only ever be 0 — confirm
            # whether that is intended before changing it.
            severity = "Low"
            vul_col = "info"

        vuln_id = uuid.uuid4()

        # Fix: compute the URL of *this* issue before the duplicate check;
        # previously it was only set on the non-duplicate path, so duplicate
        # rows were stored with the URL of an earlier issue.
        url = host + location

        dup_data = name + host + location + details + severity
        duplicate_hash = hashlib.sha256(dup_data.encode("utf-8")).hexdigest()

        match_dup = (
            WebScanResultsDb.objects.filter(dup_hash=duplicate_hash, scanner="Burp")
            .values("dup_hash")
            .distinct()
        )
        lenth_match = len(match_dup)

        if lenth_match == 0:
            duplicate_vuln = "No"

            # A stored false-positive hash marks this finding as a
            # previously triaged false positive.
            false_p = WebScanResultsDb.objects.filter(
                false_positive_hash=duplicate_hash, scanner="Burp"
            )
            false_positive = "Yes" if len(false_p) == 1 else "No"

            try:
                data_dump = WebScanResultsDb(
                    scan_id=scan_id,
                    vuln_id=vuln_id,
                    url=url,
                    title=name,
                    solution=issue_remediation,
                    description=details,
                    reference=issue_reference,
                    project_id=project_id,
                    severity_color=vul_col,
                    severity=severity,
                    date_time=date_time,
                    false_positive=false_positive,
                    vuln_status="Open",
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    scanner="Burp",
                )
                data_dump.save()
            except Exception as e:
                print(e)

        else:
            duplicate_vuln = "Yes"

            try:
                data_dump = WebScanResultsDb(
                    scan_id=scan_id,
                    vuln_id=vuln_id,
                    url=url,
                    title=name,
                    solution=issue_remediation,
                    description=issue_description,
                    reference=issue_reference,
                    project_id=project_id,
                    severity_color=vul_col,
                    severity=severity,
                    date_time=date_time,
                    false_positive="Duplicate",
                    vuln_status="Duplicate",
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    scanner="Burp",
                )
                data_dump.save()
            except Exception as e:
                print(e)

    # Scan summary: totals exclude false positives; duplicates counted apart.
    burp_all_vul = WebScanResultsDb.objects.filter(
        scan_id=scan_id, scanner="Burp", false_positive="No"
    )

    duplicate_count = WebScanResultsDb.objects.filter(
        scan_id=scan_id, scanner="Burp", vuln_duplicate="Yes"
    )

    total_vul = len(burp_all_vul)
    total_high = len(burp_all_vul.filter(severity="High"))
    total_medium = len(burp_all_vul.filter(severity="Medium"))
    total_low = len(burp_all_vul.filter(severity="Low"))
    total_info = len(burp_all_vul.filter(severity="Information"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate="Yes"))
    WebScansDb.objects.filter(scan_id=scan_id, scanner="Burp").update(
        scan_url=host,
        date_time=date_time,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        info_vul=total_info,
        total_dup=total_duplicate,
    )
    print(host)
    trend_update()
    subject = "Archery Tool Scan Status - Burp Report Uploaded"
    message = (
        "Burp Scanner has completed the scan "
        "  %s <br> Total: %s <br>High: %s <br>"
        "Medium: %s <br>Low %s" % (host, total_vul, total_high, total_medium, total_low)
    )

    email_sch_notify(subject=subject, message=message)

    try:
        email_notification.email_notify()
    except Exception as e:
        print(e)
    # NOTE(review): this response object is created and immediately
    # discarded (the function returns None) — confirm whether the caller
    # should receive it before removing.
    HttpResponse(status=201)
# Example #16
def xml_parser(root, project_id, scan_id, username):
    """Parse a WebInspect XML report and persist its findings.

    Walks Session -> Issues -> Issue elements, reading each issue's child
    tags (URL, Host, Port, AttackMethod, sessions, VulnerabilityID,
    Severity, Name, ReportSection, HighlightSelections, RawResponse,
    nested SectionText).  WebInspect's numeric severity is mapped to a
    label, findings are de-duplicated by a SHA-256 hash of
    (Name + url + Severity) and stored in ``webinspect_scan_result_db``;
    per-scan totals are then written to ``webinspect_scan_db`` and a
    notification e-mail is sent.

    :param root: ElementTree root of the WebInspect XML report.
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of this scan run.
    :param username: scan owner; scopes every DB query.
    :return: None
    """
    # Module-level state shared with other parsers in this file.
    global url, Scheme, Host, Port, AttackMethod, VulnerableSession, \
        TriggerSession, VulnerabilityID, Severity, Name, ReportSection, \
        HighlightSelections, RawResponse, SectionText, vuln_id, \
        severity_name, vul_col, false_positive
    date_time = datetime.now()
    for data in root:
        for issues in data:
            for issue in issues:
                if issue.tag == 'URL':
                    url = issue.text

                if issue.tag == 'Host':
                    Host = issue.text

                if issue.tag == 'Port':
                    Port = issue.text

                if issue.tag == 'AttackMethod':
                    AttackMethod = issue.text

                if issue.tag == 'VulnerableSession':
                    VulnerableSession = issue.text

                if issue.tag == 'TriggerSession':
                    TriggerSession = issue.text

                if issue.tag == 'VulnerabilityID':
                    VulnerabilityID = issue.text

                if issue.tag == 'Severity':
                    Severity = issue.text

                if issue.tag == 'Name':
                    Name = issue.text

                if issue.tag == 'ReportSection':
                    ReportSection = issue.text

                if issue.tag == 'HighlightSelections':
                    HighlightSelections = issue.text

                if issue.tag == 'RawResponse':
                    RawResponse = issue.text

                for d_issue in issue:
                    if d_issue.tag == 'SectionText':
                        # Fix: read the nested SectionText element's own
                        # text; this previously read the parent element's
                        # text (issue.text) instead.
                        SectionText = d_issue.text

                vuln_id = uuid.uuid4()

            # WebInspect severities are numeric: 4 and 3 map to High,
            # 2 to Medium, 1 and anything else to Low.
            if Severity == "4" or Severity == "3":
                Severity = 'High'
                vul_col = 'danger'
            elif Severity == "2":
                Severity = 'Medium'
                vul_col = "warning"
            else:
                Severity = 'Low'
                vul_col = "info"

            dup_data = Name + url + Severity
            duplicate_hash = hashlib.sha256(
                dup_data.encode('utf-8')).hexdigest()

            match_dup = webinspect_scan_result_db.objects.filter(
                username=username,
                dup_hash=duplicate_hash).values('dup_hash').distinct()
            lenth_match = len(match_dup)

            if lenth_match == 0:
                duplicate_vuln = 'No'

                # A stored false-positive hash marks this finding as a
                # previously triaged false positive.
                false_p = webinspect_scan_result_db.objects.filter(
                    username=username, false_positive_hash=duplicate_hash)
                false_positive = 'Yes' if len(false_p) == 1 else 'No'

                if Name is None:
                    # Issues without a Name are not stored.
                    print(Name)
                else:
                    # NOTE(review): severity_name is never assigned in this
                    # parser; it relies on module-level state set elsewhere
                    # in the file — confirm before refactoring.
                    dump_data = webinspect_scan_result_db(
                        scan_id=scan_id,
                        vuln_id=vuln_id,
                        vuln_url=url,
                        date_time=date_time,
                        host=Host,
                        port=Port,
                        attackmethod=AttackMethod,
                        vulnerablesession=VulnerableSession,
                        triggerSession=TriggerSession,
                        vulnerabilityID=VulnerabilityID,
                        severity=Severity,
                        name=Name,
                        reportSection=ReportSection,
                        highlightSelections=HighlightSelections,
                        rawResponse=RawResponse,
                        SectionText=SectionText,
                        severity_name=severity_name,
                        vuln_color=vul_col,
                        false_positive=false_positive,
                        vuln_status='Open',
                        dup_hash=duplicate_hash,
                        vuln_duplicate=duplicate_vuln,
                        project_id=project_id,
                        username=username)
                    dump_data.save()

            else:
                duplicate_vuln = 'Yes'

                dump_data = webinspect_scan_result_db(
                    scan_id=scan_id,
                    vuln_id=vuln_id,
                    vuln_url=url,
                    date_time=date_time,
                    host=Host,
                    port=Port,
                    attackmethod=AttackMethod,
                    vulnerablesession=VulnerableSession,
                    triggerSession=TriggerSession,
                    vulnerabilityID=VulnerabilityID,
                    severity=Severity,
                    name=Name,
                    reportSection=ReportSection,
                    highlightSelections=HighlightSelections,
                    rawResponse=RawResponse,
                    SectionText=SectionText,
                    severity_name=severity_name,
                    vuln_color=vul_col,
                    false_positive='Duplicate',
                    vuln_status='Duplicate',
                    dup_hash=duplicate_hash,
                    vuln_duplicate=duplicate_vuln,
                    project_id=project_id,
                    username=username)
                dump_data.save()

    # Scan summary, hoisted out of the parse loop: it only needs to run once,
    # after every finding has been stored.
    webinspect_all_vul = webinspect_scan_result_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')

    duplicate_count = webinspect_scan_result_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_critical = len(webinspect_all_vul.filter(severity='Critical'))
    total_high = len(webinspect_all_vul.filter(severity="High"))
    total_medium = len(webinspect_all_vul.filter(severity="Medium"))
    total_low = len(webinspect_all_vul.filter(severity="Low"))
    total_info = len(webinspect_all_vul.filter(severity="Information"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    total_vul = total_critical + total_high + total_medium + total_low + total_info

    webinspect_scan_db.objects.filter(username=username,
                                      scan_id=scan_id).update(
                                          total_vul=total_vul,
                                          date_time=date_time,
                                          high_vul=total_high,
                                          medium_vul=total_medium,
                                          low_vul=total_low,
                                          critical_vul=total_critical,
                                          info_vul=total_info,
                                          total_dup=total_duplicate)
    trend_update(username=username)

    subject = 'Archery Tool Scan Status - Webinspect Report Uploaded'
    message = 'Webinspect Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (Host, total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
# Example #17
def twistlock_report_json(data, project_id, scan_id):
    """Parse a Twistlock (Prisma Cloud) image-scan JSON report and persist it.

    Expected input shape (abridged)::

        {"results": [{
            "vulnerabilities": [{
                "id": "CVE-...", "cvss": 9.8, "vector": "CVSS:3.0/...",
                "description": "...", "severity": "critical",
                "packageName": "pycrypto", "packageVersion": "2.6.1",
                "link": "https://..."}],
            ...}]}

    Each vulnerability is de-duplicated by a SHA-256 hash of
    (id + severity + packageName) and stored in ``StaticScanResultsDb``;
    per-scan totals are then written to ``StaticScansDb`` and a
    notification e-mail is sent.

    :param data: parsed JSON report (dict).
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of this scan run.
    :return: None
    """
    global false_positive
    date_time = datetime.now()
    vul_col = ""

    # Only the first result entry's vulnerability list is parsed.
    vuln = data["results"][0]["vulnerabilities"]

    for vuln_data in vuln:
        # Every field is optional in practice; default to "Not Found".
        name = vuln_data.get("id", "Not Found")
        cvss = vuln_data.get("cvss", "Not Found")
        vector = vuln_data.get("vector", "Not Found")
        description = vuln_data.get("description", "Not Found")
        packageName = vuln_data.get("packageName", "Not Found")
        packageVersion = vuln_data.get("packageVersion", "Not Found")
        link = vuln_data.get("link", "Not Found")

        # Fix: twistlock emits lowercase severities ("critical", "high",
        # "medium", "low") which never matched the capitalised comparisons
        # below, leaving vul_col empty and the severity totals under-counted.
        # Normalise the case first.
        severity = str(vuln_data.get("severity", "Not Found")).capitalize()

        if severity == "Critical":
            # Critical is folded into High for reporting purposes.
            severity = "High"
            vul_col = "danger"
        elif severity == "High":
            vul_col = "danger"
        elif severity == "Medium":
            vul_col = "warning"
        else:
            # "Low", "Unknown" and anything unrecognised map to Low.
            severity = "Low"
            vul_col = "info"

        vul_id = uuid.uuid4()

        dup_data = str(name) + str(severity) + str(packageName)

        duplicate_hash = hashlib.sha256(dup_data.encode("utf-8")).hexdigest()

        match_dup = StaticScanResultsDb.objects.filter(
            dup_hash=duplicate_hash).values("dup_hash")
        lenth_match = len(match_dup)

        if lenth_match == 0:
            duplicate_vuln = "No"

            # A stored false-positive hash marks this finding as a
            # previously triaged false positive.
            false_p = StaticScanResultsDb.objects.filter(
                false_positive_hash=duplicate_hash)
            false_positive = "Yes" if len(false_p) == 1 else "No"

            save_all = StaticScanResultsDb(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                severity_color=vul_col,
                vuln_status="Open",
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                title=name,
                description=str(description) + "\n\n" + str(cvss) + "\n\n" +
                str(packageVersion),
                severity=severity,
                fileName=packageName,
                references=link,
                scanner="Twistlock",
            )
            save_all.save()
        else:
            duplicate_vuln = "Yes"

            save_all = StaticScanResultsDb(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                severity_color=vul_col,
                vuln_status="Duplicate",
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive="Duplicate",
                title=name,
                description=str(description) + "\n\n" + str(cvss) + "\n\n" +
                str(packageVersion),
                severity=severity,
                fileName=packageName,
                references=link,
                scanner="Twistlock",
            )
            save_all.save()

    # Scan summary: totals exclude false positives and duplicates.
    all_findbugs_data = StaticScanResultsDb.objects.filter(scan_id=scan_id,
                                                           false_positive="No",
                                                           vuln_duplicate="No")

    duplicate_count = StaticScanResultsDb.objects.filter(scan_id=scan_id,
                                                         vuln_duplicate="Yes")

    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate="Yes"))

    StaticScansDb.objects.filter(scan_id=scan_id).update(
        date_time=date_time,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate,
        scanner="Twistlock",
    )
    trend_update()
    subject = "Archery Tool Scan Status - twistlock Report Uploaded"
    # Fix: the message previously interpolated the undefined name 'Target'
    # (module state owned by a different parser), which raised NameError;
    # use scan_id, matching the other report parsers.
    message = ("twistlock Scanner has completed the scan "
               "  %s <br> Total: %s <br>High: %s <br>"
               "Medium: %s <br>Low %s" %
               (scan_id, total_vul, total_high, total_medium, total_low))

    email_sch_notify(subject=subject, message=message)
# Example #18
def whitesource_report_json(data, project_id, scan_id):
    """
    Parse a WhiteSource JSON report and persist its findings.

    Iterates ``data['vulnerabilities']``, normalizes the severity, de-duplicates
    findings by a SHA-256 over (name, severity, project), stores each finding in
    ``StaticScanResultsDb``, updates the scan summary in ``StaticScansDb`` and
    sends a notification e-mail.

    :param data: parsed WhiteSource JSON report (expects a 'vulnerabilities' list)
    :param project_id: project the scan belongs to
    :param scan_id: identifier of this scan run
    :return: None (persists results and sends a notification e-mail)
    """
    date_time = datetime.now()

    global vul_col, project
    vuln = data["vulnerabilities"]

    for issues in vuln:
        name = issues["name"]
        severity = issues["severity"]
        score = issues["score"]
        url = issues["url"]
        description = issues["description"]
        project = issues["project"]
        library = issues["library"]
        topFix = issues["topFix"]
        filename = issues["library"]["filename"]

        # WhiteSource reports lower-case severities; normalize to the
        # High/Medium/Low convention used across all parsers.
        if severity == "high":
            severity = "High"
            vul_col = "danger"
        elif severity == "medium":
            severity = "Medium"
            vul_col = "warning"
        elif severity == "low":
            severity = "Low"
            vul_col = "info"
        else:
            # BUG FIX: an unrecognized severity previously reused whatever
            # value the global vul_col held from the last iteration.
            vul_col = "info"

        vul_id = uuid.uuid4()
        dup_data = str(name) + str(severity) + str(project)
        duplicate_hash = hashlib.sha256(dup_data.encode("utf-8")).hexdigest()
        match_dup = StaticScanResultsDb.objects.filter(dup_hash=duplicate_hash).values(
            "dup_hash"
        )

        if len(match_dup) == 0:
            duplicate_vuln = "No"
            vuln_status = "Open"

            # A finding previously marked as a false positive keeps that flag.
            false_p = StaticScanResultsDb.objects.filter(
                false_positive_hash=duplicate_hash
            )
            false_positive = "Yes" if len(false_p) == 1 else "No"
        else:
            duplicate_vuln = "Yes"
            vuln_status = "Duplicate"
            false_positive = "Duplicate"

        save_all = StaticScanResultsDb(
            vuln_id=vul_id,
            scan_id=scan_id,
            date_time=date_time,
            project_id=project_id,
            severity_color=vul_col,
            vuln_status=vuln_status,
            dup_hash=duplicate_hash,
            vuln_duplicate=duplicate_vuln,
            false_positive=false_positive,
            title=name,
            severity=severity,
            references=url,
            description=str(description)
            + "\n\n"
            + str(score)
            + "\n\n"
            + str(library)
            + "\n\n"
            + str(topFix)
            + "\n\n",
            fileName=filename,
            scanner="Whitesource",
        )
        save_all.save()

    # BUG FIX: exclude duplicates from the totals (vuln_duplicate="No"),
    # matching the counting done by the other parsers in this module.
    all_findbugs_data = StaticScanResultsDb.objects.filter(
        scan_id=scan_id, false_positive="No", vuln_duplicate="No"
    )

    duplicate_count = StaticScanResultsDb.objects.filter(
        scan_id=scan_id, vuln_duplicate="Yes"
    )

    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_duplicate = len(duplicate_count)

    StaticScansDb.objects.filter(scan_id=scan_id).update(
        project_name=project,
        date_time=date_time,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate,
        scanner="Whitesource",
    )
    trend_update()
    subject = "Archery Tool Scan Status - whitesource Report Uploaded"
    message = (
        "whitesource Scanner has completed the scan "
        "  %s <br> Total: %s <br>High: %s <br>"
        "Medium: %s <br>Low %s"
        % ("whitesource", total_vul, total_high, total_medium, total_low)
    )

    email_sch_notify(subject=subject, message=message)
Exemple #19
0
def debcvescan_report_json(data, project_id, scan_id, username):
    """
    Parse a debcvescan (Debian CVE scan) JSON report and persist its findings.

    Iterates ``data['vulnerabilities']``, skips disputed CVEs, maps the numeric
    severity (3/2/1) to High/Medium/Low, de-duplicates by a SHA-256 over
    (cve, severity, package), stores results in ``debcvescan_scan_results_db``,
    updates the scan summary and sends a notification e-mail.

    :param data: parsed debcvescan JSON report (expects a 'vulnerabilities' list)
    :param project_id: project the scan belongs to
    :param scan_id: identifier of this scan run
    :param username: owner of the scan records
    :return: None (persists results and sends a notification e-mail)
    """
    date_time = datetime.now()
    vul_col = ''

    vuln = data['vulnerabilities']

    for vuln_data in vuln:
        # Missing report keys default to "Not Found" instead of aborting the
        # whole parse (assumes each entry is a dict — matches the original's
        # per-key try/except fallback).
        description = vuln_data.get('description', "Not Found")

        # Disputed CVEs are intentionally skipped.
        if "DISPUTED" in description:
            continue

        cve = vuln_data.get('cve', "Not Found")
        severity = vuln_data.get('severity', "Not Found")
        package = vuln_data.get('package', "Not Found")
        package_ver = vuln_data.get('installed_version', "Not Found")
        fix_ver = vuln_data.get('fixed_version', "Not Found")

        # debcvescan encodes severity numerically; anything else is ignored.
        if severity == 3:
            severity = "High"
            vul_col = "danger"
        elif severity == 2:
            vul_col = "warning"
            severity = "Medium"
        elif severity == 1:
            vul_col = "info"
            severity = "Low"
        else:
            continue

        vul_id = uuid.uuid4()

        dup_data = str(cve) + str(severity) + str(package)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()

        match_dup = debcvescan_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')

        if len(match_dup) == 0:
            duplicate_vuln = 'No'
            vuln_status = 'Open'

            # A finding previously marked as a false positive keeps that flag.
            false_p = debcvescan_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            false_positive = 'Yes' if len(false_p) == 1 else 'No'
        else:
            duplicate_vuln = 'Yes'
            vuln_status = 'Duplicate'
            false_positive = 'Duplicate'

        save_all = debcvescan_scan_results_db(
            vuln_id=vul_id,
            scan_id=scan_id,
            date_time=date_time,
            project_id=project_id,
            description=description,
            cve=cve,
            package=package,
            package_ver=package_ver,
            fix_ver=fix_ver,
            Severity=severity,
            vul_col=vul_col,
            vuln_status=vuln_status,
            dup_hash=duplicate_hash,
            vuln_duplicate=duplicate_vuln,
            false_positive=false_positive,
            username=username,
        )
        save_all.save()

    all_findbugs_data = debcvescan_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')

    duplicate_count = debcvescan_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(Severity="High"))
    total_medium = len(all_findbugs_data.filter(Severity="Medium"))
    total_low = len(all_findbugs_data.filter(Severity="Low"))
    total_duplicate = len(duplicate_count)

    debcvescan_scan_db.objects.filter(scan_id=scan_id).update(
        username=username,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - Debian CVE Scan Report Uploaded'
    # BUG FIX: the message previously interpolated the undefined name
    # `Target`, raising NameError at the end of every parse.
    message = 'Debian CVE Scan has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("Debian CVE Scan", total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
def nodejsscan_report_json(data, project_id, scan_id, username):
    """
    Parse a NodeJsScan JSON report and persist its findings.

    Severities are looked up from the bundled ``nodejsscan_vuln.json`` mapping
    (issue title -> severity). Findings are de-duplicated by a SHA-256 over
    (title, severity, filename, line) and stored in
    ``nodejsscan_scan_results_db``; the scan summary is updated once and a
    single notification e-mail is sent.

    :param data: parsed NodeJsScan JSON report (expects a 'sec_issues' mapping)
    :param project_id: project the scan belongs to
    :param scan_id: identifier of this scan run
    :param username: owner of the scan records
    :return: None (persists results and sends a notification e-mail)
    """
    date_time = datetime.now()
    global vul_col, severity

    # Load the title -> severity mapping once, instead of re-reading the JSON
    # file for every single finding as the original code did.
    with open(
            'scanners/scanner_parser/staticscanner_parser/nodejsscan_vuln.json'
    ) as f:
        vuln_name = json.load(f)
    severity_by_title = {v['name']: v['severity'] for v in vuln_name['vuln']}

    for vuln in data['sec_issues']:
        for vuln_dat in data['sec_issues'][vuln]:
            # Unknown titles fall back to the previous value of the global
            # `severity`, preserving the original lookup behavior.
            severity = severity_by_title.get(vuln_dat['title'], severity)
            title = vuln_dat['title']
            filename = vuln_dat['filename']
            path = vuln_dat['path']
            sha2 = vuln_dat['sha2']
            tag = vuln_dat['tag']
            description = vuln_dat['description']

            line = vuln_dat['line']
            lines = vuln_dat['lines']

            if severity == "High":
                vul_col = "danger"
            elif severity == 'Medium':
                vul_col = "warning"
            elif severity == 'Low':
                vul_col = "info"

            vul_id = uuid.uuid4()

            dup_data = str(title) + str(severity) + str(filename) + str(line)
            duplicate_hash = hashlib.sha256(
                dup_data.encode('utf-8')).hexdigest()

            match_dup = nodejsscan_scan_results_db.objects.filter(
                username=username, dup_hash=duplicate_hash).values('dup_hash')

            if len(match_dup) == 0:
                duplicate_vuln = 'No'
                vuln_status = 'Open'

                # A finding previously marked false positive keeps that flag.
                false_p = nodejsscan_scan_results_db.objects.filter(
                    username=username, false_positive_hash=duplicate_hash)
                false_positive = 'Yes' if len(false_p) == 1 else 'No'
            else:
                duplicate_vuln = 'Yes'
                vuln_status = 'Duplicate'
                false_positive = 'Duplicate'

            save_all = nodejsscan_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                vul_col=vul_col,
                vuln_status=vuln_status,
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                title=title,
                filename=filename,
                severity=severity,
                path=path,
                sha2=sha2,
                tag=tag,
                description=description,
                line=line,
                lines=lines,
                username=username,
            )
            save_all.save()

    # BUG FIX: the summary/update/e-mail block below was indented inside the
    # outer loop, so it ran (and e-mailed) once per issue category and never
    # ran at all for an empty report. It now runs exactly once per scan.
    all_findbugs_data = nodejsscan_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')

    duplicate_count = nodejsscan_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_duplicate = len(duplicate_count)

    nodejsscan_scan_db.objects.filter(username=username,
                                      scan_id=scan_id).update(
                                          total_vul=total_vul,
                                          date_time=date_time,
                                          high_vul=total_high,
                                          medium_vul=total_medium,
                                          low_vul=total_low,
                                          total_dup=total_duplicate)
    trend_update(username=username)
    # BUG FIX: subject/message previously said "Trivy" in the NodeJsScan parser.
    subject = 'Archery Tool Scan Status - Nodejsscan Report Uploaded'
    message = 'Nodejsscan Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("Nodejsscan", total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
# Exemple #21
# 0
def brakeman_report_json(data, project_id, scan_id, username):
    """
    Parse a Brakeman JSON report and persist its findings.

    Iterates ``data['warnings']`` (each warning carries warning_type,
    warning_code, fingerprint, check_name, message, file, line, link, code and
    render_path, plus a 'confidence' level of High / Medium / Weak that is
    mapped to a severity). Findings are de-duplicated by a SHA-256 over
    (warning_type, severity, file), stored in ``brakeman_scan_results_db``,
    the scan summary is updated and a notification e-mail is sent.

    :param data: parsed Brakeman JSON report (expects a 'warnings' list)
    :param project_id: project the scan belongs to
    :param scan_id: identifier of this scan run
    :param username: owner of the scan records
    :return: None (persists results and sends a notification e-mail)
    """
    global false_positive
    date_time = datetime.now()
    vul_col = ''

    vuln = data['warnings']

    for vuln_data in vuln:
        # Missing report keys default to "Not Found" instead of aborting the
        # whole parse (assumes each warning is a dict — matches the original's
        # per-key try/except fallback).
        name = vuln_data.get('warning_type', "Not Found")
        warning_code = vuln_data.get('warning_code', "Not Found")
        fingerprint = vuln_data.get('fingerprint', "Not Found")
        description = vuln_data.get('message', "Not Found")
        check_name = vuln_data.get('check_name', "Not Found")
        file = vuln_data.get('file', "Not Found")
        line = vuln_data.get('line', "Not Found")
        link = vuln_data.get('link', "Not Found")
        code = vuln_data.get('code', "Not Found")
        render_path = vuln_data.get('render_path', "Not Found")

        # Brakeman reports a 'confidence'; "Weak" maps to Low severity.
        severity = vuln_data.get('confidence', "Not Found")
        if severity == 'Weak':
            severity = 'Low'

        if severity == "Critical":
            severity = 'High'
            vul_col = "danger"

        if severity == "High":
            vul_col = "danger"
        elif severity == 'Medium':
            vul_col = "warning"
        elif severity == 'Low':
            vul_col = "info"
        elif severity in ('Unknown', 'Everything else'):
            severity = "Low"
            vul_col = "info"

        vul_id = uuid.uuid4()

        dup_data = str(name) + str(severity) + str(file)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()

        match_dup = brakeman_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')

        if len(match_dup) == 0:
            duplicate_vuln = 'No'
            vuln_status = 'Open'

            # A finding previously marked as a false positive keeps that flag.
            false_p = brakeman_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            false_positive = 'Yes' if len(false_p) == 1 else 'No'
        else:
            duplicate_vuln = 'Yes'
            vuln_status = 'Duplicate'
            false_positive = 'Duplicate'

        save_all = brakeman_scan_results_db(
            vuln_id=vul_id,
            scan_id=scan_id,
            date_time=date_time,
            project_id=project_id,
            vul_col=vul_col,
            vuln_status=vuln_status,
            dup_hash=duplicate_hash,
            vuln_duplicate=duplicate_vuln,
            false_positive=false_positive,
            username=username,
            name=name,
            warning_code=warning_code,
            description=description,
            severity=severity,
            file=file,
            check_name=check_name,
            fingerprint=fingerprint,
            line=line,
            code=code,
            render_path=render_path,
            link=link,
        )
        save_all.save()

    all_findbugs_data = brakeman_scan_results_db.objects.filter(
        username=username,
        scan_id=scan_id,
        false_positive='No',
        vuln_duplicate='No')

    duplicate_count = brakeman_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_duplicate = len(duplicate_count)

    brakeman_scan_db.objects.filter(scan_id=scan_id).update(
        username=username,
        date_time=date_time,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - brakeman Report Uploaded'
    # BUG FIX: the message previously interpolated the undefined name
    # `Target`, raising NameError at the end of every parse.
    message = 'brakeman Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % ("Brakeman", total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
def gitlabsast_report_json(data, project_id, scan_id):
    """
    Parse a GitLab SAST JSON report and persist its findings.

    Iterates ``data['vulnerabilities']``, maps Critical/Unknown/etc. severities
    onto High/Medium/Low, de-duplicates findings by a SHA-256 over
    (message, severity, file), stores them in ``StaticScanResultsDb``, updates
    the scan summary in ``StaticScansDb`` and sends a notification e-mail.

    :param data: parsed GitLab SAST JSON report (expects a 'vulnerabilities' list)
    :param project_id: project the scan belongs to
    :param scan_id: identifier of this scan run
    :return: None (persists results and sends a notification e-mail)
    """
    date_time = datetime.now()
    vul_col = ""

    vuln = data["vulnerabilities"]

    for vuln_data in vuln:
        # Missing report keys default to "Not Found" instead of aborting the
        # whole parse (assumes each entry is a dict — matches the original's
        # per-key try/except fallback).
        name = vuln_data.get("message", "Not Found")
        description = vuln_data.get("description", "Not Found")
        scanner = vuln_data.get("scanner", "Not Found")
        location = vuln_data.get("location", "Not Found")
        severity = vuln_data.get("severity", "Not Found")

        # 'file' is nested under 'location'; guard both levels.
        try:
            file = vuln_data["location"]["file"]
        except Exception:
            file = "Not Found"

        if severity == "Critical":
            severity = "High"
            vul_col = "danger"

        if severity == "High":
            vul_col = "danger"
        elif severity == "Medium":
            vul_col = "warning"
        elif severity == "Low":
            vul_col = "info"
        elif severity in ("Unknown", "Everything else"):
            severity = "Low"
            vul_col = "info"

        vul_id = uuid.uuid4()

        dup_data = str(name) + str(severity) + str(file)
        duplicate_hash = hashlib.sha256(dup_data.encode("utf-8")).hexdigest()

        match_dup = StaticScanResultsDb.objects.filter(dup_hash=duplicate_hash).values(
            "dup_hash"
        )

        if len(match_dup) == 0:
            duplicate_vuln = "No"
            vuln_status = "Open"

            # A finding previously marked as a false positive keeps that flag.
            false_p = StaticScanResultsDb.objects.filter(
                false_positive_hash=duplicate_hash
            )
            false_positive = "Yes" if len(false_p) == 1 else "No"
        else:
            duplicate_vuln = "Yes"
            vuln_status = "Duplicate"
            false_positive = "Duplicate"

        save_all = StaticScanResultsDb(
            vuln_id=vul_id,
            scan_id=scan_id,
            date_time=date_time,
            project_id=project_id,
            title=name,
            # CONSISTENCY FIX: the duplicate branch previously dropped the
            # scanner details from the description; both branches now store
            # the same description format.
            description=str(description) + "\n\n" + str(scanner),
            filePath=location,
            fileName=file,
            severity=severity,
            severity_color=vul_col,
            vuln_status=vuln_status,
            dup_hash=duplicate_hash,
            vuln_duplicate=duplicate_vuln,
            false_positive=false_positive,
            scanner="Gitlabsast",
        )
        save_all.save()

    all_findbugs_data = StaticScanResultsDb.objects.filter(
        scan_id=scan_id, false_positive="No", vuln_duplicate="No"
    )

    duplicate_count = StaticScanResultsDb.objects.filter(
        scan_id=scan_id, vuln_duplicate="Yes"
    )

    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_duplicate = len(duplicate_count)

    StaticScansDb.objects.filter(scan_id=scan_id).update(
        date_time=date_time,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate,
        scanner="Gitlabsast",
    )
    trend_update()
    subject = "Archery Tool Scan Status - GitLab SAST Report Uploaded"
    # BUG FIX: the message previously interpolated the undefined name
    # `Target`, raising NameError at the end of every parse.
    message = (
        "GitLab SAST Scanner has completed the scan "
        "  %s <br> Total: %s <br>High: %s <br>"
        "Medium: %s <br>Low %s"
        % ("GitLab SAST", total_vul, total_high, total_medium, total_low)
    )

    email_sch_notify(subject=subject, message=message)
# Exemple #23
# 0
def npmaudit_report_json(data, project_id, scan_id):
    """
    Parse an npm-audit JSON report and persist its advisories.

    Every advisory is normalised (empty fields replaced with "not found",
    npm severities mapped onto High/Medium/Low), de-duplicated via a
    sha256 hash of title+severity+module and stored as a
    StaticScanResultsDb row; scan totals are then written back to
    StaticScansDb and a notification e-mail is sent.

    :param data: parsed npm-audit JSON; must contain an ``advisories`` map
    :param project_id: project the findings belong to
    :param scan_id: scan the findings are attached to
    :return: None
    """
    date_time = datetime.now()
    global vul_col
    for vuln in data["advisories"]:
        advisory = data["advisories"][vuln]
        title = advisory["title"]
        found_by = advisory["found_by"]
        reported_by = advisory["reported_by"]
        module_name = advisory["module_name"]
        cves = advisory["cves"]
        vulnerable_versions = advisory["vulnerable_versions"]
        patched_versions = advisory["patched_versions"]
        overview = advisory["overview"]
        recommendation = advisory["recommendation"]
        references = advisory["references"]
        access = advisory["access"]
        severity = advisory["severity"]
        cwe = advisory["cwe"]
        metadata = advisory["metadata"]
        url = advisory["url"]

        # Map each affected version to its dependency paths.
        vuln_versions = {}
        for find in advisory["findings"]:
            vuln_versions[find["version"]] = [find["paths"]]

        # Replace empty / missing values with a readable placeholder.
        if not title:
            title = "not found"
        if not found_by:
            found_by = "not found"
        if not reported_by:
            reported_by = "not found"
        if not module_name:
            module_name = "not found"
        if not cves:
            cves = "not found"
        if not vulnerable_versions:
            vulnerable_versions = "not found"
        if not patched_versions:
            patched_versions = "not found"
        if not recommendation:
            recommendation = "not found"
        if not overview:
            overview = "not found"
        if not references:
            references = "not found"
        if not access:
            access = "not found"
        if not severity:
            severity = "not found"
        if not cwe:
            cwe = "not found"
        if not url:
            url = "not found"

        # npm-audit severity -> Archery severity / bootstrap colour.
        if severity == "critical":
            severity = "High"
            vul_col = "danger"
        elif severity == "high":
            severity = "High"
            vul_col = "danger"
        elif severity == "moderate":
            severity = "Medium"
            vul_col = "warning"
        elif severity == "low":
            severity = "Low"
            vul_col = "info"
        elif severity == "info":
            severity = "Low"
            vul_col = "info"
        else:
            # BUG FIX: an unrecognised severity previously reused whatever
            # the global vul_col held from a previous row; default it.
            vul_col = "info"

        vul_id = uuid.uuid4()

        dup_data = str(title) + str(severity) + str(module_name)
        duplicate_hash = hashlib.sha256(dup_data.encode("utf-8")).hexdigest()

        match_dup = StaticScanResultsDb.objects.filter(
            dup_hash=duplicate_hash).values("dup_hash")
        length_match = len(match_dup)

        # BUG FIX: the description previously repeated vuln_versions and
        # never used the advisory's vulnerable_versions range.
        full_description = (str(overview) + "\n\n" + str(vuln_versions) +
                            "\n\n" + str(reported_by) + "\n\n" +
                            str(module_name) + "\n\n" + str(cves) + "\n\n" +
                            str(vulnerable_versions) + "\n\n" +
                            str(patched_versions))

        if length_match == 0:
            duplicate_vuln = "No"

            false_p = StaticScanResultsDb.objects.filter(
                false_positive_hash=duplicate_hash)
            # A single false-positive hash match marks the finding.
            if len(false_p) == 1:
                false_positive = "Yes"
            else:
                false_positive = "No"

            save_all = StaticScanResultsDb(
                vuln_id=vul_id,
                date_time=date_time,
                scan_id=scan_id,
                project_id=project_id,
                severity_color=vul_col,
                vuln_status="Open",
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                title=title,
                description=full_description,
                solution=recommendation,
                references=references,
                severity=severity,
                scanner="Npmaudit",
            )
            save_all.save()

        else:
            duplicate_vuln = "Yes"

            save_all = StaticScanResultsDb(
                vuln_id=vul_id,
                date_time=date_time,
                scan_id=scan_id,
                project_id=project_id,
                severity_color=vul_col,
                vuln_status="Duplicate",
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive="Duplicate",
                title=title,
                description=full_description,
                solution=recommendation,
                references=references,
                severity=severity,
                scanner="Npmaudit",
            )
            save_all.save()

    # Recompute scan-level totals from the stored rows.
    all_findbugs_data = StaticScanResultsDb.objects.filter(
        scan_id=scan_id, false_positive="No")

    duplicate_count = StaticScanResultsDb.objects.filter(
        scan_id=scan_id, vuln_duplicate="Yes")

    total_vul = len(all_findbugs_data)
    total_high = len(all_findbugs_data.filter(severity="High"))
    total_medium = len(all_findbugs_data.filter(severity="Medium"))
    total_low = len(all_findbugs_data.filter(severity="Low"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate="Yes"))

    StaticScansDb.objects.filter(scan_id=scan_id).update(
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate,
        scanner="Npmaudit",
    )
    trend_update()
    subject = "Archery Tool Scan Status - Npmaudit Report Uploaded"
    message = ("Npmaudit Scanner has completed the scan "
               "  %s <br> Total: %s <br>High: %s <br>"
               "Medium: %s <br>Low %s" %
               ("npm-audit", total_vul, total_high, total_medium, total_low))

    email_sch_notify(subject=subject, message=message)
def xml_parser(root, project_id, scan_id):
    """
    Parse a Netsparker XML report and persist its findings.

    Walks every vulnerability element under the report root, maps
    Netsparker severities onto Archery's High/Medium/Low scale,
    de-duplicates via a sha256 hash of type+url+severity, stores each
    finding as a WebScanResultsDb row, updates the WebScansDb totals and
    sends a notification e-mail.

    NOTE(review): this module defines another ``xml_parser`` later in the
    file, so only the last definition survives at import time — confirm
    which one is meant to be active.

    :param root: ElementTree root of the Netsparker XML report
    :param project_id: project the findings belong to
    :param scan_id: scan the findings are attached to
    :return: None
    """
    global vuln_url, vuln_type, vuln_severity, vuln_certainty, vuln_rawrequest, vuln_rawresponse, vuln_extrainformation, vuln_classification, vuln_id, vul_col, description, impact, actionsToTake, remedy, requiredSkillsForExploitation, externalReferences, remedyReferences, proofOfConcept, proofs, target
    date_time = datetime.now()
    for data in root:
        # The <target><url> element carries the scanned base URL.
        if data.tag == "target":
            for url in data:
                if url.tag == "url":
                    target = url.text
        # Collect the fields of the current vulnerability element.
        for vuln in data:

            if vuln.tag == "url":
                vuln_url = vuln.text

            if vuln.tag == "type":
                vuln_type = vuln.text

            if vuln.tag == "severity":
                # Netsparker "Important" maps to High.
                if vuln.text == "Important":
                    vuln_severity = "High"
                else:
                    vuln_severity = vuln.text

            if vuln.tag == "certainty":
                vuln_certainty = vuln.text

            if vuln.tag == "rawrequest":
                vuln_rawrequest = vuln.text

            if vuln.tag == "rawresponse":
                vuln_rawresponse = vuln.text

            if vuln.tag == "extrainformation":
                vuln_extrainformation = vuln.text

            if vuln.tag == "classification":
                vuln_classification = vuln.text

            if vuln.tag == "description":
                description = vuln.text

            if vuln.tag == "impact":
                impact = vuln.text

            if vuln.tag == "actionsToTake":
                actionsToTake = vuln.text

            if vuln.tag == "remedy":
                remedy = vuln.text

            if vuln.tag == "requiredSkillsForExploitation":
                requiredSkillsForExploitation = vuln.text

            if vuln.tag == "externalReferences":
                externalReferences = vuln.text

            if vuln.tag == "remedyReferences":
                remedyReferences = vuln.text

            if vuln.tag == "proofOfConcept":
                proofOfConcept = vuln.text

            if vuln.tag == "proofs":
                proofs = vuln.text

        vuln_id = uuid.uuid4()

        # Map severity onto a bootstrap colour; unrecognised values
        # collapse to Low.
        # NOTE(review): Netsparker "Information" also collapses to Low
        # here, so the "Information" count below can never be non-zero —
        # confirm intended.
        if vuln_severity == "Critical":
            vuln_severity = "High"
            vul_col = "danger"

        elif vuln_severity == "High":
            vul_col = "danger"

        elif vuln_severity == "Medium":
            vul_col = "warning"

        elif vuln_severity == "Low":
            vul_col = "info"

        else:
            vuln_severity = "Low"
            vul_col = "info"

        # De-duplicate on type+url+severity.
        dup_data = str(vuln_type) + str(vuln_url) + str(vuln_severity)
        duplicate_hash = hashlib.sha256(dup_data.encode("utf-8")).hexdigest()
        match_dup = (WebScanResultsDb.objects.filter(
            dup_hash=duplicate_hash).values("dup_hash").distinct())
        lenth_match = len(match_dup)

        if lenth_match == 0:
            duplicate_vuln = "No"

            false_p = WebScanResultsDb.objects.filter(
                false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)

            global false_positive
            # A single false-positive hash match marks the finding.
            if fp_lenth_match == 1:
                false_positive = "Yes"
            else:
                false_positive = "No"

            dump_data = WebScanResultsDb(
                scan_id=scan_id,
                project_id=project_id,
                date_time=date_time,
                vuln_id=vuln_id,
                title=vuln_type,
                url=vuln_url,
                severity=vuln_severity,
                false_positive=false_positive,
                severity_color=vul_col,
                description=description,
                solution=str(remedy) + "\n\n" + str(actionsToTake),
                reference=str(externalReferences) + "\n\n" +
                str(remedyReferences),
                vuln_status="Open",
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                scanner="Netsparker",
            )
            dump_data.save()
        else:
            duplicate_vuln = "Yes"

            dump_data = WebScanResultsDb(
                scan_id=scan_id,
                project_id=project_id,
                date_time=date_time,
                vuln_id=vuln_id,
                url=vuln_url,
                title=vuln_type,
                severity=vuln_severity,
                false_positive="Duplicate",
                vuln_status="Duplicate",
                severity_color=vul_col,
                description=description,
                # BUG FIX: wrap remedy/externalReferences in str() like the
                # non-duplicate branch; empty XML tags yield None and the
                # bare concatenation raised TypeError.
                solution=str(remedy) + "\n\n" + str(actionsToTake),
                reference=str(externalReferences) + "\n\n" + str(remedyReferences),
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                scanner="Netsparker",
            )
            dump_data.save()

    # Recompute scan-level totals from the stored rows.
    netsparker_all_vul = WebScanResultsDb.objects.filter(scan_id=scan_id,
                                                         false_positive="No",
                                                         scanner="Netsparker")
    duplicate_count = WebScanResultsDb.objects.filter(scan_id=scan_id,
                                                      vuln_duplicate="Yes",
                                                      scanner="Netsparker")

    total_high = len(netsparker_all_vul.filter(severity="High"))
    total_medium = len(netsparker_all_vul.filter(severity="Medium"))
    total_low = len(netsparker_all_vul.filter(severity="Low"))
    total_info = len(netsparker_all_vul.filter(severity="Information"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate="Yes"))
    total_vul = total_high + total_medium + total_low + total_info

    WebScansDb.objects.filter(scan_id=scan_id).update(
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        info_vul=total_info,
        total_dup=total_duplicate,
        scan_url=target,
        scanner="Netsparker",
    )
    trend_update()
    subject = "Archery Tool Scan Status - Netsparker Report Uploaded"
    message = ("Netsparker Scanner has completed the scan "
               "  %s <br> Total: %s <br>High: %s <br>"
               "Medium: %s <br>Low %s" %
               (target, total_vul, total_high, total_medium, total_low))

    email_sch_notify(subject=subject, message=message)
# Example #25
def xml_parser(root,
               project_id,
               scan_id, username):
    """
    Parse a Netsparker XML report for a specific user and persist findings.

    Legacy multi-user variant: rows are keyed by ``username`` and written
    to netsparker_scan_result_db; netsparker_scan_db totals are updated
    afterwards and a notification e-mail is sent.

    :param root: ElementTree root of the Netsparker XML report
    :param project_id: project the findings belong to
    :param scan_id: scan the findings are attached to
    :param username: owner of the scan records
    :return: None
    """
    # Parsed field values are shared through module-level globals.
    global vuln_url, vuln_type, vuln_severity, vuln_certainty, vuln_rawrequest, \
        vuln_rawresponse, vuln_extrainformation, vuln_classification, vuln_id, \
        vul_col, description, impact, actionsToTake, remedy, requiredSkillsForExploitation, \
        externalReferences, remedyReferences, proofOfConcept, proofs
    date_time = datetime.now()
    for data in root:
        # The <target><url> element carries the scanned base URL.
        # NOTE(review): ``target`` is read further down even if no <target>
        # element exists, which would raise NameError — confirm reports
        # always contain one.
        if data.tag == "target":
            for url in data:
                if url.tag == 'url':
                    target = url.text
        # Collect the fields of the current vulnerability element.
        for vuln in data:

            if vuln.tag == 'url':
                vuln_url = vuln.text

            if vuln.tag == 'type':
                vuln_type = vuln.text

            if vuln.tag == 'severity':
                # Netsparker "Important" maps to High.
                if vuln.text == 'Important':
                    vuln_severity = 'High'
                else:
                    vuln_severity = vuln.text

            if vuln.tag == 'certainty':
                vuln_certainty = vuln.text

            if vuln.tag == 'rawrequest':
                vuln_rawrequest = vuln.text

            if vuln.tag == 'rawresponse':
                vuln_rawresponse = vuln.text

            if vuln.tag == 'extrainformation':
                vuln_extrainformation = vuln.text

            if vuln.tag == 'classification':
                vuln_classification = vuln.text

            if vuln.tag == 'description':
                description = vuln.text

            if vuln.tag == 'impact':
                impact = vuln.text

            if vuln.tag == 'actionsToTake':
                actionsToTake = vuln.text

            if vuln.tag == 'remedy':
                remedy = vuln.text

            if vuln.tag == 'requiredSkillsForExploitation':
                requiredSkillsForExploitation = vuln.text

            if vuln.tag == 'externalReferences':
                externalReferences = vuln.text

            if vuln.tag == 'remedyReferences':
                remedyReferences = vuln.text

            if vuln.tag == 'proofOfConcept':
                proofOfConcept = vuln.text

            if vuln.tag == 'proofs':
                proofs = vuln.text

            # NOTE(review): regenerated for EVERY child element; only the
            # value from the last iteration is stored below. The sibling
            # parser in this file assigns it once per report element —
            # confirm which placement is intended.
            vuln_id = uuid.uuid4()

        # Map severity onto a bootstrap colour; unrecognised values
        # (including "Information") collapse to Low.
        if vuln_severity == "Critical":
            vuln_severity = "High"
            vul_col = "danger"

        elif vuln_severity == "High":
            vul_col = 'danger'

        elif vuln_severity == 'Medium':
            vul_col = "warning"

        elif vuln_severity == 'Low':
            vul_col = "info"

        else:
            vuln_severity = "Low"
            vul_col = "info"

        # De-duplicate on type+url+severity, scoped to this user.
        dup_data = str(vuln_type) + str(vuln_url) + str(vuln_severity)
        duplicate_hash = hashlib.sha256(dup_data.encode('utf-8')).hexdigest()
        match_dup = netsparker_scan_result_db.objects.filter(username=username,
                                                             dup_hash=duplicate_hash).values('dup_hash').distinct()
        lenth_match = len(match_dup)

        if lenth_match == 0:
            duplicate_vuln = 'No'

            false_p = netsparker_scan_result_db.objects.filter(username=username,
                                                               false_positive_hash=duplicate_hash)
            fp_lenth_match = len(false_p)

            global false_positive
            # A single false-positive hash match marks the finding; both
            # remaining branches fall back to 'No'.
            if fp_lenth_match == 1:
                false_positive = 'Yes'
            elif lenth_match == 0:
                false_positive = 'No'
            else:
                false_positive = 'No'

            dump_data = netsparker_scan_result_db(scan_id=scan_id,
                                                  project_id=project_id,
                                                  date_time=date_time,
                                                  vuln_id=vuln_id,
                                                  vuln_url=vuln_url,
                                                  type=vuln_type,
                                                  severity=vuln_severity,
                                                  certainty=vuln_certainty,
                                                  rawrequest=vuln_rawrequest,
                                                  rawresponse=vuln_rawresponse,
                                                  extrainformation=vuln_extrainformation,
                                                  classification=vuln_classification,
                                                  false_positive=false_positive,
                                                  vuln_color=vul_col,
                                                  description=description,
                                                  impact=impact,
                                                  actionsToTake=actionsToTake,
                                                  remedy=remedy,
                                                  requiredSkillsForExploitation=requiredSkillsForExploitation,
                                                  externalReferences=externalReferences,
                                                  remedyReferences=remedyReferences,
                                                  proofOfConcept=proofOfConcept,
                                                  proofs=proofs,
                                                  vuln_status='Open',
                                                  dup_hash=duplicate_hash,
                                                  vuln_duplicate=duplicate_vuln,
                                                  username=username
                                                  )
            dump_data.save()

        else:
            duplicate_vuln = 'Yes'

            dump_data = netsparker_scan_result_db(scan_id=scan_id,
                                                  project_id=project_id,
                                                  vuln_id=vuln_id,
                                                  date_time=date_time,
                                                  vuln_url=vuln_url,
                                                  type=vuln_type,
                                                  severity=vuln_severity,
                                                  certainty=vuln_certainty,
                                                  rawrequest=vuln_rawrequest,
                                                  rawresponse=vuln_rawresponse,
                                                  extrainformation=vuln_extrainformation,
                                                  classification=vuln_classification,
                                                  false_positive='Duplicate',
                                                  vuln_color=vul_col,
                                                  description=description,
                                                  impact=impact,
                                                  actionsToTake=actionsToTake,
                                                  remedy=remedy,
                                                  requiredSkillsForExploitation=requiredSkillsForExploitation,
                                                  externalReferences=externalReferences,
                                                  remedyReferences=remedyReferences,
                                                  proofOfConcept=proofOfConcept,
                                                  proofs=proofs,
                                                  vuln_status='Duplicate',
                                                  dup_hash=duplicate_hash,
                                                  vuln_duplicate=duplicate_vuln,
                                                  username=username
                                                  )
            dump_data.save()

    # Recompute scan-level totals from the stored rows.
    netsparker_all_vul = netsparker_scan_result_db.objects.filter(username=username, scan_id=scan_id,
                                                                  false_positive='No')
    duplicate_count = netsparker_scan_result_db.objects.filter(username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_critical = len(netsparker_all_vul.filter(severity='Critical'))
    total_high = len(netsparker_all_vul.filter(severity="High"))
    total_medium = len(netsparker_all_vul.filter(severity="Medium"))
    total_low = len(netsparker_all_vul.filter(severity="Low"))
    total_info = len(netsparker_all_vul.filter(severity="Information"))
    total_duplicate = len(duplicate_count.filter(vuln_duplicate='Yes'))
    total_vul = total_critical + total_high + total_medium + total_low + total_info

    netsparker_scan_db.objects.filter(username=username, scan_id=scan_id).update(total_vul=total_vul,
                                                                                 date_time=date_time,
                                                                                 high_vul=total_high,
                                                                                 medium_vul=total_medium,
                                                                                 low_vul=total_low,
                                                                                 critical_vul=total_critical,
                                                                                 info_vul=total_info,
                                                                                 total_dup=total_duplicate,
                                                                                 url=target
                                                                                 )

    # NOTE(review): this conditional update writes exactly the same values
    # as the unconditional one above, so the condition has no effect —
    # confirm whether something different was intended here.
    if total_vul == total_duplicate:
        netsparker_scan_db.objects.filter(username=username, scan_id=scan_id).update(total_vul=total_vul,
                                                                                     high_vul=total_high,
                                                                                     date_time=date_time,
                                                                                     medium_vul=total_medium,
                                                                                     low_vul=total_low,
                                                                                     critical_vul=total_critical,
                                                                                     info_vul=total_info,
                                                                                     total_dup=total_duplicate,
                                                                                     url=target
                                                                                     )
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - Netsparker Report Uploaded'
    message = 'Netsparker Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (target, total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
# Example #26
def retirejs_report_json(data, project_id, scan_id):
    """
    Parse a retire.js JSON report and persist one finding per file entry.

    Each entry carries a file name and a ``results`` list with the
    vulnerable component, its version and vulnerability identifiers; the
    first result / first vulnerability of every entry is stored as a
    StaticScanResultsDb row.

    :param data: parsed retire.js JSON — a list of {"file", "results"} dicts
    :param project_id: project the findings belong to
    :param scan_id: scan the findings are attached to
    :return: None
    """
    global component, files, severity, version, identifires, cve, issue, bug, summary, info, vul_col
    for f in data:
        files = f["file"]

        # BUG FIX: the original re-iterated the WHOLE report for every
        # field, so each saved row carried the component/version/
        # vulnerability of the LAST entry. Read from the current entry.
        result = f["results"][0]
        component = result["component"]
        version = result["version"]
        vulnerability = result["vulnerabilities"][0]

        # Reset per entry so values cannot leak from the previous file.
        cve = issue = bug = summary = "Not Found"
        identifires = vulnerability["identifiers"]
        for key, value in identifires.items():
            if key == "CVE":
                # Keeps the last CVE listed, as the original did.
                for cve_v in value:
                    cve = cve_v
            if key == "issue":
                issue = value
            if key == "bug":
                bug = value
            if key == "summary":
                summary = value

        info = vulnerability["info"]
        severity = vulnerability["severity"]

        date_time = datetime.now()
        vul_id = uuid.uuid4()

        # retire.js severity -> bootstrap colour.
        if severity == "HIGH":
            vul_col = "danger"
        elif severity == "MEDIUM":
            vul_col = "warning"
        elif severity == "LOW":
            vul_col = "info"
        else:
            # BUG FIX: an unknown severity previously reused whatever the
            # global vul_col held from a previous row; default it.
            vul_col = "info"

        dup_data = files + component + severity
        duplicate_hash = hashlib.sha256(dup_data.encode("utf-8")).hexdigest()

        match_dup = (StaticScanResultsDb.objects.filter(
            dup_hash=duplicate_hash).values("dup_hash").distinct())
        lenth_match = len(match_dup)

        if lenth_match == 1:
            duplicate_vuln = "Yes"
        elif lenth_match == 0:
            duplicate_vuln = "No"
        else:
            duplicate_vuln = "None"

        false_p = StaticScanResultsDb.objects.filter(
            false_positive_hash=duplicate_hash)
        fp_lenth_match = len(false_p)

        if fp_lenth_match == 1:
            false_positive = "Yes"
        else:
            false_positive = "No"

        # NOTE(review): duplicate_vuln / false_positive are computed but
        # the matching model fields below are commented out — confirm
        # whether they should be stored like the other parsers do.
        save_all = StaticScanResultsDb(
            scan_id=scan_id,
            date_time=date_time,
            scan_date=date_time,
            project_id=project_id,
            vuln_id=vul_id,
            fileName=files,
            # component=component,
            # CVE=cve,
            title=issue,
            # bug=bug,
            description=summary,
            # info=info,
            severity=severity,
            # false_positive=false_positive,
            vuln_status="Open",
            # dup_hash=duplicate_hash,
            # vuln_duplicate=duplicate_vuln,
            # version=version,
            scanner="Retirejs",
        )
        save_all.save()
        # Kept inside the loop to preserve the original per-row behaviour.
        trend_update()
# Example #27
def gitlabsast_report_json(data, project_id, scan_id, username):
    """
    Parse a GitLab SAST JSON report and persist its vulnerabilities.

    Rows are keyed by ``username`` and written to
    gitlabsast_scan_results_db; gitlabsast_scan_db totals are updated
    afterwards and a notification e-mail is sent.

    :param data: parsed GitLab SAST JSON (expects a ``vulnerabilities`` list)
    :param project_id: project the findings belong to
    :param scan_id: scan the findings are attached to
    :param username: owner of the scan records
    :return: None
    """
    date_time = datetime.now()
    vul_col = ''

    def _field(record, key):
        # Same contract as the per-field try/except blocks this replaces:
        # any lookup failure yields the "Not Found" placeholder.
        try:
            return record[key]
        except Exception:
            return "Not Found"

    for vuln_data in data['vulnerabilities']:
        name = _field(vuln_data, 'name')
        message = _field(vuln_data, 'message')
        description = _field(vuln_data, 'description')
        cve = _field(vuln_data, 'cve')
        scanner = _field(vuln_data, 'scanner')
        location = _field(vuln_data, 'location')
        identifiers = _field(vuln_data, 'identifiers')
        severity = _field(vuln_data, 'severity')
        try:
            file = vuln_data['location']['file']
        except Exception:
            file = "Not Found"

        # Normalise severity and pick the matching bootstrap colour.
        if severity == "Critical":
            severity = 'High'
        if severity == "High":
            vul_col = "danger"
        elif severity == 'Medium':
            vul_col = "warning"
        elif severity == 'Low':
            vul_col = "info"
        elif severity in ('Unknown', 'Everything else'):
            severity = "Low"
            vul_col = "info"

        vul_id = uuid.uuid4()

        # De-duplicate on message+severity+file, scoped to this user.
        fingerprint = str(message) + str(severity) + str(file)
        duplicate_hash = hashlib.sha256(fingerprint.encode('utf-8')).hexdigest()

        previous = gitlabsast_scan_results_db.objects.filter(
            username=username, dup_hash=duplicate_hash).values('dup_hash')

        if len(previous) == 0:
            duplicate_vuln = 'No'

            fp_matches = gitlabsast_scan_results_db.objects.filter(
                username=username, false_positive_hash=duplicate_hash)
            # A single false-positive hash match marks the finding.
            false_positive = 'Yes' if len(fp_matches) == 1 else 'No'

            record = gitlabsast_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                date_time=date_time,
                project_id=project_id,
                name=name,
                message=message,
                description=description,
                cve=cve,
                gl_scanner=scanner,
                location=location,
                file=file,
                Severity=severity,
                identifiers=identifiers,
                vul_col=vul_col,
                vuln_status='Open',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive=false_positive,
                username=username,
            )
        else:
            duplicate_vuln = 'Yes'

            record = gitlabsast_scan_results_db(
                vuln_id=vul_id,
                scan_id=scan_id,
                project_id=project_id,
                date_time=date_time,
                name=name,
                message=message,
                description=description,
                cve=cve,
                gl_scanner=scanner,
                location=location,
                file=file,
                Severity=severity,
                identifiers=identifiers,
                vul_col=vul_col,
                vuln_status='Duplicate',
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                false_positive='Duplicate',
                username=username,
            )
        record.save()

    # Recompute scan-level totals from the stored rows.
    open_findings = gitlabsast_scan_results_db.objects.filter(
        username=username,
        scan_id=scan_id,
        false_positive='No',
        vuln_duplicate='No')

    duplicates = gitlabsast_scan_results_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_vul = len(open_findings)
    total_high = len(open_findings.filter(Severity="High"))
    total_medium = len(open_findings.filter(Severity="Medium"))
    total_low = len(open_findings.filter(Severity="Low"))
    total_duplicate = len(duplicates.filter(vuln_duplicate='Yes'))

    gitlabsast_scan_db.objects.filter(scan_id=scan_id).update(
        username=username,
        date_time=date_time,
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate)
    trend_update(username=username)
    subject = 'Archery Tool Scan Status - GitLab SAST Report Uploaded'
    # NOTE(review): ``Target`` is not defined in this function; presumably
    # a module-level global set elsewhere — confirm, otherwise building
    # the e-mail body raises NameError.
    message = 'GitLab SAST Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (Target, total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)
def xml_parser(root, project_id, scan_id):
    """Parse an Acunetix XML report and store its findings in the database.

    :param root: ElementTree root of the Acunetix XML report.
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of the ``WebScansDb`` record to update.
    :return: None (persists results, updates totals, emails a summary).
    """
    date_time = datetime.now()
    global ScanName, ScanShortName, ScanStartURL, ScanStartTime, ScanFinishTime, ScanScanTime, ScanAborted, ScanResponsive, ScanBanner, ScanOs, ScanWebServer, ScanTechnologies, ScanCrawler, ScanReportItems, VulnName, VulnModuleName, VulnDetails, VulnAffects, VulnParameter, VulnAOP_SourceFile, VulnAOP_SourceLine, VulnAOP_Additional, VulnIsFalsePositive, VulnSeverity, VulnType, VulnImpact, VulnDescription, VulnDetailedInformation, VulnRecommendation, VulnTechnicalDetails, VulnCWEList, VulnCVEList, VulnCVSS, VulnCVSS3, VulnReferences, vul_col, risk, UriName, VulnUrl, FullURL

    # Scan-level tags copied verbatim into the same-purpose module global.
    scan_tag_globals = {
        "Name": "ScanName",
        "ShortName": "ScanShortName",
        "StartURL": "ScanStartURL",
        "StartTime": "ScanStartTime",
        "FinishTime": "ScanFinishTime",
        "ScanTime": "ScanScanTime",
        "Aborted": "ScanAborted",
        "Responsive": "ScanResponsive",
        "Banner": "ScanBanner",
        "Os": "ScanOs",
        "WebServer": "ScanWebServer",
        "Technologies": "ScanTechnologies",
        "Crawler": "ScanCrawler",
        "ReportItems": "ScanReportItems",
    }
    # ReportItem tags copied verbatim into the matching module global.
    # "Affects" is special-cased below because it is joined to the start URL.
    item_tag_globals = {
        "Name": "VulnName",
        "ModuleName": "VulnModuleName",
        "Details": "VulnDetails",
        "Parameter": "VulnParameter",
        "AOP_SourceFile": "VulnAOP_SourceFile",
        "AOP_SourceLine": "VulnAOP_SourceLine",
        "AOP_Additional": "VulnAOP_Additional",
        "IsFalsePositive": "VulnIsFalsePositive",
        "Severity": "VulnSeverity",
        "Type": "VulnType",
        "Impact": "VulnImpact",
        "Description": "VulnDescription",
        "DetailedInformation": "VulnDetailedInformation",
        "Recommendation": "VulnRecommendation",
        "TechnicalDetails": "VulnTechnicalDetails",
        "CWEList": "VulnCWEList",
        "CVEList": "VulnCVEList",
        "CVSS": "VulnCVSS",
        "CVSS3": "VulnCVSS3",
        "References": "VulnReferences",
    }

    for scan in root:
        for reports in scan:
            if reports.tag in scan_tag_globals:
                globals()[scan_tag_globals[reports.tag]] = reports.text
            for report_item in reports:
                for ReportItem in report_item:
                    if ReportItem.tag == "Affects":
                        # Affects holds a path relative to the scan start URL.
                        VulnAffects = ScanStartURL + ReportItem.text
                    elif ReportItem.tag in item_tag_globals:
                        globals()[item_tag_globals[ReportItem.tag]] = ReportItem.text

                    # Severity -> display label + bootstrap colour class;
                    # anything other than high/medium is reported as Low.
                    if VulnSeverity == "high":
                        vul_col = "danger"
                        risk = "High"
                    elif VulnSeverity == "medium":
                        vul_col = "warning"
                        risk = "Medium"
                    else:
                        vul_col = "info"
                        risk = "Low"

                if VulnName is None:
                    print(VulnName)
                else:
                    # Resolve the concrete URL(s) this finding applies to.
                    for c_url in root.findall(".//SiteFile"):
                        for vuln_url in c_url:
                            if vuln_url.tag == "Name":
                                UriName = vuln_url.text
                            if vuln_url.tag == "URL":
                                VulnUrl = vuln_url.text
                            if vuln_url.tag == "FullURL":
                                FullURL = vuln_url.text

                    vuln_id = uuid.uuid4()
                    # Duplicate detection: title + URL + severity hashed to
                    # match any previously stored finding.
                    dup_data = VulnName + FullURL + risk
                    duplicate_hash = hashlib.sha256(
                        dup_data.encode("utf-8")).hexdigest()

                    match_dup = (WebScanResultsDb.objects.filter(
                        dup_hash=duplicate_hash).values("dup_hash").distinct())

                    if len(match_dup) == 0:
                        duplicate_vuln = "No"
                        # A finding previously flagged false-positive keeps
                        # that flag on re-import.
                        false_p = WebScanResultsDb.objects.filter(
                            false_positive_hash=duplicate_hash)
                        false_positive = "Yes" if len(false_p) == 1 else "No"
                        vuln_status = "Open"
                        result_url = FullURL
                    else:
                        duplicate_vuln = "Yes"
                        false_positive = "Duplicate"
                        vuln_status = "Duplicate"
                        result_url = VulnUrl

                    dump_data = WebScanResultsDb(
                        vuln_id=vuln_id,
                        scan_id=scan_id,
                        url=result_url,
                        title=VulnName,
                        description=str(VulnDescription) +
                        str(VulnDetails) + str(VulnTechnicalDetails),
                        instance=str(VulnParameter) + str(VulnAffects),
                        reference=VulnReferences,
                        project_id=project_id,
                        severity_color=vul_col,
                        severity=risk,
                        date_time=date_time,
                        false_positive=false_positive,
                        vuln_status=vuln_status,
                        dup_hash=duplicate_hash,
                        vuln_duplicate=duplicate_vuln,
                        scanner="Acunetix",
                    )
                    dump_data.save()

    acunetix_all_vul = WebScanResultsDb.objects.filter(scan_id=scan_id,
                                                       false_positive="No")

    duplicate_count = WebScanResultsDb.objects.filter(scan_id=scan_id,
                                                      vuln_duplicate="Yes")

    total_high = len(acunetix_all_vul.filter(severity="High"))
    total_medium = len(acunetix_all_vul.filter(severity="Medium"))
    total_low = len(acunetix_all_vul.filter(severity="Low"))
    total_info = len(acunetix_all_vul.filter(severity="Informational"))
    # BUG FIX: the original counted duplicate_count.filter(severity="Yes"),
    # which never matches (severity holds High/Medium/Low), so total_dup was
    # always 0. The queryset is already filtered on vuln_duplicate="Yes".
    total_duplicate = len(duplicate_count)
    total_vul = total_high + total_medium + total_low + total_info

    WebScansDb.objects.filter(scan_id=scan_id).update(
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        info_vul=total_info,
        total_dup=total_duplicate,
        scan_url=ScanStartURL,
    )
    trend_update()
    subject = "Archery Tool Scan Status - Acunetix Report Uploaded"
    message = ("Acunetix Scanner has completed the scan "
               "  %s <br> Total: %s <br>High: %s <br>"
               "Medium: %s <br>Low %s" %
               (ScanStartURL, total_vul, total_high, total_medium, total_low))

    email_sch_notify(subject=subject, message=message)
def bandit_report_json(data, project_id, scan_id):
    """Parse a Bandit JSON report and store its findings in the database.

    :param data: parsed Bandit JSON report (a dict with a "results" list).
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of the ``StaticScansDb`` record to update.
    :return: None (persists results, updates totals, emails a summary).
    """
    global vul_col, issue_severity, test_name, filename, line_number, code, issue_confidence, line_range, test_id, issue_text, more_info, total_vul, total_high, total_medium, total_low

    # Result fields copied into the same-named module globals; a null value
    # is normalised to the string "NA". Keys absent from a result leave the
    # corresponding global untouched (same as the original per-key ifs).
    result_fields = (
        "line_number", "code", "issue_confidence", "line_range", "test_id",
        "issue_severity", "issue_text", "test_name", "filename", "more_info",
    )

    for res in data.get("results", []):
        for key, value in res.items():
            if key in result_fields:
                globals()[key] = "NA" if value is None else value

        date_time = datetime.now()
        vul_id = uuid.uuid4()

        # Map Bandit's upper-case severities to display labels and
        # bootstrap colour classes.
        if issue_severity == "HIGH":
            vul_col = "danger"
            issue_severity = "High"
        elif issue_severity == "MEDIUM":
            vul_col = "warning"
            issue_severity = "Medium"
        elif issue_severity == "LOW":
            vul_col = "info"
            issue_severity = "Low"

        # Duplicate detection: title + file + severity hashed to match any
        # previously stored finding.
        dup_data = test_name + filename + issue_severity
        duplicate_hash = hashlib.sha256(
            dup_data.encode("utf-8")).hexdigest()

        match_dup = (StaticScanResultsDb.objects.filter(
            dup_hash=duplicate_hash).values("dup_hash").distinct())

        if len(match_dup) == 0:
            duplicate_vuln = "No"
            # A finding previously flagged false-positive keeps that flag
            # on re-import.
            false_p = StaticScanResultsDb.objects.filter(
                false_positive_hash=duplicate_hash)
            false_positive = "Yes" if len(false_p) == 1 else "No"
            vuln_status = "Open"
        else:
            duplicate_vuln = "Yes"
            false_positive = "Duplicate"
            vuln_status = "Duplicate"

        save_all = StaticScanResultsDb(
            scan_id=scan_id,
            date_time=date_time,
            project_id=project_id,
            vuln_id=vul_id,
            severity=issue_severity,
            title=test_name,
            fileName=filename,
            description=str(issue_text) + "\n\n" + str(code) +
            "\n\n" + str(line_range),
            references=more_info,
            severity_color=vul_col,
            false_positive=false_positive,
            vuln_status=vuln_status,
            dup_hash=duplicate_hash,
            vuln_duplicate=duplicate_vuln,
            scanner="Bandit",
        )
        save_all.save()

    # Totals are computed once, after all results are stored. The original
    # recomputed and rewrote them for every top-level key of the report.
    all_bandit_data = StaticScanResultsDb.objects.filter(
        scan_id=scan_id, false_positive="No")

    duplicate_count = StaticScanResultsDb.objects.filter(
        scan_id=scan_id, vuln_duplicate="Yes")

    total_vul = len(all_bandit_data)
    total_high = len(all_bandit_data.filter(severity="High"))
    total_medium = len(all_bandit_data.filter(severity="Medium"))
    total_low = len(all_bandit_data.filter(severity="Low"))
    total_duplicate = len(duplicate_count)

    StaticScansDb.objects.filter(scan_id=scan_id).update(
        total_vul=total_vul,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        total_dup=total_duplicate,
    )
    trend_update()
    subject = "Archery Tool Scan Status - Bandit Report Uploaded"
    message = ("Bandit Scanner has completed the scan "
               "  %s <br> Total: %s <br>High: %s <br>"
               "Medium: %s <br>Low %s" %
               (scan_id, total_vul, total_high, total_medium, total_low))

    email_sch_notify(subject=subject, message=message)
# Exemple #30
# 0
def xml_parser(root, project_id, scan_id, username, target_url):
    """Parse an Arachni XML report and store its findings in the database.

    NOTE(review): this module defines ``xml_parser`` twice (an Acunetix and
    this Arachni variant, with different signatures); the later definition
    shadows the earlier one — confirm how callers dispatch to each.

    :param root: ElementTree root of the Arachni XML report.
    :param project_id: project the scan belongs to.
    :param scan_id: identifier of the ``arachni_scan_db`` record to update.
    :param username: owner of the scan; scopes all DB queries.
    :param target_url: scanned URL, written back to the scan record.
    :return: None (persists results, updates totals, emails a summary).
    """
    date_time = datetime.now()
    global name, description, remedy_guidance, remedy_code, severity, check, digest, references, \
        vector, remarks, page, signature, \
        proof, trusted, platform_type, platform_name, url, action, \
        body, vuln_id, vul_col, ref_key, ref_values, vector_input_key, vector_input_values, vector_source_key, vector_source_values, page_body_data, request_url, request_method, request_raw, response_ip, response_raw_headers, false_positive

    for issue in root:
        for data in issue:
            if data.tag != "issue":
                continue
            for vuln in data:
                vuln_id = uuid.uuid4()

                # Simple text fields; a missing value becomes "NA".
                if vuln.tag == "name":
                    name = "NA" if vuln.text is None else vuln.text
                if vuln.tag == "description":
                    description = "NA" if vuln.text is None else vuln.text
                if vuln.tag == "remedy_guidance":
                    remedy_guidance = "NA" if vuln.text is None else vuln.text
                if vuln.tag == "severity":
                    severity = "NA" if vuln.text is None else vuln.text

                if vuln.tag == "references":
                    for ref_vuln in vuln:
                        for key, values in ref_vuln.attrib.items():
                            ref_key = "NA" if key is None else key
                            ref_values = "NA" if values is None else values

                if vuln.tag == "vector":
                    for vec_vuln in vuln:
                        if vec_vuln.tag == 'inputs':
                            for vec_input in vec_vuln:
                                for key, values in vec_input.attrib.items():
                                    vector_input_key = "NA" if key is None else key
                                    vector_input_values = "NA" if values is None else values
                        if vec_vuln.tag == 'source':
                            for vec_source in vec_vuln:
                                for key, values in vec_source.attrib.items():
                                    vector_source_key = "NA" if key is None else key
                                    # BUG FIX: original wrote "values in None",
                                    # which raises TypeError; "is None" intended.
                                    vector_source_values = "NA" if values is None else values

                if vuln.tag == "page":
                    for page_body in vuln:
                        if page_body.tag == "body":
                            page_body_data = "NA" if page_body.text is None else page_body.text
                    for req in vuln:
                        if req.tag == 'request':
                            for req_dat in req:
                                if req_dat.tag == 'url':
                                    request_url = "NA" if req_dat.text is None else req_dat.text
                                if req_dat.tag == 'method':
                                    request_method = "NA" if req_dat.text is None else req_dat.text
                                if req_dat.tag == 'raw':
                                    request_raw = "NA" if req_dat.text is None else req_dat.text
                        if req.tag == 'response':
                            for res_dat in req:
                                if res_dat.tag == 'ip_address':
                                    response_ip = "NA" if res_dat.text is None else res_dat.text
                                if res_dat.tag == 'raw_headers':
                                    response_raw_headers = "NA" if res_dat.text is None else res_dat.text

                if vuln.tag == "proof":
                    proof = "NA" if vuln.text is None else vuln.text

                # Map Arachni's lower-case severities to display labels and
                # bootstrap colour classes; unknown values default to Low.
                if severity == "high":
                    vul_col = "danger"
                    severity = "High"
                elif severity == 'medium':
                    vul_col = "warning"
                    severity = "Medium"
                else:
                    severity = "Low"
                    vul_col = "info"

                for extra_data in vuln:
                    for extra_vuln in extra_data:
                        if extra_vuln.tag == "url":
                            url = "NA" if extra_vuln.text is None else extra_vuln.text
                        if extra_vuln.tag == "action":
                            action = "NA" if extra_vuln.text is None else extra_vuln.text
                        if extra_vuln.tag == "body":
                            body = "NA" if extra_vuln.text is None else extra_vuln.text

            # One record per <issue>; duplicate detection hashes
            # title + URL + severity against previously stored findings.
            dup_data = name + url + severity
            duplicate_hash = hashlib.sha256(
                dup_data.encode('utf-8')).hexdigest()

            match_dup = arachni_scan_result_db.objects.filter(
                username=username,
                dup_hash=duplicate_hash).values('dup_hash').distinct()

            if len(match_dup) == 0:
                duplicate_vuln = 'No'
                # A finding previously flagged false-positive keeps that
                # flag on re-import.
                false_p = arachni_scan_result_db.objects.filter(
                    username=username, false_positive_hash=duplicate_hash)
                false_positive = 'Yes' if len(false_p) == 1 else 'No'
                vuln_status = 'Open'
            else:
                duplicate_vuln = 'Yes'
                false_positive = 'Duplicate'
                vuln_status = 'Duplicate'

            dump_data = arachni_scan_result_db(
                vuln_id=vuln_id,
                scan_id=scan_id,
                date_time=date_time,
                vuln_color=vul_col,
                project_id=project_id,
                name=name,
                description=description,
                remedy_guidance=remedy_guidance,
                severity=severity,
                proof=proof,
                url=url,
                action=action,
                body=body,
                ref_key=ref_key,
                ref_value=ref_values,
                vector_input_values=vector_input_values,
                vector_source_key=vector_source_key,
                vector_source_values=vector_source_values,
                page_body_data=page_body_data,
                request_url=request_url,
                request_method=request_method,
                request_raw=request_raw,
                response_ip=response_ip,
                response_raw_headers=response_raw_headers,
                vector_input_key=vector_input_key,
                false_positive=false_positive,
                vuln_status=vuln_status,
                dup_hash=duplicate_hash,
                vuln_duplicate=duplicate_vuln,
                username=username)
            dump_data.save()

    arachni_all_vul = arachni_scan_result_db.objects.filter(
        username=username, scan_id=scan_id, false_positive='No')

    duplicate_count = arachni_scan_result_db.objects.filter(
        username=username, scan_id=scan_id, vuln_duplicate='Yes')

    total_high = len(arachni_all_vul.filter(severity="High"))
    total_medium = len(arachni_all_vul.filter(severity="Medium"))
    total_low = len(arachni_all_vul.filter(severity="Low"))
    total_info = len(arachni_all_vul.filter(severity="Informational"))
    # The queryset is already filtered on vuln_duplicate='Yes'.
    total_duplicate = len(duplicate_count)
    total_vul = total_high + total_medium + total_low + total_info

    arachni_scan_db.objects.filter(scan_id=scan_id, username=username).update(
        url=target_url,
        total_vul=total_vul,
        date_time=date_time,
        high_vul=total_high,
        medium_vul=total_medium,
        low_vul=total_low,
        info_vul=total_info,
        total_dup=total_duplicate,
    )
    trend_update(username=username)

    subject = 'Archery Tool Scan Status - Arachni Report Uploaded'
    message = 'Arachni Scanner has completed the scan ' \
              '  %s <br> Total: %s <br>High: %s <br>' \
              'Medium: %s <br>Low %s' % (url, total_vul, total_high, total_medium, total_low)

    email_sch_notify(subject=subject, message=message)