Example No. 1
def _is_secure_renegotiation_supported(sec_rng, issue_id, asset, asset_port):
    if sec_rng is None:
        return False

    if sec_rng.attrib["supported"] != '1':
        return PatrowlEngineFinding(
            issue_id=issue_id,
            type="ssltest_secure_renegotiation",
            title="Secure renegotiation is not supported",
            description="Secure renegotiation is not supported on {}:{}".format(
                asset, asset_port),
            solution="Enable secure renegotiation on your server",
            severity="medium",
            confidence="firm",
            raw=sec_rng.attrib,
            target_addrs=[asset],
            meta_tags=["ssl", "tls"])
    if sec_rng.attrib["supported"] == '1' and sec_rng.attrib["secure"] != '1':
        return PatrowlEngineFinding(
            issue_id=issue_id,
            type="ssltest_secure_renegotiation",
            title="Unsecure renegotiation is enabled",
            description="Unsecure renegotiation is enabled on {}:{}".format(asset, asset_port),
            solution="Disable unsecure renegotiation on your server",
            severity="high",
            confidence="firm",
            raw=sec_rng.attrib,
            target_addrs=[asset],
            meta_tags=["ssl", "tls"])
    return False
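A minimal usage sketch (not part of the original source): build a sslscan-style <renegotiation> element with xml.etree.ElementTree and pass it to the helper. The asset values are placeholders, and PatrowlEngineFinding is assumed to be importable from the PatrOwl engine utilities.

import xml.etree.ElementTree as ET

# Hypothetical sslscan fragment: renegotiation is enabled but not secure
sec_rng = ET.fromstring('<renegotiation supported="1" secure="0" />')

finding = _is_secure_renegotiation_supported(
    sec_rng, issue_id=1, asset="example.com", asset_port="443")
if finding:
    print("finding raised: insecure renegotiation is enabled")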
Example No. 2
def _get_heartbleed_vuln(items, issue_id, asset, asset_port):
    if items is None or not isinstance(items, list):
        return False

    is_vulnerable = False
    hb_links = ["http://heartbleed.com/"]
    hb_desc = ""

    for item in items:
        if item.get("vulnerable") == "1":
            hb_desc += "sslversion='{}' --> is VULNERABLE\n".format(
                item.get("sslversion"))
            is_vulnerable = True
        else:
            hb_desc += "sslversion='{}' --> is not vulnerable\n".format(
                item.get("sslversion"))

    if is_vulnerable:
        return PatrowlEngineFinding(
            issue_id=issue_id,
            type="ssltest_heartbleed",
            title="Heartbleed check on '{}:{}': VULNERABLE".format(
                asset, asset_port),
            description=hb_desc,
            solution="Update the version of the OpenSSL component used by the \
                service listening on port '{}'".format(asset_port),
            severity="high",
            confidence="firm",
            raw=hb_desc,
            target_addrs=[asset],
            meta_tags=["heartbleed", "ssl", "tls"],
            meta_links=hb_links,
            meta_vuln_refs=[{
                "CVE": ["CVE-2014-0160"]
            }])
    else:
        return PatrowlEngineFinding(
            issue_id=issue_id,
            type="ssltest_heartbleed",
            title="Heartbleed check on '{}:{}': not vulnerable".format(
                asset, asset_port),
            description=hb_desc,
            solution="n/a",
            severity="info",
            confidence="firm",
            raw=hb_desc,
            target_addrs=[asset],
            meta_tags=["heartbleed", "ssl", "tls"],
            meta_links=hb_links)
Example No. 3
def _get_ciphersuites(items, issue_id, asset, asset_port):
    if items is None or not isinstance(items, list):
        return False

    issue_desc = "Supported ciphersuites:\n"
    for item in items:
        add_info = ""
        if 'curve' in item.keys():
            add_info += "Curve: {} ".format(item.get("curve"))
        if 'dhebits' in item.keys():
            add_info += "DHEbits: {} ".format(item.get("dhebits"))
        if 'ecdhebits' in item.keys():
            add_info += "ECDHEbits: {} ".format(item.get("ecdhebits"))
        issue_desc += "{:30} SSLVersion: {:8} Bits: {:4} Status: {:10} {}\n".format(
            item.get("cipher"), item.get("sslversion"), item.get("bits"),
            item.get("status"), add_info)

    return PatrowlEngineFinding(
        issue_id=issue_id,
        type="ssltest_supported_ciphersuites",
        title="Supported ciphersuites on '{}:{}'.".format(asset, asset_port),
        description=issue_desc,
        solution="n/a",
        severity="info",
        confidence="firm",
        raw=issue_desc,
        target_addrs=[asset],
        meta_tags=["ciphersuites", "ssl", "tls"])
Example No. 4
def _spot_weak_ciphersuites(ciphers, issue_id, asset, asset_port):
    if ciphers is None:
        return False
    res = []
    for cipher in ciphers:
        if cipher.attrib["strength"] in ("anonymous", "medium") and \
            cipher.attrib["status"] in ("preferred", "accepted"):
            issue_id += 1
            res.append(PatrowlEngineFinding(
                issue_id=issue_id,
                type="tls_supported_ciphersuites",
                title="Unsecure TLS ciphersuite detected : {}".format(cipher.attrib["cipher"]),
                description="Unsecure TLS ciphersuite {} was detected on {}:{}".format(
                    cipher.attrib["cipher"], asset, asset_port),
                solution="Deactivate the ciphersuite {} on your TLS configuration".format(cipher.attrib["cipher"]),
                severity="medium",
                confidence="firm",
                raw=cipher.attrib,
                target_addrs=[asset],
                meta_tags=["ssl", "tls", "ciphersuites"]))
        if cipher.attrib["strength"] in ("null", "weak") and \
            cipher.attrib["status"] in ("preferred", "accepted"):
            issue_id += 1
            res.append(PatrowlEngineFinding(
                issue_id=issue_id,
                type="tls_supported_ciphersuites",
                title="Dangerous (weak) TLS ciphersuite detected : {}".format(cipher.attrib["cipher"]),
                description="Weak TLS ciphersuite {} was detected on {}:{}".format(
                    cipher.attrib["cipher"], asset, asset_port),
                solution="Deactivate the ciphersuite {} on your TLS configuration".format(cipher.attrib["cipher"]),
                severity="medium",
                confidence="firm",
                raw=cipher.attrib,
                target_addrs=[asset],
                meta_tags=["ssl", "tls", "ciphersuites"]))

    return res
Example No. 5
def _spot_weak_protocol(protocols, issue_id, asset, asset_port):
    if protocols is None:
        return False
    res = []
    for protocol in protocols:
        if protocol.attrib["type"] == "ssl" and protocol.attrib["enabled"] == "1":
            issue_id += 1
            res.append(PatrowlEngineFinding(
                issue_id=issue_id,
                type="tls_supported_protocols",
                title="Weak TLS protocol detected : SSLv{}".format(protocol.attrib["version"]),
                description="Weak TLS protocol SSLv{} was detected on {}:{}".format(
                    protocol.attrib["version"], asset, asset_port),
                solution="Deactivate SSLv{} on your server".format(protocol.attrib["version"]),
                severity="high",
                confidence="firm",
                raw=protocol.attrib,
                target_addrs=[asset],
                meta_tags=["ssl", "tls"]))
        if protocol.attrib["type"] == "tls" and \
           protocol.attrib["version"] in ("1.0", "1.1") and \
           protocol.attrib["enabled"] == "1":
            issue_id += 1
            res.append(PatrowlEngineFinding(
                issue_id=issue_id,
                type="tls_supported_protocols",
                title="Weak TLS protocol detected : TLSv{}".format(protocol.attrib["version"]),
                description="Weak TLS protocol TLSv{} was detected on {}:{}".format(
                    protocol.attrib["version"], asset, asset_port),
                solution="Deactivate TLSv{} on your server".format(protocol.attrib["version"]),
                severity="medium",
                confidence="firm",
                raw=protocol.attrib,
                target_addrs=[asset],
                meta_tags=["ssl", "tls"]))

    return res
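A short sketch with hypothetical input: each protocol element carries type/version/enabled attributes, and the function increments issue_id itself for every finding it appends.

import xml.etree.ElementTree as ET

protocols = [
    ET.fromstring('<protocol type="ssl" version="3" enabled="1" />'),
    ET.fromstring('<protocol type="tls" version="1.2" enabled="1" />'),
]
findings = _spot_weak_protocol(
    protocols, issue_id=0, asset="example.com", asset_port="443")
# -> one high-severity finding for SSLv3; TLSv1.2 raises nothing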
Example No. 6
def _get_certificate_blob(cert_blob, issue_id, asset, asset_port):
    if cert_blob is None:
        return False
    cert_hash = hashlib.sha1(cert_blob.text.encode('utf-8')).hexdigest().upper()
    return PatrowlEngineFinding(
        issue_id=issue_id,
        type="ssltest_certificate_pem",
        title="Certificate was retrieved from '{}:{}' with hash '{}'.".format(
            asset, asset_port, cert_hash[:6]),
        description="Following certificate was retrieved from the server:\n\
            {}".format(cert_blob.text),
        solution="n/a",
        severity="info",
        confidence="firm",
        raw=cert_blob.text,
        target_addrs=[asset],
        meta_tags=["certificate", "ssl", "tls", "pem"])
Example No. 7
def _is_fallback_supported(fallback, issue_id, asset, asset_port):
    if fallback is None:
        return False
    fallback_support = fallback.attrib["supported"]
    if fallback_support == '1':
        return False

    return PatrowlEngineFinding(
        issue_id=issue_id,
        type="ssltest_fallback_support",
        title="Downgrade attack prevention is not supported",
        description="Downgrade attack prevention is not supported on {}:{}".format(
            asset, asset_port),
        solution="Enable TLS_FALLBACK_SCSV option on your server",
        severity="low",
        confidence="firm",
        raw=fallback.attrib,
        target_addrs=[asset],
        meta_tags=["ssl", "tls"])
Example No. 8
def _is_certificate_selfsigned(cert_tags, issue_id, asset, asset_port):
    if cert_tags is None:
        return False

    selfsigned_text = cert_tags.find("self-signed").text
    if not selfsigned_text or selfsigned_text == "false":
        return False

    return PatrowlEngineFinding(
        issue_id=issue_id,
        type="ssltest_certificate_selfsigned",
        title="Certificate from '{}:{}' is self-signed.".format(
            asset, asset_port),
        description="The SSL/TLS certificate retrieved from the server is \
            self-signed.",
        solution="Renew the certificate on the service listening on '{}:{}' \
            and sign it with a trusted CA.".format(asset, asset_port),
        severity="high",
        confidence="firm",
        raw=selfsigned_text,
        target_addrs=[asset],
        meta_tags=["certificate", "ssl", "tls", "self-signed"])
Example No. 9
def _is_certificate_expired(cert_tags, issue_id, asset, asset_port):
    if cert_tags is None:
        return False

    expired_text = cert_tags.find("expired").text
    if not expired_text or expired_text == "false":
        return False

    return PatrowlEngineFinding(
        issue_id=issue_id,
        type="ssltest_certificate_expired",
        title="Certificate from '{}:{}' is expired.".format(asset, asset_port),
        description="The SSL/TLS certificate retrieved from the server is \
            expired:\nNot valid before: {}\nNot valid after: {}".format(
            cert_tags.find("not-valid-before").text,
            cert_tags.find("not-valid-after").text),
        solution="Renew the certificate on the service listening on \
            '{}:{}'.".format(asset, asset_port),
        severity="high",
        confidence="firm",
        raw=expired_text,
        target_addrs=[asset],
        meta_tags=["certificate", "ssl", "tls", "expired"])
Example No. 10
def _search_twitter_thread(scan_id, asset_kw):

    issue_id = 0
    findings = []
    twitter = Twitter(
        auth=OAuth(
            engine.options["twitter_oauth_token"], engine.options["twitter_oauth_secret"],
            engine.options["twitter_consumer_key"], engine.options["twitter_consumer_secret"]
        ),
        retry=True
    )


    # Set the Max count
    max_count = APP_SEARCH_TWITTER_MAX_COUNT_DEFAULT
    extra_kw = ""
    since = ""
    if "search_twitter_options" in engine.scans[scan_id]["options"].keys() and engine.scans[scan_id]["options"]["search_twitter_options"] is not None:
        if "max_count" in engine.scans[scan_id]["options"]["search_twitter_options"].keys() and engine.scans[scan_id]["options"]["search_twitter_options"]["max_count"] is not None and isinstance(engine.scans[scan_id]["options"]["search_twitter_options"]["max_count"], int):
            max_count = engine.scans[scan_id]["options"]["search_twitter_options"]["max_count"]
        if "extra_kw" in engine.scans[scan_id]["options"]["search_twitter_options"].keys() and engine.scans[scan_id]["options"]["search_twitter_options"]["extra_kw"] is not None and isinstance(engine.scans[scan_id]["options"]["search_twitter_options"]["extra_kw"], list):
            extra_kw = " OR ".join(engine.scans[scan_id]["options"]["search_twitter_options"]["extra_kw"])
        if "since" in engine.scans[scan_id]["options"]["search_twitter_options"].keys() and engine.scans[scan_id]["options"]["search_twitter_options"]["since"] is not None and isinstance(engine.scans[scan_id]["options"]["search_twitter_options"]["since"], str):
            since = "since:{}".format(engine.scans[scan_id]["options"]["search_twitter_options"]["since"])

    # WARNING a query should not exceed 500 chars, including filters and operators
    # print "query_string :", "\""+asset_kw+"\" "+extra_kw+" "+since+" -filter:retweets", "len:", len("\""+asset_kw+"\" "+extra_kw+" "+since+" -filter:retweets")
    results = twitter.search.tweets(q="\""+asset_kw+"\" "+extra_kw+" "+since+" -filter:retweets", count=max_count)
    # print results

    if len(results["statuses"]) == 0:  # no results
        metalink = "https://twitter.com/search"+results["search_metadata"]["refresh_url"]
        new_finding = PatrowlEngineFinding(
            issue_id=issue_id, type="twitter_leak",
            title="No matching tweets.",
            description="No matching tweet with following parameters:\n" + \
                "Keyword (strict): {}\n".format(asset_kw) + \
                "Extra key words: {}\n".format(extra_kw) + \
                "URL: {}\n".format(metalink),
            solution="N/A",
            severity="info", confidence="firm",
            raw=results,
            target_addrs=[asset_kw],
            meta_links=[metalink])
        findings.append(new_finding)

    else:
        for tweet in results["statuses"]:
            # print "id:", tweet["id"], "text:", tweet["text"]
            # print "user_id:", tweet["user"]["id"], "user_name:", tweet["user"]["name"], "user_nickname:", tweet["user"]["screen_name"]
            # print "tweet_url:", "https://twitter.com/i/web/status/"+tweet["id_str"]

            issue_id += 1
            tw_hash = hashlib.sha1(str(tweet["text"]).encode('utf-8')).hexdigest()[:6]

            metalink = "https://twitter.com/search"+results["search_metadata"]["refresh_url"]
            new_finding = PatrowlEngineFinding(
                issue_id=issue_id, type="twitter_leak",
                title="Tweet matching search query (HASH: {}).".format(tw_hash),
                description="A tweet matching monitoring keywords has been found:\n" + \
                    "Query options:\nKeyword (strict): {}\n".format(asset_kw) + \
                    "Extra key words: {}\n".format(extra_kw) + \
                    "URL: {}\n".format(metalink),
                solution="Evaluate criticity. See internal procedures for incident reaction.",
                severity="high", confidence="firm",
                raw=tweet,
                target_addrs=[asset_kw],
                meta_links=[metalink])
            findings.append(new_finding)

    # Write results under mutex
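    # NB: a lock created per call is private to this thread; true mutual
    # exclusion would need a shared, module-level lock.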
    scan_lock = threading.RLock()
    with scan_lock:
        engine.scans[scan_id]["findings"] = engine.scans[scan_id]["findings"] + findings
Example No. 11
def _search_github_thread(scan_id, asset_kw):

    issue_id = 0
    findings = []
    asset_values = [a["value"] for a in engine.scans[scan_id]["assets"]]

    # qualifiers={}
    # if "github_qualifiers" in engine.scans[scan_id]["options"].keys() and engine.scans[scan_id]["options"]["github_qualifiers"] is not None:
    #     for opt_qualifier in engine.scans[scan_id]["options"]["github_qualifiers"].keys():
    #         if opt_qualifier == "since_period":
    #             num = re.search(r'\d+', engine.scans[scan_id]["options"]["github_qualifiers"]["since_period"]).group()
    #             unit = re.search(r'[a-zA-Z]+', engine.scans[scan_id]["options"]["github_qualifiers"]["since_period"]).group()
    #             if unit in ["weeks", "days", "hours", "minutes", "seconds"]:
    #                 since_date=date.today()-timedelta(**pa)
    #                 qualifiers.update({"created": ">="+str(since_date)})
    #         elif opt_qualifier == "from_date":
    #             try:
    #                 from_date_str = engine.scans[scan_id]["options"]["github_qualifiers"]["from_date"]
    #                 from_date_check = datetime.strptime(engine.scans[scan_id]["options"]["github_qualifiers"]["from_date"], "%Y-%m-%d")
    #                 qualifiers.update({"created": ">="+str(from_date_str)})
    #             except Exception:
    #                 print "bad datetime format"
    #
    #         elif opt_qualifier == "to_date":
    #             try:
    #                 to_date_str = engine.scans[scan_id]["options"]["github_qualifiers"]["to_date"]
    #                 to_date_check = datetime.strptime(engine.scans[scan_id]["options"]["github_qualifiers"]["to_date"], "%Y-%m-%d")
    #                 qualifiers.update({"created": "<="+str(to_date_str)})
    #             except Exception:
    #                 print "bad datetime format"

    # g = Github(engine.options["github_username"], engine.options["github_password"])  # rate limit = 30 requests/min
    g = Github(engine.options["github_api_token"])

    loops = 0
    for git_code in g.search_code("\'"+asset_kw+"\'", sort="indexed", order="desc"):
        ititle = "File found in Github public repo (code): {}/{} (HASH: {})".format(
            git_code.name,
            git_code.repository.name, git_code.sha[:6])
        iemail = ""
        if git_code.repository.owner.email is not None:
            git_code.repository.owner.email.encode("ascii", "ignore")
        idescription = "File found in Github public repo (code):\n\n" + \
            "URL: {}\n\n".format(git_code.html_url) + \
            "Repo: {}: {}\n\n".format(git_code.repository.name, git_code.repository.url) + \
            "Owner:\nlogin:{}, name:{}, email:{}\n\n".format(
                git_code.repository.owner.login,
                git_code.repository.owner.name,
                iemail) + \
            "Content ({} bits):{}".format(git_code.size, git_code.decoded_content)
        isolution = "Check if the snippet is legit or not. " + \
            "If not, see internal procedures for incident reaction."
        issue_id += 1

        new_finding = PatrowlEngineFinding(
            issue_id=issue_id, type="github_leak_code", title=ititle,
            description=idescription, solution=isolution, severity="high",
            confidence="firm", raw=git_code.raw_data, target_addrs=asset_values,
            meta_links=[git_code.html_url])
        findings.append(new_finding)

        # Rate-limit trick: wait 3 seconds every 20 iterations
        loops += 1
        if loops % 20 == 0:
            time.sleep(3)

    # for git_commit in g.search_commits("\'"+asset_kw+"\'", sort="indexed", order="desc"):
    #     print dir(git_commit)

    for git_issue in g.search_issues("\'"+asset_kw+"\'", sort="updated", order="desc"):
        ititle = "Matching issue found in Github public repo: {}... (HASH: {})".format(
            git_issue.title[:16],
            hashlib.sha1(str(git_issue.body).encode('utf-8')).hexdigest()[:6])
        idescription = "Matching issue found in Github public repo:\n\n" + \
            "URL: {}\n\n".format(git_issue.html_url) + \
            "Repo: {}: {}\n\n".format(git_issue.repository.name, git_issue.repository.url) + \
            "Owner:\nlogin:{}, name:{}, email:{}\n\n".format(
                git_issue.repository.owner.login,
                git_issue.repository.owner.name,
                git_issue.repository.owner.email) + \
            "Content: {}".format(git_issue.body)
        isolution = "Check if the snippet is legit or not. " + \
            "If not, see internal procedures for incident reaction."
        issue_id += 1

        new_finding = PatrowlEngineFinding(
            issue_id=issue_id, type="github_leak_issue", title=ititle,
            description=idescription, solution=isolution, severity="high",
            confidence="firm", raw=git_issue.raw_data, target_addrs=asset_values,
            meta_links=[git_issue.html_url])
        findings.append(new_finding)

    for git_repo in g.search_repositories("\'"+asset_kw+"\'", sort="updated", order="desc"):
        ititle = "Matching public Github repo: {} (HASH: {})".format(
            git_repo.name,
            hashlib.sha1(git_repo.description.encode('ascii', 'ignore')).hexdigest()[:6])
        idescription = "Matching public Github repo:\n\n" + \
            "URL: {}\n\n".format(git_repo.html_url) + \
            "Repo: {}: {}\n\n".format(git_repo.name, git_repo.url) + \
            "Owner:\nlogin:{}, name:{}, email:{}\n\n".format(
                git_repo.owner.login,
                git_repo.owner.name,
                git_repo.owner.email) + \
            "Content: {}".format(git_repo.description.encode('ascii', 'ignore'))
        isolution = "Check if the snippet is legit or not. " + \
            "If not, see internal procedures for incident reaction."
        issue_id += 1

        new_finding = PatrowlEngineFinding(
            issue_id=issue_id, type="github_leak_repo", title=ititle,
            description=idescription, solution=isolution, severity="high",
            confidence="firm", raw=git_repo.raw_data, target_addrs=asset_values,
            meta_links=[git_repo.html_url])
        findings.append(new_finding)

    for git_user in g.search_users(asset_kw, sort="joined", order="desc"):
        ititle = "Matching Github user: {} (HASH: {})".format(
            git_user.login,
            hashlib.sha1(str(git_user.login).encode('utf-8')).hexdigest()[:6])
        ibio = ""
        if git_user.bio:
            ibio = git_user.bio.encode('ascii', 'ignore')
        idescription = "Matching Github user:\n\n" + \
            "URL: {}\n\n".format(git_user.html_url) + \
            "Info:\nlogin:{}, name:{}, email:{}\n\n".format(
                git_user.login,
                git_user.name.encode('ascii', 'ignore'),
                git_user.email) + \
            "Bio: {}".format(ibio)
        isolution = "Check if the user is legit or not. " + \
            "If not, see internal procedures for incident reaction."
        issue_id += 1

        new_finding = PatrowlEngineFinding(
            issue_id=issue_id, type="github_leak_user", title=ititle,
            description=idescription, solution=isolution, severity="high",
            confidence="firm", raw=git_user.raw_data, target_addrs=asset_values,
            meta_links=[git_user.html_url])
        findings.append(new_finding)

    # Write results under mutex
    scan_lock = threading.RLock()
    with scan_lock:
        engine.scans[scan_id]["findings"] = engine.scans[scan_id]["findings"] + findings
Example No. 12
def _parse_results(scan_id):
    issues = []
    issue_id = 1

    nb_vulns = {
        "info": 0,
        "low": 0,
        "medium": 0,
        "high": 0,
        "critical": 0
    }

    report_filename = "{}/results/{}.xml".format(APP_BASE_DIR, scan_id)

    if not os.path.isfile(report_filename):
        return False

    try:
        tree = ET.parse(report_filename)
    except Exception:
        # No Element found in XML file
        return False

    report = tree.getroot().find("report").find("report")

    # Map IP addresses to domains/FQDNs
    all_assets = {}
    for host in report.findall("host"):
        host_ip = host.find("ip").text
        all_assets.update({host_ip: [host_ip]})
        for detail in host.findall("detail"):
            if detail.find("name").text == "hostname":
                host_name = detail.find("value").text
                all_assets[host_ip].append(host_name)

    for result in report.find("results").findall("result"):
        issue_meta = {}
        issue_name = result.find("name").text
        issue_desc = result.find("description").text
        host_ip = result.find("host").text
        assets = all_assets[host_ip]
        host_port = result.find("port").text

        # Severity
        threat = result.find("threat").text
        severity = "info"
        if threat == "High":
            severity = "high"
        elif threat == "Medium":
            severity = "medium"
        elif threat == "Low":
            severity = "low"

        issue_cvss = float(result.find("severity").text)

        if result.find("nvt").find("cve") is not None and result.find("nvt").find("cve").text != "NOCVE":
            cvelist = str(result.find("nvt").find("cve").text)
            issue_meta.update({"CVE": cvelist.split(", ")})
        if result.find("nvt").find("bid") is not None and result.find("nvt").find("bid").text != "NOBID":
            bid_list = str(result.find("nvt").find("bid").text)
            issue_meta.update({"BID": bid_list.split(", ")})
        if result.find("nvt").find("xref") is not None and result.find("nvt").find("xref").text != "NOXREF":
            xref_list = str(result.find("nvt").find("xref").text)
            issue_meta.update({"XREF": xref_list.split(", ")})

        issue = PatrowlEngineFinding(
            issue_id=issue_id,
            type="openvas_scan",
            title="{} ({})".format(issue_name, host_port),
            description=issue_desc,
            solution="n/a",
            severity=severity,
            confidence="firm",
            raw=ET.tostring(result, encoding='utf-8', method='xml'),
            target_addrs=assets,
            meta_tags=["openvas"],
            meta_risk={"cvss_base_score": issue_cvss},
            meta_vuln_refs=issue_meta
        )
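        # __to_dict() is private to PatrowlEngineFinding, hence the
        # name-mangled call below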
        issues.append(issue._PatrowlEngineFinding__to_dict())

        nb_vulns[severity] += 1
        issue_id += 1


    # report_id = engine.scans[scan_id]["report_id"]

    # for asset in engine.scans[scan_id]["findings"]:
    #     if engine.scans[scan_id]["findings"][asset]["issues"]:
    #         description = ''
    #         cvss_max = float(0)
    #         for eng in engine.scans[scan_id]["findings"][asset]["issues"]:
    #             if float(eng[0]) > 0:
    #                 cvss_max = max(float(eng[0]), cvss_max)
    #                 description = description + "[%s] CVSS: %s - Associated CVE: %s" % (eng[2], eng[0], eng[1]) + "\n"
    #         description = description + "For more detail go to 'https://%s/omp?cmd=get_report&report_id=%s'" % (engine.scanner["options"]["omp_host"]["value"], report_id)
    #
    #         criticity = "high"
    #         if cvss_max == 0:
    #             criticity = "info"
    #         elif cvss_max < 4.0:
    #             criticity = "low"
    #         elif cvss_max < 7.0:
    #             criticity = "medium"
    #
    #         nb_vulns[criticity] += 1
    #
    #         issues.append({
    #             "issue_id": len(issues)+1,
    #             "severity": criticity, "confidence": "certain",
    #             "target": {"addr": [asset], "protocol": "http"},
    #             "title": "'{}' identified in openvas".format(asset),
    #             "solution": "n/a",
    #             "metadata": {},
    #             "type": "openvas_report",
    #             "timestamp": timestamp,
    #             "description": description,
    #         })

    summary = {
        "nb_issues": len(issues),
        "nb_info": nb_vulns["info"],
        "nb_low": nb_vulns["low"],
        "nb_medium": nb_vulns["medium"],
        "nb_high": nb_vulns["high"],
        "nb_critical": 0,
        "engine_name": "openvas",
        "engine_version": engine.scanner["version"]
    }

    return issues, summary
Example No. 13
def _scanowaspdc_thread(scan_id, asset_kw):
    issue_id = 0
    findings = []
    asset_values = [a["value"] for a in engine.scans[scan_id]["assets"]]

    # Create the scan's workdirs
    scan_wd = "{}/workdirs/scan_{}_{}".format(APP_BASE_DIR, scan_id, str(time.time()))
    if not os.path.exists(scan_wd):
        os.makedirs(scan_wd)

    for asset_value in asset_values:
        checked_files = []
        # create the asset scan workdir
        scan_wd_asset = "{}/{}/src".format(scan_wd, hashlib.sha1(asset_value).hexdigest()[:6])
        os.makedirs(scan_wd_asset)

        # print "scan_wd_asset:", scan_wd_asset

        # Check location and copy files to the workdir
        if not _check_location(scan_id, asset_value, scan_wd_asset):
            # Generate an error if it was not possible to get the source code
            summary_asset_finding = PatrowlEngineFinding(
                issue_id=issue_id, type="code_ext_jar_summary",
                title="OWASP-DC scan not performed for '{}' (Error)".format(asset_value),
                description="Scan error with source code available at this location: '{}'. Unknwon error.".format(asset_value),
                solution="n/a.",
                severity="info", confidence="firm",
                raw={},
                target_addrs=[asset_value],
                meta_tags=["jar", "library", "owasp", "dependencies"])
            issue_id += 1
            findings.append(summary_asset_finding)
            continue

        time.sleep(2)

        # Start the scan
        cmd = 'libs/dependency-check/bin/dependency-check.sh --scan "{}" --format JSON --out "{}/oc_{}.json" --project "{}" --enableExperimental'.format(
            scan_wd_asset, scan_wd_asset, scan_id, scan_id)

        #print "cmd:", cmd

        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)

        # Wait a little to ensure the report file is completely written
        p.wait()
        time.sleep(2)

        report_filename = scan_wd_asset + "/oc_{}.json".format(scan_id)
        if not os.path.exists(report_filename):
            print("report file '{}' not found.".format(report_filename))
            continue

        scan_results = json.load(open(report_filename))

        for item in scan_results["dependencies"]:
            if "vulnerabilities" not in item.keys(): continue
            checked_files.append(item["filePath"])

            for vuln in item["vulnerabilities"]:

                vuln_name = ""
                if vuln["name"].isdigit():
                    vuln_name = "NSP-{}".format(vuln["name"])
                else:
                    vuln_name = vuln["name"]

                item_title = "External library '{}' vulnerable ({})".format(
                    item["fileName"], vuln_name)

                item_description = "Filepath: {}\nFilename: {}\n\n{}\n\nIdentifiers:\n{}".format(
                    remove_prefix(item["filePath"], scan_wd_asset),
                    item["fileName"],
                    vuln["description"].encode('utf-8').strip(),
                    "\n".join([vs["software"] for vs in vuln["vulnerableSoftware"]])
                )

                vuln_risks = {}
                if "cvssScore" in vuln.keys() and vuln["cvssScore"] != "":
                    vuln_risks.update({"cvss_base_score": float(vuln["cvssScore"])})

                vuln_links = [v["url"] for v in vuln["references"]]

                vuln_refs = {}
                if "cwe" in vuln.keys() and vuln["cwe"] != "":
                    vuln_refs.update({"CWE": [vuln["cwe"].split(" ")[0]]})
                if vuln["name"].startswith("CVE-"):
                    vuln_refs.update({"CVE": [vuln["name"]]})

                new_finding = PatrowlEngineFinding(
                    issue_id=issue_id, type="code_ext_jar_missing_update",
                    title=item_title,
                    description=item_description,
                    solution="Check the exploitability of the vulnerability in the application context. If the vulnerability is verified, consider updating the library.",
                    severity=vuln["severity"].lower(), confidence="firm",
                    raw=vuln,
                    target_addrs=[asset_value],
                    meta_links=vuln_links,
                    meta_tags=["jar", "library", "update", "owasp", "dependencies"],
                    meta_risk=vuln_risks,
                    meta_vuln_refs=vuln_refs)
                issue_id += 1
                findings.append(new_finding)


        # findings summary per asset (remove the workdir)
        checked_files_str = "\n".join([remove_prefix(ff, scan_wd_asset) for ff in sorted(checked_files)])

        summary_asset_finding_hash = hashlib.sha1(checked_files_str.encode('utf-8')).hexdigest()[:6]
        summary_asset_finding = PatrowlEngineFinding(
            issue_id=issue_id, type="code_ext_jar_summary",
            title="OWASP-DC scan summary for '{}' (#: {}, HASH: {})".format(
                asset_value, len(checked_files), summary_asset_finding_hash),
            description="Checked files:\n\n{}".format(checked_files_str),
            solution="n/a.",
            severity="info", confidence="firm",
            raw=[remove_prefix(ff, scan_wd_asset) for ff in checked_files],
            target_addrs=[asset_value],
            meta_tags=["jar", "library", "owasp", "dependencies"])
        issue_id += 1
        findings.append(summary_asset_finding)

    # Write results under mutex
    scan_lock = threading.RLock()
    with scan_lock:
        engine.scans[scan_id]["findings"] = engine.scans[scan_id]["findings"] + findings

    # Remove the workdir
    shutil.rmtree(scan_wd, ignore_errors=True)
Example No. 14
def _scanjs_thread(scan_id, asset_kw):
    issue_id = 0
    findings = []
    asset_values = [a["value"] for a in engine.scans[scan_id]["assets"]]

    # Create the scan's workdirs
    scan_wd = "{}/workdirs/scan_{}_{}".format(APP_BASE_DIR, scan_id, str(time.time()))
    if not os.path.exists(scan_wd):
        os.makedirs(scan_wd)

    for asset_value in asset_values:
        checked_files = []
        # create the asset scan workdir
        scan_wd_asset = "{}/{}".format(scan_wd, hashlib.sha1(asset_value).hexdigest()[:6])
        os.makedirs(scan_wd_asset)

        # Check location and copy files to the workdir
        if not _check_location(scan_id, asset_value, scan_wd_asset):
            # Generate an error if it was not possible to get the source code
            summary_asset_finding = PatrowlEngineFinding(
                issue_id=issue_id, type="code_ext_js_summary",
                title="Retire.js scan not performed for '{}' (Error)".format(asset_value),
                description="Scan error with source code available at this location: '{}'. Unknwon error.".format(asset_value),
                solution="n/a.",
                severity="info", confidence="firm",
                raw={},
                target_addrs=[asset_value],
                meta_tags=["js", "library", "retire.js"])
            issue_id += 1
            findings.append(summary_asset_finding)
            continue

        time.sleep(2)

        # Start the scan
        report_filename = "{}/oc_{}.json".format(scan_wd_asset, scan_id)
        cmd = 'retire -j --path="{}" --outputformat json --outputpath="{}" -v'.format(
            scan_wd_asset, report_filename)
        # p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
        p = subprocess.Popen(cmd, shell=True, stdout=open("/dev/null", "w"), stderr=None)

        # Wait a little to ensure the report file is completely written
        p.wait()
        time.sleep(2)
        if not os.path.exists(report_filename):
            print("report file '{}' not found.".format(report_filename))
            engine.scans[scan_id]["status"] = "ERROR"
            os.killpg(os.getpgid(p.pid), signal.SIGTERM)
            # if psutil.pid_exists(p):
            #     psutil.Process(p).terminate()
            return

        scan_results = json.load(open(report_filename))

        for item in scan_results:
            checked_files.append(item["file"])
            if len(item["results"]) == 0: continue

            for result in item["results"]:
                if "vulnerabilities" not in result.keys(): continue
                for vuln in result["vulnerabilities"]:
                    vuln_summary = "n/a"
                    if "summary" in vuln["identifiers"].keys():
                        vuln_summary = vuln["identifiers"]["summary"]
                    # Title
                    item_title = "'{}-{}' is vulnerable: '{}'".format(
                        result["component"], result["version"],
                        vuln_summary)

                    # Description
                    item_description = "An external JavaScript library has been found to be vulnerable:\n\nFilename: {}\nComponent: {}\nVersion: {}\nTitle: {}".format(
                        item["file"], result["component"], result["version"],
                        vuln_summary
                    )

                    # Check CVE
                    item_vuln_refs = {}
                    if "CVE" in vuln["identifiers"].keys():
                        item_vuln_refs.update({"CVE": vuln["identifiers"]["CVE"]})

                    new_finding = PatrowlEngineFinding(
                        issue_id=issue_id, type="code_js_missing_update",
                        title=item_title,
                        description=item_description,
                        solution="Check the exploitability of the vulnerability in the application context. If the vulnerability is verified, consider updating the library.",
                        severity=vuln["severity"], confidence="firm",
                        raw=item,
                        target_addrs=[asset_value],
                        meta_links=vuln["info"],
                        meta_tags=["js", "library", "update", "retire.js"],
                        meta_vuln_refs=item_vuln_refs)
                    issue_id += 1
                    findings.append(new_finding)

        # findings summary per asset (remove the workdir)
        checked_files_str = "\n".join([remove_prefix(ff, scan_wd_asset) for ff in sorted(checked_files)])

        summary_asset_finding_hash = hashlib.sha1(checked_files_str.encode('utf-8')).hexdigest()[:6]
        summary_asset_finding = PatrowlEngineFinding(
            issue_id=issue_id, type="code_js_summary",
            title="Retire.js scan summary for '{}' (#: {}, HASH: {})".format(
                asset_value, len(checked_files), summary_asset_finding_hash),
            description="Checked files:\n\n{}".format(checked_files_str),
            solution="n/a.",
            severity="info", confidence="firm",
            raw=checked_files,
            target_addrs=[asset_value],
            meta_tags=["js", "library", "retire.js"])
        issue_id += 1
        findings.append(summary_asset_finding)

    # Write results under mutex
    scan_lock = threading.RLock()
    with scan_lock:
        engine.scans[scan_id]["findings"] = engine.scans[scan_id]["findings"] + findings

    # Remove the workdir
    shutil.rmtree(scan_wd, ignore_errors=True)
Example No. 15
def _parse_xml_results(scan_id, asset, asset_port):
    issue_id = 0
    findings = []
    filename = APP_BASE_DIR + "/results/" + scan_id + "/" + asset + "_" + asset_port + ".xml"
    # Check file
    try:
        findings_tree = ET.parse(filename)
    except Exception:
        print("No Element found in XML file: {}".format(filename))
        return False

    xml_root = findings_tree.getroot()
    scan_results = findings_tree.find("ssltest")

    # Finding: Scan details
    issue_id += 1
    new_finding = PatrowlEngineFinding(
        issue_id=issue_id,
        type="ssltest_scan_summary",
        title="SSLScan scan on '{}:{}'".format(asset, asset_port),
        description=ET.tostring(xml_root, encoding='utf-8', method='xml').decode('utf-8'),
        solution="n/a",
        severity="info",
        confidence="firm",
        raw=ET.tostring(xml_root, encoding='utf-8', method='xml').decode('utf-8'),
        target_addrs=[asset])
    findings.append(new_finding)

    if scan_results is not None:
        # Finding: Supported ciphersuites
        issue_id += 1
        ciphersuites_issue = _get_ciphersuites(
            items=scan_results.findall("cipher"),
            issue_id=issue_id,
            asset=asset,
            asset_port=asset_port)
        if ciphersuites_issue:
            findings.append(ciphersuites_issue)

        # Finding: Certificate
        issue_id += 1
        certificate_pem_issue = _get_certificate_blob(
            cert_blob=scan_results.find("certificate").find(
                "certificate-blob"),
            issue_id=issue_id,
            asset=asset,
            asset_port=asset_port)
        if certificate_pem_issue:
            findings.append(certificate_pem_issue)

        # Finding: Certificate is expired ?
        issue_id += 1
        is_cert_expired_issue = _is_certificate_expired(
            cert_tags=scan_results.find(".//certificate/expired/.."),
            issue_id=issue_id,
            asset=asset,
            asset_port=asset_port)
        if is_cert_expired_issue:
            findings.append(is_cert_expired_issue)

        # Finding: Certificate is self-signed ?
        issue_id += 1
        is_cert_selfsigned_issue = _is_certificate_selfsigned(
            cert_tags=scan_results.find(".//certificate/self-signed/.."),
            issue_id=issue_id,
            asset=asset,
            asset_port=asset_port)
        if is_cert_selfsigned_issue:
            findings.append(is_cert_selfsigned_issue)

        # Finding: Heartbleed
        issue_id += 1
        hb_vuln = _get_heartbleed_vuln(
            items=scan_results.findall("heartbleed"),
            issue_id=issue_id,
            asset=asset,
            asset_port=asset_port)
        if hb_vuln:
            findings.append(hb_vuln)

    # Write results under mutex
    scan_lock = threading.RLock()
    with scan_lock:
        engine.scans[scan_id]["findings"] += findings
    return True
Example No. 16
def _parse_xml_results(scan_id, asset, asset_port):
    issue_id = 0
    findings = []
    filename = APP_BASE_DIR+"/results/"+scan_id+"/"+asset+"_"+asset_port+".xml"
    # Check file
    try:
        findings_tree = ET.parse(filename)
    except Exception:
        print("No Element found in XML file: {}".format(filename))
        return False

    xml_root = findings_tree.getroot()
    scan_results = findings_tree.find("ssltest")

    # Finding: Scan details
    issue_id += 1
    new_finding = PatrowlEngineFinding(
        issue_id=issue_id,
        type="ssltest_scan_summary",
        title="SSLScan scan on '{}:{}'".format(asset, asset_port),
        description=ET.tostring(xml_root, encoding='utf-8', method='xml').decode('utf-8'),
        solution="n/a",
        severity="info",
        confidence="firm",
        raw=ET.tostring(xml_root, encoding='utf-8', method='xml').decode('utf-8'),
        target_addrs=[asset])
    findings.append(new_finding)

    if scan_results is not None:
        # Finding: Supported ciphersuites
        issue_id += 1
        ciphersuites_issue = _get_ciphersuites(
            items=scan_results.findall("cipher"),
            issue_id=issue_id, asset=asset, asset_port=asset_port)
        if ciphersuites_issue:
            findings.append(ciphersuites_issue)

        # Finding: Certificate
        if scan_results.find("certificate") is not None:
            issue_id += 1
            certificate_pem_issue = _get_certificate_blob(
                cert_blob=scan_results.find("certificate").find("certificate-blob"),
                issue_id=issue_id, asset=asset, asset_port=asset_port)
            if certificate_pem_issue:
                findings.append(certificate_pem_issue)

        # Finding: Certificate is expired ?
        issue_id += 1
        is_cert_expired_issue = _is_certificate_expired(
            cert_tags=scan_results.find(".//certificate/expired/.."),
            issue_id=issue_id, asset=asset, asset_port=asset_port)
        if is_cert_expired_issue:
            findings.append(is_cert_expired_issue)

        # Finding: Certificate is self-signed ?
        issue_id += 1
        is_cert_selfsigned_issue = _is_certificate_selfsigned(
            cert_tags=scan_results.find(".//certificate/self-signed/.."),
            issue_id=issue_id, asset=asset, asset_port=asset_port)
        if is_cert_selfsigned_issue:
            findings.append(is_cert_selfsigned_issue)

        # Finding: Heartbleed
        issue_id += 1
        hb_vuln = _get_heartbleed_vuln(
            items=scan_results.findall("heartbleed"),
            issue_id=issue_id, asset=asset, asset_port=asset_port)
        if hb_vuln:
            findings.append(hb_vuln)

        # Finding: Fallback supported ?
        issue_id += 1
        is_fallback_supported_issue = _is_fallback_supported(
            fallback=scan_results.find("fallback"),
            issue_id=issue_id, asset=asset, asset_port=asset_port)
        if is_fallback_supported_issue:
            findings.append(is_fallback_supported_issue)

        # Finding: Secure renegotiation supported ?
        issue_id += 1
        is_secure_renegotiation_issue = _is_secure_renegotiation_supported(
            sec_rng=scan_results.find("renegotiation"),
            issue_id=issue_id, asset=asset, asset_port=asset_port)
        if is_secure_renegotiation_issue:
            findings.append(is_secure_renegotiation_issue)

        # Finding: weak protocols
        # issue_id is handled inside the function
        wp_vuln = _spot_weak_protocol(
            protocols=scan_results.findall("protocol"),
            issue_id=issue_id, asset=asset, asset_port=asset_port)
        if wp_vuln:
            for weak_pr in wp_vuln:
                issue_id = weak_pr.issue_id
                findings.append(weak_pr)

        # Finding: weak ciphersuites
        # issue_id is handled inside the function
        wc_vuln = _spot_weak_ciphersuites(
            ciphers=scan_results.findall("cipher"),
            issue_id=issue_id, asset=asset, asset_port=asset_port)
        if wc_vuln:
            for weak_cs in wc_vuln:
                issue_id = weak_cs.issue_id
                findings.append(weak_cs)

    # Write results under mutex
    scan_lock = threading.RLock()
    with scan_lock:
        engine.scans[scan_id]["findings"] += findings

    return True
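The ".//certificate/expired/.." paths used above rely on ElementTree's limited XPath support: the path matches the child element and then steps up with ".." to return the enclosing <certificate> element. A tiny self-contained illustration:

import xml.etree.ElementTree as ET

doc = ET.fromstring(
    '<ssltest><certificate><expired>true</expired></certificate></ssltest>')
cert = doc.find(".//certificate/expired/..")
print(cert.tag)                   # -> "certificate"
print(cert.find("expired").text)  # -> "true"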