示例#1
0
def test_get_pkg():
    """Parse sample CycloneDX BOMs and validate the extracted package lists."""
    data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")

    # Maven BOM: 157 components; vendor should be normalised away from "maven".
    pkg_list = get_pkg_list(os.path.join(data_dir, "bom.xml"))
    assert len(pkg_list) == 157
    for pkg in pkg_list:
        assert pkg["vendor"] != "maven"
        assert " " not in pkg["name"]
        assert pkg["version"]

    # Python BOM: every component should carry the pypi vendor.
    pkg_list = get_pkg_list(os.path.join(data_dir, "bom-py.xml"))
    assert len(pkg_list) == 31
    for pkg in pkg_list:
        assert pkg["vendor"] == "pypi"
        assert " " not in pkg["name"]
        assert pkg["version"]

    # Two dotnet BOM samples: vendor must at least be non-empty.
    pkg_list = get_pkg_list(os.path.join(data_dir, "bom-dotnet.xml"))
    assert len(pkg_list) == 38
    for pkg in pkg_list:
        assert pkg["vendor"]
        assert " " not in pkg["name"]
        assert pkg["version"]

    pkg_list = get_pkg_list(os.path.join(data_dir, "bom-dotnet2.xml"))
    assert len(pkg_list) == 6
    for pkg in pkg_list:
        assert pkg["vendor"]
        assert " " not in pkg["name"]
        assert pkg["version"]
示例#2
0
def test_search_webgoat_json(test_db):
    """Searching packages from the JSON BOM against an empty test db yields no hits."""
    bom_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "data", "bom.json"
    )
    pkg_list = get_pkg_list(bom_path)
    assert len(pkg_list) == 157
    search_res, pkg_aliases = search_pkgs(test_db, pkg_list)
    assert not len(search_res)
示例#3
0
def test_get_pkg_by_type(test_db):
    """Filtering the docker BOM by type keeps only the npm packages."""
    bom_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "data", "bom-docker.json"
    )
    all_pkgs = get_pkg_list(bom_path)
    assert len(all_pkgs) == 1824
    npm_only = get_pkg_by_type(all_pkgs, "npm")
    assert len(npm_only) == 1823
示例#4
0
def test_large_lookup(test_license_data):
    """License lookup and analysis over a large (docker) package list."""
    bom_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "data", "bom-docker.json"
    )
    pkg_lic_dict = bulk_lookup(test_license_data, get_pkg_list(bom_path))
    assert pkg_lic_dict
    analysis.analyse_licenses("nodejs", pkg_lic_dict)
示例#5
0
def test_search():
    """Packages from the sample BOM produce at least one hit in the real db."""
    bom_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "data", "bom.xml"
    )
    db = dbLib.get()
    search_res = search_pkgs(db, get_pkg_list(bom_path))
    assert len(search_res)
示例#6
0
def test_lookup(test_license_data):
    """bulk_lookup resolves licenses and surfaces condition violations."""
    data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")

    # Maven BOM: every package should resolve to some license data.
    pkg_lic_dict = bulk_lookup(
        test_license_data, get_pkg_list(os.path.join(data_dir, "bom.xml"))
    )
    assert pkg_lic_dict

    # Dotnet BOM: exactly one license should trip its condition flag.
    pkg_lic_dict = bulk_lookup(
        test_license_data, get_pkg_list(os.path.join(data_dir, "bom-dotnet.xml"))
    )
    assert pkg_lic_dict
    violations_list = [
        lic
        for ll in pkg_lic_dict.values()
        for lic in ll
        if lic["condition_flag"]
    ]
    assert len(violations_list) == 1
示例#7
0
def test_get_pkg():
    """Group/name/version extraction from Maven and Python BOM samples."""
    data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")

    maven_pkgs = get_pkg_list(os.path.join(data_dir, "bom.xml"))
    assert len(maven_pkgs) == 157
    for pkg in maven_pkgs:
        # Maven coordinates always carry a group id.
        assert pkg["group"]
        assert pkg["name"]
        assert pkg["version"]

    py_pkgs = get_pkg_list(os.path.join(data_dir, "bom-py.xml"))
    assert len(py_pkgs) == 31
    for pkg in py_pkgs:
        # PyPI packages have no group component.
        assert not pkg["group"]
        assert pkg["name"]
        assert pkg["version"]
示例#8
0
def test_dual_license(test_license_data):
    """A dual-licensed package (MIT OR GPL-3.0) resolves to the MIT entry."""
    jszip_pkg = {
        "vendor": "npm",
        "name": "jszip",
        "version": "3.2.2",
        "licenses": ["(MIT OR GPL-3.0)"],
    }
    pkg_lic_dict = bulk_lookup(test_license_data, [jszip_pkg])
    expected_mit = {
        "title": "MIT License",
        "spdx-id": "MIT",
        "featured": True,
        "hidden": False,
        "description": "A short and simple permissive license with conditions only requiring preservation of copyright and license notices. Licensed works, modifications, and larger works may be distributed under different terms and without source code.",
        "how": "Create a text file (typically named LICENSE or LICENSE.txt) in the root of your source code and copy the text of the license into the file. Replace [year] with the current year and [fullname] with the name (or names) of the copyright holders.",
        "using": [
            {"Babel": "https://github.com/babel/babel/blob/master/LICENSE"},
            {".NET Core": "https://github.com/dotnet/runtime/blob/master/LICENSE.TXT"},
            {"Rails": "https://github.com/rails/rails/blob/master/MIT-LICENSE"},
        ],
        "permissions": [
            "commercial-use",
            "modifications",
            "distribution",
            "private-use",
        ],
        "conditions": ["include-copyright"],
        "limitations": ["liability", "warranty"],
        "condition_flag": False,
    }
    assert pkg_lic_dict == {"npm:[email protected]": [expected_mit]}

    # A node BOM containing dual-licensed packages should also resolve.
    bom_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "data", "bom-node2.xml"
    )
    pkg_lic_dict = bulk_lookup(test_license_data, get_pkg_list(bom_path))
    assert pkg_lic_dict
示例#9
0
def main():
    """CLI entry point: refresh/sync vulnerability sources and scan a BOM file.

    Builds the local vulnerability database when its index is empty (or when
    caching is requested), optionally syncs recent advisories, then scans the
    packages of the supplied BOM file and exits with status 1 on critical
    findings unless ``--noerror`` is set.
    """
    args = build_args()
    print(at_logo, flush=True)
    db = dbLib.get()
    run_cacher = args.cache
    summary = None
    # An empty index means the local vulnerability database must be (re)built.
    if not dbLib.index_count(db["index_file"]):
        run_cacher = True
    else:
        LOG.info("Vulnerability database loaded from {}".format(config.vulndb_bin_file))
    sources_list = [NvdSource()]
    # GitHub advisories need an API token; otherwise fall back to NVD only.
    if os.environ.get("GITHUB_TOKEN"):
        sources_list.insert(0, GitHubSource())
    else:
        LOG.info(
            "To use GitHub advisory source please set the environment variable GITHUB_TOKEN!"
        )
    if run_cacher:
        for s in sources_list:
            LOG.info("Refreshing {}".format(s.__class__.__name__))
            s.refresh()
    elif args.sync:
        for s in sources_list:
            LOG.info("Syncing {}".format(s.__class__.__name__))
            s.download_recent()
    LOG.debug(
        "Vulnerability database contains {} records".format(
            dbLib.index_count(db["index_file"])
        )
    )
    if args.bom:
        if not os.path.isfile(args.bom):
            LOG.error("Invalid bom file specified: {}".format(args.bom))
            return
        LOG.debug("Scanning using the bom file {}".format(args.bom))
        pkg_list = get_pkg_list(args.bom)
        summary = scan(db, pkg_list, args.report_file)
    # proj_type = utils.detect_project_type(args.src_dir)
    if summary and not args.noerror:
        # Hard coded build break logic for now
        # BUG FIX: default to 0 so a summary without a CRITICAL key cannot
        # raise TypeError (None > 0) on the comparison.
        if summary.get("CRITICAL", 0) > 0:
            sys.exit(1)
示例#10
0
File: cli.py  Project: AppThreat/dep-scan
def main():
    """CLI entry point: detect project types, build/scan BOMs, audit and report.

    For each detected (or explicitly requested) project type this creates or
    reuses a BOM, extracts the package list, optionally performs license,
    OSS-risk and remote audits, scans against the local vulnerability
    database, and prints a summary.  Exits with status 1 when critical
    vulnerabilities are found for a single-type scan unless ``--noerror``.
    """
    args = build_args()
    if not args.no_banner:
        print(at_logo)
    src_dir = args.src_dir_image
    if not src_dir:
        src_dir = os.getcwd()
    reports_base_dir = src_dir
    # Detect the project types and perform the right type of scan
    if args.project_type:
        project_types_list = args.project_type.split(",")
    elif args.bom:
        project_types_list = ["bom"]
    else:
        project_types_list = utils.detect_project_type(src_dir)
    # Container/binary scans may point src_dir at an image; keep reports local.
    if ("docker" in project_types_list or "podman" in project_types_list
            or "container" in project_types_list
            or "binary" in project_types_list):
        reports_base_dir = os.getcwd()
    db = dbLib.get()
    run_cacher = args.cache
    areport_file = (args.report_file if args.report_file else os.path.join(
        reports_base_dir, "reports", "depscan.json"))
    reports_dir = os.path.dirname(areport_file)
    # Create reports directory
    if not os.path.exists(reports_dir):
        os.makedirs(reports_dir)
    if len(project_types_list) > 1:
        LOG.debug(
            "Multiple project types found: {}".format(project_types_list))
    # Enable license scanning
    if "license" in project_types_list:
        os.environ["FETCH_LICENSE"] = "true"
        project_types_list.remove("license")
        console.print(
            Panel(
                "License audit is enabled for this scan. This would increase the time by up to 10 minutes.",
                title="License Audit",
                expand=False,
            ))
    for project_type in project_types_list:
        sug_version_dict = {}
        pkg_aliases = {}
        results = []
        report_file = areport_file.replace(".json",
                                           "-{}.json".format(project_type))
        risk_report_file = areport_file.replace(
            ".json", "-risk.{}.json".format(project_type))
        LOG.info("=" * 80)
        creation_status = False
        if args.bom and os.path.exists(args.bom):
            bom_file = args.bom
            creation_status = True
        else:
            bom_file = os.path.join(reports_dir,
                                    "bom-" + project_type + ".json")
            creation_status = create_bom(project_type, bom_file, src_dir)
        if not creation_status:
            LOG.debug(
                "Bom file {} was not created successfully".format(bom_file))
            continue
        LOG.debug("Scanning using the bom file {}".format(bom_file))
        pkg_list = get_pkg_list(bom_file)
        if not pkg_list:
            LOG.debug("No packages found in the project!")
            continue
        scoped_pkgs = {}
        if project_type in ["python"]:
            all_imports = utils.get_all_imports(src_dir)
            LOG.debug(f"Identified {len(all_imports)} imports in your project")
            scoped_pkgs = utils.get_scope_from_imports(project_type, pkg_list,
                                                       all_imports)
        else:
            scoped_pkgs = utils.get_pkgs_by_scope(project_type, pkg_list)
        # BUG FIX: os.getenv always returns a string (or the default), so the
        # original membership test against the boolean True could never match.
        # Compare case-insensitively against the accepted truthy strings.
        if os.getenv("FETCH_LICENSE", "").lower() in ("1", "true"):
            licenses_results = bulk_lookup(
                build_license_data(license_data_dir, spdx_license_list),
                pkg_list=pkg_list,
            )
            license_report_file = os.path.join(
                reports_dir, "license-" + project_type + ".json")
            analyse_licenses(project_type, licenses_results,
                             license_report_file)
        if project_type in risk_audit_map.keys():
            if args.risk_audit:
                console.print(
                    Panel(
                        f"Performing OSS Risk Audit for packages from {src_dir}\nNo of packages [bold]{len(pkg_list)}[/bold]. This will take a while ...",
                        title="OSS Risk Audit",
                        expand=False,
                    ))
                try:
                    risk_results = risk_audit(
                        project_type,
                        scoped_pkgs,
                        args.private_ns,
                        pkg_list,
                        risk_report_file,
                    )
                    analyse_pkg_risks(
                        project_type,
                        scoped_pkgs,
                        args.private_ns,
                        risk_results,
                        risk_report_file,
                    )
                except Exception as e:
                    LOG.error(e)
                    LOG.error("Risk audit was not successful")
            else:
                console.print(
                    Panel(
                        "Depscan supports OSS Risk audit for this project.\nTo enable set the environment variable [bold]ENABLE_OSS_RISK=true[/bold]",
                        title="New Feature",
                        expand=False,
                    ))
        if project_type in type_audit_map.keys():
            LOG.info("Performing remote audit for {} of type {}".format(
                src_dir, project_type))
            LOG.debug(f"No of packages {len(pkg_list)}")
            try:
                audit_results = audit(project_type, pkg_list, report_file)
                if audit_results:
                    LOG.debug(
                        f"Remote audit yielded {len(audit_results)} results")
                    results = results + audit_results
            except Exception as e:
                LOG.error("Remote audit was not successful")
                LOG.error(e)
                # BUG FIX: was `results = None`; a None here made the later
                # `results + audit_results` and `results + vdb_results`
                # concatenations raise TypeError. Keep results a list.
                results = []
        # In case of docker, check if there are any npm packages that can be audited remotely
        if project_type in ("podman", "docker"):
            npm_pkg_list = get_pkg_by_type(pkg_list, "npm")
            if npm_pkg_list:
                LOG.debug(f"No of packages {len(npm_pkg_list)}")
                try:
                    audit_results = audit("nodejs", npm_pkg_list, report_file)
                    if audit_results:
                        LOG.debug(
                            f"Remote audit yielded {len(audit_results)} results"
                        )
                        results = results + audit_results
                except Exception as e:
                    LOG.error("Remote audit was not successful")
                    LOG.error(e)
        if not dbLib.index_count(db["index_file"]):
            run_cacher = True
        else:
            LOG.debug("Vulnerability database loaded from {}".format(
                config.vdb_bin_file))
        sources_list = [OSVSource(), NvdSource()]
        if os.environ.get("GITHUB_TOKEN"):
            sources_list.insert(0, GitHubSource())
        if run_cacher:
            for s in sources_list:
                LOG.debug("Refreshing {}".format(s.__class__.__name__))
                s.refresh()
                run_cacher = False
        elif args.sync:
            for s in sources_list:
                LOG.debug("Syncing {}".format(s.__class__.__name__))
                s.download_recent()
                run_cacher = False
        LOG.debug("Vulnerability database contains {} records".format(
            dbLib.index_count(db["index_file"])))
        LOG.info("Performing regular scan for {} using plugin {}".format(
            src_dir, project_type))
        vdb_results, pkg_aliases, sug_version_dict = scan(
            db, project_type, pkg_list, args.suggest)
        if vdb_results:
            results = results + vdb_results
        # Summarise and print results
        summary = summarise(
            project_type,
            results,
            pkg_aliases,
            sug_version_dict,
            scoped_pkgs,
            report_file,
            True,
        )
        if summary and not args.noerror and len(project_types_list) == 1:
            # Hard coded build break logic for now
            # BUG FIX: default to 0 so a missing CRITICAL key cannot raise
            # TypeError on the comparison.
            if summary.get("CRITICAL", 0) > 0:
                sys.exit(1)
示例#11
0
def main():
    """CLI entry point.

    Detects project types under ``src_dir``, builds a BOM per type, then either
    audits packages remotely (for supported types) or scans them against the
    local vulnerability database, printing a summary per project type.  Exits
    with status 1 when critical issues are found for a single-type scan,
    unless ``--noerror`` is set.
    """
    args = build_args()
    if not args.no_banner:
        print(at_logo, flush=True)
    # Set logging level
    if os.environ.get("SCAN_DEBUG_MODE") == "debug":
        LOG.setLevel(logging.DEBUG)
    src_dir = args.src_dir
    if not args.src_dir:
        src_dir = os.getcwd()
    db = dbLib.get()
    run_cacher = args.cache
    # Default report path lives under <src_dir>/reports unless overridden.
    areport_file = (
        args.report_file
        if args.report_file
        else os.path.join(src_dir, "reports", "depscan.json")
    )
    reports_dir = os.path.dirname(areport_file)
    # Create reports directory
    if not os.path.exists(reports_dir):
        os.makedirs(reports_dir)
    # Detect the project types and perform the right type of scan
    if args.project_type:
        project_types_list = args.project_type.split(",")
    else:
        project_types_list = utils.detect_project_type(src_dir)
    if len(project_types_list) > 1:
        LOG.debug("Multiple project types found: {}".format(project_types_list))
    for project_type in project_types_list:
        sug_version_dict = {}
        pkg_aliases = {}
        # Per-type report file, e.g. depscan-python.json
        report_file = areport_file.replace(".json", "-{}.json".format(project_type))
        LOG.info("=" * 80)
        bom_file = os.path.join(reports_dir, "bom-" + project_type + ".json")
        creation_status = create_bom(project_type, bom_file, src_dir)
        if not creation_status:
            LOG.debug("Bom file {} was not created successfully".format(bom_file))
            continue
        LOG.debug("Scanning using the bom file {}".format(bom_file))
        pkg_list = get_pkg_list(bom_file)
        if not pkg_list:
            LOG.debug("No packages found in the project!")
            continue
        # License scan is on by default; opt out with --no-license-scan.
        if not args.no_license_scan:
            licenses_results = bulk_lookup(
                build_license_data(license_data_dir), pkg_list=pkg_list
            )
            license_report_file = os.path.join(
                reports_dir, "license-" + project_type + ".json"
            )
            analyse_licenses(
                project_type,
                licenses_results,
                license_report_file
            )
        if project_type in type_audit_map.keys():
            # Remote audit path for supported project types.
            LOG.info(
                "Performing remote audit for {} of type {}".format(
                    src_dir, project_type
                )
            )
            LOG.debug(f"No of packages {len(pkg_list)}")
            results = audit(project_type, pkg_list, report_file)
        else:
            # Local scan path: ensure the vulnerability database is populated.
            if not dbLib.index_count(db["index_file"]):
                run_cacher = True
            else:
                LOG.debug(
                    "Vulnerability database loaded from {}".format(config.vdb_bin_file)
                )
            sources_list = [NvdSource()]
            # GitHub advisories need an API token; otherwise NVD only.
            if os.environ.get("GITHUB_TOKEN"):
                sources_list.insert(0, GitHubSource())
            else:
                LOG.info(
                    "To use GitHub advisory source please set the environment variable GITHUB_TOKEN!"
                )
            if run_cacher:
                for s in sources_list:
                    LOG.debug("Refreshing {}".format(s.__class__.__name__))
                    s.refresh()
            elif args.sync:
                for s in sources_list:
                    LOG.debug("Syncing {}".format(s.__class__.__name__))
                    s.download_recent()
            LOG.debug(
                "Vulnerability database contains {} records".format(
                    dbLib.index_count(db["index_file"])
                )
            )
            LOG.info(
                "Performing regular scan for {} using plugin {}".format(
                    src_dir, project_type
                )
            )
            results, pkg_aliases, sug_version_dict = scan(db, pkg_list, args.suggest)
        # Summarise and print results
        summary = summarise(
            project_type, results, pkg_aliases, sug_version_dict, report_file, True
        )
        if summary and not args.noerror and len(project_types_list) == 1:
            # Hard coded build break logic for now
            # NOTE(review): summary.get("CRITICAL") returns None when the key
            # is absent, which would raise TypeError on `> 0` — consider
            # summary.get("CRITICAL", 0).
            if summary.get("CRITICAL") > 0:
                sys.exit(1)
示例#12
0
def main():
    """CLI entry point.

    Detects (or accepts) project types, creates or reuses a BOM per type,
    optionally runs license and OSS-risk audits, then either audits packages
    remotely or scans them against the local vulnerability database and
    summarises the findings.  Exits with status 1 on critical findings for a
    single-type scan unless ``--noerror`` is set.
    """
    args = build_args()
    if not args.no_banner:
        print(at_logo)
    src_dir = args.src_dir
    if not args.src_dir:
        src_dir = os.getcwd()
    db = dbLib.get()
    run_cacher = args.cache
    # Default report path lives under <src_dir>/reports unless overridden.
    areport_file = (
        args.report_file
        if args.report_file
        else os.path.join(src_dir, "reports", "depscan.json")
    )
    reports_dir = os.path.dirname(areport_file)
    # Create reports directory
    if not os.path.exists(reports_dir):
        os.makedirs(reports_dir)
    # Detect the project types and perform the right type of scan
    if args.project_type:
        project_types_list = args.project_type.split(",")
    else:
        project_types_list = utils.detect_project_type(src_dir)
    if len(project_types_list) > 1:
        LOG.debug("Multiple project types found: {}".format(project_types_list))
    for project_type in project_types_list:
        sug_version_dict = {}
        pkg_aliases = {}
        # Per-type report files, e.g. depscan-python.json / depscan-risk.python.json
        report_file = areport_file.replace(".json", "-{}.json".format(project_type))
        risk_report_file = areport_file.replace(
            ".json", "-risk.{}.json".format(project_type)
        )
        LOG.info("=" * 80)
        creation_status = False
        # Reuse a user-supplied BOM when available; otherwise generate one.
        if args.bom and os.path.exists(args.bom):
            bom_file = args.bom
            creation_status = True
        else:
            bom_file = os.path.join(reports_dir, "bom-" + project_type + ".json")
            creation_status = create_bom(project_type, bom_file, src_dir)
        if not creation_status:
            LOG.debug("Bom file {} was not created successfully".format(bom_file))
            continue
        LOG.debug("Scanning using the bom file {}".format(bom_file))
        pkg_list = get_pkg_list(bom_file)
        if not pkg_list:
            LOG.debug("No packages found in the project!")
            continue
        scoped_pkgs = utils.get_pkgs_by_scope(pkg_list)
        # License scan is on by default; opt out with --no-license-scan.
        if not args.no_license_scan:
            licenses_results = bulk_lookup(
                build_license_data(license_data_dir), pkg_list=pkg_list
            )
            license_report_file = os.path.join(
                reports_dir, "license-" + project_type + ".json"
            )
            analyse_licenses(project_type, licenses_results, license_report_file)
        if project_type in risk_audit_map.keys():
            if args.risk_audit:
                console.print(
                    Panel(
                        f"Performing OSS Risk Audit for packages from {src_dir}\nNo of packages [bold]{len(pkg_list)}[/bold]. This will take a while ...",
                        title="OSS Risk Audit",
                        expand=False,
                    )
                )
                try:
                    risk_results = risk_audit(
                        project_type, args.private_ns, pkg_list, risk_report_file
                    )
                    analyse_pkg_risks(
                        project_type, args.private_ns, risk_results, risk_report_file
                    )
                except Exception as e:
                    # Risk audit failures are non-fatal; the scan continues.
                    LOG.error(e)
                    LOG.error("Risk audit was not successful")
                    risk_results = None
            else:
                console.print(
                    Panel(
                        "Depscan supports OSS Risk audit for this project.\nTo enable set the environment variable [bold]ENABLE_OSS_RISK=true[/bold]",
                        title="New Feature",
                        expand=False,
                    )
                )
        if project_type in type_audit_map.keys():
            # Remote audit path for supported project types.
            LOG.info(
                "Performing remote audit for {} of type {}".format(
                    src_dir, project_type
                )
            )
            LOG.debug(f"No of packages {len(pkg_list)}")
            try:
                results = audit(project_type, pkg_list, report_file)
            except Exception as e:
                LOG.error("Remote audit was not successful")
                LOG.error(e)
                # NOTE(review): results becomes None here and flows into
                # summarise() below — confirm summarise tolerates None.
                results = None
        else:
            # Local scan path: ensure the vulnerability database is populated.
            if not dbLib.index_count(db["index_file"]):
                run_cacher = True
            else:
                LOG.debug(
                    "Vulnerability database loaded from {}".format(config.vdb_bin_file)
                )
            sources_list = [NvdSource()]
            # GitHub advisories need an API token; otherwise NVD only.
            if os.environ.get("GITHUB_TOKEN"):
                sources_list.insert(0, GitHubSource())
            else:
                LOG.info(
                    "To use GitHub advisory source please set the environment variable GITHUB_TOKEN!"
                )
            if run_cacher:
                for s in sources_list:
                    LOG.debug("Refreshing {}".format(s.__class__.__name__))
                    s.refresh()
            elif args.sync:
                for s in sources_list:
                    LOG.debug("Syncing {}".format(s.__class__.__name__))
                    s.download_recent()
            LOG.debug(
                "Vulnerability database contains {} records".format(
                    dbLib.index_count(db["index_file"])
                )
            )
            LOG.info(
                "Performing regular scan for {} using plugin {}".format(
                    src_dir, project_type
                )
            )
            results, pkg_aliases, sug_version_dict = scan(
                db, project_type, pkg_list, args.suggest
            )
        # Summarise and print results
        summary = summarise(
            project_type,
            results,
            pkg_aliases,
            sug_version_dict,
            scoped_pkgs,
            report_file,
            True,
        )
        if summary and not args.noerror and len(project_types_list) == 1:
            # Hard coded build break logic for now
            # NOTE(review): summary.get("CRITICAL") returns None when the key
            # is absent, which would raise TypeError on `> 0` — consider
            # summary.get("CRITICAL", 0).
            if summary.get("CRITICAL") > 0:
                sys.exit(1)
示例#13
0
def test_query_metadata2():
    """PyPI metadata lookup for the Python BOM sample returns data."""
    bom_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "data", "bom-py.xml"
    )
    metadata_dict = pypi_metadata(get_pkg_list(bom_path), None)
    assert metadata_dict
示例#14
0
def test_query_metadata1():
    """npm metadata lookup for the goof BOM sample returns data."""
    bom_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "data", "bom-goof.json"
    )
    metadata_dict = npm_metadata(get_pkg_list(bom_path), "snyk")
    assert metadata_dict
示例#15
0
def test_go_search(test_db):
    """Go BOM packages produce no hits against an empty test db."""
    bom_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "data", "bom-go.xml"
    )
    search_res, pkg_aliases = search_pkgs(test_db, get_pkg_list(bom_path))
    assert not len(search_res)
示例#16
0
def test_query_metadata():
    """npm metadata lookup for the node BOM sample returns data."""
    bom_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "data", "bom-node.xml"
    )
    metadata_dict = npm_metadata({}, get_pkg_list(bom_path), None)
    assert metadata_dict