Example #1
def check_latest_version():
    """Checks for the latest version available at PyPI."""

    name: str = "cve-bin-tool"
    url: str = f"https://pypi.org/pypi/{name}/json"
    try:
        with request.urlopen(url) as resp:  # nosec - static url above
            package_json = json.load(resp)
            pypi_version = package_json["info"]["version"]
            if pypi_version != VERSION:
                LOGGER.info(
                    f"[bold red]You are running version {VERSION} of {name} but the latest PyPI Version is {pypi_version}.[/]",
                    extra={"markup": True},
                )
                if version.parse(VERSION) < version.parse(pypi_version):
                    LOGGER.info(
                        "[bold yellow]Alert: We recommend using the latest stable release.[/]",
                        extra={"markup": True},
                    )
    except Exception as error:
        LOGGER.warning(
            textwrap.dedent(
                f"""
        -------------------------- Can't check for the latest version ---------------------------
        warning: unable to access 'https://pypi.org/pypi/{name}'
        Exception details: {error}
        Please make sure you have a working internet connection or try again later.
        """
            )
        )
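For context, this snippet relies on names defined elsewhere in its module. A minimal sketch of the surroundings it appears to assume (the exact import layout and the VERSION value below are assumptions, not taken from the example):

import json
import logging
import textwrap
from urllib import request

from packaging import version

LOGGER = logging.getLogger(__name__)
VERSION = "0.0.0"  # placeholder; the real value comes from the package metadata

With these in place, version.parse(VERSION) < version.parse(pypi_version) gives a proper semantic version comparison rather than a plain string comparison.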
Example #2
def update_json():
    """Update the Debian CVE JSON file"""

    LOGGER.info("Updating Debian CVE JSON file for checking available fixes.")
    response = request.urlopen(JSON_URL).read().decode(
        "utf-8")  # nosec - static url
    response = loads(response)
    with open(DEB_CVE_JSON_PATH, "w") as debian_json:
        dump(response, debian_json, indent=4)
        LOGGER.info(
            "Debian CVE JSON file for checking available fixes is updated.")
Example #3
    def cve_info(
        self,
        all_cve_data: Dict[ProductInfo, CVEData],
    ):
        """Produces the Backported fixes' info"""

        cve_data = format_output(all_cve_data)
        json_data = self.get_data()
        for cve in cve_data:
            try:
                cve_fix = json_data[cve["product"]][
                    cve["cve_number"]]["releases"][self.compute_distro()]
                if cve_fix["status"] == "resolved":
                    if self.is_backport:
                        if cve_fix["fixed_version"].startswith(cve["version"]):
                            LOGGER.info(
                                f'{cve["product"]}: {cve["cve_number"]} has backported fix in v{cve_fix["fixed_version"]} release.'
                            )
                        else:
                            LOGGER.info(
                                f'{cve["product"]}: No known backported fix for {cve["cve_number"]}.'
                            )
                    else:
                        LOGGER.info(
                            f'{cve["product"]}: {cve["cve_number"]} has available fix in v{cve_fix["fixed_version"]} release.'
                        )
            except KeyError:
                if cve["cve_number"] != "UNKNOWN":
                    LOGGER.info(
                        f'{cve["product"]}: No known fix for {cve["cve_number"]}.'
                    )
Example #4
    def extract_and_parse_file(self, filename):
        """extracts and parses the file for common patterns, version strings and common filename patterns"""

        with self.extractor as ectx:
            if ectx.can_extract(filename):
                binary_string_list = []
                for filepath in self.walker([ectx.extract(filename)]):
                    clean_path = self.version_scanner.clean_file_path(filepath)
                    LOGGER.debug(f"checking whether {clean_path} is binary")

                    # see if the file is ELF binary file and parse for strings
                    is_exec = self.version_scanner.is_executable(filepath)[0]
                    if is_exec:
                        LOGGER.debug(
                            f"{clean_path} <--- this is an ELF binary")
                        file_content = self.version_scanner.parse_strings(
                            filepath)

                        matches = self.search_pattern(file_content,
                                                      self.product_name)

                        # searching for version strings in the found matches
                        version_string = self.search_version_string(matches)
                        self.version_pattern += version_string

                        # if version string is found in file, append it to filename_pattern
                        if version_string:
                            if sys.platform == "win32":
                                self.filename_pattern.append(
                                    filepath.split("\\")[-1])
                            else:
                                self.filename_pattern.append(
                                    filepath.split("/")[-1])
                            LOGGER.info(
                                f"matches for {self.product_name} found in {clean_path}"
                            )

                            binary_string_list += matches

                            for i in matches:
                                if ("/" not in i and "!" not in i
                                    ) and len(i) > self.string_length:
                                    self.contains_patterns.append(i)

                        LOGGER.debug(f"{self.filename_pattern}")

                # handle the case where no strings containing product_name were found
                if self.contains_patterns:
                    return self.contains_patterns
                return binary_string_list
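As a side note on the sys.platform branch above: splitting on "\\" versus "/" is equivalent to taking the path's basename. A small standard-library illustration (not from the project):

import ntpath
import os.path

print(os.path.basename("/usr/lib/libcrypto.so.1.1"))  # libcrypto.so.1.1
print(ntpath.basename(r"C:\tools\bin\openssl.exe"))   # openssl.exe

ntpath.basename accepts both separators, so a single call could replace the platform check if that behavior is acceptable.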
Example #5
class TestJSON:
    # Download the schema
    SCHEMA = json.loads(urlopen(NVD_SCHEMA).read().decode("utf-8"))
    LOGGER.info("Schema loaded successfully")

    @unittest.skipUnless(LONG_TESTS() > 0, "Skipping long tests")
    @pytest.mark.parametrize("year",
                             list(range(2002,
                                        datetime.datetime.now().year + 1)))
    # NVD database started in 2002, so range then to now.
    def test_json_validation(self, year):
        """ Validate latest nvd json file against their published schema """
        # Open the latest nvd file on disk
        with gzip.open(
                os.path.join(DISK_LOCATION_DEFAULT,
                             f"nvdcve-1.1-{year}.json.gz"),
                "rb",
        ) as json_file:
            nvd_json = json.loads(json_file.read())
            LOGGER.info(
                f"Loaded json for year {year}: nvdcve-1.1-{year}.json.gz")

            # Validate -- will raise a ValidationError if not valid
            try:
                validate(nvd_json, self.SCHEMA)
                LOGGER.info("Validation complete")
            except ValidationError as ve:
                LOGGER.error(ve)
                pytest.fail("Validation error occurred")
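The validate call above comes from the jsonschema package; it raises ValidationError when the instance does not conform. A toy illustration with a hypothetical schema (not the NVD one):

from jsonschema import validate
from jsonschema.exceptions import ValidationError

schema = {
    "type": "object",
    "properties": {"CVE_Items": {"type": "array"}},
    "required": ["CVE_Items"],
}

try:
    validate({"CVE_Items": []}, schema)      # conforms, no exception
    validate({"CVE_Items": "oops"}, schema)  # raises ValidationError
except ValidationError as ve:
    print(ve.message)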
Example #6
    def scan_file(self) -> Dict[ProductInfo, TriageData]:
        LOGGER.info(f"Processing SBOM {self.filename} of type {self.type.upper()}")
        try:
            if self.type == "spdx":
                spdx = SPDXParser()
                modules = spdx.parse(self.filename)
            elif self.type == "cyclonedx":
                cyclone = CycloneParser()
                modules = cyclone.parse(self.filename)
            elif self.type == "swid":
                swid = SWIDParser()
                modules = swid.parse(self.filename)
            else:
                modules = []
        except (KeyError, FileNotFoundError, ET.ParseError) as e:
            LOGGER.debug(e, exc_info=True)
            modules = []

        LOGGER.debug(
            f"The number of modules identified in SBOM - {len(modules)}\n{modules}"
        )

        # Now process list of modules to create [vendor, product, version] tuples
        parsed_data: List[ProductInfo] = []
        for m in modules:
            product, version = m[0], m[1]
            if version != "":
                # Now add vendor to create product record....
                # print (f"Find vendor for {product} {version}")
                vendor = self.get_vendor(product)
                if vendor is not None:
                    parsed_data.append(ProductInfo(vendor, product, version))
                    # print(vendor,product,version)

        for row in parsed_data:
            self.sbom_data[row]["default"] = {
                "remarks": Remarks.NewFound,
                "comments": "",
                "severity": "",
            }
            self.sbom_data[row]["paths"] = set(map(lambda x: x.strip(), "".split(",")))

        LOGGER.debug(f"SBOM Data {self.sbom_data}")
        return self.sbom_data
Example #7
    def test_json_validation(self, year):
        """ Validate latest nvd json file against their published schema """
        # Open the latest nvd file on disk
        with gzip.open(
                os.path.join(DISK_LOCATION_DEFAULT,
                             f"nvdcve-1.1-{year}.json.gz"),
                "rb",
        ) as json_file:
            nvd_json = json.loads(json_file.read())
            LOGGER.info(
                f"Loaded json for year {year}: nvdcve-1.1-{year}.json.gz")

            # Validate -- will raise a ValidationError if not valid
            try:
                validate(nvd_json, self.SCHEMA)
                LOGGER.info("Validation complete")
            except ValidationError as ve:
                LOGGER.error(ve)
                pytest.fail("Validation error occurred")
Example #8
    def cve_info(
        self,
        all_cve_data: Dict[ProductInfo, CVEData],
    ):
        """Produces the available fixes' info"""

        cve_data = format_output(all_cve_data)
        for cve in cve_data:
            if cve["cve_number"] != "UNKNOWN":
                json_data = self.get_data(cve["cve_number"], cve["product"])
                try:
                    if not json_data:
                        raise KeyError

                    package_state = json_data["package_state"]
                    affected_releases = json_data["affected_release"]

                    no_fix = True

                    for package in affected_releases:
                        if (package["product_name"] ==
                                f"Red Hat Enterprise Linux {self.distro_codename}"
                            ):
                            package_data = self.parse_package_data(
                                package["package"])
                            LOGGER.info(
                                f'{cve["product"]}: {cve["cve_number"]} - Status: Fixed - Fixed package: {package_data}'
                            )
                            no_fix = False

                    for package in package_state:
                        if (package["product_name"] ==
                                f"Red Hat Enterprise Linux {self.distro_codename}"
                            ):
                            package_data = self.parse_package_data(
                                package["package_name"])
                            LOGGER.info(
                                f'{cve["product"]}: {cve["cve_number"]} - Status: {package["fix_state"]} - Related package: {package_data}'
                            )
                            no_fix = False

                    if no_fix:
                        LOGGER.info(
                            f'{cve["product"]}: No known fix for {cve["cve_number"]}.'
                        )

                except (KeyError, TypeError):
                    if cve["cve_number"] != "UNKNOWN":
                        LOGGER.info(
                            f'{cve["product"]}: No known fix for {cve["cve_number"]}.'
                        )
Example #9
def check_latest_version():
    """Checks for the latest version available at PyPI."""

    name = "cve-bin-tool"
    url = f"https://pypi.org/pypi/{name}/json"
    try:
        with request.urlopen(url) as resp:
            package_json = json.load(resp)
            pypi_version = package_json["info"]["version"]
            if pypi_version == VERSION:
                LOGGER.info(
                    textwrap.dedent("""
                                *********************************************************
                                Yay! you are running the latest version.
                                But you can try the latest development version at GitHub.
                                URL: https://github.com/intel/cve-bin-tool
                                *********************************************************
                                """))
            else:
                # TODO: In the future, mark this message with color (prefer yellow or red)
                LOGGER.info(
                    f"You are running version {VERSION} of {name} but the latest PyPI Version is {pypi_version}."
                )
                if version.parse(VERSION) < version.parse(pypi_version):
                    LOGGER.info(
                        "Alert: We recommend using the latest stable release.")
    except Exception as error:
        LOGGER.warning(
            textwrap.dedent(f"""
        -------------------------- Can't check for the latest version ---------------------------
        warning: unable to access 'https://pypi.org/pypi/{name}'
        Exception details: {error}
        Please make sure you have a working internet connection or try again later. 
        """))
Example #10
    def check_available_fix(self):
        if self.distro_info != "local":
            distro_name, distro_codename = self.distro_info.split("-")
        else:
            distro_name = distro.id()
            distro_codename = distro.codename()

        if distro_name in DEBIAN_DISTROS:
            debian_tracker = DebianCVETracker(distro_name, distro_codename,
                                              self.is_backport)
            debian_tracker.cve_info(self.all_cve_data)
        elif distro_name in REDHAT_DISTROS:
            redhat_tracker = RedhatCVETracker(distro_name, distro_codename)
            redhat_tracker.cve_info(self.all_cve_data)
        elif self.is_backport:
            LOGGER.info(
                f"CVE Binary Tool doesn't support Backported Fix Utility for {distro_name.capitalize()} at the moment."
            )
        else:
            LOGGER.info(
                f"CVE Binary Tool doesn't support Available Fix Utility for {distro_name.capitalize()} at the moment."
            )
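The distro_name and distro_codename fallbacks above use the third-party distro package. A quick sketch of those calls (the output values shown are examples):

import distro

print(distro.id())        # e.g. "ubuntu"
print(distro.codename())  # e.g. "jammy"

Both return an empty string when no distribution data can be determined.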
Example #11
            if not self.years():
                raise EmptyCache(self.cachedir)
        self.LOGGER.debug(f"Years present: {self.years()}")
        return self

    def __exit__(self, _exc_type, _exc_value, _traceback):
        pass

    def clear_cached_data(self):
        if os.path.exists(self.cachedir):
            self.LOGGER.warning(f"Deleting cachedir {self.cachedir}")
            shutil.rmtree(self.cachedir)


def refresh():
    with CVEDB():
        pass


if __name__ == "__main__":
    LOGGER.debug("Experimenting...")
    cvedb = CVEDB(os.path.join(os.path.expanduser("~"), ".cache", "cvedb"))
    # cvedb.refresh()
    # print(cvedb.years())
    # connection = cvedb.init_database()
    # cvedb.populate_db(connection)
    # cvedb.supplement_curl()
    LOGGER.setLevel(logging.INFO)
    LOGGER.info("Getting cves for curl 7.34.0")
    LOGGER.info(cvedb.get_cves("haxx", "curl", "7.34.0"))
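The refresh() helper works because CVEDB implements the context-manager protocol shown above: __enter__ performs the cache validation and __exit__ is a no-op. A stripped-down sketch of that pattern (hypothetical class, not the project's):

class CacheHandle:
    def __enter__(self):
        print("validate cache")  # setup work happens here
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        pass  # nothing to clean up


def refresh():
    with CacheHandle():
        pass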
Example #12
def main(argv=None):
    """Scan a binary file for certain open source libraries that may have CVEs"""
    argv = argv or sys.argv

    # Reset logger level to info
    LOGGER.setLevel(logging.INFO)

    parser = argparse.ArgumentParser(
        prog="cve-bin-tool",
        description=textwrap.dedent("""
            The CVE Binary Tool scans for a number of common, vulnerable open source
            components (openssl, libpng, libxml2, expat and a few others) to let you know
            if a given directory or binary file includes common libraries with known
            vulnerabilities.
            """),
        epilog=textwrap.fill(
            f'Available checkers: {", ".join(VersionScanner.available_checkers())}'
        ) + "\n\nPlease disclose issues responsibly!",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    input_group = parser.add_argument_group("Input")
    input_group.add_argument("directory",
                             help="directory to scan",
                             nargs="?",
                             default=None)
    input_group.add_argument(
        "-e",
        "--exclude",
        action=StringToListAction,
        help="Comma separated Exclude directory path",
        default=None,
    )

    input_group.add_argument(
        "-i",
        "--input-file",
        action="store",
        default="",
        help="provide input filename",
    )
    input_group.add_argument("-C",
                             "--config",
                             action="store",
                             default="",
                             help="provide config file")

    output_group = parser.add_argument_group("Output")
    output_group.add_argument("-q",
                              "--quiet",
                              action="store_true",
                              help="suppress output")
    output_group.add_argument(
        "-l",
        "--log",
        help="log level (default: info)",
        dest="log_level",
        action="store",
        choices=["debug", "info", "warning", "error", "critical"],
    )
    output_group.add_argument(
        "-o",
        "--output-file",
        action="store",
        help="provide output filename (default: output to stdout)",
    )
    output_group.add_argument(
        "--html-theme",
        action="store",
        help="provide custom theme directory for HTML Report",
    )
    output_group.add_argument(
        "-f",
        "--format",
        action="store",
        choices=["csv", "json", "console", "html", "pdf"],
        help="update output format (default: console)",
    )
    output_group.add_argument(
        "-c",
        "--cvss",
        action="store",
        help=
        "minimum CVSS score (as integer in range 0 to 10) to report (default: 0)",
    )
    output_group.add_argument(
        "-S",
        "--severity",
        action="store",
        choices=["low", "medium", "high", "critical"],
        help="minimum CVE severity to report (default: low)",
    )
    parser.add_argument("-V", "--version", action="version", version=VERSION)
    parser.add_argument(
        "-u",
        "--update",
        action="store",
        choices=["now", "daily", "never", "latest"],
        help="update schedule for NVD database (default: daily)",
    )
    parser.add_argument(
        "-x",
        "--extract",
        action="store_true",
        help="autoextract compressed files",
    )
    parser.add_argument(
        "--disable-version-check",
        action="store_true",
        help="skips checking for a new version",
    )

    checker_group = parser.add_argument_group("Checkers")
    checker_group.add_argument(
        "-s",
        "--skips",
        dest="skips",
        action=StringToListAction,
        type=str,
        help="comma-separated list of checkers to disable",
    )
    checker_group.add_argument(
        "-r",
        "--runs",
        dest="runs",
        action=StringToListAction,
        type=str,
        help="comma-separated list of checkers to enable",
    )
    defaults = {
        "directory": "",
        "exclude": [],
        "input_file": "",
        "log_level": "info",
        "format": "console",
        "cvss": 0,
        "severity": "low",
        "update": "daily",
        "extract": True,
        "disable_version_check": False,
        "skips": "",
        "runs": "",
        "quiet": False,
        "output_file": "",
        "html_theme": "",
    }

    with ErrorHandler(mode=ErrorMode.NoTrace):
        raw_args = parser.parse_args(argv[1:])
        args = {key: value for key, value in vars(raw_args).items() if value}

    configs = {}
    if args.get("config"):
        conf = ConfigParser(args["config"])
        configs = conf.parse_config()

    args = ChainMap(args, configs, defaults)

    # logging and error related settings
    if args["log_level"]:
        LOGGER.setLevel(args["log_level"].upper())

    if args["quiet"]:
        LOGGER.setLevel(logging.CRITICAL)

    if 0 < LOGGER.level <= 10:
        error_mode = ErrorMode.FullTrace
    elif LOGGER.level >= 50:
        error_mode = ErrorMode.NoTrace
    else:
        error_mode = ErrorMode.TruncTrace

    if platform.system() != "Linux":
        warning_nolinux = """
                          **********************************************
                          Warning: this utility was developed for Linux.
                          You may need to install additional utilities
                          to use it on other operating systems.
                          **********************************************
                          """
        LOGGER.warning(warning_nolinux)

    # Database update related settings
    # Connect to the database
    cvedb_orig = CVEDB(version_check=not args["disable_version_check"],
                       error_mode=error_mode)

    # if OLD_CACHE_DIR (from cvedb.py) exists, print warning
    if os.path.exists(OLD_CACHE_DIR):
        LOGGER.warning(
            f"Obsolete cache dir {OLD_CACHE_DIR} is no longer needed and can be removed."
        )

    # Clear data if -u now is set
    if args["update"] == "now":
        cvedb_orig.clear_cached_data()

    if args["update"] == "latest":
        cvedb_orig.refresh_cache_and_update_db()

    # update db if needed
    if args["update"] != "never":
        cvedb_orig.get_cvelist_if_stale()
    else:
        LOGGER.warning("Not verifying CVE DB cache")
        if not cvedb_orig.nvd_years():
            with ErrorHandler(mode=error_mode, logger=LOGGER):
                raise EmptyCache(cvedb_orig.cachedir)

    # CVE Database validation
    if not cvedb_orig.check_cve_entries():
        with ErrorHandler(mode=error_mode, logger=LOGGER):
            raise CVEDataMissing("No data in CVE Database")

    # Input validation
    if not args["directory"] and not args["input_file"]:
        parser.print_usage()
        with ErrorHandler(logger=LOGGER, mode=ErrorMode.NoTrace):
            raise InsufficientArgs(
                "Please specify a directory to scan or an input file required")

    if args["directory"] and not os.path.exists(args["directory"]):
        parser.print_usage()
        with ErrorHandler(logger=LOGGER, mode=ErrorMode.NoTrace):
            raise FileNotFoundError("Directory/File doesn't exist")

    # Checkers related settings
    skips = args["skips"]
    if args["runs"]:
        runs = args["runs"]
        skips = list(
            map(
                lambda checker: checker.name,
                filter(
                    lambda checker: checker.name not in runs,
                    pkg_resources.iter_entry_points("cve_bin_tool.checker"),
                ),
            ))

    # CSVScanner related settings
    score = 0
    if args["severity"]:
        # Set minimum CVSS score based on severity
        cvss_score = {"low": 0, "medium": 4, "high": 7, "critical": 9}
        score = cvss_score[args["severity"]]
    if int(args["cvss"]) > 0:
        score = int(args["cvss"])

    with CVEScanner(score=score) as cve_scanner:
        triage_data: TriageData
        total_files: int = 0
        parsed_data: Dict[ProductInfo, TriageData] = {}

        if args["input_file"]:
            input_engine = InputEngine(args["input_file"],
                                       logger=LOGGER,
                                       error_mode=error_mode)
            parsed_data = input_engine.parse_input()
            if not args["directory"]:
                for product_info, triage_data in parsed_data.items():
                    LOGGER.warning(f"{product_info}, {triage_data}")
                    cve_scanner.get_cves(product_info, triage_data)
        if args["directory"]:
            version_scanner = VersionScanner(
                should_extract=args["extract"],
                exclude_folders=args["exclude"],
                error_mode=error_mode,
            )
            version_scanner.remove_skiplist(skips)
            version_scanner.print_checkers()
            for scan_info in version_scanner.recursive_scan(args["directory"]):
                if scan_info:
                    product_info, path = scan_info
                    LOGGER.debug(f"{product_info}: {path}")
                    triage_data = parsed_data.get(product_info,
                                                  {"default": {}})
                    # Ignore paths from triage_data if we are scanning directory
                    triage_data["paths"] = {path}
                    cve_scanner.get_cves(product_info, triage_data)
            total_files = version_scanner.total_scanned_files

        LOGGER.info("")
        LOGGER.info("Overall CVE summary: ")
        if args["input_file"]:
            LOGGER.info(
                f"There are {cve_scanner.products_with_cve} products with known CVEs detected"
            )
        else:
            LOGGER.info(
                f"There are {cve_scanner.products_with_cve} files with known CVEs detected"
            )
        if cve_scanner.products_with_cve > 0 or (args["format"] == "html"
                                                 or args["format"] == "pdf"):
            affected_string = ", ".join(
                map(
                    lambda product_version: "".join(str(product_version)),
                    cve_scanner.affected(),
                ))
            LOGGER.info(f"Known CVEs in {affected_string}:")

            # Create an OutputEngine object
            output = OutputEngine(
                all_cve_data=cve_scanner.all_cve_data,
                scanned_dir=args["directory"],
                filename=args["output_file"],
                themes_dir=args["html_theme"],
                products_with_cve=cve_scanner.products_with_cve,
                products_without_cve=cve_scanner.products_without_cve,
                total_files=total_files,
            )

            if not args["quiet"]:
                output.output_file(args["format"])

        # Use the number of products with known cves as error code
        # as requested by folk planning to automate use of this script.
        # If no cves found, then the program exits cleanly.
        return cve_scanner.products_with_cve
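One detail worth calling out: the ChainMap(args, configs, defaults) line gives command-line arguments precedence over config-file values, which in turn take precedence over the built-in defaults. A self-contained illustration (the values are made up):

from collections import ChainMap

defaults = {"format": "console", "update": "daily", "cvss": 0}
configs = {"format": "json"}   # from a config file
cli_args = {"update": "now"}   # non-empty CLI options only

args = ChainMap(cli_args, configs, defaults)
print(args["update"])  # "now"   -- CLI wins
print(args["format"])  # "json"  -- config file wins over the default
print(args["cvss"])    # 0       -- default used

Filtering out falsy values before building the ChainMap (as the dict comprehension above does) is what lets unset CLI options fall through to the config file and the defaults.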
Example #13
    def setup_class(cls):
        """Initialize egg so all checkers can be found"""
        # Update egg if installed in development mode
        if IS_DEVELOP():
            LOGGER.info("Updating egg_info")
            update_egg()
Example #14
    def parse_list(self):
        input_file = self.input_file
        self.check_file()

        if not input_file.endswith("requirements.txt"):
            if distro.id() not in SUPPORTED_DISTROS:
                LOGGER.warning(
                    f"Package list support only available on {','.join(SUPPORTED_DISTROS)}!"
                )
                return {}

            system_packages = []

            LOGGER.info(f"Scanning {distro.id().capitalize()} package list.")

            if distro.id() in DEB_DISTROS:
                installed_packages = run(
                    [
                        "dpkg-query",
                        "--show",
                        '--showformat={"name": "${binary:Package}", "version": "${Version}"}, ',
                    ],
                    stdout=PIPE,
                )
                installed_packages = json.loads(
                    f"[{installed_packages.stdout.decode('utf-8')[0:-2]}]")
            elif distro.id() in RPM_DISTROS:
                installed_packages = run(
                    [
                        "rpm",
                        "--query",
                        "--all",
                        "--queryformat",
                        '{"name": "%{NAME}", "version": "%{VERSION}"\\}, ',
                    ],
                    stdout=PIPE,
                )
                installed_packages = json.loads(
                    f"[{installed_packages.stdout.decode('utf-8')[0:-2]}]")
            elif distro.id() in PACMAN_DISTROS:
                installed_packages = []

                installed_packages_output = run(
                    ["pacman", "--query", "--explicit"],
                    stdout=PIPE,
                )

                installed_packages_output = installed_packages_output.stdout.decode(
                    "utf-8").splitlines()

                dict_keys = ["name", "version"]
                for installed_package in installed_packages_output:
                    package_details = installed_package.split(" ")
                    installed_package_dict = dict(
                        zip(dict_keys, package_details))
                    installed_packages.append(installed_package_dict)

            with open(input_file) as req:
                lines = req.readlines()
            for line in lines:
                system_packages.append(re.split("\n", line)[0])

            for installed_package in installed_packages:
                if installed_package["name"] in system_packages:
                    self.package_names_without_vendor.append(installed_package)

        else:
            LOGGER.info("Scanning python package list.")
            txt_package_names = []

            installed_packages_json = run(
                ["pip", "list", "--format", "json"],
                stdout=PIPE,
            )
            installed_packages = json.loads(
                installed_packages_json.stdout.decode("utf-8"))

            with open(input_file) as txtfile:
                lines = txtfile.readlines()

                for line in lines:
                    txt_package_names.append(re.split(">|\\[|;|=|\n", line)[0])
                for installed_package in installed_packages:
                    package_name = installed_package["name"].lower()
                    if package_name in txt_package_names:
                        self.package_names_without_vendor.append(
                            installed_package)

        cve_db = CVEDB()
        vendor_package_pairs = cve_db.get_vendor_product_pairs(
            self.package_names_without_vendor)

        self.add_vendor(vendor_package_pairs)
        self.parse_data()
        return self.parsed_data_with_vendor
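The pip branch above relies on pip's machine-readable output. A minimal sketch of that pattern on its own (assumes pip is on PATH):

import json
from subprocess import PIPE, run

result = run(["pip", "list", "--format", "json"], stdout=PIPE)
packages = json.loads(result.stdout.decode("utf-8"))
for pkg in packages[:3]:
    print(pkg["name"], pkg["version"])

"pip list --format json" emits a JSON array of {"name": ..., "version": ...} objects, which is why json.loads can consume it directly.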
Example #15
def main(argv=None):

    argv = argv or sys.argv

    parser = argparse.ArgumentParser(
        prog="helper-script",
        description=textwrap.dedent("""
                Helps contributors who want to write a new cve-bin-tool checker find common filenames,
                version strings, and other necessary data for building a binary checker
                """),
    )
    # scan directory args
    parser.add_argument(
        "filenames",
        help="files to scan",
        nargs="+",
        default=[],
    )

    # product-name args
    parser.add_argument(
        "-p",
        "--product",
        help="provide product-name that would be searched",
        dest="product_name",
        action="store",
        default=None,
    )

    # version-name args
    parser.add_argument(
        "-v",
        "--version",
        help="provide version that would be searched",
        dest="version_number",
        action="store",
        default=None,
    )

    # log level args
    parser.add_argument(
        "-l",
        "--log",
        help="log level (default: warning)",
        dest="log_level",
        action="store",
        choices=["debug", "info", "warning", "error", "critical"],
        default="warning",
    )

    # contains-patterns string length args
    parser.add_argument(
        "--string-length",
        help=
        "changes the output string-length for CONTAINS_PATTERNS (default: %(default)s)",
        type=int,
        action="store",
        default=40,
    )

    with ErrorHandler(mode=ErrorMode.NoTrace):
        raw_args = parser.parse_args(argv[1:])
        args = {key: value for key, value in vars(raw_args).items() if value}
        defaults = {key: parser.get_default(key) for key in vars(raw_args)}

    args = ChainMap(args, defaults)

    LOGGER.setLevel(args["log_level"].upper())

    LOGGER.debug(f"Given filenames: {args['filenames']}")
    LOGGER.info(f"Scanning only the first filename: '{args['filenames'][0]}'")
    hs = HelperScript(
        args["filenames"][0],
        product_name=args["product_name"],
        version_number=args["version_number"],
        string_length=args["string_length"],
    )

    # Parsing, Extracting and Searching for version-strings
    hs.extract_and_parse_file(args["filenames"][0])

    # output on console
    hs.output()