Code example #1
def compile_all(target: str, **kwargs: str) -> List[CryticCompile]:
    """Given a direct or glob pattern target, compiles all underlying sources and returns
    all the relevant instances of CryticCompile.

    Args:
        target (str): A string representing a file/directory path or glob pattern denoting where compilation should occur.
        **kwargs: optional arguments. Used: "solc_standard_json"

    Raises:
        ValueError: If the target could not be compiled

    Returns:
        List[CryticCompile]: Returns a list of CryticCompile instances for all compilations which occurred.
    """
    use_solc_standard_json = kwargs.get("solc_standard_json", False)

    # Attempt to perform glob expansion of target/filename
    globbed_targets = glob.glob(target, recursive=True)

    # Check if the target refers to a valid target already.
    # If it does not, we assume it's a glob pattern.
    compilations: List[CryticCompile] = []
    if os.path.isfile(target) or is_supported(target):
        if target.endswith(".zip"):
            compilations = load_from_zip(target)
        elif target.endswith(".zip.base64"):
            with tempfile.NamedTemporaryFile() as tmp:
                with open(target, encoding="utf8") as target_file:
                    tmp.write(base64.b64decode(target_file.read()))
                    compilations = load_from_zip(tmp.name)
        else:
            compilations.append(CryticCompile(target, **kwargs))
    elif os.path.isdir(target) or len(globbed_targets) > 0:
        # We create a new glob to find solidity files at this path (in case this is a directory)
        filenames = glob.glob(os.path.join(target, "*.sol"))
        if not filenames:
            filenames = glob.glob(os.path.join(target, "*.vy"))
            if not filenames:
                filenames = globbed_targets

        # Determine if we're using --standard-solc option to
        # aggregate many files into a single compilation.
        if use_solc_standard_json:
            # If we're using standard solc, we generate a single input
            # that combines all files into one compilation
            standard_json = solc_standard_json.SolcStandardJson()
            for filename in filenames:
                standard_json.add_source_file(filename)
            compilations.append(CryticCompile(standard_json, **kwargs))
        else:
            # We compile each file and add it to our compilations.
            for filename in filenames:
                compilations.append(CryticCompile(filename, **kwargs))
    else:
        raise ValueError(f"Unresolved target: {str(target)}")

    return compilations
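
A minimal usage sketch for compile_all, assuming the function is exposed at the crytic-compile package root; the import path, target paths, and keyword argument shown here are illustrative assumptions, not taken from the example above.

# Hypothetical usage of compile_all; paths and the import location are assumptions.
from crytic_compile import compile_all

# Compile every Solidity file matched by a recursive glob as separate compilations.
compilations = compile_all("contracts/**/*.sol")
print(f"{len(compilations)} compilation(s) produced")

# Aggregate all files found in a directory into a single solc standard-json compilation.
aggregated = compile_all("contracts", solc_standard_json=True)
print(f"{len(aggregated)} aggregated compilation(s) produced")
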
Code example #2
def test_parsing(test_item: Test):
    flavors = ["compact"]
    if not test_item.disable_legacy:
        flavors += ["legacy"]
    for version, flavor in test_item.versions_with_flavors:
        test_file = os.path.join(
            TEST_ROOT, "compile",
            f"{test_item.test_file}-{version}-{flavor}.zip")
        expected_file = os.path.join(
            TEST_ROOT, "expected",
            f"{test_item.test_file}-{version}-{flavor}.json")

        cc = load_from_zip(test_file)[0]

        sl = Slither(
            cc,
            solc_force_legacy_json=flavor == "legacy",
            disallow_partial=True,
            skip_analyze=True,
        )

        actual = generate_output(sl)

        try:
            with open(expected_file, "r", encoding="utf8") as f:
                expected = json.load(f)
        except OSError:
            pytest.xfail("the file for this test was not generated")
            raise

        diff = DeepDiff(expected,
                        actual,
                        ignore_order=True,
                        verbose_level=2,
                        view="tree")

        if diff:
            for change in diff.get("values_changed", []):
                path_list = re.findall(r"\['(.*?)'\]", change.path())
                path = "_".join(path_list)
                with open(
                        f"test_artifacts/{test_item.test_file}_{path}_expected.dot",
                        "w",
                        encoding="utf8",
                ) as f:
                    f.write(change.t1)
                with open(
                        f"test_artifacts/{test_item.test_file}_{version}_{flavor}_{path}_actual.dot",
                        "w",
                        encoding="utf8",
                ) as f:
                    f.write(change.t2)

        assert not diff, diff.pretty()

        sl = Slither(cc,
                     solc_force_legacy_json=flavor == "legacy",
                     disallow_partial=True)
        sl.register_printer(Echidna)
        sl.run_printers()
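
The loop above relies on test_item.versions_with_flavors; below is a minimal sketch of a Test-like container that could produce those (version, flavor) pairs. It is a hypothetical stand-in with assumed field names, not the project's actual class.

# Hypothetical stand-in for the Test helper used above; field names are assumptions.
from dataclasses import dataclass
from typing import List, Tuple


@dataclass
class Test:
    test_file: str
    solc_versions: List[str]
    disable_legacy: bool = False

    @property
    def versions_with_flavors(self) -> List[Tuple[str, str]]:
        # Pair every solc version with the AST flavors the test should cover.
        flavors = ["compact"] if self.disable_legacy else ["compact", "legacy"]
        return [(version, flavor) for version in self.solc_versions for flavor in flavors]
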
Code example #3
def test_parsing(test_item: Item):
    flavor = "legacy" if test_item.is_legacy else "compact"
    test_file = os.path.join(
        TEST_ROOT, "compile",
        f"{test_item.test_id}-{test_item.solc_ver}-{flavor}.zip")
    expected_file = os.path.join(
        TEST_ROOT, "expected",
        f"{test_item.test_id}-{test_item.solc_ver}-{flavor}.json")

    if id_test(test_item) in XFAIL:
        pytest.xfail("this test needs to be fixed")

    # set_solc(test_item)

    cc = load_from_zip(test_file)[0]

    sl = Slither(
        cc,
        solc_force_legacy_json=test_item.is_legacy,
        disallow_partial=True,
        skip_analyze=True,
    )

    actual = generate_output(sl)

    try:
        with open(expected_file, "r") as f:
            expected = json.load(f)
    except OSError:
        pytest.xfail("the file for this test was not generated")
        raise

    diff = DeepDiff(expected,
                    actual,
                    ignore_order=True,
                    verbose_level=2,
                    view="tree")

    if diff:
        for change in diff.get("values_changed", []):
            path_list = re.findall(r"\['(.*?)'\]", change.path())
            path = "_".join(path_list)
            with open(
                    f"test_artifacts/{id_test(test_item)}_{path}_expected.dot",
                    "w") as f:
                f.write(change.t1)
            with open(f"test_artifacts/{id_test(test_item)}_{path}_actual.dot",
                      "w") as f:
                f.write(change.t2)

    assert not diff, diff.pretty()

    sl = Slither(cc,
                 solc_force_legacy_json=test_item.is_legacy,
                 disallow_partial=True)
    sl.register_printer(Echidna)
    sl.run_printers()
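
id_test and XFAIL act as a skip list for known-broken cases in the test above. A hedged sketch of what they might look like follows; the naming scheme mirrors the zip/json filenames built earlier, and the concrete ids are left empty on purpose rather than invented.

# Hypothetical bookkeeping for expected failures; names and id format are assumptions.
XFAIL: set = set()  # populate with ids of cases that are known to fail


def id_test(test_item) -> str:
    # Build the same "<test_id>-<solc_ver>-<flavor>" id used for the artifact filenames.
    flavor = "legacy" if test_item.is_legacy else "compact"
    return f"{test_item.test_id}-{test_item.solc_ver}-{flavor}"
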
Code example #4
def _generate_test(test_item: Test, skip_existing=False):
    flavors = ["compact"]
    if not test_item.disable_legacy:
        flavors += ["legacy"]
    for version, flavor in test_item.versions_with_flavors:
        test_file = os.path.join(
            TEST_ROOT, "compile",
            f"{test_item.test_file}-{version}-{flavor}.zip")
        expected_file = os.path.join(
            TEST_ROOT, "expected",
            f"{test_item.test_file}-{version}-{flavor}.json")

        if skip_existing:
            if os.path.isfile(expected_file):
                continue

        try:
            cc = load_from_zip(test_file)[0]
            sl = Slither(
                cc,
                solc_force_legacy_json=flavor == "legacy",
                disallow_partial=True,
                skip_analyze=True,
            )
        # pylint: disable=broad-except
        except Exception as e:
            print(e)
            print(test_item)
            print(f"{expected_file} failed")
            continue

        actual = generate_output(sl)
        print(f"Generate {expected_file}")

        # pylint: disable=no-member
        Path(expected_file).parents[0].mkdir(parents=True, exist_ok=True)

        with open(expected_file, "w", encoding="utf8") as f:
            json.dump(actual, f, indent="  ")
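
A hedged sketch of how _generate_test might be driven to regenerate missing expected JSON files; ALL_TESTS is an assumed list of Test cases defined alongside the tests and is not shown above.

# Hypothetical driver; ALL_TESTS is an assumed list of Test cases defined elsewhere.
if __name__ == "__main__":
    for item in ALL_TESTS:
        # Only produce expected files that do not exist yet.
        _generate_test(item, skip_existing=True)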