Example #1
def _handle_bytecode(crytic_compile: "CryticCompile", target: str,
                     result_b: bytes):
    # There is no direct API to get the bytecode from Etherscan
    # The page layout changes from time to time; for now we use simple string parsing, which is not robust
    begin = """Search Algorithm">\nSimilar Contracts</button>\n"""
    begin += """<div id="dividcode">\n<pre class=\'wordwrap\' style=\'height: 15pc;\'>0x"""
    result = result_b.decode("utf8")
    # Removing everything before the begin string
    result = result[result.find(begin) + len(begin):]
    bytecode = result[:result.find("<")]

    contract_name = f"Contract_{target}"

    contract_filename = Filename(absolute="", relative="", short="", used="")

    compilation_unit = CompilationUnit(crytic_compile, str(target))

    compilation_unit.contracts_names.add(contract_name)
    compilation_unit.contracts_filenames[contract_name] = contract_filename
    compilation_unit.abis[contract_name] = {}
    compilation_unit.bytecodes_init[contract_name] = bytecode
    compilation_unit.bytecodes_runtime[contract_name] = ""
    compilation_unit.srcmaps_init[contract_name] = []
    compilation_unit.srcmaps_runtime[contract_name] = []

    compilation_unit.compiler_version = CompilerVersion(compiler="unknown",
                                                        version="",
                                                        optimized=None)

    crytic_compile.bytecode_only = True
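The string markers above are brittle, as the comment admits. Below is a hedged alternative sketch (not the project's code) that anchors on the same "dividcode" <pre> block with a regular expression, so small changes in the surrounding markup matter less; the HTML structure itself is still an assumption taken from the parsing above.

import re

def _extract_bytecode(page: str) -> str:
    # Capture the hex blob that follows the opening <pre> tag inside the
    # "dividcode" container; return "" if the page layout changed again.
    match = re.search(
        r'<div id="dividcode">\s*<pre[^>]*>0x([0-9a-fA-F]+)', page, re.DOTALL
    )
    return match.group(1) if match else ""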
Example #2
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
        """Compile the target

        Args:
            crytic_compile (CryticCompile): CryticCompile object to populate
            **kwargs: optional arguments. Used "vyper"

        """
        target = self._target

        vyper = kwargs.get("vyper", "vyper")

        targets_json = _run_vyper(target, vyper)

        assert "version" in targets_json
        compilation_unit = CompilationUnit(crytic_compile, str(target))

        compilation_unit.compiler_version = CompilerVersion(
            compiler="vyper", version=targets_json["version"], optimized=False
        )

        assert target in targets_json

        info = targets_json[target]
        contract_filename = convert_filename(target, _relative_to_short, crytic_compile)

        contract_name = Path(target).parts[-1]

        compilation_unit.contracts_names.add(contract_name)
        compilation_unit.filename_to_contracts[contract_filename].add(contract_name)
        compilation_unit.abis[contract_name] = info["abi"]
        compilation_unit.bytecodes_init[contract_name] = info["bytecode"].replace("0x", "")
        compilation_unit.bytecodes_runtime[contract_name] = info["bytecode_runtime"].replace(
            "0x", ""
        )
        # Vyper does not provide the source mapping for the init bytecode
        compilation_unit.srcmaps_init[contract_name] = []
        # info["source_map"]["pc_pos_map"] contains the source mapping in a simpler format
        # However pc_pos_map_compressed" seems to follow solc's format, so for convenience
        # We store the same
        # TODO: create SourceMapping class, so that srcmaps_runtime would store an class
        # That will give more flexebility to different compilers
        compilation_unit.srcmaps_runtime[contract_name] = info["source_map"][
            "pc_pos_map_compressed"
        ]

        crytic_compile.filenames.add(contract_filename)
        compilation_unit.filenames.add(contract_filename)

        # Natspec not yet handled for vyper
        compilation_unit.natspec[contract_name] = Natspec({}, {})

        ast = _get_vyper_ast(target, vyper)
        compilation_unit.asts[contract_filename.absolute] = ast
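For reference, this is the shape of targets_json that the code above consumes, inferred purely from the keys it reads; the values are illustrative placeholders, not real vyper output.

targets_json = {
    "version": "0.2.x",                        # asserted before use
    "examples/token.vy": {                     # keyed by the target path
        "abi": [],
        "bytecode": "0x...",
        "bytecode_runtime": "0x...",
        "source_map": {"pc_pos_map_compressed": "..."},
    },
}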
Example #3
def _load_from_compile_legacy2(crytic_compile: "CryticCompile", loaded_json: Dict) -> None:
    """Load from old (old) export

    Args:
        crytic_compile (CryticCompile): CryticCompile object to populate
        loaded_json (Dict): Json representation of the CryticCompile object
    """

    for key, compilation_unit_json in loaded_json["compilation_units"].items():
        compilation_unit = CompilationUnit(crytic_compile, key)
        compilation_unit.compiler_version = CompilerVersion(
            compiler=compilation_unit_json["compiler"]["compiler"],
            version=compilation_unit_json["compiler"]["version"],
            optimized=compilation_unit_json["compiler"]["optimized"],
        )
        for contract_name, contract in compilation_unit_json["contracts"].items():
            compilation_unit.contracts_names.add(contract_name)
            filename = Filename(
                absolute=contract["filenames"]["absolute"],
                relative=contract["filenames"]["relative"],
                short=contract["filenames"]["short"],
                used=contract["filenames"]["used"],
            )
            compilation_unit.filename_to_contracts[filename].add(contract_name)

            compilation_unit.abis[contract_name] = contract["abi"]
            compilation_unit.bytecodes_init[contract_name] = contract["bin"]
            compilation_unit.bytecodes_runtime[contract_name] = contract["bin-runtime"]
            compilation_unit.srcmaps_init[contract_name] = contract["srcmap"].split(";")
            compilation_unit.srcmaps_runtime[contract_name] = contract["srcmap-runtime"].split(";")
            compilation_unit.libraries[contract_name] = contract["libraries"]

            userdoc = contract.get("userdoc", {})
            devdoc = contract.get("devdoc", {})
            compilation_unit.natspec[contract_name] = Natspec(userdoc, devdoc)

            if contract["is_dependency"]:
                crytic_compile.dependencies.add(filename.absolute)
                crytic_compile.dependencies.add(filename.relative)
                crytic_compile.dependencies.add(filename.short)
                crytic_compile.dependencies.add(filename.used)
        compilation_unit.asts = compilation_unit_json["asts"]

        if "filenames" in compilation_unit_json:
            compilation_unit.filenames = {
                _convert_dict_to_filename(filename)
                for filename in compilation_unit_json["filenames"]
            }
        else:
            # For legacy code, we recover the filenames from the contracts list
            # This is not perfect, as a filename might not be associated with any contract
            for contract_name, contract in compilation_unit_json["contracts"].items():
                filename = _convert_dict_to_filename(contract["filenames"])
                compilation_unit.filenames.add(filename)
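A minimal sketch of the export layout this loader walks, inferred from the keys read above; the values are placeholders, not a real export.

loaded_json = {
    "compilation_units": {
        "unit-key": {
            "compiler": {"compiler": "solc", "version": "0.8.x", "optimized": False},
            "contracts": {
                "MyContract": {
                    "abi": [],
                    "bin": "...",
                    "bin-runtime": "...",
                    "srcmap": "",
                    "srcmap-runtime": "",
                    "libraries": {},
                    "userdoc": {},
                    "devdoc": {},
                    "is_dependency": False,
                    "filenames": {"absolute": "", "relative": "", "short": "", "used": ""},
                },
            },
            "asts": {},
            # "filenames": [...]  # only present in newer exports, see the branch above
        },
    },
}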
Example #4
    def compile(self, crytic_compile: "CryticCompile", **kwargs: Any) -> None:
        """[summary]

        Args:
            crytic_compile (CryticCompile): Associated CryticCompile object
            **kwargs: optional arguments. Used: "solc", "solc_disable_warnings", "solc_args", "solc_working_dir",
                "solc_remaps"
        """

        solc: str = kwargs.get("solc", "solc")
        solc_disable_warnings: bool = kwargs.get("solc_disable_warnings",
                                                 False)
        solc_arguments: str = kwargs.get("solc_args", "")

        solc_remaps: Optional[Union[str, List[str]]] = kwargs.get(
            "solc_remaps", None)
        solc_working_dir: Optional[str] = kwargs.get("solc_working_dir", None)

        compilation_unit = CompilationUnit(crytic_compile, "standard_json")

        compilation_unit.compiler_version = CompilerVersion(
            compiler="solc",
            version=get_version(solc, None),
            optimized=is_optimized(solc_arguments),
        )

        # Add all remappings
        if solc_remaps:
            if isinstance(solc_remaps, str):
                solc_remaps = solc_remaps.split(" ")
            for solc_remap in solc_remaps:
                self.add_remapping(solc_remap)

        # Invoke solc
        targets_json = run_solc_standard_json(
            self.to_dict(),
            compilation_unit.compiler_version,
            solc_disable_warnings=solc_disable_warnings,
        )

        parse_standard_json_output(targets_json,
                                   compilation_unit,
                                   solc_working_dir=solc_working_dir)
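A hypothetical driver for this platform, assuming the SolcStandardJson class with its add_source_file helper and the usual CryticCompile entry point; the import path, file path and flags are illustrative.

from crytic_compile import CryticCompile
from crytic_compile.platform.solc_standard_json import SolcStandardJson  # assumed module path

standard = SolcStandardJson()
standard.add_source_file("contracts/Token.sol")                        # placeholder source
standard.add_remapping("@openzeppelin/=node_modules/@openzeppelin/")   # optional, see add_remapping above
cc = CryticCompile(standard, solc="solc", solc_args="--optimize")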
Example #5
def _load_from_compile_current(crytic_compile: "CryticCompile", loaded_json: Dict) -> None:
    for key, compilation_unit_json in loaded_json["compilation_units"].items():
        compilation_unit = CompilationUnit(crytic_compile, key)
        compilation_unit.compiler_version = CompilerVersion(
            compiler=compilation_unit_json["compiler"]["compiler"],
            version=compilation_unit_json["compiler"]["version"],
            optimized=compilation_unit_json["compiler"]["optimized"],
        )
        for contracts_data in compilation_unit_json["contracts"].values():
            for contract_name, contract in contracts_data.items():
                compilation_unit.contracts_names.add(contract_name)
                filename = Filename(
                    absolute=contract["filenames"]["absolute"],
                    relative=contract["filenames"]["relative"],
                    short=contract["filenames"]["short"],
                    used=contract["filenames"]["used"],
                )
                compilation_unit.filename_to_contracts[filename].add(contract_name)

                compilation_unit.abis[contract_name] = contract["abi"]
                compilation_unit.bytecodes_init[contract_name] = contract["bin"]
                compilation_unit.bytecodes_runtime[contract_name] = contract["bin-runtime"]
                compilation_unit.srcmaps_init[contract_name] = contract["srcmap"].split(";")
                compilation_unit.srcmaps_runtime[contract_name] = contract["srcmap-runtime"].split(
                    ";"
                )
                compilation_unit.libraries[contract_name] = contract["libraries"]

                userdoc = contract.get("userdoc", {})
                devdoc = contract.get("devdoc", {})
                compilation_unit.natspec[contract_name] = Natspec(userdoc, devdoc)

                if contract["is_dependency"]:
                    crytic_compile.dependencies.add(filename.absolute)
                    crytic_compile.dependencies.add(filename.relative)
                    crytic_compile.dependencies.add(filename.short)
                    crytic_compile.dependencies.add(filename.used)
        compilation_unit.asts = compilation_unit_json["asts"]
        compilation_unit.filenames = {
            _convert_dict_to_filename(filename)
            for filename in compilation_unit_json["filenames"]
        }
Example #6
def _load_from_compile_legacy(crytic_compile: "CryticCompile",
                              loaded_json: Dict):
    compilation_unit = CompilationUnit(crytic_compile, "legacy")
    compilation_unit.asts = loaded_json["asts"]
    compilation_unit.compiler_version = CompilerVersion(
        compiler=loaded_json["compiler"]["compiler"],
        version=loaded_json["compiler"]["version"],
        optimized=loaded_json["compiler"]["optimized"],
    )
    for contract_name, contract in loaded_json["contracts"].items():
        compilation_unit.contracts_names.add(contract_name)
        filename = Filename(
            absolute=contract["filenames"]["absolute"],
            relative=contract["filenames"]["relative"],
            short=contract["filenames"]["short"],
            used=contract["filenames"]["used"],
        )
        compilation_unit.contracts_filenames[contract_name] = filename

        compilation_unit.abis[contract_name] = contract["abi"]
        compilation_unit.bytecodes_init[contract_name] = contract["bin"]
        compilation_unit.bytecodes_runtime[contract_name] = contract[
            "bin-runtime"]
        compilation_unit.srcmaps_init[contract_name] = contract[
            "srcmap"].split(";")
        compilation_unit.srcmaps_runtime[contract_name] = contract[
            "srcmap-runtime"].split(";")
        compilation_unit.libraries[contract_name] = contract["libraries"]

        userdoc = contract.get("userdoc", {})
        devdoc = contract.get("devdoc", {})
        compilation_unit.natspec[contract_name] = Natspec(userdoc, devdoc)

        if contract["is_dependency"]:
            compilation_unit.crytic_compile.dependencies.add(filename.absolute)
            compilation_unit.crytic_compile.dependencies.add(filename.relative)
            compilation_unit.crytic_compile.dependencies.add(filename.short)
            compilation_unit.crytic_compile.dependencies.add(filename.used)
Example #7
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
        """
        Compile the target

        :param crytic_compile:
        :param target:
        :param kwargs:
        :return:
        """

        etherlime_ignore_compile = kwargs.get("etherlime_ignore_compile", False) or kwargs.get(
            "ignore_compile", False
        )

        build_directory = "build"

        compile_arguments = kwargs.get("etherlime_compile_arguments", None)

        if not etherlime_ignore_compile:
            cmd = ["etherlime", "compile", self._target, "deleteCompiledFiles=true"]

            if not kwargs.get("npx_disable", False):
                cmd = ["npx"] + cmd

            if compile_arguments:
                cmd += compile_arguments.split(" ")

            try:
                with subprocess.Popen(
                    cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self._target
                ) as process:
                    stdout_bytes, stderr_bytes = process.communicate()
                    stdout, stderr = (
                        stdout_bytes.decode(),
                        stderr_bytes.decode(),
                    )  # convert bytestrings to unicode strings

                    LOGGER.info(stdout)

                    if stderr:
                        LOGGER.error(stderr)
            except OSError as error:
                # pylint: disable=raise-missing-from
                raise InvalidCompilation(error)

        # similar to truffle
        if not os.path.isdir(os.path.join(self._target, build_directory)):
            raise InvalidCompilation(
                "No truffle build directory found, did you run `truffle compile`?"
            )
        filenames = glob.glob(os.path.join(self._target, build_directory, "*.json"))

        version = None
        compiler = "solc-js"

        compilation_unit = CompilationUnit(crytic_compile, str(self._target))

        for file in filenames:
            with open(file, encoding="utf8") as file_desc:
                target_loaded = json.load(file_desc)

                if version is None:
                    if "compiler" in target_loaded:
                        if "version" in target_loaded["compiler"]:
                            version = re.findall(
                                r"\d+\.\d+\.\d+", target_loaded["compiler"]["version"]
                            )[0]

                if not "ast" in target_loaded:
                    continue

                filename_txt = target_loaded["ast"]["absolutePath"]
                filename = convert_filename(filename_txt, _relative_to_short, crytic_compile)
                compilation_unit.asts[filename.absolute] = target_loaded["ast"]
                crytic_compile.filenames.add(filename)
                contract_name = target_loaded["contractName"]
                compilation_unit.contracts_filenames[contract_name] = filename
                compilation_unit.contracts_names.add(contract_name)
                compilation_unit.abis[contract_name] = target_loaded["abi"]
                compilation_unit.bytecodes_init[contract_name] = target_loaded["bytecode"].replace(
                    "0x", ""
                )
                compilation_unit.bytecodes_runtime[contract_name] = target_loaded[
                    "deployedBytecode"
                ].replace("0x", "")
                compilation_unit.srcmaps_init[contract_name] = target_loaded["sourceMap"].split(";")
                compilation_unit.srcmaps_runtime[contract_name] = target_loaded[
                    "deployedSourceMap"
                ].split(";")

                userdoc = target_loaded.get("userdoc", {})
                devdoc = target_loaded.get("devdoc", {})
                natspec = Natspec(userdoc, devdoc)
                compilation_unit.natspec[contract_name] = natspec

        compilation_unit.compiler_version = CompilerVersion(
            compiler=compiler, version=version, optimized=_is_optimized(compile_arguments)
        )
Example #8
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
        """
        Compile the target

        :param crytic_compile:
        :param target:
        :param kwargs:
        :return:
        """

        solc = kwargs.get("solc", "solc")
        solc_disable_warnings = kwargs.get("solc_disable_warnings", False)
        solc_arguments = kwargs.get("solc_args", "")

        solc_remaps: Optional[Union[str, List[str]]] = kwargs.get(
            "solc_remaps", None)
        solc_working_dir = kwargs.get("solc_working_dir", None)

        compilation_unit = CompilationUnit(crytic_compile, "standard_json")

        compilation_unit.compiler_version = CompilerVersion(
            compiler="solc",
            version=get_version(solc, dict()),
            optimized=is_optimized(solc_arguments),
        )

        skip_filename = compilation_unit.compiler_version.version in [
            f"0.4.{x}" for x in range(0, 10)
        ]

        # Add all remappings
        if solc_remaps:
            if isinstance(solc_remaps, str):
                solc_remaps = solc_remaps.split(" ")
            for solc_remap in solc_remaps:
                self.add_remapping(solc_remap)

        # Invoke solc
        targets_json = _run_solc_standard_json(
            self.to_dict(), solc, solc_disable_warnings=solc_disable_warnings)

        if "contracts" in targets_json:
            for file_path, file_contracts in targets_json["contracts"].items():
                for contract_name, info in file_contracts.items():
                    # for solc < 0.4.10 we can't retrieve the filename from the ast
                    if skip_filename:
                        contract_filename = convert_filename(
                            self._target,
                            relative_to_short,
                            crytic_compile,
                            working_dir=solc_working_dir,
                        )
                    else:
                        contract_filename = convert_filename(
                            file_path,
                            relative_to_short,
                            crytic_compile,
                            working_dir=solc_working_dir,
                        )
                    compilation_unit.contracts_names.add(contract_name)
                    compilation_unit.contracts_filenames[
                        contract_name] = contract_filename
                    compilation_unit.abis[contract_name] = info["abi"]

                    userdoc = info.get("userdoc", {})
                    devdoc = info.get("devdoc", {})
                    natspec = Natspec(userdoc, devdoc)
                    compilation_unit.natspec[contract_name] = natspec

                    compilation_unit.bytecodes_init[contract_name] = info[
                        "evm"]["bytecode"]["object"]
                    compilation_unit.bytecodes_runtime[contract_name] = info[
                        "evm"]["deployedBytecode"]["object"]
                    compilation_unit.srcmaps_init[contract_name] = info["evm"][
                        "bytecode"]["sourceMap"].split(";")
                    compilation_unit.srcmaps_runtime[contract_name] = info[
                        "evm"]["deployedBytecode"]["sourceMap"].split(";")

        if "sources" in targets_json:
            for path, info in targets_json["sources"].items():
                if skip_filename:
                    path = convert_filename(
                        self._target,
                        relative_to_short,
                        crytic_compile,
                        working_dir=solc_working_dir,
                    )
                else:
                    path = convert_filename(path,
                                            relative_to_short,
                                            crytic_compile,
                                            working_dir=solc_working_dir)
                crytic_compile.filenames.add(path)
                compilation_unit.asts[path.absolute] = info["ast"]
Example #9
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
        """
        Compile the target

        :param crytic_compile:
        :param target:
        :param kwargs:
        :return:
        """
        embark_ignore_compile = kwargs.get("embark_ignore_compile", False) or kwargs.get(
            "ignore_compile", False
        )
        embark_overwrite_config = kwargs.get("embark_overwrite_config", False)

        plugin_name = "@trailofbits/embark-contract-info"
        with open(os.path.join(self._target, "embark.json"), encoding="utf8") as file_desc:
            embark_json = json.load(file_desc)
        if embark_overwrite_config:
            write_embark_json = False
            if not "plugins" in embark_json:
                embark_json["plugins"] = {plugin_name: {"flags": ""}}
                write_embark_json = True
            elif not plugin_name in embark_json["plugins"]:
                embark_json["plugins"][plugin_name] = {"flags": ""}
                write_embark_json = True
            if write_embark_json:
                try:
                    with subprocess.Popen(
                        ["npm", "install", plugin_name], cwd=self._target
                    ) as process:
                        _, stderr = process.communicate()
                        with open(
                            os.path.join(self._target, "embark.json"), "w", encoding="utf8"
                        ) as outfile:
                            json.dump(embark_json, outfile, indent=2)
                except OSError as error:
                    # pylint: disable=raise-missing-from
                    raise InvalidCompilation(error)

        else:
            if (not "plugins" in embark_json) or (not plugin_name in embark_json["plugins"]):
                raise InvalidCompilation(
                    "embark-contract-info plugin was found in embark.json. "
                    "Please install the plugin (see "
                    "https://github.com/crytic/crytic-compile/wiki/Usage#embark)"
                    ", or use --embark-overwrite-config."
                )

        if not embark_ignore_compile:
            try:
                cmd = ["embark", "build", "--contracts"]
                if not kwargs.get("npx_disable", False):
                    cmd = ["npx"] + cmd
                # pylint: disable=consider-using-with
                process = subprocess.Popen(
                    cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self._target
                )
            except OSError as error:
                # pylint: disable=raise-missing-from
                raise InvalidCompilation(error)
            stdout, stderr = process.communicate()
            LOGGER.info("%s\n", stdout.decode())
            if stderr:
                # Embark might return information to stderr, but compile without issue
                LOGGER.error("%s", stderr.decode())
        infile = os.path.join(self._target, "crytic-export", "contracts-embark.json")
        if not os.path.isfile(infile):
            raise InvalidCompilation(
                "Embark did not generate the AST file. Is Embark installed "
                "(npm install -g embark)? Is embark-contract-info installed? (npm install -g embark)."
            )
        compilation_unit = CompilationUnit(crytic_compile, str(self._target))

        compilation_unit.compiler_version = _get_version(self._target)

        with open(infile, "r", encoding="utf8") as file_desc:
            targets_loaded = json.load(file_desc)
            for k, ast in targets_loaded["asts"].items():
                filename = convert_filename(
                    k, _relative_to_short, crytic_compile, working_dir=self._target
                )
                compilation_unit.asts[filename.absolute] = ast
                crytic_compile.filenames.add(filename)

            if not "contracts" in targets_loaded:
                LOGGER.error(
                    "Incorrect json file generated. Are you using %s >= 1.1.0?", plugin_name
                )
                raise InvalidCompilation(
                    f"Incorrect json file generated. Are you using {plugin_name} >= 1.1.0?"
                )

            for original_contract_name, info in targets_loaded["contracts"].items():
                contract_name = extract_name(original_contract_name)
                contract_filename = extract_filename(original_contract_name)
                contract_filename = convert_filename(
                    contract_filename, _relative_to_short, crytic_compile, working_dir=self._target
                )

                compilation_unit.contracts_filenames[contract_name] = contract_filename
                compilation_unit.contracts_names.add(contract_name)

                if "abi" in info:
                    compilation_unit.abis[contract_name] = info["abi"]
                if "bin" in info:
                    compilation_unit.bytecodes_init[contract_name] = info["bin"].replace("0x", "")
                if "bin-runtime" in info:
                    compilation_unit.bytecodes_runtime[contract_name] = info["bin-runtime"].replace(
                        "0x", ""
                    )
                if "srcmap" in info:
                    compilation_unit.srcmaps_init[contract_name] = info["srcmap"].split(";")
                if "srcmap-runtime" in info:
                    compilation_unit.srcmaps_runtime[contract_name] = info["srcmap-runtime"].split(
                        ";"
                    )

                userdoc = info.get("userdoc", {})
                devdoc = info.get("devdoc", {})
                natspec = Natspec(userdoc, devdoc)
                compilation_unit.natspec[contract_name] = natspec
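The contracts-embark.json file read above is expected to look roughly like this, judging only from the keys the loop accesses; the "<file>:<name>" key format is inferred from the extract_name/extract_filename calls, and all values are placeholders.

targets_loaded = {
    "asts": {
        "contracts/SimpleStorage.sol": {},   # one AST entry per source file
    },
    "contracts": {
        "contracts/SimpleStorage.sol:SimpleStorage": {
            "abi": [],
            "bin": "0x...",
            "bin-runtime": "0x...",
            "srcmap": "",
            "srcmap-runtime": "",
            "userdoc": {},
            "devdoc": {},
        },
    },
}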
Example #10
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
        """
        Compile the target

        :param kwargs:
        :return:
        """

        build_directory = kwargs.get("truffle_build_directory",
                                     os.path.join("build", "contracts"))
        truffle_ignore_compile = kwargs.get("truffle_ignore_compile",
                                            False) or kwargs.get(
                                                "ignore_compile", False)
        truffle_version = kwargs.get("truffle_version", None)
        # crytic_compile.type = Type.TRUFFLE
        # Truffle on windows has naming conflicts where it will invoke truffle.js directly instead
        # of truffle.cmd (unless in powershell or git bash).
        # The cleanest solution is to explicitly call
        # truffle.cmd. Reference:
        # https://truffleframework.com/docs/truffle/reference/configuration#resolving-naming-conflicts-on-windows

        truffle_overwrite_config = kwargs.get("truffle_overwrite_config",
                                              False)

        if platform.system() == "Windows":
            base_cmd = ["truffle.cmd"]
        elif kwargs.get("npx_disable", False):
            base_cmd = ["truffle"]
        else:
            base_cmd = ["npx", "truffle"]
            if truffle_version:
                if truffle_version.startswith("truffle"):
                    base_cmd = ["npx", truffle_version]
                else:
                    base_cmd = ["npx", f"truffle@{truffle_version}"]
            elif os.path.isfile(os.path.join(self._target, "package.json")):
                with open(os.path.join(self._target, "package.json"),
                          encoding="utf8") as file_desc:
                    package = json.load(file_desc)
                    if "devDependencies" in package:
                        if "truffle" in package["devDependencies"]:
                            version = package["devDependencies"]["truffle"]
                            if version.startswith("^"):
                                version = version[1:]
                            truffle_version = "truffle@{}".format(version)
                            base_cmd = ["npx", truffle_version]
                    if "dependencies" in package:
                        if "truffle" in package["dependencies"]:
                            version = package["dependencies"]["truffle"]
                            if version.startswith("^"):
                                version = version[1:]
                            truffle_version = "truffle@{}".format(version)
                            base_cmd = ["npx", truffle_version]

        if not truffle_ignore_compile:
            cmd = base_cmd + ["compile", "--all"]

            LOGGER.info(
                "'%s' running (use --truffle-version [email protected] to use specific version)",
                " ".join(cmd),
            )

            config_used = None
            config_saved = None
            if truffle_overwrite_config:
                overwritten_version = kwargs.get("truffle_overwrite_version",
                                                 None)
                # If the version is not provided, we try to guess it with the config file
                if overwritten_version is None:
                    version_from_config = _get_version_from_config(
                        self._target)
                    if version_from_config:
                        overwritten_version, _ = version_from_config

                # Save the config file, and write our temporary config
                config_used, config_saved = _save_config(Path(self._target))
                if config_used is None:
                    config_used = Path("truffle-config.js")
                _write_config(Path(self._target), config_used,
                              overwritten_version)

            with subprocess.Popen(cmd,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE,
                                  cwd=self._target) as process:

                stdout_bytes, stderr_bytes = process.communicate()
                stdout, stderr = (
                    stdout_bytes.decode(),
                    stderr_bytes.decode(),
                )  # convert bytestrings to unicode strings

                if truffle_overwrite_config:
                    assert config_used
                    _reload_config(Path(self._target), config_saved,
                                   config_used)

                LOGGER.info(stdout)
                if stderr:
                    LOGGER.error(stderr)
        if not os.path.isdir(os.path.join(self._target, build_directory)):
            if os.path.isdir(os.path.join(self._target, "node_modules")):
                raise InvalidCompilation(
                    f"External dependencies {build_directory} {self._target} not found, please install them. (npm install)"
                )
            raise InvalidCompilation(
                "`truffle compile` failed. Can you run it?")
        filenames = glob.glob(
            os.path.join(self._target, build_directory, "*.json"))

        optimized = None

        version = None
        compiler = None
        compilation_unit = CompilationUnit(crytic_compile, str(self._target))

        for filename_txt in filenames:
            with open(filename_txt, encoding="utf8") as file_desc:
                target_loaded = json.load(file_desc)
                # pylint: disable=too-many-nested-blocks
                if optimized is None:
                    if "metadata" in target_loaded:
                        metadata = target_loaded["metadata"]
                        try:
                            metadata = json.loads(metadata)
                            if "settings" in metadata:
                                if "optimizer" in metadata["settings"]:
                                    if "enabled" in metadata["settings"][
                                            "optimizer"]:
                                        optimized = metadata["settings"][
                                            "optimizer"]["enabled"]
                        except json.decoder.JSONDecodeError:
                            pass

                userdoc = target_loaded.get("userdoc", {})
                devdoc = target_loaded.get("devdoc", {})
                natspec = Natspec(userdoc, devdoc)

                if not "ast" in target_loaded:
                    continue

                filename = target_loaded["ast"]["absolutePath"]
                try:
                    filename = convert_filename(filename,
                                                _relative_to_short,
                                                crytic_compile,
                                                working_dir=self._target)
                except InvalidCompilation as i:
                    txt = str(i)
                    txt += "\nConsider removing the build/contracts content (rm build/contracts/*)"
                    # pylint: disable=raise-missing-from
                    raise InvalidCompilation(txt)

                compilation_unit.asts[filename.absolute] = target_loaded["ast"]
                crytic_compile.filenames.add(filename)
                contract_name = target_loaded["contractName"]
                compilation_unit.natspec[contract_name] = natspec
                compilation_unit.contracts_filenames[contract_name] = filename
                compilation_unit.contracts_names.add(contract_name)
                compilation_unit.abis[contract_name] = target_loaded["abi"]
                compilation_unit.bytecodes_init[contract_name] = target_loaded[
                    "bytecode"].replace("0x", "")
                compilation_unit.bytecodes_runtime[
                    contract_name] = target_loaded["deployedBytecode"].replace(
                        "0x", "")
                compilation_unit.srcmaps_init[contract_name] = target_loaded[
                    "sourceMap"].split(";")
                compilation_unit.srcmaps_runtime[
                    contract_name] = target_loaded["deployedSourceMap"].split(
                        ";")

                if compiler is None:
                    compiler = target_loaded.get("compiler",
                                                 {}).get("name", None)
                if version is None:
                    version = target_loaded.get("compiler",
                                                {}).get("version", None)
                    if "+" in version:
                        version = version[0:version.find("+")]

        if version is None or compiler is None:
            version_from_config = _get_version_from_config(self._target)
            if version_from_config:
                version, compiler = version_from_config
            else:
                version, compiler = _get_version(base_cmd, cwd=self._target)

        compilation_unit.compiler_version = CompilerVersion(
            compiler=compiler, version=version, optimized=optimized)
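For orientation, these are the fields the loop reads from each build/contracts/*.json artifact; a sketch with placeholder values, since real Truffle artifacts carry many more fields.

artifact = {
    "contractName": "MyContract",
    "abi": [],
    "bytecode": "0x...",
    "deployedBytecode": "0x...",
    "sourceMap": "",
    "deployedSourceMap": "",
    "ast": {"absolutePath": "contracts/MyContract.sol"},
    "compiler": {"name": "solc", "version": "0.8.x+commit.abc"},
    "metadata": "{...}",   # stringified solc metadata, parsed above for the optimizer flag
    "userdoc": {},
    "devdoc": {},
}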
Example #11
def _iterate_over_files(crytic_compile: "CryticCompile", target: str,
                        filenames: List[Path]):
    """
    Iterate over the files

    :param crytic_compile:
    :param target:
    :param filenames:
    :return:
    """
    optimized = None
    compiler = "solc"
    version = None

    compilation_unit = CompilationUnit(crytic_compile, str(target))

    for original_filename in filenames:
        with open(original_filename, encoding="utf8") as f_file:
            target_loaded: Dict = json.load(f_file)

            if "ast" not in target_loaded:
                continue

            if optimized is None:
                if "compiler" in target_loaded:
                    compiler_d: Dict = target_loaded["compiler"]
                    optimized = compiler_d.get("optimize", False)
                    version = _get_version(compiler_d)

            # Filter out vyper files
            if "absolutePath" not in target_loaded["ast"]:
                continue

            filename_txt = target_loaded["ast"]["absolutePath"]
            filename: Filename = convert_filename(filename_txt,
                                                  _relative_to_short,
                                                  crytic_compile,
                                                  working_dir=target)

            compilation_unit.asts[filename.absolute] = target_loaded["ast"]
            crytic_compile.filenames.add(filename)
            contract_name = target_loaded["contractName"]
            compilation_unit.contracts_filenames[contract_name] = filename
            compilation_unit.contracts_names.add(contract_name)
            compilation_unit.abis[contract_name] = target_loaded["abi"]
            compilation_unit.bytecodes_init[contract_name] = target_loaded[
                "bytecode"].replace("0x", "")
            compilation_unit.bytecodes_runtime[contract_name] = target_loaded[
                "deployedBytecode"].replace("0x", "")
            compilation_unit.srcmaps_init[contract_name] = target_loaded[
                "sourceMap"].split(";")
            compilation_unit.srcmaps_runtime[contract_name] = target_loaded[
                "deployedSourceMap"].split(";")

            userdoc = target_loaded.get("userdoc", {})
            devdoc = target_loaded.get("devdoc", {})
            natspec = Natspec(userdoc, devdoc)
            compilation_unit.natspec[contract_name] = natspec

    compilation_unit.compiler_version = CompilerVersion(compiler=compiler,
                                                        version=version,
                                                        optimized=optimized)
Example #12
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
        """Run the compilation

        Args:
            crytic_compile (CryticCompile): Associated CryticCompile object
            **kwargs: optional arguments. Used "solc", "etherscan_only_source_code", "etherscan_only_bytecode",
                "etherscan_api_key", "export_dir"

        Raises:
            InvalidCompilation: if etherscan returned an error, or its results were not correctly parsed
        """

        target = self._target

        if target.startswith(tuple(SUPPORTED_NETWORK)):
            prefix: Union[None,
                          str] = SUPPORTED_NETWORK[target[:target.find(":") +
                                                          1]][0]
            prefix_bytecode = SUPPORTED_NETWORK[target[:target.find(":") +
                                                       1]][1]
            addr = target[target.find(":") + 1:]
            etherscan_url = ETHERSCAN_BASE % (prefix, addr)
            etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % (
                prefix_bytecode, addr)

        else:
            etherscan_url = ETHERSCAN_BASE % (".etherscan.io", target)
            etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % ("etherscan.io",
                                                                target)
            addr = target
            prefix = None

        only_source = kwargs.get("etherscan_only_source_code", False)
        only_bytecode = kwargs.get("etherscan_only_bytecode", False)

        etherscan_api_key = kwargs.get("etherscan_api_key", None)
        arbiscan_api_key = kwargs.get("arbiscan_api_key", None)
        polygonscan_api_key = kwargs.get("polygonscan_api_key", None)
        avax_api_key = kwargs.get("avax_api_key", None)
        ftmscan_api_key = kwargs.get("ftmscan_api_key", None)
        bscan_api_key = kwargs.get("bscan_api_key", None)

        export_dir = kwargs.get("export_dir", "crytic-export")
        export_dir = os.path.join(
            export_dir,
            kwargs.get("etherscan_export_dir", "etherscan-contracts"))

        if etherscan_api_key and "etherscan" in etherscan_url:
            etherscan_url += f"&apikey={etherscan_api_key}"
            etherscan_bytecode_url += f"&apikey={etherscan_api_key}"
        if arbiscan_api_key and "arbiscan" in etherscan_url:
            etherscan_url += f"&apikey={arbiscan_api_key}"
            etherscan_bytecode_url += f"&apikey={arbiscan_api_key}"
        if polygonscan_api_key and "polygonscan" in etherscan_url:
            etherscan_url += f"&apikey={polygonscan_api_key}"
            etherscan_bytecode_url += f"&apikey={polygonscan_api_key}"
        if avax_api_key and "snowtrace" in etherscan_url:
            etherscan_url += f"&apikey={avax_api_key}"
            etherscan_bytecode_url += f"&apikey={avax_api_key}"
        if ftmscan_api_key and "ftmscan" in etherscan_url:
            etherscan_url += f"&apikey={ftmscan_api_key}"
            etherscan_bytecode_url += f"&apikey={ftmscan_api_key}"
        if bscan_api_key and "bscscan" in etherscan_url:
            etherscan_url += f"&apikey={bscan_api_key}"
            etherscan_bytecode_url += f"&apikey={bscan_api_key}"

        source_code: str = ""
        result: Dict[str, Union[bool, str, int]] = {}
        contract_name: str = ""

        if not only_bytecode:
            with urllib.request.urlopen(etherscan_url) as response:
                html = response.read()

            info = json.loads(html)

            if "result" in info and info["result"] == "Max rate limit reached":
                LOGGER.error("Etherscan API rate limit exceeded")
                raise InvalidCompilation("Etherscan api rate limit exceeded")

            if "message" not in info:
                LOGGER.error("Incorrect etherscan request")
                raise InvalidCompilation("Incorrect etherscan request " +
                                         etherscan_url)

            if not info["message"].startswith("OK"):
                LOGGER.error("Contract has no public source code")
                raise InvalidCompilation(
                    "Contract has no public source code: " + etherscan_url)

            if "result" not in info:
                LOGGER.error("Contract has no public source code")
                raise InvalidCompilation(
                    "Contract has no public source code: " + etherscan_url)

            result = info["result"][0]
            # Assert to help mypy
            assert isinstance(result["SourceCode"], str)
            assert isinstance(result["ContractName"], str)
            source_code = result["SourceCode"]
            contract_name = result["ContractName"]

        if source_code == "" and not only_source:
            LOGGER.info(
                "Source code not available, trying to fetch the bytecode only")

            req = urllib.request.Request(etherscan_bytecode_url,
                                         headers={"User-Agent": "Mozilla/5.0"})
            with urllib.request.urlopen(req) as response:
                html = response.read()

            _handle_bytecode(crytic_compile, target, html)
            return

        if source_code == "":
            LOGGER.error("Contract has no public source code")
            raise InvalidCompilation("Contract has no public source code: " +
                                     etherscan_url)

        if not os.path.exists(export_dir):
            os.makedirs(export_dir)

        # Assert to help mypy
        assert isinstance(result["CompilerVersion"], str)

        compiler_version = re.findall(
            r"\d+\.\d+\.\d+", _convert_version(result["CompilerVersion"]))[0]

        optimization_used: bool = result["OptimizationUsed"] == "1"

        optimize_runs = None
        if optimization_used:
            optimize_runs = int(result["Runs"])

        working_dir: Optional[str] = None

        try:
            # etherscan might return an object with two curly braces, {{ content }}
            dict_source_code = json.loads(source_code[1:-1])
            filenames, working_dir = _handle_multiple_files(
                dict_source_code, addr, prefix, contract_name, export_dir)
        except JSONDecodeError:
            try:
                # or etherscan might return an object with single curly braces, { content }
                dict_source_code = json.loads(source_code)
                filenames, working_dir = _handle_multiple_files(
                    dict_source_code, addr, prefix, contract_name, export_dir)
            except JSONDecodeError:
                filenames = [
                    _handle_single_file(source_code, addr, prefix,
                                        contract_name, export_dir)
                ]

        compilation_unit = CompilationUnit(crytic_compile, contract_name)

        compilation_unit.compiler_version = CompilerVersion(
            compiler=kwargs.get("solc", "solc"),
            version=compiler_version,
            optimized=optimization_used,
            optimize_runs=optimize_runs,
        )
        compilation_unit.compiler_version.look_for_installed_version()

        solc_standard_json.standalone_compile(filenames,
                                              compilation_unit,
                                              working_dir=working_dir)
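A hypothetical invocation through the public entry point; the address is a placeholder and the keyword names are the ones read above (how the Etherscan platform is detected is outside this snippet).

from crytic_compile import CryticCompile

cc = CryticCompile(
    "0x0000000000000000000000000000000000000000",  # placeholder contract address
    etherscan_api_key="YOUR_API_KEY",               # avoids the rate-limit error handled above
    etherscan_only_source_code=False,
    export_dir="crytic-export",
)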
Example #13
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
        """
        Compile the target

        :param crytic_compile:
        :param target:
        :param kwargs:
        :return:
        """

        dapp_ignore_compile = kwargs.get("dapp_ignore_compile",
                                         False) or kwargs.get(
                                             "ignore_compile", False)
        directory = os.path.join(self._target, "out")

        if not dapp_ignore_compile:
            _run_dapp(self._target)

        compilation_unit = CompilationUnit(crytic_compile, str(self._target))

        compilation_unit.compiler_version = _get_version(self._target)

        optimized = False

        with open(os.path.join(directory, "dapp.sol.json"), encoding="utf8") as file_desc:
            targets_json = json.load(file_desc)

            version = None
            if "version" in targets_json:
                version = re.findall(r"\d+\.\d+\.\d+",
                                     targets_json["version"])[0]

            for original_filename, contracts_info in targets_json[
                    "contracts"].items():
                for original_contract_name, info in contracts_info.items():
                    if "metadata" in info:
                        metadata = json.loads(info["metadata"])
                        if ("settings" in metadata
                                and "optimizer" in metadata["settings"]
                                and "enabled"
                                in metadata["settings"]["optimizer"]):
                            optimized |= metadata["settings"]["optimizer"][
                                "enabled"]
                    contract_name = extract_name(original_contract_name)
                    compilation_unit.contracts_names.add(contract_name)
                    compilation_unit.contracts_filenames[
                        contract_name] = original_filename

                    compilation_unit.abis[contract_name] = info["abi"]
                    compilation_unit.bytecodes_init[contract_name] = info[
                        "evm"]["bytecode"]["object"]
                    compilation_unit.bytecodes_runtime[contract_name] = info[
                        "evm"]["deployedBytecode"]["object"]
                    compilation_unit.srcmaps_init[contract_name] = info["evm"][
                        "bytecode"]["sourceMap"].split(";")
                    compilation_unit.srcmaps_runtime[contract_name] = info[
                        "evm"]["deployedBytecode"]["sourceMap"].split(";")
                    userdoc = info.get("userdoc", {})
                    devdoc = info.get("devdoc", {})
                    natspec = Natspec(userdoc, devdoc)
                    compilation_unit.natspec[contract_name] = natspec

                    if version is None:
                        metadata = json.loads(info["metadata"])
                        version = re.findall(
                            r"\d+\.\d+\.\d+",
                            metadata["compiler"]["version"])[0]

            for path, info in targets_json["sources"].items():
                path = convert_filename(path,
                                        _relative_to_short,
                                        crytic_compile,
                                        working_dir=self._target)
                crytic_compile.filenames.add(path)
                compilation_unit.asts[path.absolute] = info["ast"]

        compilation_unit.compiler_version = CompilerVersion(
            compiler="solc", version=version, optimized=optimized)
Example #14
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
        """Compile the project and populate the CryticCompile object

        Args:
            crytic_compile (CryticCompile): Associated CryticCompile
            **kwargs: optional arguments. Used "waffle_ignore_compile", "ignore_compile", "npx_disable",
                "waffle_config_file"

        Raises:
            InvalidCompilation: If waffle failed to run
        """

        waffle_ignore_compile = kwargs.get("waffle_ignore_compile",
                                           False) or kwargs.get(
                                               "ignore_compile", False)
        target = self._target

        cmd = ["waffle"]
        if not kwargs.get("npx_disable", False):
            cmd = ["npx"] + cmd

        # Default behaviour (without any config_file)
        build_directory = os.path.join("build")
        compiler = "native"
        config: Dict = {}

        config_file = kwargs.get("waffle_config_file", "waffle.json")

        potential_config_files = list(Path(target).rglob("*waffle*.json"))
        if potential_config_files and len(potential_config_files) == 1:
            config_file = str(potential_config_files[0])

        # Read config file
        if config_file:
            config = _load_config(config_file)

            # old version
            if "compiler" in config:
                compiler = config["compiler"]
            if "compilerType" in config:
                compiler = config["compilerType"]

            if "compilerVersion" in config:
                version = config["compilerVersion"]
            else:
                version = _get_version(compiler, target, config=config)

            if "targetPath" in config:
                build_directory = config["targetPath"]

        else:
            version = _get_version(compiler, target)

        if "outputType" not in config or config["outputType"] != "all":
            config["outputType"] = "all"

        needed_config = {
            "compilerOptions": {
                "outputSelection": {
                    "*": {
                        "*": [
                            "evm.bytecode.object",
                            "evm.deployedBytecode.object",
                            "abi",
                            "evm.bytecode.sourceMap",
                            "evm.deployedBytecode.sourceMap",
                        ],
                        "": ["ast"],
                    }
                }
            }
        }

        # Set the config as it should be
        if "compilerOptions" in config:
            curr_config: Dict = config["compilerOptions"]
            curr_needed_config: Dict = needed_config["compilerOptions"]
            if "outputSelection" in curr_config:
                curr_config = curr_config["outputSelection"]
                curr_needed_config = curr_needed_config["outputSelection"]
                if "*" in curr_config:
                    curr_config = curr_config["*"]
                    curr_needed_config = curr_needed_config["*"]
                    if "*" in curr_config:
                        curr_config["*"] += curr_needed_config["*"]
                    else:
                        curr_config["*"] = curr_needed_config["*"]

                    if "" in curr_config:
                        curr_config[""] += curr_needed_config[""]
                    else:
                        curr_config[""] = curr_needed_config[""]

                else:
                    curr_config["*"] = curr_needed_config["*"]

            else:
                curr_config["outputSelection"] = curr_needed_config[
                    "outputSelection"]
        else:
            config["compilerOptions"] = needed_config["compilerOptions"]

        if not waffle_ignore_compile:
            with tempfile.NamedTemporaryFile(mode="w",
                                             suffix=".json",
                                             dir=target) as file_desc:
                json.dump(config, file_desc)
                file_desc.flush()

                # cmd += [os.path.relpath(file_desc.name)]
                cmd += [Path(file_desc.name).name]

                LOGGER.info("Temporary file created: %s", file_desc.name)
                LOGGER.info("'%s running", " ".join(cmd))

                try:
                    with subprocess.Popen(
                            cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            cwd=target,
                            executable=shutil.which(cmd[0]),
                    ) as process:
                        stdout, stderr = process.communicate()
                        if stdout:
                            LOGGER.info(stdout.decode())
                        if stderr:
                            LOGGER.error(stderr.decode())
                except OSError as error:
                    # pylint: disable=raise-missing-from
                    raise InvalidCompilation(error)

        if not os.path.isdir(os.path.join(target, build_directory)):
            raise InvalidCompilation(
                "`waffle` compilation failed: build directory not found")

        combined_path = os.path.join(target, build_directory,
                                     "Combined-Json.json")
        if not os.path.exists(combined_path):
            raise InvalidCompilation("`Combined-Json.json` not found")

        with open(combined_path, encoding="utf8") as f:
            target_all = json.load(f)

        optimized = None

        compilation_unit = CompilationUnit(crytic_compile, str(target))

        for contract in target_all["contracts"]:
            target_loaded = target_all["contracts"][contract]
            contract = contract.split(":")
            filename = convert_filename(contract[0],
                                        _relative_to_short,
                                        crytic_compile,
                                        working_dir=target)

            contract_name = contract[1]

            compilation_unit.asts[filename.absolute] = target_all["sources"][
                contract[0]]["AST"]
            crytic_compile.filenames.add(filename)
            compilation_unit.filenames.add(filename)
            compilation_unit.filename_to_contracts[filename].add(contract_name)
            compilation_unit.contracts_names.add(contract_name)
            compilation_unit.abis[contract_name] = target_loaded["abi"]

            userdoc = target_loaded.get("userdoc", {})
            devdoc = target_loaded.get("devdoc", {})
            natspec = Natspec(userdoc, devdoc)
            compilation_unit.natspec[contract_name] = natspec

            compilation_unit.bytecodes_init[contract_name] = target_loaded[
                "evm"]["bytecode"]["object"]
            compilation_unit.srcmaps_init[contract_name] = target_loaded[
                "evm"]["bytecode"]["sourceMap"].split(";")
            compilation_unit.bytecodes_runtime[contract_name] = target_loaded[
                "evm"]["deployedBytecode"]["object"]
            compilation_unit.srcmaps_runtime[contract_name] = target_loaded[
                "evm"]["deployedBytecode"]["sourceMap"].split(";")

        compilation_unit.compiler_version = CompilerVersion(
            compiler=compiler, version=version, optimized=optimized)
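For orientation, a trimmed, hypothetical sketch of the Combined-Json.json layout that the loop above consumes; the path and contract name are illustrative, only the keys are taken from the loader:

# Hypothetical, trimmed Combined-Json.json content (keys mirror what the loader reads).
combined_json_example = {
    "contracts": {
        "contracts/Token.sol:Token": {  # "<source path>:<ContractName>"
            "abi": [],
            "userdoc": {},
            "devdoc": {},
            "evm": {
                "bytecode": {"object": "6080...", "sourceMap": "0:10:0:-;"},
                "deployedBytecode": {"object": "6080...", "sourceMap": "0:10:0:-;"},
            },
        }
    },
    "sources": {"contracts/Token.sol": {"AST": {}}},
}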
Beispiel #15
0
    def compile(self, crytic_compile: "CryticCompile", **kwargs: Any) -> None:
        """Run the compilation

        Args:
            crytic_compile (CryticCompile): Associated CryticCompile object
            **kwargs: optional arguments. Used: "etherlime_ignore_compile", "ignore_compile",
                "etherlime_compile_arguments", "npx_disable"

        Raises:
            InvalidCompilation: if etherlime failed to run
        """

        etherlime_ignore_compile = kwargs.get("etherlime_ignore_compile",
                                              False) or kwargs.get(
                                                  "ignore_compile", False)

        build_directory = "build"
        compile_arguments: Optional[str] = kwargs.get(
            "etherlime_compile_arguments", None)
        npx_disable: bool = kwargs.get("npx_disable", False)

        if not etherlime_ignore_compile:
            _run_etherlime(self._target, npx_disable, compile_arguments)

        # Etherlime produces a truffle-style build directory
        if not os.path.isdir(os.path.join(self._target, build_directory)):
            raise InvalidCompilation(
                "No build directory found, did you run `etherlime compile`?"
            )
        filenames = glob.glob(
            os.path.join(self._target, build_directory, "*.json"))

        version = None
        compiler = "solc-js"

        compilation_unit = CompilationUnit(crytic_compile, str(self._target))

        for file in filenames:
            with open(file, encoding="utf8") as file_desc:
                target_loaded = json.load(file_desc)

                if version is None:
                    if "compiler" in target_loaded:
                        if "version" in target_loaded["compiler"]:
                            version = re.findall(
                                r"\d+\.\d+\.\d+",
                                target_loaded["compiler"]["version"])[0]

                if "ast" not in target_loaded:
                    continue

                filename_txt = target_loaded["ast"]["absolutePath"]
                filename = convert_filename(filename_txt, _relative_to_short,
                                            crytic_compile)
                compilation_unit.asts[filename.absolute] = target_loaded["ast"]
                compilation_unit.filenames.add(filename)
                crytic_compile.filenames.add(filename)
                contract_name = target_loaded["contractName"]
                compilation_unit.filename_to_contracts[filename].add(
                    contract_name)
                compilation_unit.contracts_names.add(contract_name)
                compilation_unit.abis[contract_name] = target_loaded["abi"]
                compilation_unit.bytecodes_init[contract_name] = target_loaded[
                    "bytecode"].replace("0x", "")
                compilation_unit.bytecodes_runtime[
                    contract_name] = target_loaded["deployedBytecode"].replace(
                        "0x", "")
                compilation_unit.srcmaps_init[contract_name] = target_loaded[
                    "sourceMap"].split(";")
                compilation_unit.srcmaps_runtime[
                    contract_name] = target_loaded["deployedSourceMap"].split(
                        ";")

                userdoc = target_loaded.get("userdoc", {})
                devdoc = target_loaded.get("devdoc", {})
                natspec = Natspec(userdoc, devdoc)
                compilation_unit.natspec[contract_name] = natspec

        compilation_unit.compiler_version = CompilerVersion(
            compiler=compiler,
            version=version,
            optimized=_is_optimized(compile_arguments))
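The artifacts read above follow the truffle-style per-contract JSON layout; a minimal, hypothetical example, limited to the fields the loop actually uses (names and values illustrative):

# Hypothetical etherlime/truffle-style build artifact, trimmed to the fields used above.
artifact_example = {
    "contractName": "Token",
    "abi": [],
    "bytecode": "0x6080...",
    "deployedBytecode": "0x6080...",
    "sourceMap": "1:2:0:-;",
    "deployedSourceMap": "1:2:0:-;",
    "ast": {"absolutePath": "contracts/Token.sol"},
    "compiler": {"version": "0.6.12"},  # real artifacts carry a longer version string
    "userdoc": {},
    "devdoc": {},
}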
Beispiel #16
0
def load_from_compile(crytic_compile: "CryticCompile",
                      loaded_json: Dict) -> Tuple[int, List[str]]:
    """
    Load from json

    :param crytic_compile:
    :param loaded_json:
    :return:
    """
    crytic_compile.package_name = loaded_json.get("package", None)

    if "compilation_units" not in loaded_json:
        _load_from_compile_legacy(crytic_compile, loaded_json)

    else:
        for key, compilation_unit_json in loaded_json[
                "compilation_units"].items():
            compilation_unit = CompilationUnit(crytic_compile, key)
            compilation_unit.compiler_version = CompilerVersion(
                compiler=compilation_unit_json["compiler"]["compiler"],
                version=compilation_unit_json["compiler"]["version"],
                optimized=compilation_unit_json["compiler"]["optimized"],
            )
            for contract_name, contract in compilation_unit_json[
                    "contracts"].items():
                compilation_unit.contracts_names.add(contract_name)
                filename = Filename(
                    absolute=contract["filenames"]["absolute"],
                    relative=contract["filenames"]["relative"],
                    short=contract["filenames"]["short"],
                    used=contract["filenames"]["used"],
                )
                compilation_unit.contracts_filenames[contract_name] = filename

                compilation_unit.abis[contract_name] = contract["abi"]
                compilation_unit.bytecodes_init[contract_name] = contract[
                    "bin"]
                compilation_unit.bytecodes_runtime[contract_name] = contract[
                    "bin-runtime"]
                compilation_unit.srcmaps_init[contract_name] = contract[
                    "srcmap"].split(";")
                compilation_unit.srcmaps_runtime[contract_name] = contract[
                    "srcmap-runtime"].split(";")
                compilation_unit.libraries[contract_name] = contract[
                    "libraries"]

                userdoc = contract.get("userdoc", {})
                devdoc = contract.get("devdoc", {})
                compilation_unit.natspec[contract_name] = Natspec(
                    userdoc, devdoc)

                if contract["is_dependency"]:
                    crytic_compile.dependencies.add(filename.absolute)
                    crytic_compile.dependencies.add(filename.relative)
                    crytic_compile.dependencies.add(filename.short)
                    crytic_compile.dependencies.add(filename.used)
            compilation_unit.asts = compilation_unit_json["asts"]

    # Set our filenames
    for compilation_unit in crytic_compile.compilation_units.values():
        crytic_compile.filenames |= set(
            compilation_unit.contracts_filenames.values())

    crytic_compile.working_dir = loaded_json["working_dir"]

    return loaded_json["type"], loaded_json.get("unit_tests", [])
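A minimal, hand-written archive that exercises the non-legacy "compilation_units" branch above; every value is illustrative and only the keys come from the loader:

# Hypothetical minimal JSON archive accepted by load_from_compile.
minimal_archive = {
    "package": None,
    "working_dir": ".",
    "type": 0,  # platform type identifier recorded at export time (illustrative)
    "unit_tests": [],
    "compilation_units": {
        "unit-0": {
            "compiler": {"compiler": "solc", "version": "0.8.19", "optimized": False},
            "contracts": {
                "Token": {
                    "filenames": {
                        "absolute": "/project/Token.sol",
                        "relative": "Token.sol",
                        "short": "Token.sol",
                        "used": "Token.sol",
                    },
                    "abi": [],
                    "bin": "6080...",
                    "bin-runtime": "6080...",
                    "srcmap": "",
                    "srcmap-runtime": "",
                    "libraries": {},
                    "is_dependency": False,
                },
            },
            "asts": {},
        }
    },
}
# platform_type, unit_tests = load_from_compile(crytic_compile, minimal_archive)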
Beispiel #17
0
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
        """Compile

        Args:
            crytic_compile (CryticCompile): CryticCompile object to populate
            **kwargs: optional arguments. Used: "foundry_ignore_compile", "ignore_compile",
                "foundry_out_directory"

        Raises:
            InvalidCompilation: If foundry failed to run
        """

        ignore_compile = kwargs.get("foundry_ignore_compile",
                                    False) or kwargs.get(
                                        "ignore_compile", False)

        out_directory = kwargs.get("foundry_out_directory", "out")

        if ignore_compile:
            LOGGER.info(
                "--ignore-compile used, if something goes wrong, consider removing the ignore compile flag"
            )

        if not ignore_compile:
            cmd = [
                "forge",
                "build",
                "--extra-output",
                "abi",
                "--extra-output",
                "userdoc",
                "--extra-output",
                "devdoc",
                "--extra-output",
                "evm.methodIdentifiers",
                "--force",
            ]

            LOGGER.info(
                "'%s' running",
                " ".join(cmd),
            )

            with subprocess.Popen(
                    cmd,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    cwd=self._target,
                    executable=shutil.which(cmd[0]),
            ) as process:

                stdout_bytes, stderr_bytes = process.communicate()
                stdout, stderr = (
                    stdout_bytes.decode(),
                    stderr_bytes.decode(),
                )  # convert bytestrings to unicode strings

                LOGGER.info(stdout)
                if stderr:
                    LOGGER.error(stderr)

        filenames = Path(self._target, out_directory).rglob("*.json")

        # Foundry only supports solc for now
        compiler = "solc"
        compilation_unit = CompilationUnit(crytic_compile, str(self._target))

        for filename_txt in filenames:
            with open(filename_txt, encoding="utf8") as file_desc:
                target_loaded = json.load(file_desc)

                userdoc = target_loaded.get("userdoc", {})
                devdoc = target_loaded.get("devdoc", {})
                natspec = Natspec(userdoc, devdoc)

                if "ast" not in target_loaded:
                    continue

                filename = target_loaded["ast"]["absolutePath"]

                try:
                    filename = convert_filename(filename,
                                                lambda x: x,
                                                crytic_compile,
                                                working_dir=self._target)
                except InvalidCompilation as i:
                    txt = str(i)
                    txt += "\nSomething went wrong, please open an issue in https://github.com/crytic/crytic-compile"
                    # pylint: disable=raise-missing-from
                    raise InvalidCompilation(txt)

                compilation_unit.asts[filename.absolute] = target_loaded["ast"]
                crytic_compile.filenames.add(filename)
                compilation_unit.filenames.add(filename)

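                # forge typically writes artifacts as <out>/<Source>.sol/<Contract>.json,
                # so the file stem is used as the contract name.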
                contract_name = filename_txt.parts[-1]
                contract_name = contract_name[:-len(".json")]

                compilation_unit.natspec[contract_name] = natspec
                compilation_unit.filename_to_contracts[filename].add(
                    contract_name)
                compilation_unit.contracts_names.add(contract_name)
                compilation_unit.abis[contract_name] = target_loaded["abi"]
                compilation_unit.bytecodes_init[contract_name] = target_loaded[
                    "bytecode"]["object"].replace("0x", "")
                compilation_unit.bytecodes_runtime[
                    contract_name] = target_loaded["deployedBytecode"][
                        "object"].replace("0x", "")
                compilation_unit.srcmaps_init[contract_name] = target_loaded[
                    "bytecode"]["sourceMap"].split(";")
                compilation_unit.srcmaps_runtime[
                    contract_name] = target_loaded["deployedBytecode"][
                        "sourceMap"].split(";")

        version, optimized, runs = _get_config_info(self._target)

        compilation_unit.compiler_version = CompilerVersion(
            compiler=compiler,
            version=version,
            optimized=optimized,
            optimize_runs=runs)
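A hedged usage sketch: the kwargs read above can be passed straight through CryticCompile; the project path is a placeholder, and compilation is skipped on the assumption that `forge build` already ran:

from crytic_compile import CryticCompile

# Placeholder project path; assumes an existing `forge build` output in ./out.
cc = CryticCompile(
    "./my-foundry-project",
    foundry_ignore_compile=True,
    foundry_out_directory="out",
)
for unit in cc.compilation_units.values():
    print(sorted(unit.contracts_names))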
Beispiel #18
0
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
        """

        Compile the tharget
        :param crytic_compile:
        :param target:
        :param kwargs:
        :return:
        """

        target = self._target

        solc = kwargs.get("solc", "solc")

        if target.startswith(tuple(SUPPORTED_NETWORK)):
            prefix: Union[None,
                          str] = SUPPORTED_NETWORK[target[:target.find(":") +
                                                          1]][0]
            prefix_bytecode = SUPPORTED_NETWORK[target[:target.find(":") +
                                                       1]][1]
            addr = target[target.find(":") + 1:]
            etherscan_url = ETHERSCAN_BASE % (prefix, addr)
            etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % (
                prefix_bytecode, addr)

        elif target.startswith(tuple(ALT_NETWORK)):
            temp_url = ALT_NETWORK[target[:target.find(":") + 1]][0]
            temp_bytecode_url = ALT_NETWORK[target[:target.find(":") + 1]][1]
            addr = target[target.find(":") + 1:]
            prefix = None
            etherscan_url = temp_url % (addr)
            etherscan_bytecode_url = temp_bytecode_url % (addr)

        else:
            etherscan_url = ETHERSCAN_BASE % ("", target)
            etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % ("", target)
            addr = target
            prefix = None

        only_source = kwargs.get("etherscan_only_source_code", False)
        only_bytecode = kwargs.get("etherscan_only_bytecode", False)

        etherscan_api_key = kwargs.get("etherscan_api_key", None)

        export_dir = kwargs.get("export_dir", "crytic-export")
        export_dir = os.path.join(
            export_dir,
            kwargs.get("etherscan_export_dir", "etherscan-contracts"))

        if etherscan_api_key:
            etherscan_url += f"&apikey={etherscan_api_key}"
            etherscan_bytecode_url += f"&apikey={etherscan_api_key}"

        source_code: str = ""
        result: Dict[str, Union[bool, str, int]] = dict()
        contract_name: str = ""

        if not only_bytecode:
            with urllib.request.urlopen(etherscan_url) as response:
                html = response.read()

            info = json.loads(html)

            if "result" in info and info["result"] == "Max rate limit reached":
                LOGGER.error("Etherscan API rate limit exceeded")
                raise InvalidCompilation("Etherscan API rate limit exceeded")

            if "message" not in info:
                LOGGER.error("Incorrect etherscan request")
                raise InvalidCompilation("Incorrect etherscan request " +
                                         etherscan_url)

            if not info["message"].startswith("OK"):
                LOGGER.error("Contract has no public source code")
                raise InvalidCompilation(
                    "Contract has no public source code: " + etherscan_url)

            if "result" not in info:
                LOGGER.error("Contract has no public source code")
                raise InvalidCompilation(
                    "Contract has no public source code: " + etherscan_url)

            result = info["result"][0]
            # Assert to help mypy
            assert isinstance(result["SourceCode"], str)
            assert isinstance(result["ContractName"], str)
            source_code = result["SourceCode"]
            contract_name = result["ContractName"]

        if source_code == "" and not only_source:
            LOGGER.info(
                "Source code not available, try to fetch the bytecode only")

            req = urllib.request.Request(etherscan_bytecode_url,
                                         headers={"User-Agent": "Mozilla/5.0"})
            with urllib.request.urlopen(req) as response:
                html = response.read()

            _handle_bytecode(crytic_compile, target, html)
            return

        if source_code == "":
            LOGGER.error("Contract has no public source code")
            raise InvalidCompilation("Contract has no public source code: " +
                                     etherscan_url)

        if not os.path.exists(export_dir):
            os.makedirs(export_dir)

        # Assert to help mypy
        assert isinstance(result["CompilerVersion"], str)

        compiler_version = re.findall(
            r"\d+\.\d+\.\d+", convert_version(result["CompilerVersion"]))[0]

        optimization_used: bool = result["OptimizationUsed"] == "1"

        solc_arguments = None
        if optimization_used:
            optimized_run = int(result["Runs"])
            solc_arguments = f"--optimize --optimize-runs {optimized_run}"

        working_dir = None
        try:
            # etherscan might return an object with two curly braces, {{ content }}
            dict_source_code = json.loads(source_code[1:-1])
            filename, working_dir = _handle_multiple_files(
                dict_source_code, addr, prefix, contract_name, export_dir)
        except JSONDecodeError:
            try:
                # or etherscan might return an object with single curly braces, { content }
                dict_source_code = json.loads(source_code)
                filename, working_dir = _handle_multiple_files(
                    dict_source_code, addr, prefix, contract_name, export_dir)
            except JSONDecodeError:
                filename = _handle_single_file(source_code, addr, prefix,
                                               contract_name, export_dir)

        compilation_unit = CompilationUnit(crytic_compile, str(filename))

        targets_json = _run_solc(
            compilation_unit,
            filename,
            solc=solc,
            solc_disable_warnings=False,
            solc_arguments=solc_arguments,
            env=dict(os.environ, SOLC_VERSION=compiler_version),
            working_dir=working_dir,
        )

        compilation_unit.compiler_version = CompilerVersion(
            compiler="solc",
            version=compiler_version,
            optimized=optimization_used)

        solc_handle_contracts(targets_json, False, compilation_unit, "",
                              working_dir)

        for path, info in targets_json["sources"].items():
            path = convert_filename(path,
                                    _relative_to_short,
                                    crytic_compile,
                                    working_dir=working_dir)
            crytic_compile.filenames.add(path)
            compilation_unit.asts[path.absolute] = info["AST"]
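As a rough usage sketch of the kwargs handled above, driven through CryticCompile; the address and API key are placeholders, not real values, and a verified contract is required to avoid the "no public source code" path:

from crytic_compile import CryticCompile

# Placeholder address and key; substitute a verified contract address.
cc = CryticCompile(
    "0x0000000000000000000000000000000000000000",
    etherscan_api_key="YOUR_API_KEY",
    export_dir="crytic-export",
    etherscan_export_dir="etherscan-contracts",
)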
Beispiel #19
0
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
        """Run the compilation

        Args:
            crytic_compile (CryticCompile): Associated CryticCompile object
            **kwargs: optional arguments. Used: "buidler_cache_directory", "buidler_ignore_compile", "ignore_compile",
                "buidler_working_dir", "buidler_skip_directory_name_fix", "npx_disable"

        Raises:
            InvalidCompilation: If buidler failed to run
        """

        cache_directory = kwargs.get("buidler_cache_directory", "")
        target_solc_file = os.path.join(cache_directory, "solc-output.json")
        target_vyper_file = os.path.join(cache_directory,
                                         "vyper-docker-updates.json")
        buidler_ignore_compile = kwargs.get("buidler_ignore_compile",
                                            False) or kwargs.get(
                                                "ignore_compile", False)
        buidler_working_dir = kwargs.get("buidler_working_dir", None)
        # See https://github.com/crytic/crytic-compile/issues/116
        skip_directory_name_fix = kwargs.get("buidler_skip_directory_name_fix",
                                             False)

        base_cmd = ["buidler"]
        if not kwargs.get("npx_disable", False):
            base_cmd = ["npx"] + base_cmd

        if not buidler_ignore_compile:
            cmd = base_cmd + ["compile"]

            LOGGER.info(
                "'%s' running",
                " ".join(cmd),
            )

            with subprocess.Popen(
                    cmd,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    cwd=self._target,
                    executable=shutil.which(cmd[0]),
            ) as process:

                stdout_bytes, stderr_bytes = process.communicate()
                stdout, stderr = (
                    stdout_bytes.decode(),
                    stderr_bytes.decode(),
                )  # convert bytestrings to unicode strings

                LOGGER.info(stdout)
                if stderr:
                    LOGGER.error(stderr)

        if not os.path.isfile(os.path.join(self._target, target_solc_file)):
            if os.path.isfile(os.path.join(self._target, target_vyper_file)):
                txt = "Vyper not yet supported with buidler."
                txt += " Please open an issue in https://github.com/crytic/crytic-compile"
                raise InvalidCompilation(txt)
            txt = f"`buidler compile` failed. Can you run it?\n{os.path.join(self._target, target_solc_file)} not found"
            raise InvalidCompilation(txt)

        compilation_unit = CompilationUnit(crytic_compile,
                                           str(target_solc_file))

        (compiler, version_from_config,
         optimized) = _get_version_from_config(Path(cache_directory))

        compilation_unit.compiler_version = CompilerVersion(
            compiler=compiler,
            version=version_from_config,
            optimized=optimized)

        skip_filename = compilation_unit.compiler_version.version in [
            f"0.4.{x}" for x in range(0, 10)
        ]

        with open(target_solc_file, encoding="utf8") as file_desc:
            targets_json = json.load(file_desc)

            if "contracts" in targets_json:
                for original_filename, contracts_info in targets_json[
                        "contracts"].items():
                    for original_contract_name, info in contracts_info.items():
                        contract_name = extract_name(original_contract_name)

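                        # Buidler's cache may drop the leading "c" from "contracts/";
                        # restore it unless the caller opted out (see the issue linked above).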
                        if (original_filename.startswith("ontracts/")
                                and not skip_directory_name_fix):
                            original_filename = "c" + original_filename

                        contract_filename = convert_filename(
                            original_filename,
                            relative_to_short,
                            crytic_compile,
                            working_dir=buidler_working_dir,
                        )

                        compilation_unit.contracts_names.add(contract_name)
                        compilation_unit.filename_to_contracts[
                            contract_filename].add(contract_name)

                        compilation_unit.abis[contract_name] = info["abi"]
                        compilation_unit.bytecodes_init[contract_name] = info[
                            "evm"]["bytecode"]["object"]
                        compilation_unit.bytecodes_runtime[
                            contract_name] = info["evm"]["deployedBytecode"][
                                "object"]
                        compilation_unit.srcmaps_init[contract_name] = info[
                            "evm"]["bytecode"]["sourceMap"].split(";")
                        compilation_unit.srcmaps_runtime[contract_name] = info[
                            "evm"]["deployedBytecode"]["sourceMap"].split(";")
                        userdoc = info.get("userdoc", {})
                        devdoc = info.get("devdoc", {})
                        natspec = Natspec(userdoc, devdoc)
                        compilation_unit.natspec[contract_name] = natspec

            if "sources" in targets_json:
                for path, info in targets_json["sources"].items():

                    if path.startswith(
                            "ontracts/") and not skip_directory_name_fix:
                        path = "c" + path

                    if skip_filename:
                        path = convert_filename(
                            self._target,
                            relative_to_short,
                            crytic_compile,
                            working_dir=buidler_working_dir,
                        )
                    else:
                        path = convert_filename(
                            path,
                            relative_to_short,
                            crytic_compile,
                            working_dir=buidler_working_dir)
                    compilation_unit.filenames.add(path)
                    crytic_compile.filenames.add(path)
                    compilation_unit.asts[path.absolute] = info["ast"]
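For reference, a trimmed, hypothetical sketch of the solc-output.json shape walked above (standard solc JSON output as cached by buidler; path and contract names illustrative):

# Hypothetical trimmed solc standard-JSON output cached by buidler.
solc_output_example = {
    "contracts": {
        "contracts/Token.sol": {
            "Token": {
                "abi": [],
                "userdoc": {},
                "devdoc": {},
                "evm": {
                    "bytecode": {"object": "6080...", "sourceMap": "0:10:0:-;"},
                    "deployedBytecode": {"object": "6080...", "sourceMap": "0:10:0:-;"},
                },
            }
        }
    },
    "sources": {"contracts/Token.sol": {"ast": {}}},
}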