def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
    """Compile an Embark project and populate `crytic_compile` with the results.

    :param crytic_compile: CryticCompile object to populate
    :param kwargs: optional arguments. Used: "embark_ignore_compile",
        "ignore_compile", "embark_overwrite_config", "npx_disable"
    :raises InvalidCompilation: if embark (or its info plugin) is missing,
        cannot run, or did not produce the expected output
    """
    embark_ignore_compile = kwargs.get("embark_ignore_compile", False) or kwargs.get(
        "ignore_compile", False
    )
    embark_overwrite_config = kwargs.get("embark_overwrite_config", False)

    # This plugin exports the AST/ABI information that crytic-compile consumes.
    plugin_name = "@trailofbits/embark-contract-info"
    with open(os.path.join(self._target, "embark.json"), encoding="utf8") as file_desc:
        embark_json = json.load(file_desc)

    if embark_overwrite_config:
        # Ensure the plugin is declared in embark.json; install it if we had to add it.
        write_embark_json = False
        if "plugins" not in embark_json:
            embark_json["plugins"] = {plugin_name: {"flags": ""}}
            write_embark_json = True
        elif plugin_name not in embark_json["plugins"]:
            embark_json["plugins"][plugin_name] = {"flags": ""}
            write_embark_json = True
        if write_embark_json:
            try:
                process = subprocess.Popen(["npm", "install", plugin_name], cwd=self._target)
            except OSError as error:
                raise InvalidCompilation(error) from error
            _, stderr = process.communicate()
            with open(
                os.path.join(self._target, "embark.json"), "w", encoding="utf8"
            ) as outfile:
                json.dump(embark_json, outfile, indent=2)
    else:
        if ("plugins" not in embark_json) or (plugin_name not in embark_json["plugins"]):
            # Bug fix: the message used to say the plugin "was found", which
            # contradicted the condition that raises this error.
            raise InvalidCompilation(
                "embark-contract-info plugin was not found in embark.json. "
                "Please install the plugin (see "
                "https://github.com/crytic/crytic-compile/wiki/Usage#embark)"
                ", or use --embark-overwrite-config."
            )

    if not embark_ignore_compile:
        try:
            cmd = ["embark", "build", "--contracts"]
            if not kwargs.get("npx_disable", False):
                cmd = ["npx"] + cmd
            process = subprocess.Popen(
                cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self._target
            )
        except OSError as error:
            raise InvalidCompilation(error) from error
        stdout, stderr = process.communicate()
        LOGGER.info("%s\n", stdout.decode())
        if stderr:
            # Embark might return information to stderr, but compile without issue
            LOGGER.error("%s", stderr.decode())

    infile = os.path.join(self._target, "crytic-export", "contracts-embark.json")
    if not os.path.isfile(infile):
        # Bug fix: the second install hint used to repeat "npm install -g embark"
        # instead of the plugin's own install command.
        raise InvalidCompilation(
            "Embark did not generate the AST file. Is Embark installed "
            "(npm install -g embark)? Is embark-contract-info installed? "
            "(npm install -g @trailofbits/embark-contract-info)."
        )

    crytic_compile.compiler_version = _get_version(self._target)

    with open(infile, "r", encoding="utf8") as file_desc:
        targets_loaded = json.load(file_desc)

        # Register every AST keyed by its normalized filename.
        for k, ast in targets_loaded["asts"].items():
            filename = convert_filename(
                k, _relative_to_short, crytic_compile, working_dir=self._target
            )
            crytic_compile.asts[filename.absolute] = ast
            crytic_compile.filenames.add(filename)

        if "contracts" not in targets_loaded:
            LOGGER.error(
                "Incorrect json file generated. Are you using %s >= 1.1.0?", plugin_name
            )
            raise InvalidCompilation(
                f"Incorrect json file generated. Are you using {plugin_name} >= 1.1.0?"
            )

        for original_contract_name, info in targets_loaded["contracts"].items():
            contract_name = extract_name(original_contract_name)
            contract_filename = extract_filename(original_contract_name)
            contract_filename = convert_filename(
                contract_filename, _relative_to_short, crytic_compile, working_dir=self._target
            )
            crytic_compile.contracts_filenames[contract_name] = contract_filename
            crytic_compile.contracts_names.add(contract_name)

            # Each artifact field is optional in the plugin output; strip the
            # leading "0x" so bytecodes are stored as bare hex.
            if "abi" in info:
                crytic_compile.abis[contract_name] = info["abi"]
            if "bin" in info:
                crytic_compile.bytecodes_init[contract_name] = info["bin"].replace("0x", "")
            if "bin-runtime" in info:
                crytic_compile.bytecodes_runtime[contract_name] = info["bin-runtime"].replace(
                    "0x", ""
                )
            if "srcmap" in info:
                crytic_compile.srcmaps_init[contract_name] = info["srcmap"].split(";")
            if "srcmap-runtime" in info:
                crytic_compile.srcmaps_runtime[contract_name] = info["srcmap-runtime"].split(
                    ";"
                )

            userdoc = info.get("userdoc", {})
            devdoc = info.get("devdoc", {})
            natspec = Natspec(userdoc, devdoc)
            crytic_compile.natspec[contract_name] = natspec
def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
    """Compile an Etherlime project and populate `crytic_compile` with the results.

    :param crytic_compile: CryticCompile object to populate
    :param kwargs: optional arguments. Used: "etherlime_ignore_compile",
        "ignore_compile", "etherlime_compile_arguments", "npx_disable"
    :raises InvalidCompilation: if etherlime cannot run or the build directory is missing
    """
    etherlime_ignore_compile = kwargs.get("etherlime_ignore_compile", False) or kwargs.get(
        "ignore_compile", False)

    build_directory = "build"
    compile_arguments = kwargs.get("etherlime_compile_arguments", None)

    if not etherlime_ignore_compile:
        cmd = ["etherlime", "compile", self._target, "deleteCompiledFiles=true"]
        if not kwargs.get("npx_disable", False):
            cmd = ["npx"] + cmd
        if compile_arguments:
            cmd += compile_arguments.split(" ")
        try:
            process = subprocess.Popen(
                cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self._target)
        except OSError as error:
            raise InvalidCompilation(error) from error
        stdout_bytes, stderr_bytes = process.communicate()
        stdout, stderr = (
            stdout_bytes.decode(),
            stderr_bytes.decode(),
        )  # convert bytestrings to unicode strings
        LOGGER.info(stdout)
        if stderr:
            LOGGER.error(stderr)

    # Etherlime reuses truffle's artifact layout: a `build/` directory of JSON files.
    if not os.path.isdir(os.path.join(self._target, build_directory)):
        # Bug fix: the message used to reference truffle even though this is
        # the etherlime platform.
        raise InvalidCompilation(
            "No etherlime build directory found, did you run `etherlime compile`?"
        )

    filenames = glob.glob(
        os.path.join(self._target, build_directory, "*.json"))

    version = None
    compiler = "solc-js"

    for file in filenames:
        with open(file, encoding="utf8") as file_desc:
            target_loaded = json.load(file_desc)
            # Take the compiler version from the first artifact that exposes one.
            if version is None:
                if "compiler" in target_loaded:
                    if "version" in target_loaded["compiler"]:
                        version = re.findall(
                            r"\d+\.\d+\.\d+", target_loaded["compiler"]["version"])[0]

            if "ast" not in target_loaded:
                continue

            filename_txt = target_loaded["ast"]["absolutePath"]
            filename = convert_filename(filename_txt, _relative_to_short, crytic_compile)
            crytic_compile.asts[filename.absolute] = target_loaded["ast"]
            crytic_compile.filenames.add(filename)
            contract_name = target_loaded["contractName"]
            crytic_compile.contracts_filenames[contract_name] = filename
            crytic_compile.contracts_names.add(contract_name)
            crytic_compile.abis[contract_name] = target_loaded["abi"]
            crytic_compile.bytecodes_init[contract_name] = target_loaded[
                "bytecode"].replace("0x", "")
            crytic_compile.bytecodes_runtime[
                contract_name] = target_loaded["deployedBytecode"].replace(
                    "0x", "")
            crytic_compile.srcmaps_init[contract_name] = target_loaded[
                "sourceMap"].split(";")
            crytic_compile.srcmaps_runtime[contract_name] = target_loaded[
                "deployedSourceMap"].split(";")

            userdoc = target_loaded.get("userdoc", {})
            devdoc = target_loaded.get("devdoc", {})
            natspec = Natspec(userdoc, devdoc)
            crytic_compile.natspec[contract_name] = natspec

    crytic_compile.compiler_version = CompilerVersion(
        compiler=compiler, version=version, optimized=_is_optimized(compile_arguments))
def compile(
    crytic_compile: "CryticCompile", target: Union[str, dict, "SolcStandardJson"], **kwargs: str
):
    """Compile `target` through solc's standard-JSON interface and populate `crytic_compile`.

    :param crytic_compile: CryticCompile object to populate
    :param target: a path, an already-built standard-JSON dict, or a SolcStandardJson object
    :param kwargs: optional arguments. Used: "solc", "solc_disable_warnings",
        "solc_args", "solc_remaps", "solc_working_dir"
    :return: None (results are stored on `crytic_compile`)
    """
    crytic_compile.type = Type.SOLC_STANDARD_JSON
    solc = kwargs.get("solc", "solc")
    solc_disable_warnings = kwargs.get("solc_disable_warnings", False)
    solc_arguments = kwargs.get("solc_args", "")
    solc_remaps: Optional[Union[str, List[str]]] = kwargs.get("solc_remaps", None)
    solc_working_dir = kwargs.get("solc_working_dir", None)
    crytic_compile.compiler_version = CompilerVersion(
        compiler="solc", version=get_version(solc), optimized=_is_optimized(solc_arguments)
    )
    # solc < 0.4.10 does not expose the filename in the AST, so file paths
    # from the output cannot be trusted for those versions.
    skip_filename = crytic_compile.compiler_version.version in [f"0.4.{x}" for x in range(0, 10)]
    # Initialize our solc input; note `target` is rebound to the wrapper object
    # and is still referenced below in the skip_filename branches.
    target = SolcStandardJson(target)
    # Add all remappings
    if solc_remaps:
        if isinstance(solc_remaps, str):
            solc_remaps = solc_remaps.split(" ")
        for solc_remap in solc_remaps:
            target.add_remapping(solc_remap)
    # Invoke solc
    targets_json = _run_solc_standard_json(
        target.to_dict(), solc, solc_disable_warnings=solc_disable_warnings
    )
    if "contracts" in targets_json:
        for file_path, file_contracts in targets_json["contracts"].items():
            for contract_name, info in file_contracts.items():
                # for solc < 0.4.10 we cant retrieve the filename from the ast
                if skip_filename:
                    # TODO investigate the mypy type issue
                    contract_filename = convert_filename(
                        target, _relative_to_short, crytic_compile, working_dir=solc_working_dir
                    )
                else:
                    contract_filename = convert_filename(
                        file_path, _relative_to_short, crytic_compile, working_dir=solc_working_dir
                    )
                crytic_compile.contracts_names.add(contract_name)
                crytic_compile.contracts_filenames[contract_name] = contract_filename
                crytic_compile.abis[contract_name] = info["abi"]
                crytic_compile.bytecodes_init[contract_name] = info["evm"]["bytecode"]["object"]
                crytic_compile.bytecodes_runtime[contract_name] = info["evm"]["deployedBytecode"][
                    "object"
                ]
                # Source maps are ";"-separated entries per the solc output format.
                crytic_compile.srcmaps_init[contract_name] = info["evm"]["bytecode"][
                    "sourceMap"
                ].split(";")
                crytic_compile.srcmaps_runtime[contract_name] = info["evm"]["deployedBytecode"][
                    "sourceMap"
                ].split(";")
    if "sources" in targets_json:
        for path, info in targets_json["sources"].items():
            if skip_filename:
                path = convert_filename(
                    target, _relative_to_short, crytic_compile, working_dir=solc_working_dir
                )
            else:
                path = convert_filename(
                    path, _relative_to_short, crytic_compile, working_dir=solc_working_dir
                )
            crytic_compile.filenames.add(path)
            crytic_compile.asts[path.absolute] = info["ast"]
def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
    """Compile a Truffle project and populate `crytic_compile` with the artifacts.

    :param kwargs: optional arguments. Used: "truffle_build_directory",
        "truffle_ignore_compile", "ignore_compile", "truffle_version",
        "truffle_overwrite_config", "truffle_overwrite_version", "npx_disable"
    :return: None (results are stored on `crytic_compile`)
    """
    build_directory = kwargs.get("truffle_build_directory", os.path.join("build", "contracts"))
    truffle_ignore_compile = kwargs.get("truffle_ignore_compile", False) or kwargs.get(
        "ignore_compile", False)
    truffle_version = kwargs.get("truffle_version", None)
    # crytic_compile.type = Type.TRUFFLE
    # Truffle on windows has naming conflicts where it will invoke truffle.js directly instead
    # of truffle.cmd (unless in powershell or git bash).
    # The cleanest solution is to explicitly call
    # truffle.cmd. Reference:
    # https://truffleframework.com/docs/truffle/reference/configuration#resolving-naming-conflicts-on-windows
    truffle_overwrite_config = kwargs.get("truffle_overwrite_config", False)
    if platform.system() == "Windows":
        base_cmd = ["truffle.cmd"]
    elif kwargs.get("npx_disable", False):
        base_cmd = ["truffle"]
    else:
        base_cmd = ["npx", "truffle"]
        # Pin a specific truffle version through npx, either from the explicit
        # argument or from the project's package.json dependencies.
        if truffle_version:
            if truffle_version.startswith("truffle"):
                base_cmd = ["npx", truffle_version]
            else:
                base_cmd = ["npx", f"truffle@{truffle_version}"]
        elif os.path.isfile(os.path.join(self._target, "package.json")):
            with open(os.path.join(self._target, "package.json"), encoding="utf8") as file_desc:
                package = json.load(file_desc)
                if "devDependencies" in package:
                    if "truffle" in package["devDependencies"]:
                        version = package["devDependencies"]["truffle"]
                        if version.startswith("^"):
                            version = version[1:]
                        truffle_version = "truffle@{}".format(version)
                        base_cmd = ["npx", truffle_version]
                if "dependencies" in package:
                    if "truffle" in package["dependencies"]:
                        version = package["dependencies"]["truffle"]
                        if version.startswith("^"):
                            version = version[1:]
                        truffle_version = "truffle@{}".format(version)
                        base_cmd = ["npx", truffle_version]
    if not truffle_ignore_compile:
        cmd = base_cmd + ["compile", "--all"]
        LOGGER.info(
            "'%s' running (use --truffle-version [email protected] to use specific version)",
            " ".join(cmd),
        )
        config_used = None
        config_saved = None
        if truffle_overwrite_config:
            overwritten_version = kwargs.get("truffle_overwrite_version", None)
            # If the version is not provided, we try to guess it with the config file
            if overwritten_version is None:
                version_from_config = _get_version_from_config(
                    self._target)
                if version_from_config:
                    overwritten_version, _ = version_from_config
            # Save the config file, and write our temporary config
            config_used, config_saved = _save_config(Path(self._target))
            if config_used is None:
                config_used = Path("truffle-config.js")
            _write_config(Path(self._target), config_used, overwritten_version)
        process = subprocess.Popen(
            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self._target)
        stdout_bytes, stderr_bytes = process.communicate()
        stdout, stderr = (
            stdout_bytes.decode(),
            stderr_bytes.decode(),
        )  # convert bytestrings to unicode strings
        # Restore the user's original config before reporting results.
        if truffle_overwrite_config:
            _reload_config(Path(self._target), config_saved, config_used)
        LOGGER.info(stdout)
        if stderr:
            LOGGER.error(stderr)
    if not os.path.isdir(os.path.join(self._target, build_directory)):
        if os.path.isdir(os.path.join(self._target, "node_modules")):
            raise InvalidCompilation(
                f"External dependencies {build_directory} {self._target} not found, please install them. (npm install)"
            )
        raise InvalidCompilation("`truffle compile` failed. Can you run it?")
    filenames = glob.glob(
        os.path.join(self._target, build_directory, "*.json"))
    optimized = None
    version = None
    compiler = None
    for filename_txt in filenames:
        with open(filename_txt, encoding="utf8") as file_desc:
            target_loaded = json.load(file_desc)
            # pylint: disable=too-many-nested-blocks
            # The optimizer flag lives in the embedded (string-encoded) metadata.
            if optimized is None:
                if "metadata" in target_loaded:
                    metadata = target_loaded["metadata"]
                    try:
                        metadata = json.loads(metadata)
                        if "settings" in metadata:
                            if "optimizer" in metadata["settings"]:
                                if "enabled" in metadata["settings"][
                                        "optimizer"]:
                                    optimized = metadata["settings"][
                                        "optimizer"]["enabled"]
                    except json.decoder.JSONDecodeError:
                        pass
            userdoc = target_loaded.get("userdoc", {})
            devdoc = target_loaded.get("devdoc", {})
            natspec = Natspec(userdoc, devdoc)
            if not "ast" in target_loaded:
                continue
            filename = target_loaded["ast"]["absolutePath"]
            try:
                filename = convert_filename(filename, _relative_to_short,
                                            crytic_compile,
                                            working_dir=self._target)
            except InvalidCompilation as i:
                # Stale artifacts (e.g. from another checkout) are the common cause.
                txt = str(i)
                txt += "\nConsider removing the build/contracts content (rm build/contracts/*)"
                # pylint: disable=raise-missing-from
                raise InvalidCompilation(txt)
            crytic_compile.asts[filename.absolute] = target_loaded["ast"]
            crytic_compile.filenames.add(filename)
            contract_name = target_loaded["contractName"]
            crytic_compile.natspec[contract_name] = natspec
            crytic_compile.contracts_filenames[contract_name] = filename
            crytic_compile.contracts_names.add(contract_name)
            crytic_compile.abis[contract_name] = target_loaded["abi"]
            crytic_compile.bytecodes_init[contract_name] = target_loaded[
                "bytecode"].replace("0x", "")
            crytic_compile.bytecodes_runtime[
                contract_name] = target_loaded["deployedBytecode"].replace(
                    "0x", "")
            crytic_compile.srcmaps_init[contract_name] = target_loaded[
                "sourceMap"].split(";")
            crytic_compile.srcmaps_runtime[contract_name] = target_loaded[
                "deployedSourceMap"].split(";")
            if compiler is None:
                compiler = target_loaded.get("compiler", {}).get("name", None)
            if version is None:
                # NOTE(review): assumes "version" is present whenever "compiler"
                # is; a missing key would make the "+" test raise — confirm.
                version = target_loaded.get("compiler", {}).get("version", None)
                if "+" in version:
                    version = version[0:version.find("+")]
    # Fall back to the config file, then to invoking truffle itself.
    if version is None or compiler is None:
        version_from_config = _get_version_from_config(self._target)
        if version_from_config:
            version, compiler = version_from_config
        else:
            version, compiler = _get_version(base_cmd, cwd=self._target)
    crytic_compile.compiler_version = CompilerVersion(compiler=compiler,
                                                      version=version,
                                                      optimized=optimized)
def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
    """Compile a Buidler project and populate `crytic_compile` with the results.

    :param crytic_compile: CryticCompile object to populate
    :param kwargs: optional arguments. Used: "buidler_cache_directory",
        "buidler_ignore_compile", "ignore_compile", "buidler_working_dir", "npx_disable"
    :raises InvalidCompilation: if buidler failed to run, or the project uses vyper
    """
    cache_directory = kwargs.get("buidler_cache_directory", "")
    target_solc_file = os.path.join(cache_directory, "solc-output.json")
    target_vyper_file = os.path.join(cache_directory, "vyper-docker-updates.json")
    # Bug fix: the generic flag is "ignore_compile" (as in every other
    # platform); "buidler_compile" was a typo that no caller could set.
    buidler_ignore_compile = kwargs.get("buidler_ignore_compile", False) or kwargs.get(
        "ignore_compile", False)
    buidler_working_dir = kwargs.get("buidler_working_dir", None)

    base_cmd = ["buidler"]
    if not kwargs.get("npx_disable", False):
        base_cmd = ["npx"] + base_cmd

    if not buidler_ignore_compile:
        cmd = base_cmd + ["compile"]
        LOGGER.info(
            "'%s' running",
            " ".join(cmd),
        )
        process = subprocess.Popen(
            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self._target)
        stdout_bytes, stderr_bytes = process.communicate()
        stdout, stderr = (
            stdout_bytes.decode(),
            stderr_bytes.decode(),
        )  # convert bytestrings to unicode strings
        LOGGER.info(stdout)
        if stderr:
            LOGGER.error(stderr)

    if not os.path.isfile(os.path.join(self._target, target_solc_file)):
        if os.path.isfile(os.path.join(self._target, target_vyper_file)):
            txt = 'Vyper not yet supported with buidler.'
            txt += ' Please open an issue in https://github.com/crytic/crytic-compile'
            raise InvalidCompilation(txt)
        txt = f"`buidler compile` failed. Can you run it?\n{os.path.join(self._target, target_solc_file)} not found"
        raise InvalidCompilation(txt)

    # NOTE(review): the cache is read relative to the current working directory
    # below, while the existence check above is relative to self._target — confirm.
    (compiler, version_from_config, optimized) = _get_version_from_config(cache_directory)

    crytic_compile.compiler_version = CompilerVersion(
        compiler=compiler, version=version_from_config, optimized=optimized)

    # solc < 0.4.10 does not expose the filename in the AST.
    skip_filename = crytic_compile.compiler_version.version in [
        f"0.4.{x}" for x in range(0, 10)
    ]

    with open(target_solc_file, encoding="utf8") as file_desc:
        targets_json = json.load(file_desc)

        if "contracts" in targets_json:
            for original_filename, contracts_info in targets_json["contracts"].items():
                for original_contract_name, info in contracts_info.items():
                    contract_name = extract_name(original_contract_name)
                    contract_filename = convert_filename(
                        original_filename,
                        relative_to_short,
                        crytic_compile,
                        working_dir=buidler_working_dir,
                    )
                    crytic_compile.contracts_names.add(contract_name)
                    crytic_compile.contracts_filenames[contract_name] = contract_filename
                    crytic_compile.abis[contract_name] = info["abi"]
                    crytic_compile.bytecodes_init[contract_name] = info[
                        "evm"]["bytecode"]["object"]
                    crytic_compile.bytecodes_runtime[contract_name] = info[
                        "evm"]["deployedBytecode"]["object"]
                    crytic_compile.srcmaps_init[contract_name] = info[
                        "evm"]["bytecode"]["sourceMap"].split(";")
                    # Bug fix: the runtime source map must come from
                    # deployedBytecode, not the creation bytecode.
                    crytic_compile.srcmaps_runtime[contract_name] = info[
                        "evm"]["deployedBytecode"]["sourceMap"].split(";")
                    # Buidler serializes natspec as JSON strings.
                    userdoc = json.loads(info.get("userdoc", "{}"))
                    devdoc = json.loads(info.get("devdoc", "{}"))
                    natspec = Natspec(userdoc, devdoc)
                    crytic_compile.natspec[contract_name] = natspec

        if "sources" in targets_json:
            for path, info in targets_json["sources"].items():
                if skip_filename:
                    path = convert_filename(
                        self._target,
                        relative_to_short,
                        crytic_compile,
                        working_dir=buidler_working_dir,
                    )
                else:
                    path = convert_filename(
                        path, relative_to_short, crytic_compile,
                        working_dir=buidler_working_dir)
                crytic_compile.filenames.add(path)
                crytic_compile.asts[path.absolute] = info["ast"]
def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
    """Compile a Foundry project with `forge build` and populate a CompilationUnit.

    Args:
        crytic_compile (CryticCompile): CryticCompile object to populate
        **kwargs: optional arguments. Used: "foundry_ignore_compile", "ignore_compile",
            "foundry_out_directory"

    Raises:
        InvalidCompilation: If foundry failed to run
    """
    ignore_compile = kwargs.get("foundry_ignore_compile", False) or kwargs.get(
        "ignore_compile", False)
    out_directory = kwargs.get("foundry_out_directory", "out")

    if ignore_compile:
        LOGGER.info(
            "--ignore-compile used, if something goes wrong, consider removing the ignore compile flag"
        )

    if not ignore_compile:
        # Request every extra artifact crytic-compile consumes; --force ensures
        # a full rebuild so the out directory matches the sources.
        cmd = [
            "forge",
            "build",
            "--extra-output",
            "abi",
            "--extra-output",
            "userdoc",
            "--extra-output",
            "devdoc",
            "--extra-output",
            "evm.methodIdentifiers",
            "--force",
        ]
        LOGGER.info(
            "'%s' running",
            " ".join(cmd),
        )
        with subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=self._target,
                executable=shutil.which(cmd[0]),
        ) as process:
            stdout_bytes, stderr_bytes = process.communicate()
            stdout, stderr = (
                stdout_bytes.decode(),
                stderr_bytes.decode(),
            )  # convert bytestrings to unicode strings
            LOGGER.info(stdout)
            if stderr:
                LOGGER.error(stderr)

    filenames = Path(self._target, out_directory).rglob("*.json")

    # foundry only support solc for now
    compiler = "solc"
    compilation_unit = CompilationUnit(crytic_compile, str(self._target))
    for filename_txt in filenames:
        with open(filename_txt, encoding="utf8") as file_desc:
            target_loaded = json.load(file_desc)
            userdoc = target_loaded.get("userdoc", {})
            devdoc = target_loaded.get("devdoc", {})
            natspec = Natspec(userdoc, devdoc)
            # Skip artifacts without an AST (e.g. non-contract JSON files).
            if not "ast" in target_loaded:
                continue
            filename = target_loaded["ast"]["absolutePath"]
            try:
                filename = convert_filename(filename, lambda x: x,
                                            crytic_compile,
                                            working_dir=self._target)
            except InvalidCompilation as i:
                txt = str(i)
                txt += "\nSomething went wrong, please open an issue in https://github.com/crytic/crytic-compile"
                # pylint: disable=raise-missing-from
                raise InvalidCompilation(txt)
            compilation_unit.asts[filename.absolute] = target_loaded["ast"]
            crytic_compile.filenames.add(filename)
            compilation_unit.filenames.add(filename)
            # The contract name is the artifact's basename without ".json".
            contract_name = filename_txt.parts[-1]
            contract_name = contract_name[:-len(".json")]
            compilation_unit.natspec[contract_name] = natspec
            compilation_unit.filename_to_contracts[filename].add(
                contract_name)
            compilation_unit.contracts_names.add(contract_name)
            compilation_unit.abis[contract_name] = target_loaded["abi"]
            compilation_unit.bytecodes_init[contract_name] = target_loaded[
                "bytecode"]["object"].replace("0x", "")
            compilation_unit.bytecodes_runtime[
                contract_name] = target_loaded["deployedBytecode"][
                    "object"].replace("0x", "")
            compilation_unit.srcmaps_init[contract_name] = target_loaded[
                "bytecode"]["sourceMap"].split(";")
            compilation_unit.srcmaps_runtime[
                contract_name] = target_loaded["deployedBytecode"][
                    "sourceMap"].split(";")

    # Compiler settings come from the foundry config, not the artifacts.
    version, optimized, runs = _get_config_info(self._target)
    compilation_unit.compiler_version = CompilerVersion(
        compiler=compiler, version=version, optimized=optimized, optimize_runs=runs)
def compile(crytic_compile: "CryticCompile", target: str, **kwargs: str):
    """Compile an Etherlime project and populate `crytic_compile` with the results.

    :param crytic_compile: CryticCompile object to populate
    :param target: path to the etherlime project
    :param kwargs: optional arguments. Used: "etherlime_ignore_compile",
        "etherlime_compile_arguments"
    :raises InvalidCompilation: if etherlime cannot run or the build directory is missing
    """
    etherlime_ignore_compile = kwargs.get("etherlime_ignore_compile", False)
    crytic_compile.type = Type.ETHERLIME

    build_directory = "build"
    compile_arguments = kwargs.get("etherlime_compile_arguments", None)

    if not etherlime_ignore_compile:
        cmd = ["etherlime", "compile", target]
        if compile_arguments:
            cmd += compile_arguments.split(" ")
        # Robustness: surface a missing etherlime binary as an InvalidCompilation
        # (consistent with the other platforms) instead of a raw OSError.
        try:
            process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        except OSError as error:
            raise InvalidCompilation(error) from error
        stdout_bytes, stderr_bytes = process.communicate()
        stdout, stderr = (
            stdout_bytes.decode(),
            stderr_bytes.decode(),
        )  # convert bytestrings to unicode strings
        LOGGER.info(stdout)
        if stderr:
            LOGGER.error(stderr)

    # similar to truffle: etherlime emits a `build/` directory of JSON artifacts
    if not os.path.isdir(os.path.join(target, build_directory)):
        # Bug fix: the message used to reference truffle even though this is
        # the etherlime platform.
        raise InvalidCompilation(
            "No etherlime build directory found, did you run `etherlime compile`?"
        )

    filenames = glob.glob(os.path.join(target, build_directory, "*.json"))

    version = None
    compiler = "solc-js"

    for file in filenames:
        with open(file, encoding="utf8") as file_desc:
            target_loaded = json.load(file_desc)
            # Take the compiler version from the first artifact that exposes one.
            if version is None:
                if "compiler" in target_loaded:
                    if "version" in target_loaded["compiler"]:
                        version = re.findall(
                            r"\d+\.\d+\.\d+", target_loaded["compiler"]["version"]
                        )[0]

            if "ast" not in target_loaded:
                continue

            filename_txt = target_loaded["ast"]["absolutePath"]
            filename = convert_filename(filename_txt, _relative_to_short, crytic_compile)
            crytic_compile.asts[filename.absolute] = target_loaded["ast"]
            crytic_compile.filenames.add(filename)
            contract_name = target_loaded["contractName"]
            crytic_compile.contracts_filenames[contract_name] = filename
            crytic_compile.contracts_names.add(contract_name)
            crytic_compile.abis[contract_name] = target_loaded["abi"]
            crytic_compile.bytecodes_init[contract_name] = target_loaded["bytecode"].replace(
                "0x", ""
            )
            crytic_compile.bytecodes_runtime[contract_name] = target_loaded[
                "deployedBytecode"
            ].replace("0x", "")
            crytic_compile.srcmaps_init[contract_name] = target_loaded["sourceMap"].split(";")
            crytic_compile.srcmaps_runtime[contract_name] = target_loaded[
                "deployedSourceMap"
            ].split(";")

    crytic_compile.compiler_version = CompilerVersion(
        compiler=compiler, version=version, optimized=_is_optimized(compile_arguments)
    )
def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
    """Run the compilation

    Args:
        crytic_compile (CryticCompile): Associated CryticCompile object
        **kwargs: optional arguments. Used: "hardhat_ignore", "hardhat_ignore_compile",
            "ignore_compile", "hardhat_artifacts_directory","hardhat_working_dir","npx_disable"

    Raises:
        InvalidCompilation: If hardhat failed to run
    """
    hardhat_ignore_compile = kwargs.get("hardhat_ignore_compile", False) or kwargs.get(
        "ignore_compile", False
    )
    build_directory = Path(
        self._target, kwargs.get("hardhat_artifacts_directory", "artifacts"), "build-info"
    )
    hardhat_working_dir = kwargs.get("hardhat_working_dir", self._target)
    base_cmd = ["hardhat"]
    if not kwargs.get("npx_disable", False):
        base_cmd = ["npx"] + base_cmd
    if not hardhat_ignore_compile:
        cmd = base_cmd + ["compile", "--force"]
        LOGGER.info(
            "'%s' running",
            " ".join(cmd),
        )
        with subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=self._target,
            executable=shutil.which(cmd[0]),
        ) as process:
            stdout_bytes, stderr_bytes = process.communicate()
            stdout, stderr = (
                stdout_bytes.decode(),
                stderr_bytes.decode(),
            )  # convert bytestrings to unicode strings
            LOGGER.info(stdout)
            if stderr:
                LOGGER.error(stderr)
    # Process build-info files oldest-first so later compilations win on conflicts.
    files = sorted(
        os.listdir(build_directory), key=lambda x: os.path.getmtime(Path(build_directory, x))
    )
    files = [f for f in files if f.endswith(".json")]
    if not files:
        txt = f"`hardhat compile` failed. Can you run it?\n{build_directory} is empty"
        raise InvalidCompilation(txt)
    # Each build-info file becomes its own compilation unit.
    for file in files:
        build_info = Path(build_directory, file)
        # The file should always end in .json (filtered above); guard the slice anyway.
        uniq_id = file if ".json" not in file else file[0:-5]
        compilation_unit = CompilationUnit(crytic_compile, uniq_id)
        with open(build_info, encoding="utf8") as file_desc:
            loaded_json = json.load(file_desc)
            targets_json = loaded_json["output"]
            version_from_config = loaded_json["solcVersion"]
            # TODO: support vyper
            input_json = loaded_json["input"]
            compiler = "solc" if input_json["language"] == "Solidity" else "vyper"
            optimized = input_json["settings"]["optimizer"]["enabled"]
            compilation_unit.compiler_version = CompilerVersion(
                compiler=compiler, version=version_from_config, optimized=optimized
            )
            # solc < 0.4.10 does not expose the filename in the AST.
            skip_filename = compilation_unit.compiler_version.version in [
                f"0.4.{x}" for x in range(0, 10)
            ]
            if "contracts" in targets_json:
                for original_filename, contracts_info in targets_json["contracts"].items():
                    for original_contract_name, info in contracts_info.items():
                        contract_name = extract_name(original_contract_name)
                        contract_filename = convert_filename(
                            original_filename,
                            relative_to_short,
                            crytic_compile,
                            working_dir=hardhat_working_dir,
                        )
                        compilation_unit.contracts_names.add(contract_name)
                        compilation_unit.filename_to_contracts[contract_filename].add(
                            contract_name
                        )
                        compilation_unit.abis[contract_name] = info["abi"]
                        compilation_unit.bytecodes_init[contract_name] = info["evm"][
                            "bytecode"
                        ]["object"]
                        compilation_unit.bytecodes_runtime[contract_name] = info["evm"][
                            "deployedBytecode"
                        ]["object"]
                        compilation_unit.srcmaps_init[contract_name] = info["evm"]["bytecode"][
                            "sourceMap"
                        ].split(";")
                        compilation_unit.srcmaps_runtime[contract_name] = info["evm"][
                            "deployedBytecode"
                        ]["sourceMap"].split(";")
                        userdoc = info.get("userdoc", {})
                        devdoc = info.get("devdoc", {})
                        natspec = Natspec(userdoc, devdoc)
                        compilation_unit.natspec[contract_name] = natspec
            if "sources" in targets_json:
                for path, info in targets_json["sources"].items():
                    if skip_filename:
                        path = convert_filename(
                            self._target,
                            relative_to_short,
                            crytic_compile,
                            working_dir=hardhat_working_dir,
                        )
                    else:
                        path = convert_filename(
                            path,
                            relative_to_short,
                            crytic_compile,
                            working_dir=hardhat_working_dir,
                        )
                    crytic_compile.filenames.add(path)
                    compilation_unit.filenames.add(path)
                    compilation_unit.asts[path.absolute] = info["ast"]
def compile(self, crytic_compile: "CryticCompile", **kwargs: Any) -> None:
    """Run the compilation

    Args:
        crytic_compile (CryticCompile): Associated CryticCompile object
        **kwargs: optional arguments. Used "etherlime_ignore_compile", "ignore_compile",
            "etherlime_compile_arguments", "npx_disable"

    Raises:
        InvalidCompilation: if etherlime failed to run
    """
    etherlime_ignore_compile = kwargs.get("etherlime_ignore_compile", False) or kwargs.get(
        "ignore_compile", False)

    build_directory = "build"
    compile_arguments: Optional[str] = kwargs.get(
        "etherlime_compile_arguments", None)
    npx_disable: bool = kwargs.get("npx_disable", False)

    if not etherlime_ignore_compile:
        _run_etherlime(self._target, npx_disable, compile_arguments)

    # Etherlime reuses truffle's artifact layout: a `build/` directory of JSON files.
    if not os.path.isdir(os.path.join(self._target, build_directory)):
        # Bug fix: the message used to reference truffle even though this is
        # the etherlime platform.
        raise InvalidCompilation(
            "No etherlime build directory found, did you run `etherlime compile`?"
        )

    filenames = glob.glob(
        os.path.join(self._target, build_directory, "*.json"))

    version = None
    compiler = "solc-js"

    compilation_unit = CompilationUnit(crytic_compile, str(self._target))

    for file in filenames:
        with open(file, encoding="utf8") as file_desc:
            target_loaded = json.load(file_desc)
            # Take the compiler version from the first artifact that exposes one.
            if version is None:
                if "compiler" in target_loaded:
                    if "version" in target_loaded["compiler"]:
                        version = re.findall(
                            r"\d+\.\d+\.\d+", target_loaded["compiler"]["version"])[0]

            if "ast" not in target_loaded:
                continue

            filename_txt = target_loaded["ast"]["absolutePath"]
            filename = convert_filename(filename_txt, _relative_to_short, crytic_compile)
            compilation_unit.asts[filename.absolute] = target_loaded["ast"]
            compilation_unit.filenames.add(filename)
            crytic_compile.filenames.add(filename)
            contract_name = target_loaded["contractName"]
            compilation_unit.filename_to_contracts[filename].add(
                contract_name)
            compilation_unit.contracts_names.add(contract_name)
            compilation_unit.abis[contract_name] = target_loaded["abi"]
            compilation_unit.bytecodes_init[contract_name] = target_loaded[
                "bytecode"].replace("0x", "")
            compilation_unit.bytecodes_runtime[
                contract_name] = target_loaded["deployedBytecode"].replace(
                    "0x", "")
            compilation_unit.srcmaps_init[contract_name] = target_loaded[
                "sourceMap"].split(";")
            compilation_unit.srcmaps_runtime[
                contract_name] = target_loaded["deployedSourceMap"].split(
                    ";")

            userdoc = target_loaded.get("userdoc", {})
            devdoc = target_loaded.get("devdoc", {})
            natspec = Natspec(userdoc, devdoc)
            compilation_unit.natspec[contract_name] = natspec

    compilation_unit.compiler_version = CompilerVersion(
        compiler=compiler, version=version, optimized=_is_optimized(compile_arguments))
def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
    """Compile a Hardhat project and populate `crytic_compile` from its build-info.

    :param kwargs: optional arguments. Used: "hardhat_ignore_compile", "ignore_compile",
        "hardhat_cache_directory", "hardhat_artifacts_directory",
        "hardhat_working_dir", "npx_disable"
    :return: None (results are stored on `crytic_compile`)
    """
    hardhat_ignore_compile = kwargs.get("hardhat_ignore_compile", False) or kwargs.get(
        "ignore_compile", False)
    cache_directory = kwargs.get("hardhat_cache_directory", "cache")
    # The cache file records which compiler hardhat used.
    config_file = Path(cache_directory, "solidity-files-cache.json")
    build_directory = Path(
        kwargs.get("hardhat_artifacts_directory", "artifacts"), "build-info")
    hardhat_working_dir = kwargs.get("hardhat_working_dir", None)
    base_cmd = ["hardhat"]
    if not kwargs.get("npx_disable", False):
        base_cmd = ["npx"] + base_cmd
    if not hardhat_ignore_compile:
        cmd = base_cmd + ["compile"]
        LOGGER.info(
            "'%s' running",
            " ".join(cmd),
        )
        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   cwd=self._target)
        stdout_bytes, stderr_bytes = process.communicate()
        stdout, stderr = (
            stdout_bytes.decode(),
            stderr_bytes.decode(),
        )  # convert bytestrings to unicode strings
        LOGGER.info(stdout)
        if stderr:
            LOGGER.error(stderr)
    (compiler, version_from_config, optimized) = _get_version_from_config(config_file)
    crytic_compile.compiler_version = CompilerVersion(
        compiler=compiler, version=version_from_config, optimized=optimized)
    # solc < 0.4.10 does not expose the filename in the AST.
    skip_filename = crytic_compile.compiler_version.version in [
        f"0.4.{x}" for x in range(0, 10)
    ]
    files = sorted(
        os.listdir(build_directory),
        key=lambda x: os.path.getmtime(Path(build_directory, x)))
    if not files:
        txt = f"`hardhat compile` failed. Can you run it?\n{build_directory} is empty"
        raise InvalidCompilation(txt)
    # NOTE(review): files are sorted by mtime ascending, so files[0] is the
    # OLDEST build-info file — confirm the newest was not intended here.
    build_info = Path(build_directory, files[0])
    with open(build_info, encoding="utf8") as file_desc:
        targets_json = json.load(file_desc)["output"]
        if "contracts" in targets_json:
            for original_filename, contracts_info in targets_json[
                    "contracts"].items():
                for original_contract_name, info in contracts_info.items():
                    contract_name = extract_name(original_contract_name)
                    contract_filename = convert_filename(
                        original_filename,
                        relative_to_short,
                        crytic_compile,
                        working_dir=hardhat_working_dir,
                    )
                    crytic_compile.contracts_names.add(contract_name)
                    crytic_compile.contracts_filenames[
                        contract_name] = contract_filename
                    crytic_compile.abis[contract_name] = info["abi"]
                    crytic_compile.bytecodes_init[contract_name] = info[
                        "evm"]["bytecode"]["object"]
                    crytic_compile.bytecodes_runtime[contract_name] = info[
                        "evm"]["deployedBytecode"]["object"]
                    crytic_compile.srcmaps_init[contract_name] = info[
                        "evm"]["bytecode"]["sourceMap"].split(";")
                    crytic_compile.srcmaps_runtime[contract_name] = info[
                        "evm"]["deployedBytecode"]["sourceMap"].split(";")
                    userdoc = info.get("userdoc", {})
                    devdoc = info.get("devdoc", {})
                    natspec = Natspec(userdoc, devdoc)
                    crytic_compile.natspec[contract_name] = natspec
        if "sources" in targets_json:
            for path, info in targets_json["sources"].items():
                if skip_filename:
                    path = convert_filename(
                        self._target,
                        relative_to_short,
                        crytic_compile,
                        working_dir=hardhat_working_dir,
                    )
                else:
                    path = convert_filename(
                        path, relative_to_short, crytic_compile,
                        working_dir=hardhat_working_dir)
                crytic_compile.filenames.add(path)
                crytic_compile.asts[path.absolute] = info["ast"]
def compile(crytic_compile: "CryticCompile", target: str, **kwargs: str):
    """Compile a Waffle project and load the artifacts into `crytic_compile`.

    :param crytic_compile: the CryticCompile object to populate
    :param target: path to the Waffle project
    :param kwargs: optional arguments. Used: "waffle_ignore_compile",
        "waffle_config_file", "npx_disable"
    :raises InvalidCompilation: if waffle could not be run, or its output
        (build directory / Combined-Json.json) is missing
    """
    waffle_ignore_compile = kwargs.get("waffle_ignore_compile", False)
    crytic_compile.type = Type.WAFFLE

    cmd = ["waffle"]
    if not kwargs.get("npx_disable", False):
        cmd = ["npx"] + cmd

    # Default behaviour (without any config_file)
    build_directory = os.path.join("build")
    compiler = "native"
    version = _get_version(compiler, target)
    config = dict()

    config_file = kwargs.get("waffle_config_file", None)
    # Read config file
    if config_file:
        config = _load_config(config_file)
        version = _get_version(compiler, target, config=config)
        if "targetPath" in config:
            build_directory = config["targetPath"]
        if "compiler" in config:
            compiler = config["compiler"]

    # Force "all" output so the AST/bytecode/srcmaps below are emitted
    if "outputType" not in config or config["outputType"] != "all":
        config["outputType"] = "all"

    needed_config = {
        "compilerOptions": {
            "outputSelection": {
                "*": {
                    "*": [
                        "evm.bytecode.object",
                        "evm.deployedBytecode.object",
                        "abi",
                        "evm.bytecode.sourceMap",
                        "evm.deployedBytecode.sourceMap",
                    ],
                    "": ["ast"],
                }
            }
        }
    }

    # Set the config as it should be (merge needed_config into any
    # user-supplied outputSelection rather than clobbering it)
    if "compilerOptions" in config:
        curr_config = config["compilerOptions"]
        curr_needed_config = needed_config["compilerOptions"]
        if "outputSelection" in curr_config:
            curr_config = curr_config["outputSelection"]
            curr_needed_config = curr_needed_config["outputSelection"]
            if "*" in curr_config:
                curr_config = curr_config["*"]
                curr_needed_config = curr_needed_config["*"]
                if "*" in curr_config:
                    curr_config["*"] += curr_needed_config["*"]
                else:
                    curr_config["*"] = curr_needed_config["*"]
                if "" in curr_config:
                    curr_config[""] += curr_needed_config[""]
                else:
                    curr_config[""] = curr_needed_config[""]
            else:
                curr_config["*"] = curr_needed_config["*"]
        else:
            curr_config["outputSelection"] = curr_needed_config["outputSelection"]
    else:
        config["compilerOptions"] = needed_config["compilerOptions"]

    if not waffle_ignore_compile:
        # BUGFIX: create the temporary config inside `target`. Waffle runs
        # with cwd=target, so the previous os.path.relpath(file_desc.name)
        # (relative to *our* CWD, pointing into the system temp dir) did not
        # resolve from the subprocess whenever CWD != target.
        with tempfile.NamedTemporaryFile(mode="w", suffix=".json", dir=target) as file_desc:
            json.dump(config, file_desc)
            file_desc.flush()
            cmd += [os.path.basename(file_desc.name)]

            LOGGER.info("Temporary file created: %s", file_desc.name)
            LOGGER.info("'%s' running", " ".join(cmd))

            try:
                process = subprocess.Popen(
                    cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=target
                )
            except OSError as error:
                raise InvalidCompilation(error) from error
            stdout, stderr = process.communicate()
            stdout, stderr = (
                stdout.decode(),
                stderr.decode(),
            )  # convert bytestrings to unicode strings
            if stdout:
                LOGGER.info(stdout)
            if stderr:
                LOGGER.error(stderr)

    if not os.path.isdir(os.path.join(target, build_directory)):
        raise InvalidCompilation("`waffle` compilation failed: build directory not found")

    combined_path = os.path.join(target, build_directory, "Combined-Json.json")
    if not os.path.exists(combined_path):
        raise InvalidCompilation("`Combined-Json.json` not found")

    with open(combined_path, "r", encoding="utf8") as file_desc:
        target_all = json.load(file_desc)

    optimized = None

    for contract in target_all["contracts"]:
        target_loaded = target_all["contracts"][contract]
        contract = contract.split(":")  # "path:ContractName"
        filename_rel = os.path.join(target, contract[0])
        filename = convert_filename(
            filename_rel, _relative_to_short, crytic_compile, working_dir=target
        )
        contract_name = contract[1]

        crytic_compile.asts[filename.absolute] = target_all["sources"][contract[0]]["AST"]
        crytic_compile.filenames.add(filename)
        crytic_compile.contracts_filenames[contract_name] = filename
        crytic_compile.contracts_names.add(contract_name)
        crytic_compile.abis[contract_name] = target_loaded["abi"]
        crytic_compile.bytecodes_init[contract_name] = target_loaded["evm"]["bytecode"][
            "object"
        ]
        crytic_compile.srcmaps_init[contract_name] = target_loaded["evm"]["bytecode"][
            "sourceMap"
        ].split(";")
        crytic_compile.bytecodes_runtime[contract_name] = target_loaded["evm"][
            "deployedBytecode"
        ]["object"]
        crytic_compile.srcmaps_runtime[contract_name] = target_loaded["evm"][
            "deployedBytecode"
        ]["sourceMap"].split(";")

    crytic_compile.compiler_version = CompilerVersion(
        compiler=compiler, version=version, optimized=optimized
    )
def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
    """Fetch a contract from Etherscan (or an alternative explorer) and compile it.

    :param crytic_compile: the CryticCompile object to populate
    :param kwargs: optional arguments. Used: "solc",
        "etherscan_only_source_code", "etherscan_only_bytecode",
        "etherscan_api_key", "export_dir", "etherscan_export_dir"
    :raises InvalidCompilation: on rate limiting, malformed responses, or
        contracts without public source code
    """
    target = self._target
    solc = kwargs.get("solc", "solc")

    # Derive the API URLs from the network prefix embedded in the target.
    key = target[: target.find(":") + 1]
    addr = target[target.find(":") + 1 :]
    if target.startswith(tuple(SUPPORTED_NETWORK)):
        prefix: Union[None, str] = SUPPORTED_NETWORK[key][0]
        etherscan_url = ETHERSCAN_BASE % (prefix, addr)
        etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % (SUPPORTED_NETWORK[key][1], addr)
    elif target.startswith(tuple(ALT_NETWORK)):
        prefix = None
        etherscan_url = ALT_NETWORK[key][0] % (addr)
        etherscan_bytecode_url = ALT_NETWORK[key][1] % (addr)
    else:
        prefix = None
        addr = target
        etherscan_url = ETHERSCAN_BASE % ("", target)
        etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % ("", target)

    only_source = kwargs.get("etherscan_only_source_code", False)
    only_bytecode = kwargs.get("etherscan_only_bytecode", False)
    etherscan_api_key = kwargs.get("etherscan_api_key", None)

    export_dir = kwargs.get("export_dir", "crytic-export")
    export_dir = os.path.join(
        export_dir, kwargs.get("etherscan_export_dir", "etherscan-contracts")
    )

    if etherscan_api_key:
        etherscan_url += f"&apikey={etherscan_api_key}"
        etherscan_bytecode_url += f"&apikey={etherscan_api_key}"

    source_code: str = ""
    result: Dict[str, Union[bool, str, int]] = {}
    contract_name: str = ""

    if not only_bytecode:
        with urllib.request.urlopen(etherscan_url) as response:
            raw = response.read()

        info = json.loads(raw)

        if "result" in info and info["result"] == "Max rate limit reached":
            LOGGER.error("Etherscan API rate limit exceeded")
            raise InvalidCompilation("Etherscan api rate limit exceeded")
        if "message" not in info:
            LOGGER.error("Incorrect etherscan request")
            raise InvalidCompilation("Incorrect etherscan request " + etherscan_url)
        if not info["message"].startswith("OK"):
            LOGGER.error("Contract has no public source code")
            raise InvalidCompilation("Contract has no public source code: " + etherscan_url)
        if "result" not in info:
            LOGGER.error("Contract has no public source code")
            raise InvalidCompilation("Contract has no public source code: " + etherscan_url)

        result = info["result"][0]
        # Assert to help mypy
        assert isinstance(result["SourceCode"], str)
        assert isinstance(result["ContractName"], str)
        source_code = result["SourceCode"]
        contract_name = result["ContractName"]

    if source_code == "" and not only_source:
        LOGGER.info("Source code not available, try to fetch the bytecode only")
        req = urllib.request.Request(
            etherscan_bytecode_url, headers={"User-Agent": "Mozilla/5.0"}
        )
        with urllib.request.urlopen(req) as response:
            raw = response.read()
        _handle_bytecode(crytic_compile, target, raw)
        return

    if source_code == "":
        LOGGER.error("Contract has no public source code")
        raise InvalidCompilation("Contract has no public source code: " + etherscan_url)

    if not os.path.exists(export_dir):
        os.makedirs(export_dir)

    # Assert to help mypy
    assert isinstance(result["CompilerVersion"], str)
    compiler_version = re.findall(
        r"\d+\.\d+\.\d+", convert_version(result["CompilerVersion"])
    )[0]

    optimization_used: bool = result["OptimizationUsed"] == "1"
    solc_arguments = None
    if optimization_used:
        solc_arguments = f"--optimize --optimize-runs {int(result['Runs'])}"

    working_dir = None
    try:
        # etherscan might return an object with two curly braces, {{ content }}
        dict_source_code = json.loads(source_code[1:-1])
        filename, working_dir = _handle_multiple_files(
            dict_source_code, addr, prefix, contract_name, export_dir
        )
    except JSONDecodeError:
        try:
            # or etherscan might return an object with single curly braces, { content }
            dict_source_code = json.loads(source_code)
            filename, working_dir = _handle_multiple_files(
                dict_source_code, addr, prefix, contract_name, export_dir
            )
        except JSONDecodeError:
            # plain (non-JSON) source: write it out as a single .sol file
            filename = _handle_single_file(source_code, addr, prefix, contract_name, export_dir)

    compilation_unit = CompilationUnit(crytic_compile, str(filename))

    targets_json = _run_solc(
        compilation_unit,
        filename,
        solc=solc,
        solc_disable_warnings=False,
        solc_arguments=solc_arguments,
        env=dict(os.environ, SOLC_VERSION=compiler_version),
        working_dir=working_dir,
    )

    compilation_unit.compiler_version = CompilerVersion(
        compiler="solc", version=compiler_version, optimized=optimization_used
    )

    solc_handle_contracts(targets_json, False, compilation_unit, "", working_dir)

    for path, info in targets_json["sources"].items():
        path = convert_filename(
            path, _relative_to_short, crytic_compile, working_dir=working_dir
        )
        crytic_compile.filenames.add(path)
        compilation_unit.asts[path.absolute] = info["AST"]
def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
    """Run the compilation

    Args:
        crytic_compile (CryticCompile): Associated CryticCompile objects
        **kwargs: optional arguments. Used: "buidler_cache_directory",
            "buidler_ignore_compile", "ignore_compile", "buidler_working_dir",
            "buidler_skip_directory_name_fix", "npx_disable"

    Raises:
        InvalidCompilation: If buidler failed to run
    """
    cache_directory = kwargs.get("buidler_cache_directory", "")
    target_solc_file = os.path.join(cache_directory, "solc-output.json")
    target_vyper_file = os.path.join(cache_directory, "vyper-docker-updates.json")
    buidler_ignore_compile = kwargs.get("buidler_ignore_compile", False) or kwargs.get(
        "ignore_compile", False
    )
    buidler_working_dir = kwargs.get("buidler_working_dir", None)
    # See https://github.com/crytic/crytic-compile/issues/116
    skip_directory_name_fix = kwargs.get("buidler_skip_directory_name_fix", False)

    base_cmd = ["buidler"]
    if not kwargs.get("npx_disable", False):
        base_cmd = ["npx"] + base_cmd

    if not buidler_ignore_compile:
        cmd = base_cmd + ["compile"]
        LOGGER.info(
            "'%s' running",
            " ".join(cmd),
        )

        with subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=self._target,
            executable=shutil.which(cmd[0]),
        ) as process:
            out_bytes, err_bytes = process.communicate()
            # convert bytestrings to unicode strings
            out_text, err_text = out_bytes.decode(), err_bytes.decode()
            LOGGER.info(out_text)
            if err_text:
                LOGGER.error(err_text)

    if not os.path.isfile(os.path.join(self._target, target_solc_file)):
        if os.path.isfile(os.path.join(self._target, target_vyper_file)):
            txt = "Vyper not yet supported with buidler."
            txt += " Please open an issue in https://github.com/crytic/crytic-compile"
            raise InvalidCompilation(txt)
        txt = f"`buidler compile` failed. Can you run it?\n{os.path.join(self._target, target_solc_file)} not found"
        raise InvalidCompilation(txt)

    compilation_unit = CompilationUnit(crytic_compile, str(target_solc_file))

    (compiler, version_from_config, optimized) = _get_version_from_config(Path(cache_directory))

    compilation_unit.compiler_version = CompilerVersion(
        compiler=compiler, version=version_from_config, optimized=optimized
    )

    # Old solc releases cannot report a filename through the AST
    skip_filename = compilation_unit.compiler_version.version in [
        f"0.4.{x}" for x in range(0, 10)
    ]

    with open(target_solc_file, encoding="utf8") as file_desc:
        targets_json = json.load(file_desc)

    if "contracts" in targets_json:
        for original_filename, contracts_info in targets_json["contracts"].items():
            for original_contract_name, info in contracts_info.items():
                contract_name = extract_name(original_contract_name)

                # Buidler strips the leading "c" from "contracts/" paths;
                # restore it unless the caller opted out
                # (see https://github.com/crytic/crytic-compile/issues/116)
                if original_filename.startswith("ontracts/") and not skip_directory_name_fix:
                    original_filename = "c" + original_filename

                contract_filename = convert_filename(
                    original_filename,
                    relative_to_short,
                    crytic_compile,
                    working_dir=buidler_working_dir,
                )

                compilation_unit.contracts_names.add(contract_name)
                compilation_unit.filename_to_contracts[contract_filename].add(contract_name)
                compilation_unit.abis[contract_name] = info["abi"]
                compilation_unit.bytecodes_init[contract_name] = info["evm"]["bytecode"][
                    "object"
                ]
                compilation_unit.bytecodes_runtime[contract_name] = info["evm"][
                    "deployedBytecode"
                ]["object"]
                compilation_unit.srcmaps_init[contract_name] = info["evm"]["bytecode"][
                    "sourceMap"
                ].split(";")
                compilation_unit.srcmaps_runtime[contract_name] = info["evm"][
                    "deployedBytecode"
                ]["sourceMap"].split(";")

                compilation_unit.natspec[contract_name] = Natspec(
                    info.get("userdoc", {}), info.get("devdoc", {})
                )

    if "sources" in targets_json:
        for path, info in targets_json["sources"].items():
            # Same "ontracts/" directory-name fix as above
            if path.startswith("ontracts/") and not skip_directory_name_fix:
                path = "c" + path

            if skip_filename:
                path = convert_filename(
                    self._target,
                    relative_to_short,
                    crytic_compile,
                    working_dir=buidler_working_dir,
                )
            else:
                path = convert_filename(
                    path, relative_to_short, crytic_compile, working_dir=buidler_working_dir
                )
            compilation_unit.filenames.add(path)
            crytic_compile.filenames.add(path)
            compilation_unit.asts[path.absolute] = info["ast"]
def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
    """Compile a Waffle project and load the artifacts into `crytic_compile`.

    :param crytic_compile: the CryticCompile object to populate
    :param kwargs: optional arguments. Used: "waffle_ignore_compile",
        "ignore_compile", "waffle_config_file", "npx_disable"
    :raises InvalidCompilation: if waffle could not be run, or its output
        (build directory / Combined-Json.json) is missing
    """
    waffle_ignore_compile = kwargs.get("waffle_ignore_compile", False) or kwargs.get(
        "ignore_compile", False
    )
    target = self._target

    cmd = ["waffle"]
    if not kwargs.get("npx_disable", False):
        cmd = ["npx"] + cmd

    # Default behaviour (without any config_file)
    build_directory = os.path.join("build")
    compiler = "native"
    version = _get_version(compiler, target)
    config: Dict = dict()

    config_file = kwargs.get("waffle_config_file", None)
    if config_file is None:
        # Auto-detect a config file only when exactly one candidate exists
        potential_config_files = list(Path(target).rglob("*waffle*.json"))
        if potential_config_files and len(potential_config_files) == 1:
            config_file = potential_config_files[0]

    # Read config file
    if config_file:
        config = _load_config(config_file)
        version = _get_version(compiler, target, config=config)
        if "targetPath" in config:
            build_directory = config["targetPath"]
        if "compiler" in config:
            compiler = config["compiler"]

    # Force "all" output so the AST/bytecode/srcmaps below are emitted
    if "outputType" not in config or config["outputType"] != "all":
        config["outputType"] = "all"

    needed_config = {
        "compilerOptions": {
            "outputSelection": {
                "*": {
                    "*": [
                        "evm.bytecode.object",
                        "evm.deployedBytecode.object",
                        "abi",
                        "evm.bytecode.sourceMap",
                        "evm.deployedBytecode.sourceMap",
                    ],
                    "": ["ast"],
                }
            }
        }
    }

    # Set the config as it should be (merge needed_config into any
    # user-supplied outputSelection rather than clobbering it)
    if "compilerOptions" in config:
        curr_config: Dict = config["compilerOptions"]
        curr_needed_config: Dict = needed_config["compilerOptions"]
        if "outputSelection" in curr_config:
            curr_config = curr_config["outputSelection"]
            curr_needed_config = curr_needed_config["outputSelection"]
            if "*" in curr_config:
                curr_config = curr_config["*"]
                curr_needed_config = curr_needed_config["*"]
                if "*" in curr_config:
                    curr_config["*"] += curr_needed_config["*"]
                else:
                    curr_config["*"] = curr_needed_config["*"]
                if "" in curr_config:
                    curr_config[""] += curr_needed_config[""]
                else:
                    curr_config[""] = curr_needed_config[""]
            else:
                curr_config["*"] = curr_needed_config["*"]
        else:
            curr_config["outputSelection"] = curr_needed_config["outputSelection"]
    else:
        config["compilerOptions"] = needed_config["compilerOptions"]

    if not waffle_ignore_compile:
        # The temp config lives inside `target` and is passed by basename,
        # because the subprocess runs with cwd=target
        with tempfile.NamedTemporaryFile(mode="w", suffix=".json", dir=target) as file_desc:
            json.dump(config, file_desc)
            file_desc.flush()

            cmd += [Path(file_desc.name).name]

            LOGGER.info("Temporary file created: %s", file_desc.name)
            # BUGFIX: balanced quotes in the log format (was "'%s running")
            LOGGER.info("'%s' running", " ".join(cmd))

            try:
                process = subprocess.Popen(
                    cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=target
                )
            except OSError as error:
                # BUGFIX: chain the cause instead of suppressing it via pylint
                raise InvalidCompilation(error) from error
            stdout, stderr = process.communicate()
            if stdout:
                LOGGER.info(stdout.decode())
            if stderr:
                LOGGER.error(stderr.decode())

    if not os.path.isdir(os.path.join(target, build_directory)):
        raise InvalidCompilation("`waffle` compilation failed: build directory not found")

    combined_path = os.path.join(target, build_directory, "Combined-Json.json")
    if not os.path.exists(combined_path):
        raise InvalidCompilation("`Combined-Json.json` not found")

    # BUGFIX: explicit encoding, consistent with the other open() calls here
    with open(combined_path, "r", encoding="utf8") as file_desc:
        target_all = json.load(file_desc)

    optimized = None

    for contract in target_all["contracts"]:
        target_loaded = target_all["contracts"][contract]
        contract = contract.split(":")  # "path:ContractName"
        filename = convert_filename(
            contract[0], _relative_to_short, crytic_compile, working_dir=target
        )
        contract_name = contract[1]

        crytic_compile.asts[filename.absolute] = target_all["sources"][contract[0]]["AST"]
        crytic_compile.filenames.add(filename)
        crytic_compile.contracts_filenames[contract_name] = filename
        crytic_compile.contracts_names.add(contract_name)
        crytic_compile.abis[contract_name] = target_loaded["abi"]

        userdoc = target_loaded.get("userdoc", {})
        devdoc = target_loaded.get("devdoc", {})
        natspec = Natspec(userdoc, devdoc)
        crytic_compile.natspec[contract_name] = natspec

        crytic_compile.bytecodes_init[contract_name] = target_loaded["evm"]["bytecode"][
            "object"
        ]
        crytic_compile.srcmaps_init[contract_name] = target_loaded["evm"]["bytecode"][
            "sourceMap"
        ].split(";")
        crytic_compile.bytecodes_runtime[contract_name] = target_loaded["evm"][
            "deployedBytecode"
        ]["object"]
        crytic_compile.srcmaps_runtime[contract_name] = target_loaded["evm"][
            "deployedBytecode"
        ]["sourceMap"].split(";")

    crytic_compile.compiler_version = CompilerVersion(
        compiler=compiler, version=version, optimized=optimized
    )
def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
    """Compile the target with solc and load the results into `crytic_compile`.

    :param crytic_compile: the CryticCompile object to populate
    :param kwargs: optional arguments. Used: "solc", "solc_disable_warnings",
        "solc_args", "solc_remaps", "solc_working_dir",
        "solc_force_legacy_json", "solc_solcs_bin", "solc_solcs_select"
    :return: None
    """
    solc = kwargs.get("solc", "solc")
    solc_disable_warnings = kwargs.get("solc_disable_warnings", False)
    solc_arguments = kwargs.get("solc_args", "")
    solc_remaps = kwargs.get("solc_remaps", None)
    solc_working_dir = kwargs.get("solc_working_dir", None)
    force_legacy_json = kwargs.get("solc_force_legacy_json", False)

    crytic_compile.compiler_version = CompilerVersion(
        compiler="solc", version=get_version(solc), optimized=is_optimized(solc_arguments)
    )

    # From config file, solcs is a dict (version -> path)
    # From command line, solc is a list
    # The guessing of version only works from config file
    # This is to prevent too complex command line
    solcs_path: Optional[Union[str, Dict, List[str]]] = kwargs.get("solc_solcs_bin")
    # solcs_env is always a list. It matches solc-select list
    solcs_env = kwargs.get("solc_solcs_select")

    # Dispatch on how the solc binaries were supplied
    if solcs_path:
        if isinstance(solcs_path, str):
            solcs_path = solcs_path.split(",")
        targets_json = _run_solcs_path(
            crytic_compile,
            self._target,
            solcs_path,
            solc_disable_warnings,
            solc_arguments,
            solc_remaps=solc_remaps,
            working_dir=solc_working_dir,
            force_legacy_json=force_legacy_json,
        )
    elif solcs_env:
        targets_json = _run_solcs_env(
            crytic_compile,
            self._target,
            solc,
            solc_disable_warnings,
            solc_arguments,
            solcs_env=solcs_env.split(","),
            solc_remaps=solc_remaps,
            working_dir=solc_working_dir,
            force_legacy_json=force_legacy_json,
        )
    else:
        targets_json = _run_solc(
            crytic_compile,
            self._target,
            solc,
            solc_disable_warnings,
            solc_arguments,
            solc_remaps=solc_remaps,
            working_dir=solc_working_dir,
            force_legacy_json=force_legacy_json,
        )

    # solc < 0.4.10 cannot report a filename through the AST
    skip_filename = crytic_compile.compiler_version.version in [
        f"0.4.{x}" for x in range(0, 10)
    ]

    if "contracts" in targets_json:
        for original_contract_name, info in targets_json["contracts"].items():
            contract_name = extract_name(original_contract_name)
            contract_filename = extract_filename(original_contract_name)
            # for solc < 0.4.10 we cant retrieve the filename from the ast
            if skip_filename:
                contract_filename = convert_filename(
                    self._target,
                    relative_to_short,
                    crytic_compile,
                    working_dir=solc_working_dir,
                )
            else:
                contract_filename = convert_filename(
                    contract_filename,
                    relative_to_short,
                    crytic_compile,
                    working_dir=solc_working_dir,
                )
            crytic_compile.contracts_names.add(contract_name)
            crytic_compile.contracts_filenames[contract_name] = contract_filename
            crytic_compile.abis[contract_name] = json.loads(info["abi"])
            crytic_compile.bytecodes_init[contract_name] = info["bin"]
            crytic_compile.bytecodes_runtime[contract_name] = info["bin-runtime"]
            crytic_compile.srcmaps_init[contract_name] = info["srcmap"].split(";")
            crytic_compile.srcmaps_runtime[contract_name] = info["srcmap-runtime"].split(";")
            # userdoc/devdoc arrive as JSON strings in combined-json output
            crytic_compile.natspec[contract_name] = Natspec(
                json.loads(info.get("userdoc", "{}")), json.loads(info.get("devdoc", "{}"))
            )

    if "sources" in targets_json:
        for path, info in targets_json["sources"].items():
            if skip_filename:
                path = convert_filename(
                    self._target,
                    relative_to_short,
                    crytic_compile,
                    working_dir=solc_working_dir,
                )
            else:
                path = convert_filename(
                    path, relative_to_short, crytic_compile, working_dir=solc_working_dir
                )
            crytic_compile.filenames.add(path)
            crytic_compile.asts[path.absolute] = info["AST"]
def compile(crytic_compile: "CryticCompile", target: str, **kwargs: str):
    """Compile a Truffle project and load the artifacts into `crytic_compile`.

    :param crytic_compile: the CryticCompile object to populate
    :param target: path to the Truffle project
    :param kwargs: optional arguments. Used: "truffle_build_directory",
        "truffle_ignore_compile", "truffle_version", "npx_disable"
    :raises InvalidCompilation: if `truffle compile` failed or produced no build
    """
    build_directory = kwargs.get("truffle_build_directory", os.path.join("build", "contracts"))
    truffle_ignore_compile = kwargs.get("truffle_ignore_compile", False)
    truffle_version = kwargs.get("truffle_version", None)
    crytic_compile.type = Type.TRUFFLE

    # Truffle on windows has naming conflicts where it will invoke truffle.js directly instead
    # of truffle.cmd (unless in powershell or git bash). The cleanest solution is to explicitly call
    # truffle.cmd. Reference:
    # https://truffleframework.com/docs/truffle/reference/configuration#resolving-naming-conflicts-on-windows
    if platform.system() == "Windows":
        base_cmd = ["truffle.cmd"]
    elif kwargs.get("npx_disable", False):
        base_cmd = ["truffle"]
    else:
        base_cmd = ["npx", "truffle"]

    if truffle_version:
        # Explicit version wins over anything found in package.json
        if truffle_version.startswith("truffle"):
            base_cmd = ["npx", truffle_version]
        else:
            base_cmd = ["npx", f"truffle@{truffle_version}"]
    elif os.path.isfile(os.path.join(target, "package.json")):
        with open(os.path.join(target, "package.json"), encoding="utf8") as file_desc:
            package = json.load(file_desc)
        # Check both dependency sections; "dependencies" takes precedence
        # because it is inspected last
        for section in ("devDependencies", "dependencies"):
            if section in package and "truffle" in package[section]:
                version = package[section]["truffle"]
                if version.startswith("^"):
                    version = version[1:]
                truffle_version = "truffle@{}".format(version)
                base_cmd = ["npx", truffle_version]

    version_from_config = _get_version_from_config(target)
    if version_from_config:
        version, compiler = version_from_config
    else:
        version, compiler = _get_version(base_cmd, cwd=target)

    if not truffle_ignore_compile:
        cmd = base_cmd + ["compile", "--all"]

        LOGGER.info(
            "'%s' running (use --truffle-version [email protected] to use specific version)",
            " ".join(cmd),
        )
        process = subprocess.Popen(
            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=target
        )

        raw_out, raw_err = process.communicate()
        # convert bytestrings to unicode strings
        out_text, err_text = raw_out.decode(), raw_err.decode()
        LOGGER.info(out_text)
        if err_text:
            LOGGER.error(err_text)

    if not os.path.isdir(os.path.join(target, build_directory)):
        if os.path.isdir(os.path.join(target, "node_modules")):
            raise InvalidCompilation(
                f"External dependencies {target} not found, please install them. (npm install)"
            )
        raise InvalidCompilation("`truffle compile` failed. Can you run it?")

    filenames = glob.glob(os.path.join(target, build_directory, "*.json"))

    optimized = None

    for filename_txt in filenames:
        with open(filename_txt, encoding="utf8") as file_desc:
            target_loaded = json.load(file_desc)

            # Detect the optimizer setting once, from the first artifact
            # carrying parseable metadata
            if optimized is None and "metadata" in target_loaded:
                try:
                    metadata = json.loads(target_loaded["metadata"])
                    optimized = (
                        metadata.get("settings", {}).get("optimizer", {}).get("enabled", None)
                    )
                except json.decoder.JSONDecodeError:
                    pass

            if "ast" not in target_loaded:
                continue

            filename = target_loaded["ast"]["absolutePath"]
            filename = convert_filename(
                filename, _relative_to_short, crytic_compile, working_dir=target
            )

            crytic_compile.asts[filename.absolute] = target_loaded["ast"]
            crytic_compile.filenames.add(filename)
            contract_name = target_loaded["contractName"]
            crytic_compile.contracts_filenames[contract_name] = filename
            crytic_compile.contracts_names.add(contract_name)
            crytic_compile.abis[contract_name] = target_loaded["abi"]
            crytic_compile.bytecodes_init[contract_name] = target_loaded["bytecode"].replace(
                "0x", ""
            )
            crytic_compile.bytecodes_runtime[contract_name] = target_loaded[
                "deployedBytecode"
            ].replace("0x", "")
            crytic_compile.srcmaps_init[contract_name] = target_loaded["sourceMap"].split(";")
            crytic_compile.srcmaps_runtime[contract_name] = target_loaded[
                "deployedSourceMap"
            ].split(";")

    crytic_compile.compiler_version = CompilerVersion(
        compiler=compiler, version=version, optimized=optimized
    )
def _iterate_over_files(crytic_compile: "CryticCompile", target: str, filenames: List[Path]):
    """Iterate over Brownie artifact files and load them into a compilation unit.

    :param crytic_compile: the CryticCompile object to populate
    :param target: path to the Brownie project (used as the working dir)
    :param filenames: artifact JSON files to process
    :return: None
    """
    optimized = None
    compiler = "solc"
    version = None

    compilation_unit = CompilationUnit(crytic_compile, str(target))

    for original_filename in filenames:
        with open(original_filename, encoding="utf8") as f_file:
            target_loaded: Dict = json.load(f_file)

            if "ast" not in target_loaded:
                continue

            if optimized is None:
                # BUGFIX: the previous code first tested
                # `if compiler in target_loaded:` — i.e. looked up a key equal
                # to the *value* of `compiler` ("solc"), a typo for the
                # "compiler" key — and would then KeyError on
                # target_loaded["compiler"] if only a "solc" key existed.
                # The correct check (below) already followed it, so the
                # typo'd branch was removed.
                if "compiler" in target_loaded:
                    compiler_d: Dict = target_loaded["compiler"]
                    optimized = compiler_d.get("optimize", False)
                    version = _get_version(compiler_d)

            # Filter out vyper files
            if "absolutePath" not in target_loaded["ast"]:
                continue

            filename_txt = target_loaded["ast"]["absolutePath"]
            filename: Filename = convert_filename(
                filename_txt, _relative_to_short, crytic_compile, working_dir=target
            )

            compilation_unit.asts[filename.absolute] = target_loaded["ast"]
            crytic_compile.filenames.add(filename)
            contract_name = target_loaded["contractName"]
            compilation_unit.contracts_filenames[contract_name] = filename
            compilation_unit.contracts_names.add(contract_name)
            compilation_unit.abis[contract_name] = target_loaded["abi"]
            compilation_unit.bytecodes_init[contract_name] = target_loaded["bytecode"].replace(
                "0x", ""
            )
            compilation_unit.bytecodes_runtime[contract_name] = target_loaded[
                "deployedBytecode"
            ].replace("0x", "")
            compilation_unit.srcmaps_init[contract_name] = target_loaded["sourceMap"].split(";")
            compilation_unit.srcmaps_runtime[contract_name] = target_loaded[
                "deployedSourceMap"
            ].split(";")

            userdoc = target_loaded.get("userdoc", {})
            devdoc = target_loaded.get("devdoc", {})
            natspec = Natspec(userdoc, devdoc)
            compilation_unit.natspec[contract_name] = natspec

    compilation_unit.compiler_version = CompilerVersion(
        compiler=compiler, version=version, optimized=optimized
    )
def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
    """Fetch a contract from Etherscan and compile it with solc.

    :param crytic_compile: the CryticCompile object to populate
    :param kwargs: optional arguments. Used: "solc",
        "etherscan_only_source_code", "etherscan_only_bytecode",
        "etherscan_api_key"
    :raises InvalidCompilation: on malformed responses or contracts without
        public source code
    """
    target = self._target
    solc = kwargs.get("solc", "solc")

    # Derive the API URLs from the network prefix embedded in the target.
    if target.startswith(tuple(SUPPORTED_NETWORK)):
        key = target[: target.find(":") + 1]
        prefix: Union[None, str] = SUPPORTED_NETWORK[key][0]
        addr = target[target.find(":") + 1 :]
        etherscan_url = ETHERSCAN_BASE % (prefix, addr)
        etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % (SUPPORTED_NETWORK[key][1], addr)
    else:
        prefix = None
        addr = target
        etherscan_url = ETHERSCAN_BASE % ("", target)
        etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % ("", target)

    only_source = kwargs.get("etherscan_only_source_code", False)
    only_bytecode = kwargs.get("etherscan_only_bytecode", False)
    etherscan_api_key = kwargs.get("etherscan_api_key", None)

    if etherscan_api_key:
        etherscan_url += f"&apikey={etherscan_api_key}"
        etherscan_bytecode_url += f"&apikey={etherscan_api_key}"

    source_code: str = ""
    result: Dict[str, Union[bool, str, int]] = {}
    contract_name: str = ""

    if not only_bytecode:
        with urllib.request.urlopen(etherscan_url) as response:
            raw = response.read()

        info = json.loads(raw)

        if "message" not in info:
            LOGGER.error("Incorrect etherscan request")
            raise InvalidCompilation("Incorrect etherscan request " + etherscan_url)
        if not info["message"].startswith("OK"):
            LOGGER.error("Contract has no public source code")
            raise InvalidCompilation("Contract has no public source code: " + etherscan_url)
        if "result" not in info:
            LOGGER.error("Contract has no public source code")
            raise InvalidCompilation("Contract has no public source code: " + etherscan_url)

        result = info["result"][0]
        # Assert to help mypy
        assert isinstance(result["SourceCode"], str)
        assert isinstance(result["ContractName"], str)
        source_code = result["SourceCode"]
        contract_name = result["ContractName"]

    if source_code == "" and not only_source:
        LOGGER.info("Source code not available, try to fetch the bytecode only")
        req = urllib.request.Request(
            etherscan_bytecode_url, headers={"User-Agent": "Mozilla/5.0"}
        )
        with urllib.request.urlopen(req) as response:
            raw = response.read()
        _handle_bytecode(crytic_compile, target, raw)
        return

    if source_code == "":
        LOGGER.error("Contract has no public source code")
        raise InvalidCompilation("Contract has no public source code: " + etherscan_url)

    # Write the fetched source under crytic-export/etherscan_contracts/
    if prefix:
        filename = os.path.join(
            "crytic-export", "etherscan_contracts", f"{addr}{prefix}-{contract_name}.sol"
        )
    else:
        filename = os.path.join(
            "crytic-export", "etherscan_contracts", f"{addr}-{contract_name}.sol"
        )

    if not os.path.exists("crytic-export"):
        os.makedirs("crytic-export")
    if not os.path.exists(os.path.join("crytic-export", "etherscan_contracts")):
        os.makedirs(os.path.join("crytic-export", "etherscan_contracts"))

    with open(filename, "w", encoding="utf8") as file_desc:
        file_desc.write(source_code)

    # Assert to help mypy
    assert isinstance(result["CompilerVersion"], str)
    compiler_version = re.findall(
        r"\d+\.\d+\.\d+", convert_version(result["CompilerVersion"])
    )[0]

    optimization_used: bool = result["OptimizationUsed"] == "1"
    solc_arguments = None
    if optimization_used:
        solc_arguments = f"--optimize --optimize-runs {int(result['Runs'])}"

    crytic_compile.compiler_version = CompilerVersion(
        compiler="solc", version=compiler_version, optimized=optimization_used
    )

    targets_json = _run_solc(
        crytic_compile,
        filename,
        solc=solc,
        solc_disable_warnings=False,
        solc_arguments=solc_arguments,
        env=dict(os.environ, SOLC_VERSION=compiler_version),
    )

    for original_contract_name, info in targets_json["contracts"].items():
        contract_name = extract_name(original_contract_name)
        contract_filename = extract_filename(original_contract_name)
        contract_filename = convert_filename(
            contract_filename, _relative_to_short, crytic_compile
        )
        crytic_compile.contracts_names.add(contract_name)
        crytic_compile.contracts_filenames[contract_name] = contract_filename
        crytic_compile.abis[contract_name] = json.loads(info["abi"])
        crytic_compile.bytecodes_init[contract_name] = info["bin"]
        crytic_compile.bytecodes_runtime[contract_name] = info["bin-runtime"]
        crytic_compile.srcmaps_init[contract_name] = info["srcmap"].split(";")
        crytic_compile.srcmaps_runtime[contract_name] = info["srcmap-runtime"].split(";")
        # userdoc/devdoc arrive as JSON strings in combined-json output
        crytic_compile.natspec[contract_name] = Natspec(
            json.loads(info.get("userdoc", "{}")), json.loads(info.get("devdoc", "{}"))
        )

    for path, info in targets_json["sources"].items():
        path = convert_filename(path, _relative_to_short, crytic_compile)
        crytic_compile.filenames.add(path)
        crytic_compile.asts[path.absolute] = info["AST"]
def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
    """
    Compile the target through solc's standard-json interface and
    populate ``crytic_compile`` with contracts, bytecodes, srcmaps,
    natspec and ASTs.

    :param crytic_compile: CryticCompile object to populate
    :param kwargs: optional settings — "solc" (binary path),
        "solc_disable_warnings", "solc_args", "solc_remaps",
        "solc_working_dir"
    :return:
    """
    solc_binary = kwargs.get("solc", "solc")
    disable_warnings = kwargs.get("solc_disable_warnings", False)
    solc_arguments = kwargs.get("solc_args", "")
    remappings: Optional[Union[str, List[str]]] = kwargs.get("solc_remaps", None)
    working_dir = kwargs.get("solc_working_dir", None)

    crytic_compile.compiler_version = CompilerVersion(
        compiler="solc",
        version=get_version(solc_binary),
        optimized=is_optimized(solc_arguments),
    )

    # solc < 0.4.10 cannot recover the original filename from the AST,
    # so those versions fall back to the target path instead.
    old_versions = {f"0.4.{minor}" for minor in range(0, 10)}
    skip_filename = crytic_compile.compiler_version.version in old_versions

    # Register every remapping before invoking the compiler
    if remappings:
        if isinstance(remappings, str):
            remappings = remappings.split(" ")
        for remapping in remappings:
            self.add_remapping(remapping)

    # Invoke solc
    targets_json = _run_solc_standard_json(
        self.to_dict(), solc_binary, solc_disable_warnings=disable_warnings
    )

    for file_path, file_contracts in targets_json.get("contracts", {}).items():
        for contract_name, contract_info in file_contracts.items():
            source = self._target if skip_filename else file_path
            contract_filename = convert_filename(
                source, relative_to_short, crytic_compile, working_dir=working_dir
            )
            crytic_compile.contracts_names.add(contract_name)
            crytic_compile.contracts_filenames[contract_name] = contract_filename
            crytic_compile.abis[contract_name] = contract_info["abi"]
            crytic_compile.natspec[contract_name] = Natspec(
                contract_info.get("userdoc", {}), contract_info.get("devdoc", {})
            )
            evm = contract_info["evm"]
            crytic_compile.bytecodes_init[contract_name] = evm["bytecode"]["object"]
            crytic_compile.bytecodes_runtime[contract_name] = evm["deployedBytecode"]["object"]
            crytic_compile.srcmaps_init[contract_name] = evm["bytecode"]["sourceMap"].split(";")
            crytic_compile.srcmaps_runtime[contract_name] = evm["deployedBytecode"][
                "sourceMap"
            ].split(";")

    for source_path, source_info in targets_json.get("sources", {}).items():
        filename = convert_filename(
            self._target if skip_filename else source_path,
            relative_to_short,
            crytic_compile,
            working_dir=working_dir,
        )
        crytic_compile.filenames.add(filename)
        crytic_compile.asts[filename.absolute] = source_info["ast"]
def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
    """
    Compile the target with dapp and populate ``crytic_compile`` from the
    generated ``out/dapp.sol.json`` artifact.

    :param crytic_compile: CryticCompile object to populate
    :param kwargs: optional settings — "dapp_ignore_compile"/"ignore_compile"
        to skip the actual dapp invocation and only parse the artifact
    :return:
    """
    dapp_ignore_compile = kwargs.get("dapp_ignore_compile", False) or kwargs.get(
        "ignore_compile", False
    )
    directory = os.path.join(self._target, "out")
    if not dapp_ignore_compile:
        _run_dapp(self._target)
    # Provisional version; overwritten below once the artifact (or a
    # contract's metadata) reveals the exact solc version and optimizer flag.
    crytic_compile.compiler_version = _get_version(self._target)
    optimized = False
    with open(
        os.path.join(directory, "dapp.sol.json"), encoding="utf8"
    ) as file_desc:
        targets_json = json.load(file_desc)
        version = None
        if "version" in targets_json:
            version = re.findall(r"\d+\.\d+\.\d+", targets_json["version"])[0]
        for original_filename, contracts_info in targets_json["contracts"].items():
            for original_contract_name, info in contracts_info.items():
                if "metadata" in info:
                    metadata = json.loads(info["metadata"])
                    if (
                        "settings" in metadata
                        and "optimizer" in metadata["settings"]
                        and "enabled" in metadata["settings"]["optimizer"]
                    ):
                        # Any optimized contract marks the whole unit optimized
                        optimized |= metadata["settings"]["optimizer"]["enabled"]
                contract_name = extract_name(original_contract_name)
                crytic_compile.contracts_names.add(contract_name)
                crytic_compile.contracts_filenames[contract_name] = original_filename
                crytic_compile.abis[contract_name] = info["abi"]
                crytic_compile.bytecodes_init[contract_name] = info["evm"]["bytecode"][
                    "object"
                ]
                crytic_compile.bytecodes_runtime[contract_name] = info["evm"][
                    "deployedBytecode"
                ]["object"]
                crytic_compile.srcmaps_init[contract_name] = info["evm"]["bytecode"][
                    "sourceMap"
                ].split(";")
                # FIX: the runtime srcmap must come from deployedBytecode, not
                # bytecode (the original re-read the init srcmap here)
                crytic_compile.srcmaps_runtime[contract_name] = info["evm"][
                    "deployedBytecode"
                ]["sourceMap"].split(";")
                userdoc = info.get("userdoc", {})
                devdoc = info.get("devdoc", {})
                natspec = Natspec(userdoc, devdoc)
                crytic_compile.natspec[contract_name] = natspec
                if version is None:
                    metadata = json.loads(info["metadata"])
                    version = re.findall(
                        r"\d+\.\d+\.\d+", metadata["compiler"]["version"]
                    )[0]
        for path, info in targets_json["sources"].items():
            path = convert_filename(
                path, _relative_to_short, crytic_compile, working_dir=self._target
            )
            crytic_compile.filenames.add(path)
            crytic_compile.asts[path.absolute] = info["ast"]
    crytic_compile.compiler_version = CompilerVersion(
        compiler="solc", version=version, optimized=optimized
    )
def parse_standard_json_output(
    targets_json: Dict,
    compilation_unit: CompilationUnit,
    solc_working_dir: Optional[str] = None,
) -> None:
    """
    Parse the targets_json output from solc, and populate compilation_unit accordingly

    Args:
        targets_json (Dict): output from solc
        compilation_unit (CompilationUnit): compilation unit to populate
        solc_working_dir (Optional[str]): working dir

    Returns:

    """
    # NOTE(review): the original branched on "solc < 0.4.10" (skip_filename)
    # in both loops below, but each if/else pair was byte-identical, so the
    # condition was dead code. The duplicate branches are collapsed here;
    # behavior is unchanged. If old-solc targets need a different path
    # (as the solc platform does with self._target), that is a separate fix.
    if "contracts" in targets_json:
        for file_path, file_contracts in targets_json["contracts"].items():
            for contract_name, info in file_contracts.items():
                contract_filename = convert_filename(
                    file_path,
                    relative_to_short,
                    compilation_unit.crytic_compile,
                    working_dir=solc_working_dir,
                )
                compilation_unit.contracts_names.add(contract_name)
                compilation_unit.filename_to_contracts[contract_filename].add(
                    contract_name
                )
                compilation_unit.abis[contract_name] = info["abi"]
                # Natspec bundles the user- and dev-documentation blobs
                userdoc = info.get("userdoc", {})
                devdoc = info.get("devdoc", {})
                natspec = Natspec(userdoc, devdoc)
                compilation_unit.natspec[contract_name] = natspec
                compilation_unit.bytecodes_init[contract_name] = info["evm"][
                    "bytecode"
                ]["object"]
                compilation_unit.bytecodes_runtime[contract_name] = info["evm"][
                    "deployedBytecode"
                ]["object"]
                compilation_unit.srcmaps_init[contract_name] = info["evm"]["bytecode"][
                    "sourceMap"
                ].split(";")
                compilation_unit.srcmaps_runtime[contract_name] = info["evm"][
                    "deployedBytecode"
                ]["sourceMap"].split(";")

    if "sources" in targets_json:
        for path, info in targets_json["sources"].items():
            path = convert_filename(
                path,
                relative_to_short,
                compilation_unit.crytic_compile,
                working_dir=solc_working_dir,
            )
            compilation_unit.crytic_compile.filenames.add(path)
            compilation_unit.filenames.add(path)
            # .get("ast") tolerates sources emitted without AST output
            compilation_unit.asts[path.absolute] = info.get("ast")