def compile_from_input_json(
    input_json: Dict, silent: bool = True, allow_paths: Optional[str] = None
) -> Dict:
    """
    Compiles contracts from a standard input json.

    Args:
        input_json: solc input json
        silent: verbose reporting
        allow_paths: compiler allowed filesystem import path

    Returns: standard compiler output json
    """
    optimizer = input_json["settings"]["optimizer"]
    input_json["settings"].setdefault("evmVersion", None)
    if input_json["settings"]["evmVersion"] in EVM_EQUIVALENTS:
        input_json["settings"]["evmVersion"] = EVM_EQUIVALENTS[input_json["settings"]["evmVersion"]]
    if not silent:
        print(f"Compiling contracts...\n Solc version: {str(solcx.get_solc_version())}")
        opt = f"Enabled Runs: {optimizer['runs']}" if optimizer["enabled"] else "Disabled"
        print(f" Optimizer: {opt}")
        if input_json["settings"]["evmVersion"]:
            print(f" EVM Version: {input_json['settings']['evmVersion'].capitalize()}")
    try:
        return solcx.compile_standard(input_json, allow_paths=allow_paths)
    except solcx.exceptions.SolcError as e:
        raise CompilerError(e, "solc")
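# Hedged usage sketch for the helper above: the file name "Example.sol", the
# contract body, and the output selection are illustrative (not from the
# original project), and a solc version matching the pragma is assumed to be
# installed and selected. It shows the minimal standard-JSON shape the helper
# expects (an "optimizer" block must be present) and how to read an ABI back
# out of the compiler output.
example_input = {
    "language": "Solidity",
    "sources": {
        "Example.sol": {
            "content": "pragma solidity ^0.8.0; contract Example { uint256 public x; }"
        }
    },
    "settings": {
        "optimizer": {"enabled": True, "runs": 200},
        "outputSelection": {"*": {"*": ["abi", "evm.bytecode"]}},
    },
}
example_output = compile_from_input_json(example_input, silent=False)
example_abi = example_output["contracts"]["Example.sol"]["Example"]["abi"]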
def compile_source_file(file_path, name):
    input = {
        'language': 'Solidity',
        'sources': {
            name: {
                'urls': [file_path + "/" + name]
            }
        },
        'settings': {
            'outputSelection': {
                '*': {
                    '*': ["metadata", "evm.bytecode", "evm.bytecode.sourceMap"],
                },
                'def': {
                    name: ["abi", "evm.bytecode.opcodes"]
                },
            }
        }
    }
    output = compile_standard(input, allow_paths=file_path)
    contracts = output["contracts"]
    contract = contracts[list(contracts.keys())[0]]
    bytecode = contract[list(contract.keys())[0]]["evm"]["bytecode"]["object"]
    metadata = contract[list(contract.keys())[0]]["metadata"]
    metadata = json.loads(metadata)
    abi = metadata["output"]["abi"]
    return bytecode, abi
def compile(source: str, file: str, name: str) -> Tuple[str, str]:
    spec = {
        "language": "Solidity",
        "sources": {
            file: {
                "urls": [source]
            }
        },
        "settings": {
            "optimizer": {
                "enabled": True
            },
            "outputSelection": {
                "*": {
                    "*": ["metadata", "evm.bytecode", "abi"]
                }
            },
        },
    }
    out = solcx.compile_standard(spec, allow_paths=".")
    abi = out["contracts"][file][name]["abi"]
    bytecode = out["contracts"][file][name]["evm"]["bytecode"]["object"]
    return abi, bytecode
def create_contract(w3, source_code):
    compiled_sol = compile_standard({
        "language": "Solidity",
        "sources": {
            "TestERC20.sol": {
                "content": source_code
            }
        },
        "settings": {
            "outputSelection": {
                "*": {
                    "*": ["metadata", "evm.bytecode", "evm.bytecode.sourceMap"]
                }
            }
        },
    })
    w3.eth.default_account = w3.eth.accounts[0]
    bytecode = compiled_sol["contracts"]["TestERC20.sol"]["TestERC20"]["evm"]["bytecode"]["object"]
    abi = json.loads(compiled_sol["contracts"]["TestERC20.sol"]["TestERC20"]["metadata"])["output"]["abi"]
    TestERC20 = w3.eth.contract(abi=abi, bytecode=bytecode)
    tx_hash = TestERC20.constructor().transact()
    tx_receipt = w3.eth.waitForTransactionReceipt(tx_hash)
    contract_address = tx_receipt["contractAddress"]
    return tx_hash, contract_address
def compile_from_input_json(input_json, silent=True):
    '''Compiles contracts from a standard input json.

    Args:
        input_json: solc input json
        silent: verbose reporting

    Returns: standard compiler output json'''
    optimizer = input_json['settings']['optimizer']
    input_json['settings'].setdefault('evmVersion', None)
    if not silent:
        print("Compiling contracts...")
        print(f" Solc {solcx.get_solc_version_string()}")
        print(" Optimizer: " +
              (f"Enabled Runs: {optimizer['runs']}" if optimizer['enabled'] else 'Disabled'))
        if input_json['settings']['evmVersion']:
            print(f" EVM Version: {input_json['settings']['evmVersion'].capitalize()}")
    try:
        return solcx.compile_standard(
            input_json,
            optimize=optimizer['enabled'],
            optimize_runs=optimizer['runs'],
            evm_version=input_json['settings']['evmVersion'],
            allow_paths=".")
    except solcx.exceptions.SolcError as e:
        raise CompilerError(e)
def compile_from_input_json(input_json: Dict, silent: bool = True) -> Dict:
    """Compiles contracts from a standard input json.

    Args:
        input_json: solc input json
        silent: verbose reporting

    Returns: standard compiler output json"""
    optimizer = input_json["settings"]["optimizer"]
    input_json["settings"].setdefault("evmVersion", None)
    if not silent:
        print("Compiling contracts...")
        print(f" Solc {solcx.get_solc_version_string()}")
        print(" Optimizer: " +
              (f"Enabled Runs: {optimizer['runs']}" if optimizer["enabled"] else "Disabled"))
        if input_json["settings"]["evmVersion"]:
            print(f" EVM Version: {input_json['settings']['evmVersion'].capitalize()}")
    try:
        return solcx.compile_standard(
            input_json,
            optimize=optimizer["enabled"],
            optimize_runs=optimizer["runs"],
            evm_version=input_json["settings"]["evmVersion"],
            allow_paths=".",
        )
    except solcx.exceptions.SolcError as e:
        raise CompilerError(e)
def solcx_compile(self, path, remappings, enable_scribble, scribble_file):
    return solcx.compile_standard(
        input_data={
            "language": "Solidity",
            "sources": {
                scribble_file or self.target: {
                    "urls": [scribble_file or self.target]
                }
            },
            "settings": {
                "remappings": [r.format(pwd=path) for r in remappings]
                or [
                    f"openzeppelin-solidity/={path}/node_modules/openzeppelin-solidity/",
                    f"openzeppelin-zos/={path}/node_modules/openzeppelin-zos/",
                    f"zos-lib/={path}/node_modules/zos-lib/",
                ],
                "outputSelection": {
                    "*": {
                        "*": [
                            "evm.bytecode.object",
                            "evm.bytecode.sourceMap",
                            "evm.deployedBytecode.object",
                            "evm.deployedBytecode.sourceMap",
                        ],
                        "": ["ast"],
                    }
                },
                "optimizer": {"enabled": True, "runs": 200},
            },
        },
        # if scribble is enabled, allow access to the temporary instrumented file
        allow_paths=path if not enable_scribble else scribble_file,
    )
def compile_all(self):
    """Compiles all of the contracts in the self.contracts_dir directory

    Creates {contract name}.json files in self.output_dir that contain
    the build output for each contract.
    """
    # Solidity input JSON
    solc_input = self.get_solc_input()

    # Compile the contracts
    real_path = os.path.realpath(self.contracts_dir)
    compilation_result = compile_standard(solc_input, allow_paths=real_path)

    # Create the output folder if it doesn't already exist
    os.makedirs(self.output_dir, exist_ok=True)

    # Write the contract ABI to output files
    compiled_contracts = compilation_result['contracts']
    for contract_file in compiled_contracts:
        for contract in compiled_contracts[contract_file]:
            contract_name = contract.split('.')[0]
            contract_data = compiled_contracts[contract_file][contract_name]
            contract_data_path = self.output_dir + '/{0}.json'.format(contract_name)
            with open(contract_data_path, "w+") as contract_data_file:
                json.dump(contract_data, contract_data_file)
def compile():
    # sol_files = [join(source_path, entry_file)]
    # for file_name in listdir(join(source_path, 'lib')):
    #     if isfile(join(source_path, 'lib', file_name)) and file_name.endswith('.sol'):
    #         sol_files.append(join(source_path, 'lib', file_name))
    # return solcx.compile_files(sol_files, base_path=source_path, optimize=True)
    # return solcx.compile_files(sol_files, base_path=source_path, allow_paths=[source_path, source_path.joinpath('lib')], optimize=True, no_optimize_yul=True)
    sources = {}
    with open(join(source_path, entry_file), 'r') as f:
        sources[entry_file] = {'content': f.read()}
    for file_name in listdir(join(source_path, 'lib')):
        if isfile(join(source_path, 'lib', file_name)) and file_name.endswith('.sol'):
            with open(join(source_path, 'lib', file_name), 'r') as f:
                sources[join('lib', file_name)] = {'content': f.read()}
    return solcx.compile_standard(
        {
            'language': 'Solidity',
            'sources': sources,
            "settings": {
                "outputSelection": {
                    "*": {
                        "*": ["metadata", "evm.bytecode"]
                    }
                },
                "optimizer": {
                    "enabled": True,
                    "details": {
                        "yul": False
                    }
                }
            }
        },
        solc_version='0.7.6')
def compile(solc_version, evm_version, source_code_file):
    out = None
    with open(source_code_file, 'r') as file:
        source_code = file.read()
    try:
        if solc_version != solcx.get_solc_version():
            solcx.set_solc_version(solc_version, True)
        out = solcx.compile_standard({
            'language': 'Solidity',
            'sources': {source_code_file: {'content': source_code}},
            'settings': {
                "optimizer": {
                    "enabled": True,
                    "runs": 200
                },
                "evmVersion": evm_version,
                "outputSelection": {
                    source_code_file: {
                        "*": [
                            "abi",
                            "evm.deployedBytecode",
                            "evm.bytecode.object",
                            "evm.legacyAssembly",
                        ],
                    }
                }
            }
        }, allow_paths='.')
    except Exception as e:
        print("Error: Solidity compilation failed!")
        # Python 3 exceptions have no .message attribute; print the exception itself
        print(e)
    return out
def compileContract(file, lib=None, ldlib=None, file_path="Contracts"):
    file_path = getbase_dir(file_path)
    input_json = get_input_json(file_path, file, lib, ldlib)
    set_solc_version('v0.5.4')
    # return compile_files([file_path+file])
    return compile_standard(input_json, allow_paths=file_path)
def test_compile_standard_with_dependency(input_json, foo_source, bar_source):
    input_json['sources'] = {
        'contracts/Foo.sol': {'content': foo_source},
        'contracts/Bar.sol': {'content': bar_source},
    }
    result = solcx.compile_standard(input_json)
    _compile_assertions(result, "Foo", "Bar")
def compile_into_ast(src_path):
    # Convert to absolute path
    src_path = os.path.join(os.getcwd(), src_path)
    output_selection = {"*": {"": ["ast"]}}
    if os.path.isdir(src_path):
        # Multiple file contract
        src_data = {}
        first = True
        with open(os.path.join(src_path, ".mapping"), "r") as f:
            file_mapping = json.load(f)
        for filepath, filename in file_mapping.items():
            src_data[filepath] = {
                "urls": [os.path.join(src_path, filename)]
            }
            if first:
                with open(os.path.join(src_path, filename), "r") as f2:
                    file_data = f2.read()
                version = re.findall(
                    r"pragma solidity [^0-9]*([0-9]*\.[0-9]*\.[0-9]*).*;",
                    file_data,
                )[0]
                first = False
    else:
        # Single file
        with open(src_path, "r") as f:
            src_file = f.read()
        _, src_name = os.path.split(src_path)
        src_data = {src_name: {"urls": [src_path]}}
        version = re.findall(
            r"pragma solidity [^0-9]*([0-9]*\.[0-9]*\.[0-9]*).*;", src_file)[0]
    print(json.dumps(src_data))
    install_solc_pragma(version)
    set_solc_version_pragma(version)
    compiler_input = {
        "language": "Solidity",
        "sources": src_data,
        "settings": {
            "outputSelection": output_selection
        },
    }
    compile_output = compile_standard(compiler_input, allow_paths="/")
    ast = compile_output["sources"][os.path.basename(src_path)]["ast"]
    with open(src_path, "rb") as file:
        ast["source"] = codecs.utf_8_decode(file.read())[0]
    ast["_solc_version"] = get_solc_version_string()
    return ast
def test_compile_standard_with_dependency(input_json, foo_source, bar_source):
    input_json["sources"] = {
        "contracts/Foo.sol": {
            "content": foo_source
        },
        "contracts/Bar.sol": {
            "content": bar_source
        },
    }
    result = solcx.compile_standard(input_json)
    _compile_assertions(result, "Foo", "Bar")
def _tryParse(self, input_json, soc_version) -> dict:
    """
    Uses solc and the astcast tooling to operate on the parsed AST.
    :param contractAddr: contract address
    """
    state: bool = True
    output_json = None  # ensure the name is bound even if compilation fails
    try:
        # normal case: compile with the pragma-matched solc version
        solcx.set_solc_version_pragma(soc_version)
        output_json = solcx.compile_standard(input_json, allow_paths=self.allow_paths)
    except Exception:
        state = False
    return {'state': state, 'output_json': output_json}
def compile_solidity_json(sol_filename: str,
                          libs: Optional[Dict[str, str]] = None,
                          optimizer_runs: int = -1,
                          output_selection: Tuple = ('metadata', 'evm.bytecode', 'evm.deployedBytecode'),
                          cwd: str = None) -> Dict:
    """
    Compile the given Solidity file using the solc standard JSON interface with the provided options.

    :param sol_filename: path to solidity file
    :param libs: [OPTIONAL] dictionary containing <LibraryContractName, LibraryContractAddress> pairs,
                 used for linking
    :param optimizer_runs: controls the optimize-runs flag, negative values disable the optimizer
    :param output_selection: determines which fields are included in the compiler output dict
    :param cwd: working directory
    :return: dictionary with the compilation results according to output_selection
    """
    solp = pathlib.Path(sol_filename)
    json_in = {
        'language': 'Solidity',
        'sources': {
            solp.name: {
                'urls': [str(solp.absolute())]
            }
        },
        'settings': {
            'outputSelection': {
                '*': {
                    '*': list(output_selection)
                }
            },
        }
    }
    if optimizer_runs >= 0:
        json_in['settings']['optimizer'] = {
            'enabled': True,
            'runs': optimizer_runs
        }
    if libs is not None:
        json_in['settings']['libraries'] = {solp.name: libs}
    if cwd is None:
        cwd = solp.absolute().parent
    old_cwd = os.getcwd()
    os.chdir(cwd)
    ret = compile_standard(json_in, allow_paths='.')
    os.chdir(old_cwd)
    return ret
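# Hedged usage sketch for compile_solidity_json: the file name "Token.sol",
# the contract name "Token", and the library name/address are illustrative
# placeholders, and the file is assumed to exist relative to the working
# directory. It links one library, enables the optimizer for 200 runs, and
# pulls the runtime bytecode from the default output selection.
out = compile_solidity_json(
    'Token.sol',
    libs={'SafeMathLib': '0x0000000000000000000000000000000000000001'},
    optimizer_runs=200,
)
runtime_bytecode = out['contracts']['Token.sol']['Token']['evm']['deployedBytecode']['object']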
def _compile_and_format(input_json):
    try:
        compiled = solcx.compile_standard(
            input_json,
            optimize=CONFIG['solc']['optimize'],
            optimize_runs=CONFIG['solc']['runs'],
            allow_paths="."
        )
    except solcx.exceptions.SolcError as e:
        raise CompilerError(e)
    compiled = generate_pcMap(compiled)
    result = {}
    compiler_info = CONFIG['solc'].copy()
    compiler_info['version'] = solcx.get_solc_version_string().strip('\n')
    for filename in input_json['sources']:
        for match in re.findall(
            "\n(?:contract|library|interface) [^ {]{1,}",
            input_json['sources'][filename]['content']
        ):
            type_, name = match.strip('\n').split(' ')
            data = compiled['contracts'][filename][name]
            evm = data['evm']
            ref = [
                (k, x) for v in evm['bytecode']['linkReferences'].values()
                for k, x in v.items()
            ]
            for n, loc in [(i[0], x['start'] * 2) for i in ref for x in i[1]]:
                evm['bytecode']['object'] = "{}__{:_<36}__{}".format(
                    evm['bytecode']['object'][:loc],
                    n[:36],
                    evm['bytecode']['object'][loc + 40:]
                )
            result[name] = {
                'abi': data['abi'],
                'ast': compiled['sources'][filename]['ast'],
                'bytecode': evm['bytecode']['object'],
                'compiler': compiler_info,
                'contractName': name,
                'deployedBytecode': evm['deployedBytecode']['object'],
                'deployedSourceMap': evm['deployedBytecode']['sourceMap'],
                'networks': {},
                'opcodes': evm['deployedBytecode']['opcodes'],
                'sha1': sha1(input_json['sources'][filename]['content'].encode()).hexdigest(),
                'source': input_json['sources'][filename]['content'],
                'sourceMap': evm['bytecode']['sourceMap'],
                'sourcePath': filename,
                'type': type_,
                'pcMap': evm['deployedBytecode']['pcMap']
            }
    return result
def test_verification_info(tmp_path_factory, version):
    header = f"""
// SPDX-License-Identifier: MIT
pragma solidity {version};
"""
    # set up the project directory
    dir: Path = tmp_path_factory.mktemp("verify-project")
    # initialize brownie project
    new(dir.as_posix())

    modded_sources = {}
    for fp, src in sources:
        with dir.joinpath(fp).open("w") as f:
            f.write(header + src)
        modded_sources[fp] = header + src
    find_best_solc_version(modded_sources, install_needed=True)

    project = load(dir, "TestImportProject")

    for contract_name in ("Foo", "Bar", "Baz"):
        contract = getattr(project, contract_name)
        input_data = contract.get_verification_info()["standard_json_input"]

        # output selection isn't included in the verification info because
        # etherscan replaces it regardless. Here we just replicate what they
        # would include
        input_data["settings"]["outputSelection"] = {
            "*": {"*": ["evm.bytecode", "evm.deployedBytecode", "abi"]}
        }

        compiler_version, _ = contract._build["compiler"]["version"].split("+")
        output_data = solcx.compile_standard(input_data, solc_version=compiler_version)
        # keccak256 = 0xd61b13a841b15bc814760b36086983db80788946ca38aa90a06bebf287a67205
        build_info = output_data["contracts"][f"{contract_name}.sol"][contract_name]

        assert build_info["abi"] == contract.abi
        # ignore the metadata at the end of the bytecode, etherscan does the same
        assert build_info["evm"]["bytecode"]["object"][:-96] == contract.bytecode[:-96]
        assert (
            build_info["evm"]["deployedBytecode"]["object"][:-96]
            == contract._build["deployedBytecode"][:-96]
        )
    project.close()
def _getRootNode(self, contractAddr: str, versionString: str) -> solcast.nodes.NodeBase:
    solc_input_json_str = self.solc_input_json_str_skeleton % (
        contractAddr, self.contract_src_path_skeleton % (contractAddr))
    input_json = json.loads(solc_input_json_str)
    # TODO: calculate the most suitable compiler version
    self._setMostSuitableVersion(versionString)
    output_json = solcx.compile_standard(input_json, allow_paths=self.allow_paths)
    # print(self.allow_paths)
    nodes: List[solcast.nodes.IterableNodeBase] = solcast.from_standard_output(output_json)
    return nodes[0]
def compile_from_input_json(input_json, silent=True):
    '''Compiles contracts from a standard input json.

    Args:
        input_json: solc input json
        silent: verbose reporting

    Returns: standard compiler output json'''
    optimizer = input_json['settings']['optimizer']
    if not silent:
        print("Compiling contracts...")
        print("Optimizer: " +
              (f"Enabled Runs: {optimizer['runs']}" if optimizer['enabled'] else 'Disabled'))
    try:
        return solcx.compile_standard(
            input_json,
            optimize=optimizer['enabled'],
            optimize_runs=optimizer['runs'],
            allow_paths=".")
    except solcx.exceptions.SolcError as e:
        raise CompilerError(e)
def deployContractConstructor(self, ContractLocation, ContractFileName, NameOfContract,
                              Account, parameterOne, parameterTwo):
    self.ContractLocation = ContractLocation
    self.NameOfContract = NameOfContract
    self.ContractFileName = ContractFileName
    self.Account = Account
    self.data = self.loadContract()
    compiled_sol = compile_standard({
        "language": "Solidity",
        "sources": {
            self.NameOfContract: {
                "content": self.data
            }
        },
        "settings": {
            "outputSelection": {
                "*": {
                    "*": ["metadata", "evm.bytecode", "evm.bytecode.sourceMap"]
                }
            }
        }
    })
    # Assume that the file name and the name of the contract are the same
    bytecode = compiled_sol['contracts'][self.NameOfContract][self.ContractFileName]['evm']['bytecode']['object']
    abi = json.loads(compiled_sol['contracts'][self.NameOfContract][self.ContractFileName]['metadata'])['output']['abi']
    DeployedContract = self.web3.eth.contract(abi=abi, bytecode=bytecode)
    tx_hash = DeployedContract.constructor(parameterOne, parameterTwo).transact()
    tx_receipt = self.web3.eth.waitForTransactionReceipt(tx_hash)
    DeployedContract = self.web3.eth.contract(
        address=tx_receipt.contractAddress,
        abi=abi
    )
    return DeployedContract
def publish(self, scfile, name):
    path = Path(scfile).resolve()
    tmp = self.load_lib_add()
    tmp['sources'] = {path.name: {"urls": [str(path)]}}
    sc = tmp
    log.debug(str(path.parent))
    compiled_sol = compile_standard(sc, allow_paths=str(path.parent))
    contract_interface = compiled_sol['contracts'][path.name][name]
    w3 = self.web3
    w3.eth.defaultAccount = w3.eth.accounts[0]
    bytecode = contract_interface['evm']['bytecode']['object']
    abi = json.loads(contract_interface['metadata'])['output']['abi']
    tester = w3.eth.contract(abi=abi, bytecode=bytecode)
    tx_hash = tester.constructor().transact()
    tx_receipt = w3.eth.waitForTransactionReceipt(tx_hash)
    # contract = {scfile: tx_receipt.contractAddress}
    # with open('contract_address.json', 'w') as wf:
    #     json.dump(contract, wf, indent=4)
    # print(tx_receipt)
    self.contract_instance = self.contract(tx_receipt, contract_interface)
    return tx_receipt, contract_interface
def compile_single(src: str):
    solc_result = solcx.compile_standard({
        "language": "Solidity",
        "sources": {
            "arbitrary_string": {
                "content": src
            }
        },
        "settings": {
            "outputSelection": {
                "*": {
                    "*": ["metadata", "evm.bytecode", "evm.bytecode.sourceMap"]
                }
            }
        }
    })
    contract_bin = next(iter(solc_result["contracts"]["arbitrary_string"].values()))
    contract_metadata = json.loads(contract_bin["metadata"])
    return ContractFactory(bytecode=contract_bin["evm"]["bytecode"]["object"],
                           abi=contract_metadata["output"]["abi"])
def compile(filePath: str, fileName: str, contractName: str, w3: Web3):
    with open(filePath) as contract_file:
        contract_code = contract_file.read()
    compiled_contract = compile_standard({
        'language': 'Solidity',
        'sources': {
            fileName: {
                'content': contract_code
            }
        },
        'settings': {
            'outputSelection': {
                '*': {
                    '*': ['metadata', 'evm.bytecode', 'evm.bytecode.sourceMap']
                }
            }
        }
    })
    bytecode = compiled_contract['contracts'][fileName][contractName]['evm']['bytecode']['object']
    abi = json.loads(compiled_contract['contracts'][fileName][contractName]['metadata'])['output']['abi']
    return bytecode, abi
def test_compile_standard_empty():
    with pytest.raises(ContractsNotFound):
        solcx.compile_standard({"language": "Solidity", "sources": {}})
def test_compile_standard(input_json, foo_source):
    input_json["sources"] = {"contracts/Foo.sol": {"content": foo_source}}
    result = solcx.compile_standard(input_json)
    _compile_assertions(result, "Foo")
def test_compile_standard_invalid_source(input_json, invalid_source):
    input_json["sources"] = {"contracts/Foo.sol": {"content": invalid_source}}
    with pytest.raises(SolcError):
        solcx.compile_standard(input_json)
import json

from web3 import Web3
from solcx import compile_standard

compiled_sol = compile_standard({
    "language": "Solidity",
    "sources": {
        "Greeter.sol": {
            "content": '''
                pragma solidity ^0.5.0;

                contract Greeter {
                    string public greeting;

                    constructor() public {
                        greeting = 'Hello';
                    }

                    function setGreeting(string memory _greeting) public {
                        greeting = _greeting;
                    }

                    function greet() view public returns (string memory) {
                        return greeting;
                    }
                }
            '''
        }
    },
    "settings": {
        "outputSelection": {
            "*": {
                "*": ["metadata", "evm.bytecode", "evm.bytecode.sourceMap"]
            }
        }
    }
})

ganache_url = "HTTP://127.0.0.1:7545"
web3 = Web3(Web3.HTTPProvider(ganache_url))
web3.eth.defaultAccount = web3.eth.accounts[0]
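# A possible continuation of the snippet above (hedged sketch, not part of the
# original): deploy the compiled Greeter and interact with it, assuming a local
# Ganache node is reachable at the URL above and the pre-v6 web3.py camelCase
# API used in this snippet.
bytecode = compiled_sol["contracts"]["Greeter.sol"]["Greeter"]["evm"]["bytecode"]["object"]
abi = json.loads(compiled_sol["contracts"]["Greeter.sol"]["Greeter"]["metadata"])["output"]["abi"]

Greeter = web3.eth.contract(abi=abi, bytecode=bytecode)
tx_hash = Greeter.constructor().transact()
tx_receipt = web3.eth.waitForTransactionReceipt(tx_hash)

greeter = web3.eth.contract(address=tx_receipt.contractAddress, abi=abi)
print(greeter.functions.greet().call())   # 'Hello'
tx_hash = greeter.functions.setGreeting("Nihao").transact()
web3.eth.waitForTransactionReceipt(tx_hash)
print(greeter.functions.greet().call())   # 'Nihao'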
def test_compile_standard_with_file_paths(input_json, foo_path):
    input_json["sources"] = {"contracts/Foo.sol": {"urls": [foo_path]}}
    result = solcx.compile_standard(
        input_json, allow_paths=Path(foo_path).parent.as_posix())
    _compile_assertions(result, "Foo")
def generate_payloads(
    self,
    version: Optional[str],
    contract: str = None,
    remappings: Tuple[str] = None,
    enable_scribble: bool = False,
    scribble_path: str = "scribble",
):
    """Generate a MythX analysis request from a given Solidity file.

    This function will open the file, try to detect the used solc version
    from the pragma definition, and automatically compile it. If the given
    solc version is not installed on the client's system, it will be
    automatically downloaded.

    From the solc output, the following data is sent to the MythX API for
    analysis:

    * :code:`abi`
    * :code:`ast`
    * :code:`bin`
    * :code:`bin-runtime`
    * :code:`srcmap`
    * :code:`srcmap-runtime`

    :param version: The solc version to use for compilation
    :param contract: The contract name(s) to submit
    :param remappings: Import remappings to pass to solcx
    :param enable_scribble: Enable instrumentation with scribble
    :param scribble_path: Optional path to the scribble executable
    """

    with open(self.target) as f:
        source = f.read()

    solc_version = re.findall(PRAGMA_PATTERN, source)
    LOGGER.debug(f"solc version matches in {self.target}: {solc_version}")

    if not (solc_version or version):
        # no pragma found, user needs to specify the version
        raise click.exceptions.UsageError(
            "No pragma found - please specify a solc version with --solc-version"
        )

    solc_version = f"v{version or solc_version[0]}"

    if solc_version not in solcx.get_installed_solc_versions():
        try:
            LOGGER.debug(f"Installing solc {solc_version}")
            solcx.install_solc(solc_version, allow_osx=True)
        except Exception as e:
            raise click.exceptions.UsageError(
                f"Error installing solc version {solc_version}: {e}")

    solcx.set_solc_version(solc_version, silent=True)

    # instrument with scribble if requested
    scribble_file = None
    if enable_scribble:
        process = subprocess.run(
            [scribble_path, self.target],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        if process.returncode != 0:
            click.echo(f"Scribble has encountered an error (code: {process.returncode})")
            click.echo("=====STDERR=====")
            click.echo(process.stderr.decode())
            click.echo("=====STDOUT=====")
            click.echo(process.stdout.decode())
            sys.exit(process.returncode)

        # don't delete temp file on close but manually unlink
        # after payload has been generated
        scribble_output_f = tempfile.NamedTemporaryFile(mode="w+", delete=False, suffix=".sol")
        scribble_stdout = process.stdout.decode()
        scribble_output_f.write(scribble_stdout)
        scribble_file = scribble_output_f.name
        scribble_output_f.close()

    try:
        cwd = str(Path.cwd().absolute())
        LOGGER.debug(f"Compiling {scribble_file or self.target} under allowed path {cwd}")
        result = solcx.compile_standard(
            input_data={
                "language": "Solidity",
                "sources": {
                    scribble_file or self.target: {
                        "urls": [scribble_file or self.target]
                    }
                },
                "settings": {
                    "remappings": [r.format(pwd=cwd) for r in remappings]
                    or [
                        f"openzeppelin-solidity/={cwd}/node_modules/openzeppelin-solidity/",
                        f"openzeppelin-zos/={cwd}/node_modules/openzeppelin-zos/",
                        f"zos-lib/={cwd}/node_modules/zos-lib/",
                    ],
                    "outputSelection": {
                        "*": {
                            "*": [
                                "evm.bytecode.object",
                                "evm.bytecode.sourceMap",
                                "evm.deployedBytecode.object",
                                "evm.deployedBytecode.sourceMap",
                            ],
                            "": ["ast"],
                        }
                    },
                    "optimizer": {"enabled": True, "runs": 200},
                },
            },
            # if scribble is enabled, allow access to the temporary instrumented file
            allow_paths=cwd if not enable_scribble else scribble_file,
        )
    except solcx.exceptions.SolcError as e:
        raise click.exceptions.UsageError(
            f"Error compiling source with solc {solc_version}: {e}")

    compiled_sources = result.get("sources", {})

    payload = {
        "sources": {},
        "solc_version": solc_version,
        "main_source": scribble_file or self.target,
        "source_list": [None] * len(compiled_sources),
    }

    for file_path, file_data in compiled_sources.items():
        # fill source list entry
        payload["source_list"][file_data.get("id")] = file_path
        payload_dict = payload["sources"][file_path] = {}

        # add AST for file if it's present
        ast = file_data.get("ast")
        if ast:
            payload_dict["ast"] = ast

        # add source from file path
        with open(file_path, newline="") as source_f:
            payload_dict["source"] = source_f.read()

    if contract:
        LOGGER.debug("Contract specified - targeted payload selection")
        try:
            # if a contract is specified, set its bytecode and source mapping
            payload["contract_name"] = contract
            payload["bytecode"] = patch_solc_bytecode(
                result["contracts"][scribble_file or self.target][contract]["evm"]["bytecode"]["object"]
            )
            payload["source_map"] = result["contracts"][
                scribble_file or self.target
            ][contract]["evm"]["bytecode"]["sourceMap"]
            payload["deployed_bytecode"] = patch_solc_bytecode(
                result["contracts"][scribble_file or self.target][contract]["evm"]["deployedBytecode"]["object"]
            )
            payload["deployed_source_map"] = result["contracts"][
                scribble_file or self.target
            ][contract]["evm"]["deployedBytecode"]["sourceMap"]
            self.payloads.append(payload)
            return
        except KeyError:
            LOGGER.warning(
                f"Could not find contract {contract} in compilation artifacts. The CLI will find the "
                f"largest bytecode artifact in the compilation output and submit it instead."
            )

    # extract the largest bytecode from the compilation result and add it
    bytecode_max = 0
    for file_path, file_element in result.get("contracts", {}).items():
        for contract, contract_data in file_element.items():
            contract_bytecode = contract_data["evm"]["bytecode"]["object"]
            contract_source_map = contract_data["evm"]["bytecode"]["sourceMap"]
            contract_deployed_bytecode = contract_data["evm"]["deployedBytecode"]["object"]
            contract_deployed_source_map = contract_data["evm"]["deployedBytecode"]["sourceMap"]
            bytecode_length = len(contract_bytecode)
            if bytecode_length > bytecode_max:
                bytecode_max = bytecode_length
                payload["contract_name"] = contract
                payload["bytecode"] = patch_solc_bytecode(contract_bytecode)
                payload["source_map"] = contract_source_map
                payload["deployed_bytecode"] = patch_solc_bytecode(contract_deployed_bytecode)
                payload["deployed_source_map"] = contract_deployed_source_map

    if enable_scribble:
        # replace scribble tempfile name with prefixed one
        scribble_payload = payload["sources"].pop(scribble_file)
        payload["sources"]["scribble-" + str(self.target)] = scribble_payload
        payload["source_list"] = [
            "scribble-" + str(self.target) if item == scribble_file else item
            for item in payload["source_list"]
        ]
        payload["main_source"] = "scribble-" + str(self.target)

        # delete scribble temp file
        os.unlink(scribble_file)

    self.payloads.append(payload)