def _compile(self, data): code = data.get('code') if not code: return {'status': 'failed', 'message': 'No "code" key supplied'}, 400 if not isinstance(code, str): return {'status': 'failed', 'message': '"code" must be a non-empty string'}, 400 try: code = data['code'] out_dict = vyper.compile_codes( {'': code}, vyper.compiler.OUTPUT_FORMATS.keys(), )[''] out_dict['ir'] = str(out_dict['ir']) except ParserException as e: return { 'status': 'failed', 'message': str(e), 'column': e.col_offset, 'line': e.lineno }, 400 except SyntaxError as e: return { 'status': 'failed', 'message': str(e), 'column': e.offset, 'line': e.lineno }, 400 out_dict.update({'status': "success"}) return out_dict, 200
def _compile(self, data): code = data.get("code") if not code: return {"status": "failed", "message": 'No "code" key supplied'}, 400 if not isinstance(code, str): return {"status": "failed", "message": '"code" must be a non-empty string'}, 400 try: code = data["code"] out_dict = vyper.compile_codes( {"": code}, list(vyper.compiler.OUTPUT_FORMATS.keys()), evm_version=data.get("evm_version", DEFAULT_EVM_VERSION), )[""] out_dict["ir"] = str(out_dict["ir"]) except VyperException as e: return ( {"status": "failed", "message": str(e), "column": e.col_offset, "line": e.lineno}, 400, ) except SyntaxError as e: return ( {"status": "failed", "message": str(e), "column": e.offset, "line": e.lineno}, 400, ) out_dict.update({"status": "success"}) return out_dict, 200
def compile_files(
    input_files: Iterable[str],
    output_formats: OutputFormats,
    root_folder: str = ".",
    show_gas_estimates: bool = False,
    evm_version: str = DEFAULT_EVM_VERSION,
    no_optimize: bool = False,
    storage_layout: Iterable[str] = None,
) -> OrderedDict:
    """Read the given Vyper files and compile them to the requested formats.

    Storage-layout JSON files, when supplied, are paired positionally with
    the input contracts.  ``combined_json`` must be the sole output format
    when requested; it expands to the canonical combined-output set and
    stamps the compiler version into the result.
    """
    base = Path(root_folder).resolve()
    if not base.exists():
        raise FileNotFoundError(f"Invalid root path - '{base.as_posix()}' does not exist")

    contract_sources: ContractCodes = OrderedDict()
    for name in input_files:
        path = Path(name)
        try:
            key = path.resolve().relative_to(base).as_posix()
        except ValueError:
            # Path lies outside the root folder; fall back to the raw path.
            key = path.as_posix()
        with path.open() as handle:
            # trailing newline fixes python parsing bug when source ends in a comment
            # https://bugs.python.org/issue35107
            contract_sources[key] = handle.read() + "\n"

    storage_layouts = OrderedDict()
    if storage_layout:
        # Layout files pair with contracts by position.
        for layout_file, contract_name in zip(storage_layout, contract_sources.keys()):
            with Path(layout_file).open() as handle:
                storage_layouts[contract_name] = json.load(handle)

    show_version = False
    if "combined_json" in output_formats:
        if len(output_formats) > 1:
            raise ValueError("If using combined_json it must be the only output format requested")
        output_formats = combined_json_outputs
        show_version = True

    # Map CLI-facing format aliases onto internal compiler format names.
    translate_map = {"abi_python": "abi", "json": "abi", "ast": "ast_dict", "ir_json": "ir_dict"}
    final_formats = [translate_map.get(fmt, fmt) for fmt in output_formats]

    compiler_data = vyper.compile_codes(
        contract_sources,
        final_formats,
        exc_handler=exc_handler,
        interface_codes=get_interface_codes(base, contract_sources),
        evm_version=evm_version,
        no_optimize=no_optimize,
        storage_layouts=storage_layouts,
        show_gas_estimates=show_gas_estimates,
    )
    if show_version:
        compiler_data["version"] = vyper.__version__
    return compiler_data
def test_opcodes():
    """Full opcodes must be a superset of (and contain) the runtime opcodes."""
    code = """
@public
def a() -> bool:
    return True
    """
    compiled = vyper.compile_codes({'': code}, ['opcodes_runtime', 'opcodes'])[0]
    # Deploy opcodes include the constructor, so they are strictly longer
    # and embed the runtime opcode string.
    assert len(compiled['opcodes']) > len(compiled['opcodes_runtime'])
    assert compiled['opcodes_runtime'] in compiled['opcodes']
def test_bytecode_runtime():
    """Deploy bytecode must be longer than, and contain, the runtime bytecode."""
    code = """
@public
def a() -> bool:
    return True
    """
    compiled = vyper.compile_codes({'': code}, ['bytecode_runtime', 'bytecode'])[0]
    assert len(compiled['bytecode']) > len(compiled['bytecode_runtime'])
    # Strip the '0x' prefix from both before the containment check.
    assert compiled['bytecode_runtime'][2:] in compiled['bytecode'][2:]
def compile_from_input_dict(
    input_dict: Dict,
    exc_handler: Callable = exc_handler_raises,
    root_folder: Union[str, None] = None,
) -> Tuple[Dict, Dict]:
    """Compile the contracts described by a standard-JSON input dict.

    Returns ``(compiler_data, warning_data)`` keyed by contract path.  On
    the first parser or compiler error the ``exc_handler`` result is
    returned immediately with an empty warning dict.
    """
    root_path = None
    if root_folder is not None:
        root_path = Path(root_folder).resolve()
        if not root_path.exists():
            raise FileNotFoundError(f"Invalid root path - '{root_path.as_posix()}' does not exist")

    if input_dict["language"] != "Vyper":
        raise JSONError(f"Invalid language '{input_dict['language']}' - Only Vyper is supported.")

    evm_version = get_evm_version(input_dict)
    no_optimize = not input_dict["settings"].get("optimize", True)

    contract_sources: ContractCodes = get_input_dict_contracts(input_dict)
    interface_sources = get_input_dict_interfaces(input_dict)
    output_formats = get_input_dict_output_formats(input_dict, contract_sources)

    compiler_data = {}
    warning_data = {}
    # Record every warning (not just the first occurrence per location).
    warnings.simplefilter("always")

    for idx, contract_path in enumerate(sorted(contract_sources)):
        with warnings.catch_warnings(record=True) as caught:
            try:
                interface_codes = get_interface_codes(
                    root_path, contract_path, contract_sources, interface_sources
                )
            except Exception as exc:
                return exc_handler(contract_path, exc, "parser"), {}
            try:
                result = vyper.compile_codes(
                    {contract_path: contract_sources[contract_path]},
                    output_formats[contract_path],
                    interface_codes=interface_codes,
                    initial_id=idx,
                    no_optimize=no_optimize,
                    evm_version=evm_version,
                )
            except Exception as exc:
                return exc_handler(contract_path, exc, "compiler"), {}
            compiler_data[contract_path] = result[contract_path]
            if caught:
                warning_data[contract_path] = caught

    return compiler_data, warning_data
def compile_files(input_files: Iterable[str],
                  output_formats: OutputFormats,
                  root_folder: str = '.',
                  show_gas_estimates: bool = False,
                  evm_version: str = DEFAULT_EVM_VERSION) -> OrderedDict:
    """Read the given Vyper files and compile them to the requested formats.

    ``combined_json`` must be the only output format when requested; it
    expands to the standard combined-output set and stamps the compiler
    version into the result.
    """
    if show_gas_estimates:
        # NOTE: global toggle on the LLL node class -- affects all
        # subsequent compilations in this process, not just this call.
        parser_utils.LLLnode.repr_show_gas = True
    root_path = Path(root_folder).resolve()
    if not root_path.exists():
        raise FileNotFoundError(
            f"Invalid root path - '{root_path.as_posix()}' does not exist")
    contract_sources: ContractCodes = OrderedDict()
    for file_name in input_files:
        file_path = Path(file_name)
        try:
            file_str = file_path.resolve().relative_to(root_path).as_posix()
        except ValueError:
            file_str = file_path.as_posix()
        with file_path.open() as fh:
            # trailing newline fixes python parsing bug when source ends in a comment
            # https://bugs.python.org/issue35107
            # (fix: the sibling compile_files already applies this workaround)
            contract_sources[file_str] = fh.read() + "\n"
    show_version = False
    if 'combined_json' in output_formats:
        if len(output_formats) > 1:
            raise ValueError(
                "If using combined_json it must be the only output format requested"
            )
        output_formats = [
            'bytecode', 'bytecode_runtime', 'abi', 'source_map', 'method_identifiers'
        ]
        show_version = True
    # Map CLI-facing format aliases onto internal compiler format names.
    translate_map = {'abi_python': 'abi', 'json': 'abi', 'ast': 'ast_dict'}
    final_formats = [translate_map.get(i, i) for i in output_formats]
    compiler_data = vyper.compile_codes(
        contract_sources,
        final_formats,
        exc_handler=exc_handler,
        interface_codes=get_interface_codes(root_path, contract_sources),
        evm_version=evm_version,
    )
    if show_version:
        compiler_data['version'] = vyper.__version__
    return compiler_data
def compile(contract):
    """Compile ``Contracts/<contract>`` with vyper and write its ABI and
    bytecode to ``Build/<contract-minus-extension>.json``.

    Parameters
    ----------
    contract : str
        File name of the contract inside ``Contracts/`` (e.g. ``hello.vy``);
        the last three characters (the extension) are stripped for the
        output file name.
    """
    path = f"Contracts/{contract}"
    # os.curdir ('.') is used as the dict key for the single source; the
    # compiled output is looked up under the same key below.
    current_dir = os.curdir
    with open(path, 'r') as f:
        read_contract = f.read()
    smart_contract = {current_dir: read_contract}
    compiled_contract_code = vyper.compile_codes(smart_contract, ['abi', 'bytecode'], 'dict')
    smart_contract_json = {
        'contract_name': contract,
        'abi': compiled_contract_code[current_dir]['abi'],
        'bytecode': compiled_contract_code[current_dir]['bytecode'],
    }
    # FIX: open the output only after compilation succeeds, and through a
    # context manager -- the original opened (and truncated) the JSON file
    # up front and leaked the handle if compile_codes raised.
    with open(f'Build/{contract[:-3]}.json', 'w') as contract_json:
        json.dump(smart_contract_json, contract_json)
# Output formats requested from the vyper compiler for every contract.
VYPER_OUTPUT_FORMATS = [
    "abi", "bytecode", "ast_dict", "external_interface", "interface",
    "method_identifiers", "asm", "source_map", "bytecode_runtime",
    "opcodes", "opcodes_runtime", "devdoc", "userdoc"
]

# Comma-joined option list handed to `solc --combined-json`.
SOLC_COMBINED_JSON_OPTIONS = "abi,asm,ast,bin,bin-runtime,compact-format,devdoc,generated-sources,generated-sources-runtime,hashes,interface,metadata,opcodes,srcmap,srcmap-runtime,storage-layout,userdoc"


def _write_build_json(build_dir_str, smart_contract_name, smart_contract_json):
    # Persist one contract's compilation artifacts as <build>/<name>.json.
    contract_json_file = build_dir_str + '/' + smart_contract_name + '.json'
    with open(contract_json_file, 'w') as smart_contract_build_file:
        dump(smart_contract_json, smart_contract_build_file, indent=4)


def _write_migration_file(smart_contract_name, migration_directory):
    # Render the migration template into deploy_<name>.py for this contract.
    migration_sample = Path(__file__).parent / Path("compiled_files") / Path("migration_sample.py")
    migration_file = migration_directory / Path(f"deploy_{smart_contract_name}.py")
    with open(migration_file, "w") as file:
        migration_sample_content = migration_sample.read_text()
        file.write(migration_sample_content.format(smart_contract_name=smart_contract_name))


def _compile_vyper_file(vyper_file, build_directory, migration_directory):
    # Compile one .vy file with the in-process vyper compiler.
    build_dir_str = str(build_directory)
    smart_contract_name = Path(vyper_file).with_suffix('').name
    with open(vyper_file, 'r') as f:
        content = f.read()
    # NOTE: the build directory path doubles as the source dict key, so the
    # compiled output is retrieved under the same key.
    compiled_code = compile_codes({build_dir_str: content}, VYPER_OUTPUT_FORMATS, 'dict')
    smart_contract_json = {
        "contractName": smart_contract_name,
        "compiler": {"name": "vyper", "version": version},
    }
    # 'fmt' avoids shadowing the 'format' builtin (original used 'format').
    for fmt in VYPER_OUTPUT_FORMATS:
        smart_contract_json[fmt] = compiled_code[build_dir_str][fmt]
    _write_build_json(build_dir_str, smart_contract_name, smart_contract_json)
    _write_migration_file(smart_contract_name, migration_directory)


def _compile_solidity_file(solidity_file, build_directory, migration_directory):
    # Compile one .sol file by shelling out to the external `solc` binary.
    build_dir_str = str(build_directory)
    try:
        p = subprocess.Popen(
            ["solc", "--combined-json", SOLC_COMBINED_JSON_OPTIONS, solidity_file],
            stdout=subprocess.PIPE,
        )
        out, err = p.communicate()
        output_json = loads(out.decode())
        # combined-json keys contracts as "<file>:<ContractName>".
        for contract in output_json["contracts"]:
            contract_compilation = output_json["contracts"][contract]
            compiler_version = output_json["version"]
            smart_contract_name = contract.split(":")[1]
            smart_contract_json = {
                "contractName": smart_contract_name,
                "compiler": {"name": "solidity", "version": compiler_version},
            }
            for fmt in contract_compilation:
                smart_contract_json[fmt] = contract_compilation[fmt]
            _write_build_json(build_dir_str, smart_contract_name, smart_contract_json)
            _write_migration_file(smart_contract_name, migration_directory)
    except FileNotFoundError:
        # `solc` is not on PATH.
        print("You must install Solidity compiler first.")


def compile_all_files(source_code_directory: Path, build_directory: Path,
                      migration_directory: Path):
    """Compile every .vy and .sol file under *source_code_directory*,
    writing a build JSON and a deploy_<name>.py migration script for each
    contract found.

    Refactored: the duplicated build-JSON and migration-file writing is
    shared between the vyper and solidity paths via private helpers.
    """
    for vyper_file in source_code_directory.glob("*.vy"):
        _compile_vyper_file(vyper_file, build_directory, migration_directory)
    for solidity_file in source_code_directory.glob("*.sol"):
        _compile_solidity_file(solidity_file, build_directory, migration_directory)
self.name = "Satoshi Nakamoto" @public def change_name(new_name: bytes[24]): self.name = new_name @public def say_hello() -> bytes[32]: return concat("Hello, ", self.name) ''' smart_contract = {} smart_contract['hello'] = contract_source_code format = ['abi', 'bytecode'] compiled_code = compile_codes(smart_contract, format, 'dict') abi = compiled_code['hello']['abi'] w3 = Web3(HTTPProvider('http://localhost:7545')) # Change the address of the smart contract, the private key, and the account according to your situation address = "0x9Dc44aa8d05c86388E647F954D00CaA858837804" private_key = '0x1a369cedacf0bf2f5fd16b5215527e8c8767cbd761ebefa28d9df0d389c60b6e' w3.eth.defaultAccount = '0xb105F01Ce341Ef9282dc2201BDfdA2c26903da77' Hello = w3.eth.contract(address=address, abi=abi) print(Hello.functions.name().call()) print(Hello.functions.say_hello().call())