def _contract_type(
    name: str,
    compiler_output: Dict[str, Any],
    alias: Optional[str],
    selected_fields: Optional[List[str]],
    manifest: Manifest,
) -> Manifest:
    """
    Return a copy of ``manifest`` with the compiler data for contract ``name``
    inserted under ``contract_types``. When ``alias`` is given, the data is
    stored under the alias with a ``contract_type`` field pointing back to
    ``name``; ``selected_fields`` optionally restricts which fields are copied.
    """
    contracts_by_name = normalize_compiler_output(compiler_output)
    try:
        all_type_data = contracts_by_name[name]
    except KeyError:
        raise ManifestBuildingError(
            f"Contract name: {name} not found in the provided compiler output."
        )

    # Keep only the requested fields when a selection was provided.
    if selected_fields:
        contract_type_data = {
            field: value
            for field, value in all_type_data.items()
            if field in selected_fields
        }
    else:
        contract_type_data = all_type_data

    if not alias:
        return assoc_in(manifest, ["contract_types", name], contract_type_data)
    # Aliased entries record the original contract type name alongside the data.
    return assoc_in(
        manifest,
        ["contract_types", alias],
        assoc(contract_type_data, "contract_type", name),
    )
def _expect(post_state: Dict[str, Any], networks: Any, transaction: TransactionDict, filler: Dict[str, Any]) -> Dict[str, Any]:
    """
    Append a new ``expect`` entry to the filler's single test and return the
    deep-merged filler.

    The entry's ``result`` is the post-state layered over the test's existing
    pre-state and zeroed account defaults. When ``transaction`` is given, it is
    normalized and recorded on the test (wrapped in value lists with matching
    ``indexes`` when the test has no transaction yet, otherwise merged into the
    existing transaction group). When ``networks`` is given, the entry is
    restricted to those networks.
    """
    test_name = get_test_name(filler)
    test = filler[test_name]
    test_update: Dict[str, Dict[Any, Any]] = {test_name: {}}

    pre_state = test.get("pre", {})
    post_state = normalize_state(post_state or {})
    # Every address touched in the post-state gets a fully-zeroed baseline so
    # the result object is always complete.
    defaults = {
        address: {
            "balance": 0,
            "nonce": 0,
            "code": b"",
            "storage": {},
        }
        for address in post_state
    }
    result = deep_merge(defaults, pre_state, normalize_state(post_state))
    new_expect = {"result": result}

    if transaction is not None:
        transaction = normalize_transaction(
            merge(get_default_transaction(networks), transaction))
        if "transaction" not in test:
            # First transaction seen for this test: store the multi-valued
            # fields as single-element lists and point the indexes at slot 0.
            transaction_group = apply_formatters_to_dict(
                {
                    "data": wrap_in_list,
                    "gasLimit": wrap_in_list,
                    "value": wrap_in_list,
                }, transaction)
            indexes = {
                index_key: 0
                for transaction_key, index_key in [
                    ("gasLimit", "gas"),
                    ("value", "value"),
                    ("data", "data"),
                ]
                if transaction_key in transaction_group
            }
        else:
            # Subsequent transactions are merged into the existing group.
            transaction_group, indexes = add_transaction_to_group(
                test["transaction"], transaction)
        new_expect = assoc(new_expect, "indexes", indexes)
        test_update = assoc_in(test_update, [test_name, "transaction"], transaction_group)

    if networks is not None:
        networks = normalize_networks(networks)
        new_expect = assoc(new_expect, "networks", networks)

    # Expect entries accumulate; earlier entries are preserved.
    existing_expects = test.get("expect", [])
    expect = existing_expects + [new_expect]
    test_update = assoc_in(test_update, [test_name, "expect"], expect)
    return deep_merge(filler, test_update)
def activate_registry(uri_or_alias: str, config: Config) -> None:
    """
    Mark the registry identified by ``uri_or_alias`` as the active one in the
    on-disk registry store. No-op when that registry is already active;
    otherwise the previously active entry is deactivated and the store file is
    rewritten.
    """
    store_path = config.xdg_ethpmcli_root / REGISTRY_STORE
    store_data = json.loads(store_path.read_text())
    target = resolve_uri_or_alias(uri_or_alias, store_path)
    currently_active = get_active_registry(store_path)
    if target.uri == currently_active.uri:
        return
    updated = assoc_in(store_data, [currently_active.uri, "active"], False)
    updated = assoc_in(updated, [target.uri, "active"], True)
    write_store_data_to_disk(updated, store_path)
def add_compilers_to_manifest(compiler_info: Dict[str, Any], contract_type: str,
                              manifest: Manifest) -> Manifest:
    """
    Adds a compiler information object to a manifest's top-level `compilers`.

    :param compiler_info: compiler metadata object; not mutated.
    :param contract_type: contract type name to attribute to this compiler.
    :param manifest: manifest to copy and extend.
    :return: a new manifest with the compiler entry added or updated.
    """
    if "compilers" not in manifest:
        # Copy-on-write: the previous implementation wrote `contractTypes`
        # directly into the caller's `compiler_info` dict, mutating their data.
        tagged_compiler_info = assoc(compiler_info, "contractTypes", [contract_type])
        return assoc_in(manifest, ["compilers"], [tagged_compiler_info])
    updated_compiler_info = update_compilers_object(compiler_info, contract_type,
                                                    manifest["compilers"])
    return assoc_in(manifest, ["compilers"], updated_compiler_info)
def test_equality(identity_scheme_registry):
    """Two ENRs are equal iff sequence number, kv pairs, and signature match."""
    base_kwargs = {
        "sequence_number": 0,
        "kv_pairs": {
            b"id": b"mock",
            b"key1": b"value1",
            b"key2": b"value2",
        },
        "signature": b"signature",
        "identity_scheme_registry": identity_scheme_registry,
    }

    base_enr = ENR(**base_kwargs)
    equal_enr = ENR(**base_kwargs)

    # Vary exactly one component per record.
    bumped_sequence_enr = ENR(**assoc(base_kwargs, "sequence_number", 1))
    changed_kv_enr = ENR(
        **assoc_in(base_kwargs, ("kv_pairs", b"key1"), b"value2"),
    )
    changed_signature_enr = ENR(
        **assoc(base_kwargs, "signature", b"different-signature"))

    assert base_enr == base_enr
    assert equal_enr == base_enr
    assert bumped_sequence_enr != base_enr
    assert changed_kv_enr != base_enr
    assert changed_signature_enr != base_enr
def _inline_source(
    name: str,
    compiler_output: Dict[str, Any],
    package_root_dir: Optional[Path],
    manifest: Manifest,
) -> Manifest:
    """
    Return a copy of ``manifest`` with the source text for contract ``name``
    inlined under ``sources``. The source file is resolved against
    ``package_root_dir`` when given, otherwise against the current working
    directory.

    :raises ManifestBuildingError: if ``name`` is not present in the compiler
        output, or the resolved source file does not exist.
    """
    names_and_paths = get_names_and_paths(compiler_output)
    cwd = Path.cwd()
    try:
        source_path_suffix = names_and_paths[name]
    except KeyError:
        # Previously an unknown contract name escaped as a bare KeyError;
        # raise the same helpful error the sibling source helpers use.
        raise ManifestBuildingError(
            f"Unable to inline source: {name}. "
            f"Available sources include: {list(sorted(names_and_paths.keys()))}."
        )
    if package_root_dir:
        if (package_root_dir / source_path_suffix).is_file():
            source_data = (package_root_dir / source_path_suffix).read_text()
        else:
            raise ManifestBuildingError(
                f"Contract source: {source_path_suffix} cannot be found in "
                f"provided package_root_dir: {package_root_dir}.")
    elif (cwd / source_path_suffix).is_file():
        source_data = (cwd / source_path_suffix).read_text()
    else:
        raise ManifestBuildingError(
            "Contract source cannot be resolved, please make sure that the working "
            "directory is set to the correct directory or provide `package_root_dir`."
        )
    return assoc_in(manifest, ["sources", source_path_suffix], source_data)
def _pin_source(
    name: str,
    compiler_output: Dict[str, Any],
    ipfs_backend: BaseIPFSBackend,
    package_root_dir: Optional[Path],
    manifest: Manifest,
) -> Manifest:
    """
    Pin the source file for contract ``name`` via ``ipfs_backend`` and return
    a copy of ``manifest`` with the resulting ``ipfs://`` URI recorded under
    ``sources``. The file is resolved against ``package_root_dir`` when given,
    otherwise against the current working directory.

    :raises ManifestBuildingError: if ``name`` is not present in the compiler
        output, or the resolved source file does not exist.
    """
    names_and_paths = get_names_and_paths(compiler_output)
    try:
        source_path = names_and_paths[name]
    except KeyError:
        # Previously an unknown contract name escaped as a bare KeyError;
        # raise the same helpful error the sibling source helpers use.
        raise ManifestBuildingError(
            f"Unable to pin source: {name}. "
            f"Available sources include: {list(sorted(names_and_paths.keys()))}."
        )
    if package_root_dir:
        if not (package_root_dir / source_path).is_file():
            raise ManifestBuildingError(
                f"Unable to find and pin contract source: {source_path} "
                f"under specified package_root_dir: {package_root_dir}.")
        (ipfs_data, ) = ipfs_backend.pin_assets(package_root_dir / source_path)
    else:
        cwd = Path.cwd()
        if not (cwd / source_path).is_file():
            raise ManifestBuildingError(
                f"Unable to find and pin contract source: {source_path} "
                f"current working directory: {cwd}.")
        (ipfs_data, ) = ipfs_backend.pin_assets(cwd / source_path)
    return assoc_in(manifest, ["sources", source_path], f"ipfs://{ipfs_data['Hash']}")
def _inline_source(
    name: str,
    compiler_output: Dict[str, Any],
    package_root_dir: Optional[Path],
    manifest: Manifest,
) -> Manifest:
    """
    Return a copy of ``manifest`` with the source text for contract ``name``
    inlined under ``sources``. Resolution happens against ``package_root_dir``
    when given, otherwise against the current working directory.

    :raises ManifestBuildingError: on an unknown contract name or a missing
        source file.
    """
    names_and_paths = get_names_and_paths(compiler_output)
    cwd = Path.cwd()
    try:
        source_path = names_and_paths[name]
    except KeyError:
        raise ManifestBuildingError(
            f"Unable to inline source: {name}. "
            f"Available sources include: {list(sorted(names_and_paths.keys()))}."
        )

    if package_root_dir:
        candidate = package_root_dir / source_path
        if not candidate.is_file():
            raise ManifestBuildingError(
                f"Contract source: {source_path} cannot be found in "
                f"provided package_root_dir: {package_root_dir}.")
        source_data = candidate.read_text()
    else:
        candidate = cwd / source_path
        if not candidate.is_file():
            raise ManifestBuildingError(
                "Contract source cannot be resolved, please make sure that the working "
                "directory is set to the correct directory or provide `package_root_dir`."
            )
        source_data = candidate.read_text()

    # rstrip used here since Path.read_text() adds a newline to returned contents
    return assoc_in(manifest, ["sources", source_path], source_data.rstrip("\n"))
def fill_test(filler: Dict[str, Any], info: Optional[Dict[str, Any]] = None, apply_formatter: bool=True, **kwargs: Any) -> Dict[str, Any]:
    """
    Fill the given test filler into a complete test fixture.

    Dispatches on the test body: a ``"transaction"`` key selects the state-test
    path, an ``"exec"`` key selects the VM-test path. The filled test gets an
    ``_info`` object stamped with the filler version (merged over any caller
    supplied ``info``) and, unless ``apply_formatter`` is False, is run through
    the matching output formatter.

    :raises ValueError: when the filler is neither a VM nor a state test.
    """
    test_name = get_test_name(filler)
    test = filler[test_name]
    if "transaction" in test:
        filled = fill_state_test(filler)
        formatter = filled_state_test_formatter
    elif "exec" in test:
        filled = fill_vm_test(filler, **kwargs)
        formatter = filled_vm_test_formatter
    else:
        raise ValueError("Given filler does not appear to be for VM or state test")
    # Caller-provided info wins over the generated "filledwith" stamp.
    info = merge(
        {"filledwith": FILLED_WITH_TEMPLATE.format(version=get_version_from_git())},
        info if info else {}
    )
    filled = assoc_in(filled, [test_name, "_info"], info)
    if apply_formatter:
        return formatter(filled)
    else:
        return filled
def _pin_source(
    name: str,
    compiler_output: Dict[str, Any],
    ipfs_backend: BaseIPFSBackend,
    package_root_dir: Optional[Path],
    manifest: Manifest,
) -> Manifest:
    """
    Pin the source file for contract ``name`` via ``ipfs_backend`` and return
    a copy of ``manifest`` with a source object (ipfs url, type, install path)
    recorded under ``sources``.

    :raises ManifestBuildingError: on an unknown contract name or a missing
        source file.
    """
    names_and_paths = get_names_and_paths(compiler_output)
    try:
        source_path = names_and_paths[name]
    except KeyError:
        raise ManifestBuildingError(
            f"Unable to pin source: {name}. "
            f"Available sources include: {list(sorted(names_and_paths.keys()))}."
        )

    # Resolve the file either under the explicit package root or the cwd.
    if package_root_dir:
        file_path = package_root_dir / source_path
        if not file_path.is_file():
            raise ManifestBuildingError(
                f"Unable to find and pin contract source: {source_path} "
                f"under specified package_root_dir: {package_root_dir}.")
    else:
        cwd = Path.cwd()
        file_path = cwd / source_path
        if not file_path.is_file():
            raise ManifestBuildingError(
                f"Unable to find and pin contract source: {source_path} "
                f"current working directory: {cwd}.")

    (ipfs_data, ) = ipfs_backend.pin_assets(file_path)
    source_data_object = {
        "urls": [f"ipfs://{ipfs_data['Hash']}"],
        "type": "solidity",
        "installPath": source_path,
    }
    return assoc_in(manifest, ["sources", source_path], source_data_object)
def _build_dependency(package_name: str, uri: URI, manifest: Manifest) -> Manifest:
    """
    Return a copy of ``manifest`` with ``package_name -> uri`` recorded under
    ``buildDependencies``.

    :raises EthPMValidationError: on an invalid package name, or on a URI that
        is not a supported content-addressed URI (IPFS / Github blob).
    """
    validate_package_name(package_name)
    if is_supported_content_addressed_uri(uri):
        return assoc_in(manifest, ("buildDependencies", package_name), uri)
    raise EthPMValidationError(
        f"{uri} is not a supported content-addressed URI. "
        "Currently only IPFS and Github blob uris are supported.")
def check_compiler(contract_name: str, data: Dict[str, Any], warnings: Dict[str, str]) -> Dict[str, str]:
    """Return ``warnings`` extended with a note when ``contract_name`` has no
    (or an empty) compiler entry; otherwise return ``warnings`` unchanged."""
    if data.get("compiler"):
        return warnings
    return assoc_in(
        warnings,
        ["contract_types", contract_name, "compiler"],
        WARNINGS["compiler_missing"].format(contract_name),
    )
def check_userdoc(contract_name: str, data: Dict[str, Any], warnings: Dict[str, str]) -> Dict[str, str]:
    """Return ``warnings`` extended with a note when ``contract_name`` has no
    (or an empty) userdoc entry; otherwise return ``warnings`` unchanged."""
    if data.get("userdoc"):
        return warnings
    return assoc_in(
        warnings,
        ["contractTypes", contract_name, "userdoc"],
        WARNINGS["userdoc_missing"].format(contract_name),
    )
def owned_package(ethpm_spec_dir):
    """
    Fixture data for the `owned` example package: returns a tuple of
    (contracts_dir, manifest, compiler_output).
    """
    manifest = get_ethpm_spec_manifest("owned", "v3.json")
    # source_id missing `./` prefix in ethpm-spec ("Owned.sol"/"./Owned.sol" though both are valid)
    owned_source = manifest['sources'].pop('Owned.sol')
    updated_manifest = assoc_in(manifest, ['sources', './Owned.sol'], owned_source)

    compiler = get_ethpm_local_manifest("owned", "output_v3.json")["contracts"]
    contracts_dir = ethpm_spec_dir / "examples" / "owned" / "contracts"
    return contracts_dir, updated_manifest, compiler
def safe_math_lib_package_with_alias(deployer, w3):
    """
    Deploy safe-math-lib and return a Package whose deployment entry is keyed
    by the alias "safe-math-lib-alias" rather than "SafeMathLib".
    """
    manifest_path = ASSETS_DIR / "safe-math-lib" / "1.0.1.json"
    pkg = deployer(manifest_path).deploy("SafeMathLib")

    blockchain_uri = next(iter(pkg.manifest["deployments"]))
    deployment_data = pkg.manifest["deployments"][blockchain_uri]["SafeMathLib"]
    aliased_manifest = assoc_in(
        pkg.manifest,
        ["deployments", blockchain_uri],
        {"safe-math-lib-alias": deployment_data},
    )
    return Package(aliased_manifest, w3)
def test_builder_with_single_build_dependency():
    """build_dependency() adds a single entry under `build_dependencies`."""
    dep_uri = "ipfs://QmUYcVzTfSwJoigggMxeo2g5STWAgJdisQsqcXHws7b1FW"
    expected = assoc_in(
        BASE_MANIFEST, ["build_dependencies"], {"package": dep_uri}
    )
    actual = build(
        BASE_MANIFEST,
        build_dependency("package", dep_uri),
    )
    assert actual == expected
def check_runtime_bytecode(contract_name: str, data: Dict[str, Any], warnings: Dict[str, str]) -> Dict[str, str]:
    """Warn when runtime bytecode is absent or empty; otherwise delegate the
    per-object checks to ``check_bytecode_object``."""
    runtime_bytecode = data.get("runtime_bytecode")
    if not runtime_bytecode:
        return assoc_in(
            warnings,
            ["contract_types", contract_name, "runtime_bytecode"],
            WARNINGS["runtime_bytecode_missing"].format(contract_name),
        )
    return build(
        warnings,
        check_bytecode_object(contract_name, "runtime", runtime_bytecode),
    )
def check_bytecode_object(
    contract_name: str,
    bytecode_type: str,
    bytecode_data: Dict[str, Any],
    warnings: Dict[str, str],
) -> Dict[str, str]:
    """Warn when the ``bytecode`` subfield of a bytecode object is absent or
    empty; otherwise return ``warnings`` unchanged."""
    # todo: check if bytecode has link_refs & validate link_refs present in object
    if bytecode_data.get("bytecode"):
        return warnings
    return assoc_in(
        warnings,
        ["contract_types", contract_name, f"{bytecode_type}_bytecode"],
        WARNINGS["bytecode_subfield_missing"].format(contract_name, bytecode_type),
    )
def _contract_type(
    name: str,
    compiler_output: Dict[str, Any],
    alias: Optional[str],
    selected_fields: Optional[List[str]],
    manifest: Manifest,
) -> Manifest:
    """
    Return a copy of ``manifest`` with the compiler data for contract ``name``
    inserted under ``contractTypes``.

    :param name: contract name as it appears in the compiler output.
    :param compiler_output: raw compiler output to pull the type data from.
    :param alias: optional key to store the contract type under; when set, a
        ``contractType`` field pointing back to ``name`` is added to the data.
    :param selected_fields: optional whitelist of fields to copy over.
    :param manifest: manifest to copy and extend.
    :raises ManifestBuildingError: if ``name`` is not found in the output.
    """
    contracts_by_name = normalize_compiler_output(compiler_output)
    try:
        all_type_data = contracts_by_name[name]
    except KeyError:
        raise ManifestBuildingError(
            f"Contract name: {name} not found in the provided compiler output."
        )
    if selected_fields:
        contract_type_data = filter_all_data_by_selected_fields(
            all_type_data, selected_fields
        )
    else:
        contract_type_data = all_type_data

    # Compiler info is hoisted out of the contract type data and recorded in
    # the manifest's top-level `compilers` list, keyed to this contract type.
    if "compiler" in contract_type_data:
        compiler_info = contract_type_data.pop('compiler')
        contract_type_ref = alias if alias else name
        manifest_with_compilers = add_compilers_to_manifest(
            compiler_info, contract_type_ref, manifest
        )
    else:
        manifest_with_compilers = manifest

    if alias:
        return assoc_in(
            manifest_with_compilers,
            ["contractTypes", alias],
            assoc(contract_type_data, "contractType", name),
        )
    return assoc_in(manifest_with_compilers, ["contractTypes", name], contract_type_data)
def state_definition_to_dict(state_definition: GeneralState) -> AccountState:
    """Convert a state definition to the canonical dict form.

    State can either be defined in the canonical form, or as a list of sub
    states that are then merged to one. Sub states can either be given as
    dictionaries themselves, or as tuples where the last element is the value
    and all others the keys for this value in the nested state dictionary.
    Example:

    ```
        [
            ("0xaabb", "balance", 3),
            ("0xaabb", "storage", {
                4: 5,
            }),
            "0xbbcc", {
                "balance": 6,
                "nonce": 7
            }
        ]
    ```

    :raises TypeError: if ``state_definition`` is neither a mapping nor an
        iterable of sub states.
    :raises ValidationError: if sub states collide, or an account uses fields
        other than balance/nonce/storage/code.
    """
    if isinstance(state_definition, Mapping):
        state_dict = state_definition
    elif isinstance(state_definition, Iterable):
        # Tuples become nested single-entry dicts; mappings pass through.
        state_dicts = [
            assoc_in(
                {},
                state_item[:-1],
                state_item[-1]
            ) if not isinstance(state_item, Mapping) else state_item
            for state_item in state_definition
        ]
        if not is_cleanly_mergable(*state_dicts):
            raise ValidationError("Some state item is defined multiple times")
        state_dict = deep_merge(*state_dicts)
    else:
        # Bug fix: this was `assert TypeError(...)`, which always passes (a
        # TypeError instance is truthy) and let execution continue to an
        # unbound `state_dict`; raise the error instead.
        raise TypeError("State definition must either be a mapping or a sequence")

    seen_keys = set(concat(d.keys() for d in state_dict.values()))
    bad_keys = seen_keys - {"balance", "nonce", "storage", "code"}
    if bad_keys:
        raise ValidationError(
            "State definition contains the following invalid account fields: {}".format(
                ", ".join(bad_keys)
            )
        )
    return state_dict
def test_builder_with_multiple_build_dependencies():
    """build_dependency() accumulates multiple entries under `build_dependencies`."""
    deps = {
        "escrow": "ipfs://QmPDwMHk8e1aMEZg3iKsUiPSkhHkywpGB3KHKM52RtGrkv",
        "package": "ipfs://QmUYcVzTfSwJoigggMxeo2g5STWAgJdisQsqcXHws7b1FW",
    }
    expected = assoc_in(BASE_MANIFEST, ["build_dependencies"], deps)
    actual = build(
        BASE_MANIFEST,
        build_dependency("package", deps["package"]),
        build_dependency("escrow", deps["escrow"]),
    )
    assert actual == expected
def _pre_state(raw_state: Dict[str, Any], filler: Dict[str, Any]) -> Dict[str, Any]:
    """
    Merge ``raw_state`` into the filler's pre-state and return the updated
    filler. Every address in the new state gets zeroed defaults
    (balance/nonce/code/storage) which are then overridden by any existing
    pre-state and finally by the normalized ``raw_state``.

    NOTE(review): the previous signature took only ``filler`` yet referenced an
    undefined name ``raw_state``, so every call raised ``NameError``. The state
    being merged is now an explicit first parameter; since no working caller
    could have existed, this cannot break existing code.
    """
    test_name = get_test_name(filler)
    old_pre_state = filler[test_name].get("pre_state", {})
    pre_state = normalize_state(raw_state)
    defaults = {
        address: {
            "balance": 0,
            "nonce": 0,
            "code": b"",
            "storage": {},
        }
        for address in pre_state
    }
    new_pre_state = deep_merge(defaults, old_pre_state, pre_state)
    return assoc_in(filler, [test_name, "pre"], new_pre_state)
def insert_deployment(
    package: Package,
    deployment_name: str,
    deployment_data: Dict[str, str],
    latest_block_uri: URI,
) -> Manifest:
    """
    Returns a new manifest. If a matching chain uri is found in the old manifest, it will
    update the chain uri along with the new deployment data. If no match, it will simply add
    the new chain uri and deployment data.
    """
    old_deployments_data = package.manifest.get("deployments")
    if old_deployments_data and contains_matching_uri(old_deployments_data, package.w3):
        old_chain_uri = pluck_matching_uri(old_deployments_data, package.w3)
        old_deployments_chain_data = old_deployments_data[old_chain_uri]
        # Replace specific on-chain deployment (i.e. deployment_name)
        new_deployments_chain_data_init = dissoc(old_deployments_chain_data, deployment_name)
        new_deployments_chain_data = {
            **new_deployments_chain_data_init,
            **{
                deployment_name: deployment_data
            },
        }
        # Replace all on-chain deployments: the matched chain uri is dropped and
        # its (updated) data is re-keyed under the latest block uri.
        # NOTE(review): "deployments" is also passed to dissoc here even though
        # `old_deployments_data` is itself the deployments mapping keyed by
        # chain URIs — presumably a harmless leftover key; confirm intent.
        new_deployments_data_init = dissoc(old_deployments_data, "deployments", old_chain_uri)
        new_deployments_data = {
            **new_deployments_data_init,
            **{
                latest_block_uri: new_deployments_chain_data
            },
        }
        return assoc(package.manifest, "deployments", new_deployments_data)
    # No matching chain: nest the new deployment under the latest block uri.
    return assoc_in(
        package.manifest,
        ("deployments", latest_block_uri, deployment_name),
        deployment_data,
    )
def _deployment(
    contract_instance: str,
    contract_type: str,
    deployment_bytecode: Dict[str, Any],
    runtime_bytecode: Dict[str, Any],
    compiler: Dict[str, Any],
    block_uri: URI,
    address: HexStr,
    tx: HexStr,
    block: HexStr,
    manifest: Manifest,
) -> Manifest:
    """
    Return a copy of ``manifest`` with a deployment object for
    ``contract_instance`` inserted under ``deployments[block_uri]``.

    :raises ManifestBuildingError: on an invalid BIP122 block URI, or on a
        ``tx`` / ``block`` hash that fails the checks below.
    """
    validate_address(address)
    if not is_BIP122_block_uri(block_uri):
        raise ManifestBuildingError(f"{block_uri} is not a valid BIP122 URI.")

    # NOTE(review): these `and` conditions only reject values that are neither
    # strings nor hex; a non-hex string passes validation. Confirm whether
    # `or` was intended here.
    if tx:
        if not is_string(tx) and not is_hex(tx):
            raise ManifestBuildingError(
                f"Transaction hash: {tx} is not a valid hexstring"
            )
    if block:
        if not is_string(block) and not is_hex(block):
            raise ManifestBuildingError(f"Block hash: {block} is not a valid hexstring")
    # todo: validate db, rb and compiler are properly formatted dicts
    deployment_data = _build_deployments_object(
        contract_type,
        deployment_bytecode,
        runtime_bytecode,
        compiler,
        address,
        tx,
        block,
        manifest,
    )
    return assoc_in(
        manifest, ["deployments", block_uri, contract_instance], deployment_data
    )
def link(contract: str, linked_type: str, package: Package) -> Package:
    """
    Return a new package, created with a new manifest after applying the linked
    type reference to the contract factory.

    :raises LinkerError: if the contract factory does not require linking.
    """
    deployment_address = get_deployment_address(linked_type, package)
    unlinked_factory = package.get_contract_factory(contract)
    if not unlinked_factory.needs_bytecode_linking:
        raise LinkerError(
            f"Contract factory: {unlinked_factory.__repr__()} does not need bytecode linking, "
            "so it is not a valid contract type for link()")

    linked_factory = unlinked_factory.link_bytecode({linked_type: deployment_address})
    # todo replace runtime_bytecode in manifest
    updated_manifest = assoc_in(
        package.manifest,
        ("contract_types", contract, "deployment_bytecode", "bytecode"),
        to_hex(linked_factory.bytecode),
    )
    logger.info(
        "%s linked to %s at address %s." %
        (contract, linked_type, to_checksum_address(deployment_address)))
    return Package(updated_manifest, package.w3)
def license(license: str, manifest: Manifest) -> Manifest:
    """
    Return a copy of ``manifest`` with ``meta.license`` set to the given
    license string.
    """
    license_path = ("meta", "license")
    return assoc_in(manifest, license_path, license)
def _authors(authors: Set[str], manifest: Manifest) -> Manifest:
    """
    Return a copy of ``manifest`` with ``meta.authors`` set to the given
    authors, stored as a list.
    """
    author_list = list(authors)
    return assoc_in(manifest, ("meta", "authors"), author_list)
def description(description: str, manifest: Manifest) -> Manifest:
    """
    Return a copy of ``manifest`` with ``meta.description`` set to the given
    description string.
    """
    description_path = ("meta", "description")
    return assoc_in(manifest, description_path, description)
def _keywords(keywords: Set[str], manifest: Manifest) -> Manifest:
    """
    Return a copy of ``manifest`` with ``meta.keywords`` set to the given
    keywords, stored as a list.
    """
    keyword_list = list(keywords)
    return assoc_in(manifest, ("meta", "keywords"), keyword_list)
def _links(link_dict: Dict[str, str], manifest: Manifest) -> Manifest:
    """
    Return a copy of ``manifest`` with ``meta.links`` set to the given mapping
    of link names to URIs.
    """
    links_path = ("meta", "links")
    return assoc_in(manifest, links_path, link_dict)