def fetch_uri_contents(self, uri: str) -> bytes:
    """Fetch the contents behind an IPFS URI and verify them.

    The multihash embedded in *uri* is recomputed from the retrieved
    bytes; a mismatch raises ``ValidationError``.
    """
    content_hash = extract_ipfs_path_from_uri(uri)
    data = self.client.cat(content_hash)
    # Recompute the multihash locally so a misbehaving gateway cannot
    # hand back tampered contents.
    if generate_file_hash(data) != content_hash:
        raise ValidationError(
            f"Hashed IPFS contents retrieved from uri: {uri} do not match its content hash."
        )
    return data
def fetch_uri_contents(self, uri: str) -> bytes:
    """Fetch the contents behind an IPFS URI, validating the hash when possible.

    Raises ``EthPMValidationError`` if the recomputed hash of a
    small (non-chunked) payload does not match the URI's content hash.
    """
    content_hash = extract_ipfs_path_from_uri(uri)
    data = self.client.cat(content_hash)
    # Local validation of hashed contents only works for non-chunked files ~< 256kb
    # Improved validation WIP @ https://github.com/ethpm/py-ethpm/pull/165
    if len(data) <= 262144 and generate_file_hash(data) != content_hash:
        raise EthPMValidationError(
            f"Hashed IPFS contents retrieved from uri: {uri} do not match its content hash."
        )
    return data
def test_generate_file_hash(tmpdir, file_name, file_contents, expected):
    """Write the fixture contents to disk and check the computed IPFS multihash."""
    target = tmpdir.mkdir("sub").join(file_name)
    target.write(file_contents)
    on_disk_bytes = Path(target).read_bytes()
    assert generate_file_hash(on_disk_bytes) == expected
def create_manifest(project_path: Path, package_config: Dict, pin_assets: bool = False, silent: bool = True) -> Tuple[Dict, str]:
    """
    Creates a manifest from a project, and optionally pins it to IPFS.

    Arguments:
        project_path: Path to the root folder of the project
        package_config: Configuration settings for the manifest
        pin_assets: if True, all source files and the manifest will be uploaded
                    onto IPFS via Infura.

    Returns: generated manifest, ipfs uri of manifest
    """
    package_config = _remove_empty_fields(package_config)
    _verify_package_name(package_config["package_name"])
    if pin_assets:
        # Backend is only needed (and only constructed) when pinning.
        ipfs_backend = InfuraIPFSBackend()
    # Skeleton manifest; "sources", "contract_types" (and optionally
    # "deployments") are filled in by the phases below.
    manifest = {
        "manifest_version": "2",
        "package_name": package_config["package_name"],
        "version": package_config["version"],
        "sources": {},
        "contract_types": {},
    }
    if "meta" in package_config:
        manifest["meta"] = package_config["meta"]

    # load packages.json and add build_dependencies
    packages_json: Dict = {"sources": {}, "packages": {}}
    if not package_config["settings"]["include_dependencies"]:
        installed, modified = get_installed_packages(project_path)
        if modified:
            # Locally modified dependencies cannot be referenced by URI.
            raise InvalidManifest(
                f"Dependencies have been modified locally: {', '.join([i[0] for i in modified])}"
            )
        if installed:
            packages_json = _load_packages_json(project_path)
            manifest["build_dependencies"] = dict(
                (k, v["manifest_uri"]) for k, v in packages_json["packages"].items())

    # add sources
    contract_path = project_path.joinpath("contracts")
    for path in contract_path.glob("**/*.sol"):
        # Files belonging to installed dependencies are excluded — they are
        # referenced through build_dependencies instead.
        if path.relative_to(
                project_path).as_posix() in packages_json["sources"]:
            continue
        if pin_assets:
            if not silent:
                print(
                    f'Pinning "{color("bright magenta")}{path.name}{color}"...'
                )
            uri = ipfs_backend.pin_assets(path)[0]["Hash"]
        else:
            # Not pinning: compute the would-be IPFS hash locally instead.
            with path.open("rb") as fp:
                uri = generate_file_hash(fp.read())
        manifest["sources"][
            f"./{path.relative_to(contract_path).as_posix()}"] = f"ipfs://{uri}"

    # add contract_types
    for path in project_path.glob("build/contracts/*.json"):
        with path.open() as fp:
            build_json = json.load(fp)
        if not build_json["bytecode"]:
            # skip contracts that cannot deploy
            continue
        if build_json["sourcePath"] in packages_json["sources"]:
            # skip dependencies
            continue
        manifest["contract_types"][
            build_json["contractName"]] = _get_contract_type(build_json)

    # add deployments
    deployment_networks = package_config["settings"]["deployment_networks"]
    if deployment_networks:
        # Remember the currently active network so it can be restored after
        # iterating deployment networks below.
        active_network = network.show_active()
        if active_network:
            network.disconnect()
        manifest["deployments"] = {}
        if isinstance(deployment_networks, str):
            deployment_networks = [deployment_networks]
        if deployment_networks == ["*"]:
            # Wildcard: every network that has a deployments folder.
            deployment_networks = [
                i.stem for i in project_path.glob("build/deployments/*")
            ]
        for network_name in deployment_networks:
            instances = list(
                project_path.glob(f"build/deployments/{network_name}/*.json"))
            if not instances:
                continue
            # Newest-first so the most recent deployment claims the
            # unsuffixed key in the dedupe loop further down.
            instances.sort(key=lambda k: k.stat().st_mtime, reverse=True)
            # Connect in order to resolve this network's chain URI.
            network.connect(network_name)
            manifest["deployments"][web3.chain_uri] = {}
            for path in instances:
                with path.open() as fp:
                    build_json = json.load(fp)
                alias = build_json["contractName"]
                source_path = build_json["sourcePath"]
                if source_path in packages_json["sources"]:
                    # Dependency contract: qualify alias with its package name.
                    alias = f"{packages_json['sources'][source_path]['packages'][0]}:{alias}"
                if alias in manifest["contract_types"]:
                    # skip deployment if bytecode does not match that of contract_type
                    bytecode = manifest["contract_types"][alias][
                        "deployment_bytecode"]["bytecode"]
                    if f"0x{build_json['bytecode']}" != bytecode:
                        continue
                else:
                    # add contract_type for dependency
                    manifest["contract_types"][alias] = _get_contract_type(
                        build_json)
                # Find a free key: bare contract name first, then name-1,
                # name-2, ... for additional deployments of the same contract.
                key = build_json["contractName"]
                for i in itertools.count(1):
                    if key not in manifest["deployments"][web3.chain_uri]:
                        break
                    key = f"{build_json['contractName']}-{i}"
                manifest["deployments"][web3.chain_uri][key] = {
                    "address": path.stem,
                    "contract_type": alias,
                }
            network.disconnect()
        if active_network:
            network.connect(active_network)
        if not manifest["deployments"]:
            # Nothing was found on any network; drop the empty section.
            del manifest["deployments"]

    uri = None
    if pin_assets:
        if not silent:
            print("Pinning manifest...")
        # Serialize compactly with sorted keys for a deterministic hash.
        temp_path = Path(tempfile.gettempdir()).joinpath("manifest.json")
        with temp_path.open("w") as fp:
            json.dump(manifest, fp, sort_keys=True, separators=(",", ":"))
        uri = ipfs_backend.pin_assets(temp_path)[0]["Hash"]

    return manifest, uri
def get_ipfs_hash(input_data):
    """Return the IPFS content hash for *input_data* (delegates to generate_file_hash)."""
    ipfs_hash = generate_file_hash(input_data)
    return ipfs_hash