def test_data_token_creation(web3, alice_wallet, dtfactory_address):
    """Tests that a data token can be created using a DTFactory object."""
    dtfactory = DTFactory(web3, dtfactory_address)

    dt_address = dtfactory.createToken(
        "foo_blob", "DT1", "DT1", to_wei(1000), from_wallet=alice_wallet
    )
    dt = DataToken(web3, dtfactory.get_token_address(dt_address))
    assert isinstance(dt, DataToken)
    assert dt.blob() == "foo_blob"
    assert dtfactory.verify_data_token(dt.address)
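# Illustrative sketch (not part of the test above): the cap passed to createToken,
# to_wei(1000), is assumed to follow the standard 18-decimal ERC20 convention,
# i.e. 1000 tokens expressed in wei-style base units. to_wei_sketch below is a
# hypothetical stand-in written for illustration, not the library's own helper.
from decimal import Decimal


def to_wei_sketch(amount) -> int:
    # convert a token amount to base units, assuming 18 decimals
    return int(Decimal(str(amount)) * 10**18)


assert to_wei_sketch(1000) == 1000 * 10**18
assert to_wei_sketch("0.5") == 5 * 10**17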
def create(
    self,
    metadata: dict,
    publisher_wallet: Wallet,
    service_descriptors: list = None,
    owner_address: str = None,
    data_token_address: str = None,
    provider_uri=None,
    dt_name: str = None,
    dt_symbol: str = None,
    dt_blob: str = None,
    dt_cap: float = None,
) -> (Asset, None):
    """
    Register an asset on-chain by creating/deploying a DataToken contract
    and in the Metadata store (Aquarius).

    :param metadata: dict conforming to the Metadata accepted by Ocean Protocol.
    :param publisher_wallet: Wallet of the publisher registering this asset
    :param service_descriptors: list of ServiceDescriptor tuples of length 2.
        The first item must be one of ServiceTypes and the second
        item is a dict of parameters and values required by the service
    :param owner_address: hex str the ethereum address to assign asset ownership to. After
        registering the asset on-chain, the ownership is transferred to this address
    :param data_token_address: hex str the address of the data token smart contract. The new
        asset will be associated with this data token address.
    :param provider_uri: str URL of service provider. This will be used as base to
        construct the serviceEndpoint for the `access` (download) service
    :param dt_name: str name of DataToken if creating a new one
    :param dt_symbol: str symbol of DataToken if creating a new one
    :param dt_blob: str blob of DataToken if creating a new one. A `blob` is any text
        to be stored with the ERC20 DataToken contract for any purpose.
    :param dt_cap: float
    :return: DDO instance
    """
    assert isinstance(
        metadata, dict
    ), f"Expected metadata of type dict, got {type(metadata)}"
    assert service_descriptors is None or isinstance(
        service_descriptors, list
    ), f"bad type of `service_descriptors` {type(service_descriptors)}"

    # copy metadata so we don't change the original
    metadata_copy = copy.deepcopy(metadata)
    asset_type = metadata_copy["main"]["type"]
    assert asset_type in (
        "dataset",
        "algorithm",
    ), f"Invalid/unsupported asset type {asset_type}"

    if not plecos.is_valid_dict_local(metadata_copy):
        errors = plecos.list_errors_dict_local(metadata_copy)
        msg = f"Metadata has validation errors: {errors}"
        logger.error(msg)
        raise ValueError(msg)

    service_descriptors = service_descriptors or []
    services = self._process_service_descriptors(
        service_descriptors, metadata_copy, provider_uri, publisher_wallet
    )

    stype_to_service = {s.type: s for s in services}
    checksum_dict = dict()
    for service in services:
        checksum_dict[str(service.index)] = checksum(service.main)

    # Create a DDO object
    asset = Asset()
    # Adding proof to the ddo.
    asset.add_proof(checksum_dict, publisher_wallet)

    #################
    # DataToken
    address = DTFactory.configured_address(
        Web3Helper.get_network_name(), self._config.address_file
    )
    dtfactory = DTFactory(address)
    if not data_token_address:
        blob = dt_blob or ""
        name = dt_name or metadata["main"]["name"]
        symbol = dt_symbol or name
        # register on-chain
        _cap = dt_cap if dt_cap else DataToken.DEFAULT_CAP
        tx_id = dtfactory.createToken(
            blob, name, symbol, to_base_18(_cap), from_wallet=publisher_wallet
        )
        data_token = DataToken(dtfactory.get_token_address(tx_id))
        if not data_token:
            logger.warning("Creating new data token failed.")
            return None

        data_token_address = data_token.address
        logger.info(
            f"Successfully created data token with address "
            f"{data_token.address} for new dataset asset."
        )
        # owner_address is set as minter only if creating new data token. So if
        # `data_token_address` is set `owner_address` has no effect.
        if owner_address:
            data_token.proposeMinter(owner_address, from_wallet=publisher_wallet)
    else:
        # verify data_token_address
        dt = DataToken(data_token_address)
        minter = dt.contract_concise.minter()
        if not minter:
            raise AssertionError(
                f"datatoken address {data_token_address} does not seem to be a valid DataToken contract."
            )
        elif minter.lower() != publisher_wallet.address.lower():
            raise AssertionError(
                f"Minter of datatoken {data_token_address} is not the same as the publisher."
            )
        elif not dtfactory.verify_data_token(data_token_address):
            raise AssertionError(
                f"datatoken address {data_token_address} is not found in the DTFactory events."
            )

    assert (
        data_token_address
    ), "data_token_address is required for publishing a dataset asset."

    # Generating the did and adding to the ddo.
    did = asset.assign_did(f"did:op:{remove_0x_prefix(data_token_address)}")
    logger.debug(f"Using datatoken address as did: {did}")
    # Check if it's already registered first!
    if did in self._get_aquarius().list_assets():
        raise OceanDIDAlreadyExist(
            f"Asset id {did} is already registered to another asset."
        )

    md_service = stype_to_service[ServiceTypes.METADATA]
    ddo_service_endpoint = md_service.service_endpoint
    if "{did}" in ddo_service_endpoint:
        ddo_service_endpoint = ddo_service_endpoint.replace("{did}", did)
        md_service.set_service_endpoint(ddo_service_endpoint)

    # Populate the ddo services
    asset.add_service(md_service)
    access_service = stype_to_service.get(ServiceTypes.ASSET_ACCESS, None)
    compute_service = stype_to_service.get(ServiceTypes.CLOUD_COMPUTE, None)

    if access_service:
        asset.add_service(access_service)
    if compute_service:
        asset.add_service(compute_service)

    asset.proof["signatureValue"] = Web3Helper.sign_hash(
        add_ethereum_prefix_and_hash_msg(asset.asset_id), publisher_wallet
    )

    # Add public key and authentication
    asset.add_public_key(did, publisher_wallet.address)
    asset.add_authentication(did, PUBLIC_KEY_TYPE_RSA)

    # Setup metadata service
    # First compute files_encrypted
    assert metadata_copy["main"][
        "files"
    ], "files is required in the metadata main attributes."
    logger.debug("Encrypting content urls in the metadata.")

    publisher_signature = self._data_provider.sign_message(
        publisher_wallet, asset.asset_id, self._config
    )
    _, encrypt_endpoint = self._data_provider.build_encrypt_endpoint(provider_uri)
    files_encrypted = self._data_provider.encrypt_files_dict(
        metadata_copy["main"]["files"],
        encrypt_endpoint,
        asset.asset_id,
        publisher_wallet.address,
        publisher_signature,
    )

    # only assign if the encryption worked
    if files_encrypted:
        logger.debug(f"Content urls encrypted successfully {files_encrypted}")
        index = 0
        for file in metadata_copy["main"]["files"]:
            file["index"] = index
            index = index + 1
            del file["url"]
        metadata_copy["encryptedFiles"] = files_encrypted
    else:
        raise AssertionError("Encrypting the files failed.")

    logger.debug(
        f"Generated asset and services, DID is {asset.did},"
        f" metadata service @{ddo_service_endpoint}."
    )

    # Set datatoken address in the asset
    asset.data_token_address = data_token_address

    try:
        # publish the new ddo in ocean-db/Aquarius
        ddo_registry = self.ddo_registry()
        web3 = Web3Provider.get_web3()
        tx_id = ddo_registry.create(
            asset.asset_id,
            bytes([1]),
            lzma.compress(web3.toBytes(text=asset.as_text())),
            publisher_wallet,
        )
        if not ddo_registry.verify_tx(tx_id):
            raise AssertionError(
                f"create DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}"
            )
        logger.info("Asset/ddo published on-chain successfully.")
    except ValueError as ve:
        raise ValueError(f"Invalid value to publish in the metadata: {str(ve)}")
    except Exception as e:
        logger.error(f"Publish asset on-chain failed: {str(e)}")
        raise

    return asset
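# Sketch of the DID derivation used by create() above (assumptions noted inline):
# the DID is the datatoken contract address with its "0x" prefix stripped, under
# the "did:op" method. did_from_token_address is a hypothetical helper written only
# for illustration, and the address below is made up.
def did_from_token_address(data_token_address: str) -> str:
    # mirrors f"did:op:{remove_0x_prefix(data_token_address)}" without web3 imports
    hex_part = (
        data_token_address[2:]
        if data_token_address.lower().startswith("0x")
        else data_token_address
    )
    return f"did:op:{hex_part}"


assert did_from_token_address("0x1234abcd") == "did:op:1234abcd"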
def create(
    self,
    metadata: dict,
    publisher_wallet: Wallet,
    services: Optional[list] = None,
    owner_address: Optional[str] = None,
    data_token_address: Optional[str] = None,
    provider_uri: Optional[str] = None,
    dt_name: Optional[str] = None,
    dt_symbol: Optional[str] = None,
    dt_blob: Optional[str] = None,
    dt_cap: Optional[int] = None,
    encrypt: Optional[bool] = False,
) -> Optional[V3Asset]:
    """Register an asset on-chain.

    Creating/deploying a DataToken contract and in the Metadata store (Aquarius).

    :param metadata: dict conforming to the Metadata accepted by Ocean Protocol.
    :param publisher_wallet: Wallet of the publisher registering this asset
    :param services: list of Service objects.
    :param owner_address: hex str the ethereum address to assign asset ownership to. After
        registering the asset on-chain, the ownership is transferred to this address
    :param data_token_address: hex str the address of the data token smart contract. The new
        asset will be associated with this data token address.
    :param provider_uri: str URL of service provider. This will be used as base to
        construct the serviceEndpoint for the `access` (download) service
    :param dt_name: str name of DataToken if creating a new one
    :param dt_symbol: str symbol of DataToken if creating a new one
    :param dt_blob: str blob of DataToken if creating a new one. A `blob` is any text
        to be stored with the ERC20 DataToken contract for any purpose.
    :param dt_cap: int amount of DataTokens to mint, denoted in wei
    :return: DDO instance
    """
    assert isinstance(
        metadata, dict
    ), f"Expected metadata of type dict, got {type(metadata)}"

    # copy metadata so we don't change the original
    metadata_copy = copy.deepcopy(metadata)
    asset_type = metadata_copy["main"]["type"]
    assert asset_type in (
        "dataset",
        "algorithm",
    ), f"Invalid/unsupported asset type {asset_type}"

    validation_result, validation_errors = self.validate(metadata)
    if not validation_result:
        msg = f"Metadata has validation errors: {validation_errors}"
        logger.error(msg)
        raise ValueError(msg)

    urls = [item["url"] for item in metadata["main"]["files"]]
    if not provider_uri:
        provider_uri = DataServiceProvider.get_url(self._config)
    for url in urls:
        if not DataServiceProvider.check_single_file_info(url, provider_uri):
            msg = f"The URL of this service can not be accessed: {url}."
            logger.error(msg)
            raise ValueError(msg)

    services = services or []
    services = self._add_defaults(
        services, metadata_copy, provider_uri, publisher_wallet
    )

    checksum_dict = dict()
    for service in services:
        checksum_dict[str(service.index)] = checksum(service.main)

    # Create a DDO object
    asset = V3Asset()
    # Adding proof to the ddo.
    asset.add_proof(checksum_dict, publisher_wallet)

    #################
    # DataToken
    address = DTFactory.configured_address(
        get_network_name(web3=self._web3), self._config.address_file
    )
    dtfactory = DTFactory(self._web3, address)
    if not data_token_address:
        blob = dt_blob or ""
        name = dt_name or metadata["main"]["name"]
        symbol = dt_symbol or name
        # register on-chain
        _cap = dt_cap if dt_cap else DataToken.DEFAULT_CAP
        tx_id = dtfactory.createToken(
            blob, name, symbol, _cap, from_wallet=publisher_wallet
        )
        data_token = DataToken(self._web3, dtfactory.get_token_address(tx_id))
        if not data_token:
            logger.warning("Creating new data token failed.")
            return None

        data_token_address = data_token.address
        logger.info(
            f"Successfully created data token with address "
            f"{data_token.address} for new dataset asset."
        )
        # owner_address is set as minter only if creating new data token. So if
        # `data_token_address` is set `owner_address` has no effect.
        if owner_address:
            data_token.proposeMinter(owner_address, from_wallet=publisher_wallet)
    else:
        if not dtfactory.verify_data_token(data_token_address):
            raise ContractNotFound(
                f"datatoken address {data_token_address} is not found in the DTFactory events."
            )
        # verify data_token_address
        dt = DataToken(self._web3, data_token_address)
        minter = dt.contract.caller.minter()
        if not minter:
            raise AssertionError(
                f"datatoken address {data_token_address} does not seem to be a valid DataToken contract."
            )
        elif minter.lower() != publisher_wallet.address.lower():
            raise AssertionError(
                f"Minter of datatoken {data_token_address} is not the same as the publisher."
            )

    assert (
        data_token_address
    ), "data_token_address is required for publishing a dataset asset."

    # Generating the did and adding to the ddo.
    did = f"did:op:{remove_0x_prefix(data_token_address)}"
    asset.did = did
    logger.debug(f"Using datatoken address as did: {did}")
    # Check if it's already registered first!
    if self._get_aquarius().ddo_exists(did):
        raise AquariusError(
            f"Asset id {did} is already registered to another asset."
        )

    for service in services:
        if service.type == ServiceTypes.METADATA:
            ddo_service_endpoint = service.service_endpoint
            if "{did}" in ddo_service_endpoint:
                ddo_service_endpoint = ddo_service_endpoint.replace("{did}", did)
                service.service_endpoint = ddo_service_endpoint
        asset.add_service(service)

    asset.proof["signatureValue"] = sign_hash(
        encode_defunct(text=asset.asset_id), publisher_wallet
    )

    # Setup metadata service
    # First compute files_encrypted
    assert metadata_copy["main"][
        "files"
    ], "files is required in the metadata main attributes."
    logger.debug("Encrypting content urls in the metadata.")

    publisher_signature = self._data_provider.sign_message(
        publisher_wallet, asset.asset_id, provider_uri=provider_uri
    )
    _, encrypt_endpoint = self._data_provider.build_encrypt_endpoint(provider_uri)
    files_encrypted = self._data_provider.encrypt_files_dict(
        metadata_copy["main"]["files"],
        encrypt_endpoint,
        asset.asset_id,
        publisher_wallet.address,
        publisher_signature,
    )

    # only assign if the encryption worked
    if files_encrypted:
        logger.debug(f"Content urls encrypted successfully {files_encrypted}")
        index = 0
        for file in metadata_copy["main"]["files"]:
            file["index"] = index
            index = index + 1
            del file["url"]
        metadata_copy["encryptedFiles"] = files_encrypted
    else:
        raise AssertionError("Encrypting the files failed.")

    logger.debug(
        f"Generated asset and services, DID is {asset.did},"
        f" metadata service @{ddo_service_endpoint}."
    )

    # Set datatoken address in the asset
    asset.data_token_address = data_token_address

    flags, asset_contents = self._build_asset_contents(asset, encrypt)
    try:
        # publish the new ddo in ocean-db/Aquarius
        ddo_registry = self.ddo_registry()
        tx_id = ddo_registry.create(
            asset.asset_id, flags, asset_contents, publisher_wallet
        )
        if not ddo_registry.verify_tx(tx_id):
            raise VerifyTxFailed(
                f"create DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}"
            )
        logger.info("Asset/ddo published on-chain successfully.")
    except ValueError as ve:
        raise ValueError(f"Invalid value to publish in the metadata: {str(ve)}")
    except Exception as e:
        logger.error(f"Publish asset on-chain failed: {str(e)}")
        raise

    return asset
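# Usage sketch (illustrative, not taken from this file): how a publisher might call
# the newer create() above. The `ocean` handle, `publisher_wallet`, and the exact
# metadata field values are assumptions; when data_token_address is omitted, a new
# DataToken is deployed and its address (without the 0x prefix) becomes the DID.
sample_metadata = {
    "main": {
        "type": "dataset",
        "name": "Sample dataset",
        "author": "Alice",
        "license": "CC0: Public Domain",
        "dateCreated": "2021-01-01T00:00:00Z",
        "files": [
            {"url": "https://example.com/data.csv", "contentType": "text/csv"}
        ],
    }
}
# asset = ocean.assets.create(
#     sample_metadata,
#     publisher_wallet,
#     dt_name="DataToken1",
#     dt_symbol="DT1",
#     encrypt=False,
# )
# print(asset.did, asset.data_token_address)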