def _deployAndMintToken(symbol: str, to_address: str) -> btoken.BToken:
    wallet = get_factory_deployer_wallet(_NETWORK)
    dt_address = DataToken.deploy(
        wallet.web3,
        wallet,
        None,
        "Template Contract",
        "TEMPLATE",
        wallet.address,
        to_base_18(1000),
        DTFactory.FIRST_BLOB,
        to_address,
    )
    dt_factory = DTFactory(
        DTFactory.deploy(wallet.web3, wallet, None, dt_address, to_address)
    )
    token_address = dt_factory.get_token_address(
        dt_factory.createToken(
            symbol, symbol, symbol, DataToken.DEFAULT_CAP_BASE, wallet
        )
    )
    token = DataToken(token_address)
    token.mint(to_address, to_base_18(1000), wallet)
    return btoken.BToken(token.address)
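# Hypothetical usage of the helper above (a sketch only: `alice_address` is an
# assumed test fixture, and the balance mirrors the to_base_18(1000) minted above):
#
#   bt = _deployAndMintToken("BT1", alice_address)
#   # `bt` is a btoken.BToken wrapper around the freshly deployed DataToken,
#   # and `alice_address` now holds 1000 base-18 units of "BT1".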
def test_get_token_minter(web3, alice_wallet, dtfactory_address, alice_address):
    """Tests proper retrieval of token minter from DTFactory."""
    dtfactory = DTFactory(web3, dtfactory_address)
    dt_address = dtfactory.createToken(
        "foo_blob", "DT1", "DT1", to_wei(1000), from_wallet=alice_wallet
    )
    dt = DataToken(web3, dtfactory.get_token_address(dt_address))
    dt.mint(alice_address, to_wei(10), from_wallet=alice_wallet)
    assert dtfactory.get_token_minter(dt.address) == alice_address
def test1(network, alice_wallet, dtfactory_address):
    dtfactory = DTFactory(dtfactory_address)
    dt_address = dtfactory.createToken(
        "foo_blob", "DT1", "DT1", to_base_18(1000), from_wallet=alice_wallet
    )
    dt = DataToken(dtfactory.get_token_address(dt_address))
    assert isinstance(dt, DataToken)
    assert dt.blob() == "foo_blob"
def test_data_token_creation(web3, alice_wallet, dtfactory_address):
    """Tests that a data token can be created using a DTFactory object."""
    dtfactory = DTFactory(web3, dtfactory_address)
    dt_address = dtfactory.createToken(
        "foo_blob", "DT1", "DT1", to_wei(1000), from_wallet=alice_wallet
    )
    dt = DataToken(web3, dtfactory.get_token_address(dt_address))
    assert isinstance(dt, DataToken)
    assert dt.blob() == "foo_blob"
    assert dtfactory.verify_data_token(dt.address)
def test_data_token_event_registered(alice_wallet, dtfactory_address, alice_ocean):
    """Tests that a token registration event is created and can be retrieved."""
    dtfactory = DTFactory(dtfactory_address)
    dt_address = dtfactory.createToken(
        "foo_blob", "DT1", "DT1", to_base_18(1000.0), from_wallet=alice_wallet
    )
    dt = DataToken(dtfactory.get_token_address(dt_address))
    block = alice_ocean.web3.eth.blockNumber

    # with explicit address
    registered_event = dtfactory.get_token_registered_event(
        block - 1, block + 1, token_address=dt.address
    )

    assert registered_event.args.tokenAddress == dt.address
def test_data_token_event_registered(
    web3, alice_wallet, dtfactory_address, alice_ocean
):
    """Tests that a token registration event is created and can be retrieved."""
    dtfactory = DTFactory(web3, dtfactory_address)
    dt_address = dtfactory.createToken(
        "foo_blob", "DT1", "DT1", to_wei(1000), from_wallet=alice_wallet
    )
    dt = DataToken(web3, dtfactory.get_token_address(dt_address))
    block = alice_ocean.web3.eth.block_number

    # with explicit address
    block_confirmations = alice_ocean.config.block_confirmations.value
    registered_event = dtfactory.get_token_registered_event(
        block - (block_confirmations + 1), block, token_address=dt.address
    )

    assert registered_event.args.tokenAddress == dt.address
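# Note on the two variants of test_data_token_event_registered above: the older
# one (to_base_18, web3.eth.blockNumber, DTFactory(address)) targets the legacy
# ocean-lib / web3.py 4.x API, while the newer one (to_wei, web3.eth.block_number,
# DTFactory(web3, address)) targets the web3.py 5.x style and widens the query
# window to [block - (block_confirmations + 1), block] to account for the
# configured block confirmations before a transaction is treated as final.
# Both cap arguments express the same amount in base-18 units:
#
#   to_base_18(1000.0) == to_wei(1000) == 1000 * 10 ** 18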
def create(
    self,
    metadata: dict,
    publisher_wallet: Wallet,
    service_descriptors: list = None,
    owner_address: str = None,
    data_token_address: str = None,
    provider_uri=None,
    dt_name: str = None,
    dt_symbol: str = None,
    dt_blob: str = None,
    dt_cap: float = None,
) -> (Asset, None):
    """
    Register an asset on-chain (creating/deploying a DataToken contract) and
    in the Metadata store (Aquarius).

    :param metadata: dict conforming to the Metadata accepted by Ocean Protocol.
    :param publisher_wallet: Wallet of the publisher registering this asset
    :param service_descriptors: list of ServiceDescriptor tuples of length 2.
        The first item must be one of ServiceTypes and the second item is a
        dict of parameters and values required by the service
    :param owner_address: hex str the ethereum address to assign asset ownership to.
        After registering the asset on-chain, the ownership is transferred to this address
    :param data_token_address: hex str the address of the data token smart contract.
        The new asset will be associated with this data token address.
    :param provider_uri: str URL of service provider. This will be used as base to
        construct the serviceEndpoint for the `access` (download) service
    :param dt_name: str name of DataToken if creating a new one
    :param dt_symbol: str symbol of DataToken if creating a new one
    :param dt_blob: str blob of DataToken if creating a new one. A `blob` is any text
        to be stored with the ERC20 DataToken contract for any purpose.
    :param dt_cap: float
    :return: DDO instance
    """
    assert isinstance(
        metadata, dict
    ), f"Expected metadata of type dict, got {type(metadata)}"
    assert service_descriptors is None or isinstance(
        service_descriptors, list
    ), f"bad type of `service_descriptors` {type(service_descriptors)}"

    # copy metadata so we don't change the original
    metadata_copy = copy.deepcopy(metadata)
    asset_type = metadata_copy["main"]["type"]
    assert asset_type in (
        "dataset",
        "algorithm",
    ), f"Invalid/unsupported asset type {asset_type}"
    if not plecos.is_valid_dict_local(metadata_copy):
        errors = plecos.list_errors_dict_local(metadata_copy)
        msg = f"Metadata has validation errors: {errors}"
        logger.error(msg)
        raise ValueError(msg)

    service_descriptors = service_descriptors or []

    services = self._process_service_descriptors(
        service_descriptors, metadata_copy, provider_uri, publisher_wallet
    )

    stype_to_service = {s.type: s for s in services}
    checksum_dict = dict()
    for service in services:
        checksum_dict[str(service.index)] = checksum(service.main)

    # Create a DDO object
    asset = Asset()
    # Adding proof to the ddo.
    asset.add_proof(checksum_dict, publisher_wallet)

    #################
    # DataToken
    address = DTFactory.configured_address(
        Web3Helper.get_network_name(), self._config.address_file
    )
    dtfactory = DTFactory(address)
    if not data_token_address:
        blob = dt_blob or ""
        name = dt_name or metadata["main"]["name"]
        symbol = dt_symbol or name
        # register on-chain
        _cap = dt_cap if dt_cap else DataToken.DEFAULT_CAP
        tx_id = dtfactory.createToken(
            blob, name, symbol, to_base_18(_cap), from_wallet=publisher_wallet
        )
        data_token = DataToken(dtfactory.get_token_address(tx_id))
        if not data_token:
            logger.warning("Creating new data token failed.")
            return None

        data_token_address = data_token.address

        logger.info(
            f"Successfully created data token with address "
            f"{data_token.address} for new dataset asset."
        )
        # owner_address is set as minter only if creating new data token. So if
        # `data_token_address` is set, `owner_address` has no effect.
        if owner_address:
            data_token.proposeMinter(owner_address, from_wallet=publisher_wallet)
    else:
        # verify data_token_address
        dt = DataToken(data_token_address)
        minter = dt.contract_concise.minter()
        if not minter:
            raise AssertionError(
                f"datatoken address {data_token_address} does not seem to be a valid DataToken contract."
            )
        elif minter.lower() != publisher_wallet.address.lower():
            raise AssertionError(
                f"Minter of datatoken {data_token_address} is not the same as the publisher."
            )
        elif not dtfactory.verify_data_token(data_token_address):
            raise AssertionError(
                f"datatoken address {data_token_address} is not found in the DTFactory events."
            )

    assert (
        data_token_address
    ), "data_token_address is required for publishing a dataset asset."

    # Generating the did and adding to the ddo.
    did = asset.assign_did(f"did:op:{remove_0x_prefix(data_token_address)}")
    logger.debug(f"Using datatoken address as did: {did}")
    # Check if it's already registered first!
    if did in self._get_aquarius().list_assets():
        raise OceanDIDAlreadyExist(
            f"Asset id {did} is already registered to another asset."
        )

    md_service = stype_to_service[ServiceTypes.METADATA]
    ddo_service_endpoint = md_service.service_endpoint
    if "{did}" in ddo_service_endpoint:
        ddo_service_endpoint = ddo_service_endpoint.replace("{did}", did)
        md_service.set_service_endpoint(ddo_service_endpoint)

    # Populate the ddo services
    asset.add_service(md_service)
    access_service = stype_to_service.get(ServiceTypes.ASSET_ACCESS, None)
    compute_service = stype_to_service.get(ServiceTypes.CLOUD_COMPUTE, None)

    if access_service:
        asset.add_service(access_service)
    if compute_service:
        asset.add_service(compute_service)

    asset.proof["signatureValue"] = Web3Helper.sign_hash(
        add_ethereum_prefix_and_hash_msg(asset.asset_id), publisher_wallet
    )

    # Add public key and authentication
    asset.add_public_key(did, publisher_wallet.address)
    asset.add_authentication(did, PUBLIC_KEY_TYPE_RSA)

    # Setup metadata service
    # First compute files_encrypted
    assert metadata_copy["main"][
        "files"
    ], "files is required in the metadata main attributes."
    logger.debug("Encrypting content urls in the metadata.")

    publisher_signature = self._data_provider.sign_message(
        publisher_wallet, asset.asset_id, self._config
    )
    _, encrypt_endpoint = self._data_provider.build_encrypt_endpoint(provider_uri)
    files_encrypted = self._data_provider.encrypt_files_dict(
        metadata_copy["main"]["files"],
        encrypt_endpoint,
        asset.asset_id,
        publisher_wallet.address,
        publisher_signature,
    )

    # only assign if the encryption worked
    if files_encrypted:
        logger.debug(f"Content urls encrypted successfully {files_encrypted}")
        index = 0
        for file in metadata_copy["main"]["files"]:
            file["index"] = index
            index = index + 1
            del file["url"]
        metadata_copy["encryptedFiles"] = files_encrypted
    else:
        raise AssertionError("Encrypting the files failed.")

    logger.debug(
        f"Generated asset and services, DID is {asset.did},"
        f" metadata service @{ddo_service_endpoint}."
    )

    # Set datatoken address in the asset
    asset.data_token_address = data_token_address

    try:
        # publish the new ddo in ocean-db/Aquarius
        ddo_registry = self.ddo_registry()
        web3 = Web3Provider.get_web3()
        tx_id = ddo_registry.create(
            asset.asset_id,
            bytes([1]),
            lzma.compress(web3.toBytes(text=asset.as_text())),
            publisher_wallet,
        )
        if not ddo_registry.verify_tx(tx_id):
            raise AssertionError(
                f"create DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}"
            )
        logger.info("Asset/ddo published on-chain successfully.")
    except ValueError as ve:
        raise ValueError(f"Invalid value to publish in the metadata: {str(ve)}")
    except Exception as e:
        logger.error(f"Publish asset on-chain failed: {str(e)}")
        raise

    return asset
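# Usage sketch for the v3 `create` above (assumes an `ocean` instance exposing
# this class as `ocean.assets`, plus a publisher `wallet` and a valid metadata
# dict; the variable names are illustrative):
#
#   asset = ocean.assets.create(metadata, wallet, dt_name="DataToken1", dt_symbol="DT1")
#   if asset:
#       print(f"published {asset.did}, datatoken at {asset.data_token_address}")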
def get_registered_ddo(client, wallet, metadata, service_descriptor):
    aqua = Aquarius('http://localhost:5000')
    ddo_service_endpoint = aqua.get_service_endpoint()

    metadata_store_url = json.dumps({'t': 1, 'url': ddo_service_endpoint})
    # Create new data token contract
    addresses = get_contracts_addresses(get_address_file())
    dt_address = addresses.get(DTFactory.CONTRACT_NAME)
    if dt_address:
        factory_contract = DTFactory(dt_address)
    else:
        factory_contract = new_factory_contract()

    ddo_contract_address = addresses.get(MetadataContract.CONTRACT_NAME)
    metadata_contract = MetadataContract(ddo_contract_address)

    tx_id = factory_contract.createToken(
        metadata_store_url, 'DataToken1', 'DT1', to_base_18(1000000), wallet)
    dt_contract = DataToken(factory_contract.get_token_address(tx_id))
    if not dt_contract:
        raise AssertionError('Creation of data token contract failed.')

    ddo = Asset()
    ddo.data_token_address = dt_contract.address

    metadata_service_desc = ServiceDescriptor.metadata_service_descriptor(
        metadata, ddo_service_endpoint)
    service_descriptors = list([
        ServiceDescriptor.authorization_service_descriptor(
            'http://localhost:12001')
    ])
    service_descriptors.append(service_descriptor)
    service_type = service_descriptor[0]

    service_descriptors = [metadata_service_desc] + service_descriptors

    services = ServiceFactory.build_services(service_descriptors)
    checksums = dict()
    for service in services:
        checksums[str(service.index)] = checksum(service.main)

    # Adding proof to the ddo.
    ddo.add_proof(checksums, wallet)

    did = ddo.assign_did(f'did:op:{remove_0x_prefix(ddo.data_token_address)}')
    ddo_service_endpoint = ddo_service_endpoint.replace('{did}', did)
    services[0].set_service_endpoint(ddo_service_endpoint)

    stype_to_service = {s.type: s for s in services}
    _service = stype_to_service[service_type]

    for service in services:
        ddo.add_service(service)

    # ddo.proof['signatureValue'] = ocean_lib.sign_hash(
    #     did_to_id_bytes(did), account)

    ddo.add_public_key(did, wallet.address)
    ddo.add_authentication(did, PUBLIC_KEY_TYPE_RSA)

    # if not plecos.is_valid_dict_local(ddo.metadata):
    #     print(f'invalid metadata: {plecos.validate_dict_local(ddo.metadata)}')
    #     assert False, f'invalid metadata: {plecos.validate_dict_local(ddo.metadata)}'

    files_list_str = json.dumps(metadata['main']['files'])
    encrypted_files = encrypt_document(client, did, files_list_str, wallet)
    # encrypted_files = do_encrypt(files_list_str, provider_wallet)

    # only assign if the encryption worked
    if encrypted_files:
        index = 0
        for file in metadata['main']['files']:
            file['index'] = index
            index = index + 1
            del file['url']
        metadata['encryptedFiles'] = encrypted_files

    web3 = Web3Provider.get_web3()
    block = web3.eth.blockNumber
    try:
        data = lzma.compress(web3.toBytes(text=ddo.as_text()))
        tx_id = metadata_contract.create(ddo.asset_id, bytes([1]), data, wallet)
        if not metadata_contract.verify_tx(tx_id):
            raise AssertionError(
                f'create DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}'
            )
    except Exception as e:
        print(f'error publishing ddo {ddo.did} in Aquarius: {e}')
        raise

    log = metadata_contract.get_event_log(
        metadata_contract.EVENT_METADATA_CREATED, block, ddo.asset_id, 30)
    assert log, f'no ddo created event.'

    ddo = wait_for_ddo(aqua, ddo.did)
    assert ddo, f'resolve did {ddo.did} failed.'
    return ddo
def test_ddo_on_chain():
    config = ConfigProvider.get_config()
    ddo_address = get_contracts_addresses(
        "ganache", config)[MetadataContract.CONTRACT_NAME]
    dtfactory_address = get_contracts_addresses(
        "ganache", config)[DTFactory.CONTRACT_NAME]
    ddo_registry = MetadataContract(ddo_address)
    wallet = get_publisher_wallet()
    web3 = Web3Provider.get_web3()

    dtfactory = DTFactory(dtfactory_address)
    tx_id = dtfactory.createToken("", "dt1", "dt1", 1000, wallet)
    dt = DataToken(dtfactory.get_token_address(tx_id))

    # test create ddo
    asset = get_ddo_sample(dt.address)
    old_name = asset.metadata["main"]["name"]
    txid = ddo_registry.create(
        asset.asset_id, b"",
        lzma.compress(web3.toBytes(text=asset.as_text())), wallet)
    assert ddo_registry.verify_tx(txid), f"create ddo failed: txid={txid}"
    logs = ddo_registry.event_MetadataCreated.processReceipt(
        ddo_registry.get_tx_receipt(txid))
    assert logs, f"no logs found for create ddo tx {txid}"
    log = logs[0]
    assert add_0x_prefix(log.args.dataToken) == asset.asset_id
    # read back the asset ddo from the event log
    ddo_text = web3.toText(lzma.decompress(log.args.data))
    assert ddo_text == asset.as_text(), "ddo text does not match original."
    _asset = Asset(json_text=ddo_text)
    assert _asset.did == asset.did, "did does not match."
    name = _asset.metadata["main"]["name"]
    assert name == old_name, f"name does not match: {name} != {old_name}"

    # test_update ddo
    asset.metadata["main"]["name"] = "updated name for test"
    txid = ddo_registry.update(
        asset.asset_id, b"",
        lzma.compress(web3.toBytes(text=asset.as_text())), wallet)
    assert ddo_registry.verify_tx(txid), f"update ddo failed: txid={txid}"
    logs = ddo_registry.event_MetadataUpdated.processReceipt(
        ddo_registry.get_tx_receipt(txid))
    assert logs, f"no logs found for update ddo tx {txid}"
    log = logs[0]
    assert add_0x_prefix(log.args.dataToken) == asset.asset_id
    # read back the asset ddo from the event log
    ddo_text = web3.toText(lzma.decompress(log.args.data))
    assert ddo_text == asset.as_text(), "ddo text does not match original."
    _asset = Asset(json_text=ddo_text)
    assert (
        _asset.metadata["main"]["name"] == "updated name for test"
    ), "name does not seem to be updated."
    assert DataToken(asset.asset_id).contract_concise.isMinter(wallet.address)

    # test update fails from wallet other than the original publisher
    bob = get_consumer_wallet()
    try:
        txid = ddo_registry.update(
            asset.asset_id, b"",
            lzma.compress(web3.toBytes(text=asset.as_text())), bob)
        assert ddo_registry.verify_tx(
            txid) is False, f"update ddo failed: txid={txid}"
        logs = ddo_registry.event_MetadataUpdated.processReceipt(
            ddo_registry.get_tx_receipt(txid))
        assert (
            not logs
        ), f"should be no logs for MetadataUpdated, but seems there are some logs: tx {txid}, logs {logs}"
    except ValueError:
        print("as expected, only owner can update a published ddo.")

    # test ddoOwner
    assert DataToken(asset.asset_id).contract_concise.isMinter(wallet.address), (
        f"ddo owner does not match the expected publisher address {wallet.address}, "
        f"owner is {DataToken(asset.asset_id).contract_concise.minter(wallet.address)}"
    )
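# The on-chain DDO payload exercised above is just lzma-compressed text pushed
# through web3's byte/text helpers. A minimal, standalone round-trip sketch
# (uses only web3.py's static helpers and the standard library; the sample DDO
# text is illustrative, not a real asset):
def _ddo_payload_roundtrip_sketch():
    import lzma

    from web3 import Web3

    ddo_text = '{"service": [], "id": "did:op:0000000000000000000000000000000000000000"}'
    # what gets passed as `data` to MetadataContract.create / update
    data = lzma.compress(Web3.toBytes(text=ddo_text))
    # what the event reader recovers from log.args.data
    assert Web3.toText(lzma.decompress(data)) == ddo_text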
def create(
    self,
    metadata: dict,
    publisher_wallet: Wallet,
    services: Optional[list] = None,
    owner_address: Optional[str] = None,
    data_token_address: Optional[str] = None,
    provider_uri: Optional[str] = None,
    dt_name: Optional[str] = None,
    dt_symbol: Optional[str] = None,
    dt_blob: Optional[str] = None,
    dt_cap: Optional[int] = None,
    encrypt: Optional[bool] = False,
) -> Optional[V3Asset]:
    """Register an asset on-chain and in the Metadata store (Aquarius),
    creating/deploying a DataToken contract when one is not supplied.

    :param metadata: dict conforming to the Metadata accepted by Ocean Protocol.
    :param publisher_wallet: Wallet of the publisher registering this asset
    :param services: list of Service objects.
    :param owner_address: hex str the ethereum address to assign asset ownership to.
        After registering the asset on-chain, the ownership is transferred to this address
    :param data_token_address: hex str the address of the data token smart contract.
        The new asset will be associated with this data token address.
    :param provider_uri: str URL of service provider. This will be used as base to
        construct the serviceEndpoint for the `access` (download) service
    :param dt_name: str name of DataToken if creating a new one
    :param dt_symbol: str symbol of DataToken if creating a new one
    :param dt_blob: str blob of DataToken if creating a new one. A `blob` is any text
        to be stored with the ERC20 DataToken contract for any purpose.
    :param dt_cap: int amount of DataTokens to mint, denoted in wei
    :param encrypt: bool whether to encrypt the DDO contents before publishing on-chain
    :return: DDO instance
    """
    assert isinstance(
        metadata, dict
    ), f"Expected metadata of type dict, got {type(metadata)}"

    # copy metadata so we don't change the original
    metadata_copy = copy.deepcopy(metadata)
    asset_type = metadata_copy["main"]["type"]
    assert asset_type in (
        "dataset",
        "algorithm",
    ), f"Invalid/unsupported asset type {asset_type}"

    validation_result, validation_errors = self.validate(metadata)
    if not validation_result:
        msg = f"Metadata has validation errors: {validation_errors}"
        logger.error(msg)
        raise ValueError(msg)

    urls = [item["url"] for item in metadata["main"]["files"]]
    if not provider_uri:
        provider_uri = DataServiceProvider.get_url(self._config)
    for url in urls:
        if not DataServiceProvider.check_single_file_info(url, provider_uri):
            msg = f"The URL of this service can not be accessed: {url}."
            logger.error(msg)
            raise ValueError(msg)

    services = services or []
    services = self._add_defaults(
        services, metadata_copy, provider_uri, publisher_wallet
    )

    checksum_dict = dict()
    for service in services:
        checksum_dict[str(service.index)] = checksum(service.main)

    # Create a DDO object
    asset = V3Asset()
    # Adding proof to the ddo.
    asset.add_proof(checksum_dict, publisher_wallet)

    #################
    # DataToken
    address = DTFactory.configured_address(
        get_network_name(web3=self._web3), self._config.address_file
    )
    dtfactory = DTFactory(self._web3, address)
    if not data_token_address:
        blob = dt_blob or ""
        name = dt_name or metadata["main"]["name"]
        symbol = dt_symbol or name
        # register on-chain
        _cap = dt_cap if dt_cap else DataToken.DEFAULT_CAP
        tx_id = dtfactory.createToken(
            blob, name, symbol, _cap, from_wallet=publisher_wallet
        )
        data_token = DataToken(self._web3, dtfactory.get_token_address(tx_id))
        if not data_token:
            logger.warning("Creating new data token failed.")
            return None

        data_token_address = data_token.address

        logger.info(
            f"Successfully created data token with address "
            f"{data_token.address} for new dataset asset."
        )
        # owner_address is set as minter only if creating new data token. So if
        # `data_token_address` is set, `owner_address` has no effect.
        if owner_address:
            data_token.proposeMinter(owner_address, from_wallet=publisher_wallet)
    else:
        if not dtfactory.verify_data_token(data_token_address):
            raise ContractNotFound(
                f"datatoken address {data_token_address} is not found in the DTFactory events."
            )
        # verify data_token_address
        dt = DataToken(self._web3, data_token_address)
        minter = dt.contract.caller.minter()
        if not minter:
            raise AssertionError(
                f"datatoken address {data_token_address} does not seem to be a valid DataToken contract."
            )
        elif minter.lower() != publisher_wallet.address.lower():
            raise AssertionError(
                f"Minter of datatoken {data_token_address} is not the same as the publisher."
            )

    assert (
        data_token_address
    ), "data_token_address is required for publishing a dataset asset."

    # Generating the did and adding to the ddo.
    did = f"did:op:{remove_0x_prefix(data_token_address)}"
    asset.did = did
    logger.debug(f"Using datatoken address as did: {did}")
    # Check if it's already registered first!
    if self._get_aquarius().ddo_exists(did):
        raise AquariusError(
            f"Asset id {did} is already registered to another asset."
        )

    for service in services:
        if service.type == ServiceTypes.METADATA:
            ddo_service_endpoint = service.service_endpoint
            if "{did}" in ddo_service_endpoint:
                ddo_service_endpoint = ddo_service_endpoint.replace("{did}", did)
                service.service_endpoint = ddo_service_endpoint
        asset.add_service(service)

    asset.proof["signatureValue"] = sign_hash(
        encode_defunct(text=asset.asset_id), publisher_wallet
    )

    # Setup metadata service
    # First compute files_encrypted
    assert metadata_copy["main"][
        "files"
    ], "files is required in the metadata main attributes."
    logger.debug("Encrypting content urls in the metadata.")

    publisher_signature = self._data_provider.sign_message(
        publisher_wallet, asset.asset_id, provider_uri=provider_uri
    )
    _, encrypt_endpoint = self._data_provider.build_encrypt_endpoint(provider_uri)
    files_encrypted = self._data_provider.encrypt_files_dict(
        metadata_copy["main"]["files"],
        encrypt_endpoint,
        asset.asset_id,
        publisher_wallet.address,
        publisher_signature,
    )

    # only assign if the encryption worked
    if files_encrypted:
        logger.debug(f"Content urls encrypted successfully {files_encrypted}")
        index = 0
        for file in metadata_copy["main"]["files"]:
            file["index"] = index
            index = index + 1
            del file["url"]
        metadata_copy["encryptedFiles"] = files_encrypted
    else:
        raise AssertionError("Encrypting the files failed.")

    logger.debug(
        f"Generated asset and services, DID is {asset.did},"
        f" metadata service @{ddo_service_endpoint}."
    )

    # Set datatoken address in the asset
    asset.data_token_address = data_token_address
    flags, asset_contents = self._build_asset_contents(asset, encrypt)

    try:
        # publish the new ddo in ocean-db/Aquarius
        ddo_registry = self.ddo_registry()
        tx_id = ddo_registry.create(
            asset.asset_id, flags, asset_contents, publisher_wallet
        )
        if not ddo_registry.verify_tx(tx_id):
            raise VerifyTxFailed(
                f"create DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}"
            )
        logger.info("Asset/ddo published on-chain successfully.")
    except ValueError as ve:
        raise ValueError(f"Invalid value to publish in the metadata: {str(ve)}")
    except Exception as e:
        logger.error(f"Publish asset on-chain failed: {str(e)}")
        raise

    return asset
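# Usage sketch for the newer `create` above (assumes `ocean.assets` is an
# instance of this class and `wallet` is the publisher's Wallet; the variable
# names and values are illustrative):
#
#   asset = ocean.assets.create(
#       metadata,
#       wallet,
#       data_token_address=None,   # deploy a fresh DataToken
#       dt_name="DataToken1",
#       dt_symbol="DT1",
#       encrypt=False,             # publish the DDO contents unencrypted (see _build_asset_contents)
#   )
#   assert asset is not None and asset.did.startswith("did:op:")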