def test_download_service(client):
    """Exercise the /download endpoint end to end.

    Covers three consume paths:
      1. auth token in `signature`            -> 200
      2. plain signature WITHOUT nonce         -> 400 (rejected)
      3. signature over did+nonce              -> 200
    """
    # Best-effort cleanup of previously published assets; a cold/unreachable
    # metadata store must not fail the test.
    # Fix: `except (ValueError, Exception)` was redundant — ValueError is a
    # subclass of Exception, so a single `except Exception` is equivalent.
    aqua = Aquarius("http://localhost:5000")
    try:
        for did in aqua.list_assets():
            aqua.retire_asset_ddo(did)
    except Exception:
        pass

    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    ddo = get_dataset_ddo_with_access_service(client, pub_wallet)
    dt_address = ddo.as_dictionary()["dataToken"]
    dt_token = DataToken(dt_address)
    mint_tokens_and_wait(dt_token, cons_wallet, pub_wallet)

    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, ddo)
    tx_id = send_order(client, ddo, dt_token, sa, cons_wallet)
    index = 0
    download_endpoint = BaseURLs.ASSETS_URL + "/download"

    # Consume using url index and auth token
    # (let the provider do the decryption)
    payload = dict({
        "documentId": ddo.did,
        "serviceId": sa.index,
        "serviceType": sa.type,
        "dataToken": dt_address,
        "consumerAddress": cons_wallet.address,
    })
    payload["signature"] = generate_auth_token(cons_wallet)
    payload["transferTxId"] = tx_id
    payload["fileIndex"] = index
    request_url = (download_endpoint + "?"
                   + "&".join([f"{k}={v}" for k, v in payload.items()]))
    response = client.get(request_url)
    assert response.status_code == 200, f"{response.data}"

    # Consume using url index and signature (withOUT nonce), should fail
    _hash = add_ethereum_prefix_and_hash_msg(ddo.did)
    payload["signature"] = Web3Helper.sign_hash(_hash, cons_wallet)
    request_url = (download_endpoint + "?"
                   + "&".join([f"{k}={v}" for k, v in payload.items()]))
    print(
        ">>>> Expecting InvalidSignatureError from the download endpoint <<<<"
    )  # noqa
    response = client.get(request_url)
    assert response.status_code == 400, f"{response.data}"

    # Consume using url index and signature (with nonce)
    nonce = get_nonce(client, cons_wallet.address)
    _hash = add_ethereum_prefix_and_hash_msg(f"{ddo.did}{nonce}")
    payload["signature"] = Web3Helper.sign_hash(_hash, cons_wallet)
    request_url = (download_endpoint + "?"
                   + "&".join([f"{k}={v}" for k, v in payload.items()]))
    response = client.get(request_url)
    assert response.status_code == 200, f"{response.data}"
def test_download_service(client):
    """Exercise the /download endpoint end to end.

    Covers three consume paths:
      1. auth token in `signature`            -> 200
      2. plain signature WITHOUT nonce         -> 401 (rejected)
      3. signature over did+nonce              -> 200
    """
    # Best-effort cleanup of previously published assets; a cold/unreachable
    # metadata store must not fail the test.
    # Fix: `except (ValueError, Exception)` was redundant — ValueError is a
    # subclass of Exception, so a single `except Exception` is equivalent.
    aqua = Aquarius('http://localhost:5000')
    try:
        for did in aqua.list_assets():
            aqua.retire_asset_ddo(did)
    except Exception:
        pass

    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    ddo = get_dataset_ddo_with_access_service(client, pub_wallet)
    dt_address = ddo.as_dictionary()['dataToken']
    dt_token = DataToken(dt_address)
    mint_tokens_and_wait(dt_token, cons_wallet, pub_wallet)

    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, ddo)
    tx_id = send_order(client, ddo, dt_token, sa, cons_wallet)
    index = 0
    download_endpoint = BaseURLs.ASSETS_URL + '/download'

    # Consume using url index and auth token (let the provider do the decryption)
    payload = dict({
        'documentId': ddo.did,
        'serviceId': sa.index,
        'serviceType': sa.type,
        'dataToken': dt_address,
        'consumerAddress': cons_wallet.address
    })
    payload['signature'] = generate_auth_token(cons_wallet)
    payload['transferTxId'] = tx_id
    payload['fileIndex'] = index
    request_url = download_endpoint + '?' + '&'.join(
        [f'{k}={v}' for k, v in payload.items()])
    response = client.get(request_url)
    assert response.status_code == 200, f'{response.data}'

    # Consume using url index and signature (withOUT nonce), should fail
    _hash = add_ethereum_prefix_and_hash_msg(ddo.did)
    payload['signature'] = Web3Helper.sign_hash(_hash, cons_wallet)
    request_url = download_endpoint + '?' + '&'.join(
        [f'{k}={v}' for k, v in payload.items()])
    print(
        '>>>> Expecting InvalidSignatureError from the download endpoint <<<<')
    response = client.get(request_url)
    assert response.status_code == 401, f'{response.data}'

    # Consume using url index and signature (with nonce)
    nonce = get_nonce(client, cons_wallet.address)
    _hash = add_ethereum_prefix_and_hash_msg(f'{ddo.did}{nonce}')
    payload['signature'] = Web3Helper.sign_hash(_hash, cons_wallet)
    # Fix: this statement was split across lines with no enclosing parens,
    # so the `+ '&'.join(...)` continuation was a dead expression and
    # request_url ended at '?' with no query string. Joined back into one
    # expression so the final consume actually carries the payload.
    request_url = download_endpoint + '?' + '&'.join(
        [f'{k}={v}' for k, v in payload.items()])
    response = client.get(request_url)
    assert response.status_code == 200, f'{response.data}'
def process_compute_request(data):
    """Build the provider-signed body for an operator-service compute call.

    Optional fields (owner, jobId, agreementId, documentId) are copied from
    ``data`` only when present. The provider signature covers
    providerAddress + jobId + documentId (missing parts as empty strings).
    """
    wallet = get_provider_wallet()
    body = {"providerAddress": wallet.address}

    # Copy optional attributes in a fixed order, skipping absent ones.
    optional_fields = (
        ("owner", data.get("consumerAddress")),
        ("jobId", data.get("jobId")),
        ("agreementId", data.get("transferTxId")),
        ("documentId", data.get("documentId")),
    )
    for key, value in optional_fields:
        if value is not None:
            body[key] = value

    message = (
        f"{wallet.address}"
        f'{body.get("jobId", "")}'
        f'{body.get("documentId", "")}'
    )  # noqa
    digest = add_ethereum_prefix_and_hash_msg(message)
    body["providerSignature"] = Web3Helper.sign_hash(digest, wallet)
    return body
def _check_job_id(client, job_id, did, token_address, wait_time=20):
    """Poll the compute endpoint until the job exposes a results DDO.

    Signs the status query as the consumer, waits up to ``wait_time``
    seconds (polling 4x/second) for a results DID, then resolves that DDO
    from the metadata store.  ``token_address`` is accepted for signature
    compatibility with callers but not used here.
    """
    endpoint = BaseURLs.ASSETS_URL + "/compute"
    consumer = get_consumer_wallet()
    nonce = get_nonce(client, consumer.address)

    # Consumer-signed status query: address + jobId + did + nonce.
    signed = Web3Helper.sign_hash(
        add_ethereum_prefix_and_hash_msg(
            f"{consumer.address}{job_id}{did}{nonce}"),
        consumer,
    )
    payload = {
        "signature": signed,
        "documentId": did,
        "consumerAddress": consumer.address,
        "jobId": job_id,
    }

    job_info = get_compute_job_info(client, endpoint, payload)
    assert job_info, f"Failed to get job info for jobId {job_id}"
    print(f"got info for compute job {job_id}: {job_info}")
    assert job_info["statusText"] in get_possible_compute_job_status_text()

    # get did of results
    did = None
    for _ in range(wait_time * 4):
        job_info = get_compute_job_info(client, endpoint, payload)
        did = job_info["did"]
        if did:
            break
        time.sleep(0.25)
    assert did, f"Compute job has no results, job info {job_info}."

    # check results ddo
    ddo = get_asset_from_metadatastore(get_metadata_url(), did)
    assert ddo, f"Failed to resolve ddo for did {did}"
def process_compute_request(data, user_nonce: UserNonce):
    """Validate a compute request and return the provider-signed body.

    Raises BadRequestError when `signature`/`consumerAddress` are missing;
    verify_signature raises on a bad consumer signature.
    """
    msg, status = check_required_attributes(
        ['signature', 'consumerAddress'], data, 'compute')
    if msg:
        raise BadRequestError(msg)

    provider_wallet = get_provider_wallet()
    owner = data.get('consumerAddress')

    body = {'providerAddress': provider_wallet.address}
    for key, value in (
            ('owner', owner),
            ('jobId', data.get('jobId')),
            ('documentId', data.get('documentId'))):
        if value is not None:
            body[key] = value

    # Consumer signature
    original_msg = (
        f'{body.get("owner", "")}'
        f'{body.get("jobId", "")}'
        f'{body.get("documentId", "")}'
    )
    verify_signature(
        owner, data.get('signature'), original_msg,
        user_nonce.get_nonce(owner))

    # Counter-sign with the provider key over address + jobId + documentId.
    msg_to_sign = (
        f'{provider_wallet.address}'
        f'{body.get("jobId", "")}'
        f'{body.get("documentId", "")}'
    )
    msg_hash = add_ethereum_prefix_and_hash_msg(msg_to_sign)
    body['providerSignature'] = Web3Helper.sign_hash(msg_hash, provider_wallet)
    return body
def _sign_message(self, wallet, msg, nonce=None, service_endpoint=None):
    """Sign ``msg`` suffixed with a nonce using ``wallet``.

    When ``nonce`` is not supplied, one is fetched from the data provider
    at the root of ``service_endpoint``.
    """
    if nonce is None:
        root_uri = self._data_provider.get_root_uri(service_endpoint)
        nonce = self._data_provider.get_nonce(wallet.address, root_uri)
    digest = add_ethereum_prefix_and_hash_msg(f"{msg}{nonce}")
    return Web3Helper.sign_hash(digest, wallet)
def _sign_message(self, wallet, msg, nonce=None):
    """Sign ``msg`` suffixed with a nonce using ``wallet``.

    The nonce is fetched from the data provider (via this instance's
    config) when not supplied by the caller.
    """
    if nonce is None:
        nonce = self._data_provider.get_nonce(wallet.address, self._config)
    digest = add_ethereum_prefix_and_hash_msg(f'{msg}{nonce}')
    return Web3Helper.sign_hash(digest, wallet)
def _check_job_id(client, job_id, did, token_address, wait_time=20):
    """Poll compute status for ``job_id`` until its results DID resolves.

    ``token_address`` is accepted for caller compatibility but unused.
    Polls 4x/second for at most ``wait_time`` seconds.
    """
    endpoint = BaseURLs.ASSETS_URL + '/compute'
    cons_wallet = get_consumer_wallet()
    nonce = get_nonce(client, cons_wallet.address)
    _id_hash = add_ethereum_prefix_and_hash_msg(
        f'{cons_wallet.address}{job_id}{did}{nonce}')
    payload = {
        'signature': Web3Helper.sign_hash(_id_hash, cons_wallet),
        'documentId': did,
        'consumerAddress': cons_wallet.address,
        'jobId': job_id,
    }

    job_info = get_compute_job_info(client, endpoint, payload)
    assert job_info, f'Failed to get job info for jobId {job_id}'
    print(f'got info for compute job {job_id}: {job_info}')
    assert job_info['statusText'] in get_possible_compute_job_status_text()

    # get did of results
    did = None
    attempts = wait_time * 4
    while attempts:
        job_info = get_compute_job_info(client, endpoint, payload)
        did = job_info['did']
        if did:
            break
        time.sleep(0.25)
        attempts -= 1
    assert did, f'Compute job has no results, job info {job_info}.'

    # check results ddo
    ddo = get_asset_from_metadatastore(get_metadata_url(), did)
    assert ddo, f'Failed to resolve ddo for did {did}'
def sign_message(wallet, msg, nonce=None, provider_uri=None):
    """Sign ``msg`` plus a nonce with ``wallet``.

    Fetches the nonce from the provider at ``provider_uri`` when the
    caller does not supply one.
    """
    if nonce is None:
        nonce = DataServiceProvider.get_nonce(wallet.address, provider_uri)
    print(f"signing message with nonce {nonce}: {msg}, account={wallet.address}")
    digest = add_ethereum_prefix_and_hash_msg(f"{msg}{nonce}")
    return Web3Helper.sign_hash(digest, wallet)
def test_compute_not_an_algo(client):
    """Starting a compute job with a dataset DID as the algorithm must 400."""
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    (
        _,
        did,
        tx_id,
        sa,
        data_token,
        _,
        alg_data_token,
        _,
        alg_tx_id,
    ) = build_and_send_ddo_with_compute_service(
        client, asset_type="allow_all_published")

    # prepare consumer signature on did
    nonce = get_nonce(client, cons_wallet.address)
    signature = Web3Helper.sign_hash(
        add_ethereum_prefix_and_hash_msg(
            f"{cons_wallet.address}{did}{str(nonce)}"),
        cons_wallet,
    )

    # Start the compute job
    payload = {
        "signature": signature,
        "documentId": did,
        "serviceId": sa.index,
        "serviceType": sa.type,
        "consumerAddress": cons_wallet.address,
        "transferTxId": tx_id,
        "dataToken": data_token,
        "output": build_stage_output_dict(
            dict(), sa.service_endpoint, cons_wallet.address, pub_wallet),
        "algorithmDid": did,  # intentionally, should not be an algo did
        "algorithmDataToken": alg_data_token,
        "algorithmTransferTxId": alg_tx_id,
    }

    # Start compute with valid signature; server must reject the non-algo DID.
    response = client.post(
        BaseURLs.ASSETS_URL + "/compute",
        data=json.dumps(payload),
        content_type="application/json",
    )
    assert response.status == "400 BAD REQUEST"
    assert "is not a valid algorithm" in response.get_json()["error"]
def test_compute_norawalgo_allowed(client):
    """Raw-algorithm compute on a dataset that forbids raw algos must 400."""
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    # publish a dataset asset
    ddo = get_dataset_ddo_with_compute_service_no_rawalgo(client, pub_wallet)
    did = ddo.did
    data_token = ddo.data_token_address
    dt_contract = DataToken(data_token)
    mint_tokens_and_wait(dt_contract, cons_wallet, pub_wallet)
    # CHECKPOINT 1

    algorithm_meta = {
        "rawcode": "console.log('Hello world'!)",
        "format": 'docker-image',
        "version": '0.1',
        "container": {
            "entrypoint": 'node $ALGO',
            "image": 'node',
            "tag": '10'
        }
    }

    # prepare parameter values for the compute endpoint
    # signature, documentId, consumerAddress, and algorithmDid or algorithmMeta
    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE, ddo)
    tx_id = send_order(client, ddo, dt_contract, sa, cons_wallet)

    # prepare consumer signature on did
    nonce = get_nonce(client, cons_wallet.address)
    signature = Web3Helper.sign_hash(
        add_ethereum_prefix_and_hash_msg(f'{cons_wallet.address}{did}{nonce}'),
        cons_wallet)

    # Start the compute job
    payload = {
        'signature': signature,
        'documentId': did,
        'serviceId': sa.index,
        'serviceType': sa.type,
        'consumerAddress': cons_wallet.address,
        'transferTxId': tx_id,
        'dataToken': data_token,
        'output': build_stage_output_dict(
            dict(), ddo, cons_wallet.address, pub_wallet),
        'algorithmDid': '',
        'algorithmMeta': algorithm_meta,
        'algorithmDataToken': ''
    }

    response = client.post(
        BaseURLs.ASSETS_URL + '/compute',
        data=json.dumps(payload),
        content_type='application/json'
    )
    assert response.status == '400 BAD REQUEST', f'start compute job failed: {response.status} , { response.data}'
def test_compute_diff_provider(client):
    """Compute succeeds when the algorithm lives with a different provider."""
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    (
        _,
        did,
        tx_id,
        sa,
        data_token,
        alg_ddo,
        alg_data_token,
        _,
        alg_tx_id,
    ) = build_and_send_ddo_with_compute_service(client, alg_diff=True)

    # prepare consumer signature on did
    nonce = get_nonce(client, cons_wallet.address)
    signature = Web3Helper.sign_hash(
        add_ethereum_prefix_and_hash_msg(
            f"{cons_wallet.address}{did}{str(nonce)}"),
        cons_wallet,
    )

    # Start the compute job
    payload = {
        "signature": signature,
        "documentId": did,
        "serviceId": sa.index,
        "serviceType": sa.type,
        "consumerAddress": cons_wallet.address,
        "transferTxId": tx_id,
        "dataToken": data_token,
        "output": build_stage_output_dict(
            dict(), sa.service_endpoint, cons_wallet.address, pub_wallet),
        "algorithmDid": alg_ddo.did,
        "algorithmDataToken": alg_data_token,
        "algorithmTransferTxId": alg_tx_id,
    }

    response = client.post(
        BaseURLs.ASSETS_URL + "/compute",
        data=json.dumps(payload),
        content_type="application/json",
    )
    assert response.status == "200 OK", f"start compute job failed: {response.data}"
def get(self, wallet):
    """
    :param wallet: Wallet instance signing the token
    :return: hex str the token generated/signed by the users wallet,
        or None when signing fails (the error is logged, not raised)
    """
    _message, _time = self._get_message_and_time()
    try:
        signed = Web3Helper.sign_hash(
            add_ethereum_prefix_and_hash_msg(_message), wallet)
    except Exception as e:
        # Deliberate best-effort: log the failure and yield None.
        logging.error(f'Error signing token: {str(e)}')
        return None
    return f'{signed}-{_time}'
def test_compute_specific_algo_dids(client):
    """Compute with an algorithm DID outside the trusted list must 400."""
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    # publish a dataset asset
    ddo = get_dataset_ddo_with_compute_service_specific_algo_dids(
        client, pub_wallet)
    did = ddo.did
    data_token = ddo.as_dictionary()['dataToken']
    dt_contract = DataToken(data_token)
    mint_tokens_and_wait(dt_contract, cons_wallet, pub_wallet)

    # publish an algorithm asset (asset with metadata of type `algorithm`)
    alg_ddo = get_algorithm_ddo(client, cons_wallet)
    alg_data_token = alg_ddo.as_dictionary()['dataToken']
    alg_dt_contract = DataToken(alg_data_token)
    mint_tokens_and_wait(alg_dt_contract, pub_wallet, cons_wallet)
    # CHECKPOINT 1

    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE, ddo)
    tx_id = send_order(client, ddo, dt_contract, sa, cons_wallet)

    # prepare consumer signature on did
    nonce = get_nonce(client, cons_wallet.address)
    signature = Web3Helper.sign_hash(
        add_ethereum_prefix_and_hash_msg(f'{cons_wallet.address}{did}{nonce}'),
        cons_wallet)

    # Start the compute job
    payload = {
        'signature': signature,
        'documentId': did,
        'serviceId': sa.index,
        'serviceType': sa.type,
        'consumerAddress': cons_wallet.address,
        'transferTxId': tx_id,
        'dataToken': data_token,
        'output': build_stage_output_dict(
            dict(), ddo, cons_wallet.address, pub_wallet),
        'algorithmDid': alg_ddo.did,
        'algorithmMeta': {},
        'algorithmDataToken': alg_data_token
    }

    response = client.post(
        BaseURLs.ASSETS_URL + '/compute',
        data=json.dumps(payload),
        content_type='application/json'
    )
    assert response.status == '400 BAD REQUEST', f'start compute job failed: {response.status} , { response.data}'
def encrypt_document(client, did, document, wallet):
    """POST ``document`` to the /encrypt endpoint, signed by ``wallet``.

    The signature covers did + current nonce. Returns the provider's
    encrypted document.
    """
    nonce = get_nonce(client, wallet.address)
    msg_hash = add_ethereum_prefix_and_hash_msg(f'{did}{nonce}')
    signature = Web3Helper.sign_hash(msg_hash, wallet)
    response = client.post(
        BaseURLs.ASSETS_URL + '/encrypt',
        data=json.dumps({
            'documentId': did,
            'signature': signature,
            'document': document,
            'publisherAddress': wallet.address
        }),
        content_type='application/json')
    assert response.status_code == 201 and response.data, \
        f'encrypt endpoint failed: response status {response.status}, data {response.data}'
    return response.json['encryptedDocument']
def encrypt_document(client, did, document, wallet):
    """Encrypt ``document`` via the provider's /encrypt endpoint.

    Signs did + current nonce with ``wallet``; returns the encrypted
    payload from the response body.
    """
    nonce = get_nonce(client, wallet.address)
    digest = add_ethereum_prefix_and_hash_msg(f"{did}{nonce}")
    payload = {
        "documentId": did,
        "signature": Web3Helper.sign_hash(digest, wallet),
        "document": document,
        "publisherAddress": wallet.address,
    }
    response = client.post(
        BaseURLs.ASSETS_URL + "/encrypt",
        data=json.dumps(payload),
        content_type="application/json",
    )
    assert (
        response.status_code == 201 and response.data
    ), f"encrypt endpoint failed: response status {response.status}, data {response.data}"
    return response.json["encryptedDocument"]
def computeStart():
    """Call the execution of a workflow.
    ---
    tags:
      - services
    consumes:
      - application/json
    parameters:
      - name: signature
        in: query
        description: Signature of (consumerAddress+jobId+documentId) to verify the consumer of this asset/compute job. The signature uses ethereum based signing method (see https://github.com/ethereum/EIPs/pull/683)
        type: string
      - name: consumerAddress
        in: query
        description: The consumer ethereum address.
        required: true
        type: string
      - name: algorithmDid
        in: query
        description: The DID of the algorithm Asset to be executed
        required: false
        type: string
      - name: algorithmMeta
        in: query
        description: json object that define the algorithm attributes and url or raw code
        required: false
        type: json string
      - name: output
        in: query
        description: json object that define the output section
        required: true
        type: json string
    responses:
      200:
        description: Call to the operator-service was successful.
      400:
        description: One of the required attributes is missing.
      401:
        description: Consumer signature is invalid or failed verification
      500:
        description: General server error
    """
    data = get_request_data(request)
    try:
        consumer_address = data.get("consumerAddress")
        # All request validation (order, tokens, algorithm choice) is
        # delegated to the WorkflowValidator; on failure its `error`
        # attribute carries the reason returned to the caller.
        validator = WorkflowValidator(consumer_address, provider_wallet, data)
        status = validator.validate()
        if not status:
            return jsonify(error=validator.error), 400
        workflow = validator.workflow
        # workflow is ready, push it to operator
        logger.info("Sending: %s", workflow)
        tx_id = data.get("transferTxId")
        did = data.get("documentId")
        # Provider counter-signature over providerAddress + documentId;
        # the operator-service verifies this before running the job.
        msg_to_sign = f"{provider_wallet.address}{did}"
        msg_hash = add_ethereum_prefix_and_hash_msg(msg_to_sign)
        payload = {
            "workflow": workflow,
            "providerSignature": Web3Helper.sign_hash(msg_hash, provider_wallet),
            "documentId": did,
            "agreementId": tx_id,
            "owner": consumer_address,
            "providerAddress": provider_wallet.address,
        }
        response = requests_session.post(
            get_compute_endpoint(),
            data=json.dumps(payload),
            headers={"content-type": "application/json"},
        )
        # Bump the consumer nonce only after a successful hand-off to the
        # operator, so a failed attempt does not invalidate the signature.
        increment_nonce(consumer_address)
        # Relay the operator-service response (body and status) verbatim.
        return Response(
            response.content,
            response.status_code,
            headers={"content-type": "application/json"},
        )
    except (ValueError, KeyError, Exception) as e:
        logger.error(f"Error- {str(e)}", exc_info=1)
        return jsonify(error=f"Error : {str(e)}"), 500
def create(
        self,
        metadata: dict,
        publisher_wallet: Wallet,
        service_descriptors: list = None,
        owner_address: str = None,
        data_token_address: str = None,
        provider_uri=None,
        dt_name: str = None,
        dt_symbol: str = None,
        dt_blob: str = None,
        dt_cap: float = None,
) -> (Asset, None):
    """
    Register an asset on-chain by creating/deploying a DataToken contract
    and in the Metadata store (Aquarius).

    :param metadata: dict conforming to the Metadata accepted by Ocean Protocol.
    :param publisher_wallet: Wallet of the publisher registering this asset
    :param service_descriptors: list of ServiceDescriptor tuples of length 2.
        The first item must be one of ServiceTypes and the second
        item is a dict of parameters and values required by the service
    :param owner_address: hex str the ethereum address to assign asset ownership to. After
        registering the asset on-chain, the ownership is transferred to this address
    :param data_token_address: hex str the address of the data token smart contract. The new
        asset will be associated with this data token address.
    :param provider_uri: str URL of service provider. This will be used as base to
        construct the serviceEndpoint for the `access` (download) service
    :param dt_name: str name of DataToken if creating a new one
    :param dt_symbol: str symbol of DataToken if creating a new one
    :param dt_blob: str blob of DataToken if creating a new one. A `blob` is any text
        to be stored with the ERC20 DataToken contract for any purpose.
    :param dt_cap: float
    :return: DDO instance
    """
    # --- Input validation: metadata must be a schema-valid dict and the
    # asset type must be one we know how to publish.
    assert isinstance(
        metadata, dict), f"Expected metadata of type dict, got {type(metadata)}"
    assert service_descriptors is None or isinstance(
        service_descriptors, list
    ), f"bad type of `service_descriptors` {type(service_descriptors)}"

    # copy metadata so we don't change the original
    metadata_copy = copy.deepcopy(metadata)
    asset_type = metadata_copy["main"]["type"]
    assert asset_type in (
        "dataset",
        "algorithm",
    ), f"Invalid/unsupported asset type {asset_type}"
    if not plecos.is_valid_dict_local(metadata_copy):
        errors = plecos.list_errors_dict_local(metadata_copy)
        msg = f"Metadata has validation errors: {errors}"
        logger.error(msg)
        raise ValueError(msg)

    # --- Build the service list and a per-service checksum for the proof.
    service_descriptors = service_descriptors or []
    services = self._process_service_descriptors(
        service_descriptors, metadata_copy, provider_uri, publisher_wallet)
    stype_to_service = {s.type: s for s in services}
    checksum_dict = dict()
    for service in services:
        checksum_dict[str(service.index)] = checksum(service.main)

    # Create a DDO object
    asset = Asset()
    # Adding proof to the ddo.
    asset.add_proof(checksum_dict, publisher_wallet)

    #################
    # DataToken: either deploy a fresh token via the factory, or verify the
    # caller-supplied token address (minter must be the publisher and the
    # token must be factory-registered).
    address = DTFactory.configured_address(
        Web3Helper.get_network_name(), self._config.address_file)
    dtfactory = DTFactory(address)
    if not data_token_address:
        blob = dt_blob or ""
        name = dt_name or metadata["main"]["name"]
        symbol = dt_symbol or name
        # register on-chain
        _cap = dt_cap if dt_cap else DataToken.DEFAULT_CAP
        tx_id = dtfactory.createToken(
            blob, name, symbol, to_base_18(_cap), from_wallet=publisher_wallet)
        data_token = DataToken(dtfactory.get_token_address(tx_id))
        if not data_token:
            logger.warning("Creating new data token failed.")
            return None
        data_token_address = data_token.address
        logger.info(f"Successfully created data token with address "
                    f"{data_token.address} for new dataset asset.")
        # owner_address is set as minter only if creating new data token. So if
        # `data_token_address` is set `owner_address` has no effect.
        if owner_address:
            data_token.proposeMinter(owner_address, from_wallet=publisher_wallet)
    else:
        # verify data_token_address
        dt = DataToken(data_token_address)
        minter = dt.contract_concise.minter()
        if not minter:
            raise AssertionError(
                f"datatoken address {data_token_address} does not seem to be a valid DataToken contract."
            )
        elif minter.lower() != publisher_wallet.address.lower():
            raise AssertionError(
                f"Minter of datatoken {data_token_address} is not the same as the publisher."
            )
        elif not dtfactory.verify_data_token(data_token_address):
            raise AssertionError(
                f"datatoken address {data_token_address} is not found in the DTFactory events."
            )

    assert (
        data_token_address
    ), "data_token_address is required for publishing a dataset asset."

    # Generating the did and adding to the ddo.
    # The DID is derived from the datatoken address, so it is unique per token.
    did = asset.assign_did(
        f"did:op:{remove_0x_prefix(data_token_address)}")
    logger.debug(f"Using datatoken address as did: {did}")
    # Check if it's already registered first!
    if did in self._get_aquarius().list_assets():
        raise OceanDIDAlreadyExist(
            f"Asset id {did} is already registered to another asset.")

    md_service = stype_to_service[ServiceTypes.METADATA]
    ddo_service_endpoint = md_service.service_endpoint
    # Substitute the concrete DID into the templated metadata endpoint.
    if "{did}" in ddo_service_endpoint:
        ddo_service_endpoint = ddo_service_endpoint.replace("{did}", did)
        md_service.set_service_endpoint(ddo_service_endpoint)

    # Populate the ddo services
    asset.add_service(md_service)
    access_service = stype_to_service.get(ServiceTypes.ASSET_ACCESS, None)
    compute_service = stype_to_service.get(ServiceTypes.CLOUD_COMPUTE, None)
    if access_service:
        asset.add_service(access_service)
    if compute_service:
        asset.add_service(compute_service)

    # Publisher signs the asset id to finalize the proof.
    asset.proof["signatureValue"] = Web3Helper.sign_hash(
        add_ethereum_prefix_and_hash_msg(asset.asset_id), publisher_wallet)

    # Add public key and authentication
    asset.add_public_key(did, publisher_wallet.address)
    asset.add_authentication(did, PUBLIC_KEY_TYPE_RSA)

    # Setup metadata service
    # First compute files_encrypted
    assert metadata_copy["main"][
        "files"], "files is required in the metadata main attributes."
    logger.debug("Encrypting content urls in the metadata.")
    publisher_signature = self._data_provider.sign_message(
        publisher_wallet, asset.asset_id, self._config)
    _, encrypt_endpoint = self._data_provider.build_encrypt_endpoint(
        provider_uri)
    files_encrypted = self._data_provider.encrypt_files_dict(
        metadata_copy["main"]["files"],
        encrypt_endpoint,
        asset.asset_id,
        publisher_wallet.address,
        publisher_signature,
    )

    # only assign if the encryption worked
    if files_encrypted:
        logger.debug(
            f"Content urls encrypted successfully {files_encrypted}")
        index = 0
        # Plaintext urls are removed from the public metadata; only the
        # index survives alongside the encrypted blob.
        for file in metadata_copy["main"]["files"]:
            file["index"] = index
            index = index + 1
            del file["url"]
        metadata_copy["encryptedFiles"] = files_encrypted
    else:
        raise AssertionError("Encrypting the files failed.")

    logger.debug(f"Generated asset and services, DID is {asset.did},"
                 f" metadata service @{ddo_service_endpoint}.")

    # Set datatoken address in the asset
    asset.data_token_address = data_token_address

    try:
        # publish the new ddo in ocean-db/Aquarius
        ddo_registry = self.ddo_registry()
        web3 = Web3Provider.get_web3()
        # DDO text is lzma-compressed before being written on-chain.
        tx_id = ddo_registry.create(
            asset.asset_id,
            bytes([1]),
            lzma.compress(web3.toBytes(text=asset.as_text())),
            publisher_wallet,
        )
        if not ddo_registry.verify_tx(tx_id):
            raise AssertionError(
                f"create DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}"
            )
        logger.info("Asset/ddo published on-chain successfully.")
    except ValueError as ve:
        raise ValueError(
            f"Invalid value to publish in the metadata: {str(ve)}")
    except Exception as e:
        logger.error(f"Publish asset on-chain failed: {str(e)}")
        raise

    return asset
def computeStart():
    """Call the execution of a workflow.
    ---
    tags:
      - services
    consumes:
      - application/json
    parameters:
      - name: signature
        in: query
        description: Signature of (consumerAddress+jobId+documentId) to verify the consumer of this asset/compute job. The signature uses ethereum based signing method (see https://github.com/ethereum/EIPs/pull/683)
        type: string
      - name: consumerAddress
        in: query
        description: The consumer ethereum address.
        required: true
        type: string
      - name: algorithmDid
        in: query
        description: The DID of the algorithm Asset to be executed
        required: false
        type: string
      - name: algorithmMeta
        in: query
        description: json object that define the algorithm attributes and url or raw code
        required: false
        type: json string
      - name: output
        in: query
        description: json object that define the output section
        required: true
        type: json string
    responses:
      200:
        description: Call to the operator-service was successful.
      400:
        description: One of the required attributes is missing.
      401:
        description: Consumer signature is invalid or failed verification
      500:
        description: General server error
    """
    data = get_request_data(request)
    try:
        asset, service, did, consumer_address, token_address = process_consume_request(  # noqa
            data, 'compute_start_job',
            additional_params=["transferTxId", "output"],
            require_signature=False)
        service_id = data.get('serviceId')
        service_type = data.get('serviceType')
        signature = data.get('signature')
        tx_id = data.get("transferTxId")

        # Verify that the number of required tokens has been
        # transferred to the provider's wallet.
        _tx, _order_log, _transfer_log = validate_order(
            consumer_address,
            token_address,
            float(service.get_cost()),
            tx_id,
            add_0x_prefix(did_to_id(did)) if did.startswith('did:') else did,
            service_id)
        validate_transfer_not_used_for_other_service(
            did, service_id, tx_id, consumer_address, token_address)
        record_consume_request(
            did, service_id, tx_id, consumer_address, token_address,
            service.get_cost())

        algorithm_did = data.get('algorithmDid')
        algorithm_token_address = data.get('algorithmDataToken')
        algorithm_meta = data.get('algorithmMeta')
        algorithm_tx_id = data.get('algorithmTransferTxId')
        output_def = data.get('output', dict())

        assert service_type == ServiceTypes.CLOUD_COMPUTE

        # Validate algorithm choice
        if not (algorithm_meta or algorithm_did):
            msg = f'Need an `algorithmMeta` or `algorithmDid` to run, otherwise don\'t bother.'  # noqa
            logger.error(msg, exc_info=1)
            return jsonify(error=msg), 400

        # algorithmDid also requires algorithmDataToken
        # and algorithmTransferTxId
        if algorithm_did:
            if not (algorithm_token_address and algorithm_tx_id):
                msg = (
                    f'Using `algorithmDid` requires the `algorithmDataToken` and '  # noqa
                    f'`algorithmTransferTxId` values in the request payload. '
                    f'algorithmDataToken is the DataToken address for the algorithm asset. '  # noqa
                    f'algorithmTransferTxId is the transaction id (hash) of transferring '  # noqa
                    f'data tokens from consumer wallet to this providers wallet.'
                )
                logger.error(msg, exc_info=1)
                return jsonify(error=msg), 400

        # Consumer signature
        # Signature covers consumerAddress + documentId (+ stored nonce,
        # checked inside verify_signature).
        original_msg = f'{consumer_address}{did}'
        verify_signature(
            consumer_address, signature, original_msg,
            user_nonce.get_nonce(consumer_address))

        ########################
        # Valid service?
        if service is None:
            return jsonify(
                error=f'This DID has no compute service {did}.'), 400

        #########################
        # Check privacy
        # allowRawAlgorithm defaults to True when the privacy section is
        # absent; trustedAlgorithms restricts which algo DIDs may run.
        privacy_options = service.main.get('privacy', {})
        if (algorithm_meta and
                privacy_options.get('allowRawAlgorithm', True) is False):
            return jsonify(
                error=f'cannot run raw algorithm on this did {did}.'), 400
        trusted_algorithms = privacy_options.get('trustedAlgorithms', [])
        if (algorithm_did and trusted_algorithms and
                algorithm_did not in trusted_algorithms):
            return jsonify(
                error=f'cannot run raw algorithm on this did {did}.'), 400

        #########################
        # Validate ALGORITHM meta
        if algorithm_meta:
            # algorithmMeta may arrive as a JSON string; normalize to dict.
            algorithm_meta = json.loads(algorithm_meta) if isinstance(
                algorithm_meta, str) else algorithm_meta
        algorithm_dict = build_stage_algorithm_dict(
            consumer_address, algorithm_did, algorithm_token_address,
            algorithm_tx_id, algorithm_meta, provider_wallet)
        error_msg, status_code = validate_algorithm_dict(
            algorithm_dict, algorithm_did)
        if error_msg:
            return jsonify(error=error_msg), status_code

        #########################
        # INPUT
        asset_urls = get_asset_download_urls(
            asset, provider_wallet, config_file=app.config['CONFIG_FILE'])
        if not asset_urls:
            return jsonify(error=f'cannot get url(s) in input did {did}.'), 400
        input_dict = dict({'index': 0, 'id': did, 'url': asset_urls})

        #########################
        # OUTPUT
        if output_def:
            # output may also arrive as a JSON string; normalize to dict.
            output_def = json.loads(output_def) if isinstance(
                output_def, str) else output_def
        output_dict = build_stage_output_dict(
            output_def, asset, consumer_address, provider_wallet)

        #########################
        # STAGE
        stage = build_stage_dict(input_dict, algorithm_dict, output_dict)

        #########################
        # WORKFLOW
        workflow = dict({'stages': list([stage])})

        # workflow is ready, push it to operator
        logger.info('Sending: %s', workflow)
        # Provider counter-signature over providerAddress + documentId.
        msg_to_sign = f'{provider_wallet.address}{did}'
        msg_hash = add_ethereum_prefix_and_hash_msg(msg_to_sign)
        payload = {
            'workflow': workflow,
            'providerSignature': Web3Helper.sign_hash(msg_hash,
                                                      provider_wallet),
            'documentId': did,
            'agreementId': tx_id,
            'owner': consumer_address,
            'providerAddress': provider_wallet.address
        }
        response = requests_session.post(
            get_compute_endpoint(),
            data=json.dumps(payload),
            headers={'content-type': 'application/json'})
        # Bump the consumer nonce only after a successful hand-off.
        user_nonce.increment_nonce(consumer_address)
        # Relay the operator-service response verbatim.
        return Response(response.content, response.status_code,
                        headers={'content-type': 'application/json'})
    except InvalidSignatureError as e:
        msg = f'Consumer signature failed verification: {e}'
        logger.error(msg, exc_info=1)
        return jsonify(error=msg), 401
    except (ValueError, KeyError, Exception) as e:
        logger.error(f'Error- {str(e)}', exc_info=1)
        return jsonify(error=f'Error : {str(e)}'), 500
def _assert_limited_job_info(job_info):
    """Assert a job-status response excludes owner/result fields.

    The provider must not expose these sensitive fields when the status
    request carries no valid consumer signature.
    """
    assert "owner" not in job_info, "owner should not be in this status response"
    assert ("resultsUrl" not in job_info
            ), "resultsUrl should not be in this status response"
    assert ("algorithmLogUrl" not in job_info
            ), "algorithmLogUrl should not be in this status response"
    assert ("resultsDid" not in job_info
            ), "resultsDid should not be in this status response"


def test_compute(client):
    """End-to-end compute-job flow against the provider /compute endpoint.

    Verifies that:
      * starting a job with a signature that omits the nonce is rejected
        with HTTP 401 (the endpoint's InvalidSignatureError handler)
      * starting a job with a valid signature succeeds
      * a signed status request returns full job info
      * a status request with a missing or empty signature returns only
        limited, non-sensitive status fields
    """
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()
    (
        _,
        did,
        tx_id,
        sa,
        data_token,
        alg_ddo,
        alg_data_token,
        _,
        alg_tx_id,
    ) = build_and_send_ddo_with_compute_service(client)
    nonce = get_nonce(client, cons_wallet.address)

    # prepare consumer signature on did (valid form covers address+did+nonce)
    msg = f"{cons_wallet.address}{did}{str(nonce)}"
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)

    # Start the compute job
    payload = dict({
        "signature": signature,
        "documentId": did,
        "serviceId": sa.index,
        "serviceType": sa.type,
        "consumerAddress": cons_wallet.address,
        "transferTxId": tx_id,
        "dataToken": data_token,
        "output": build_stage_output_dict(dict(), sa.service_endpoint,
                                          cons_wallet.address, pub_wallet),
        "algorithmDid": alg_ddo.did,
        "algorithmDataToken": alg_data_token,
        "algorithmTransferTxId": alg_tx_id,
    })

    # Start compute using invalid signature (withOUT nonce), should fail.
    msg = f"{cons_wallet.address}{did}"
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    payload["signature"] = Web3Helper.sign_hash(_hash, cons_wallet)
    compute_endpoint = BaseURLs.ASSETS_URL + "/compute"
    response = client.post(compute_endpoint,
                           data=json.dumps(payload),
                           content_type="application/json")
    # BUGFIX: this previously expected 400, but the compute endpoint maps
    # InvalidSignatureError to HTTP 401 (see its exception handler), and the
    # sibling compute test in this module asserts 401 for the same case.
    assert response.status_code == 401, f"{response.data}"

    # Start compute with valid signature
    payload["signature"] = signature
    response = client.post(compute_endpoint,
                           data=json.dumps(payload),
                           content_type="application/json")
    assert response.status == "200 OK", f"start compute job failed: {response.data}"
    job_info = response.json[0]
    print(f"got response from starting compute job: {job_info}")
    job_id = job_info.get("jobId", "")

    # A signed status request returns the full job info.
    nonce = get_nonce(client, cons_wallet.address)
    msg = f"{cons_wallet.address}{job_id}{did}{nonce}"
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)
    payload = dict({
        "signature": signature,
        "documentId": did,
        "consumerAddress": cons_wallet.address,
        "jobId": job_id,
    })
    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f"Failed to get job info for jobId {job_id}"
    print(f"got info for compute job {job_id}: {job_info}")
    assert job_info["statusText"] in get_possible_compute_job_status_text()

    # get compute job status without signature should return limited status info
    payload.pop("signature")
    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f"Failed to get job status without signature: payload={payload}"
    _assert_limited_job_info(job_info)

    # An empty signature must be treated the same as a missing one.
    payload["signature"] = ""
    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f"Failed to get job status without signature: payload={payload}"
    _assert_limited_job_info(job_info)
def test_compute_norawalgo_allowed(client):
    """A compute service that forbids raw algorithms must reject a job
    that is started with inline ``algorithmMeta`` code (HTTP 400)."""
    publisher = get_publisher_wallet()
    consumer = get_consumer_wallet()

    # publish a dataset asset whose compute service disallows raw algorithms
    ddo = comp_ds_no_rawalgo(client, publisher)
    asset_did = ddo.did
    token_address = ddo.data_token_address
    token_contract = DataToken(token_address)
    mint_tokens_and_wait(token_contract, consumer, publisher)
    # CHECKPOINT 1

    # inline (raw) algorithm definition that the service must refuse
    raw_algo_meta = {
        "rawcode": "console.log('Hello world'!)",
        "format": "docker-image",
        "version": "0.1",
        "container": {
            "entrypoint": "node $ALGO",
            "image": "node",
            "tag": "10"
        },
    }

    # prepare parameter values for the compute endpoint:
    # signature, documentId, consumerAddress, and algorithmDid or algorithmMeta
    service = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE, ddo)
    order_tx_id = send_order(client, ddo, token_contract, service, consumer)

    # consumer signs <address><did><nonce>
    nonce = get_nonce(client, consumer.address)
    signed_hash = add_ethereum_prefix_and_hash_msg(
        f"{consumer.address}{asset_did}{nonce}")
    signature = Web3Helper.sign_hash(signed_hash, consumer)

    # Attempt to start the compute job with the raw algorithm
    start_payload = dict({
        "signature": signature,
        "documentId": asset_did,
        "serviceId": service.index,
        "serviceType": service.type,
        "consumerAddress": consumer.address,
        "transferTxId": order_tx_id,
        "dataToken": token_address,
        "output": build_stage_output_dict(dict(), service.service_endpoint,
                                          consumer.address, publisher),
        "algorithmMeta": raw_algo_meta,
        "algorithmDataToken": "",
    })

    endpoint = BaseURLs.ASSETS_URL + "/compute"
    resp = client.post(endpoint,
                       data=json.dumps(start_payload),
                       content_type="application/json")
    assert (resp.status == "400 BAD REQUEST"
            ), f"start compute job failed: {resp.status} , {resp.data}"
def test_compute_with_manually_published_assets(client):
    """Compute-job flow where the dataset and algorithm assets are
    published inside the test body (not via a combined fixture helper).

    BUGFIX: this test was previously named ``test_compute``, duplicating
    another ``test_compute`` in this module; the later definition shadowed
    the earlier one, so only one of the two was ever collected by pytest.
    Renamed so both tests run.
    """
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    # publish a dataset asset
    dataset_ddo_w_compute_service = get_dataset_ddo_with_compute_service(
        client, pub_wallet)
    did = dataset_ddo_w_compute_service.did
    ddo = dataset_ddo_w_compute_service
    data_token = dataset_ddo_w_compute_service.data_token_address
    dt_contract = DataToken(data_token)
    mint_tokens_and_wait(dt_contract, cons_wallet, pub_wallet)

    # publish an algorithm asset (asset with metadata of type `algorithm`)
    alg_ddo = get_algorithm_ddo(client, cons_wallet)
    alg_data_token = alg_ddo.as_dictionary()['dataToken']
    alg_dt_contract = DataToken(alg_data_token)
    mint_tokens_and_wait(alg_dt_contract, cons_wallet, cons_wallet)

    # order both the dataset's compute service and the algorithm's access
    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE,
                                   dataset_ddo_w_compute_service)
    tx_id = send_order(client, ddo, dt_contract, sa, cons_wallet)
    alg_service = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, alg_ddo)
    alg_tx_id = send_order(client, alg_ddo, alg_dt_contract, alg_service,
                           cons_wallet)

    nonce = get_nonce(client, cons_wallet.address)
    # prepare consumer signature on did
    msg = f'{cons_wallet.address}{did}{str(nonce)}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)

    # Start the compute job
    payload = dict({
        'signature': signature,
        'documentId': did,
        'serviceId': sa.index,
        'serviceType': sa.type,
        'consumerAddress': cons_wallet.address,
        'transferTxId': tx_id,
        'dataToken': data_token,
        'output': build_stage_output_dict(dict(),
                                          dataset_ddo_w_compute_service,
                                          cons_wallet.address, pub_wallet),
        'algorithmDid': alg_ddo.did,
        'algorithmMeta': {},
        'algorithmDataToken': alg_data_token,
        'algorithmTransferTxId': alg_tx_id
    })

    # Start compute using invalid signature (withOUT nonce), should fail:
    # the endpoint maps InvalidSignatureError to HTTP 401.
    msg = f'{cons_wallet.address}{did}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    payload['signature'] = Web3Helper.sign_hash(_hash, cons_wallet)
    compute_endpoint = BaseURLs.ASSETS_URL + '/compute'
    response = client.post(
        compute_endpoint,
        data=json.dumps(payload),
        content_type='application/json'
    )
    assert response.status_code == 401, f'{response.data}'

    # Start compute with valid signature
    payload['signature'] = signature
    response = client.post(
        compute_endpoint,
        data=json.dumps(payload),
        content_type='application/json'
    )
    assert response.status == '200 OK', f'start compute job failed: {response.data}'
    job_info = response.json[0]
    print(f'got response from starting compute job: {job_info}')
    job_id = job_info.get('jobId', '')

    # A signed status request returns the full job info.
    nonce = get_nonce(client, cons_wallet.address)
    msg = f'{cons_wallet.address}{job_id}{did}{nonce}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)
    payload = dict({
        'signature': signature,
        'documentId': did,
        'consumerAddress': cons_wallet.address,
        'jobId': job_id,
    })
    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f'Failed to get job info for jobId {job_id}'
    print(f'got info for compute job {job_id}: {job_info}')
    assert job_info['statusText'] in get_possible_compute_job_status_text()

    def assert_limited_status(info):
        # Unsigned status requests must not leak owner/result fields.
        assert 'owner' not in info, 'owner should not be in this status response'
        assert 'resultsUrl' not in info, 'resultsUrl should not be in this status response'
        assert 'algorithmLogUrl' not in info, 'algorithmLogUrl should not be in this status response'
        assert 'resultsDid' not in info, 'resultsDid should not be in this status response'

    # get compute job status without signature should return limited status info
    payload.pop('signature')
    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f'Failed to get job status without signature: payload={payload}'
    assert_limited_status(job_info)

    # an empty signature must behave like a missing one
    payload['signature'] = ''
    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f'Failed to get job status without signature: payload={payload}'
    assert_limited_status(job_info)