def test_compute_specific_algo_dids(client):
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    # publish a dataset asset
    dataset_ddo_w_compute_service = get_dataset_ddo_with_compute_service_specific_algo_dids(
        client, pub_wallet)
    did = dataset_ddo_w_compute_service.did
    ddo = dataset_ddo_w_compute_service
    data_token = dataset_ddo_w_compute_service.as_dictionary()['dataToken']
    dt_contract = DataToken(data_token)
    mint_tokens_and_wait(dt_contract, cons_wallet, pub_wallet)

    # publish an algorithm asset (asset with metadata of type `algorithm`)
    alg_ddo = get_algorithm_ddo(client, cons_wallet)
    alg_data_token = alg_ddo.as_dictionary()['dataToken']
    alg_dt_contract = DataToken(alg_data_token)
    mint_tokens_and_wait(alg_dt_contract, pub_wallet, cons_wallet)
    # CHECKPOINT 1

    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE, dataset_ddo_w_compute_service)
    tx_id = send_order(client, ddo, dt_contract, sa, cons_wallet)
    nonce = get_nonce(client, cons_wallet.address)

    # prepare consumer signature on did
    msg = f'{cons_wallet.address}{did}{nonce}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)

    # Start the compute job
    payload = {
        'signature': signature,
        'documentId': did,
        'serviceId': sa.index,
        'serviceType': sa.type,
        'consumerAddress': cons_wallet.address,
        'transferTxId': tx_id,
        'dataToken': data_token,
        'output': build_stage_output_dict(
            dict(), dataset_ddo_w_compute_service, cons_wallet.address, pub_wallet),
        'algorithmDid': alg_ddo.did,
        'algorithmMeta': {},
        'algorithmDataToken': alg_data_token
    }

    compute_endpoint = BaseURLs.ASSETS_URL + '/compute'
    response = client.post(
        compute_endpoint,
        data=json.dumps(payload),
        content_type='application/json'
    )
    assert response.status == '400 BAD REQUEST', \
        f'start compute job failed: {response.status}, {response.data}'
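# The consumer signature used throughout these tests is a standard Ethereum
# personal-message signature over `consumerAddress + documentId + nonce`. The sketch
# below shows an equivalent flow using eth_account directly, rather than the
# `add_ethereum_prefix_and_hash_msg` / `Web3Helper.sign_hash` helpers; it is for
# illustration only, and the `private_key` argument is a placeholder, not a value
# the tests above use.
def _sign_compute_request_sketch(private_key, address, did, nonce):
    from eth_account import Account
    from eth_account.messages import encode_defunct

    # encode_defunct prepends the '\x19Ethereum Signed Message:\n<len>' prefix,
    # which is the same prefixing add_ethereum_prefix_and_hash_msg applies before hashing
    message = encode_defunct(text=f'{address}{did}{nonce}')
    signed = Account.sign_message(message, private_key=private_key)
    return signed.signature.hex()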
def test_compute(client):
    aqua = Aquarius('http://localhost:5000')
    for did in aqua.list_assets():
        aqua.retire_asset_ddo(did)

    pub_acc = get_publisher_account()
    cons_acc = get_consumer_account()
    keeper = keeper_instance()

    # publish a dataset asset
    dataset_ddo_w_compute_service = get_dataset_ddo_with_compute_service(
        pub_acc, providers=[pub_acc.address])

    # publish an algorithm asset (asset with metadata of type `algorithm`)
    alg_ddo = get_algorithm_ddo(cons_acc, providers=[pub_acc.address])
    # CHECKPOINT 1

    # prepare parameter values for the compute endpoint:
    # signature, serviceAgreementId, consumerAddress, and algorithmDid or algorithmMeta

    # initialize an agreement
    agreement_id = place_order(pub_acc, dataset_ddo_w_compute_service, cons_acc,
                               ServiceTypes.CLOUD_COMPUTE)
    # CHECKPOINT 2

    event = keeper.agreement_manager.subscribe_agreement_created(
        agreement_id, 15, None, (), wait=True, from_block=0)
    assert event, "Agreement event is not found, check the keeper node's logs"

    consumer_balance = keeper.token.get_token_balance(cons_acc.address)
    if consumer_balance < 50:
        keeper.dispenser.request_tokens(50 - consumer_balance, cons_acc)

    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE, dataset_ddo_w_compute_service)
    lock_reward(agreement_id, sa, cons_acc)
    event = keeper.lock_reward_condition.subscribe_condition_fulfilled(
        agreement_id, 15, None, (), wait=True, from_block=0)
    assert event, "Lock reward condition fulfilled event is not found, check the keeper node's logs"

    grant_compute(agreement_id, dataset_ddo_w_compute_service.asset_id, cons_acc, pub_acc)
    event = keeper.compute_execution_condition.subscribe_condition_fulfilled(
        agreement_id, 15, None, (), wait=True, from_block=0)
    assert event or keeper.compute_execution_condition.was_compute_triggered(
        dataset_ddo_w_compute_service.asset_id, cons_acc.address
    ), (f'Failed to compute: agreement_id={agreement_id}, '
        f'did={dataset_ddo_w_compute_service.did}, consumer={cons_acc.address}')

    # prepare consumer signature on agreement_id
    msg = f'{cons_acc.address}{agreement_id}'
    agreement_id_hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = keeper.sign_hash(agreement_id_hash, cons_acc)

    # Start the compute job
    payload = {
        'signature': signature,
        'serviceAgreementId': agreement_id,
        'consumerAddress': cons_acc.address,
        'algorithmDid': alg_ddo.did,
        'algorithmMeta': {},
        'output': build_stage_output_dict(
            dict(), dataset_ddo_w_compute_service, cons_acc.address, pub_acc)
    }
    endpoint = BaseURLs.ASSETS_URL + '/compute'
    response = client.post(endpoint, data=json.dumps(payload),
                           content_type='application/json')
    assert response.status == '200 OK', f'start compute job failed: {response.data}'
    job_info = response.json[0]
    print(f'got response from starting compute job: {job_info}')
    job_id = job_info.get('jobId', '')

    msg = f'{cons_acc.address}{job_id}{agreement_id}'
    agreement_id_hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = keeper.sign_hash(agreement_id_hash, cons_acc)

    payload = {
        'signature': signature,
        'serviceAgreementId': agreement_id,
        'consumerAddress': cons_acc.address,
        'jobId': job_id,
    }

    job_info = get_compute_job_info(client, endpoint, payload)
    assert job_info, f'Failed to get job info for jobId {job_id}'
    print(f'got info for compute job {job_id}: {job_info}')
    assert job_info['statusText'] in get_possible_compute_job_status_text()
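# The tests above only fetch the job status once. A longer-running scenario may want to
# poll until the job reaches a terminal state. The sketch below reuses the
# `get_compute_job_info` helper and the status payload built in the tests; the terminal
# status strings are assumptions for illustration and should be taken from the actual
# operator-service configuration in use.
def _wait_for_compute_job_sketch(client, endpoint, payload, timeout=300, interval=5):
    import time

    terminal_states = {'Job finished', 'Error', 'Failed'}  # assumed status text values
    deadline = time.time() + timeout
    while time.time() < deadline:
        job_info = get_compute_job_info(client, endpoint, payload)
        if job_info and job_info.get('statusText') in terminal_states:
            return job_info
        time.sleep(interval)
    raise TimeoutError(f'compute job did not reach a terminal state within {timeout} seconds')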
def test_compute(client):
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    # publish a dataset asset
    dataset_ddo_w_compute_service = get_dataset_ddo_with_compute_service(client, pub_wallet)
    did = dataset_ddo_w_compute_service.did
    ddo = dataset_ddo_w_compute_service
    data_token = dataset_ddo_w_compute_service.data_token_address
    dt_contract = DataToken(data_token)
    mint_tokens_and_wait(dt_contract, cons_wallet, pub_wallet)

    # publish an algorithm asset (asset with metadata of type `algorithm`)
    alg_ddo = get_algorithm_ddo(client, cons_wallet)
    alg_data_token = alg_ddo.as_dictionary()['dataToken']
    alg_dt_contract = DataToken(alg_data_token)
    mint_tokens_and_wait(alg_dt_contract, cons_wallet, cons_wallet)

    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE, dataset_ddo_w_compute_service)
    tx_id = send_order(client, ddo, dt_contract, sa, cons_wallet)
    alg_service = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, alg_ddo)
    alg_tx_id = send_order(client, alg_ddo, alg_dt_contract, alg_service, cons_wallet)
    nonce = get_nonce(client, cons_wallet.address)

    # prepare consumer signature on did
    msg = f'{cons_wallet.address}{did}{nonce}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)

    # Start the compute job
    payload = {
        'signature': signature,
        'documentId': did,
        'serviceId': sa.index,
        'serviceType': sa.type,
        'consumerAddress': cons_wallet.address,
        'transferTxId': tx_id,
        'dataToken': data_token,
        'output': build_stage_output_dict(
            dict(), dataset_ddo_w_compute_service, cons_wallet.address, pub_wallet),
        'algorithmDid': alg_ddo.did,
        'algorithmMeta': {},
        'algorithmDataToken': alg_data_token,
        'algorithmTransferTxId': alg_tx_id
    }

    # Start compute using invalid signature (withOUT nonce), should fail
    msg = f'{cons_wallet.address}{did}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    payload['signature'] = Web3Helper.sign_hash(_hash, cons_wallet)

    compute_endpoint = BaseURLs.ASSETS_URL + '/compute'
    response = client.post(
        compute_endpoint,
        data=json.dumps(payload),
        content_type='application/json'
    )
    assert response.status_code == 401, f'{response.data}'

    # Start compute with valid signature
    payload['signature'] = signature
    response = client.post(
        compute_endpoint,
        data=json.dumps(payload),
        content_type='application/json'
    )
    assert response.status == '200 OK', f'start compute job failed: {response.data}'
    job_info = response.json[0]
    print(f'got response from starting compute job: {job_info}')
    job_id = job_info.get('jobId', '')

    # prepare consumer signature on jobId + did + nonce to query the job status
    nonce = get_nonce(client, cons_wallet.address)
    msg = f'{cons_wallet.address}{job_id}{did}{nonce}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)

    payload = {
        'signature': signature,
        'documentId': did,
        'consumerAddress': cons_wallet.address,
        'jobId': job_id,
    }

    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f'Failed to get job info for jobId {job_id}'
    print(f'got info for compute job {job_id}: {job_info}')
    assert job_info['statusText'] in get_possible_compute_job_status_text()

    # get compute job status without signature should return limited status info
    payload.pop('signature')
    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f'Failed to get job status without signature: payload={payload}'
    assert 'owner' not in job_info, 'owner should not be in this status response'
    assert 'resultsUrl' not in job_info, 'resultsUrl should not be in this status response'
    assert 'algorithmLogUrl' not in job_info, 'algorithmLogUrl should not be in this status response'
    assert 'resultsDid' not in job_info, 'resultsDid should not be in this status response'

    # an empty signature should likewise return only the limited status info
    payload['signature'] = ''
    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f'Failed to get job status without signature: payload={payload}'
    assert 'owner' not in job_info, 'owner should not be in this status response'
    assert 'resultsUrl' not in job_info, 'resultsUrl should not be in this status response'
    assert 'algorithmLogUrl' not in job_info, 'algorithmLogUrl should not be in this status response'
    assert 'resultsDid' not in job_info, 'resultsDid should not be in this status response'
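# The tests above depend on a `get_compute_job_info` helper defined elsewhere in the test
# suite. The function below is only a minimal sketch of what such a helper could look
# like, written as an assumption for illustration rather than the actual implementation:
# it issues a GET against the compute endpoint with the payload as query parameters and
# returns the decoded JSON, or None when the request fails.
def _get_compute_job_info_sketch(client, endpoint, params):
    response = client.get(endpoint, query_string=params, content_type='application/json')
    if response.status_code != 200:
        return None
    result = response.json
    # the status endpoint may return a list of job-status dicts; take the first entry
    return result[0] if isinstance(result, list) and result else result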