Example #1
    def order(self, did: str, consumer_address: str,
              service_index: [int, None]=None, service_type: str=None) -> OrderRequirements:
        """
        Request a specific service from an asset and return the service requirements
        that must be met prior to consuming the service.

        :param did: str -- the asset DID
        :param consumer_address: hex str -- ethereum address of the consumer requesting the service
        :param service_index: int -- index of the service inside the asset DDO
        :param service_type: str -- type of the requested service (e.g. `access` or `compute`)
        :return: OrderRequirements instance -- named tuple (amount, data_token_address, receiver_address, nonce)
        """
        assert service_type or service_index, 'One of service_index or service_type is required.'
        asset = self.resolve(did)
        if service_type:
            sa = ServiceAgreement.from_ddo(service_type, asset)
        else:
            service = asset.get_service_by_index(service_index)
            sa = ServiceAgreement.from_ddo(service.type, asset)

        dt_address = asset.data_token_address

        _, initialize_url = self._data_provider.get_initialize_endpoint(sa.service_endpoint)
        order_requirements = self._data_provider.get_order_requirements(
            asset.did, initialize_url, consumer_address, sa.index, sa.type, dt_address
        )
        if not order_requirements:
            raise AssertionError('Data service provider or service is not available.')

        assert dt_address == order_requirements.data_token_address
        return order_requirements
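A minimal usage sketch (not from the source) of the `order` method above: `ocean_assets` is assumed to be the instance that defines it, and `consumer_address` the consumer's wallet address; the provider is asked what must be paid for the `access` service before any datatokens are transferred.
def sketch_request_order_requirements(ocean_assets, did, consumer_address):
    # Hypothetical helper: request the requirements for the `access` service.
    requirements = ocean_assets.order(did, consumer_address, service_type='access')
    # The returned named tuple carries amount, data_token_address, receiver_address and nonce.
    return requirements.amount, requirements.receiver_address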
Example #2
def build_and_send_ddo_with_compute_service(client,
                                            alg_diff=False,
                                            asset_type=None):
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    # publish an algorithm asset (asset with metadata of type `algorithm`)
    alg_ddo = (get_algorithm_ddo_different_provider(client, cons_wallet)
               if alg_diff else get_algorithm_ddo(client, cons_wallet))
    alg_data_token = alg_ddo.as_dictionary()["dataToken"]
    alg_dt_contract = DataToken(alg_data_token)

    mint_tokens_and_wait(alg_dt_contract, cons_wallet, cons_wallet)

    # publish a dataset asset
    if asset_type == "allow_all_published":
        dataset_ddo_w_compute_service = comp_ds_allow_all_published(
            client, pub_wallet)
    elif asset_type == "specific_algo_dids":
        algos = []

        for _ in range(2):
            alg_ddo = get_algorithm_ddo(client, cons_wallet)
            alg_data_token = alg_ddo.as_dictionary()["dataToken"]
            alg_dt_contract = DataToken(alg_data_token)
            mint_tokens_and_wait(alg_dt_contract, cons_wallet, cons_wallet)
            algos.append(alg_ddo)

        dataset_ddo_w_compute_service = comp_ds_specific_algo_dids(
            client, pub_wallet, algos)
    else:
        dataset_ddo_w_compute_service = comp_ds(client, pub_wallet)

    did = dataset_ddo_w_compute_service.did
    ddo = dataset_ddo_w_compute_service
    data_token = dataset_ddo_w_compute_service.data_token_address
    dt_contract = DataToken(data_token)
    mint_tokens_and_wait(dt_contract, cons_wallet, pub_wallet)

    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE,
                                   dataset_ddo_w_compute_service)

    tx_id = send_order(client, ddo, dt_contract, sa, cons_wallet)
    alg_service = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, alg_ddo)
    alg_tx_id = send_order(client, alg_ddo, alg_dt_contract, alg_service,
                           cons_wallet)

    return (
        dataset_ddo_w_compute_service,
        did,
        tx_id,
        sa,
        data_token,
        alg_ddo,
        alg_data_token,
        alg_dt_contract,
        alg_tx_id,
    )
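A minimal usage sketch (not from the source): unpacking the helper's return tuple in a hypothetical test, assuming `client` is the Flask test client used throughout these examples.
def sketch_use_compute_helper(client):
    # The helper publishes the dataset and algorithm, mints tokens and sends both orders.
    (dataset_ddo, did, tx_id, sa, data_token,
     alg_ddo, alg_data_token, alg_dt_contract, alg_tx_id) = \
        build_and_send_ddo_with_compute_service(client)
    return did, tx_id, alg_ddo.did, alg_tx_id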
Example #3
    def _get_service_endpoint(self, did, asset=None):
        if not asset:
            asset = resolve_asset(did, self._config.aquarius_url)

        return self._data_provider.build_compute_endpoint(
            ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE,
                                      asset).service_endpoint)
def download_asset_files(service_index: int,
                         asset: Asset,
                         consumer_wallet: Wallet,
                         destination: str,
                         token_address: str,
                         order_tx_id: str,
                         data_provider: DataServiceProvider,
                         index: [int, None] = None):
    """
    Download asset data files or result files from a compute job.

    :param service_index: index identifying the service inside the asset DDO, int
    :param asset: Asset instance
    :param consumer_wallet: Wallet instance of the consumer
    :param destination: Path, str
    :param token_address: hex str the address of the DataToken smart contract
    :param order_tx_id: hex str the transaction hash of the startOrder tx
    :param data_provider: DataServiceProvider instance
    :param index: Index of the document that is going to be downloaded, int
    :return: Asset folder path, str
    """
    _files = asset.metadata['main']['files']
    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, asset)
    service_endpoint = sa.service_endpoint
    if not service_endpoint:
        logger.error(
            'Consume asset failed, service definition is missing the "serviceEndpoint".'
        )
        raise AssertionError(
            'Consume asset failed, service definition is missing the "serviceEndpoint".'
        )

    service_endpoint = data_provider.build_download_endpoint(service_endpoint)
    if not os.path.isabs(destination):
        destination = os.path.abspath(destination)
    if not os.path.exists(destination):
        os.mkdir(destination)

    asset_folder = os.path.join(destination,
                                f'datafile.{asset.asset_id}.{service_index}')
    if not os.path.exists(asset_folder):
        os.mkdir(asset_folder)

    if index is not None:
        assert isinstance(index,
                          int), logger.error('index has to be an integer.')
        assert index >= 0, logger.error(
            'index has to be 0 or a positive integer.')
        assert index < len(_files), logger.error(
            'index can not be bigger than the number of files')
        indexes = [index]
    else:
        indexes = range(len(_files))

    for i in indexes:
        data_provider.download_service(asset.did, service_endpoint,
                                       consumer_wallet, _files, asset_folder,
                                       service_index, token_address,
                                       order_tx_id, i)
    return asset_folder
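A minimal usage sketch (not from the source): assuming `asset`, `consumer_wallet`, `order_tx_id` and `data_provider` were obtained as in the surrounding examples, download only the first file of the access service into a temporary folder.
import tempfile

def sketch_download_first_file(asset, consumer_wallet, order_tx_id, data_provider):
    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, asset)
    destination = tempfile.mkdtemp()  # download_asset_files creates the asset folder inside it
    return download_asset_files(
        sa.index,
        asset,
        consumer_wallet,
        destination,
        asset.data_token_address,
        order_tx_id,
        data_provider,
        index=0,  # only the first entry of metadata['main']['files']
    )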
def place_order(publisher_account, ddo, consumer_account, service_type):
    keeper = keeper_instance()
    agreement_id = ServiceAgreement.create_new_agreement_id()
    publisher_address = publisher_account.address
    # balance = keeper.token.get_token_balance(consumer_account.address)/(2**18)
    # if balance < 20:
    #     keeper.dispenser.request_tokens(100, consumer_account)

    service_agreement = ServiceAgreement.from_ddo(service_type, ddo)
    condition_ids = service_agreement.generate_agreement_condition_ids(
        agreement_id, ddo.asset_id, consumer_account.address,
        publisher_address, keeper)
    time_locks = service_agreement.conditions_timelocks
    time_outs = service_agreement.conditions_timeouts

    template_name = keeper.template_manager.SERVICE_TO_TEMPLATE_NAME[
        service_type]
    template_id = keeper.template_manager.create_template_id(template_name)
    actor_map = {
        'consumer': consumer_account.address,
        'provider': publisher_address
    }
    actors = [
        actor_map[_type]
        for _type in get_template_actor_types(keeper, template_id)
    ]

    assert keeper.template_manager.contract_concise.isTemplateIdApproved(
        template_id), f'template {template_id} is not approved.'

    keeper_instance().agreement_manager.create_agreement(
        agreement_id, ddo.asset_id, template_id, condition_ids, time_locks,
        time_outs, actors, consumer_account)

    return agreement_id
def setup_agreements_environment():
    consumer_acc = get_consumer_account()
    publisher_acc = get_publisher_account()
    keeper = Keeper.get_instance()

    ddo = get_ddo_sample()
    ddo._did = DID.did({"0": "0x12341234"})
    keeper.did_registry.register(
        ddo.asset_id,
        checksum=Web3Provider.get_web3().toBytes(hexstr=ddo.asset_id),
        url='aquarius:5000',
        account=publisher_acc,
        providers=None)

    registered_ddo = ddo
    asset_id = registered_ddo.asset_id
    service_agreement = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS,
                                                  ddo)
    agreement_id = ServiceAgreement.create_new_agreement_id()
    price = service_agreement.get_price()
    (lock_cond_id, access_cond_id,
     escrow_cond_id) = service_agreement.generate_agreement_condition_ids(
         agreement_id, asset_id, consumer_acc.address, publisher_acc.address,
         keeper)

    return (
        keeper,
        publisher_acc,
        consumer_acc,
        agreement_id,
        asset_id,
        price,
        service_agreement,
        (lock_cond_id, access_cond_id, escrow_cond_id),
    )
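A minimal usage sketch (not from the source): unpacking the fixture above in a hypothetical test; the condition ids come back in (lock, access, escrow) order.
def sketch_use_agreements_environment():
    (keeper, publisher_acc, consumer_acc, agreement_id, asset_id, price,
     service_agreement, cond_ids) = setup_agreements_environment()
    lock_cond_id, access_cond_id, escrow_cond_id = cond_ids
    return agreement_id, price, lock_cond_id, access_cond_id, escrow_cond_id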
Example #7
    def _get_service_endpoint_from_agreement(self, agreement_id, ddo=None):
        agreement = self._keeper.agreement_manager.get_agreement(agreement_id)
        _type = self._keeper.get_agreement_type(agreement.template_id)

        if not ddo:
            ddo = self._did_resolver.resolve(id_to_did(agreement.did))
        service = ServiceAgreement.from_ddo(_type, ddo)
        assert service, f'Using agreement_id {agreement_id}, the service type {_type} does not ' \
                        f'have a matching service in the DDO with DID {agreement.did}.'

        compute_service = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE,
                                                    ddo)
        assert service.service_endpoint == compute_service.service_endpoint, \
            f'Expecting agreement of type `{ServiceTypes.CLOUD_COMPUTE}`, but `agreement_id` {agreement_id} ' \
            f'seems to have type {_type}.'

        return service.service_endpoint
Example #8
def test_compute_norawalgo_allowed(client):
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    # publish a dataset asset
    dataset_ddo_w_compute_service = get_dataset_ddo_with_compute_service_no_rawalgo(client, pub_wallet)
    did = dataset_ddo_w_compute_service.did
    ddo = dataset_ddo_w_compute_service
    data_token = dataset_ddo_w_compute_service.data_token_address
    dt_contract = DataToken(data_token)
    mint_tokens_and_wait(dt_contract, cons_wallet, pub_wallet)

    # CHECKPOINT 1
    algorithm_meta = {
        "rawcode": "console.log('Hello world'!)",
        "format": 'docker-image',
        "version": '0.1',
        "container": {
            "entrypoint": 'node $ALGO',
            "image": 'node',
            "tag": '10'
        }
    }
    # prepare parameter values for the compute endpoint
    # signature, documentId, consumerAddress, and algorithmDid or algorithmMeta

    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE, dataset_ddo_w_compute_service)
    tx_id = send_order(client, ddo, dt_contract, sa, cons_wallet)
    nonce = get_nonce(client, cons_wallet.address)

    # prepare consumer signature on did
    msg = f'{cons_wallet.address}{did}{nonce}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)

    # Start the compute job
    payload = dict({
        'signature': signature,
        'documentId': did,
        'serviceId': sa.index,
        'serviceType': sa.type,
        'consumerAddress': cons_wallet.address,
        'transferTxId': tx_id,
        'dataToken': data_token,
        'output': build_stage_output_dict(dict(), dataset_ddo_w_compute_service, cons_wallet.address, pub_wallet),
        'algorithmDid': '',
        'algorithmMeta': algorithm_meta,
        'algorithmDataToken': ''
    })

    compute_endpoint = BaseURLs.ASSETS_URL + '/compute'
    response = client.post(
        compute_endpoint,
        data=json.dumps(payload),
        content_type='application/json'
    )
    assert response.status == '400 BAD REQUEST', f'start compute job failed: {response.status} , { response.data}'
Example #9
def test_download_service(client):
    aqua = Aquarius("http://localhost:5000")
    try:
        for did in aqua.list_assets():
            aqua.retire_asset_ddo(did)
    except (ValueError, Exception):
        pass

    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    ddo = get_dataset_ddo_with_access_service(client, pub_wallet)
    dt_address = ddo.as_dictionary()["dataToken"]
    dt_token = DataToken(dt_address)
    mint_tokens_and_wait(dt_token, cons_wallet, pub_wallet)

    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, ddo)
    tx_id = send_order(client, ddo, dt_token, sa, cons_wallet)
    index = 0
    download_endpoint = BaseURLs.ASSETS_URL + "/download"
    # Consume using url index and auth token
    # (let the provider do the decryption)
    payload = dict({
        "documentId": ddo.did,
        "serviceId": sa.index,
        "serviceType": sa.type,
        "dataToken": dt_address,
        "consumerAddress": cons_wallet.address,
    })
    payload["signature"] = generate_auth_token(cons_wallet)
    payload["transferTxId"] = tx_id
    payload["fileIndex"] = index
    request_url = (download_endpoint + "?" +
                   "&".join([f"{k}={v}" for k, v in payload.items()]))
    response = client.get(request_url)
    assert response.status_code == 200, f"{response.data}"

    # Consume using url index and signature (withOUT nonce), should fail
    _hash = add_ethereum_prefix_and_hash_msg(ddo.did)
    payload["signature"] = Web3Helper.sign_hash(_hash, cons_wallet)
    request_url = (download_endpoint + "?" +
                   "&".join([f"{k}={v}" for k, v in payload.items()]))
    print(
        ">>>> Expecting InvalidSignatureError from the download endpoint <<<<"
    )  # noqa
    response = client.get(request_url)
    assert response.status_code == 400, f"{response.data}"

    # Consume using url index and signature (with nonce)
    nonce = get_nonce(client, cons_wallet.address)
    _hash = add_ethereum_prefix_and_hash_msg(f"{ddo.did}{nonce}")
    payload["signature"] = Web3Helper.sign_hash(_hash, cons_wallet)
    request_url = (download_endpoint + "?" +
                   "&".join([f"{k}={v}" for k, v in payload.items()]))
    response = client.get(request_url)
    assert response.status_code == 200, f"{response.data}"
def test_initialize_on_ipfs_url(client):
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    ddo = get_dataset_with_ipfs_url_ddo(client, pub_wallet)
    data_token = ddo.data_token_address
    dt_contract = DataToken(data_token)
    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, ddo)

    send_order(client, ddo, dt_contract, sa, cons_wallet)
Example #11
def test_download_service(client):
    aqua = Aquarius('http://localhost:5000')
    try:
        for did in aqua.list_assets():
            aqua.retire_asset_ddo(did)
    except (ValueError, Exception):
        pass

    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    ddo = get_dataset_ddo_with_access_service(client, pub_wallet)
    dt_address = ddo.as_dictionary()['dataToken']
    dt_token = DataToken(dt_address)
    mint_tokens_and_wait(dt_token, cons_wallet, pub_wallet)

    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, ddo)
    tx_id = send_order(client, ddo, dt_token, sa, cons_wallet)
    index = 0
    download_endpoint = BaseURLs.ASSETS_URL + '/download'
    # Consume using url index and auth token (let the provider do the decryption)
    payload = dict({
        'documentId': ddo.did,
        'serviceId': sa.index,
        'serviceType': sa.type,
        'dataToken': dt_address,
        'consumerAddress': cons_wallet.address
    })
    payload['signature'] = generate_auth_token(cons_wallet)
    payload['transferTxId'] = tx_id
    payload['fileIndex'] = index
    request_url = download_endpoint + '?' + '&'.join(
        [f'{k}={v}' for k, v in payload.items()])
    response = client.get(request_url)
    assert response.status_code == 200, f'{response.data}'

    # Consume using url index and signature (withOUT nonce), should fail
    _hash = add_ethereum_prefix_and_hash_msg(ddo.did)
    payload['signature'] = Web3Helper.sign_hash(_hash, cons_wallet)
    request_url = download_endpoint + '?' + '&'.join(
        [f'{k}={v}' for k, v in payload.items()])
    print(
        '>>>> Expecting InvalidSignatureError from the download endpoint <<<<')
    response = client.get(request_url)
    assert response.status_code == 401, f'{response.data}'

    # Consume using url index and signature (with nonce)
    nonce = get_nonce(client, cons_wallet.address)
    _hash = add_ethereum_prefix_and_hash_msg(f'{ddo.did}{nonce}')
    payload['signature'] = Web3Helper.sign_hash(_hash, cons_wallet)
    request_url = download_endpoint + '?' + '&'.join(
        [f'{k}={v}' for k, v in payload.items()])
    response = client.get(request_url)
    assert response.status_code == 200, f'{response.data}'
Example #12
    def start(self,
              did: str,
              consumer_wallet: Wallet,
              order_tx_id: str,
              nonce: [int, None] = None,
              algorithm_did: [str, None] = None,
              algorithm_meta: [AlgorithmMetadata, None] = None,
              algorithm_tx_id: str = '',
              algorithm_data_token: str = '',
              output: dict = None,
              job_id: str = None):
        """Start a remote compute job on the asset files identified by `did` after
        verifying that the provider service is active and transferring the
        number of data-tokens required for using this compute service.

        :param did: str -- id of asset that has the compute service
        :param consumer_wallet: Wallet instance of the consumer ordering the service
        :param order_tx_id: hex str -- id of the startOrder transaction (tx hash)
        :param nonce: int value to use in the signature
        :param algorithm_did: str -- the asset did (of `algorithm` type) which consists of `did:op:` and
            the assetId hex str (without `0x` prefix)
        :param algorithm_meta: `AlgorithmMetadata` instance -- metadata about the algorithm being run if
            `algorithm` is being used. This is ignored when `algorithm_did` is specified.
        :param algorithm_tx_id: transaction hash of algorithm StartOrder tx (Required when using `algorithm_did`)
        :param algorithm_data_token: datatoken address of this algorithm (Required when using `algorithm_did`)
        :param output: dict object to be used in publishing mechanism, must define
        :param job_id: str identifier of a compute job that was previously started and
            stopped (if supported by the provider's backend)
        :return: str -- id of compute job being executed
        """
        assert algorithm_did or algorithm_meta, 'either an algorithm did or an algorithm meta must be provided.'

        output = OceanCompute.check_output_dict(
            output, consumer_wallet.address, data_provider=self._data_provider)
        asset = resolve_asset(did,
                              metadata_store_url=self._config.aquarius_url)
        service_endpoint = self._get_service_endpoint(did, asset)

        sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE, asset)

        signature = self._sign_message(consumer_wallet,
                                       f'{consumer_wallet.address}{did}',
                                       nonce=nonce)

        job_info = self._data_provider.start_compute_job(
            did, service_endpoint, consumer_wallet.address, signature,
            sa.index, asset.data_token_address, order_tx_id, algorithm_did,
            algorithm_meta, algorithm_tx_id, algorithm_data_token, output,
            job_id)
        return job_info['jobId']
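A minimal usage sketch (not from the source) of the `start` method above: `compute` is assumed to be the instance defining it, and both the dataset and the algorithm orders are assumed to have been paid for already (see the `send_order` calls elsewhere in these examples).
def sketch_start_compute_job(compute, did, consumer_wallet, order_tx_id,
                             algorithm_did, algorithm_tx_id, algorithm_data_token):
    # algorithm_tx_id and algorithm_data_token are required whenever algorithm_did is used.
    return compute.start(
        did,
        consumer_wallet,
        order_tx_id,
        algorithm_did=algorithm_did,
        algorithm_tx_id=algorithm_tx_id,
        algorithm_data_token=algorithm_data_token,
    )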
    def execute(agreement_id, compute_ddo, workflow_ddo, consumer_account,
                brizo, index):
        """

        :param agreement_id:
        :param workflow_ddo:
        :param consumer_account:
        :param index:
        :return:
        """
        service_endpoint = ServiceAgreement.from_ddo(
            ServiceTypes.CLOUD_COMPUTE, compute_ddo).service_endpoint
        return brizo.execute_service(agreement_id, service_endpoint,
                                     consumer_account, workflow_ddo)
Example #14
def test_compute_specific_algo_dids(client):
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    # publish a dataset asset
    dataset_ddo_w_compute_service = get_dataset_ddo_with_compute_service_specific_algo_dids(client, pub_wallet)
    did = dataset_ddo_w_compute_service.did
    ddo = dataset_ddo_w_compute_service
    data_token = dataset_ddo_w_compute_service.as_dictionary()['dataToken']
    dt_contract = DataToken(data_token)
    mint_tokens_and_wait(dt_contract, cons_wallet, pub_wallet)

    # publish an algorithm asset (asset with metadata of type `algorithm`)
    alg_ddo = get_algorithm_ddo(client, cons_wallet)
    alg_data_token = alg_ddo.as_dictionary()['dataToken']
    alg_dt_contract = DataToken(alg_data_token)
    mint_tokens_and_wait(alg_dt_contract, pub_wallet, cons_wallet)
    # CHECKPOINT 1

    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE, dataset_ddo_w_compute_service)
    tx_id = send_order(client, ddo, dt_contract, sa, cons_wallet)
    nonce = get_nonce(client, cons_wallet.address)

    # prepare consumer signature on did
    msg = f'{cons_wallet.address}{did}{nonce}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)

    # Start the compute job
    payload = dict({
        'signature': signature,
        'documentId': did,
        'serviceId': sa.index,
        'serviceType': sa.type,
        'consumerAddress': cons_wallet.address,
        'transferTxId': tx_id,
        'dataToken': data_token,
        'output': build_stage_output_dict(dict(), dataset_ddo_w_compute_service, cons_wallet.address, pub_wallet),
        'algorithmDid': alg_ddo.did,
        'algorithmMeta': {},
        'algorithmDataToken': alg_data_token
    })

    compute_endpoint = BaseURLs.ASSETS_URL + '/compute'
    response = client.post(
        compute_endpoint,
        data=json.dumps(payload),
        content_type='application/json'
    )
    assert response.status == '400 BAD REQUEST', f'start compute job failed: {response.status} , { response.data}'
Example #15
def build_stage_algorithm_dict(consumer_address,
                               algorithm_did,
                               algorithm_token_address,
                               algorithm_tx_id,
                               algorithm_meta,
                               provider_wallet,
                               receiver_address=None):
    if algorithm_did is not None:
        assert algorithm_token_address and algorithm_tx_id, \
            'algorithm_did requires both algorithm_token_address and algorithm_tx_id.'

        algo_asset = get_asset_from_metadatastore(get_metadata_url(),
                                                  algorithm_did)

        service = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS,
                                            algo_asset)
        _tx, _order_log, _transfer_log = validate_order(
            consumer_address, algorithm_token_address,
            float(service.get_cost()), algorithm_tx_id,
            add_0x_prefix(did_to_id(algorithm_did)) if
            algorithm_did.startswith('did:') else algorithm_did, service.index)
        validate_transfer_not_used_for_other_service(algorithm_did,
                                                     service.index,
                                                     algorithm_tx_id,
                                                     consumer_address,
                                                     algorithm_token_address)
        record_consume_request(algorithm_did, service.index, algorithm_tx_id,
                               consumer_address, algorithm_token_address,
                               service.get_cost())

        algo_id = algorithm_did
        raw_code = ''
        algo_url = get_asset_url_at_index(0, algo_asset, provider_wallet)
        container = algo_asset.metadata['main']['algorithm']['container']
    else:
        algo_id = ''
        algo_url = algorithm_meta.get('url')
        raw_code = algorithm_meta.get('rawcode')
        container = algorithm_meta.get('container')

    return dict({
        'id': algo_id,
        'url': algo_url,
        'rawcode': raw_code,
        'container': container
    })
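A minimal usage sketch (not from the source): for a raw-code algorithm no DID is given, so only `algorithm_meta` is consulted and the result reduces to the id/url/rawcode/container fields built above. `consumer_address` and `provider_wallet` are assumed to come from the surrounding provider code.
def sketch_raw_algorithm_stage(consumer_address, provider_wallet):
    algorithm_meta = {
        'rawcode': "console.log('Hello world!')",
        'container': {'entrypoint': 'node $ALGO', 'image': 'node', 'tag': '10'},
    }
    return build_stage_algorithm_dict(
        consumer_address,
        algorithm_did=None,  # no published algorithm asset
        algorithm_token_address=None,
        algorithm_tx_id=None,
        algorithm_meta=algorithm_meta,
        provider_wallet=provider_wallet,
    )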
Example #16
def process_consume_request(data: dict):
    did = data.get("documentId")
    token_address = data.get("dataToken")
    consumer_address = data.get("consumerAddress")
    service_id = data.get("serviceId")
    service_type = data.get("serviceType")

    # grab asset for did from the metadatastore associated with
    # the Data Token address
    asset = get_asset_from_metadatastore(get_metadata_url(), did)
    service = ServiceAgreement.from_ddo(service_type, asset)
    if service.type != service_type:
        raise AssertionError(
            f"Requested service with id {service_id} has type {service.type} "
            f"which does not match the requested service type {service_type}."
        )

    return asset, service, did, consumer_address, token_address
Example #17
def place_order(publisher_account, ddo, consumer_account):
    keeper = keeper_instance()
    agreement_id = ServiceAgreement.create_new_agreement_id()
    agreement_template = keeper.escrow_access_secretstore_template
    publisher_address = publisher_account.address
    service_agreement = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS,
                                                  ddo)
    condition_ids = service_agreement.generate_agreement_condition_ids(
        agreement_id, ddo.asset_id, consumer_account.address,
        publisher_address, keeper)
    time_locks = service_agreement.conditions_timelocks
    time_outs = service_agreement.conditions_timeouts
    agreement_template.create_agreement(agreement_id, ddo.asset_id,
                                        condition_ids, time_locks, time_outs,
                                        consumer_account.address,
                                        consumer_account)

    return agreement_id
Example #18
def process_consume_request(data: dict,
                            method: str,
                            user_nonce: UserNonce = None,
                            additional_params: list = None,
                            require_signature: bool = True):

    required_attributes = [
        'documentId', 'serviceId', 'serviceType', 'dataToken',
        'consumerAddress'
    ]
    if additional_params:
        required_attributes += additional_params

    if require_signature:
        required_attributes.append('signature')

    msg, status = check_required_attributes(required_attributes, data, method)
    if msg:
        raise AssertionError(msg)

    did = data.get('documentId')
    token_address = data.get('dataToken')
    consumer_address = data.get('consumerAddress')
    service_id = data.get('serviceId')
    service_type = data.get('serviceType')

    # grab asset for did from the metadatastore associated with the Data Token address
    asset = get_asset_from_metadatastore(get_metadata_url(), did)
    service = ServiceAgreement.from_ddo(service_type, asset)
    if service.type != service_type:
        raise AssertionError(
            f'Requested service with id {service_id} has type {service.type} which '
            f'does not match the requested service type {service_type}.')

    if require_signature:
        assert user_nonce, '`user_nonce` is required when signature is required.'
        # Raises ValueError when signature is invalid
        signature = data.get('signature')
        verify_signature(consumer_address, signature, did,
                         user_nonce.get_nonce(consumer_address))

    return asset, service, did, consumer_address, token_address
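A minimal usage sketch (not from the source): validating a hypothetical download request with the helper above. The extra required attributes and the 'download' method label are assumptions for illustration; `user_nonce` is assumed to be the provider's UserNonce store.
def sketch_validate_download_request(request_data, user_nonce):
    # Raises AssertionError if attributes are missing and ValueError if the signature is invalid.
    asset, service, did, consumer_address, token_address = process_consume_request(
        request_data,
        'download',
        user_nonce=user_nonce,
        additional_params=['transferTxId', 'fileIndex'],  # assumed extras for a download request
        require_signature=True,
    )
    return asset, service, did, consumer_address, token_address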
Example #19
def ocean_assets_download_destination_file_helper(publisher_ocean_instance,
                                                  metadata, tmpdir):
    """Tests downloading to an existing directory."""
    publisher = get_publisher_wallet()
    metadata_copy = metadata.copy()
    data_provider = DataServiceProvider

    ddo = publisher_ocean_instance.assets.create(metadata_copy, publisher)
    wait_for_ddo(publisher_ocean_instance, ddo.did)
    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, ddo)

    written_path = download_asset_files(
        sa.index,
        ddo,
        publisher,
        tmpdir,
        ddo.data_token_address,
        "test_order_tx_id",
        data_provider,
    )
    assert os.path.exists(written_path)
Example #20
def test_ocean_assets_download_failure(publisher_ocean_instance, metadata):
    """Tests that downloading from an empty service raises an AssertionError."""
    publisher = get_publisher_wallet()
    metadata_copy = metadata.copy()
    data_provider = DataServiceProvider

    ddo = publisher_ocean_instance.assets.create(metadata_copy, publisher)
    wait_for_ddo(publisher_ocean_instance, ddo.did)
    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, ddo)
    sa.__dict__["_service_endpoint"] = None
    ddo.__dict__["_services"][1] = sa

    with pytest.raises(AssertionError):
        download_asset_files(
            sa.index,
            ddo,
            publisher,
            "test_destination",
            ddo.data_token_address,
            "test_order_tx_id",
            data_provider,
        )
def test_consume(client):
    aqua = Aquarius('http://localhost:5000')
    for did in aqua.list_assets():
        aqua.retire_asset_ddo(did)

    endpoint = BaseURLs.ASSETS_URL + '/consume'

    pub_acc = get_publisher_account()
    cons_acc = get_consumer_account()

    keeper = keeper_instance()
    ddo = get_dataset_ddo_with_access_service(pub_acc,
                                              providers=[pub_acc.address])

    # initialize an agreement
    agreement_id = place_order(pub_acc, ddo, cons_acc,
                               ServiceTypes.ASSET_ACCESS)
    payload = dict({
        'serviceAgreementId': agreement_id,
        'consumerAddress': cons_acc.address
    })

    agr_id_hash = add_ethereum_prefix_and_hash_msg(agreement_id)
    signature = keeper.sign_hash(agr_id_hash, cons_acc)
    index = 0

    event = keeper.agreement_manager.subscribe_agreement_created(agreement_id,
                                                                 15,
                                                                 None, (),
                                                                 wait=True,
                                                                 from_block=0)
    assert event, "Agreement event is not found, check the keeper node's logs"

    consumer_balance = keeper.token.get_token_balance(cons_acc.address)
    if consumer_balance < 50:
        keeper.dispenser.request_tokens(50 - consumer_balance, cons_acc)

    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, ddo)
    lock_reward(agreement_id, sa, cons_acc)
    event = keeper.lock_reward_condition.subscribe_condition_fulfilled(
        agreement_id, 15, None, (), wait=True, from_block=0)
    assert event, "Lock reward condition fulfilled event is not found, check the keeper node's logs"

    grant_access(agreement_id, ddo, cons_acc, pub_acc)
    event = keeper.access_secret_store_condition.subscribe_condition_fulfilled(
        agreement_id, 15, None, (), wait=True, from_block=0)
    assert event or keeper.access_secret_store_condition.check_permissions(
        ddo.asset_id, cons_acc.address
    ), f'Failed to get access permission: agreement_id={agreement_id}, ' \
       f'did={ddo.did}, consumer={cons_acc.address}'

    # Consume using decrypted url
    files_list = json.loads(
        do_secret_store_decrypt(did_to_id(ddo.did), ddo.encrypted_files,
                                pub_acc, get_config()))
    payload['url'] = files_list[index]['url']
    request_url = endpoint + '?' + '&'.join(
        [f'{k}={v}' for k, v in payload.items()])

    response = client.get(request_url)
    assert response.status == '200 OK'

    # Consume using url index and signature (let brizo do the decryption)
    payload.pop('url')
    payload['signature'] = signature
    payload['index'] = index
    request_url = endpoint + '?' + '&'.join(
        [f'{k}={v}' for k, v in payload.items()])
    response = client.get(request_url)
    assert response.status == '200 OK'
Example #22
def test_ocean_assets_download_indexes(publisher_ocean_instance, metadata):
    """Tests different values of indexes that raise AssertionError."""
    publisher = get_publisher_wallet()
    metadata_copy = metadata.copy()
    data_provider = DataServiceProvider

    ddo = publisher_ocean_instance.assets.create(metadata_copy, publisher)
    wait_for_ddo(publisher_ocean_instance, ddo.did)
    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, ddo)

    config = Config(os.getenv(ENV_CONFIG_FILE))

    index = range(3)
    if config["util"].getboolean("typecheck"):
        with pytest.raises(TypeError):
            download_asset_files(
                sa.index,
                ddo,
                publisher,
                "test_destination",
                ddo.data_token_address,
                "test_order_tx_id",
                data_provider,
                index,
            )
    else:
        with pytest.raises(AssertionError):
            download_asset_files(
                sa.index,
                ddo,
                publisher,
                "test_destination",
                ddo.data_token_address,
                "test_order_tx_id",
                data_provider,
                index,
            )

    index = -1
    with pytest.raises(AssertionError):
        download_asset_files(
            sa.index,
            ddo,
            publisher,
            "test_destination",
            ddo.data_token_address,
            "test_order_tx_id",
            data_provider,
            index,
        )
    index = 4
    with pytest.raises(AssertionError):
        download_asset_files(
            sa.index,
            ddo,
            publisher,
            "test_destination",
            ddo.data_token_address,
            "test_order_tx_id",
            data_provider,
            index,
        )
def test_sign_agreement(publisher_ocean_instance, consumer_ocean_instance,
                        registered_ddo):
    # point consumer_ocean_instance's brizo mock to the publisher's ocean instance
    BrizoProvider.set_brizo_class(BrizoMock)

    consumer_ocn = consumer_ocean_instance
    consumer_acc = consumer_ocn.main_account
    keeper = Keeper.get_instance()

    pub_ocn = publisher_ocean_instance
    publisher_acc = pub_ocn.main_account

    did = registered_ddo.did
    asset_id = registered_ddo.asset_id
    ddo = consumer_ocn.assets.resolve(did)
    service_agreement = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS,
                                                  ddo)

    price = service_agreement.get_price()

    # Give consumer some tokens
    keeper.dispenser.request_vodkas(price * 2, consumer_acc)

    agreement_id, signature = consumer_ocean_instance.agreements.prepare(
        did, consumer_acc, ServiceTypesIndices.DEFAULT_ACCESS_INDEX)

    success = publisher_ocean_instance.agreements.create(
        did, ServiceTypesIndices.DEFAULT_ACCESS_INDEX, agreement_id, signature,
        consumer_acc.address, publisher_acc)
    assert success, 'createAgreement failed.'

    event = keeper.agreement_manager.subscribe_agreement_created(
        agreement_id,
        10,
        log_event(keeper.agreement_manager.AGREEMENT_CREATED_EVENT), (),
        wait=True)
    assert event, 'no event for AgreementCreated '

    # Verify condition types (condition contracts)
    agreement_values = keeper.agreement_manager.get_agreement(agreement_id)
    assert agreement_values.did == asset_id, ''
    template_id = keeper.get_agreement_template_id(service_agreement.type)
    cond_types = keeper.template_manager.get_template(
        template_id).condition_types
    for i, cond_id in enumerate(agreement_values.condition_ids):
        cond = keeper.condition_manager.get_condition(cond_id)
        assert cond.type_ref == cond_types[i]
        assert int(cond.state) == 1

    lock_cond_id, access_cond_id, escrow_cond_id = agreement_values.condition_ids
    # Fulfill lock_reward_condition
    starting_balance = keeper.token.get_token_balance(
        keeper.escrow_reward_condition.address)
    keeper.token.token_approve(keeper.lock_reward_condition.address, price,
                               consumer_acc)
    tx_hash = keeper.lock_reward_condition.fulfill(
        agreement_id, keeper.escrow_reward_condition.address, price,
        consumer_acc)
    keeper.lock_reward_condition.get_tx_receipt(tx_hash)
    assert keeper.token.get_token_balance(
        keeper.escrow_reward_condition.address) == (price +
                                                    starting_balance), ''
    assert keeper.condition_manager.get_condition_state(lock_cond_id) == 2, ''
    event = keeper.lock_reward_condition.subscribe_condition_fulfilled(
        agreement_id,
        10,
        log_event(keeper.lock_reward_condition.FULFILLED_EVENT), (),
        wait=True)
    assert event, 'no event for LockRewardCondition.Fulfilled'

    # Fulfill access_secret_store_condition
    tx_hash = keeper.access_secret_store_condition.fulfill(
        agreement_id, asset_id, consumer_acc.address, publisher_acc)
    keeper.access_secret_store_condition.get_tx_receipt(tx_hash)
    assert 2 == keeper.condition_manager.get_condition_state(
        access_cond_id), ''
    event = keeper.access_secret_store_condition.subscribe_condition_fulfilled(
        agreement_id,
        10,
        log_event(keeper.access_secret_store_condition.FULFILLED_EVENT), (),
        wait=True)
    assert event, 'no event for AccessSecretStoreCondition.Fulfilled'

    # Fulfill escrow_reward_condition
    tx_hash = keeper.escrow_reward_condition.fulfill(
        agreement_id, price, publisher_acc.address, consumer_acc.address,
        lock_cond_id, access_cond_id, publisher_acc)
    keeper.escrow_reward_condition.get_tx_receipt(tx_hash)
    assert 2 == keeper.condition_manager.get_condition_state(
        escrow_cond_id), ''
    event = keeper.escrow_reward_condition.subscribe_condition_fulfilled(
        agreement_id,
        10,
        log_event(keeper.escrow_reward_condition.FULFILLED_EVENT), (),
        wait=True)
    assert event, 'no event for EscrowReward.Fulfilled'
    publisher_ocean_instance.assets.retire(did)
Example #24
def test_compute(client):

    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    # publish a dataset asset
    dataset_ddo_w_compute_service = get_dataset_ddo_with_compute_service(client, pub_wallet)
    did = dataset_ddo_w_compute_service.did
    ddo = dataset_ddo_w_compute_service
    data_token = dataset_ddo_w_compute_service.data_token_address
    dt_contract = DataToken(data_token)
    mint_tokens_and_wait(dt_contract, cons_wallet, pub_wallet)

    # publish an algorithm asset (asset with metadata of type `algorithm`)
    alg_ddo = get_algorithm_ddo(client, cons_wallet)
    alg_data_token = alg_ddo.as_dictionary()['dataToken']
    alg_dt_contract = DataToken(alg_data_token)
    mint_tokens_and_wait(alg_dt_contract, cons_wallet, cons_wallet)

    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE, dataset_ddo_w_compute_service)
    tx_id = send_order(client, ddo, dt_contract, sa, cons_wallet)

    alg_service = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, alg_ddo)
    alg_tx_id = send_order(client, alg_ddo, alg_dt_contract, alg_service, cons_wallet)

    nonce = get_nonce(client, cons_wallet.address)
    # prepare consumer signature on did
    msg = f'{cons_wallet.address}{did}{str(nonce)}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)

    # Start the compute job
    payload = dict({
        'signature': signature,
        'documentId': did,
        'serviceId': sa.index,
        'serviceType': sa.type,
        'consumerAddress': cons_wallet.address,
        'transferTxId': tx_id,
        'dataToken': data_token,
        'output': build_stage_output_dict(dict(), dataset_ddo_w_compute_service, cons_wallet.address, pub_wallet),
        'algorithmDid': alg_ddo.did,
        'algorithmMeta': {},
        'algorithmDataToken': alg_data_token,
        'algorithmTransferTxId': alg_tx_id
    })

    # Start compute using invalid signature (withOUT nonce), should fail
    msg = f'{cons_wallet.address}{did}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    payload['signature'] = Web3Helper.sign_hash(_hash, cons_wallet)
    compute_endpoint = BaseURLs.ASSETS_URL + '/compute'
    response = client.post(
        compute_endpoint,
        data=json.dumps(payload),
        content_type='application/json'
    )

    assert response.status_code == 401, f'{response.data}'

    # Start compute with valid signature
    payload['signature'] = signature
    response = client.post(
        compute_endpoint,
        data=json.dumps(payload),
        content_type='application/json'
    )
    assert response.status == '200 OK', f'start compute job failed: {response.data}'
    job_info = response.json[0]
    print(f'got response from starting compute job: {job_info}')
    job_id = job_info.get('jobId', '')

    nonce = get_nonce(client, cons_wallet.address)
    msg = f'{cons_wallet.address}{job_id}{did}{nonce}'
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)

    payload = dict({
        'signature': signature,
        'documentId': did,
        'consumerAddress': cons_wallet.address,
        'jobId': job_id,
    })

    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f'Failed to get job info for jobId {job_id}'
    print(f'got info for compute job {job_id}: {job_info}')
    assert job_info['statusText'] in get_possible_compute_job_status_text()

    # get compute job status without signature should return limited status info
    payload.pop('signature')
    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f'Failed to get job status without signature: payload={payload}'
    assert 'owner' not in job_info, 'owner should not be in this status response'
    assert 'resultsUrl' not in job_info, 'resultsUrl should not be in this status response'
    assert 'algorithmLogUrl' not in job_info, 'algorithmLogUrl should not be in this status response'
    assert 'resultsDid' not in job_info, 'resultsDid should not be in this status response'

    payload['signature'] = ''
    job_info = get_compute_job_info(client, compute_endpoint, payload)
    assert job_info, f'Failed to get job status without signature: payload={payload}'
    assert 'owner' not in job_info, 'owner should not be in this status response'
    assert 'resultsUrl' not in job_info, 'resultsUrl should not be in this status response'
    assert 'algorithmLogUrl' not in job_info, 'algorithmLogUrl should not be in this status response'
    assert 'resultsDid' not in job_info, 'resultsDid should not be in this status response'
Example #25
def test_download_service(client):
    aqua = Aquarius('http://localhost:5000')
    for did in aqua.list_assets():
        aqua.retire_asset_ddo(did)

    init_endpoint = BaseURLs.ASSETS_URL + '/initialize'
    download_endpoint = BaseURLs.ASSETS_URL + '/download'

    pub_acc = get_publisher_account()
    cons_acc = get_consumer_account()

    request_ether('https://faucet.nile.dev-ocean.com', cons_acc)

    ddo = get_dataset_ddo_with_access_service(pub_acc)
    dt_address = ddo.as_dictionary()['dataTokenAddress']
    dt_token = DataTokenContract(dt_address)
    tx_id = dt_token.mint(cons_acc.address, 50, pub_acc)
    dt_token.get_tx_receipt(tx_id)
    time.sleep(2)

    def verify_supply(mint_amount=50):
        supply = dt_token.contract_concise.totalSupply()
        if supply <= 0:
            _tx_id = dt_token.mint(cons_acc.address, mint_amount, pub_acc)
            dt_token.get_tx_receipt(_tx_id)
            supply = dt_token.contract_concise.totalSupply()
        return supply

    while True:
        try:
            s = verify_supply()
            if s > 0:
                break
        except (ValueError, Exception):
            pass

    auth_token = generate_auth_token(cons_acc)
    index = 0

    sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, ddo)

    # Consume using decrypted url
    files_list = json.loads(
        do_decrypt(ddo.encrypted_files, pub_acc))

    # initialize an agreement
    payload = dict({
        'documentId': ddo.did,
        'serviceId': sa.index,
        'serviceType': sa.type,
        'tokenAddress': dt_address,
        'consumerAddress': cons_acc.address
    })

    payload['url'] = files_list[index]['url']
    request_url = init_endpoint + '?' + '&'.join([f'{k}={v}' for k, v in payload.items()])

    response = client.get(
        request_url
    )
    assert response.status == '200 OK'

    tx_params = response.json
    num_tokens = tx_params['numTokens']
    assert tx_params['from'] == cons_acc.address
    assert tx_params['to'] == pub_acc.address
    assert tx_params['dataTokenAddress'] == ddo.as_dictionary()['dataTokenAddress']

    # Transfer tokens to provider account
    tx_id = dt_token.transfer(tx_params['to'], num_tokens, cons_acc)
    dt_token.get_tx_receipt(tx_id)

    # Consume using url index and signature (let the provider do the decryption)
    payload.pop('url')
    payload['signature'] = auth_token
    payload['transferTxId'] = Web3.toHex(tx_id)
    payload['fileIndex'] = index
    request_url = download_endpoint + '?' + '&'.join([f'{k}={v}' for k, v in payload.items()])
    response = client.get(
        request_url
    )
    assert response.status == '200 OK'
def test_process_pending_agreements(keeper, web3, storage_path,
                                    provider_account):
    start_time = 0
    ddo = get_registered_ddo(provider_account,
                             providers=[provider_account.address])
    did = ddo.did
    consumer = get_consumer_account()
    block_number = web3.eth.blockNumber
    block_number = block_number - 10 if block_number > 10 else block_number
    metadata = ddo.metadata
    encrypted_files = metadata['encryptedFiles']
    sa = ServiceAgreement.from_ddo('access', ddo)
    agr_1 = place_order(provider_account, ddo, consumer)
    agr_2 = place_order(provider_account, ddo, consumer)
    agr_3 = place_order(provider_account, ddo, consumer)
    pending_agreements = {
        agr_1: [
            did, 3,
            sa.get_price(), encrypted_files, start_time, consumer.address,
            block_number, 'access'
        ],
        agr_2: [
            did, 3,
            sa.get_price(), encrypted_files, start_time + 3000,
            consumer.address, block_number, 'access'
        ],
        agr_3: [
            did, 3,
            sa.get_price(), encrypted_files, start_time + 10000,
            consumer.address, block_number, 'access'
        ]
    }
    conditions = {
        agr_1: {
            'accessSecretStore': 1,
            'lockReward': 2,
            'escrowReward': 1
        },
        agr_2: {
            'accessSecretStore': 1,
            'lockReward': 1,
            'escrowReward': 1
        },
        agr_3: {
            'accessSecretStore': 2,
            'lockReward': 2,
            'escrowReward': 1
        }
    }
    balance = keeper.token.get_token_balance(consumer.address) / (2**18)
    if balance < 20:
        keeper.dispenser.request_tokens(100, consumer)

    lock_reward(agr_1, sa, consumer)
    lock_reward(agr_3, sa, consumer)
    grant_access(agr_3, ddo, consumer, provider_account)
    event = keeper.access_secret_store_condition.subscribe_condition_fulfilled(
        agr_3, 35, None, (), wait=True)
    if not event:
        # check status
        cond_to_status = get_conditions_status(agr_3)
        print(f'agr_3 condition status: {cond_to_status}')
        if cond_to_status['accessSecretStore'] != 2:
            raise AssertionError(f'grant access failed for agreement {agr_3}')

    events_monitor = ProviderEventsMonitor(keeper, web3, storage_path,
                                           provider_account)
    events_monitor.process_pending_agreements(pending_agreements, conditions)

    keeper.access_secret_store_condition.subscribe_condition_fulfilled(
        agr_1, 15, None, (), wait=True)
    keeper.escrow_reward_condition.subscribe_condition_fulfilled(agr_1,
                                                                 15,
                                                                 None, (),
                                                                 wait=True)
    keeper.escrow_reward_condition.subscribe_condition_fulfilled(agr_3,
                                                                 15,
                                                                 None, (),
                                                                 wait=True)

    # check status of all agreements
    for agr_id in (agr_1, agr_3):
        cond_to_status = get_conditions_status(agr_id)
        assert [2, 2, 2] == list(cond_to_status.values()), \
            f'agr_id {agr_id}: some conditions were not fulfilled or ' \
            f'do not match the expected status. Conditions status are: {cond_to_status}'

    events_monitor.start_agreement_events_monitor()
    lock_reward(agr_2, sa, consumer)
    keeper.access_secret_store_condition.subscribe_condition_fulfilled(
        agr_2, 15, None, (), wait=True)
    keeper.escrow_reward_condition.subscribe_condition_fulfilled(agr_2,
                                                                 15,
                                                                 None, (),
                                                                 wait=True)
    cond_to_status = get_conditions_status(agr_2)
    assert [2, 2, 2] == list(cond_to_status.values()), \
        f'agr_id {agr_2}: some conditions were not fulfilled or ' \
        f'do not match the expected status. Conditions status are: {cond_to_status}'

    events_monitor.stop_monitor()
    time.sleep(2)
def test_compute_norawalgo_allowed(client):
    aqua = Aquarius('http://localhost:5000')
    for did in aqua.list_assets():
        aqua.retire_asset_ddo(did)

    pub_acc = get_publisher_account()
    cons_acc = get_consumer_account()

    keeper = keeper_instance()

    # publish a dataset asset
    dataset_ddo_w_compute_service = get_dataset_ddo_with_compute_service_no_rawalgo(
        pub_acc, providers=[pub_acc.address])

    # CHECKPOINT 1
    algorithmMeta = {
        "rawcode": "console.log('Hello world'!)",
        "format": 'docker-image',
        "version": '0.1',
        "container": {
            "entrypoint": 'node $ALGO',
            "image": 'node',
            "tag": '10'
        }
    }
    # prepare parameter values for the compute endpoint
    # signature, serviceAgreementId, consumerAddress, and algorithmDid or algorithmMeta

    # initialize an agreement
    agreement_id = place_order(pub_acc, dataset_ddo_w_compute_service,
                               cons_acc, ServiceTypes.CLOUD_COMPUTE)
    # CHECKPOINT 2

    event = keeper.agreement_manager.subscribe_agreement_created(agreement_id,
                                                                 15,
                                                                 None, (),
                                                                 wait=True,
                                                                 from_block=0)
    assert event, "Agreement event is not found, check the keeper node's logs"

    consumer_balance = keeper.token.get_token_balance(cons_acc.address)
    if consumer_balance < 50:
        keeper.dispenser.request_tokens(50 - consumer_balance, cons_acc)

    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE,
                                   dataset_ddo_w_compute_service)
    lock_reward(agreement_id, sa, cons_acc)
    event = keeper.lock_reward_condition.subscribe_condition_fulfilled(
        agreement_id, 15, None, (), wait=True, from_block=0)
    assert event, "Lock reward condition fulfilled event is not found, check the keeper node's logs"

    grant_compute(agreement_id, dataset_ddo_w_compute_service.asset_id,
                  cons_acc, pub_acc)
    event = keeper.compute_execution_condition.subscribe_condition_fulfilled(
        agreement_id, 15, None, (), wait=True, from_block=0)
    assert event or keeper.compute_execution_condition.was_compute_triggered(
        dataset_ddo_w_compute_service.asset_id, cons_acc.address
    ), (f'Failed to compute: agreement_id={agreement_id}, '
        f'did={dataset_ddo_w_compute_service.did}, consumer={cons_acc.address}'
        )

    # prepare consumer signature on agreement_id
    msg = f'{cons_acc.address}{agreement_id}'
    agreement_id_hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = keeper.sign_hash(agreement_id_hash, cons_acc)

    # Start the compute job
    payload = dict({
        'signature': signature,
        'serviceAgreementId': agreement_id,
        'consumerAddress': cons_acc.address,
        'algorithmDid': None,
        'algorithmMeta': algorithmMeta,
        'output': build_stage_output_dict(dict(), dataset_ddo_w_compute_service,
                                          cons_acc.address, pub_acc)
    })

    endpoint = BaseURLs.ASSETS_URL + '/compute'
    response = client.post(endpoint,
                           data=json.dumps(payload),
                           content_type='application/json')
    assert response.status == '400 BAD REQUEST', f'start compute job failed: {response.status} , { response.data}'
def test_compute(client):
    aqua = Aquarius('http://localhost:5000')
    for did in aqua.list_assets():
        aqua.retire_asset_ddo(did)

    pub_acc = get_publisher_account()
    cons_acc = get_consumer_account()

    keeper = keeper_instance()

    # publish a dataset asset
    dataset_ddo_w_compute_service = get_dataset_ddo_with_compute_service(
        pub_acc, providers=[pub_acc.address])

    # publish an algorithm asset (asset with metadata of type `algorithm`)
    alg_ddo = get_algorithm_ddo(cons_acc, providers=[pub_acc.address])
    # CHECKPOINT 1

    # prepare parameter values for the compute endpoint
    # signature, serviceAgreementId, consumerAddress, and algorithmDid or algorithmMeta

    # initialize an agreement
    agreement_id = place_order(pub_acc, dataset_ddo_w_compute_service,
                               cons_acc, ServiceTypes.CLOUD_COMPUTE)
    # CHECKPOINT 2

    event = keeper.agreement_manager.subscribe_agreement_created(agreement_id,
                                                                 15,
                                                                 None, (),
                                                                 wait=True,
                                                                 from_block=0)
    assert event, "Agreement event is not found, check the keeper node's logs"

    consumer_balance = keeper.token.get_token_balance(cons_acc.address)
    if consumer_balance < 50:
        keeper.dispenser.request_tokens(50 - consumer_balance, cons_acc)

    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE,
                                   dataset_ddo_w_compute_service)
    lock_reward(agreement_id, sa, cons_acc)
    event = keeper.lock_reward_condition.subscribe_condition_fulfilled(
        agreement_id, 15, None, (), wait=True, from_block=0)
    assert event, "Lock reward condition fulfilled event is not found, check the keeper node's logs"

    grant_compute(agreement_id, dataset_ddo_w_compute_service.asset_id,
                  cons_acc, pub_acc)
    event = keeper.compute_execution_condition.subscribe_condition_fulfilled(
        agreement_id, 15, None, (), wait=True, from_block=0)
    assert event or keeper.compute_execution_condition.was_compute_triggered(
        dataset_ddo_w_compute_service.asset_id, cons_acc.address
    ), (f'Failed to compute: agreement_id={agreement_id}, '
        f'did={dataset_ddo_w_compute_service.did}, consumer={cons_acc.address}'
        )

    # prepare consumer signature on agreement_id
    msg = f'{cons_acc.address}{agreement_id}'
    agreement_id_hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = keeper.sign_hash(agreement_id_hash, cons_acc)

    # Start the compute job
    payload = dict({
        'signature': signature,
        'serviceAgreementId': agreement_id,
        'consumerAddress': cons_acc.address,
        'algorithmDid': alg_ddo.did,
        'algorithmMeta': {},
        'output': build_stage_output_dict(dict(), dataset_ddo_w_compute_service,
                                          cons_acc.address, pub_acc)
    })

    endpoint = BaseURLs.ASSETS_URL + '/compute'
    response = client.post(endpoint,
                           data=json.dumps(payload),
                           content_type='application/json')
    assert response.status == '200 OK', f'start compute job failed: {response.data}'
    job_info = response.json[0]
    print(f'got response from starting compute job: {job_info}')
    job_id = job_info.get('jobId', '')

    msg = f'{cons_acc.address}{job_id}{agreement_id}'
    agreement_id_hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = keeper.sign_hash(agreement_id_hash, cons_acc)

    payload = dict({
        'signature': signature,
        'serviceAgreementId': agreement_id,
        'consumerAddress': cons_acc.address,
        'jobId': job_id,
    })

    job_info = get_compute_job_info(client, endpoint, payload)
    assert job_info, f'Failed to get job info for jobId {job_id}'
    print(f'got info for compute job {job_id}: {job_info}')
    assert job_info['statusText'] in get_possible_compute_job_status_text()
Example #29
def test_compute_norawalgo_allowed(client):
    pub_wallet = get_publisher_wallet()
    cons_wallet = get_consumer_wallet()

    # publish a dataset asset
    dataset_ddo_w_compute_service = comp_ds_no_rawalgo(client, pub_wallet)
    did = dataset_ddo_w_compute_service.did
    ddo = dataset_ddo_w_compute_service

    data_token = dataset_ddo_w_compute_service.data_token_address
    dt_contract = DataToken(data_token)
    mint_tokens_and_wait(dt_contract, cons_wallet, pub_wallet)

    # CHECKPOINT 1
    algorithm_meta = {
        "rawcode": "console.log('Hello world'!)",
        "format": "docker-image",
        "version": "0.1",
        "container": {
            "entrypoint": "node $ALGO",
            "image": "node",
            "tag": "10"
        },
    }
    # prepare parameter values for the compute endpoint
    # signature, documentId, consumerAddress, and algorithmDid or algorithmMeta

    sa = ServiceAgreement.from_ddo(ServiceTypes.CLOUD_COMPUTE,
                                   dataset_ddo_w_compute_service)
    tx_id = send_order(client, ddo, dt_contract, sa, cons_wallet)
    nonce = get_nonce(client, cons_wallet.address)

    # prepare consumer signature on did
    msg = f"{cons_wallet.address}{did}{nonce}"
    _hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_hash, cons_wallet)

    # Start the compute job
    payload = dict({
        "signature": signature,
        "documentId": did,
        "serviceId": sa.index,
        "serviceType": sa.type,
        "consumerAddress": cons_wallet.address,
        "transferTxId": tx_id,
        "dataToken": data_token,
        "output": build_stage_output_dict(dict(), sa.service_endpoint,
                                          cons_wallet.address, pub_wallet),
        "algorithmMeta": algorithm_meta,
        "algorithmDataToken": "",
    })

    compute_endpoint = BaseURLs.ASSETS_URL + "/compute"
    response = client.post(compute_endpoint,
                           data=json.dumps(payload),
                           content_type="application/json")
    assert (response.status == "400 BAD REQUEST"
            ), f"start compute job failed: {response.status} , {response.data}"