Example #1
    def search(self,
               text: str,
               sort=None,
               offset=100,
               page=1,
               metadata_cache_uri=None) -> list:
        """
        Search for an asset in OceanDB using Aquarius.

        :param text: text to search for
        :param sort: dictionary describing how the results should be sorted
        :param offset: number of results returned per page
        :param page: page number
        :param metadata_cache_uri: URL of the Aquarius instance to search. If not
            provided, the default is used
        :return: list of assets that match the query
        """
        assert page >= 1, f"Invalid page value {page}. Required page >= 1."
        logger.info(f"Searching asset containing: {text}")
        return [
            Asset(dictionary=ddo_dict)
            for ddo_dict in self._get_aquarius(metadata_cache_uri).
            query_search({"query": {
                "query_string": {
                    "query": text
                }
            }}, sort, offset, page)["results"]
        ]
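A minimal usage sketch for this text search, assuming an initialized Ocean instance named `ocean` that exposes the method above via its `assets` attribute; the instance name, search text, and printed fields are illustrative placeholders:

# Hypothetical usage sketch; `ocean` and the search text are placeholders.
results = ocean.assets.search("weather data", page=1)
for asset in results:
    print(asset.did, asset.metadata["main"]["name"])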
Example #2
def wait_for_ddo(ddo_store, did, timeout=30):
    start = time.time()
    ddo = None
    while not ddo:
        try:
            ddo = ddo_store.get_asset_ddo(did)
        except ValueError:
            pass

        if not ddo:
            time.sleep(0.2)

        if time.time() - start > timeout:
            break

    # Guard against the timeout case: ddo may still be None here.
    if not ddo:
        return None

    return Asset(dictionary=ddo.as_dictionary())
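A sketch of how this polling helper might be called right after publishing, assuming `aquarius` is the metadata store client and `ddo` the freshly created asset; both names are placeholders:

# Hypothetical usage sketch: block until Aquarius has indexed the new DDO.
asset = wait_for_ddo(aquarius, ddo.did, timeout=60)
assert asset, f"DDO for {ddo.did} was not resolvable within the timeout."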
Example #3
    def search(self, text: str, sort=None, offset=100, page=1, aquarius_url=None) -> list:
        """
        Search for an asset in OceanDB using Aquarius.

        :param text: text to search for
        :param sort: dictionary describing how the results should be sorted
        :param offset: number of results returned per page
        :param page: page number
        :param aquarius_url: URL of the Aquarius instance to search. If not
            provided, the default is used
        :return: list of assets that match the query
        """
        assert page >= 1, f'Invalid page value {page}. Required page >= 1.'
        logger.info(f'Searching asset containing: {text}')
        return [Asset(dictionary=ddo_dict) for ddo_dict in
                self._get_aquarius(aquarius_url).text_search(text, sort, offset, page)['results']]
Example #4
    def query(self, query: dict, sort=None, offset=100, page=1, aquarius_url=None) -> list:
        """
        Search for an asset in OceanDB using a search query.

        :param query: dict with query parameters, see e.g.
            https://github.com/oceanprotocol/aquarius/blob/develop/docs/for_api_users/API.md
        :param sort: dictionary describing how the results should be sorted
        :param offset: number of results returned per page
        :param page: page number
        :param aquarius_url: URL of the Aquarius instance to search. If not
            provided, the default is used
        :return: list of assets that match the query.
        """
        logger.info(f'Searching asset query: {query}')
        aquarius = self._get_aquarius(aquarius_url)
        return [Asset(dictionary=ddo_dict) for ddo_dict in
                aquarius.query_search(query, sort, offset, page)['results']]
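A minimal sketch of a structured query, assuming `ocean` is an initialized Ocean instance; the query body reuses the `query_string` form shown in Example #1, and the search term is illustrative:

# Hypothetical usage sketch; `ocean` and the search term are placeholders.
query = {"query": {"query_string": {"query": "weather"}}}
matches = ocean.assets.query(query, offset=50, page=1)
print(f"found {len(matches)} matching assets")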
Example #5
    def update(self, asset: Asset, publisher_wallet: Wallet) -> bool:
        try:
            # publish the new ddo in ocean-db/Aquarius
            ddo_registry = self.ddo_registry()
            web3 = Web3Provider.get_web3()
            tx_id = ddo_registry.update(
                asset.asset_id,
                bytes([1]),
                lzma.compress(web3.toBytes(text=asset.as_text())),
                publisher_wallet
            )
            if not ddo_registry.verify_tx(tx_id):
                raise AssertionError(
                    f'update DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}'
                )
            logger.info('Asset/ddo updated on-chain successfully.')
        except ValueError as ve:
            raise ValueError(f'Invalid value to publish in the metadata: {str(ve)}')
        except Exception as e:
            logger.error(f'Publish asset on-chain failed: {str(e)}')
            raise
Example #6
def resolve_asset(did, metadata_cache_uri=None, token_address=None):
    """Resolve a DID to an URL/DDO or later an internal/external DID.

    :param did: the asset id to resolve, this is part of the ocean
        DID did:op:<32 byte value>
    :param metadata_cache_uri: str the url of the metadata store
    :param token_address: str the address of the DataToken smart contract

    :return string: DDO of the resolved DID
    :return None: if the DID cannot be resolved
    """
    assert (metadata_cache_uri or token_address
            ), "One of metadata_cache_uri or token_address is required."

    metadata_url = metadata_cache_uri
    if not metadata_cache_uri and token_address:
        metadata_url = DataToken(token_address).get_metadata_url()

    logger.debug(f"found did {did} -> url={metadata_url}")
    ddo = AquariusProvider.get_aquarius(metadata_url).get_asset_ddo(did)
    return Asset(dictionary=ddo.as_dictionary())
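A sketch of resolving a DID through this helper, assuming a locally running Aquarius; `did` and `token_address` are placeholder variables and the URL is illustrative:

# Hypothetical usage sketch: resolve against a known metadata cache ...
asset = resolve_asset(did, metadata_cache_uri="http://localhost:5000")
# ... or let the DataToken contract supply the metadata URL instead:
# asset = resolve_asset(did, token_address=token_address)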
Example #7
def test_compute_flow():
    ######
    # setup
    pub_wallet = get_publisher_wallet()
    p_ocean_instance = get_publisher_ocean_instance()
    c_ocean_instance = get_consumer_ocean_instance()
    cons_ocn = c_ocean_instance
    consumer_wallet = get_consumer_wallet()

    ######
    # Publish Assets

    # Dataset with compute service
    sample_ddo_path = get_resource_path('ddo', 'ddo_with_compute_service.json')
    old_ddo = Asset(json_filename=sample_ddo_path)
    metadata = old_ddo.metadata
    metadata['main']['files'][0]['checksum'] = str(uuid.uuid4())
    service = old_ddo.get_service(ServiceTypes.CLOUD_COMPUTE)
    compute_service = ServiceDescriptor.compute_service_descriptor(
        service.attributes,
        DataServiceProvider.get_url(p_ocean_instance.config))
    block = p_ocean_instance.web3.eth.blockNumber
    compute_ddo = p_ocean_instance.assets.create(
        metadata,
        pub_wallet,
        service_descriptors=[compute_service],
    )
    did = compute_ddo.did

    ddo_reg = p_ocean_instance.assets.ddo_registry()
    log = ddo_reg.get_event_log(ddo_reg.EVENT_METADATA_CREATED, block,
                                compute_ddo.asset_id, 30)
    assert log, f'no ddo created event.'

    ddo = wait_for_ddo(p_ocean_instance, compute_ddo.did)
    assert ddo, f'resolve did {compute_ddo.did} failed.'

    _compute_ddo = p_ocean_instance.assets.resolve(compute_ddo.did)

    # algorithm with download service
    algorithm_ddo_path = get_resource_path('ddo', 'ddo_sample_algorithm.json')
    algo_main = Asset(json_filename=algorithm_ddo_path).metadata['main']
    algo_meta_dict = algo_main['algorithm'].copy()
    algo_meta_dict['url'] = algo_main['files'][0]['url']
    algorithm_meta = AlgorithmMetadata(algo_meta_dict)

    ######
    # Mint tokens for dataset and assign to publisher
    dt = p_ocean_instance.get_data_token(compute_ddo.data_token_address)
    mint_tokens_and_wait(dt, pub_wallet.address, pub_wallet)

    ######
    # Give the consumer some datatokens so they can order the service
    try:
        tx_id = dt.transfer_tokens(consumer_wallet.address, 10, pub_wallet)
        dt.verify_transfer_tx(tx_id, pub_wallet.address,
                              consumer_wallet.address)
    except (AssertionError, Exception) as e:
        print(e)
        raise

    ######
    # Order compute service from the dataset asset
    order_requirements = cons_ocn.assets.order(
        compute_ddo.did,
        consumer_wallet.address,
        service_type=ServiceTypes.CLOUD_COMPUTE)

    ######
    # Start the order on-chain using the `order` requirements from previous step
    service = compute_ddo.get_service(ServiceTypes.CLOUD_COMPUTE)
    _order_tx_id = cons_ocn.assets.pay_for_service(
        order_requirements.amount, order_requirements.data_token_address,
        compute_ddo.did, service.index,
        '0xF9f2DB837b3db03Be72252fAeD2f6E0b73E428b9', consumer_wallet)

    ######
    job_id = cons_ocn.compute.start(did,
                                    consumer_wallet,
                                    _order_tx_id,
                                    nonce=order_requirements.nonce,
                                    algorithm_meta=algorithm_meta)
    assert job_id, f'expected a job id, got {job_id}'

    status = cons_ocn.compute.status(did, job_id, consumer_wallet)
    print(f'got job status: {status}')
    assert status and status[
        'ok'], f'something not right about the compute job, got status: {status}'

    status = cons_ocn.compute.stop(did, job_id, consumer_wallet)
    print(f'got job status after requesting stop: {status}')
    assert status, f'something not right about the compute job, got status: {status}'
Example #8
    def create(
        self,
        metadata: dict,
        publisher_wallet: Wallet,
        service_descriptors: list = None,
        owner_address: str = None,
        data_token_address: str = None,
        provider_uri=None,
        dt_name: str = None,
        dt_symbol: str = None,
        dt_blob: str = None,
        dt_cap: float = None,
    ) -> (Asset, None):
        """
        Register an asset on-chain by creating/deploying a DataToken contract
        and in the Metadata store (Aquarius).

        :param metadata: dict conforming to the Metadata accepted by Ocean Protocol.
        :param publisher_wallet: Wallet of the publisher registering this asset
        :param service_descriptors: list of ServiceDescriptor tuples of length 2.
            The first item must be one of ServiceTypes and the second
            item is a dict of parameters and values required by the service
        :param owner_address: hex str the ethereum address to assign asset ownership to. After
            registering the asset on-chain, the ownership is transferred to this address
        :param data_token_address: hex str the address of the data token smart contract. The new
            asset will be associated with this data token address.
        :param provider_uri: str URL of service provider. This will be used as the base to
            construct the serviceEndpoint for the `access` (download) service
        :param dt_name: str name of DataToken if creating a new one
        :param dt_symbol: str symbol of DataToken if creating a new one
        :param dt_blob: str blob of DataToken if creating a new one. A `blob` is any text
            to be stored with the ERC20 DataToken contract for any purpose.
        :param dt_cap: float cap (maximum token supply) of the DataToken if creating a new one
        :return: DDO instance
        """
        assert isinstance(
            metadata,
            dict), f"Expected metadata of type dict, got {type(metadata)}"
        assert service_descriptors is None or isinstance(
            service_descriptors, list
        ), f"bad type of `service_descriptors` {type(service_descriptors)}"

        # copy metadata so we don't change the original
        metadata_copy = copy.deepcopy(metadata)
        asset_type = metadata_copy["main"]["type"]
        assert asset_type in (
            "dataset",
            "algorithm",
        ), f"Invalid/unsupported asset type {asset_type}"
        if not plecos.is_valid_dict_local(metadata_copy):
            errors = plecos.list_errors_dict_local(metadata_copy)
            msg = f"Metadata has validation errors: {errors}"
            logger.error(msg)
            raise ValueError(msg)

        service_descriptors = service_descriptors or []

        services = self._process_service_descriptors(service_descriptors,
                                                     metadata_copy,
                                                     provider_uri,
                                                     publisher_wallet)

        stype_to_service = {s.type: s for s in services}
        checksum_dict = dict()
        for service in services:
            checksum_dict[str(service.index)] = checksum(service.main)

        # Create a DDO object
        asset = Asset()
        # Adding proof to the ddo.
        asset.add_proof(checksum_dict, publisher_wallet)

        #################
        # DataToken
        address = DTFactory.configured_address(Web3Helper.get_network_name(),
                                               self._config.address_file)
        dtfactory = DTFactory(address)
        if not data_token_address:
            blob = dt_blob or ""
            name = dt_name or metadata["main"]["name"]
            symbol = dt_symbol or name
            # register on-chain
            _cap = dt_cap if dt_cap else DataToken.DEFAULT_CAP
            tx_id = dtfactory.createToken(blob,
                                          name,
                                          symbol,
                                          to_base_18(_cap),
                                          from_wallet=publisher_wallet)
            data_token = DataToken(dtfactory.get_token_address(tx_id))
            if not data_token:
                logger.warning("Creating new data token failed.")
                return None

            data_token_address = data_token.address

            logger.info(f"Successfully created data token with address "
                        f"{data_token.address} for new dataset asset.")
            # owner_address is set as minter only if creating new data token. So if
            # `data_token_address` is set `owner_address` has no effect.
            if owner_address:
                data_token.proposeMinter(owner_address,
                                         from_wallet=publisher_wallet)
        else:
            # verify data_token_address
            dt = DataToken(data_token_address)
            minter = dt.contract_concise.minter()
            if not minter:
                raise AssertionError(
                    f"datatoken address {data_token_address} does not seem to be a valid DataToken contract."
                )
            elif minter.lower() != publisher_wallet.address.lower():
                raise AssertionError(
                    f"Minter of datatoken {data_token_address} is not the same as the publisher."
                )
            elif not dtfactory.verify_data_token(data_token_address):
                raise AssertionError(
                    f"datatoken address {data_token_address} is not found in the DTFactory events."
                )

        assert (
            data_token_address
        ), "data_token_address is required for publishing a dataset asset."

        # Generating the did and adding to the ddo.
        did = asset.assign_did(
            f"did:op:{remove_0x_prefix(data_token_address)}")
        logger.debug(f"Using datatoken address as did: {did}")
        # Check if it's already registered first!
        if did in self._get_aquarius().list_assets():
            raise OceanDIDAlreadyExist(
                f"Asset id {did} is already registered to another asset.")

        md_service = stype_to_service[ServiceTypes.METADATA]
        ddo_service_endpoint = md_service.service_endpoint
        if "{did}" in ddo_service_endpoint:
            ddo_service_endpoint = ddo_service_endpoint.replace("{did}", did)
            md_service.set_service_endpoint(ddo_service_endpoint)

        # Populate the ddo services
        asset.add_service(md_service)
        access_service = stype_to_service.get(ServiceTypes.ASSET_ACCESS, None)
        compute_service = stype_to_service.get(ServiceTypes.CLOUD_COMPUTE,
                                               None)

        if access_service:
            asset.add_service(access_service)
        if compute_service:
            asset.add_service(compute_service)

        asset.proof["signatureValue"] = Web3Helper.sign_hash(
            add_ethereum_prefix_and_hash_msg(asset.asset_id), publisher_wallet)

        # Add public key and authentication
        asset.add_public_key(did, publisher_wallet.address)

        asset.add_authentication(did, PUBLIC_KEY_TYPE_RSA)

        # Setup metadata service
        # First compute files_encrypted
        assert metadata_copy["main"][
            "files"], "files is required in the metadata main attributes."
        logger.debug("Encrypting content urls in the metadata.")

        publisher_signature = self._data_provider.sign_message(
            publisher_wallet, asset.asset_id, self._config)
        _, encrypt_endpoint = self._data_provider.build_encrypt_endpoint(
            provider_uri)
        files_encrypted = self._data_provider.encrypt_files_dict(
            metadata_copy["main"]["files"],
            encrypt_endpoint,
            asset.asset_id,
            publisher_wallet.address,
            publisher_signature,
        )

        # only assign if the encryption worked
        if files_encrypted:
            logger.debug(
                f"Content urls encrypted successfully {files_encrypted}")
            index = 0
            for file in metadata_copy["main"]["files"]:
                file["index"] = index
                index = index + 1
                del file["url"]
            metadata_copy["encryptedFiles"] = files_encrypted
        else:
            raise AssertionError("Encrypting the files failed.")

        logger.debug(f"Generated asset and services, DID is {asset.did},"
                     f" metadata service @{ddo_service_endpoint}.")

        # Set datatoken address in the asset
        asset.data_token_address = data_token_address

        try:
            # publish the new ddo in ocean-db/Aquarius
            ddo_registry = self.ddo_registry()
            web3 = Web3Provider.get_web3()
            tx_id = ddo_registry.create(
                asset.asset_id,
                bytes([1]),
                lzma.compress(web3.toBytes(text=asset.as_text())),
                publisher_wallet,
            )
            if not ddo_registry.verify_tx(tx_id):
                raise AssertionError(
                    f"create DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}"
                )
            logger.info("Asset/ddo published on-chain successfully.")
        except ValueError as ve:
            raise ValueError(
                f"Invalid value to publish in the metadata: {str(ve)}")
        except Exception as e:
            logger.error(f"Publish asset on-chain failed: {str(e)}")
            raise

        return asset
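A condensed publishing sketch using this method, assuming `ocean` is an initialized Ocean instance and `publisher_wallet` a funded Wallet; the metadata dict below is a minimal illustration and a real deployment may require additional fields to pass plecos validation:

# Hypothetical usage sketch: register a dataset asset and deploy a new DataToken.
metadata = {
    "main": {
        "type": "dataset",
        "name": "Example dataset",
        "author": "Alice",
        "license": "CC0: Public Domain",
        "dateCreated": "2021-01-01T00:00:00Z",
        "files": [{"url": "https://example.com/data.csv", "contentType": "text/csv"}],
    }
}
asset = ocean.assets.create(metadata, publisher_wallet, dt_name="Example Token", dt_symbol="EXT")
if asset:
    print(asset.did, asset.data_token_address)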
Example #9
def get_registered_ddo(client, wallet, metadata, service_descriptor):
    aqua = Aquarius('http://localhost:5000')
    ddo_service_endpoint = aqua.get_service_endpoint()

    metadata_store_url = json.dumps({'t': 1, 'url': ddo_service_endpoint})
    # Create new data token contract
    addresses = get_contracts_addresses(get_address_file())
    dt_address = addresses.get(DTFactory.CONTRACT_NAME)
    if dt_address:
        factory_contract = DTFactory(dt_address)
    else:
        factory_contract = new_factory_contract()

    ddo_contract_address = addresses.get(MetadataContract.CONTRACT_NAME)
    metadata_contract = MetadataContract(ddo_contract_address)

    tx_id = factory_contract.createToken(metadata_store_url, 'DataToken1',
                                         'DT1', to_base_18(1000000), wallet)
    dt_contract = DataToken(factory_contract.get_token_address(tx_id))
    if not dt_contract:
        raise AssertionError('Creation of data token contract failed.')

    ddo = Asset()
    ddo.data_token_address = dt_contract.address

    metadata_service_desc = ServiceDescriptor.metadata_service_descriptor(
        metadata, ddo_service_endpoint)
    service_descriptors = list([
        ServiceDescriptor.authorization_service_descriptor(
            'http://localhost:12001')
    ])
    service_descriptors.append(service_descriptor)
    service_type = service_descriptor[0]

    service_descriptors = [metadata_service_desc] + service_descriptors

    services = ServiceFactory.build_services(service_descriptors)
    checksums = dict()
    for service in services:
        checksums[str(service.index)] = checksum(service.main)

    # Adding proof to the ddo.
    ddo.add_proof(checksums, wallet)

    did = ddo.assign_did(f'did:op:{remove_0x_prefix(ddo.data_token_address)}')
    # str.replace returns a new string, so the result must be reassigned.
    ddo_service_endpoint = ddo_service_endpoint.replace('{did}', did)
    services[0].set_service_endpoint(ddo_service_endpoint)

    stype_to_service = {s.type: s for s in services}
    _service = stype_to_service[service_type]

    for service in services:
        ddo.add_service(service)

    # ddo.proof['signatureValue'] = ocean_lib.sign_hash(
    #     did_to_id_bytes(did), account)

    ddo.add_public_key(did, wallet.address)

    ddo.add_authentication(did, PUBLIC_KEY_TYPE_RSA)

    # if not plecos.is_valid_dict_local(ddo.metadata):
    #     print(f'invalid metadata: {plecos.validate_dict_local(ddo.metadata)}')
    #     assert False, f'invalid metadata: {plecos.validate_dict_local(ddo.metadata)}'

    files_list_str = json.dumps(metadata['main']['files'])
    encrypted_files = encrypt_document(client, did, files_list_str, wallet)
    # encrypted_files = do_encrypt(files_list_str, provider_wallet)

    # only assign if the encryption worked
    if encrypted_files:
        index = 0
        for file in metadata['main']['files']:
            file['index'] = index
            index = index + 1
            del file['url']
        metadata['encryptedFiles'] = encrypted_files

    web3 = Web3Provider.get_web3()
    block = web3.eth.blockNumber
    try:
        data = lzma.compress(web3.toBytes(text=ddo.as_text()))
        tx_id = metadata_contract.create(ddo.asset_id, bytes([1]), data,
                                         wallet)
        if not metadata_contract.verify_tx(tx_id):
            raise AssertionError(
                f'create DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}'
            )
    except Exception as e:
        print(f'error publishing ddo {ddo.did} in Aquarius: {e}')
        raise

    log = metadata_contract.get_event_log(
        metadata_contract.EVENT_METADATA_CREATED, block, ddo.asset_id, 30)
    assert log, f'no ddo created event.'

    ddo = wait_for_ddo(aqua, ddo.did)
    assert ddo, f'resolve did {ddo.did} failed.'

    return ddo
Example #10
def test_ddo_on_chain():
    config = ConfigProvider.get_config()
    ddo_address = get_contracts_addresses(
        "ganache", config)[MetadataContract.CONTRACT_NAME]
    dtfactory_address = get_contracts_addresses(
        "ganache", config)[DTFactory.CONTRACT_NAME]
    ddo_registry = MetadataContract(ddo_address)
    wallet = get_publisher_wallet()
    web3 = Web3Provider.get_web3()

    dtfactory = DTFactory(dtfactory_address)
    tx_id = dtfactory.createToken("", "dt1", "dt1", 1000, wallet)
    dt = DataToken(dtfactory.get_token_address(tx_id))

    # test create ddo
    asset = get_ddo_sample(dt.address)
    old_name = asset.metadata["main"]["name"]
    txid = ddo_registry.create(
        asset.asset_id, b"", lzma.compress(web3.toBytes(text=asset.as_text())),
        wallet)
    assert ddo_registry.verify_tx(txid), f"create ddo failed: txid={txid}"
    logs = ddo_registry.event_MetadataCreated.processReceipt(
        ddo_registry.get_tx_receipt(txid))
    assert logs, f"no logs found for create ddo tx {txid}"
    log = logs[0]
    assert add_0x_prefix(log.args.dataToken) == asset.asset_id
    # read back the asset ddo from the event log
    ddo_text = web3.toText(lzma.decompress(log.args.data))
    assert ddo_text == asset.as_text(), "ddo text does not match original."

    _asset = Asset(json_text=ddo_text)
    assert _asset.did == asset.did, "did does not match."
    name = _asset.metadata["main"]["name"]
    assert name == old_name, f"name does not match: {name} != {old_name}"

    # test_update ddo
    asset.metadata["main"]["name"] = "updated name for test"
    txid = ddo_registry.update(
        asset.asset_id, b"", lzma.compress(web3.toBytes(text=asset.as_text())),
        wallet)
    assert ddo_registry.verify_tx(txid), f"update ddo failed: txid={txid}"
    logs = ddo_registry.event_MetadataUpdated.processReceipt(
        ddo_registry.get_tx_receipt(txid))
    assert logs, f"no logs found for update ddo tx {txid}"
    log = logs[0]
    assert add_0x_prefix(log.args.dataToken) == asset.asset_id
    # read back the asset ddo from the event log
    ddo_text = web3.toText(lzma.decompress(log.args.data))
    assert ddo_text == asset.as_text(), "ddo text does not match original."
    _asset = Asset(json_text=ddo_text)
    assert (_asset.metadata["main"]["name"] == "updated name for test"
            ), "name does not seem to be updated."
    assert DataToken(asset.asset_id).contract_concise.isMinter(wallet.address)

    # test update fails from wallet other than the original publisher
    bob = get_consumer_wallet()
    try:
        txid = ddo_registry.update(
            asset.asset_id, b"",
            lzma.compress(web3.toBytes(text=asset.as_text())), bob)
        assert ddo_registry.verify_tx(
            txid) is False, f"update ddo failed: txid={txid}"
        logs = ddo_registry.event_MetadataUpdated.processReceipt(
            ddo_registry.get_tx_receipt(txid))
        assert (
            not logs
        ), f"should be no logs for MetadataUpdated, but seems there are some logs: tx {txid}, logs {logs}"
    except ValueError:
        print("as expected, only owner can update a published ddo.")

    # test ddoOwner
    assert DataToken(asset.asset_id).contract_concise.isMinter(
        wallet.address
    ), (f"ddo owner does not match the expected publisher address {wallet.address}, "
        f"owner is {DataToken(asset.asset_id).contract_concise.minter(wallet.address)}"
        )
Example #11
def get_algorithm_meta():
    algorithm_ddo_path = get_resource_path("ddo", "ddo_algorithm.json")
    algo_main = Asset(json_filename=algorithm_ddo_path).metadata["main"]
    algo_meta_dict = algo_main["algorithm"].copy()
    algo_meta_dict["url"] = algo_main["files"][0]["url"]
    return AlgorithmMetadata(algo_meta_dict)
Example #12
def get_sample_ddo_with_compute_service() -> Asset:
    return Asset(json_filename=get_resource_path(
        "ddo", "ddo_with_compute_service.json"))
Example #13
def get_sample_ddo() -> Asset:
    return Asset(json_filename=get_resource_path("ddo", "ddo_sa_sample.json"))
Example #14
def get_ddo_sample() -> Asset:
    return Asset(json_filename=get_resource_path('ddo', 'ddo_sa_sample.json'))