Example #1
def test_provider_address_with_url():
    """Tests that a URL version of provider address exists on the DataServiceProvider."""
    p_ocean_instance = get_publisher_ocean_instance()
    provider_address = DataSP.get_provider_address(
        DataSP.get_url(p_ocean_instance.config))
    assert provider_address, "Failed to get provider address."
    assert DataSP.get_provider_address("not a url") is None
Example #2
def get_registered_ddo_with_compute_service(
    ocean_instance,
    wallet,
    provider_uri=None,
    trusted_algorithms=None,
    trusted_algorithm_publishers=None,
):
    old_ddo = get_sample_ddo_with_compute_service()
    metadata = old_ddo.metadata
    metadata["main"]["files"][0]["checksum"] = str(uuid.uuid4())
    service = old_ddo.get_service(ServiceTypes.CLOUD_COMPUTE)
    compute_attributes = ocean_instance.compute.create_compute_service_attributes(
        service.attributes["main"]["timeout"],
        service.attributes["main"]["creator"],
        service.attributes["main"]["datePublished"],
        service.attributes["main"]["provider"],
        privacy_attributes=ocean_instance.compute.build_service_privacy_attributes(
            trusted_algorithms=trusted_algorithms,
            trusted_algorithm_publishers=trusted_algorithm_publishers,
            metadata_cache_uri=ocean_instance.config.metadata_cache_uri,
            allow_raw_algorithm=True,
            allow_all_published_algorithms=not bool(trusted_algorithms),
        ),
    )
    compute_service = Service(
        service_endpoint=DataServiceProvider.get_url(ocean_instance.config),
        service_type=ServiceTypes.CLOUD_COMPUTE,
        attributes=compute_attributes,
    )

    return get_registered_ddo(
        ocean_instance, metadata, wallet, compute_service, provider_uri=provider_uri
    )
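A hedged usage sketch for the helper above: assuming publisher fixtures like those used elsewhere on this page (`publisher_ocean`, `publisher_wallet`) and a previously published algorithm DDO (`algorithm_ddo`), restricting compute to that single algorithm would look roughly like this. All three names are assumptions, not part of the original helper.

# Sketch only: publisher_ocean, publisher_wallet and algorithm_ddo are assumed fixtures.
compute_ddo = get_registered_ddo_with_compute_service(
    publisher_ocean,
    publisher_wallet,
    provider_uri=DataServiceProvider.get_url(publisher_ocean.config),
    trusted_algorithms=[algorithm_ddo.did],  # only this algorithm may run against the dataset
)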
Example #3
    def check_output_dict(
        output_def: Optional[Dict[str, Any]],
        consumer_address: str,
        data_provider: DataServiceProvider,
        config: Config,
    ) -> Dict[str, Any]:
        """
        Validate the `output_def` dict and fill in defaults for missing values.

        :param output_def: dict
        :param consumer_address: hex str the consumer ethereum address
        :param data_provider:  DataServiceProvider class or similar interface
        :param config: Config instance
        :return: dict a valid `output_def` object
        """
        default_output_def = {
            "nodeUri": config.network_url,
            "brizoUri": data_provider.get_url(config),
            "brizoAddress": config.provider_address,
            "metadata": dict(),
            "metadataUri": config.metadata_cache_uri,
            "owner": consumer_address,
            "publishOutput": 0,
            "publishAlgorithmLog": 0,
            "whitelist": [],
        }

        output_def = output_def if isinstance(output_def, dict) else dict()
        default_output_def.update(output_def)
        return default_output_def
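A small sketch of how `check_output_dict` merges caller-supplied values with its defaults, assuming a `config` object and the `DataServiceProvider` class as used above. It is shown as a plain call here even though in the library it lives on the compute API, and the consumer address is a placeholder.

# Hypothetical call; the address is a placeholder.
output = check_output_dict(
    {"publishOutput": 1},
    consumer_address="0x0000000000000000000000000000000000000001",
    data_provider=DataServiceProvider,
    config=config,
)
assert output["publishOutput"] == 1                                # caller-supplied value wins
assert output["brizoUri"] == DataServiceProvider.get_url(config)   # missing keys filled from defaults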
Example #4
def test_build_specific_endpoints(config):
    """Tests that a specific list of agreed endpoints is supported on the DataServiceProvider."""
    endpoints = TEST_SERVICE_ENDPOINTS

    def get_service_endpoints(_provider_uri=None):
        return TEST_SERVICE_ENDPOINTS.copy()

    original_func = DataSP.get_service_endpoints
    DataSP.get_service_endpoints = get_service_endpoints

    provider_uri = DataSP.get_url(config)
    base_uri = DataSP.get_root_uri(config.provider_url)
    assert DataSP.build_download_endpoint(provider_uri)[1] == urljoin(
        base_uri, endpoints["download"][1])
    assert DataSP.build_initialize_endpoint(provider_uri)[1] == urljoin(
        base_uri, endpoints["initialize"][1])
    assert DataSP.build_encrypt_endpoint(provider_uri)[1] == urljoin(
        base_uri, endpoints["encrypt"][1])
    assert DataSP.build_fileinfo(provider_uri)[1] == urljoin(
        base_uri, endpoints["fileinfo"][1])
    assert DataSP.build_compute_endpoint(provider_uri)[1] == urljoin(
        base_uri, endpoints["computeStatus"][1])
    assert DataSP.build_compute_endpoint(provider_uri)[1] == urljoin(
        base_uri, endpoints["computeStart"][1])
    assert DataSP.build_compute_endpoint(provider_uri)[1] == urljoin(
        base_uri, endpoints["computeStop"][1])
    assert DataSP.build_compute_endpoint(provider_uri)[1] == urljoin(
        base_uri, endpoints["computeDelete"][1])
    assert DataSP.build_asset_urls(provider_uri)[1] == urljoin(
        base_uri, endpoints["asset_urls"][1])

    DataSP.get_service_endpoints = original_func
Example #5
def create_compute_service(attributes, provider_uri=None):
    service_endpoint = provider_uri or DataServiceProvider.get_url(
        ConfigProvider.get_config()
    )
    return ServiceDescriptor.compute_service_descriptor(
        attributes, service_endpoint
    )
Example #6
def test_expose_endpoints(config):
    """Tests that the DataServiceProvider exposes all service endpoints."""
    service_endpoints = TEST_SERVICE_ENDPOINTS
    provider_uri = DataSP.get_url(config)
    valid_endpoints = DataSP.get_service_endpoints(provider_uri)
    assert len(valid_endpoints) == len(service_endpoints)
    assert [
        valid_endpoints[key]
        for key in set(service_endpoints) & set(valid_endpoints)
    ]
Example #7
def get_access_service_descriptor(
    ocean_instance, address, date_created, provider_uri=None, timeout=3600
):
    if not provider_uri:
        provider_uri = DataServiceProvider.get_url(ocean_instance.config)

    return ServiceDescriptor.access_service_descriptor(
        ocean_instance.assets.build_access_service(date_created, 1.0, address, timeout),
        DataServiceProvider.build_download_endpoint(provider_uri)[1],
    )
Example #8
def get_registered_ddo_with_compute_service(ocean_instance, wallet, provider_uri=None):
    old_ddo = get_sample_ddo_with_compute_service()
    metadata = old_ddo.metadata
    metadata["main"]["files"][0]["checksum"] = str(uuid.uuid4())
    service = old_ddo.get_service(ServiceTypes.CLOUD_COMPUTE)
    compute_service = ServiceDescriptor.compute_service_descriptor(
        service.attributes, DataServiceProvider.get_url(ocean_instance.config)
    )

    return get_registered_ddo(
        ocean_instance, metadata, wallet, compute_service, provider_uri=provider_uri
    )
Example #9
    def create_access_service(attributes, provider_uri=None):
        """Publish an asset with an `Access` service according to the supplied attributes.

        :param attributes: attributes of the access service, dict
        :param provider_uri: str URL of service provider. This will be used as base to
            construct the serviceEndpoint for the `access` (download) service
        :return: Service instance or None
        """
        service_endpoint = provider_uri or DataServiceProvider.get_url(
            ConfigProvider.get_config())
        service = ServiceDescriptor.access_service_descriptor(
            attributes, service_endpoint)
        return service
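A hedged usage sketch for `create_access_service`, assuming a `wallet` object and the attribute shape used elsewhere on this page; the resulting descriptor is what the other examples pass to `ocean.assets.create` as `service_descriptors`.

# Hypothetical attributes; the field names mirror the other examples on this page.
access_attributes = {
    "main": {
        "name": "dataAssetAccessServiceAgreement",
        "creator": wallet.address,
        "timeout": 3600 * 24,
        "datePublished": "2020-12-01T10:55:11Z",
        "cost": 1.0,
    }
}
# With no provider_uri, the endpoint falls back to DataServiceProvider.get_url(ConfigProvider.get_config()).
access_descriptor = create_access_service(access_attributes)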
Example #10
def create_asset(ocean, wallet, service_attributes, metadata, token_address):

    service_endpoint = DataServiceProvider.get_url(ocean.config)
    download_service = ServiceDescriptor.access_service_descriptor(
        service_attributes, service_endpoint)
    compute_attributes = trying_compute(wallet)
    # Compute service is built here but not attached to the asset created below.
    compute_service = ServiceDescriptor.compute_service_descriptor(
        compute_attributes, service_endpoint)
    print(download_service)
    print(compute_service)
    asset = ocean.assets.create(metadata,
                                wallet,
                                service_descriptors=[download_service],
                                data_token_address=token_address)
    return asset
Example #11
def get_registered_ddo(ocean_instance, wallet: Wallet):
    metadata = get_metadata()
    metadata["main"]["files"][0]["checksum"] = str(uuid.uuid4())
    ServiceDescriptor.access_service_descriptor(
        ocean_instance.assets._build_access_service(metadata, to_base_18(1),
                                                    wallet),
        DataServiceProvider.get_url(ocean_instance.config),
    )

    block = ocean_instance.web3.eth.blockNumber
    asset = ocean_instance.assets.create(metadata, wallet)
    ddo_reg = ocean_instance.assets.ddo_registry()
    log = ddo_reg.get_event_log(ddo_reg.EVENT_METADATA_CREATED, block,
                                asset.asset_id, 30)
    assert log, "no ddo created event."

    ddo = wait_for_ddo(ocean_instance, asset.did)
    assert ddo, f"resolve did {asset.did} failed."

    return asset
Example #12
def get_url(config):
    return DataServiceProvider.get_url(config)
Example #13
def test_provider_address(config):
    """Tests that a provider address exists on the DataServiceProvider."""
    provider_uri = DataSP.get_url(config)
    provider_address = DataSP.get_provider_address(provider_uri)
    assert provider_address, "Failed to get provider address."
Example #14
def test_provider_address_with_url():
    p_ocean_instance = get_publisher_ocean_instance()
    provider_address = DataServiceProvider.get_provider_address(
        DataServiceProvider.get_url(p_ocean_instance.config))
    assert provider_address, "Failed to get provider address."
Example #15
token_address = data_token.address


date_created = "2020-12-01T10:55:11Z"
service_attributes = {
    "main": {
        "name": "dataAssetAccessServiceAgreement",
        "creator": wallet.address,
        "timeout": 3600 * 24,
        "datePublished": date_created,
        "cost": 1.0,  # <don't change, this is obsolete>
    }
}

#service_endpoint = DataServiceProvider.get_url(ocean.config)
service_endpoint = DataServiceProvider.get_url(config)

download_service = ServiceDescriptor.access_service_descriptor(service_attributes, service_endpoint)


metadata = {
    "main": {
        "type": "dataset",
        "name": "S1Seven",
        "author": "Hannes",
        "license": "CC0: Public Domain",
        "dateCreated": date_created,
        "files": [
            {"index": 0, "contentType": "application/zip", "url": "https://s3.amazonaws.com/datacommons-seeding-us-east/10_Monkey_Species_Small/assets/training.zip"},
            {"index": 1, "contentType": "text/text", "url": "https://s3.amazonaws.com/datacommons-seeding-us-east/10_Monkey_Species_Small/assets/monkey_labels.txt"},
            {"index": 2, "contentType": "application/zip", "url": "https://s3.amazonaws.com/datacommons-seeding-us-east/10_Monkey_Species_Small/assets/validation.zip"},
        ],
    }
}

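The snippet above stops after building the metadata and the download service descriptor. A possible continuation, assuming an `ocean` instance and `wallet` consistent with the variables already used here and the same `ocean.assets.create` signature as the other examples on this page, would be:

# Sketch only: publish the asset with the descriptor and token created above.
asset = ocean.assets.create(
    metadata,
    wallet,
    service_descriptors=[download_service],
    data_token_address=token_address,
)
print("Created asset:", asset.did)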
Example #16
    def create(
        self,
        metadata: dict,
        publisher_wallet: Wallet,
        services: Optional[list] = None,
        owner_address: Optional[str] = None,
        data_token_address: Optional[str] = None,
        provider_uri: Optional[str] = None,
        dt_name: Optional[str] = None,
        dt_symbol: Optional[str] = None,
        dt_blob: Optional[str] = None,
        dt_cap: Optional[int] = None,
        encrypt: Optional[bool] = False,
    ) -> Optional[V3Asset]:
        """Register an asset on-chain.

        This involves creating/deploying a DataToken contract and publishing the asset metadata in the Metadata store (Aquarius).

        :param metadata: dict conforming to the Metadata accepted by Ocean Protocol.
        :param publisher_wallet: Wallet of the publisher registering this asset
        :param services: list of Service objects.
        :param owner_address: hex str the ethereum address to assign asset ownership to. After
            registering the asset on-chain, the ownership is transferred to this address
        :param data_token_address: hex str the address of the data token smart contract. The new
            asset will be associated with this data token address.
        :param provider_uri: str URL of service provider. This will be used as base to
            construct the serviceEndpoint for the `access` (download) service
        :param dt_name: str name of DataToken if creating a new one
        :param dt_symbol: str symbol of DataToken if creating a new one
        :param dt_blob: str blob of DataToken if creating a new one. A `blob` is any text
            to be stored with the ERC20 DataToken contract for any purpose.
        :param dt_cap: int amount of DataTokens to mint, denoted in wei
        :param encrypt: Optional bool, if True the asset (DDO) contents are encrypted before being published on-chain
        :return: DDO instance
        """
        assert isinstance(
            metadata, dict
        ), f"Expected metadata of type dict, got {type(metadata)}"

        # copy metadata so we don't change the original
        metadata_copy = copy.deepcopy(metadata)
        asset_type = metadata_copy["main"]["type"]
        assert asset_type in (
            "dataset",
            "algorithm",
        ), f"Invalid/unsupported asset type {asset_type}"

        validation_result, validation_errors = self.validate(metadata)
        if not validation_result:
            msg = f"Metadata has validation errors: {validation_errors}"
            logger.error(msg)
            raise ValueError(msg)

        urls = [item["url"] for item in metadata["main"]["files"]]
        if not provider_uri:
            provider_uri = DataServiceProvider.get_url(self._config)
        for url in urls:
            if not DataServiceProvider.check_single_file_info(url, provider_uri):
                msg = f"The URL of this service can not be accessed: {url}."
                logger.error(msg)
                raise ValueError(msg)

        services = services or []
        services = self._add_defaults(
            services, metadata_copy, provider_uri, publisher_wallet
        )

        checksum_dict = dict()
        for service in services:
            checksum_dict[str(service.index)] = checksum(service.main)

        # Create a DDO object
        asset = V3Asset()
        # Adding proof to the ddo.
        asset.add_proof(checksum_dict, publisher_wallet)

        #################
        # DataToken
        address = DTFactory.configured_address(
            get_network_name(web3=self._web3), self._config.address_file
        )
        dtfactory = DTFactory(self._web3, address)
        if not data_token_address:
            blob = dt_blob or ""
            name = dt_name or metadata["main"]["name"]
            symbol = dt_symbol or name
            # register on-chain
            _cap = dt_cap if dt_cap else DataToken.DEFAULT_CAP
            tx_id = dtfactory.createToken(
                blob, name, symbol, _cap, from_wallet=publisher_wallet
            )
            data_token = DataToken(self._web3, dtfactory.get_token_address(tx_id))
            if not data_token:
                logger.warning("Creating new data token failed.")
                return None

            data_token_address = data_token.address

            logger.info(
                f"Successfully created data token with address "
                f"{data_token.address} for new dataset asset."
            )
            # owner_address is set as minter only if creating new data token. So if
            # `data_token_address` is set `owner_address` has no effect.
            if owner_address:
                data_token.proposeMinter(owner_address, from_wallet=publisher_wallet)
        else:
            if not dtfactory.verify_data_token(data_token_address):
                raise ContractNotFound(
                    f"datatoken address {data_token_address} is not found in the DTFactory events."
                )
            # verify data_token_address
            dt = DataToken(self._web3, data_token_address)
            minter = dt.contract.caller.minter()
            if not minter:
                raise AssertionError(
                    f"datatoken address {data_token_address} does not seem to be a valid DataToken contract."
                )
            elif minter.lower() != publisher_wallet.address.lower():
                raise AssertionError(
                    f"Minter of datatoken {data_token_address} is not the same as the publisher."
                )

        assert (
            data_token_address
        ), "data_token_address is required for publishing a dataset asset."

        # Generating the did and adding to the ddo.
        did = f"did:op:{remove_0x_prefix(data_token_address)}"
        asset.did = did
        logger.debug(f"Using datatoken address as did: {did}")
        # Check if it's already registered first!
        if self._get_aquarius().ddo_exists(did):
            raise AquariusError(
                f"Asset id {did} is already registered to another asset."
            )

        for service in services:
            if service.type == ServiceTypes.METADATA:
                ddo_service_endpoint = service.service_endpoint
                if "{did}" in ddo_service_endpoint:
                    ddo_service_endpoint = ddo_service_endpoint.replace("{did}", did)
                    service.service_endpoint = ddo_service_endpoint

            asset.add_service(service)

        asset.proof["signatureValue"] = sign_hash(
            encode_defunct(text=asset.asset_id), publisher_wallet
        )

        # Setup metadata service
        # First compute files_encrypted
        assert metadata_copy["main"][
            "files"
        ], "files is required in the metadata main attributes."
        logger.debug("Encrypting content urls in the metadata.")

        publisher_signature = self._data_provider.sign_message(
            publisher_wallet, asset.asset_id, provider_uri=provider_uri
        )
        _, encrypt_endpoint = self._data_provider.build_encrypt_endpoint(provider_uri)
        files_encrypted = self._data_provider.encrypt_files_dict(
            metadata_copy["main"]["files"],
            encrypt_endpoint,
            asset.asset_id,
            publisher_wallet.address,
            publisher_signature,
        )

        # only assign if the encryption worked
        if files_encrypted:
            logger.debug(f"Content urls encrypted successfully {files_encrypted}")
            index = 0
            for file in metadata_copy["main"]["files"]:
                file["index"] = index
                index = index + 1
                del file["url"]
            metadata_copy["encryptedFiles"] = files_encrypted
        else:
            raise AssertionError("Encrypting the files failed.")

        logger.debug(
            f"Generated asset and services, DID is {asset.did},"
            f" metadata service @{ddo_service_endpoint}."
        )

        # Set datatoken address in the asset
        asset.data_token_address = data_token_address
        flags, asset_contents = self._build_asset_contents(asset, encrypt)

        try:
            # publish the new ddo in ocean-db/Aquarius
            ddo_registry = self.ddo_registry()
            tx_id = ddo_registry.create(
                asset.asset_id, flags, asset_contents, publisher_wallet
            )
            if not ddo_registry.verify_tx(tx_id):
                raise VerifyTxFailed(
                    f"create DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}"
                )
            logger.info("Asset/ddo published on-chain successfully.")
        except ValueError as ve:
            raise ValueError(f"Invalid value to publish in the metadata: {str(ve)}")
        except Exception as e:
            logger.error(f"Publish asset on-chain failed: {str(e)}")
            raise

        return asset
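To put the `create` method above in context, here is a minimal, hedged call sketch; `ocean` and `publisher_wallet` are assumed to exist, and every literal value (names, dates, the file URL) is illustrative rather than taken from the original source.

# Hypothetical invocation of ocean.assets.create; the metadata follows the schema the method validates.
sample_metadata = {
    "main": {
        "type": "dataset",
        "name": "example-dataset",
        "author": "example-author",
        "license": "CC0: Public Domain",
        "dateCreated": "2021-01-01T00:00:00Z",
        "files": [
            {"index": 0, "contentType": "text/text", "url": "https://example.com/data.txt"}
        ],
    }
}
asset = ocean.assets.create(sample_metadata, publisher_wallet, dt_name="ExampleDT", dt_symbol="EXDT")
print(asset.did, asset.data_token_address)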
Example #17
def test_compute_flow():
    ######
    # setup
    pub_wallet = get_publisher_wallet()
    p_ocean_instance = get_publisher_ocean_instance()
    c_ocean_instance = get_consumer_ocean_instance()
    cons_ocn = c_ocean_instance
    consumer_wallet = get_consumer_wallet()

    ######
    # Publish Assets

    # Dataset with compute service
    sample_ddo_path = get_resource_path('ddo', 'ddo_with_compute_service.json')
    old_ddo = Asset(json_filename=sample_ddo_path)
    metadata = old_ddo.metadata
    metadata['main']['files'][0]['checksum'] = str(uuid.uuid4())
    service = old_ddo.get_service(ServiceTypes.CLOUD_COMPUTE)
    compute_service = ServiceDescriptor.compute_service_descriptor(
        service.attributes,
        DataServiceProvider.get_url(p_ocean_instance.config))
    block = p_ocean_instance.web3.eth.blockNumber
    compute_ddo = p_ocean_instance.assets.create(
        metadata,
        pub_wallet,
        service_descriptors=[compute_service],
    )
    did = compute_ddo.did

    ddo_reg = p_ocean_instance.assets.ddo_registry()
    log = ddo_reg.get_event_log(ddo_reg.EVENT_METADATA_CREATED, block,
                                compute_ddo.asset_id, 30)
    assert log, f'no ddo created event.'

    ddo = wait_for_ddo(p_ocean_instance, compute_ddo.did)
    assert ddo, f'resolve did {compute_ddo.did} failed.'

    _compute_ddo = p_ocean_instance.assets.resolve(compute_ddo.did)

    # algorithm with download service
    algorithm_ddo_path = get_resource_path('ddo', 'ddo_sample_algorithm.json')
    algo_main = Asset(json_filename=algorithm_ddo_path).metadata['main']
    algo_meta_dict = algo_main['algorithm'].copy()
    algo_meta_dict['url'] = algo_main['files'][0]['url']
    algorithm_meta = AlgorithmMetadata(algo_meta_dict)

    ######
    # Mint tokens for dataset and assign to publisher
    dt = p_ocean_instance.get_data_token(compute_ddo.data_token_address)
    mint_tokens_and_wait(dt, pub_wallet.address, pub_wallet)

    ######
    # Give the consumer some datatokens so they can order the service
    try:
        tx_id = dt.transfer_tokens(consumer_wallet.address, 10, pub_wallet)
        dt.verify_transfer_tx(tx_id, pub_wallet.address,
                              consumer_wallet.address)
    except (AssertionError, Exception) as e:
        print(e)
        raise

    ######
    # Order compute service from the dataset asset
    order_requirements = cons_ocn.assets.order(
        compute_ddo.did,
        consumer_wallet.address,
        service_type=ServiceTypes.CLOUD_COMPUTE)

    ######
    # Start the order on-chain using the `order` requirements from previous step
    service = compute_ddo.get_service(ServiceTypes.CLOUD_COMPUTE)
    _order_tx_id = cons_ocn.assets.pay_for_service(
        order_requirements.amount, order_requirements.data_token_address,
        compute_ddo.did, service.index,
        '0xF9f2DB837b3db03Be72252fAeD2f6E0b73E428b9', consumer_wallet)

    ######
    job_id = cons_ocn.compute.start(did,
                                    consumer_wallet,
                                    _order_tx_id,
                                    nonce=order_requirements.nonce,
                                    algorithm_meta=algorithm_meta)
    assert job_id, f'expected a job id, got {job_id}'

    status = cons_ocn.compute.status(did, job_id, consumer_wallet)
    print(f'got job status: {status}')
    assert status and status[
        'ok'], f'something not right about the compute job, got status: {status}'

    status = cons_ocn.compute.stop(did, job_id, consumer_wallet)
    print(f'got job status after requesting stop: {status}')
    assert status, f'something not right about the compute job, got status: {status}'
Example #18
def publish_data(
    ocean,
    private_key,
    files,
    name,
    symbol,
    author,
    data_license='CC0: Public Domain',
):
    """Publish a dataset on the Ocean marketplace.
    Publish metadata and service attributes on-chain.
    The service urls will be encrypted before going on-chain.
    They're only decrypted for datatoken owners upon consume.
    Args:
        ocean (Ocean): Ocean instance connected to the target network.
        private_key (str): Private key of the publishing wallet.
        files (list): List of file dicts (`index`, `contentType`, `url`) describing the dataset.
        name (str): Name of the asset and of the new data token.
        symbol (str): Symbol of the new data token.
        author (str): Author of the dataset.
        data_license (str): The license for the data,
            `CC0: Public Domain` by default.
    Returns:
        tuple: The deployed data token and the created asset.
    """
    wallet = Wallet(ocean.web3, private_key, ocean.config.block_confirmations)
    assert wallet.web3.eth.get_balance(wallet.address) > 0, 'need ETH'
    print('Proceeding with wallet:', wallet.address)
    data_token = ocean.create_data_token(name,
                                         symbol,
                                         wallet,
                                         blob=ocean.config.metadata_cache_uri)
    # return data_token
    token_address = data_token.address
    print('Created token:', token_address)
    date_created = datetime.now().isoformat()
    metadata = {
        'main': {
            'type': 'dataset',
            'name': name,
            'author': author,
            'license': data_license,
            'dateCreated': date_created,
            'files': files,
        }
    }
    service_attributes = {
        'main': {
            'name': 'dataAssetAccessServiceAgreement',
            'creator': wallet.address,
            'timeout': 3600 * 24,
            'datePublished': date_created,
            'cost': 1.0,  # <don't change, this is obsolete>
        }
    }
    service_endpoint = DataServiceProvider.get_url(ocean.config)
    # FIXME:
    download_service = Service(
        service_endpoint=service_endpoint,
        service_type=ServiceTypes.ASSET_ACCESS,
        attributes=service_attributes,
    )
    assert wallet.web3.eth.get_balance(wallet.address) > 0, 'need ETH'
    asset = ocean.assets.create(
        metadata,
        wallet,
        # services=[download_service],
        # service_descriptors=[],
        data_token_address=token_address)
    print('Created asset:', asset.data_token_address)
    assert token_address == asset.data_token_address
    return data_token, asset
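Finally, a hedged example of calling `publish_data`, assuming a configured `ocean` instance and a funded key; the private key and file URL are placeholders, and the file dict follows the `files` schema used throughout these examples.

# Illustrative values only; replace the key and URL with real ones.
files = [
    {"index": 0, "contentType": "text/csv", "url": "https://example.com/dataset.csv"},
]
data_token, asset = publish_data(
    ocean,
    private_key="0x...",  # placeholder publisher key
    files=files,
    name="Example Dataset",
    symbol="EXDT",
    author="Example Author",
)
print("token:", data_token.address, "did:", asset.did)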