Example #1
def test_get_network_name(web3):
    assert get_network_name(1) == "mainnet"
    assert get_network_name(4) == "rinkeby"
    assert get_network_name(3) == "ropsten"
    assert get_network_name(137) == "polygon"
    assert get_network_name(1337) == "ganache"
    assert get_network_name(web3=web3) == "ganache"
    assert get_network_name(-1) == "ganache"
    assert get_network_name() == "ganache"
Example #2
def get_bfactory_address(
    address_file: str, network: Optional[str] = None, web3: Optional[Web3] = None
) -> str:
    """Returns the BFactory address for given network or web3 instance
    Requires either network name or web3 instance.
    """
    return BFactory.configured_address(
        network or get_network_name(web3=web3), address_file
    )
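A minimal usage sketch for the helper above, assuming an "addresses.json" artifact file and a connected web3 instance (both are placeholders, not values from the source):
# Hypothetical calls; either a network name or a Web3 instance can be supplied.
bfactory_address = get_bfactory_address("addresses.json", network="rinkeby")
bfactory_address = get_bfactory_address("addresses.json", web3=web3)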
Example #3
def get_ocean_token_address(
    address_file: str, network: Optional[str] = None, web3: Optional[Web3] = None
) -> Optional[str]:
    """Returns the Ocean token address for the given network or web3 instance.
    Requires either a network name or a web3 instance.
    """
    addresses = get_contracts_addresses(
        address_file, network or get_network_name(web3=web3)
    )
    return addresses.get("Ocean") if addresses else None
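Because the helper above returns None when no contract addresses are found, callers may want to guard the result. A hedged sketch with a placeholder address file:
# Hypothetical call; "addresses.json" is a placeholder path.
ocean_address = get_ocean_token_address("addresses.json", web3=web3)
if ocean_address is None:
    raise ValueError("No Ocean token address configured for this network")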
Example #4
    def get_dtfactory(self, dtfactory_address: str = "") -> DTFactory:
        """
        :param dtfactory_address: contract address, str

        :return: `DTFactory` instance
        """
        dtf_address = dtfactory_address or DTFactory.configured_address(
            get_network_name(web3=self.web3), self.config.address_file
        )
        return DTFactory(self.web3, dtf_address)
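Assuming `ocean` is an initialized instance of the class that defines the method above (see Example #13) and `wallet` is a funded Wallet, the factory could be used roughly as follows; the token name, symbol and blob are placeholders:
# Hypothetical usage; the createToken arguments mirror the call shown in Example #7.
dtfactory = ocean.get_dtfactory()
tx_id = dtfactory.createToken(
    "", "DataToken1", "DT1", DataToken.DEFAULT_CAP, from_wallet=wallet
)
data_token_address = dtfactory.get_token_address(tx_id)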
Example #5
def get_ocean_token_address(network=None):
    addresses = get_contracts_addresses(network or get_network_name(),
                                        ConfigProvider.get_config())
    return addresses.get("Ocean") if addresses else None
Example #6
def get_bfactory_address(network=None):
    return BFactory.configured_address(
        network or get_network_name(),
        ConfigProvider.get_config().address_file)
Example #7
    def create(
        self,
        metadata: dict,
        publisher_wallet: Wallet,
        services: Optional[list] = None,
        owner_address: Optional[str] = None,
        data_token_address: Optional[str] = None,
        provider_uri: Optional[str] = None,
        dt_name: Optional[str] = None,
        dt_symbol: Optional[str] = None,
        dt_blob: Optional[str] = None,
        dt_cap: Optional[int] = None,
        encrypt: Optional[bool] = False,
    ) -> Optional[V3Asset]:
        """Register an asset on-chain.

        This deploys a DataToken contract and registers the asset metadata in the
        Metadata store (Aquarius).

        :param metadata: dict conforming to the Metadata accepted by Ocean Protocol.
        :param publisher_wallet: Wallet of the publisher registering this asset
        :param services: list of Service objects.
        :param owner_address: hex str the ethereum address to assign asset ownership to. After
            registering the asset on-chain, the ownership is transferred to this address
        :param data_token_address: hex str the address of the data token smart contract. The new
            asset will be associated with this data token address.
        :param provider_uri: str URL of service provider. This will be used as base to
            construct the serviceEndpoint for the `access` (download) service
        :param dt_name: str name of DataToken if creating a new one
        :param dt_symbol: str symbol of DataToken if creating a new one
        :param dt_blob: str blob of DataToken if creating a new one. A `blob` is any text
            to be stored with the ERC20 DataToken contract for any purpose.
        :param dt_cap: int amount of DataTokens to mint, denoted in wei
        :return: DDO (V3Asset) instance, or None if creating the data token failed
        """
        assert isinstance(
            metadata, dict
        ), f"Expected metadata of type dict, got {type(metadata)}"

        # copy metadata so we don't change the original
        metadata_copy = copy.deepcopy(metadata)
        asset_type = metadata_copy["main"]["type"]
        assert asset_type in (
            "dataset",
            "algorithm",
        ), f"Invalid/unsupported asset type {asset_type}"

        validation_result, validation_errors = self.validate(metadata)
        if not validation_result:
            msg = f"Metadata has validation errors: {validation_errors}"
            logger.error(msg)
            raise ValueError(msg)

        urls = [item["url"] for item in metadata["main"]["files"]]
        if not provider_uri:
            provider_uri = DataServiceProvider.get_url(self._config)
        for url in urls:
            if not DataServiceProvider.check_single_file_info(url, provider_uri):
                msg = f"The URL of this service can not be accessed: {url}."
                logger.error(msg)
                raise ValueError(msg)

        services = services or []
        services = self._add_defaults(
            services, metadata_copy, provider_uri, publisher_wallet
        )

        checksum_dict = dict()
        for service in services:
            checksum_dict[str(service.index)] = checksum(service.main)

        # Create a DDO object
        asset = V3Asset()
        # Adding proof to the ddo.
        asset.add_proof(checksum_dict, publisher_wallet)

        #################
        # DataToken
        address = DTFactory.configured_address(
            get_network_name(web3=self._web3), self._config.address_file
        )
        dtfactory = DTFactory(self._web3, address)
        if not data_token_address:
            blob = dt_blob or ""
            name = dt_name or metadata["main"]["name"]
            symbol = dt_symbol or name
            # register on-chain
            _cap = dt_cap if dt_cap else DataToken.DEFAULT_CAP
            tx_id = dtfactory.createToken(
                blob, name, symbol, _cap, from_wallet=publisher_wallet
            )
            data_token = DataToken(self._web3, dtfactory.get_token_address(tx_id))
            if not data_token:
                logger.warning("Creating new data token failed.")
                return None

            data_token_address = data_token.address

            logger.info(
                f"Successfully created data token with address "
                f"{data_token.address} for new dataset asset."
            )
            # owner_address is set as minter only if creating new data token. So if
            # `data_token_address` is set `owner_address` has no effect.
            if owner_address:
                data_token.proposeMinter(owner_address, from_wallet=publisher_wallet)
        else:
            if not dtfactory.verify_data_token(data_token_address):
                raise ContractNotFound(
                    f"datatoken address {data_token_address} is not found in the DTFactory events."
                )
            # verify data_token_address
            dt = DataToken(self._web3, data_token_address)
            minter = dt.contract.caller.minter()
            if not minter:
                raise AssertionError(
                    f"datatoken address {data_token_address} does not seem to be a valid DataToken contract."
                )
            elif minter.lower() != publisher_wallet.address.lower():
                raise AssertionError(
                    f"Minter of datatoken {data_token_address} is not the same as the publisher."
                )

        assert (
            data_token_address
        ), "data_token_address is required for publishing a dataset asset."

        # Generating the did and adding to the ddo.
        did = f"did:op:{remove_0x_prefix(data_token_address)}"
        asset.did = did
        logger.debug(f"Using datatoken address as did: {did}")
        # Check if it's already registered first!
        if self._get_aquarius().ddo_exists(did):
            raise AquariusError(
                f"Asset id {did} is already registered to another asset."
            )

        for service in services:
            if service.type == ServiceTypes.METADATA:
                ddo_service_endpoint = service.service_endpoint
                if "{did}" in ddo_service_endpoint:
                    ddo_service_endpoint = ddo_service_endpoint.replace("{did}", did)
                    service.service_endpoint = ddo_service_endpoint

            asset.add_service(service)

        asset.proof["signatureValue"] = sign_hash(
            encode_defunct(text=asset.asset_id), publisher_wallet
        )

        # Setup metadata service
        # First compute files_encrypted
        assert metadata_copy["main"][
            "files"
        ], "files is required in the metadata main attributes."
        logger.debug("Encrypting content urls in the metadata.")

        publisher_signature = self._data_provider.sign_message(
            publisher_wallet, asset.asset_id, provider_uri=provider_uri
        )
        _, encrypt_endpoint = self._data_provider.build_encrypt_endpoint(provider_uri)
        files_encrypted = self._data_provider.encrypt_files_dict(
            metadata_copy["main"]["files"],
            encrypt_endpoint,
            asset.asset_id,
            publisher_wallet.address,
            publisher_signature,
        )

        # only assign if the encryption worked
        if files_encrypted:
            logger.debug(f"Content urls encrypted successfully {files_encrypted}")
            index = 0
            for file in metadata_copy["main"]["files"]:
                file["index"] = index
                index = index + 1
                del file["url"]
            metadata_copy["encryptedFiles"] = files_encrypted
        else:
            raise AssertionError("Encrypting the files failed.")

        logger.debug(
            f"Generated asset and services, DID is {asset.did},"
            f" metadata service @{ddo_service_endpoint}."
        )

        # Set datatoken address in the asset
        asset.data_token_address = data_token_address
        flags, asset_contents = self._build_asset_contents(asset, encrypt)

        try:
            # publish the new ddo in ocean-db/Aquarius
            ddo_registry = self.ddo_registry()
            tx_id = ddo_registry.create(
                asset.asset_id, flags, asset_contents, publisher_wallet
            )
            if not ddo_registry.verify_tx(tx_id):
                raise VerifyTxFailed(
                    f"create DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}"
                )
            logger.info("Asset/ddo published on-chain successfully.")
        except ValueError as ve:
            raise ValueError(f"Invalid value to publish in the metadata: {str(ve)}")
        except Exception as e:
            logger.error(f"Publish asset on-chain failed: {str(e)}")
            raise

        return asset
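A hedged sketch of how the `create` method above might be called through an Ocean instance (the `ocean.assets.create(...)` entry point is shown in Example #13's docstring); the metadata dictionary is a minimal placeholder and is not guaranteed to satisfy the full Ocean metadata schema:
# Hypothetical invocation; all field values and the wallet are placeholders.
metadata = {
    "main": {
        "type": "dataset",
        "name": "example-dataset",
        "author": "Example Author",
        "license": "CC0",
        "dateCreated": "2021-01-01T00:00:00Z",
        "files": [{"url": "https://example.com/data.csv", "contentType": "text/csv"}],
    }
}
asset = ocean.assets.create(
    metadata, publisher_wallet, dt_name="ExampleDT", dt_symbol="EXDT"
)
if asset:
    print(f"Published asset with DID {asset.did}")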
Example #8
def test_get_OCEAN_address(publisher_ocean_instance):
    """Tests OCEAN address retrieval."""
    network = get_network_name()
    assert publisher_ocean_instance.pool.get_OCEAN_address() == get_ocean_token_address(
        network
    )
Example #9
    def create(
        self,
        metadata: dict,
        publisher_wallet: Wallet,
        service_descriptors: list = None,
        owner_address: str = None,
        data_token_address: str = None,
        provider_uri=None,
        dt_name: str = None,
        dt_symbol: str = None,
        dt_blob: str = None,
        dt_cap: float = None,
    ) -> Optional[Asset]:
        """Register an asset on-chain.

        This deploys a DataToken contract and registers the asset metadata in the
        Metadata store (Aquarius).

        :param metadata: dict conforming to the Metadata accepted by Ocean Protocol.
        :param publisher_wallet: Wallet of the publisher registering this asset
        :param service_descriptors: list of ServiceDescriptor tuples of length 2.
            The first item must be one of ServiceTypes and the second
            item is a dict of parameters and values required by the service
        :param owner_address: hex str the ethereum address to assign asset ownership to. After
            registering the asset on-chain, the ownership is transferred to this address
        :param data_token_address: hex str the address of the data token smart contract. The new
            asset will be associated with this data token address.
        :param provider_uri: str URL of service provider. This will be used as base to
            construct the serviceEndpoint for the `access` (download) service
        :param dt_name: str name of DataToken if creating a new one
        :param dt_symbol: str symbol of DataToken if creating a new one
        :param dt_blob: str blob of DataToken if creating a new one. A `blob` is any text
            to be stored with the ERC20 DataToken contract for any purpose.
        :param dt_cap: float amount of DataTokens to mint if creating a new one
        :return: DDO (Asset) instance, or None if creating the data token failed
        """
        assert isinstance(
            metadata,
            dict), f"Expected metadata of type dict, got {type(metadata)}"
        assert service_descriptors is None or isinstance(
            service_descriptors, list
        ), f"bad type of `service_descriptors` {type(service_descriptors)}"

        # copy metadata so we don't change the original
        metadata_copy = copy.deepcopy(metadata)
        asset_type = metadata_copy["main"]["type"]
        assert asset_type in (
            "dataset",
            "algorithm",
        ), f"Invalid/unsupported asset type {asset_type}"
        if not plecos.is_valid_dict_local(metadata_copy):
            errors = plecos.list_errors_dict_local(metadata_copy)
            msg = f"Metadata has validation errors: {errors}"
            logger.error(msg)
            raise ValueError(msg)

        urls = [item["url"] for item in metadata["main"]["files"]]
        for url in urls:
            if not DataServiceProvider.check_single_file_info(
                    url, provider_uri):
                msg = f"The URL of this service can not be accessed: {url}."
                logger.error(msg)
                raise ValueError(msg)

        service_descriptors = service_descriptors or []

        services = self._process_service_descriptors(service_descriptors,
                                                     metadata_copy,
                                                     provider_uri,
                                                     publisher_wallet)

        stype_to_service = {s.type: s for s in services}
        checksum_dict = dict()
        for service in services:
            checksum_dict[str(service.index)] = checksum(service.main)

        # Create a DDO object
        asset = Asset()
        # Adding proof to the ddo.
        asset.add_proof(checksum_dict, publisher_wallet)

        #################
        # DataToken
        address = DTFactory.configured_address(get_network_name(),
                                               self._config.address_file)
        dtfactory = DTFactory(address)
        if not data_token_address:
            blob = dt_blob or ""
            name = dt_name or metadata["main"]["name"]
            symbol = dt_symbol or name
            # register on-chain
            _cap = dt_cap if dt_cap else DataToken.DEFAULT_CAP
            tx_id = dtfactory.createToken(blob,
                                          name,
                                          symbol,
                                          to_base_18(_cap),
                                          from_wallet=publisher_wallet)
            data_token = DataToken(dtfactory.get_token_address(tx_id))
            if not data_token:
                logger.warning("Creating new data token failed.")
                return None

            data_token_address = data_token.address

            logger.info(f"Successfully created data token with address "
                        f"{data_token.address} for new dataset asset.")
            # owner_address is set as minter only if creating new data token. So if
            # `data_token_address` is set `owner_address` has no effect.
            if owner_address:
                data_token.proposeMinter(owner_address,
                                         from_wallet=publisher_wallet)
        else:
            # verify data_token_address
            dt = DataToken(data_token_address)
            minter = dt.contract_concise.minter()
            if not minter:
                raise AssertionError(
                    f"datatoken address {data_token_address} does not seem to be a valid DataToken contract."
                )
            elif minter.lower() != publisher_wallet.address.lower():
                raise AssertionError(
                    f"Minter of datatoken {data_token_address} is not the same as the publisher."
                )
            elif not dtfactory.verify_data_token(data_token_address):
                raise ContractNotFound(
                    f"datatoken address {data_token_address} is not found in the DTFactory events."
                )

        assert (
            data_token_address
        ), "data_token_address is required for publishing a dataset asset."

        # Generating the did and adding to the ddo.
        did = asset.assign_did(
            f"did:op:{remove_0x_prefix(data_token_address)}")
        logger.debug(f"Using datatoken address as did: {did}")
        # Check if it's already registered first!
        if did in self._get_aquarius().list_assets():
            raise AquariusError(
                f"Asset id {did} is already registered to another asset.")

        md_service = stype_to_service[ServiceTypes.METADATA]
        ddo_service_endpoint = md_service.service_endpoint
        if "{did}" in ddo_service_endpoint:
            ddo_service_endpoint = ddo_service_endpoint.replace("{did}", did)
            md_service.set_service_endpoint(ddo_service_endpoint)

        # Populate the ddo services
        asset.add_service(md_service)
        access_service = stype_to_service.get(ServiceTypes.ASSET_ACCESS, None)
        compute_service = stype_to_service.get(ServiceTypes.CLOUD_COMPUTE,
                                               None)

        if access_service:
            asset.add_service(access_service)
        if compute_service:
            asset.add_service(compute_service)

        asset.proof["signatureValue"] = sign_hash(
            add_ethereum_prefix_and_hash_msg(asset.asset_id), publisher_wallet)

        # Add public key and authentication
        asset.add_public_key(did, publisher_wallet.address)

        asset.add_authentication(did, PUBLIC_KEY_TYPE_RSA)

        # Setup metadata service
        # First compute files_encrypted
        assert metadata_copy["main"][
            "files"], "files is required in the metadata main attributes."
        logger.debug("Encrypting content urls in the metadata.")

        publisher_signature = self._data_provider.sign_message(
            publisher_wallet, asset.asset_id, provider_uri=provider_uri)
        _, encrypt_endpoint = self._data_provider.build_encrypt_endpoint(
            provider_uri)
        files_encrypted = self._data_provider.encrypt_files_dict(
            metadata_copy["main"]["files"],
            encrypt_endpoint,
            asset.asset_id,
            publisher_wallet.address,
            publisher_signature,
        )

        # only assign if the encryption worked
        if files_encrypted:
            logger.debug(
                f"Content urls encrypted successfully {files_encrypted}")
            index = 0
            for file in metadata_copy["main"]["files"]:
                file["index"] = index
                index = index + 1
                del file["url"]
            metadata_copy["encryptedFiles"] = files_encrypted
        else:
            raise AssertionError("Encrypting the files failed.")

        logger.debug(f"Generated asset and services, DID is {asset.did},"
                     f" metadata service @{ddo_service_endpoint}.")

        # Set datatoken address in the asset
        asset.data_token_address = data_token_address

        try:
            # publish the new ddo in ocean-db/Aquarius
            ddo_registry = self.ddo_registry()
            web3 = Web3Provider.get_web3()
            tx_id = ddo_registry.create(
                asset.asset_id,
                bytes([1]),
                lzma.compress(web3.toBytes(text=asset.as_text())),
                publisher_wallet,
            )
            if not ddo_registry.verify_tx(tx_id):
                raise VerifyTxFailed(
                    f"create DDO on-chain failed, transaction status is 0. Transaction hash is {tx_id}"
                )
            logger.info("Asset/ddo published on-chain successfully.")
        except ValueError as ve:
            raise ValueError(
                f"Invalid value to publish in the metadata: {str(ve)}")
        except Exception as e:
            logger.error(f"Publish asset on-chain failed: {str(e)}")
            raise

        return asset
Example #10
    def __init__(self, config=None, data_provider=None):
        """Initialize Ocean class.

           >> # Make a new Ocean instance
           >> ocean = Ocean({...})

        This class provides the main top-level functions in ocean protocol:
         * Publish asset metadata and associated services
            * Each asset is assigned a unique DID and a DID Document (DDO)
            * The DDO contains the asset's services including the metadata
            * The DID is registered on-chain with a URL of the metadata store
              to retrieve the DDO from

            >> asset = ocean.assets.create(metadata, publisher_wallet)

         * Discover/Search assets via the current configured metadata store (Aquarius)
            >> assets_list = ocean.assets.search('search text')

        An instance of Ocean is parameterized by a `Config` instance.

        :param config: Config instance
        :param data_provider: DataServiceProvider instance
        """
        # Configuration information for the market is stored in the Config class
        # config = Config(filename=config_file, options_dict=config_dict)
        if not config:
            try:
                config = ConfigProvider.get_config()
            except AssertionError:
                config = Config(os.getenv(ENV_CONFIG_FILE))
                ConfigProvider.set_config(config)
        if isinstance(config, dict):
            # fallback to metadataStoreUri
            cache_key = ("metadataCacheUri" if ("metadataCacheUri" in config)
                         else "metadataStoreUri")
            metadata_cache_uri = config.get(
                cache_key,
                config.get("metadata_cache_uri", "http://localhost:5000"))
            config_dict = {
                "eth-network": {
                    "network": config.get("network", "")
                },
                "resources": {
                    "metadata_cache_uri":
                    metadata_cache_uri,
                    "provider.url":
                    config.get("providerUri", "http://localhost:8030"),
                },
            }
            config = Config(options_dict=config_dict)
        ConfigProvider.set_config(config)
        self._config = config
        ContractHandler.set_artifacts_path(self._config.artifacts_path)
        Web3Provider.init_web3(
            provider=get_web3_connection_provider(self._config.network_url))

        self._web3 = Web3Provider.get_web3()

        if not data_provider:
            data_provider = DataServiceProvider

        network = get_network_name()
        addresses = get_contracts_addresses(network, self._config)
        self.assets = OceanAssets(
            self._config, data_provider,
            addresses.get(MetadataContract.CONTRACT_NAME))
        self.services = OceanServices()
        self.compute = OceanCompute(self._config, data_provider)

        ocean_address = get_ocean_token_address(network)
        self.pool = OceanPool(ocean_address, get_bfactory_address(network))
        self.exchange = OceanExchange(
            ocean_address,
            FixedRateExchange.configured_address(
                network or get_network_name(),
                ConfigProvider.get_config().address_file),
            self.config,
        )

        logger.debug("Ocean instance initialized: ")
Example #11
    def get_dtfactory(self, dtfactory_address: str = "") -> DTFactory:
        dtf_address = dtfactory_address or DTFactory.configured_address(
            get_network_name(), self._config.address_file)
        return DTFactory(dtf_address)
Example #12
    def OCEAN_address(self):
        return get_ocean_token_address(get_network_name())
Example #13
    def __init__(
        self, config: Union[Dict, Config], data_provider: Optional[Type] = None
    ) -> None:
        """Initialize Ocean class.

        Usage: Make a new Ocean instance

        `ocean = Ocean({...})`

        This class provides the main top-level functions in ocean protocol:
        1. Publish asset metadata and associated services
            - Each asset is assigned a unique DID and a DID Document (DDO)
            - The DDO contains the asset's services including the metadata
            - The DID is registered on-chain with a URL of the metadata store
              to retrieve the DDO from

            `asset = ocean.assets.create(metadata, publisher_wallet)`

        2. Discover/Search assets via the current configured metadata store (Aquarius)

            - Usage:
            `assets_list = ocean.assets.search('search text')`

        An instance of Ocean is parameterized by a `Config` instance.

        :param config: `Config` instance
        :param data_provider: `DataServiceProvider` instance
        """
        if isinstance(config, dict):
            # fallback to metadataStoreUri
            cache_key = (
                "metadataCacheUri"
                if ("metadataCacheUri" in config)
                else "metadataStoreUri"
            )
            metadata_cache_uri = config.get(
                cache_key, config.get("metadata_cache_uri", "http://localhost:5000")
            )
            config_dict = {
                "eth-network": {"network": config.get("network", "")},
                "resources": {
                    "metadata_cache_uri": metadata_cache_uri,
                    "provider.url": config.get("providerUri", "http://localhost:8030"),
                },
            }
            config = Config(options_dict=config_dict)
        self.config = config
        self.web3 = get_web3(self.config.network_url)

        if not data_provider:
            data_provider = DataServiceProvider

        network = get_network_name(web3=self.web3)
        addresses = get_contracts_addresses(self.config.address_file, network)
        self.assets = OceanAssets(
            self.config,
            self.web3,
            data_provider,
            addresses.get(MetadataContract.CONTRACT_NAME),
        )
        self.compute = OceanCompute(self.config, data_provider)

        ocean_address = get_ocean_token_address(self.config.address_file, network)
        self.pool = OceanPool(
            self.web3,
            ocean_address,
            get_bfactory_address(self.config.address_file, network),
            get_dtfactory_address(self.config.address_file, network),
        )
        self.exchange = OceanExchange(
            self.web3,
            ocean_address,
            FixedRateExchange.configured_address(network, self.config.address_file),
            self.config,
        )

        logger.debug("Ocean instance initialized: ")
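A minimal initialization sketch based on the dict branch of the constructor above; the URLs are placeholders for a local setup, and `get_network_name` is the function this page documents:
# Hypothetical configuration; "network" is the RPC URL read by the dict branch shown above.
config_dict = {
    "network": "http://localhost:8545",
    "metadataCacheUri": "http://localhost:5000",
    "providerUri": "http://localhost:8030",
}
ocean = Ocean(config_dict)
print(get_network_name(web3=ocean.web3))  # e.g. "ganache" for a local chain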