Example #1
    def download(self, wallet: Wallet, tx_id: str, destination_folder: str):
        url = self.blob()
        download_url = (f"{url}?"
                        f"consumerAddress={wallet.address}"
                        f"&dataToken={self.address}"
                        f"&transferTxId={tx_id}")
        response = get_requests_session().get(download_url, stream=True)
        file_name = f"file-{self.address}"
        DataServiceProvider.write_file(response, destination_folder, file_name)
        return os.path.join(destination_folder, file_name)
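All of these examples share the `get_requests_session()` helper. For orientation, here is a minimal sketch of what such a helper commonly looks like: a `requests.Session` with retry-enabled adapters mounted for http and https. The retry counts and backoff values are illustrative assumptions, not the library's exact configuration.

from requests.adapters import HTTPAdapter
from requests.sessions import Session
from urllib3.util.retry import Retry


def get_requests_session() -> Session:
    """Return a Session that retries transient HTTP failures (sketch, assumed values)."""
    session = Session()
    retries = Retry(total=5, backoff_factor=0.8,
                    status_forcelist=[500, 502, 503, 504])
    adapter = HTTPAdapter(max_retries=retries)
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session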
Example #2
def test_download_ipfs_file(client):
    cid = 'QmQfpdcMWnLTXKKW9GPV7NgtEugghgD6HgzSF6gSrp2mL9'
    url = f'ipfs://{cid}'
    download_url = get_download_url(url, None)
    requests_session = get_requests_session()

    request = Mock()
    request.range = None

    print(f'got ipfs download url: {download_url}')
    assert download_url and download_url.endswith(f'ipfs/{cid}')
    response = build_download_response(request, requests_session, download_url, download_url, None)
    assert response.data, f'got no data {response.data}'
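The assertion above expects an `ipfs://<cid>` URL to be rewritten to a gateway URL ending in `ipfs/<cid>`. A plausible sketch of that mapping follows; the `IPFS_GATEWAY` environment variable and the default gateway host are assumptions, not necessarily what `get_download_url` actually does.

import os
from urllib.parse import urljoin


def get_download_url(url, config_file=None):
    """Rewrite ipfs:// URLs to an HTTP gateway URL; pass anything else through (sketch)."""
    if url and url.startswith("ipfs://"):
        gateway = os.getenv("IPFS_GATEWAY", "https://gateway.ipfs.io")
        return urljoin(gateway.rstrip("/") + "/", "ipfs/" + url[len("ipfs://"):])
    return url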
Example #3
def request_ether(faucet_url, account, wait=True):
    requests = get_requests_session()

    payload = {"address": account.address}
    response = requests.post(f'{faucet_url}/faucet',
                             data=json.dumps(payload),
                             headers={'content-type': 'application/json'})
    try:
        response_json = json.loads(response.content)
        success = response_json.get('success', 'false') == 'true'
        if success and wait:
            time.sleep(5)

        return success, response_json.get('message', '')
    except (ValueError, Exception) as err:
        print(f'Error parsing response {response}: {err}')
        return None, None
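A typical call, with placeholder values for the faucet URL and the account (any object exposing an `.address` attribute works here):

from types import SimpleNamespace

# Placeholder values for illustration only.
account = SimpleNamespace(address="0x0000000000000000000000000000000000000000")
success, message = request_ether("https://faucet.example.com", account, wait=False)
if not success:
    print(f"faucet request failed: {message}")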
Example #4
    def __init__(self, aquarius_url):
        """
        The Metadata class is a wrapper on the Metadata Store, which has exposed a REST API.

        :param aquarius_url: Url of the aquarius instance.
        """
        assert aquarius_url, f'Invalid url "{aquarius_url}"'
        # :HACK:
        if '/api/v1/aquarius/assets' in aquarius_url:
            aquarius_url = aquarius_url[:aquarius_url.find('/api/v1/aquarius/assets')]

        self._base_url = f'{aquarius_url}/api/v1/aquarius/assets'
        self._headers = {'content-type': 'application/json'}

        logging.debug(f'Metadata Store connected at {aquarius_url}')
        logging.debug(f'Metadata Store API documentation at {aquarius_url}/api/v1/docs')
        logging.debug(f'Metadata assets at {self._base_url}')

        self.requests_session = get_requests_session()
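Because of the normalization above, passing either the bare Aquarius URL or the full assets endpoint yields the same base URL. A quick illustration with a placeholder hostname:

# Both forms resolve to the same assets endpoint (placeholder hostname).
m1 = Metadata("https://aquarius.example.com")
m2 = Metadata("https://aquarius.example.com/api/v1/aquarius/assets")
assert m1._base_url == m2._base_url == "https://aquarius.example.com/api/v1/aquarius/assets"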
Example #5
    def download_service(
        did,
        service_endpoint,
        wallet,
        files,
        destination_folder,
        service_id,
        token_address,
        order_tx_id,
        index=None,
    ):

        indexes = range(len(files))
        if index is not None:
            assert isinstance(index, int), logger.error("index has to be an integer.")
            assert index >= 0, logger.error("index has to be 0 or a positive integer.")
            assert index < len(files), logger.error(
                "index can not be bigger than the number of files"
            )
            indexes = [index]

        base_url = (
            f"{service_endpoint}"
            f"?documentId={did}"
            f"&serviceId={service_id}"
            f"&serviceType={ServiceTypes.ASSET_ACCESS}"
            f"&dataToken={token_address}"
            f"&transferTxId={order_tx_id}"
            f"&consumerAddress={wallet.address}"
        )
        provider_uri = DataProviderMock.build_download_endpoint(service_endpoint)[1]
        for i in indexes:
            signature = DataServiceProvider.sign_message(
                wallet, did, provider_uri=provider_uri
            )
            download_url = base_url + f"&signature={signature}&fileIndex={i}"
            logger.info(f"invoke consume endpoint with this url: {download_url}")
            http_client = get_requests_session()
            response = http_client.get(download_url, stream=True)
            file_name = DataServiceProvider._get_file_name(response)
            DataServiceProvider.write_file(
                response, destination_folder, file_name or f"file-{i}"
            )
Example #6
def request_ether(faucet_url, wallet, wait=True):
    requests = get_requests_session()

    payload = {"address": wallet.address}
    response = requests.post(
        f"{faucet_url}/faucet",
        data=json.dumps(payload),
        headers={"content-type": "application/json"},
        timeout=3,
    )
    try:
        response_json = json.loads(response.content)
        success = response_json.get("success", "false") == "true"
        if success and wait:
            time.sleep(5)

        return success, response_json.get("message", "")
    except (ValueError, Exception) as err:
        print(f"Error parsing response {response}: {err}")
        return None, None
Example #7
class Brizo:
    """
    `Brizo` is the name chosen for the asset service provider.

    The main functions available are:
    - consume_service
    - run_compute_service (not implemented yet)

    """
    _http_client = get_requests_session()

    @staticmethod
    def set_http_client(http_client):
        """Set the http client to something other than the default `requests`"""
        Brizo._http_client = http_client

    @staticmethod
    def encrypt_files_dict(files_dict, encrypt_endpoint, asset_id,
                           account_address, signed_did):
        payload = json.dumps({
            'documentId': asset_id,
            'signedDocumentId': signed_did,
            'document': json.dumps(files_dict),
            'publisherAddress': account_address
        })
        response = Brizo._http_client.post(
            encrypt_endpoint,
            data=payload,
            headers={'content-type': 'application/json'})
        if response and hasattr(response, 'status_code'):
            if response.status_code != 201:
                msg = (
                    f'Encrypt file urls failed at the encryptEndpoint '
                    f'{encrypt_endpoint}, reason {response.text}, status {response.status_code}'
                )
                logger.error(msg)
                raise OceanEncryptAssetUrlsError(msg)

            logger.info(
                f'Asset urls encrypted successfully, encrypted urls str: {response.text},'
                f' encryptedEndpoint {encrypt_endpoint}')

            return response.text

    @staticmethod
    def consume_service(agreement_id,
                        service_endpoint,
                        account,
                        files,
                        destination_folder,
                        index=None):
        """
        Call the brizo endpoint to get access to the different files that form the asset.

        :param agreement_id: Service Agreement Id, hex str
        :param service_endpoint: Url to consume, str
        :param account: Account instance of the consumer signing this agreement, hex-str
        :param files: List containing the files to be consumed, list
        :param index: Index of the document that is going to be downloaded, int
        :param destination_folder: Path, str
        :return: True if it was downloaded, bool
        """
        signature = Keeper.get_instance().sign_hash(
            add_ethereum_prefix_and_hash_msg(agreement_id), account)

        if index is not None:
            assert isinstance(index,
                              int), logger.error('index has to be an integer.')
            assert index >= 0, logger.error(
                'index has to be 0 or a positive integer.')
            assert index < len(files), logger.error(
                'index can not be bigger than the number of files')
            consume_url = Brizo._create_consume_url(service_endpoint,
                                                    agreement_id, account,
                                                    None, signature, index)
            logger.info(
                f'invoke consume endpoint with this url: {consume_url}')
            response = Brizo._http_client.get(consume_url, stream=True)
            file_name = Brizo._get_file_name(response)
            Brizo.write_file(response, destination_folder, file_name)
        else:
            for i, _file in enumerate(files):
                consume_url = Brizo._create_consume_url(
                    service_endpoint, agreement_id, account, _file, signature,
                    i)
                logger.info(
                    f'invoke consume endpoint with this url: {consume_url}')
                response = Brizo._http_client.get(consume_url, stream=True)
                file_name = Brizo._get_file_name(response)
                Brizo.write_file(response, destination_folder, file_name
                                 or f'file-{i}')

    @staticmethod
    def start_compute_job(agreement_id,
                          service_endpoint,
                          account_address,
                          signature,
                          algorithm_did=None,
                          algorithm_meta=None,
                          output=None,
                          job_id=None):
        """

        :param agreement_id: Service Agreement Id, hex str
        :param service_endpoint:
        :param account_address: hex str the ethereum address of the consumer executing the compute job
        :param signature: hex str signed message to allow the provider to authorize the consumer
        :param algorithm_did: str -- the asset did (of `algorithm` type) which consists of `did:op:` and
            the assetId hex str (without `0x` prefix)
        :param algorithm_meta: see `OceanCompute.execute`
        :param output: see `OceanCompute.execute`
        :param job_id: str id of compute job that was started and stopped (optional, use it
            here to start a job after it was stopped)

        :return: job_info dict with jobId, status, and other values
        """
        assert algorithm_did or algorithm_meta, 'either an algorithm did or an algorithm meta must be provided.'

        payload = Brizo._prepare_compute_payload(agreement_id, account_address,
                                                 signature, algorithm_did,
                                                 algorithm_meta, output,
                                                 job_id)
        logger.info(f'invoke start compute endpoint with this url: {payload}')
        response = Brizo._http_client.post(
            service_endpoint,
            data=json.dumps(payload),
            headers={'content-type': 'application/json'})
        logger.debug(
            f'got brizo execute response: {response.content} with status-code {response.status_code} '
        )
        if response.status_code not in (201, 200):
            raise Exception(response.content.decode('utf-8'))

        try:
            job_info = json.loads(response.content.decode('utf-8'))
            if isinstance(job_info, list):
                return job_info[0]
            return job_info

        except KeyError as err:
            logger.error(f'Failed to extract jobId from response: {err}')
            raise KeyError(f'Failed to extract jobId from response: {err}')
        except JSONDecodeError as err:
            logger.error(f'Failed to parse response json: {err}')
            raise

    @staticmethod
    def stop_compute_job(agreement_id, job_id, service_endpoint,
                         account_address, signature):
        """

        :param agreement_id: hex str Service Agreement Id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param account_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: bool whether the job was stopped successfully
        """
        return Brizo._send_compute_request('put', agreement_id, job_id,
                                           service_endpoint, account_address,
                                           signature)

    @staticmethod
    def restart_compute_job(agreement_id, job_id, service_endpoint,
                            account_address, signature):
        """

        :param agreement_id: hex str Service Agreement Id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param account_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: bool whether the job was restarted successfully
        """
        Brizo.stop_compute_job(agreement_id, job_id, service_endpoint,
                               account_address, signature)
        return Brizo.start_compute_job(agreement_id,
                                       service_endpoint,
                                       account_address,
                                       signature,
                                       job_id=job_id)

    @staticmethod
    def delete_compute_job(agreement_id, job_id, service_endpoint,
                           account_address, signature):
        """

        :param agreement_id: hex str Service Agreement Id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param account_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: bool whether the job was deleted successfully
        """
        return Brizo._send_compute_request('delete', agreement_id, job_id,
                                           service_endpoint, account_address,
                                           signature)

    @staticmethod
    def compute_job_status(agreement_id, job_id, service_endpoint,
                           account_address, signature):
        """

        :param agreement_id: hex str Service Agreement Id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param account_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: dict of job_id to status info. When job_id is not provided, this will return
            status for each job_id that exists for the agreement_id
        """
        return Brizo._send_compute_request('get', agreement_id, job_id,
                                           service_endpoint, account_address,
                                           signature)

    @staticmethod
    def compute_job_result(agreement_id, job_id, service_endpoint,
                           account_address, signature):
        """

        :param agreement_id: hex str Service Agreement Id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param account_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: dict of job_id to result urls. When job_id is not provided, this will return
            result for each job_id that exists for the agreement_id
        """
        return Brizo._send_compute_request('get', agreement_id, job_id,
                                           service_endpoint, account_address,
                                           signature)

    @staticmethod
    def get_brizo_url(config):
        """
        Return the Brizo component url.

        :param config: Config
        :return: Url, str
        """
        brizo_url = 'http://localhost:8030'
        if config.has_option('resources', 'brizo.url'):
            brizo_url = config.get('resources', 'brizo.url') or brizo_url

        brizo_path = '/api/v1/brizo'
        return f'{brizo_url}{brizo_path}'

    @staticmethod
    def get_consume_endpoint(config):
        """
        Return the url to consume the asset.

        :param config: Config
        :return: Url, str
        """
        return f'{Brizo.get_brizo_url(config)}/services/consume'

    @staticmethod
    def get_compute_endpoint(config):
        """
        Return the url to execute the asset.

        :param config: Config
        :return: Url, str
        """
        return f'{Brizo.get_brizo_url(config)}/services/compute'

    @staticmethod
    def get_encrypt_endpoint(config):
        """
        Return the url to encrypt the asset.

        :param config: Config
        :return: Url, str
        """
        return f'{Brizo.get_brizo_url(config)}/services/publish'

    @staticmethod
    def write_file(response, destination_folder, file_name):
        """
        Write the response content in a file in the destination folder.
        :param response: Response
        :param destination_folder: Destination folder, string
        :param file_name: File name, string
        :return: bool
        """
        if response.status_code == 200:
            with open(os.path.join(destination_folder, file_name), 'wb') as f:
                for chunk in response.iter_content(chunk_size=None):
                    f.write(chunk)
            logger.info(f'Saved downloaded file in {f.name}')
        else:
            logger.warning(f'consume failed: {response.reason}')

    @staticmethod
    def _send_compute_request(http_method, agreement_id, job_id,
                              service_endpoint, account_address, signature):
        compute_url = (f'{service_endpoint}'
                       f'?signature={signature}'
                       f'&serviceAgreementId={agreement_id}'
                       f'&consumerAddress={account_address}'
                       f'&jobId={job_id or ""}')
        logger.info(f'invoke compute endpoint with this url: {compute_url}')
        method = getattr(Brizo._http_client, http_method)
        response = method(compute_url)
        print(
            f'got brizo execute response: {response.content} with status-code {response.status_code} '
        )
        if response.status_code != 200:
            raise Exception(response.content.decode('utf-8'))

        resp_content = json.loads(response.content.decode('utf-8'))
        if isinstance(resp_content, list):
            return resp_content[0]
        return resp_content

    @staticmethod
    def _get_file_name(response):
        try:
            return re.match(r'attachment;filename=(.+)',
                            response.headers.get('content-disposition'))[1]
        except Exception as e:
            logger.warning(f'It was not possible to get the file name. {e}')

    @staticmethod
    def _create_consume_url(service_endpoint,
                            agreement_id,
                            account,
                            _file=None,
                            signature=None,
                            index=None):
        if _file is not None and 'url' in _file:
            url = _file['url']
            if url.startswith('"') or url.startswith("'"):
                url = url[1:-1]
            return (f'{service_endpoint}'
                    f'?url={url}'
                    f'&serviceAgreementId={agreement_id}'
                    f'&consumerAddress={account.address}')
        else:
            return (f'{service_endpoint}'
                    f'?signature={signature}'
                    f'&serviceAgreementId={agreement_id}'
                    f'&consumerAddress={account.address}'
                    f'&index={index}')

    @staticmethod
    def _prepare_compute_payload(agreement_id,
                                 account_address,
                                 signature=None,
                                 algorithm_did=None,
                                 algorithm_meta=None,
                                 output=None,
                                 job_id=None):
        assert algorithm_did or algorithm_meta, 'either an algorithm did or an algorithm meta must be provided.'

        if algorithm_meta:
            assert isinstance(algorithm_meta, AlgorithmMetadata), f'expecting an AlgorithmMetadata type ' \
                                                                  f'for `algorithm_meta`, got {type(algorithm_meta)}'
            algorithm_meta = algorithm_meta.as_dictionary()

        return {
            'signature': signature,
            'serviceAgreementId': agreement_id,
            'consumerAddress': account_address,
            'algorithmDID': algorithm_did,
            'algorithmMeta': algorithm_meta,
            'output': output or dict(),
            'jobId': job_id or ""
        }
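Since the HTTP client is stored as a class attribute, tests can replace it through `set_http_client` and avoid real network traffic. A minimal sketch with `unittest.mock` (the stubbed response fields are placeholders):

from unittest.mock import Mock

# Stub out the HTTP layer; status code, text and content below are placeholders.
fake_response = Mock(status_code=200, text="encrypted-urls", content=b"{}")
fake_client = Mock()
fake_client.get.return_value = fake_response
fake_client.post.return_value = fake_response
Brizo.set_http_client(fake_client)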
Example #8
class DataServiceProvider:
    """DataServiceProvider class.

    The main functions available are:
    - consume_service
    - run_compute_service (not implemented yet)
    """

    _http_client = get_requests_session()
    API_VERSION = "/api/v1"
    provider_info = None

    @staticmethod
    def get_http_client():
        """Get the http client."""
        return DataServiceProvider._http_client

    @staticmethod
    def set_http_client(http_client):
        """Set the http client to something other than the default `requests`."""
        DataServiceProvider._http_client = http_client

    @staticmethod
    def encrypt_files_dict(files_dict, encrypt_endpoint, asset_id,
                           publisher_address, signed_did):
        payload = json.dumps({
            "documentId": asset_id,
            "signature": signed_did,
            "document": json.dumps(files_dict),
            "publisherAddress": publisher_address,
        })

        response = DataServiceProvider._http_method(
            "post",
            encrypt_endpoint,
            data=payload,
            headers={"content-type": "application/json"},
        )
        if response and hasattr(response, "status_code"):
            if response.status_code != 201:
                msg = (
                    f"Encrypt file urls failed at the encryptEndpoint "
                    f"{encrypt_endpoint}, reason {response.text}, status {response.status_code}"
                )
                logger.error(msg)
                raise OceanEncryptAssetUrlsError(msg)

            logger.info(
                f"Asset urls encrypted successfully, encrypted urls str: {response.text},"
                f" encryptedEndpoint {encrypt_endpoint}")

            return response.json()["encryptedDocument"]

    @staticmethod
    def sign_message(wallet, msg, nonce=None, provider_uri=None):
        if nonce is None:
            nonce = DataServiceProvider.get_nonce(wallet.address, provider_uri)
        print(
            f"signing message with nonce {nonce}: {msg}, account={wallet.address}"
        )
        return Web3Helper.sign_hash(
            add_ethereum_prefix_and_hash_msg(f"{msg}{nonce}"), wallet)

    @staticmethod
    def get_nonce(user_address, provider_uri):
        _, url = DataServiceProvider.build_endpoint("nonce",
                                                    provider_uri=provider_uri)
        response = DataServiceProvider._http_method(
            "get", f"{url}?userAddress={user_address}")
        if response.status_code != 200:
            return None

        return response.json()["nonce"]

    @staticmethod
    def get_order_requirements(did, service_endpoint, consumer_address,
                               service_id, service_type, token_address):
        """

        :param did:
        :param service_endpoint:
        :param consumer_address: hex str the ethereum account address of the consumer
        :param service_id:
        :param service_type:
        :param token_address:
        :return: OrderRequirements instance -- named tuple (amount, data_token_address, receiver_address, nonce),
        """
        initialize_url = (f"{service_endpoint}"
                          f"?documentId={did}"
                          f"&serviceId={service_id}"
                          f"&serviceType={service_type}"
                          f"&dataToken={token_address}"
                          f"&consumerAddress={consumer_address}")

        logger.info(
            f"invoke the initialize endpoint with this url: {initialize_url}")
        response = DataServiceProvider._http_method("get", initialize_url)
        # The returned json should contain information about the required number of tokens
        # to consume `service_id`. If service is not available there will be an error or
        # the returned json is empty.
        if response.status_code != 200:
            return None
        order = dict(response.json())

        return OrderRequirements(
            float(order["numTokens"]),
            order["dataToken"],
            order["to"],
            int(order["nonce"]),
            order.get("computeAddress"),
        )

    @staticmethod
    def download_service(
        did,
        service_endpoint,
        wallet,
        files,
        destination_folder,
        service_id,
        token_address,
        order_tx_id,
        index=None,
    ):
        """
        Call the provider endpoint to get access to the different files that form the asset.

        :param did: str id of the asset
        :param service_endpoint: Url to consume, str
        :param wallet: Wallet instance of the consumer signing this request
        :param files: List containing the files to be consumed, list
        :param destination_folder: Path, str
        :param service_id: integer the id of the service inside the DDO's service dict
        :param token_address: hex str the data token address associated with this asset/service
        :param order_tx_id: hex str the transaction hash for the required data token
            transfer (tokens of the same token address above)
        :param index: Index of the document that is going to be downloaded, int
        :return: True if it was downloaded, bool
        """
        indexes = range(len(files))
        if index is not None:
            assert isinstance(index,
                              int), logger.error("index has to be an integer.")
            assert index >= 0, logger.error(
                "index has to be 0 or a positive integer.")
            assert index < len(files), logger.error(
                "index can not be bigger than the number of files")
            indexes = [index]

        base_url = (f"{service_endpoint}"
                    f"?documentId={did}"
                    f"&serviceId={service_id}"
                    f"&serviceType={ServiceTypes.ASSET_ACCESS}"
                    f"&dataToken={token_address}"
                    f"&transferTxId={order_tx_id}"
                    f"&consumerAddress={wallet.address}")
        provider_uri = DataServiceProvider.get_root_uri(service_endpoint)
        for i in indexes:
            signature = DataServiceProvider.sign_message(
                wallet, did, provider_uri=provider_uri)
            download_url = base_url + f"&signature={signature}&fileIndex={i}"
            logger.info(
                f"invoke consume endpoint with this url: {download_url}")
            response = DataServiceProvider._http_method("get",
                                                        download_url,
                                                        stream=True)
            file_name = DataServiceProvider._get_file_name(response)
            DataServiceProvider.write_file(response, destination_folder,
                                           file_name or f"file-{i}")

    @staticmethod
    def start_compute_job(
        did: str,
        service_endpoint: str,
        consumer_address: str,
        signature: str,
        service_id: int,
        order_tx_id: str,
        algorithm_did: str = None,
        algorithm_meta: AlgorithmMetadata = None,
        algorithm_tx_id: str = None,
        algorithm_data_token: str = None,
        output: dict = None,
        input_datasets: list = None,
        job_id: str = None,
    ):
        """

        :param did: id of asset starting with `did:op:` and a hex str without 0x prefix
        :param service_endpoint:
        :param consumer_address: hex str the ethereum address of the consumer executing the compute job
        :param signature: hex str signed message to allow the provider to authorize the consumer
        :param service_id:
        :param order_tx_id: hex str id of the token transfer transaction
        :param algorithm_did: str -- the asset did (of `algorithm` type) which consists of `did:op:` and
            the assetId hex str (without `0x` prefix)
        :param algorithm_meta: see `OceanCompute.execute`
        :param algorithm_tx_id: transaction hash of algorithm StartOrder tx (Required when using `algorithm_did`)
        :param algorithm_data_token: datatoken address of this algorithm (Required when using `algorithm_did`)
        :param output: see `OceanCompute.execute`
        :param input_datasets: list of ComputeInput
        :param job_id: str id of compute job that was started and stopped (optional, use it
            here to start a job after it was stopped)

        :return: job_info dict with jobId, status, and other values
        """
        assert (
            algorithm_did or algorithm_meta
        ), "either an algorithm did or an algorithm meta must be provided."

        payload = DataServiceProvider._prepare_compute_payload(
            did,
            consumer_address,
            service_id,
            order_tx_id,
            signature=signature,
            algorithm_did=algorithm_did,
            algorithm_meta=algorithm_meta,
            algorithm_tx_id=algorithm_tx_id,
            algorithm_data_token=algorithm_data_token,
            output=output,
            input_datasets=input_datasets,
            job_id=job_id,
        )
        logger.info(f"invoke start compute endpoint with this url: {payload}")
        response = DataServiceProvider._http_method(
            "post",
            service_endpoint,
            data=json.dumps(payload),
            headers={"content-type": "application/json"},
        )
        logger.debug(
            f"got DataProvider execute response: {response.content} with status-code {response.status_code} "
        )
        if not response:
            raise AssertionError(
                f"Failed to get a response for request: serviceEndpoint={service_endpoint}, payload={payload}"
            )

        if response.status_code not in (201, 200):
            raise ValueError(response.content.decode("utf-8"))

        try:
            job_info = json.loads(response.content.decode("utf-8"))
            if isinstance(job_info, list):
                return job_info[0]
            return job_info

        except KeyError as err:
            logger.error(f"Failed to extract jobId from response: {err}")
            raise KeyError(f"Failed to extract jobId from response: {err}")
        except JSONDecodeError as err:
            logger.error(f"Failed to parse response json: {err}")
            raise

    @staticmethod
    def stop_compute_job(did, job_id, service_endpoint, consumer_address,
                         signature):
        """

        :param did: hex str the asset/DDO id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param consumer_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: bool whether the job was stopped successfully
        """
        return DataServiceProvider._send_compute_request(
            "put", did, job_id, service_endpoint, consumer_address, signature)

    @staticmethod
    def restart_compute_job(
        did,
        job_id,
        service_endpoint,
        consumer_address,
        signature,
        service_id,
        order_tx_id,
        algorithm_did=None,
        algorithm_meta=None,
        output=None,
        input_datasets=None,
    ):
        """

        :param did: id of asset starting with `did:op:` and a hex str without 0x prefix
        :param job_id: str id of compute job that was started and stopped (optional, use it
            here to start a job after it was stopped)
        :param service_endpoint:
        :param consumer_address: hex str the ethereum address of the consumer executing the compute job
        :param signature: hex str signed message to allow the provider to authorize the consumer
        :param service_id:
        :param order_tx_id: hex str id of the token transfer transaction
        :param algorithm_did: str -- the asset did (of `algorithm` type) which consists of `did:op:` and
            the assetId hex str (without `0x` prefix)
        :param algorithm_meta: see `OceanCompute.execute`
        :param output: see `OceanCompute.execute`
        :param input_datasets: list of ComputeInput

        :return: bool whether the job was restarted successfully
        """
        DataServiceProvider.stop_compute_job(did, job_id, service_endpoint,
                                             consumer_address, signature)
        return DataServiceProvider.start_compute_job(
            did,
            service_endpoint,
            consumer_address,
            signature,
            service_id,
            order_tx_id,
            algorithm_did,
            algorithm_meta,
            output,
            input_datasets=input_datasets,
            job_id=job_id,
        )

    @staticmethod
    def delete_compute_job(did, job_id, service_endpoint, consumer_address,
                           signature):
        """

        :param did: hex str the asset/DDO id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param consumer_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: bool whether the job was deleted successfully
        """
        return DataServiceProvider._send_compute_request(
            "delete", did, job_id, service_endpoint, consumer_address,
            signature)

    @staticmethod
    def compute_job_status(did, job_id, service_endpoint, consumer_address,
                           signature):
        """

        :param did: hex str the asset/DDO id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param consumer_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: dict of job_id to status info. When job_id is not provided, this will return
            status for each job_id that exists for the did
        """
        return DataServiceProvider._send_compute_request(
            "get", did, job_id, service_endpoint, consumer_address, signature)

    @staticmethod
    def compute_job_result(did, job_id, service_endpoint, consumer_address,
                           signature):
        """

        :param did: hex str the asset/DDO id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param consumer_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: dict of job_id to result urls. When job_id is not provided, this will return
            result for each job_id that exists for the did
        """
        return DataServiceProvider._send_compute_request(
            "get", did, job_id, service_endpoint, consumer_address, signature)

    @staticmethod
    def _remove_slash(path):
        if path.endswith("/"):
            path = path[:-1]
        if path.startswith("/"):
            path = path[1:]
        return path

    @staticmethod
    def get_url(config):
        """
        Return the DataProvider component url.

        :param config: Config
        :return: Url, str
        """
        return DataServiceProvider._remove_slash(config.provider_url
                                                 or "http://localhost:8030")

    @staticmethod
    def get_api_version():
        return DataServiceProvider._remove_slash(
            os.getenv(ENV_PROVIDER_API_VERSION,
                      DataServiceProvider.API_VERSION))

    @staticmethod
    def get_service_endpoints(provider_uri=None):
        """
        Return the service endpoints from the provider URL.
        """
        if not provider_uri:
            provider_uri = DataServiceProvider.get_url(
                ConfigProvider.get_config())

        api_version = DataServiceProvider.get_api_version()
        if api_version in provider_uri:
            i = provider_uri.find(api_version)
            provider_uri = provider_uri[:i]
        provider_info = DataServiceProvider._http_method("get",
                                                         provider_uri).json()

        return provider_info["serviceEndpoints"]

    @staticmethod
    def get_provider_address(provider_uri=None):
        """
        Return the provider address
        """
        if not provider_uri:
            provider_uri = ConfigProvider.get_config().provider_url
        provider_info = DataServiceProvider._http_method("get",
                                                         provider_uri).json()
        return provider_info["providerAddress"]

    @staticmethod
    def get_root_uri(service_endpoint):
        provider_uri = service_endpoint
        api_version = DataServiceProvider.get_api_version()
        if api_version in provider_uri:
            i = provider_uri.find(api_version)
            provider_uri = provider_uri[:i]
        parts = provider_uri.split("/")

        if len(parts) < 2:
            raise InvalidURLException(f"InvalidURL {service_endpoint}.")

        if parts[-2] == "services":
            provider_uri = "/".join(parts[:-2])

        result = DataServiceProvider._remove_slash(provider_uri)

        if not result:
            raise InvalidURLException(f"InvalidURL {service_endpoint}.")

        return result

    @staticmethod
    def build_endpoint(service_name, provider_uri=None, config=None):
        if not provider_uri:
            config = config or ConfigProvider.get_config()
            provider_uri = DataServiceProvider.get_url(config)

        provider_uri = DataServiceProvider.get_root_uri(provider_uri)
        service_endpoints = DataServiceProvider.get_service_endpoints(
            provider_uri)

        method, url = service_endpoints[service_name]
        return method, urljoin(provider_uri, url)

    @staticmethod
    def build_encrypt_endpoint(provider_uri=None):
        return DataServiceProvider.build_endpoint("encrypt", provider_uri)

    @staticmethod
    def build_initialize_endpoint(provider_uri=None):
        return DataServiceProvider.build_endpoint("initialize", provider_uri)

    @staticmethod
    def build_download_endpoint(provider_uri=None):
        return DataServiceProvider.build_endpoint("download", provider_uri)

    @staticmethod
    def build_compute_endpoint(provider_uri=None):
        return DataServiceProvider.build_endpoint("computeStatus",
                                                  provider_uri)

    @staticmethod
    def build_fileinfo(provider_uri=None):
        return DataServiceProvider.build_endpoint("fileinfo", provider_uri)

    @staticmethod
    def write_file(response, destination_folder, file_name):
        """
        Write the response content in a file in the destination folder.
        :param response: Response
        :param destination_folder: Destination folder, string
        :param file_name: File name, string
        :return: bool
        """
        if response.status_code == 200:
            with open(os.path.join(destination_folder, file_name), "wb") as f:
                for chunk in response.iter_content(chunk_size=None):
                    f.write(chunk)
            logger.info(f"Saved downloaded file in {f.name}")
        else:
            logger.warning(f"consume failed: {response.reason}")

    @staticmethod
    def _send_compute_request(http_method, did, job_id, service_endpoint,
                              consumer_address, signature):
        compute_url = (f"{service_endpoint}"
                       f"?signature={signature}"
                       f"&documentId={did}"
                       f"&consumerAddress={consumer_address}"
                       f'&jobId={job_id or ""}')
        logger.info(f"invoke compute endpoint with this url: {compute_url}")
        response = DataServiceProvider._http_method(http_method, compute_url)
        logger.debug(
            f"got provider execute response: {response.content} with status-code {response.status_code} "
        )
        if response.status_code != 200:
            raise Exception(response.content.decode("utf-8"))

        resp_content = json.loads(response.content.decode("utf-8"))
        if isinstance(resp_content, list):
            return resp_content[0]
        return resp_content

    @staticmethod
    def _get_file_name(response):
        try:
            return re.match(r"attachment;filename=(.+)",
                            response.headers.get("content-disposition"))[1]
        except Exception as e:
            logger.warning(f"It was not possible to get the file name. {e}")

    @staticmethod
    def _prepare_compute_payload(
        did: str,
        consumer_address: str,
        service_id: int,
        order_tx_id: str,
        signature: str = None,
        algorithm_did: str = None,
        algorithm_meta=None,
        algorithm_tx_id: str = None,
        algorithm_data_token: str = None,
        output: dict = None,
        input_datasets: list = None,
        job_id: str = None,
    ):
        assert (
            algorithm_did or algorithm_meta
        ), "either an algorithm did or an algorithm meta must be provided."

        if algorithm_meta:
            assert isinstance(algorithm_meta, AlgorithmMetadata), (
                f"expecting a AlgorithmMetadata type "
                f"for `algorithm_meta`, got {type(algorithm_meta)}")
            algorithm_meta = algorithm_meta.as_dictionary()

        _input_datasets = []
        if input_datasets:
            for _input in input_datasets:
                assert _input.did
                assert _input.transfer_tx_id
                assert _input.service_id
                if _input.did != did:
                    _input_datasets.append(_input.as_dictionary())

        payload = {
            "signature": signature,
            "documentId": did,
            "consumerAddress": consumer_address,
            "output": output or dict(),
            "jobId": job_id or "",
            "serviceId": service_id,
            "transferTxId": order_tx_id,
            "additionalInputs": _input_datasets or [],
        }
        if algorithm_did:
            payload.update({
                "algorithmDid": algorithm_did,
                "algorithmDataToken": algorithm_data_token,
                "algorithmTransferTxId": algorithm_tx_id,
            })
        else:
            payload["algorithmMeta"] = algorithm_meta

        return payload

    @staticmethod
    def _http_method(method, *args, **kwargs):
        try:
            return getattr(DataServiceProvider._http_client, method)(*args,
                                                                     **kwargs)
        except Exception:
            logger.error(
                f"Error invoking http method {method}: args={str(args)}, kwargs={str(kwargs)}"
            )
            raise
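To illustrate `get_root_uri`: a full service endpoint is reduced to the provider's root URL by cutting everything from the API-version path onward (or a trailing `services/<name>` segment when no version is present). A quick illustration with a placeholder endpoint, assuming the default `/api/v1` API version:

# Placeholder endpoint for illustration.
root = DataServiceProvider.get_root_uri("https://provider.example.com/api/v1/services/download")
assert root == "https://provider.example.com"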
Example #9
    get_download_url, get_metadata_url, get_request_data,
    process_compute_request, process_consume_request, record_consume_request,
    validate_algorithm_dict, validate_order,
    validate_transfer_not_used_for_other_service)
from ocean_provider.utils.accounts import verify_signature
from ocean_provider.utils.basics import (LocalFileAdapter,
                                         get_asset_from_metadatastore,
                                         get_config, get_datatoken_minter,
                                         get_provider_wallet, setup_network)
from ocean_provider.utils.encryption import do_encrypt

setup_logging()
services = Blueprint('services', __name__)
setup_network()
provider_wallet = get_provider_wallet()
requests_session = get_requests_session()
requests_session.mount('file://', LocalFileAdapter())
user_nonce = UserNonce(get_config().storage_path)

logger = logging.getLogger(__name__)


@services.route('/nonce', methods=['GET'])
def nonce():
    required_attributes = [
        'userAddress',
    ]
    data = get_request_data(request)

    msg, status = check_required_attributes(required_attributes, data, 'nonce')
    if msg:
Example #10
class DataServiceProvider:
    """
    `Brizo` is the name chosen for the asset service provider.

    The main functions available are:
    - consume_service
    - run_compute_service (not implemented yet)

    """
    _http_client = get_requests_session()
    API_VERSION = '/api/v1'

    @staticmethod
    def set_http_client(http_client):
        """Set the http client to something other than the default `requests`"""
        DataServiceProvider._http_client = http_client

    @staticmethod
    def encrypt_files_dict(files_dict, encrypt_endpoint, asset_id,
                           publisher_address, signed_did):
        payload = json.dumps({
            'documentId': asset_id,
            'signature': signed_did,
            'document': json.dumps(files_dict),
            'publisherAddress': publisher_address
        })

        response = DataServiceProvider._http_client.post(
            encrypt_endpoint,
            data=payload,
            headers={'content-type': 'application/json'})
        if response and hasattr(response, 'status_code'):
            if response.status_code != 201:
                msg = (
                    f'Encrypt file urls failed at the encryptEndpoint '
                    f'{encrypt_endpoint}, reason {response.text}, status {response.status_code}'
                )
                logger.error(msg)
                raise OceanEncryptAssetUrlsError(msg)

            logger.info(
                f'Asset urls encrypted successfully, encrypted urls str: {response.text},'
                f' encryptedEndpoint {encrypt_endpoint}')

            return response.json()['encryptedDocument']

    @staticmethod
    def sign_message(wallet, msg, config, nonce=None):
        if nonce is None:
            nonce = DataServiceProvider.get_nonce(wallet.address, config)
        print(
            f'signing message with nonce {nonce}: {msg}, account={wallet.address}'
        )
        return Web3Helper.sign_hash(
            add_ethereum_prefix_and_hash_msg(f'{msg}{nonce}'), wallet)

    @staticmethod
    def get_nonce(user_address, config):
        url = DataServiceProvider.build_endpoint('nonce')
        response = DataServiceProvider._http_client.get(
            f'{url}?userAddress={user_address}')
        if response.status_code != 200:
            return None

        return response.json()['nonce']

    @staticmethod
    def get_order_requirements(did, service_endpoint, consumer_address,
                               service_id, service_type, token_address):
        """

        :param did:
        :param service_endpoint:
        :param consumer_address: hex str the ethereum account address of the consumer
        :param service_id:
        :param service_type:
        :param token_address:
        :return: OrderRequirements instance -- named tuple (amount, data_token_address, receiver_address, nonce),
        """
        initialize_url = (f'{service_endpoint}'
                          f'?documentId={did}'
                          f'&serviceId={service_id}'
                          f'&serviceType={service_type}'
                          f'&dataToken={token_address}'
                          f'&consumerAddress={consumer_address}')

        logger.info(
            f'invoke the initialize endpoint with this url: {initialize_url}')
        response = DataServiceProvider._http_client.get(initialize_url)
        # The returned json should contain information about the required number of tokens
        # to consume `service_id`. If service is not available there will be an error or
        # the returned json is empty.
        if response.status_code != 200:
            return None
        order = dict(response.json())

        return OrderRequirements(float(order['numTokens']), order['dataToken'],
                                 order['to'], int(order['nonce']))

    @staticmethod
    def download_service(did,
                         service_endpoint,
                         wallet,
                         files,
                         destination_folder,
                         service_id,
                         token_address,
                         order_tx_id,
                         index=None):
        """
        Call the provider endpoint to get access to the different files that form the asset.

        :param did: str id of the asset
        :param service_endpoint: Url to consume, str
        :param wallet: Wallet instance of the consumer signing this request
        :param files: List containing the files to be consumed, list
        :param destination_folder: Path, str
        :param service_id: integer the id of the service inside the DDO's service dict
        :param token_address: hex str the data token address associated with this asset/service
        :param order_tx_id: hex str the transaction hash for the required data token
            transfer (tokens of the same token address above)
        :param index: Index of the document that is going to be downloaded, int
        :return: True if it was downloaded, bool
        """
        indexes = range(len(files))
        if index is not None:
            assert isinstance(index,
                              int), logger.error('index has to be an integer.')
            assert index >= 0, logger.error(
                'index has to be 0 or a positive integer.')
            assert index < len(files), logger.error(
                'index can not be bigger than the number of files')
            indexes = [index]

        base_url = (f'{service_endpoint}'
                    f'?documentId={did}'
                    f'&serviceId={service_id}'
                    f'&serviceType={ServiceTypes.ASSET_ACCESS}'
                    f'&dataToken={token_address}'
                    f'&transferTxId={order_tx_id}'
                    f'&consumerAddress={wallet.address}')
        config = ConfigProvider.get_config()
        for i in indexes:
            signature = DataServiceProvider.sign_message(wallet, did, config)
            download_url = base_url + f'&signature={signature}&fileIndex={i}'
            logger.info(
                f'invoke consume endpoint with this url: {download_url}')
            response = DataServiceProvider._http_client.get(download_url,
                                                            stream=True)
            file_name = DataServiceProvider._get_file_name(response)
            DataServiceProvider.write_file(response, destination_folder,
                                           file_name or f'file-{i}')

    @staticmethod
    def start_compute_job(did,
                          service_endpoint,
                          consumer_address,
                          signature,
                          service_id,
                          token_address,
                          order_tx_id,
                          algorithm_did=None,
                          algorithm_meta=None,
                          output=None,
                          job_id=None):
        """

        :param did: id of asset starting with `did:op:` and a hex str without 0x prefix
        :param service_endpoint:
        :param consumer_address: hex str the ethereum address of the consumer executing the compute job
        :param signature: hex str signed message to allow the provider to authorize the consumer
        :param service_id:
        :param token_address:
        :param order_tx_id: hex str id of the token transfer transaction
        :param algorithm_did: str -- the asset did (of `algorithm` type) which consists of `did:op:` and
            the assetId hex str (without `0x` prefix)
        :param algorithm_meta: see `OceanCompute.execute`
        :param output: see `OceanCompute.execute`
        :param job_id: str id of compute job that was started and stopped (optional, use it
            here to start a job after it was stopped)

        :return: job_info dict with jobId, status, and other values
        """
        assert algorithm_did or algorithm_meta, 'either an algorithm did or an algorithm meta must be provided.'

        payload = DataServiceProvider._prepare_compute_payload(
            did,
            consumer_address,
            service_id,
            ServiceTypes.CLOUD_COMPUTE,
            token_address,
            order_tx_id,
            signature=signature,
            algorithm_did=algorithm_did,
            algorithm_meta=algorithm_meta,
            output=output,
            job_id=job_id)
        logger.info(f'invoke start compute endpoint with this url: {payload}')
        response = DataServiceProvider._http_client.post(
            service_endpoint,
            data=json.dumps(payload),
            headers={'content-type': 'application/json'})
        logger.debug(
            f'got DataProvider execute response: {response.content} with status-code {response.status_code} '
        )
        if not response:
            raise AssertionError(
                f'Failed to get a response for request: serviceEndpoint={service_endpoint}, payload={payload}'
            )

        if response.status_code not in (201, 200):
            raise ValueError(response.content.decode('utf-8'))

        try:
            job_info = json.loads(response.content.decode('utf-8'))
            if isinstance(job_info, list):
                return job_info[0]
            return job_info

        except KeyError as err:
            logger.error(f'Failed to extract jobId from response: {err}')
            raise KeyError(f'Failed to extract jobId from response: {err}')
        except JSONDecodeError as err:
            logger.error(f'Failed to parse response json: {err}')
            raise

    @staticmethod
    def stop_compute_job(did, job_id, service_endpoint, consumer_address,
                         signature):
        """

        :param did: hex str the asset/DDO id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param consumer_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: bool whether the job was stopped successfully
        """
        return DataServiceProvider._send_compute_request(
            'put', did, job_id, service_endpoint, consumer_address, signature)

    @staticmethod
    def restart_compute_job(did,
                            job_id,
                            service_endpoint,
                            consumer_address,
                            signature,
                            service_id,
                            token_address,
                            order_tx_id,
                            algorithm_did=None,
                            algorithm_meta=None,
                            output=None):
        """

        :param did: str the asset did which consists of `did:op:` and the assetId hex str (without `0x` prefix)
        :param job_id: str id of the compute job to stop and then restart
        :param service_endpoint: str url of the provider service endpoint for the compute service
        :param consumer_address: hex str the ethereum address of the consumer executing the compute job
        :param signature: hex str signed message to allow the provider to authorize the consumer
        :param service_id: str id of the compute service inside the asset DDO
        :param token_address: hex str address of the datatoken contract associated with the asset
        :param order_tx_id: hex str id of the token transfer transaction
        :param algorithm_did: str -- the asset did (of `algorithm` type) which consists of `did:op:` and
            the assetId hex str (without `0x` prefix)
        :param algorithm_meta: see `OceanCompute.execute`
        :param output: see `OceanCompute.execute`

        :return: bool whether the job was restarted successfully
        """
        DataServiceProvider.stop_compute_job(did, job_id, service_endpoint,
                                             consumer_address, signature)
        return DataServiceProvider.start_compute_job(did,
                                                     service_endpoint,
                                                     consumer_address,
                                                     signature,
                                                     service_id,
                                                     token_address,
                                                     order_tx_id,
                                                     algorithm_did,
                                                     algorithm_meta,
                                                     output,
                                                     job_id=job_id)

    @staticmethod
    def delete_compute_job(did, job_id, service_endpoint, consumer_address,
                           signature):
        """

        :param did: hex str the asset/DDO id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param consumer_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: bool whether the job was deleted successfully
        """
        return DataServiceProvider._send_compute_request(
            'delete', did, job_id, service_endpoint, consumer_address,
            signature)

    @staticmethod
    def compute_job_status(did, job_id, service_endpoint, consumer_address,
                           signature):
        """

        :param did: hex str the asset/DDO id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param consumer_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: dict of job_id to status info. When job_id is not provided, this will return
            the status for each job_id that exists for the did
        """
        return DataServiceProvider._send_compute_request(
            'get', did, job_id, service_endpoint, consumer_address, signature)

    @staticmethod
    def compute_job_result(did, job_id, service_endpoint, consumer_address,
                           signature):
        """

        :param did: hex str the asset/DDO id
        :param job_id: str id of compute job that was returned from `start_compute_job`
        :param service_endpoint: str url of the provider service endpoint for compute service
        :param consumer_address: hex str the ethereum address of the consumer's account
        :param signature: hex str signed message to allow the provider to authorize the consumer

        :return: dict of job_id to result urls. When job_id is not provided, this will return
            the results for each job_id that exists for the did
        """
        return DataServiceProvider._send_compute_request(
            'get', did, job_id, service_endpoint, consumer_address, signature)

    @staticmethod
    def _remove_slash(path):
        if path.endswith('/'):
            path = path[:-1]
        if path.startswith('/'):
            path = path[1:]
        return path
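    # Behaviour sketch (illustrative inputs):
    #   _remove_slash('/api/v1/aquarius/assets/')  -> 'api/v1/aquarius/assets'
    #   _remove_slash('http://localhost:8030/')    -> 'http://localhost:8030'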

    @staticmethod
    def get_url(config):
        """
        Return the DataProvider component url.

        :param config: Config
        :return: Url, str
        """
        return DataServiceProvider._remove_slash(config.provider_url
                                                 or 'http://localhost:8030')

    @staticmethod
    def get_api_version():
        return DataServiceProvider._remove_slash(
            os.getenv(ENV_PROVIDER_API_VERSION,
                      DataServiceProvider.API_VERSION))

    @staticmethod
    def build_endpoint(service_name, provider_uri=None, config=None):
        if not provider_uri:
            config = config or ConfigProvider.get_config()
            provider_uri = DataServiceProvider.get_url(config)

        provider_uri = DataServiceProvider._remove_slash(provider_uri)
        parts = provider_uri.split('/')
        if parts[-2] == 'services':
            base_url = '/'.join(parts[:-2])
            return f'{base_url}/services/{service_name}'

        api_version = DataServiceProvider.get_api_version()
        if api_version not in provider_uri:
            provider_uri = f'{provider_uri}/{api_version}'

        return f'{provider_uri}/services/{service_name}'
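    # Url composition sketch (the provider url below is an illustrative assumption):
    #   build_endpoint('download', 'http://localhost:8030')
    #     -> f'http://localhost:8030/{DataServiceProvider.get_api_version()}/services/download'
    # If `provider_uri` already points at a '.../services/<name>' endpoint, the
    # base url before '/services' is reused instead.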

    @staticmethod
    def build_encrypt_endpoint(provider_uri=None):
        return DataServiceProvider.build_endpoint('encrypt', provider_uri)

    @staticmethod
    def build_initialize_endpoint(provider_uri=None):
        return DataServiceProvider.build_endpoint('initialize', provider_uri)

    @staticmethod
    def build_download_endpoint(provider_uri=None):
        return DataServiceProvider.build_endpoint('download', provider_uri)

    @staticmethod
    def build_compute_endpoint(provider_uri=None):
        return DataServiceProvider.build_endpoint('compute', provider_uri)

    @staticmethod
    def get_initialize_endpoint(service_endpoint):
        parts = service_endpoint.split('/')
        if parts[-2] == 'services':
            base_url = '/'.join(parts[:-2])
            return f'{base_url}/services/initialize'

        return DataServiceProvider.build_initialize_endpoint(service_endpoint)

    @staticmethod
    def get_download_endpoint(config):
        """
        Return the url to consume the asset.

        :param config: Config
        :return: Url, str
        """
        return DataServiceProvider.build_download_endpoint(
            DataServiceProvider.get_url(config))

    @staticmethod
    def get_compute_endpoint(config):
        """
        Return the url to execute the asset.

        :param config: Config
        :return: Url, str
        """
        return DataServiceProvider.build_compute_endpoint(
            DataServiceProvider.get_url(config))

    @staticmethod
    def get_encrypt_endpoint(config):
        """
        Return the url to encrypt the asset.

        :param config: Config
        :return: Url, str
        """
        return DataServiceProvider.build_encrypt_endpoint(
            DataServiceProvider.get_url(config))

    @staticmethod
    def write_file(response, destination_folder, file_name):
        """
        Write the response content in a file in the destination folder.
        :param response: Response
        :param destination_folder: Destination folder, string
        :param file_name: File name, string
        :return: None
        """
        if response.status_code == 200:
            with open(os.path.join(destination_folder, file_name), 'wb') as f:
                for chunk in response.iter_content(chunk_size=None):
                    f.write(chunk)
            logger.info(f'Saved downloaded file in {f.name}')
        else:
            logger.warning(f'consume failed: {response.reason}')

    @staticmethod
    def _send_compute_request(http_method, did, job_id, service_endpoint,
                              consumer_address, signature):
        compute_url = (f'{service_endpoint}'
                       f'?signature={signature}'
                       f'&documentId={did}'
                       f'&consumerAddress={consumer_address}'
                       f'&jobId={job_id or ""}')
        logger.info(f'invoke compute endpoint with this url: {compute_url}')
        method = getattr(DataServiceProvider._http_client, http_method)
        response = method(compute_url)
        logger.debug(
            f'got provider compute response: {response.content} with status-code {response.status_code}'
        )
        if response.status_code != 200:
            raise Exception(response.content.decode('utf-8'))

        resp_content = json.loads(response.content.decode('utf-8'))
        if isinstance(resp_content, list):
            return resp_content[0]
        return resp_content
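    # Illustrative sketch of the url composed above (all values are assumptions):
    #   {service_endpoint}?signature=0xSignedMessage&documentId=did:op:0123abc
    #       &consumerAddress=0xConsumerAddress&jobId=42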

    @staticmethod
    def _get_file_name(response):
        try:
            return re.match(r'attachment;filename=(.+)',
                            response.headers.get('content-disposition'))[1]
        except Exception as e:
            logger.warning(f'It was not possible to get the file name. {e}')
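    # For example (illustrative header value): a response whose 'content-disposition'
    # header is 'attachment;filename=results.csv' yields 'results.csv'; if the header
    # is missing, the exception is caught, a warning is logged and None is returned.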

    @staticmethod
    def _prepare_compute_payload(did,
                                 consumer_address,
                                 service_id,
                                 service_type,
                                 token_address,
                                 order_tx_id,
                                 signature=None,
                                 algorithm_did=None,
                                 algorithm_meta=None,
                                 output=None,
                                 job_id=None):
        assert algorithm_did or algorithm_meta, 'either an algorithm did or an algorithm meta must be provided.'

        if algorithm_meta:
            assert isinstance(algorithm_meta, AlgorithmMetadata), f'expecting an AlgorithmMetadata type ' \
                                                                  f'for `algorithm_meta`, got {type(algorithm_meta)}'
            algorithm_meta = algorithm_meta.as_dictionary()

        return {
            'signature': signature,
            'documentId': did,
            'consumerAddress': consumer_address,
            'algorithmDID': algorithm_did,
            'algorithmMeta': algorithm_meta,
            'output': output or dict(),
            'jobId': job_id or "",
            'serviceId': service_id,
            'serviceType': service_type,
            'dataToken': token_address,
            'transferTxId': order_tx_id
        }
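
# A minimal, self-contained sketch of the payload built by `_prepare_compute_payload`
# above; every value below is an illustrative assumption, not taken from the source.
if __name__ == '__main__':
    example_payload = DataServiceProvider._prepare_compute_payload(
        did='did:op:0123abc',
        consumer_address='0xConsumerAddress',
        service_id=4,
        service_type=ServiceTypes.CLOUD_COMPUTE,
        token_address='0xDataTokenAddress',
        order_tx_id='0xOrderTxId',
        signature='0xSignedMessage',
        algorithm_did='did:op:4567def')
    print(json.dumps(example_payload, indent=2))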
Exemple #11
0
class Brizo:
    """
    `Brizo` is the name chosen for the asset service provider.

    The main functions available are:
    - initialize_service_agreement
    - consume_service
    - run_compute_service (not implemented yet)

    """
    _http_client = get_requests_session()

    @staticmethod
    def set_http_client(http_client):
        """Set the http client to something other than the default `requests`"""
        Brizo._http_client = http_client

    @staticmethod
    def encrypt_files_dict(files_dict, encrypt_endpoint, asset_id,
                           account_address, signed_did):
        payload = json.dumps({
            'documentId': asset_id,
            'signedDocumentId': signed_did,
            'document': json.dumps(files_dict),
            'publisherAddress': account_address
        })
        response = Brizo._http_client.post(
            encrypt_endpoint,
            data=payload,
            headers={'content-type': 'application/json'})
        if response and hasattr(response, 'status_code'):
            if response.status_code != 201:
                msg = (
                    f'Encrypt file urls failed at the encryptEndpoint '
                    f'{encrypt_endpoint}, reason {response.text}, status {response.status_code}'
                )
                logger.error(msg)
                raise OceanEncryptAssetUrlsError(msg)

            logger.info(
                f'Asset urls encrypted successfully, encrypted urls str: {response.text},'
                f' encryptedEndpoint {encrypt_endpoint}')

            return response.text

    @staticmethod
    def initialize_service_agreement(did, agreement_id, service_index,
                                     signature, account_address,
                                     purchase_endpoint):
        """
        Send a request to the service provider (purchase_endpoint) to initialize the service
        agreement for the asset identified by `did`.

        :param did: id of the asset includes the `did:op:` prefix, str
        :param agreement_id: id of the agreement, hex str
        :param service_index: identifier of the service inside the asset DDO, str
        :param signature: signed agreement hash, hex str
        :param account_address: ethereum address of the consumer signing this agreement, hex str
        :param purchase_endpoint: url of the service provider, str
        :return: bool
        """
        payload = Brizo._prepare_consume_payload(did, agreement_id,
                                                 service_index, signature,
                                                 account_address)
        response = Brizo._http_client.post(
            purchase_endpoint,
            data=payload,
            headers={'content-type': 'application/json'})
        if response and hasattr(response, 'status_code'):
            if response.status_code != 201:
                msg = (
                    f'Initialize service agreement failed at the purchaseEndpoint '
                    f'{purchase_endpoint}, reason {response.text}, status {response.status_code}'
                )
                logger.error(msg)
                raise OceanInitializeServiceAgreementError(msg)

            logger.info(
                f'Service agreement initialized successfully, service agreement id {agreement_id},'
                f' purchaseEndpoint {purchase_endpoint}')
            return True

    @staticmethod
    def consume_service(service_agreement_id,
                        service_endpoint,
                        account,
                        files,
                        destination_folder,
                        index=None):
        """
        Call the brizo endpoint to get access to the different files that form the asset.

        :param service_agreement_id: Service Agreement Id, str
        :param service_endpoint: Url to consume, str
        :param account: Account instance of the consumer signing this agreement
        :param files: List containing the files to be consumed, list
        :param destination_folder: Path of the folder where the files are saved, str
        :param index: Index of the document that is going to be downloaded, int (optional)
        :return: None; the files are written to `destination_folder`
        """
        signature = Keeper.get_instance().sign_hash(
            add_ethereum_prefix_and_hash_msg(service_agreement_id), account)

        if index is not None:
            assert isinstance(index,
                              int), logger.error('index has to be an integer.')
            assert index >= 0, logger.error(
                'index has to be 0 or a positive integer.')
            assert index < len(files), logger.error(
                'index can not be bigger than the number of files')
            consume_url = Brizo._create_consume_url(service_endpoint,
                                                    service_agreement_id,
                                                    account, None, signature,
                                                    index)
            logger.info(
                f'invoke consume endpoint with this url: {consume_url}')
            response = Brizo._http_client.get(consume_url, stream=True)
            file_name = Brizo._get_file_name(response)
            Brizo.write_file(response, destination_folder, file_name)
        else:
            for i, _file in enumerate(files):
                consume_url = Brizo._create_consume_url(
                    service_endpoint, service_agreement_id, account, _file,
                    signature, i)
                logger.info(
                    f'invoke consume endpoint with this url: {consume_url}')
                response = Brizo._http_client.get(consume_url, stream=True)
                file_name = Brizo._get_file_name(response)
                Brizo.write_file(response, destination_folder, file_name
                                 or f'file-{i}')

    @staticmethod
    def execute_service(service_agreement_id, service_endpoint, account,
                        workflow_ddo):
        """

        :param service_agreement_id: Service Agreement Id, str
        :param service_endpoint: Url of the provider execute endpoint, str
        :param account: Account instance of the consumer signing this agreement
        :param workflow_ddo: DDO instance of the workflow asset to execute
        :return: str the `workflowId` returned by the provider
        """
        signature = Keeper.get_instance().sign_hash(
            add_ethereum_prefix_and_hash_msg(service_agreement_id), account)
        execute_url = Brizo._create_execute_url(service_endpoint,
                                                service_agreement_id, account,
                                                workflow_ddo.did, signature)
        logger.info(f'invoke execute endpoint with this url: {execute_url}')
        response = Brizo._http_client.post(execute_url)
        logger.debug(
            f'got brizo execute response: {response.content} with status-code {response.status_code}'
        )
        if response.status_code != 201:
            raise Exception(response.content.decode('utf-8'))

        return json.loads(response.content)['workflowId']

    @staticmethod
    def _prepare_consume_payload(did, service_agreement_id, service_index,
                                 signature, consumer_address):
        """Prepare a payload to send to `Brizo`.

        :param did: DID, str
        :param service_agreement_id: Service Agreement Id, str
        :param service_index: identifier of the service inside the asset DDO (DID document), str
        :param signature: the signed agreement message hash which includes
         conditions and their parameters values and other details of the agreement, str
        :param consumer_address: ethereum address of the consumer signing this agreement, hex-str
        :return: json str of the payload
        """
        return json.dumps({
            'did': did,
            'serviceAgreementId': service_agreement_id,
            ServiceAgreement.SERVICE_INDEX: service_index,
            'signature': signature,
            'consumerAddress': consumer_address
        })

    @staticmethod
    def get_brizo_url(config):
        """
        Return the Brizo component url.

        :param config: Config
        :return: Url, str
        """
        brizo_url = 'http://localhost:8030'
        if config.has_option('resources', 'brizo.url'):
            brizo_url = config.get('resources', 'brizo.url') or brizo_url

        brizo_path = '/api/v1/brizo'
        return f'{brizo_url}{brizo_path}'
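    # Sketch of the default result (no 'brizo.url' option configured):
    #   get_brizo_url(config)  -> 'http://localhost:8030/api/v1/brizo'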

    @staticmethod
    def get_purchase_endpoint(config):
        """
        Return the endpoint to consume the asset.

        :param config:Config
        :return: Url, str
        """
        return f'{Brizo.get_brizo_url(config)}/services/access/initialize'

    @staticmethod
    def get_consume_endpoint(config):
        """
        Return the url to consume the asset.

        :param config: Config
        :return: Url, str
        """
        return f'{Brizo.get_brizo_url(config)}/services/consume'

    @staticmethod
    def get_execute_endpoint(config):
        """
        Return the url to execute the asset.

        :param config: Config
        :return: Url, str
        """
        return f'{Brizo.get_brizo_url(config)}/services/exec'

    @staticmethod
    def get_encrypt_endpoint(config):
        """
        Return the url to encrypt the asset.

        :param config: Config
        :return: Url, str
        """
        return f'{Brizo.get_brizo_url(config)}/services/publish'

    @staticmethod
    def _get_file_name(response):
        try:
            return re.match(r'attachment;filename=(.+)',
                            response.headers.get('content-disposition'))[1]
        except Exception as e:
            logger.warning(f'It was not possible to get the file name. {e}')

    @staticmethod
    def write_file(response, destination_folder, file_name):
        """
        Write the response content in a file in the destination folder.
        :param response: Response
        :param destination_folder: Destination folder, string
        :param file_name: File name, string
        :return: None
        """
        if response.status_code == 200:
            with open(os.path.join(destination_folder, file_name), 'wb') as f:
                for chunk in response.iter_content(chunk_size=None):
                    f.write(chunk)
            logger.info(f'Saved downloaded file in {f.name}')
        else:
            logger.warning(f'consume failed: {response.reason}')

    @staticmethod
    def _create_consume_url(service_endpoint,
                            service_agreement_id,
                            account,
                            _file=None,
                            signature=None,
                            index=None):
        if _file is not None and 'url' in _file:
            url = _file['url']
            if url.startswith('"') or url.startswith("'"):
                url = url[1:-1]
            return (f'{service_endpoint}'
                    f'?url={url}'
                    f'&serviceAgreementId={service_agreement_id}'
                    f'&consumerAddress={account.address}')
        else:
            return (f'{service_endpoint}'
                    f'?signature={signature}'
                    f'&serviceAgreementId={service_agreement_id}'
                    f'&consumerAddress={account.address}'
                    f'&index={index}')
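    # Illustrative url shapes produced above (all values are assumptions):
    #   with a file dict carrying 'url':
    #     {service_endpoint}?url=https://example.com/data.csv&serviceAgreementId=0xAgreementId&consumerAddress=0xConsumerAddress
    #   without one:
    #     {service_endpoint}?signature=0xSignedMessage&serviceAgreementId=0xAgreementId&consumerAddress=0xConsumerAddress&index=0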

    @staticmethod
    def _create_execute_url(service_endpoint,
                            service_agreement_id,
                            account,
                            workflow_did,
                            signature=None):
        return (f'{service_endpoint}'
                f'?signature={signature}'
                f'&serviceAgreementId={service_agreement_id}'
                f'&consumerAddress={account.address}'
                f'&workflowDID={workflow_did}')
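
# A minimal usage sketch of the url helper above; any object exposing an
# `.address` attribute stands in for an Account here, and all values are
# illustrative assumptions, not taken from the source.
if __name__ == '__main__':
    from collections import namedtuple

    StubAccount = namedtuple('StubAccount', 'address')
    execute_url = Brizo._create_execute_url(
        'http://localhost:8030/api/v1/brizo/services/exec',
        service_agreement_id='0xAgreementId',
        account=StubAccount(address='0xConsumerAddress'),
        workflow_did='did:op:workflow123',
        signature='0xSignedMessage')
    print(execute_url)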