def _check_job_id(client, job_id, did, token_address, wait_time=20):
    """Poll the compute endpoint until job `job_id` reports a results DID,
    then resolve that DID in the metadata store.

    :param client: HTTP/test client used to call the provider endpoints
    :param job_id: id of the compute job to check
    :param did: DID of the dataset the job was run on (part of the signed message)
    :param token_address: unused here; kept for caller compatibility
    :param wait_time: seconds to wait for results (polled 4 times per second)
    """
    endpoint = BaseURLs.ASSETS_URL + '/compute'
    cons_wallet = get_consumer_wallet()
    nonce = get_nonce(client, cons_wallet.address)
    # The provider expects address+jobId+did+nonce signed by the consumer wallet.
    msg = f'{cons_wallet.address}{job_id}{did}{nonce}'
    _id_hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_id_hash, cons_wallet)
    payload = {
        'signature': signature,
        'documentId': did,
        'consumerAddress': cons_wallet.address,
        'jobId': job_id,
    }
    job_info = get_compute_job_info(client, endpoint, payload)
    assert job_info, f'Failed to get job info for jobId {job_id}'
    print(f'got info for compute job {job_id}: {job_info}')
    assert job_info['statusText'] in get_possible_compute_job_status_text()

    # get did of results; use .get() so a status payload that does not yet
    # contain a 'did' key does not raise KeyError mid-poll.
    did = None
    for _ in range(wait_time * 4):
        job_info = get_compute_job_info(client, endpoint, payload)
        did = job_info.get('did')
        if did:
            break
        time.sleep(0.25)
    assert did, f'Compute job has no results, job info {job_info}.'
    # check results ddo
    ddo = get_asset_from_metadatastore(get_metadata_url(), did)
    assert ddo, f'Failed to resolve ddo for did {did}'
def _check_job_id(client, job_id, did, token_address, wait_time=20):
    """Poll the compute endpoint until job `job_id` reports a results DID,
    then resolve that DID in the metadata store.

    :param client: HTTP/test client used to call the provider endpoints
    :param job_id: id of the compute job to check
    :param did: DID of the dataset the job was run on (part of the signed message)
    :param token_address: unused here; kept for caller compatibility
    :param wait_time: seconds to wait for results (polled 4 times per second)
    """
    endpoint = BaseURLs.ASSETS_URL + "/compute"
    cons_wallet = get_consumer_wallet()
    nonce = get_nonce(client, cons_wallet.address)
    # The provider expects address+jobId+did+nonce signed by the consumer wallet.
    msg = f"{cons_wallet.address}{job_id}{did}{nonce}"
    _id_hash = add_ethereum_prefix_and_hash_msg(msg)
    signature = Web3Helper.sign_hash(_id_hash, cons_wallet)
    payload = {
        "signature": signature,
        "documentId": did,
        "consumerAddress": cons_wallet.address,
        "jobId": job_id,
    }
    job_info = get_compute_job_info(client, endpoint, payload)
    assert job_info, f"Failed to get job info for jobId {job_id}"
    print(f"got info for compute job {job_id}: {job_info}")
    assert job_info["statusText"] in get_possible_compute_job_status_text()

    # get did of results; use .get() so a status payload that does not yet
    # contain a "did" key does not raise KeyError mid-poll.
    did = None
    for _ in range(wait_time * 4):
        job_info = get_compute_job_info(client, endpoint, payload)
        did = job_info.get("did")
        if did:
            break
        time.sleep(0.25)
    assert did, f"Compute job has no results, job info {job_info}."
    # check results ddo
    ddo = get_asset_from_metadatastore(get_metadata_url(), did)
    assert ddo, f"Failed to resolve ddo for did {did}"
def fileinfo():
    """Retrieves Content-Type and Content-Length from the given URL or asset.
    Supports a payload of either url or did.
    This can be used by the publisher of an asset to check basic information
    about the URL(s). For now, this information consists of the Content-Type
    and Content-Length of the request, using primarily OPTIONS, with fallback
    to GET. In the future, we will add a hash to make sure that the file was
    not tampered with at consumption time.

    tags:
      - services
    responses:
      200:
        description: the URL(s) could be analysed (returns the result).
      400:
        description: the URL(s) could not be analysed (bad request).

    return: list of file info (index, valid, contentLength, contentType)
    """
    required_attributes = ['url', 'did']
    data = get_request_data(request)

    # At least one of url / did must be present in the payload.
    msg, status = check_at_least_one_attribute(required_attributes, data, 'checkURL')
    if msg:
        return jsonify(error=msg), status

    did = data.get('did')
    if did and not did.startswith('did:op:'):
        return jsonify(error=f'Invalid `did` {did}.'), 400

    url = data.get('url')
    if did:
        # Resolve the asset and collect every decrypted download URL.
        asset = get_asset_from_metadatastore(get_metadata_url(), did)
        url_list = get_asset_download_urls(
            asset, provider_wallet, config_file=app.config['CONFIG_FILE'])
    else:
        url_list = [get_download_url(url, app.config['CONFIG_FILE'])]

    files_info = []
    for index, download_url in enumerate(url_list):
        is_valid, details = check_url_details(download_url)
        entry = {'index': index, 'valid': is_valid}
        entry.update(details)
        files_info.append(entry)

    return Response(json.dumps(files_info), 200,
                    headers={'content-type': 'application/json'})
def _validate_trusted_algos(self, algorithm_did, trusted_algorithms):
    """Check that `algorithm_did` is in the publisher's trusted-algorithms list
    and that its files/container checksums still match the trusted entries.

    :param algorithm_did: DID of the algorithm asset being requested
    :param trusted_algorithms: list of dicts from publisherTrustedAlgorithms,
        each expected to carry a "did" key and optional checksum keys
    :return: True when trusted and checksums match; False otherwise, with the
        reason stored in self.error
    """
    if not trusted_algorithms:
        self.error = (
            "Using algorithmDid but allowAllPublishedAlgorithms is False and no "
            "trusted algorithms are set in publisherTrustedAlgorithms.")
        return False
    try:
        # KeyError here means some entry lacks a "did" key.
        did_to_trusted_algo_dict = {
            algo["did"]: algo for algo in trusted_algorithms
        }
        if algorithm_did not in did_to_trusted_algo_dict:
            self.error = f"this algorithm did {algorithm_did} is not trusted."
            return False
    except KeyError:
        self.error = (
            "Some algos in the publisherTrustedAlgorithms don't have a did."
        )
        return False

    trusted_algo_dict = did_to_trusted_algo_dict[algorithm_did]
    # Checksums are optional in the trusted entry; a missing checksum skips
    # that particular comparison below.
    allowed_files_checksum = trusted_algo_dict.get("filesChecksum")
    allowed_container_checksum = trusted_algo_dict.get(
        "containerSectionChecksum")
    algo_ddo = get_asset_from_metadatastore(get_metadata_url(),
                                            trusted_algo_dict["did"])
    service = algo_ddo.get_service(ServiceTypes.METADATA)

    # Recompute the files checksum over encryptedFiles + the canonical
    # (compact-separator) JSON of the files section, as the publisher did.
    files_checksum = create_checksum(
        service.attributes["encryptedFiles"] +
        json.dumps(service.main["files"], separators=(",", ":")))
    if allowed_files_checksum and files_checksum != allowed_files_checksum:
        self.error = (
            f"filesChecksum for algorithm with did {algo_ddo.did} does not match"
        )
        return False

    # Same canonical-JSON checksum over the algorithm container section.
    container_section_checksum = create_checksum(
        json.dumps(service.main["algorithm"]["container"],
                   separators=(",", ":")))
    if (allowed_container_checksum and
            container_section_checksum != allowed_container_checksum):
        self.error = f"containerSectionChecksum for algorithm with did {algo_ddo.did} does not match"
        return False
    return True
def build_stage_algorithm_dict(consumer_address, algorithm_did,
                               algorithm_token_address, algorithm_tx_id,
                               algorithm_meta, provider_wallet,
                               receiver_address=None):
    """Build the `algorithm` section of a compute-job stage.

    Two modes:
    - `algorithm_did` given: validates the on-chain order/transfer for the
      algorithm asset, records the consume request, and resolves the
      algorithm's download URL and container from its DDO.
    - `algorithm_did` is None: uses the raw `algorithm_meta` payload
      (url / rawcode / container) supplied by the caller.

    :param receiver_address: unused here; kept for caller compatibility
    :return: dict with keys 'id', 'url', 'rawcode', 'container'
    :raises AssertionError: when `algorithm_did` is set without both
        `algorithm_token_address` and `algorithm_tx_id`
    """
    if algorithm_did is not None:
        # `assert` is stripped under `python -O`; raise explicitly so this
        # validation always runs (same exception type as the original assert).
        if not (algorithm_token_address and algorithm_tx_id):
            raise AssertionError(
                'algorithm_did requires both algorithm_token_address and algorithm_tx_id.')

        algo_asset = get_asset_from_metadatastore(get_metadata_url(), algorithm_did)
        service = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, algo_asset)
        # Verify the payment order on chain before granting access.
        _tx, _order_log, _transfer_log = validate_order(
            consumer_address,
            algorithm_token_address,
            float(service.get_cost()),
            algorithm_tx_id,
            add_0x_prefix(did_to_id(algorithm_did))
            if algorithm_did.startswith('did:') else algorithm_did,
            service.index)
        validate_transfer_not_used_for_other_service(
            algorithm_did, service.index, algorithm_tx_id, consumer_address,
            algorithm_token_address)
        record_consume_request(
            algorithm_did, service.index, algorithm_tx_id, consumer_address,
            algorithm_token_address, service.get_cost())

        algo_id = algorithm_did
        raw_code = ''
        algo_url = get_asset_url_at_index(0, algo_asset, provider_wallet)
        container = algo_asset.metadata['main']['algorithm']['container']
    else:
        # Raw algorithm supplied inline by the caller.
        algo_id = ''
        algo_url = algorithm_meta.get('url')
        raw_code = algorithm_meta.get('rawcode')
        container = algorithm_meta.get('container')

    # Plain dict literal; the original dict({...}) wrapper was redundant.
    return {
        'id': algo_id,
        'url': algo_url,
        'rawcode': raw_code,
        'container': container,
    }
def process_consume_request(data: dict):
    """Resolve the asset and service referenced by a consume request payload.

    :param data: request payload with documentId, dataToken, consumerAddress,
        serviceId and serviceType keys
    :return: tuple (asset, service, did, consumer_address, token_address)
    :raises AssertionError: when the resolved service type does not match the
        requested one
    """
    did = data.get("documentId")
    token_address = data.get("dataToken")
    consumer_address = data.get("consumerAddress")
    service_id = data.get("serviceId")
    requested_type = data.get("serviceType")

    # grab asset for did from the metadatastore associated with
    # the Data Token address
    asset = get_asset_from_metadatastore(get_metadata_url(), did)
    service = ServiceAgreement.from_ddo(requested_type, asset)

    if service.type != requested_type:
        raise AssertionError(
            f"Requested service with id {service_id} has type {service.type} "
            f"which does not match the requested service type {requested_type}."
        )

    return asset, service, did, consumer_address, token_address
def fileinfo():
    """Retrieves Content-Type and Content-Length from the given URL or asset.
    Supports a payload of either url or did.
    This can be used by the publisher of an asset to check basic information
    about the URL(s). For now, this information consists of the Content-Type
    and Content-Length of the request, using primarily OPTIONS, with fallback
    to GET. In the future, we will add a hash to make sure that the file was
    not tampered with at consumption time.
    ---
    tags:
      - services
    responses:
      200:
        description: the URL(s) could be analysed (returns the result).
      400:
        description: the URL(s) could not be analysed (bad request).

    return: list of file info (index, valid, contentLength, contentType)
    """
    data = get_request_data(request)
    did = data.get("did")
    url = data.get("url")

    if did:
        # Resolve the asset and collect every decrypted download URL.
        asset = get_asset_from_metadatastore(get_metadata_url(), did)
        url_list = get_asset_download_urls(
            asset, provider_wallet, config_file=app.config["CONFIG_FILE"])
    else:
        url_list = [get_download_url(url, app.config["CONFIG_FILE"])]

    with_checksum = data.get("checksum", False)

    files_info = []
    for index, download_url in enumerate(url_list):
        is_valid, details = check_url_details(
            download_url, with_checksum=with_checksum)
        files_info.append({"index": index, "valid": is_valid, **details})

    return Response(json.dumps(files_info), 200,
                    headers={"content-type": "application/json"})
def process_consume_request(data: dict, method: str,
                            user_nonce: UserNonce = None,
                            additional_params: list = None,
                            require_signature: bool = True):
    """Validate a consume request payload and resolve its asset and service.

    :param data: request payload dict
    :param method: endpoint name, used in attribute-validation error messages
    :param user_nonce: nonce store; required when `require_signature` is True
    :param additional_params: extra attribute names to require in `data`
    :param require_signature: when True, also require and verify 'signature'
    :return: tuple (asset, service, did, consumer_address, token_address)
    :raises AssertionError: missing attributes, mismatched service type, or
        missing `user_nonce` while a signature is required
    :raises ValueError: when the signature is invalid (from verify_signature)
    """
    required_attributes = [
        'documentId', 'serviceId', 'serviceType', 'dataToken', 'consumerAddress'
    ]
    if additional_params:
        required_attributes += additional_params
    if require_signature:
        required_attributes.append('signature')
    msg, status = check_required_attributes(required_attributes, data, method)
    if msg:
        raise AssertionError(msg)

    did = data.get('documentId')
    token_address = data.get('dataToken')
    consumer_address = data.get('consumerAddress')
    service_id = data.get('serviceId')
    service_type = data.get('serviceType')

    # grab asset for did from the metadatastore associated with the Data Token address
    asset = get_asset_from_metadatastore(get_metadata_url(), did)
    service = ServiceAgreement.from_ddo(service_type, asset)
    if service.type != service_type:
        raise AssertionError(
            f'Requested service with id {service_id} has type {service.type} which '
            f'does not match the requested service type {service_type}.')

    if require_signature:
        # `assert` is stripped under `python -O`; raise explicitly so the
        # nonce-store check always runs (same exception type as before).
        if not user_nonce:
            raise AssertionError(
                '`user_nonce` is required when signature is required.')
        # Raises ValueError when signature is invalid
        signature = data.get('signature')
        verify_signature(consumer_address, signature, did,
                         user_nonce.get_nonce(consumer_address))

    return asset, service, did, consumer_address, token_address
def serialize(self):
    """Serialize the algorithm input into the stage-algorithm dict.

    Raw algorithms (no DID) are passed through from algorithmMeta; DID-based
    algorithms are resolved from the metadata store, with either a direct
    download url or a `remote` descriptor pointing at the serving provider.

    :return: dict with keys id/rawcode/container plus url or remote
    """
    raw_meta = self.algo_data.get("algorithmMeta")
    algo_did = self.algo_data.get("algorithmDid")
    transfer_tx_id = self.algo_data.get("algorithmTransferTxId")

    # algorithmMeta may arrive JSON-encoded; normalize to a dict.
    if raw_meta and isinstance(raw_meta, str):
        raw_meta = json.loads(raw_meta)

    if algo_did is None:
        # Inline (raw) algorithm: no DID, contents come from the meta payload.
        return {
            "id": "",
            "url": raw_meta.get("url"),
            "rawcode": raw_meta.get("rawcode"),
            "container": raw_meta.get("container"),
        }

    algo_asset = get_asset_from_metadatastore(get_metadata_url(), algo_did)

    result = {"id": algo_did, "rawcode": "", "container": None}
    asset_urls = get_asset_url_at_index(0, algo_asset, self.provider_wallet)
    if asset_urls:
        # This provider can serve the algorithm files directly.
        result["url"] = asset_urls
    else:
        # Another provider serves the files; point the operator at it.
        result["remote"] = {
            "serviceEndpoint": self.algo_service.service_endpoint,
            "txId": transfer_tx_id,
            "serviceIndex": self.algo_service.index,
        }
    result["container"] = algo_asset.metadata["main"]["algorithm"]["container"]
    return result
def validate(self):
    """Validate one compute-job input item and populate self.validated_inputs.

    Checks, in order: required keys, serviceId presence, asset resolution,
    service lookup and type, provider ownership of the download URLs, and
    (for compute services) the algorithm. On failure sets self.error and
    returns False; on success delegates to self.validate_usage().
    """
    required_keys = ["documentId", "transferTxId"]
    for req_item in required_keys:
        if not self.data.get(req_item):
            self.error = f"No {req_item} in input item."
            return False

    # serviceId of 0 is valid, so a plain truthiness test is not enough.
    if not self.data.get("serviceId") and self.data.get("serviceId") != 0:
        self.error = "No serviceId in input item."
        return False

    self.did = self.data.get("documentId")
    try:
        self.asset = get_asset_from_metadatastore(get_metadata_url(),
                                                  self.did)
    except ValueError:
        self.error = f"Asset for did {self.did} not found."
        return False

    self.service = get_service_at_index(self.asset, self.data["serviceId"])
    if not self.service:
        self.error = f"Service index {self.data['serviceId']} not found."
        return False

    if self.service.type not in [
            ServiceTypes.ASSET_ACCESS,
            ServiceTypes.CLOUD_COMPUTE,
    ]:
        self.error = "Services in input can only be access or compute."
        return False

    # Index 0 is the main (job-driving) asset and must be a compute service.
    if self.service.type != ServiceTypes.CLOUD_COMPUTE and self.index == 0:
        self.error = "Service for main asset must be compute."
        return False

    asset_urls = get_asset_download_urls(
        self.asset,
        self.provider_wallet,
        config_file=app.config["CONFIG_FILE"])
    # Compute services must be served by this very provider (urls resolvable).
    if self.service.type == ServiceTypes.CLOUD_COMPUTE and not asset_urls:
        self.error = "Services in input with compute type must be in the same provider you are calling."
        return False

    if self.service.type == ServiceTypes.CLOUD_COMPUTE:
        if not self.validate_algo():
            return False

    if asset_urls:
        # Local asset: pass the resolved download urls straight through.
        self.validated_inputs = dict({
            "index": self.index,
            "id": self.did,
            "url": asset_urls
        })
    else:
        # Remote asset: reference it by transfer tx and service index.
        self.validated_inputs = dict({
            "index": self.index,
            "id": self.did,
            "remote": {
                "txid": self.data.get("transferTxId"),
                "serviceIndex": self.service.index,
            },
        })

    return self.validate_usage()
def _build_and_validate_algo(self, algo_data):
    """Returns False if invalid, otherwise sets the validated_algo_dict attribute.

    For DID-based algorithms (algorithmDid set, no algorithmMeta) this also
    verifies the asset is an algorithm, resolves the purchased service from
    the on-chain OrderStarted event, validates the order/transfer, and
    records the consume request. Raw (meta-only) algorithms skip the
    on-chain checks and go straight to serialization.
    """
    algorithm_did = algo_data.get("algorithmDid")
    self.algo_service = None

    if algorithm_did and not algo_data.get("algorithmMeta"):
        algorithm_token_address = algo_data.get("algorithmDataToken")
        algorithm_tx_id = algo_data.get("algorithmTransferTxId")
        algo = get_asset_from_metadatastore(get_metadata_url(),
                                            algorithm_did)
        try:
            asset_type = algo.metadata["main"]["type"]
        # NOTE(review): a missing "main"/"type" key raises KeyError, which
        # this ValueError handler would NOT catch — confirm intended.
        except ValueError:
            asset_type = None
        if asset_type != "algorithm":
            self.error = f"DID {algorithm_did} is not a valid algorithm"
            return False

        try:
            # NOTE(review): DataToken is constructed with the consumer's
            # address rather than algorithm_token_address — looks suspicious;
            # verify against the DataToken constructor's contract.
            dt = DataToken(self.consumer_address)
            tx_receipt = dt.get_tx_receipt(algorithm_tx_id)
            event_logs = dt.events.OrderStarted().processReceipt(
                tx_receipt)
            # First OrderStarted event carries the purchased serviceId.
            order_log = event_logs[0] if event_logs else None
            algo_service_id = order_log.args.serviceId
            self.algo_service = get_service_at_index(algo, algo_service_id)
            if self.algo_service.type == ServiceTypes.CLOUD_COMPUTE:
                # Compute-typed algorithm services must be served by this
                # provider (i.e. their urls must be resolvable here).
                asset_urls = get_asset_download_urls(
                    algo,
                    self.provider_wallet,
                    config_file=app.config["CONFIG_FILE"],
                )
                if not asset_urls:
                    self.error = "Services in algorithm with compute type must be in the same provider you are calling."
                    return False
            if not self.algo_service:
                self.error = "Failed to retrieve purchased algorithm service id."
                return False

            # Validate payment order, ensure the transfer is not reused for
            # another service, then record this consume request.
            _tx, _order_log, _transfer_log = validate_order(
                self.consumer_address,
                algorithm_token_address,
                float(self.algo_service.get_cost()),
                algorithm_tx_id,
                add_0x_prefix(did_to_id(algorithm_did))
                if algorithm_did.startswith("did:") else algorithm_did,
                self.algo_service.index,
            )
            validate_transfer_not_used_for_other_service(
                algorithm_did,
                self.algo_service.index,
                algorithm_tx_id,
                self.consumer_address,
                algorithm_token_address,
            )
            record_consume_request(
                algorithm_did,
                self.algo_service.index,
                algorithm_tx_id,
                self.consumer_address,
                algorithm_token_address,
                self.algo_service.get_cost(),
            )
        # Broad catch: any on-chain lookup/validation failure is reported as
        # a single user-facing error rather than a 500.
        except Exception:
            self.error = "Algorithm is already in use or can not be found on chain."
            return False

    algorithm_dict = StageAlgoSerializer(self.consumer_address,
                                         self.provider_wallet, algo_data,
                                         self.algo_service).serialize()
    valid, error_msg = validate_formatted_algorithm_dict(
        algorithm_dict, algorithm_did)
    if not valid:
        self.error = error_msg
        return False

    self.validated_algo_dict = algorithm_dict
    return True