def fileinfo(): """Retrieves Content-Type and Content-Length from the given URL or asset. Supports a payload of either url or did. This can be used by the publisher of an asset to check basic information about the URL(s). For now, this information consists of the Content-Type and Content-Length of the request, using primarily OPTIONS, with fallback to GET. In the future, we will add a hash to make sure that the file was not tampered with at consumption time. tags: - services responses: 200: description: the URL(s) could be analysed (returns the result). 400: description: the URL(s) could not be analysed (bad request). return: list of file info (index, valid, contentLength, contentType) """ required_attributes = ['url', 'did'] data = get_request_data(request) msg, status = check_at_least_one_attribute(required_attributes, data, 'checkURL') if msg: return jsonify(error=msg), status did = data.get('did') if did and not did.startswith('did:op:'): return jsonify(error=f'Invalid `did` {did}.'), 400 url = data.get('url') if did: asset = get_asset_from_metadatastore(get_metadata_url(), did) url_list = get_asset_download_urls( asset, provider_wallet, config_file=app.config['CONFIG_FILE']) else: url_list = [ get_download_url(url, app.config['CONFIG_FILE']), ] files_info = [] for i, url in enumerate(url_list): valid, details = check_url_details(url) info = {'index': i, 'valid': valid} info.update(details) files_info.append(info) return Response(json.dumps(files_info), 200, headers={'content-type': 'application/json'})
def fileinfo(): """Retrieves Content-Type and Content-Length from the given URL or asset. Supports a payload of either url or did. This can be used by the publisher of an asset to check basic information about the URL(s). For now, this information consists of the Content-Type and Content-Length of the request, using primarily OPTIONS, with fallback to GET. In the future, we will add a hash to make sure that the file was not tampered with at consumption time. --- tags: - services responses: 200: description: the URL(s) could be analysed (returns the result). 400: description: the URL(s) could not be analysed (bad request). return: list of file info (index, valid, contentLength, contentType) """ data = get_request_data(request) did = data.get("did") url = data.get("url") if did: asset = get_asset_from_metadatastore(get_metadata_url(), did) url_list = get_asset_download_urls( asset, provider_wallet, config_file=app.config["CONFIG_FILE"]) else: url_list = [get_download_url(url, app.config["CONFIG_FILE"])] with_checksum = data.get("checksum", False) files_info = [] for i, url in enumerate(url_list): valid, details = check_url_details(url, with_checksum=with_checksum) info = {"index": i, "valid": valid} info.update(details) files_info.append(info) return Response(json.dumps(files_info), 200, headers={"content-type": "application/json"})
def computeStart(): """Call the execution of a workflow. --- tags: - services consumes: - application/json parameters: - name: signature in: query description: Signature of (consumerAddress+jobId+documentId) to verify the consumer of this asset/compute job. The signature uses ethereum based signing method (see https://github.com/ethereum/EIPs/pull/683) type: string - name: consumerAddress in: query description: The consumer ethereum address. required: true type: string - name: algorithmDid in: query description: The DID of the algorithm Asset to be executed required: false type: string - name: algorithmMeta in: query description: json object that define the algorithm attributes and url or raw code required: false type: json string - name: output in: query description: json object that define the output section required: true type: json string responses: 200: description: Call to the operator-service was successful. 400: description: One of the required attributes is missing. 401: description: Consumer signature is invalid or failed verification 500: description: General server error """ data = get_request_data(request) try: asset, service, did, consumer_address, token_address = process_consume_request( # noqa data, 'compute_start_job', additional_params=["transferTxId", "output"], require_signature=False) service_id = data.get('serviceId') service_type = data.get('serviceType') signature = data.get('signature') tx_id = data.get("transferTxId") # Verify that the number of required tokens has been # transferred to the provider's wallet. _tx, _order_log, _transfer_log = validate_order( consumer_address, token_address, float(service.get_cost()), tx_id, add_0x_prefix(did_to_id(did)) if did.startswith('did:') else did, service_id) validate_transfer_not_used_for_other_service(did, service_id, tx_id, consumer_address, token_address) record_consume_request(did, service_id, tx_id, consumer_address, token_address, service.get_cost()) algorithm_did = data.get('algorithmDid') algorithm_token_address = data.get('algorithmDataToken') algorithm_meta = data.get('algorithmMeta') algorithm_tx_id = data.get('algorithmTransferTxId') output_def = data.get('output', dict()) assert service_type == ServiceTypes.CLOUD_COMPUTE # Validate algorithm choice if not (algorithm_meta or algorithm_did): msg = f'Need an `algorithmMeta` or `algorithmDid` to run, otherwise don\'t bother.' # noqa logger.error(msg, exc_info=1) return jsonify(error=msg), 400 # algorithmDid also requires algorithmDataToken # and algorithmTransferTxId if algorithm_did: if not (algorithm_token_address and algorithm_tx_id): msg = ( f'Using `algorithmDid` requires the `algorithmDataToken` and ' # noqa f'`algorithmTransferTxId` values in the request payload. ' f'algorithmDataToken is the DataToken address for the algorithm asset. ' # noqa f'algorithmTransferTxId is the transaction id (hash) of transferring ' # noqa f'data tokens from consumer wallet to this providers wallet.' ) logger.error(msg, exc_info=1) return jsonify(error=msg), 400 # Consumer signature original_msg = f'{consumer_address}{did}' verify_signature(consumer_address, signature, original_msg, user_nonce.get_nonce(consumer_address)) ######################## # Valid service? 
        if service is None:
            return jsonify(
                error=f'This DID has no compute service {did}.'), 400

        #########################
        # Check privacy
        privacy_options = service.main.get('privacy', {})
        if (algorithm_meta and
                privacy_options.get('allowRawAlgorithm', True) is False):
            return jsonify(
                error=f'cannot run raw algorithm on this did {did}.'), 400

        trusted_algorithms = privacy_options.get('trustedAlgorithms', [])
        if (algorithm_did and trusted_algorithms and
                algorithm_did not in trusted_algorithms):
            return jsonify(
                error=f'this algorithm is not trusted for this did {did}.'), 400

        #########################
        # Validate ALGORITHM meta
        if algorithm_meta:
            algorithm_meta = json.loads(algorithm_meta) if isinstance(
                algorithm_meta, str) else algorithm_meta

        algorithm_dict = build_stage_algorithm_dict(
            consumer_address, algorithm_did, algorithm_token_address,
            algorithm_tx_id, algorithm_meta, provider_wallet)
        error_msg, status_code = validate_algorithm_dict(
            algorithm_dict, algorithm_did)
        if error_msg:
            return jsonify(error=error_msg), status_code

        #########################
        # INPUT
        asset_urls = get_asset_download_urls(
            asset, provider_wallet, config_file=app.config['CONFIG_FILE'])
        if not asset_urls:
            return jsonify(error=f'cannot get url(s) in input did {did}.'), 400

        input_dict = dict({'index': 0, 'id': did, 'url': asset_urls})

        #########################
        # OUTPUT
        if output_def:
            output_def = json.loads(output_def) if isinstance(
                output_def, str) else output_def
        output_dict = build_stage_output_dict(
            output_def, asset, consumer_address, provider_wallet)

        #########################
        # STAGE
        stage = build_stage_dict(input_dict, algorithm_dict, output_dict)

        #########################
        # WORKFLOW
        workflow = dict({'stages': list([stage])})

        # workflow is ready, push it to operator
        logger.info('Sending: %s', workflow)

        msg_to_sign = f'{provider_wallet.address}{did}'
        msg_hash = add_ethereum_prefix_and_hash_msg(msg_to_sign)
        payload = {
            'workflow': workflow,
            'providerSignature': Web3Helper.sign_hash(msg_hash, provider_wallet),
            'documentId': did,
            'agreementId': tx_id,
            'owner': consumer_address,
            'providerAddress': provider_wallet.address
        }
        response = requests_session.post(
            get_compute_endpoint(),
            data=json.dumps(payload),
            headers={'content-type': 'application/json'})
        user_nonce.increment_nonce(consumer_address)
        return Response(
            response.content,
            response.status_code,
            headers={'content-type': 'application/json'})

    except InvalidSignatureError as e:
        msg = f'Consumer signature failed verification: {e}'
        logger.error(msg, exc_info=1)
        return jsonify(error=msg), 401

    except (ValueError, KeyError, Exception) as e:
        logger.error(f'Error- {str(e)}', exc_info=1)
        return jsonify(error=f'Error : {str(e)}'), 500
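# --- Example (illustrative, not part of the handler above) -----------------
# A sketch of the consumer-side payload for computeStart(). The field names
# mirror the data.get(...) calls in the handler; the endpoint path and every
# value are placeholders/assumptions about a concrete deployment.
def _example_compute_start_request():
    import json
    import requests

    compute_url = "http://localhost:8030/api/v1/services/compute"  # assumed route

    start_payload = {
        "documentId": "did:op:0123...",             # dataset DID
        "serviceId": 1,                              # index of the compute service
        "serviceType": "compute",
        "consumerAddress": "0xConsumer...",
        "transferTxId": "0xOrderTxHash...",          # data token order tx
        "signature": "0x...",                        # signs consumerAddress + did (+ nonce)
        "algorithmDid": "did:op:4567...",
        "algorithmDataToken": "0xAlgoDataToken...",
        "algorithmTransferTxId": "0xAlgoOrderTx...",
        # `output` is parsed with json.loads if it arrives as a string;
        # the keys inside are illustrative only.
        "output": json.dumps({"publishOutput": False}),
    }

    response = requests.post(compute_url, json=start_payload)
    return response.status_code, response.text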
def validate(self):
    required_keys = ["documentId", "transferTxId"]

    for req_item in required_keys:
        if not self.data.get(req_item):
            self.error = f"No {req_item} in input item."
            return False

    if not self.data.get("serviceId") and self.data.get("serviceId") != 0:
        self.error = "No serviceId in input item."
        return False

    self.did = self.data.get("documentId")
    try:
        self.asset = get_asset_from_metadatastore(get_metadata_url(), self.did)
    except ValueError:
        self.error = f"Asset for did {self.did} not found."
        return False

    self.service = get_service_at_index(self.asset, self.data["serviceId"])

    if not self.service:
        self.error = f"Service index {self.data['serviceId']} not found."
        return False

    if self.service.type not in [
        ServiceTypes.ASSET_ACCESS,
        ServiceTypes.CLOUD_COMPUTE,
    ]:
        self.error = "Services in input can only be access or compute."
        return False

    if self.service.type != ServiceTypes.CLOUD_COMPUTE and self.index == 0:
        self.error = "Service for main asset must be compute."
        return False

    asset_urls = get_asset_download_urls(
        self.asset, self.provider_wallet,
        config_file=app.config["CONFIG_FILE"])

    if self.service.type == ServiceTypes.CLOUD_COMPUTE and not asset_urls:
        self.error = "Services in input with compute type must be in the same provider you are calling."
        return False

    if self.service.type == ServiceTypes.CLOUD_COMPUTE:
        if not self.validate_algo():
            return False

    if asset_urls:
        self.validated_inputs = dict({
            "index": self.index,
            "id": self.did,
            "url": asset_urls
        })
    else:
        self.validated_inputs = dict({
            "index": self.index,
            "id": self.did,
            "remote": {
                "txid": self.data.get("transferTxId"),
                "serviceIndex": self.service.index,
            },
        })

    return self.validate_usage()
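# --- Example (illustrative) -------------------------------------------------
# The two shapes of `validated_inputs` that validate() above can emit, with
# placeholder values.
_example_validated_inputs_local = {
    "index": 0,
    "id": "did:op:0123...",
    "url": ["https://example.com/part-0.csv"],   # files resolvable by this provider
}

_example_validated_inputs_remote = {
    "index": 1,
    "id": "did:op:4567...",
    "remote": {
        "txid": "0xOrderTxHash...",              # order tx forwarded to the other provider
        "serviceIndex": 3,
    },
}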
def _build_and_validate_algo(self, algo_data):
    """Returns False if invalid, otherwise sets the validated_algo_dict attribute."""
    algorithm_did = algo_data.get("algorithmDid")
    self.algo_service = None

    if algorithm_did and not algo_data.get("algorithmMeta"):
        algorithm_token_address = algo_data.get("algorithmDataToken")
        algorithm_tx_id = algo_data.get("algorithmTransferTxId")

        algo = get_asset_from_metadatastore(get_metadata_url(), algorithm_did)

        try:
            asset_type = algo.metadata["main"]["type"]
        except ValueError:
            asset_type = None

        if asset_type != "algorithm":
            self.error = f"DID {algorithm_did} is not a valid algorithm"
            return False

        try:
            dt = DataToken(self.consumer_address)
            tx_receipt = dt.get_tx_receipt(algorithm_tx_id)
            event_logs = dt.events.OrderStarted().processReceipt(tx_receipt)
            order_log = event_logs[0] if event_logs else None
            algo_service_id = order_log.args.serviceId
            self.algo_service = get_service_at_index(algo, algo_service_id)

            if self.algo_service.type == ServiceTypes.CLOUD_COMPUTE:
                asset_urls = get_asset_download_urls(
                    algo,
                    self.provider_wallet,
                    config_file=app.config["CONFIG_FILE"],
                )

                if not asset_urls:
                    self.error = "Services in algorithm with compute type must be in the same provider you are calling."
                    return False

            if not self.algo_service:
                self.error = "Failed to retrieve purchased algorithm service id."
                return False

            _tx, _order_log, _transfer_log = validate_order(
                self.consumer_address,
                algorithm_token_address,
                float(self.algo_service.get_cost()),
                algorithm_tx_id,
                add_0x_prefix(did_to_id(algorithm_did))
                if algorithm_did.startswith("did:")
                else algorithm_did,
                self.algo_service.index,
            )

            validate_transfer_not_used_for_other_service(
                algorithm_did,
                self.algo_service.index,
                algorithm_tx_id,
                self.consumer_address,
                algorithm_token_address,
            )

            record_consume_request(
                algorithm_did,
                self.algo_service.index,
                algorithm_tx_id,
                self.consumer_address,
                algorithm_token_address,
                self.algo_service.get_cost(),
            )
        except Exception:
            self.error = "Algorithm is already in use or can not be found on chain."
            return False

    algorithm_dict = StageAlgoSerializer(
        self.consumer_address, self.provider_wallet, algo_data,
        self.algo_service).serialize()

    valid, error_msg = validate_formatted_algorithm_dict(
        algorithm_dict, algorithm_did)

    if not valid:
        self.error = error_msg
        return False

    self.validated_algo_dict = algorithm_dict

    return True
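# --- Example (illustrative) -------------------------------------------------
# The two `algo_data` payload shapes that _build_and_validate_algo() above
# distinguishes. The outer keys come from the algo_data.get(...) calls; the
# keys inside `algorithmMeta` are assumptions, not dictated by the method.
_example_algo_data_by_did = {
    "algorithmDid": "did:op:4567...",
    "algorithmDataToken": "0xAlgoDataToken...",   # needed to verify the order on chain
    "algorithmTransferTxId": "0xAlgoOrderTx...",
}

_example_algo_data_raw = {
    # With algorithmMeta present, the on-chain order checks in the
    # `algorithm_did and not algorithmMeta` branch are skipped.
    "algorithmMeta": {
        "url": "https://example.com/algo.py",
        "language": "python",
        "container": {"image": "python", "tag": "3.9", "entrypoint": "python $ALGO"},
    },
}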