def _sign(self, agreement_id, did, consumer_account, service_index):
    """ Sign a service agreement.

    Resolves the asset DDO, rebuilds the on-chain agreement hash for the
    selected service, signs it with the consumer account and verifies the
    signature recovers to the consumer address before returning it.

    :param agreement_id: 32 bytes identifier created by the consumer and will be used
        on-chain for the executed agreement.
    :param did: str representation fo the asset DID. Use this to retrieve the asset DDO.
    :param consumer_account: Account instance of the consumer
    :param service_index: int identifies the specific service in the ddo to use in this
        agreement.
    :return: signature
    """
    resolved_asset = self._asset_resolver.resolve(did)
    selected_service = resolved_asset.get_service_by_index(service_index)
    owner_address = self._keeper.did_registry.get_did_owner(resolved_asset.asset_id)

    # Hash covers the agreement id, asset, consumer and publisher so the
    # signature is bound to this exact agreement.
    agreement_hash = selected_service.get_service_agreement_hash(
        agreement_id,
        resolved_asset.asset_id,
        consumer_account.address,
        owner_address,
        self._keeper,
    )

    signature = self._keeper.sign_hash(
        add_ethereum_prefix_and_hash_msg(agreement_hash), consumer_account)

    # Sanity check: the signature must recover to the signing consumer.
    recovered = self._keeper.personal_ec_recover(agreement_hash, signature)
    assert recovered == consumer_account.address

    logger.debug(f'agreement-signature={signature}, agreement-hash={agreement_hash}')
    return signature
def generate_token(account):
    """Build a timestamped auth token: sign `<message>\\n<timestamp>` and
    return `<signature>-<timestamp>`.

    :param account: Account instance used to sign the token message
    :return: str token of the form ``<signed-hash>-<unix-timestamp>``
    """
    base_message = get_config().auth_token_message or "Nevermined Protocol Authentication"
    timestamp = int(datetime.now().timestamp())
    # The timestamp is embedded in the signed payload AND appended to the
    # token so a verifier can re-derive the exact message that was signed.
    message = f'{base_message}\n{timestamp}'
    hashed = add_ethereum_prefix_and_hash_msg(message)
    signed = keeper_instance().sign_hash(hashed, account)
    return f'{signed}-{timestamp}'
def test_consume(client, provider_account, consumer_account):
    """End-to-end consume flow: for every configured decryption method,
    register an asset, place and fund an access agreement, wait for the
    on-chain condition events, then GET the consume endpoint and expect 200.
    """
    endpoint = BaseURLs.ASSETS_URL + '/consume'
    for method in constants.ConfigSections.DECRYPTION_METHODS:
        print('Testing Consume with Authorization Method: ' + method)
        ddo = get_registered_ddo(provider_account, providers=[provider_account.address], auth_service=method)

        # initialize an agreement
        agreement_id = place_order(provider_account, ddo, consumer_account, ServiceTypes.ASSET_ACCESS)
        payload = dict({
            'serviceAgreementId': agreement_id,
            'consumerAddress': consumer_account.address
        })

        print('Provider: ' + provider_account.address)
        print('Consumer: ' + consumer_account.address)

        keeper = keeper_instance()
        # The gateway expects a personal-sign style signature over the
        # (prefixed, hashed) agreement id as proof of the consumer identity.
        agr_id_hash = add_ethereum_prefix_and_hash_msg(agreement_id)
        signature = keeper.sign_hash(agr_id_hash, consumer_account)
        index = 0  # consume the first file of the asset

        # Wait (up to 15s) for the on-chain AgreementCreated event.
        event = keeper.access_template.subscribe_agreement_created(
            agreement_id, 15, None, (), wait=True, from_block=0)
        assert event, "Agreement event is not found, check the keeper node's logs"

        # Top up the consumer so the payment lock below can succeed.
        consumer_balance = keeper.token.get_token_balance(consumer_account.address)
        if consumer_balance < 50:
            keeper.dispenser.request_tokens(50 - consumer_balance, consumer_account)

        sa = ServiceAgreement.from_ddo(ServiceTypes.ASSET_ACCESS, ddo)
        # NOTE(review): `amounts` and `receivers` are not defined in this
        # function — presumably module-level fixtures; verify at module scope.
        lock_payment(agreement_id, ddo.asset_id, sa, amounts, receivers, consumer_account)
        event = keeper.lock_payment_condition.subscribe_condition_fulfilled(
            agreement_id, 15, None, (), wait=True, from_block=0)
        assert event, "Lock reward condition fulfilled event is not found, check the keeper " \
                      "node's logs"

        # Provider grants access; accept either the fulfilled event or a
        # direct permission check (event delivery can race the poll).
        grant_access(agreement_id, ddo, consumer_account, provider_account)
        event = keeper.access_condition.subscribe_condition_fulfilled(
            agreement_id, 15, None, (), wait=True, from_block=0)
        assert event or keeper.access_condition.check_permissions(
            ddo.asset_id, consumer_account.address
        ), f'Failed to get access permission: agreement_id={agreement_id}, ' \
           f'did={ddo.did}, consumer={consumer_account.address}'

        # Consume using url index and signature (let the gateway do the decryption)
        payload['signature'] = signature
        payload['index'] = index
        request_url = endpoint + '?' + '&'.join([f'{k}={v}' for k, v in payload.items()])
        response = client.get(request_url)
        assert response.status == '200 OK'
def get(self, account):
    """Generate a signed auth token for *account*.

    :param account: Account instance signing the token
    :return: hex str the token generated/signed by account, as
        ``<signature>-<timestamp>``; returns None if signing fails.
    """
    message, timestamp = self._get_message_and_time()
    # NOTE(review): the message is keccak-hashed here and then hashed again
    # (with the Ethereum prefix) inside sign_hash's argument — the verifier
    # must apply the same double hashing; confirm against the token checker.
    digest = Web3Provider.get_web3().keccak(text=message)
    try:
        signature = self._keeper.sign_hash(
            add_ethereum_prefix_and_hash_msg(digest), account)
    except Exception as err:
        # Best-effort: log and fall through, implicitly returning None.
        logging.error(f'Error signing token: {str(err)}')
    else:
        return f'{signature}-{timestamp}'
def test_publish(client):
    """Publish-endpoint test: encrypt a document twice, first authorizing
    with a raw hash signature, then with a generated auth token; both
    responses must be hex-encoded ('0x'-prefixed) encrypted payloads.
    """
    keeper = keeper_instance()
    account = get_provider_account()
    endpoint = BaseURLs.ASSETS_URL + '/publish'

    # Derive a fresh asset id from a random seed, owner-scoped on-chain.
    did_seed = generate_new_id()
    asset_id = keeper.did_registry.hash_did(did_seed, account.address)
    # did = DID.did({"0": str(uuid.uuid4())})
    # asset_id = did_to_id(did)
    test_urls = ['url 00', 'url 11', 'url 22']
    urls_json = json.dumps(test_urls)

    # Sign the prefixed hash of the asset id and verify both recovery paths
    # (raw recoverHash on the prefixed hash vs personal_ec_recover on the
    # unprefixed asset id) yield the signer address.
    asset_id_hash = add_ethereum_prefix_and_hash_msg(asset_id)
    signature = keeper.sign_hash(asset_id_hash, account)
    address = web3().eth.account.recoverHash(asset_id_hash, signature=signature)
    assert address.lower() == account.address.lower()
    address = keeper.personal_ec_recover(asset_id, signature)
    assert address.lower() == account.address.lower()

    payload = {
        'documentId': asset_id,
        'signature': signature,
        'document': urls_json,
        'publisherAddress': account.address
    }
    post_response = client.post(endpoint, data=json.dumps(payload), content_type='application/json')
    encrypted_url = post_response.data.decode('utf-8')
    assert encrypted_url.startswith('0x')

    # publish using auth token
    signature = generate_token(account)
    payload['signature'] = signature
    # did = DID.did({"0": str(uuid.uuid4())})
    # asset_id = did_to_id(did)
    # Second publish needs a brand-new document id.
    did_seed = generate_new_id()
    asset_id = keeper.did_registry.hash_did(did_seed, account.address)
    payload['documentId'] = add_0x_prefix(asset_id)
    post_response = client.post(endpoint, data=json.dumps(payload), content_type='application/json')
    encrypted_url = post_response.data.decode('utf-8')
    assert encrypted_url.startswith('0x')
def execute_service(service_agreement_id, service_endpoint, account, workflow_ddo):
    """Invoke the gateway execute endpoint for a workflow.

    Signs the (prefixed, hashed) agreement id with *account*, builds the
    execute URL and POSTs to it.

    :param service_agreement_id: id of the agreement authorizing execution
    :param service_endpoint: base gateway endpoint for the execute call
    :param account: Account instance used to sign the request
    :param workflow_ddo: DDO of the workflow to execute
    :return: the HTTP response from the gateway
    """
    keeper = Keeper.get_instance()
    prefixed_hash = add_ethereum_prefix_and_hash_msg(service_agreement_id)
    signature = keeper.sign_hash(prefixed_hash, account)
    execute_url = Gateway._create_execute_url(
        service_endpoint, service_agreement_id, account, workflow_ddo.did, signature)
    logger.info(f'invoke execute endpoint with this url: {execute_url}')
    return Gateway._http_client.post(execute_url)
def test_sign_and_recover(web3_instance):
    """Round-trip signature/recovery tests covering three signing paths:
    Keeper.sign_hash, raw eth.account.signHash with the private key, and a
    fixed wallet-style (personal_sign) signature.
    """
    w3 = web3_instance
    account = get_publisher_account()
    msg = 'testing-signature-and-recovery-of-signer-address'
    msg_hash = w3.keccak(text=msg)

    # Path 1: sign and recover via the Keeper wrappers.
    signature = Keeper.sign_hash(msg_hash, account)
    address = w3.toChecksumAddress(Keeper.ec_recover(msg_hash, signature))
    assert address == account.address

    # Signature created on msg with the ethereum prefix. `web3.eth.account.recoverHash` does NOT
    # add any prefixes to the message, so we have to add the prefix before the call.
    address = w3.eth.account.recoverHash(msg_hash, signature=signature)
    assert address == account.address

    # Now do the opposite, sign with eth.account.sign_hash() (using prefixed msg hash),
    # then recover address with Keeper.ec_recover() on the msg hash with no prefix.
    with open(get_resource_path('data', 'publisher_key_file.json')) as kf:
        key = kf.read()
    prvkey = w3.eth.account.decrypt(key, account.password)
    account_sig_prefixed = add_0x_prefix(
        w3.eth.account.signHash(msg_hash, prvkey)['signature'].hex())
    assert Keeper.ec_recover(
        msg_hash, account_sig_prefixed).lower() == account.address.lower()

    # Test specific case where message is signed by some Wallet web3 such as Metamask or
    # burner wallet. Such signature uses the `web3.personal` `sign` method which adds
    # `Ethereum Signed Message` prefix in a generic way, see `add_ethereum_prefix_and_hash_msg` for details.
    sig = '0xa9e78d2c088c0b17a8c35b69e0dfa774692ccabed570e40502795bd41f561cf7677ed02bf4ee7967a55979d585bbf203b4a490e1d747e5a4d60a50859d816ac51b'
    publisher_address = '0x903322C7E45A60d7c8C3EA236c5beA9Af86310c7'
    doc_id = '028faa498d154388a89dc0dea908a4e27700920217a44abe8f1cdd64953125b8'
    prefixed_hash = add_ethereum_prefix_and_hash_msg(doc_id)
    # All three recovery routes must agree on the wallet address.
    recovered_address = w3.eth.account.recoverHash(prefixed_hash, signature=sig)
    assert recovered_address == publisher_address
    recovered_address = Keeper.ec_recover(prefixed_hash, sig)
    assert recovered_address == publisher_address
    recovered_address = Keeper.personal_ec_recover(doc_id, sig)
    assert recovered_address == publisher_address
def create(self, metadata, publisher_account, service_descriptors=None, providers=None,
           authorization_type=ServiceAuthorizationTypes.PSK_RSA, use_secret_store=False,
           activity_id=None, attributes=None, asset_rewards=None, cap=None, royalties=None):
    """
    Register an asset in both the keeper's DIDRegistry (on-chain) and in the Metadata store.

    :param metadata: dict conforming to the Metadata accepted by Nevermined Protocol.
    :param publisher_account: Account of the publisher registering this asset
    :param service_descriptors: list of ServiceDescriptor tuples of length 2.
        The first item must be one of ServiceTypes and the second item is a dict of
        parameters and values required by the service
    :param providers: list of addresses of providers of this asset (a provider is an
        ethereum account that is authorized to provide asset services)
    :param authorization_type: str indicate the authorization type that is going to be used
        to encrypt the urls. SecretStore, PSK-RSA and PSK-ECDSA are supported.
    :param use_secret_store: bool indicate whether to use the secret store directly for
        encrypting urls (Uses Gateway provider service if set to False)
    :param activity_id: identifier of the activity creating the new entity
    :param attributes: attributes associated with the action
    :param asset_rewards: rewards distribution including the amounts and the receivers.
        Defaults to ``{"_amounts": [], "_receivers": []}``.
    :param cap: max cap of nfts that can be minted for the asset
    :param royalties: royalties in the secondary market going to the original creator
    :return: DDO instance, or None if on-chain registration or metadata publishing fails
    """
    # Fix for the former mutable default argument: build a fresh dict per call
    # so one caller's rewards can never leak into another call.
    if asset_rewards is None:
        asset_rewards = {"_amounts": [], "_receivers": []}

    assert isinstance(
        metadata, dict), f'Expected metadata of type dict, got {type(metadata)}'

    # copy metadata so we don't change the original
    metadata_copy = copy.deepcopy(metadata)

    # Create a DDO object
    ddo = DDO()
    gateway = GatewayProvider.get_gateway()
    ddo_service_endpoint = self._get_metadata_provider().get_service_endpoint()
    metadata_service_desc = ServiceDescriptor.metadata_service_descriptor(
        metadata_copy, ddo_service_endpoint)

    if metadata_copy['main']['type'] == 'dataset' or metadata_copy['main'][
            'type'] == 'algorithm':
        access_service_attributes = self._build_access(
            metadata_copy, publisher_account, asset_rewards)
        if not service_descriptors:
            # No descriptors supplied: build authorization + access services.
            if authorization_type == ServiceAuthorizationTypes.PSK_RSA:
                service_descriptors = [
                    ServiceDescriptor.authorization_service_descriptor(
                        self._build_authorization(
                            authorization_type,
                            public_key=gateway.get_rsa_public_key(self._config)),
                        gateway.get_access_endpoint(self._config))
                ]
            elif authorization_type == ServiceAuthorizationTypes.PSK_ECDSA:
                service_descriptors = [
                    ServiceDescriptor.authorization_service_descriptor(
                        self._build_authorization(
                            authorization_type,
                            public_key=gateway.get_ecdsa_public_key(self._config)),
                        gateway.get_access_endpoint(self._config))
                ]
            else:
                # SecretStore authorization.
                service_descriptors = [
                    ServiceDescriptor.authorization_service_descriptor(
                        self._build_authorization(authorization_type, threshold=0),
                        self._config.secret_store_url)
                ]
            service_descriptors += [
                ServiceDescriptor.access_service_descriptor(
                    access_service_attributes,
                    gateway.get_access_endpoint(self._config))
            ]
        else:
            # Descriptors supplied: only add what is missing.
            service_types = set(map(lambda x: x[0], service_descriptors))
            if ServiceTypes.AUTHORIZATION not in service_types:
                if authorization_type == ServiceAuthorizationTypes.PSK_RSA:
                    service_descriptors += [
                        ServiceDescriptor.authorization_service_descriptor(
                            self._build_authorization(
                                authorization_type,
                                public_key=gateway.get_rsa_public_key(self._config)),
                            gateway.get_access_endpoint(self._config))
                    ]
                elif authorization_type == ServiceAuthorizationTypes.PSK_ECDSA:
                    service_descriptors += [
                        ServiceDescriptor.authorization_service_descriptor(
                            self._build_authorization(
                                authorization_type,
                                public_key=gateway.get_ecdsa_public_key(self._config)),
                            gateway.get_access_endpoint(self._config))
                    ]
                else:
                    service_descriptors += [
                        ServiceDescriptor.authorization_service_descriptor(
                            self._build_authorization(authorization_type, threshold=0),
                            self._config.secret_store_url)
                    ]
            else:
                service_descriptors += [
                    ServiceDescriptor.access_service_descriptor(
                        access_service_attributes,
                        gateway.get_access_endpoint(self._config))
                ]
    else:
        # Workflows get no extra authorization/access descriptors.
        if not service_descriptors:
            service_descriptors = []
        logger.info('registering a workflow.')

    # Add all services to ddo
    service_descriptors = [metadata_service_desc] + service_descriptors
    services = ServiceFactory.build_services(service_descriptors)
    checksums = dict()
    for service in services:
        checksums[str(service.index)] = checksum(service.main)

    # Adding proof to the ddo.
    ddo.add_proof(checksums, publisher_account)

    # Generating the did and adding to the ddo.
    did = ddo.assign_did(DID.did(ddo.proof['checksum']))
    logger.debug(f'Generating new did: {did}')

    # Check if it's already registered first!
    if did in self._get_metadata_provider().list_assets():
        raise DIDAlreadyExist(
            f'Asset id {did} is already registered to another asset.')

    for service in services:
        if service.type == ServiceTypes.ASSET_ACCESS:
            # NOTE(review): access_service_attributes is only bound for
            # dataset/algorithm assets; an ASSET_ACCESS service on any other
            # type would raise NameError here — confirm callers never do that.
            access_service = ServiceFactory.complete_access_service(
                did, gateway.get_access_endpoint(self._config),
                access_service_attributes,
                self._keeper.access_template.address,
                self._keeper.escrow_payment_condition.address)
            ddo.add_service(access_service)
        elif service.type == ServiceTypes.METADATA:
            ddo_service_endpoint = service.service_endpoint.replace('{did}', did)
            service.set_service_endpoint(ddo_service_endpoint)
            ddo.add_service(service)
        elif service.type == ServiceTypes.CLOUD_COMPUTE:
            compute_service = ServiceFactory.complete_compute_service(
                did, service.service_endpoint, service.attributes,
                self._keeper.compute_execution_condition.address,
                self._keeper.escrow_payment_condition.address)
            ddo.add_service(compute_service)
        else:
            ddo.add_service(service)

    ddo.proof['signatureValue'] = self._keeper.sign_hash(
        add_ethereum_prefix_and_hash_msg(did_to_id_bytes(did)),
        publisher_account)

    # Add public key and authentication
    ddo.add_public_key(did, publisher_account.address)
    ddo.add_authentication(did, PUBLIC_KEY_TYPE_RSA)

    # Setup metadata service
    # First compute files_encrypted
    if metadata_copy['main']['type'] in ['dataset', 'algorithm']:
        assert metadata_copy['main'][
            'files'], 'files is required in the metadata main attributes.'
        logger.debug('Encrypting content urls in the metadata.')

        if not use_secret_store:
            encrypt_endpoint = gateway.get_encrypt_endpoint(self._config)
            files_encrypted = gateway.encrypt_files_dict(
                metadata_copy['main']['files'], encrypt_endpoint,
                ddo.asset_id, authorization_type)
        else:
            files_encrypted = self._get_secret_store(publisher_account) \
                .encrypt_document(
                    did_to_id(did),
                    json.dumps(metadata_copy['main']['files']),
                )

        # only assign if the encryption worked
        if files_encrypted:
            logger.debug(
                f'Content urls encrypted successfully {files_encrypted}')
            # Plain urls are removed from the metadata; only indices and
            # the encrypted blob remain.
            index = 0
            for file in metadata_copy['main']['files']:
                file['index'] = index
                index = index + 1
                del file['url']
            metadata_copy['encryptedFiles'] = files_encrypted
        else:
            raise AssertionError('Encrypting the files failed.')

    # DDO url and `Metadata` service
    logger.debug(f'Generated ddo and services, DID is {ddo.did},'
                 f' metadata service @{ddo_service_endpoint}.')
    response = None

    # register on-chain
    registered_on_chain = self._keeper.did_registry.register(
        ddo.asset_id,
        checksum=Web3Provider.get_web3().toBytes(hexstr=ddo.asset_id),
        url=ddo_service_endpoint,
        account=publisher_account,
        cap=cap,
        royalties=royalties,
        providers=providers,
        activity_id=activity_id,
        attributes=attributes)
    if registered_on_chain is False:
        logger.warning(f'Registering {did} on-chain failed.')
        return None
    logger.info(f'Successfully registered DDO (DID={did}) on chain.')

    try:
        # publish the new ddo
        response = self._get_metadata_provider().publish_asset_ddo(ddo)
        logger.info('Asset/ddo published successfully in Metadata.')
    except ValueError as ve:
        raise ValueError(
            f'Invalid value to publish in the metadata: {str(ve)}')
    except Exception as e:
        logger.error(f'Publish asset in Metadata failed: {str(e)}')
    if not response:
        return None
    return ddo
def run(args):
    """Pod-publishing entry point: collect the compute job's output files,
    upload them to a MinIO bucket, publish them as a new Nevermined asset,
    record provenance, and transfer ownership to the workflow owner.
    """
    logging.debug(f"script called with args: {args}")

    # setup config
    options = {
        "keeper-contracts": {
            "keeper.url": args.node,
            "secret_store.url": args.secretstore_url,
        },
        "resources": {
            "downloads.path": args.path.as_posix(),
            "metadata.url": args.metadata_url,
            "gateway.url": args.gateway_url,
        },
    }
    config = Config(options_dict=options)
    logging.debug(f"nevermined config: {config}")

    # setup paths
    outputs_path = args.path / "outputs"

    # setup nevermined
    nevermined = Nevermined(config)

    # setup consumer
    # here we need to create a temporary key file from the credentials
    # delete=False so the Account can reopen the file by name later.
    key_file = NamedTemporaryFile("w", delete=False)
    json.dump(args.credentials, key_file)
    key_file.flush()
    key_file.close()
    account = Account(
        Web3.toChecksumAddress(args.credentials["address"]),
        password=args.password,
        key_file=key_file.name,
    )

    # resolve workflow
    workflow = nevermined.assets.resolve(args.workflow)
    logging.info(f"resolved workflow {args.workflow}")
    logging.debug(f"workflow ddo {workflow.as_dictionary()}")
    workflow_owner = nevermined.assets.owner(workflow.did)
    provenance_id = uuid.uuid4()

    # get files to upload
    files = []
    index = 0
    for f in outputs_path.rglob("*"):
        if f.is_file():
            files.append({
                "index": index,
                "name": f.name,
                "path": f.as_posix(),
                "contentType": mimetypes.guess_type(f)[0],
                "contentLength": f.stat().st_size,
            })
            index += 1

    # create bucket
    # NOTE(review): endpoint and credentials are hard-coded (MinIO demo
    # keys, docker bridge IP) — presumably a dev/test environment; confirm.
    minio_client = Minio(
        "172.17.0.1:8060",
        access_key="AKIAIOSFODNN7EXAMPLE",
        secret_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
        secure=False,
    )
    bucket_name = f"pod-publishing-{str(uuid.uuid4())}"
    minio_client.make_bucket(bucket_name, location="eu-central-1")
    logging.info(f"Created bucket {bucket_name}")
    minio_client.set_bucket_policy(bucket_name, s3_readonly_policy(bucket_name))
    logging.info(f"Set bucket {bucket_name} policy to READ_ONLY")

    # Record that this compute activity used the workflow.
    nevermined.provenance.used(
        provenance_id=Web3.toBytes(provenance_id.bytes),
        did=convert_to_bytes(workflow.did),
        agent_id=convert_to_bytes(workflow_owner),
        activity_id=convert_to_bytes(nevermined._web3.keccak(text='compute')),
        signature=nevermined.keeper.sign_hash(add_ethereum_prefix_and_hash_msg(
            str(provenance_id)), account=account),
        account=account,
        attributes='compute')

    # upload files
    for f in files:
        minio_client.fput_object(bucket_name, f["name"], f["path"])
        logging.info(f"Uploaded file {f['path']}")
        # Replace the local path with a presigned download url for the DDO.
        del f["path"]
        f["url"] = minio_client.presigned_get_object(bucket_name, f["name"])
        logging.info(f"File url {f['url']}")

    # Create ddo
    publishing_date = datetime.utcnow().isoformat(timespec="seconds") + "Z"
    metadata = {
        "main": {
            "dateCreated": publishing_date,
            "datePublished": publishing_date,
            "author": "pod-publishing",
            "license": "No License Specified",
            "price": "1",
            "metadata": {
                "workflow": workflow.metadata,
                "executionId": os.getenv("EXECUTION_ID"),
            },
            "files": files,
            "type": "dataset",
        }
    }

    # publish the ddo
    # Retry up to 3 times (30s apart) on ValueError from the publish path.
    ddo = None
    retry = 0
    while ddo is None:
        try:
            ddo = nevermined.assets.create(
                metadata,
                account,
                providers=[account.address],
            )
            nevermined.provenance.was_derived_from(
                provenance_id=Web3.toBytes(provenance_id.bytes),
                new_entity_did=convert_to_bytes(ddo.did),
                used_entity_did=convert_to_bytes(workflow.did),
                agent_id=convert_to_bytes(workflow_owner),
                activity_id=convert_to_bytes(
                    nevermined._web3.keccak(text='published')),
                account=account,
                attributes='published')
        except ValueError:
            if retry == 3:
                raise
            logging.warning("retrying creation of asset")
            retry += 1
            time.sleep(30)

    logging.info(f"Publishing {ddo.did}")
    logging.debug(f"Publishing ddo: {ddo}")

    # transfer ownership to the owner of the workflow
    # Same retry policy as above; `else: break` exits on first success.
    retry = 0
    while True:
        try:
            nevermined.assets.transfer_ownership(ddo.did, workflow_owner, account)
            nevermined.provenance.was_associated_with(
                provenance_id=Web3.toBytes(provenance_id.bytes),
                did=workflow.did,
                agent_id=workflow_owner,
                activity_id=convert_to_bytes(
                    nevermined._web3.keccak(text='transferOwnership')),
                account=account,
                attributes='transferOwnership')
        except ValueError:
            if retry == 3:
                raise
            logging.warning("retrying transfer of ownership")
            retry += 1
            time.sleep(30)
        else:
            break

    logging.info(
        f"Transfered ownership of {workflow.did} from {account.address} to {workflow_owner}"
    )
def personal_ec_recover(message, signed_message):
    """Recover the signer address of a personal-sign style signature.

    Applies the Ethereum signed-message prefix to *message* (and hashes it)
    before delegating to :meth:`Keeper.ec_recover`.

    :param message: the original, unprefixed message that was signed
    :param signed_message: the hex signature produced by a personal sign
    :return: the recovered signer address
    """
    return Keeper.ec_recover(
        add_ethereum_prefix_and_hash_msg(message), signed_message)