def search_assets():
    """Register a sample asset, resolve it, then exercise full-text search and query."""
    ConfigProvider.set_config(ExampleConfig.get_config())
    nevermined = Nevermined()
    publisher = get_account(0)

    # Publish the example metadata so there is something to find.
    asset = nevermined.assets.create(
        example_metadata.metadata,
        publisher,
    )
    sleep(ASYNC_DELAY)
    logging.info(f'Registered asset: did={asset.did}, ddo={asset.as_text()}')

    # Round-trip the DID through the resolver.
    resolved = nevermined.assets.resolve(asset.did)
    logging.info(
        f'resolved asset ddo: did={resolved.did}, ddo={resolved.as_text()}'
    )

    # Full-text search across asset metadata.
    matches = nevermined.assets.search('bonding curve')
    logging.info(
        f'found {len(matches)} assets that contain `bonding curve` in their metadata.'
    )

    # Structured query by name.
    matches = nevermined.assets.query(
        {"query": {
            "text": ['Nevermined protocol white paper']
        }})
    logging.info(
        f'found {len(matches)} assets with name that contains `Nevermined protocol white paper`'
    )
def get_status(execution_id):
    """ Get the status for an execution id.

    swagger_from_file: docs/status.yml
    """
    try:
        api_workflow = v1alpha1.get_namespaced_workflow(
            namespace, execution_id)
    except ApiException as e:
        logging.error(
            f"Exception when calling v1alpha1.get_namespaced_workflow: {e}")
        return f'Error getting workflow {execution_id}', 400

    result = {}
    pods = []
    for (node_id, status) in api_workflow.status.nodes.items():
        pod_name = status.display_name
        if pod_name == execution_id:
            # Top-level workflow node: carries the overall status.
            result = {
                "status": status.phase,
                "startedAt": status.started_at.isoformat(timespec="seconds") + "Z",
                "finishedAt": status.finished_at.isoformat(timespec="seconds") + "Z" \
                    if status.finished_at else None,
                "did": None,
                "pods": []
            }
        else:
            # Child node: one entry per pod in the workflow.
            status_message = {
                "podName": pod_name,
                "status": status.phase,
                "startedAt": status.started_at.isoformat(timespec="seconds") + "Z",
                "finishedAt": status.finished_at.isoformat(timespec="seconds") + "Z" \
                    if status.finished_at else None,
            }
            pods.append(status_message)
    result["pods"] = pods

    # Bug fix: use .get() — if no node's display_name matched `execution_id`
    # (e.g. the workflow is still initializing), `result` has no "status" key
    # and the previous `result["status"]` raised KeyError (HTTP 500).
    if result.get("status") == "Succeeded":
        options = {
            "resources": {
                "metadata.url": "http://172.17.0.1:5000",
            },
            "keeper-contracts": {
                "keeper.url": "http://172.17.0.1:8545"
            }
        }
        config = Config(options_dict=options)
        nevermined = Nevermined(config)

        # On success the published output asset is tagged with the execution id.
        ddo = nevermined.assets.search(f'"{execution_id}"')[0]
        result["did"] = ddo.did

    return jsonify(result), 200
def resolve_asset():
    """Publish an example asset and resolve its DID back into a DDO."""
    ConfigProvider.set_config(ExampleConfig.get_config())
    nevermined = Nevermined()
    publisher = get_account(0)

    registered = nevermined.assets.create(
        example_metadata.metadata,
        publisher,
    )
    sleep(ASYNC_DELAY)
    logging.info(f'Registered asset: did={registered.did}, ddo={registered.as_text()}')

    # Fetch the DDO back through the resolver to prove registration worked.
    fetched = nevermined.assets.resolve(registered.did)
    logging.info(f'resolved asset ddo: did={fetched.did}, ddo={fetched.as_text()}')
def sign_service_agreement():
    """Register an asset and have a consumer prepare (sign) a service agreement for it."""
    ConfigProvider.set_config(ExampleConfig.get_config())

    # Publisher side: create the instance and register an asset.
    nevermined = Nevermined()
    publisher = get_account(0)
    ddo = nevermined.assets.create(example_metadata.metadata, publisher)

    # Consumer side: prepare the agreement, which yields the id and signature.
    consumer = get_account(1)
    agreement_id, signature = nevermined.agreements.prepare(ddo.did, consumer)
    sleep(ASYNC_DELAY)

    logging.info(f'service agreement signed: '
                 f'\nservice agreement id: {agreement_id}, '
                 f'\nsignature: {signature}')
def main():
    """Main routine that calls the demo and waits for the results of the
    compute jobs.
    """
    jobs = demo()
    acc = Account(Web3.toChecksumAddress(PARITY_ADDRESS), PARITY_PASSWORD,
                  PARITY_KEYFILE)
    nevermined = Nevermined(Config("config.ini"))

    print("Waiting for compute jobs...\n")
    try:
        dids = wait_for_compute_jobs(nevermined, acc, jobs)
    except ValueError:
        # Bug fix: stop on failure. Previously execution fell through to the
        # success message and then crashed with NameError on the undefined
        # `dids` when calling download().
        print("Some jobs have failed!")
        return
    print("All jobs finished successfully!\n")

    print("Downloading data assets...")
    download(nevermined, acc, dids)
def register_asset():
    """Register an example asset with an explicit provider and resolve it back."""
    # make nevermined instance
    ConfigProvider.set_config(ExampleConfig.get_config())
    nevermined = Nevermined()
    publisher = get_account(0)

    ddo = nevermined.assets.create(
        example_metadata.metadata,
        publisher,
        providers=['0xfEF2d5e1670342b9EF22eeeDcb287EC526B48095'])
    sleep(ASYNC_DELAY)
    logging.info(f'Registered asset: did={ddo.did}, ddo-services={ddo.services}')

    resolved = nevermined.assets.resolve(ddo.did)
    logging.info(f'resolved asset ddo: did={resolved.did}, ddo={resolved.as_text()}')
def test_e2e_demo():
    """End-to-end check: run the demo, wait for the jobs, then verify the downloads."""
    jobs = demo()
    assert len(jobs) == 3

    nevermined = Nevermined(Config("config.ini"))
    account = Account(
        Web3.toChecksumAddress(PARITY_ADDRESS), PARITY_PASSWORD, PARITY_KEYFILE
    )

    dids = wait_for_compute_jobs(nevermined, account, jobs)
    assert len(dids) == 3

    with tempfile.TemporaryDirectory() as tmpdirname:
        download(nevermined, account, dids, downloads_path=tmpdirname)
        produced = [
            entry.name for entry in Path(tmpdirname).rglob("*") if entry.is_file()
        ]

        # check that we get the output of the participants
        assert produced.count("perf.txt") == 2
        # check that we get the models from the coordinator
        for i in range(10):
            assert produced.count(f"model_{i}.npy") == 1
def run(args):
    """Download all input and transformation assets for a workflow.

    Resolves the workflow DDO, extracts its stages, and downloads every
    input asset (via its "compute" service) and every transformation asset
    (via its "access" service) into subdirectories of ``args.path``.

    Args:
        args: Parsed CLI arguments. Must provide ``node``, ``secretstore_url``,
            ``path`` (a ``pathlib.Path``), ``metadata_url``, ``gateway_url``,
            ``credentials`` (keyfile dict with an "address" key), ``password``
            and ``workflow`` (the workflow DID).
    """
    # Fixed typo in the log message ("callef" -> "called"), consistent with
    # the sibling publishing script.
    logging.debug(f"script called with args: {args}")

    # setup config
    options = {
        "keeper-contracts": {
            "keeper.url": args.node,
            "secret_store.url": args.secretstore_url,
        },
        "resources": {
            "downloads.path": args.path.as_posix(),
            "metadata.url": args.metadata_url,
            "gateway.url": args.gateway_url,
        },
    }
    config = Config(options_dict=options)
    logging.debug(f"nevermined config: {config}")

    # setup paths
    inputs_path = args.path / "inputs"
    inputs_path.mkdir()
    outputs_path = args.path / "outputs"
    outputs_path.mkdir()
    transformations_path = args.path / "transformations"
    transformations_path.mkdir()

    # setup nevermined
    nevermined = Nevermined(config)
    # NOTE(review): `keeper` is unused below — kept in case get_instance()
    # initializes shared singleton state; confirm before removing.
    keeper = Keeper.get_instance()

    # setup consumer
    # here we need to create a temporary key file from the credentials
    key_file = NamedTemporaryFile("w", delete=False)
    json.dump(args.credentials, key_file)
    key_file.flush()
    key_file.close()
    consumer = Account(
        Web3.toChecksumAddress(args.credentials["address"]),
        password=args.password,
        key_file=key_file.name,
    )

    # resolve workflow
    workflow = nevermined.assets.resolve(args.workflow)
    logging.info(f"resolved workflow {args.workflow}")
    logging.debug(f"workflow ddo {workflow.as_dictionary()}")

    # get stages
    stages = workflow.get_service("metadata").main["workflow"]["stages"]
    logging.debug(f"stages {stages}")

    # get inputs and transformations
    inputs = []
    transformations = []
    for stage in stages:
        inputs += [input_["id"] for input_ in stage["input"]]
        if "transformation" in stage:
            transformations.append(stage["transformation"]["id"])
    logging.debug(f"inputs: {inputs}")
    logging.debug(f"transformations: {transformations}")

    # download assets
    for did in inputs:
        ddo = nevermined.assets.resolve(did)
        service_agreement = ddo.get_service("compute")
        logging.info(f"downloading asset {ddo.did}")
        nevermined.assets.download(ddo.did, service_agreement.index, consumer,
                                   inputs_path.as_posix())

    for did in transformations:
        ddo = nevermined.assets.resolve(did)
        service_agreement = ddo.get_service("access")
        logging.info(f"downloading asset {ddo.did}")
        nevermined.assets.download(ddo.did, service_agreement.index, consumer,
                                   transformations_path.as_posix())
def demo():
    """The Nevermined Federated Learning demo.

    This demo showcases the nevermined Federated Learning capabilities.

    Flow:
        1. Setup nevermined
        2. Setup accounts
        3. Publish compute to the data assets
        4. Publish algorithm
        5. Publish workflows
        6. Order computations
        7. Execute workflows

    Returns:
        list: ``(agreement_id, compute_id)`` tuples — coordinator first,
        then the two data-provider jobs.
    """
    print("Setting up...\n")
    date_created = dates_generator()

    # 1. Setup nevermined
    nevermined = Nevermined(Config("config.ini"))
    keeper = Keeper.get_instance()
    provider = "0x068Ed00cF0441e4829D9784fCBe7b9e26D4BD8d0"

    # 2. Setup accounts
    # NOTE(review): all roles (both data providers, the coordinator and the
    # consumer) share the same Parity account in this demo.
    acc = Account(Web3.toChecksumAddress(PARITY_ADDRESS), PARITY_PASSWORD,
                  PARITY_KEYFILE)
    nevermined.accounts.request_tokens(acc, 100)
    provider_data0 = acc
    provider_data1 = acc
    provider_coordinator = acc
    consumer = acc

    # 3. Publish compute to the data
    with open("resources/metadata/metadata0.json") as f:
        metadata_data0 = json.load(f)
        metadata_data0["main"]["dateCreated"] = next(date_created)
    with open("resources/metadata/metadata1.json") as f:
        metadata_data1 = json.load(f)
        metadata_data1["main"]["dateCreated"] = next(date_created)

    ddo_compute0 = nevermined.assets.create_compute(
        metadata_data0,
        provider_data0,
        providers=[provider],
    )
    assert ddo_compute0 is not None, "Creating asset compute0 on-chain failed"
    print(
        f"[DATA_PROVIDER0 --> NEVERMINED] Publishing compute to the data asset for asset0: {ddo_compute0.did}"
    )

    ddo_compute1 = nevermined.assets.create_compute(
        metadata_data1,
        provider_data1,
        providers=[provider],
    )
    assert ddo_compute1 is not None, "Creating asset compute1 on-chain failed"
    print(
        f"[DATA_PROVIDER1 --> NEVERMINED] Publishing compute to the data asset for asset1: {ddo_compute1.did}"
    )

    with open("resources/metadata/metadata_compute_coordinator.json") as f:
        metadata_compute_coordinator = json.load(f)
        metadata_compute_coordinator["main"]["dateCreated"] = next(
            date_created)
    ddo_compute_coordinator = nevermined.assets.create_compute(
        metadata_compute_coordinator,
        provider_coordinator,
        providers=[provider],
    )
    assert (ddo_compute_coordinator
            is not None), "Creating asset compute_coordinator on-chain failed"
    print(
        f"[COORDINATOR_PROVIDER --> NEVERMINED] Publishing coordinator compute asset: {ddo_compute_coordinator.did}"
    )

    # 4. Publish algorithm
    with open("resources/metadata/metadata_transformation.json") as f:
        metadata_transformation = json.load(f)
        metadata_transformation["main"]["dateCreated"] = next(date_created)
    ddo_transformation = nevermined.assets.create(
        metadata_transformation,
        consumer,
        providers=[provider],
    )
    assert (ddo_transformation
            is not None), "Creating asset transformation on-chain failed"
    print(
        f"[DATA_SCIENTIST --> NEVERMINED] Publishing algorithm asset: {ddo_transformation.did}"
    )

    # 5. Publish the workflows
    with open("resources/metadata/metadata_workflow.json") as f:
        metadata_workflow = json.load(f)
    with open("resources/metadata/metadata_workflow_coordinator.json") as f:
        metadata_workflow_coordinator = json.load(f)

    # Each participant workflow gets its own deep copy wired to its own
    # compute asset, all sharing the same transformation.
    metadata_workflow0 = copy.deepcopy(metadata_workflow)
    metadata_workflow0["main"]["workflow"]["stages"][0]["input"][0][
        "id"] = ddo_compute0.did
    metadata_workflow0["main"]["workflow"]["stages"][0]["transformation"][
        "id"] = ddo_transformation.did
    metadata_workflow1 = copy.deepcopy(metadata_workflow)
    metadata_workflow1["main"]["workflow"]["stages"][0]["input"][0][
        "id"] = ddo_compute1.did
    metadata_workflow1["main"]["workflow"]["stages"][0]["transformation"][
        "id"] = ddo_transformation.did
    metadata_workflow_coordinator["main"]["dateCreated"] = next(date_created)

    ddo_workflow0 = nevermined.assets.create(
        metadata_workflow0,
        consumer,
        providers=[provider],
    )
    assert ddo_workflow0 is not None, "Creating asset workflow0 on-chain failed"
    print(
        f"[DATA_SCIENTIST --> NEVERMINED] Publishing compute workflow for asset0: {ddo_workflow0.did}"
    )

    ddo_workflow1 = nevermined.assets.create(
        metadata_workflow1,
        consumer,
        providers=[provider],
    )
    assert ddo_workflow1 is not None, "Creating asset workflow1 on-chain failed"
    print(
        f"[DATA_SCIENTIST --> NEVERMINED] Publishing compute workflow for asset1: {ddo_workflow1.did}"
    )

    ddo_workflow_coordinator = nevermined.assets.create(
        metadata_workflow_coordinator,
        consumer,
        providers=[provider],
    )
    assert (ddo_workflow_coordinator
            is not None), "Creating asset workflow_coordinator on-chain failed"
    print(
        f"[DATA_SCIENTIST --> NEVERMINED] Publishing compute workflow for coordinator: {ddo_workflow_coordinator.did}"
    )

    # 6. Order computations
    # For each compute asset: place the order, then block (up to 60s each) on
    # the lock-reward and compute-execution condition events before moving on.
    service0 = ddo_compute0.get_service(
        service_type=ServiceTypes.CLOUD_COMPUTE)
    service_agreement0 = ServiceAgreement.from_service_dict(
        service0.as_dictionary())
    agreement_id0 = nevermined.assets.order(ddo_compute0.did,
                                            service_agreement0.index,
                                            consumer, consumer)
    print(
        f"[DATA_SCIENTIST --> DATA_PROVIDER0] Requesting an agreement for compute to the data for asset0: {agreement_id0}"
    )
    event = keeper.lock_reward_condition.subscribe_condition_fulfilled(
        agreement_id0, 60, None, (), wait=True)
    assert event is not None, "Reward condition is not found"
    event = keeper.compute_execution_condition.subscribe_condition_fulfilled(
        agreement_id0, 60, None, (), wait=True)
    assert event is not None, "Execution condition not found"

    service1 = ddo_compute1.get_service(
        service_type=ServiceTypes.CLOUD_COMPUTE)
    service_agreement1 = ServiceAgreement.from_service_dict(
        service1.as_dictionary())
    agreement_id1 = nevermined.assets.order(ddo_compute1.did,
                                            service_agreement1.index,
                                            consumer, consumer)
    print(
        f"[DATA_SCIENTIST --> DATA_PROVIDER1] Requesting an agreement for compute to the data for asset1: {agreement_id1}"
    )
    event = keeper.lock_reward_condition.subscribe_condition_fulfilled(
        agreement_id1, 60, None, (), wait=True)
    assert event is not None, "Reward condition is not found"
    event = keeper.compute_execution_condition.subscribe_condition_fulfilled(
        agreement_id1, 60, None, (), wait=True)
    assert event is not None, "Execution condition not found"

    service_coordinator = ddo_compute_coordinator.get_service(
        service_type=ServiceTypes.CLOUD_COMPUTE)
    service_agreement_coordinator = ServiceAgreement.from_service_dict(
        service_coordinator.as_dictionary())
    agreement_id_coordinator = nevermined.assets.order(
        ddo_compute_coordinator.did, service_agreement_coordinator.index,
        consumer, consumer)
    print(
        f"[DATA_SCIENTIST --> COORDINATOR_PROVIDER] Requesting an agreement for coordinator compute: {agreement_id_coordinator}"
    )
    event = keeper.lock_reward_condition.subscribe_condition_fulfilled(
        agreement_id_coordinator, 60, None, (), wait=True)
    assert event is not None, "Reward condition is not found"
    event = keeper.compute_execution_condition.subscribe_condition_fulfilled(
        agreement_id_coordinator, 60, None, (), wait=True)
    assert event is not None, "Execution condition not found"

    # 7. Execute workflows
    # The coordinator is started first; the participant jobs connect to it.
    compute_coordinator_id = nevermined.assets.execute(
        agreement_id_coordinator,
        ddo_compute_coordinator.did,
        service_agreement_coordinator.index,
        consumer,
        ddo_workflow_coordinator.did,
    )
    print(
        f"[DATA_SCIENTIST --> COORDINATOR_PROVIDER] Requesting execution for coordinator compute: {compute_coordinator_id}"
    )

    compute_asset0_id = nevermined.assets.execute(
        agreement_id0,
        ddo_compute0.did,
        service_agreement0.index,
        consumer,
        ddo_workflow0.did,
    )
    print(
        f"[DATA_SCIENTIST --> DATA_PROVIDER0] Requesting execution for compute to data for asset0: {compute_asset0_id}"
    )

    compute_asset1_id = nevermined.assets.execute(
        agreement_id1,
        ddo_compute1.did,
        service_agreement1.index,
        consumer,
        ddo_workflow1.did,
    )
    print(
        f"[DATA_SCIENTIST --> DATA_PROVIDER1] Requesting execution for compute to data for asset1: {compute_asset1_id}"
    )

    jobs = [
        (agreement_id_coordinator, compute_coordinator_id),
        (agreement_id0, compute_asset0_id),
        (agreement_id1, compute_asset1_id),
    ]
    return jobs
def demo():
    """Run the art-generation compute demo end to end.

    Publishes a dataset compute asset, an algorithm asset and a workflow
    tying them together, orders the compute service, executes the workflow,
    polls until it succeeds, and downloads the resulting outputs to "./".

    Raises:
        ValueError: If the compute job reports the "Failed" status.
    """
    nevermined = Nevermined(Config(CONFIG_FILE))
    provider_account = Account(PROVIDER_ADDRESS, PROVIDER_PASSWORD,
                               PROVIDER_KEYFILE)

    # publish asset
    metadata_compute = {
        "main": {
            "name": "CIFAR-10 Part 1",
            "dateCreated": date_now(),
            "author": "Nevermined Provider",
            "license": "",
            "price": "1",
            "files": [{
                "index": 0,
                "contentType": "image/png",
                "checksum":
                "0x52b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3bbc3377",
                "checksumType": "MD5",
                "contentLength": "12057507",
                "url":
                "https://ck2a37sxobgcdarvr7jewxvrlvde6kehhoy6lmfuks4uabuavtiq.arweave.net/ErQN_ldwTCGCNY_SS16xXUZPKIc7seWwtFS5QAaArNE",
            }, {
                "index": 1,
                "contentType": "application/json",
                "checksum":
                "0x52b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3bbc3377",
                "checksumType": "MD5",
                "contentLength": "12057507",
                "url":
                "https://raw.githubusercontent.com/keyko-io/eth-nft-hack/rod/artsgenerator-demo/config.json"
            }],
            "type": "dataset",
        }
    }
    ddo_compute = nevermined.assets.create_compute(metadata_compute,
                                                   provider_account)
    print(f"Published asset with DID: {ddo_compute.did}")

    # publish algorithm
    metadata_algorithm = {
        "main": {
            "name": "Generative artist",
            "dateCreated": date_now(),
            "author": "Gene Kogan",
            "license": "",
            "price": "0",
            # This file will not be used but there is a bug on the sdk that
            # expects a least one file to exist in an algorithm
            "files": [
                {
                    "index": 0,
                    "contentType": "text/text",
                    "checksum":
                    "0x52b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3bbc3377",
                    "checksumType": "MD5",
                    "contentLength": "12057507",
                    "url":
                    "https://github.com/nevermined-io/tools/raw/master/README.md",
                },
            ],
            "type": "algorithm",
            "algorithm": {
                "language": "python",
                "format": "py",
                "version": "0.1.0",
                # Command run inside the container for the transformation stage.
                "entrypoint":
                "pwd && ls -lR && cat /data/inputs/**/config.json && python /nevermined-demo/run.py",
                "requirements": {
                    "container": {
                        "image": "neverminedio/artgenerator",
                        "tag": "latest",
                        "checksum":
                        "sha256:53ad3a03b2fb240b6c494339821e6638cd44c989bcf26ec4d51a6a52f7518c1d",
                    }
                },
            },
        }
    }
    ddo_algorithm = nevermined.assets.create(metadata_algorithm,
                                             provider_account)
    print(f"Published algorithm with DID: {ddo_algorithm.did}")

    # Workflow wiring: stage 0 consumes the compute asset and applies the
    # algorithm published above.
    metadata_workflow = {
        "main": {
            "name": "Mint my NFT",
            "dateCreated": date_now(),
            "author": "Nevermined Consumer",
            "license": "",
            "price": "0",
            "type": "workflow",
            "workflow": {
                "stages": [{
                    "index": 0,
                    "input": [{
                        "index": 0,
                        "id": ddo_compute.did
                    }],
                    "transformation": {
                        "id": ddo_algorithm.did
                    },
                }]
            },
        }
    }
    ddo_workflow = nevermined.assets.create(metadata_workflow,
                                            provider_account)
    print(f"Published workflow with DID: {ddo_workflow.did}")

    # order the asset
    keeper = Keeper.get_instance()
    service_agreement_id = nevermined.assets.order(
        ddo_compute.did,
        ServiceTypesIndices.DEFAULT_COMPUTING_INDEX,
        provider_account,
        provider_account,
    )
    print()
    print("Ordering Data In-Situ Compute")
    print(f"Service Agreement ID: {service_agreement_id}")
    wait_for_event(keeper, service_agreement_id)

    # execute workflow
    execution_id = nevermined.assets.execute(
        service_agreement_id,
        ddo_compute.did,
        ServiceTypesIndices.DEFAULT_COMPUTING_INDEX,
        provider_account,
        ddo_workflow.did,
    )
    print("Firing up the GPUs to mine some Art...")
    print(f"Execution ID: {execution_id}")
    print("This will take a about 1h...")
    print()
    print('Monitoring compute status:')

    # wait for compute job: poll once a minute until it succeeds or fails
    outputs_did = None
    while True:
        status = nevermined.assets.compute_status(service_agreement_id,
                                                  execution_id,
                                                  provider_account)
        if status["status"] == "Failed":
            raise ValueError("The job failed")
        elif status["status"] == "Succeeded":
            # On success the status payload carries the DID of the outputs asset.
            outputs_did = status["did"]
            break
        print(f"{execution_id}: {status['status']}")
        time.sleep(60)
    print(f"Outputs DID: {outputs_did}")

    # download the output assets
    print()
    print('Downloading outputs...')
    nevermined.assets.download(outputs_did,
                               ServiceTypesIndices.DEFAULT_ACCESS_INDEX,
                               provider_account, "./")
    print("Finished!")
def create_arguments(ddo):
    """Create the arguments that need to be add to the argo template.

    Args:
        ddo (:py:class:`common_utils_py.ddo.ddo.DDO`): The workflow DDO.

    Returns:
        list: The list of arguments to be appended to the argo workflow
    """
    entrypoint = ''
    container_image = ''
    container_tag = ''

    # Coordinator workflows carry no transformation stage, so the
    # transformation fields stay empty for them.
    if ddo.metadata["main"]["type"] != "fl-coordinator":
        workflow = ddo.metadata["main"]["workflow"]
        config = Config(options_dict={
            "resources": {
                "metadata.url": "http://172.17.0.1:5000",
            },
            "keeper-contracts": {
                "keeper.url": "http://172.17.0.1:8545"
            }
        })
        nevermined = Nevermined(config)

        # TODO: Currently this only supports one stage
        transformation_did = workflow["stages"][0]["transformation"]["id"]
        transformation_ddo = nevermined.assets.resolve(transformation_did)
        transformation_metadata = transformation_ddo.get_service("metadata")

        # get args and container
        algorithm = transformation_metadata.main["algorithm"]
        entrypoint = algorithm["entrypoint"]
        container_image = algorithm["requirements"]["container"]["image"]
        container_tag = algorithm["requirements"]["container"]["tag"]

    name_value_pairs = [
        # remove white spaces
        ("credentials", json.dumps(KEYFILE, separators=(",", ":"))),
        ("password", os.getenv("PROVIDER_PASSWORD")),
        ("metadata_url", "http://172.17.0.1:5000"),
        ("gateway_url", "http://172.17.0.1:8030"),
        ("node", "http://172.17.0.1:8545"),
        ("secret_store_url", "http://172.17.0.1:12001"),
        ("workflow", f"did:nv:{ddo.asset_id[2:]}"),
        ("verbose", "false"),
        ("transformation_container_image", f"{container_image}:{container_tag}"),
        ("transformation_arguments", entrypoint),
    ]
    return [{"name": name, "value": value} for name, value in name_value_pairs]
def compute_example(verbose=False):
    """Run a compute-to-the-data example: publish assets, order and execute.

    Args:
        verbose (bool): When True, enable detailed logging via
            ``configure_logging()``.
    """
    print("Setting up...")
    if verbose:
        configure_logging()
    date_created = dates_generator()

    # Setup nevermined
    ConfigProvider.set_config(ExampleConfig.get_config())
    # NOTE(review): `config` is fetched but not used below — confirm it can
    # be dropped.
    config = ConfigProvider.get_config()
    nevermined = Nevermined()
    keeper = Keeper.get_instance()
    provider = "0x068Ed00cF0441e4829D9784fCBe7b9e26D4BD8d0"
    # Reward split: amounts are paired positionally with receivers.
    asset_rewards = {
        "_amounts": ["10", "2"],
        "_receivers": ["0x00Bd138aBD70e2F00903268F3Db08f2D25677C9e",
                       "0x068ed00cf0441e4829d9784fcbe7b9e26d4bd8d0"]
    }

    # Setup accounts (provider and consumer share one account in this example)
    acc = Account(
        Web3.toChecksumAddress(PROVIDER_ADDRESS), PROVIDER_PASSWORD, PROVIDER_KEYFILE
    )
    nevermined.accounts.request_tokens(acc, 10)
    provider_acc = acc
    consumer_acc = acc

    # Publish compute
    # NOTE(review): dateCreated is set on `example_metadata.compute_ddo`, but
    # the asset below is created from `example_metadata.metadata` — confirm
    # this mismatch is intentional.
    example_metadata.compute_ddo["main"]["dateCreated"] = next(date_created)
    ddo_compute = nevermined.assets.create_compute(
        example_metadata.metadata, provider_acc, asset_rewards, providers=[provider]
    )
    assert ddo_compute is not None, "Creating compute asset on-chain failed."
    print(
        f"[PROVIDER --> NEVERMINED] Publishing compute to the data asset: {ddo_compute.did}"
    )

    # Publish algorithm
    example_metadata.algo_metadata["main"]["dateCreated"] = next(date_created)
    ddo_transformation = nevermined.assets.create(
        example_metadata.algo_metadata, consumer_acc, providers=[provider]
    )
    assert (
        ddo_transformation is not None
    ), "Creating asset transformation on-chain failed."
    print(
        f"[CONSUMER --> NEVERMINED] Publishing algorithm asset: {ddo_transformation.did}"
    )

    # Publish workflows: wire stage 0 to the compute asset and the algorithm
    workflow_metadata = example_metadata.workflow_ddo
    workflow_metadata["main"]["workflow"]["stages"][0]["input"][0]["id"] = ddo_compute.did
    workflow_metadata["main"]["workflow"]["stages"][0]["transformation"][
        "id"
    ] = ddo_transformation.did
    ddo_workflow = nevermined.assets.create(
        workflow_metadata, consumer_acc, providers=[provider]
    )
    assert ddo_workflow is not None, "Creating asset workflow on-chain failed."
    print(f"[CONSUMER --> NEVERMINED] Publishing compute workflow: {ddo_workflow.did}")

    # Order computation, then block (up to 60s each) on the payment-lock and
    # compute-execution condition events.
    service = ddo_compute.get_service(service_type=ServiceTypes.CLOUD_COMPUTE)
    service_agreement = ServiceAgreement.from_service_dict(service.as_dictionary())
    agreement_id = nevermined.assets.order(
        ddo_compute.did, service_agreement.index, consumer_acc
    )
    print(
        f"[CONSUMER --> PROVIDER] Requesting an agreement for compute to the data: {agreement_id}"
    )
    event = keeper.lock_payment_condition.subscribe_condition_fulfilled(
        agreement_id, 60, None, (), wait=True
    )
    assert event is not None, "Reward condition is not found"
    event = keeper.compute_execution_condition.subscribe_condition_fulfilled(
        agreement_id, 60, None, (), wait=True
    )
    assert event is not None, "Execution condition not found"

    # Execute workflow
    nevermined.assets.execute(
        agreement_id,
        ddo_compute.did,
        service_agreement.index,
        consumer_acc,
        ddo_workflow.did,
    )
    print("[CONSUMER --> PROVIDER] Requesting execution of the compute workflow")

    # Wait for the escrow payment to be released — signals full completion.
    event = keeper.escrow_payment_condition.subscribe_condition_fulfilled(
        agreement_id, 60, None, (), wait=True
    )
    assert event is not None, "Escrow Reward condition not found"
    print("Workflow successfully executed")
def buy_asset():
    """
    Buy (order and access) an asset end to end.

    Requires all Nevermined services running.
    """
    ConfigProvider.set_config(ExampleConfig.get_config())
    config = ConfigProvider.get_config()
    # Known gateway provider addresses per test network.
    providers = {
        'duero': '0xfEF2d5e1670342b9EF22eeeDcb287EC526B48095',
        'nile': '0x4aaab179035dc57b35e2ce066919048686f82972'
    }
    # make nevermined instance
    nevermined = Nevermined()
    Diagnostics.verify_contracts()
    acc = get_account(1)
    if not acc:
        # Fall back to the first password-protected account, or any account.
        acc = ([acc for acc in nevermined.accounts.list() if acc.password] or
               nevermined.accounts.list())[0]

    keeper = Keeper.get_instance()
    # Register ddo — set `did` to reuse an already-registered asset.
    did = ''  # 'did:nv:7648596b60f74301ae1ef9baa5d637255d517ff362434754a3779e1de4c8219b'
    if did:
        ddo = nevermined.assets.resolve(did)
        logging.info(f'using ddo: {did}')
    else:
        ddo = nevermined.assets.create(example_metadata.metadata, acc,
                                       providers=[],
                                       authorization_type='SecretStore',
                                       use_secret_store=True)
        assert ddo is not None, f'Registering asset on-chain failed.'
        did = ddo.did
        logging.info(f'registered ddo: {did}')

    # nevermined here will be used only to publish the asset. Handling the asset by the publisher
    # will be performed by the Gateway server running locally
    test_net = os.environ.get('TEST_NET', '')
    if test_net.startswith('nile'):
        provider = keeper.did_registry.to_checksum_address(providers['nile'])
    elif test_net.startswith('duero'):
        provider = keeper.did_registry.to_checksum_address(providers['duero'])
    else:
        provider = '0x068Ed00cF0441e4829D9784fCBe7b9e26D4BD8d0'

    # Wait for did registry event
    event = keeper.did_registry.subscribe_to_event(
        keeper.did_registry.DID_REGISTRY_EVENT_NAME, 30,
        event_filter={
            '_did': Web3Provider.get_web3().toBytes(hexstr=ddo.asset_id),
            '_owner': acc.address},
        wait=True
    )
    if not event:
        logging.warning(f'Failed to get the did registry event for asset with did {did}.')
    assert keeper.did_registry.get_block_number_updated(ddo.asset_id) > 0, \
        f'There is an issue in registering asset {did} on-chain.'

    keeper.did_registry.add_provider(ddo.asset_id, provider, acc)
    logging.info(f'is {provider} set as did provider: '
                 f'{keeper.did_registry.is_did_provider(ddo.asset_id, provider)}')

    # Consumer side uses a separate Nevermined instance and account.
    nevermined_cons = Nevermined()
    consumer_account = get_account(0)

    # sign agreement using the registered asset did above
    service = ddo.get_service(service_type=ServiceTypes.ASSET_ACCESS)
    # This will send the order request to Gateway which in turn will execute the agreement on-chain
    nevermined_cons.accounts.request_tokens(consumer_account, 10)
    sa = ServiceAgreement.from_service_dict(service.as_dictionary())
    agreement_id = ''
    if not agreement_id:
        # Use these 2 lines to request new agreement from Gateway
        # agreement_id, signature = nevermined_cons.agreements.prepare(did, sa.service_definition_id,
        #                                                              consumer_account)
        # nevermined_cons.agreements.send(did, agreement_id, sa.service_definition_id, signature,
        #                                 consumer_account)

        # assets.order now creates agreement directly using consumer account.
        agreement_id = nevermined_cons.assets.order(
            did, sa.index, consumer_account)

    logging.info('placed order: %s, %s', did, agreement_id)
    # Wait for the on-chain agreement, then each fulfilled condition in turn.
    event = keeper.access_template.subscribe_agreement_created(
        agreement_id, 60, None, (), wait=True
    )
    assert event, "Agreement event is not found, check the keeper node's logs"
    logging.info(f'Got agreement event, next: lock reward condition')

    event = keeper.lock_payment_condition.subscribe_condition_fulfilled(
        agreement_id, 60, None, (), wait=True
    )
    assert event, "Lock reward condition fulfilled event is not found, check the keeper node's logs"
    logging.info('Got lock reward event, next: wait for the access condition..')

    event = keeper.access_condition.subscribe_condition_fulfilled(
        agreement_id, 15, None, (), wait=True
    )
    logging.info(f'Got access event {event}')

    # Poll (up to ~15s) until access is granted on-chain.
    i = 0
    while nevermined.agreements.is_access_granted(
            agreement_id, did, consumer_account.address) is not True and i < 15:
        time.sleep(1)
        i += 1

    assert nevermined.agreements.is_access_granted(agreement_id, did,
                                                   consumer_account.address)

    # Download the purchased asset to the configured downloads path.
    nevermined.assets.access(
        agreement_id,
        did,
        sa.index,
        consumer_account,
        config.downloads_path,
        index=0)
    logging.info('Success buying asset.')

    event = keeper.escrow_payment_condition.subscribe_condition_fulfilled(
        agreement_id, 30, None, (), wait=True
    )
    assert event, 'no event for EscrowReward.Fulfilled'
    logging.info(f'got EscrowReward.FULFILLED event: {event}')
    logging.info('Done buy asset.')
def run(args):
    """Publish the outputs of a finished compute job as a new asset.

    Uploads every file under ``args.path / "outputs"`` to a fresh MinIO
    bucket, records provenance, publishes a DDO describing the outputs, and
    transfers its ownership to the workflow owner. Publish and transfer are
    each retried up to 3 times on ``ValueError``.

    Args:
        args: Parsed CLI arguments; must provide ``node``, ``secretstore_url``,
            ``path``, ``metadata_url``, ``gateway_url``, ``credentials``,
            ``password`` and ``workflow`` (the workflow DID).
    """
    logging.debug(f"script called with args: {args}")

    # setup config
    options = {
        "keeper-contracts": {
            "keeper.url": args.node,
            "secret_store.url": args.secretstore_url,
        },
        "resources": {
            "downloads.path": args.path.as_posix(),
            "metadata.url": args.metadata_url,
            "gateway.url": args.gateway_url,
        },
    }
    config = Config(options_dict=options)
    logging.debug(f"nevermined config: {config}")

    # setup paths
    outputs_path = args.path / "outputs"

    # setup nevermined
    nevermined = Nevermined(config)

    # setup consumer
    # here we need to create a temporary key file from the credentials
    key_file = NamedTemporaryFile("w", delete=False)
    json.dump(args.credentials, key_file)
    key_file.flush()
    key_file.close()
    account = Account(
        Web3.toChecksumAddress(args.credentials["address"]),
        password=args.password,
        key_file=key_file.name,
    )

    # resolve workflow
    workflow = nevermined.assets.resolve(args.workflow)
    logging.info(f"resolved workflow {args.workflow}")
    logging.debug(f"workflow ddo {workflow.as_dictionary()}")
    workflow_owner = nevermined.assets.owner(workflow.did)
    # One provenance id ties together all provenance records of this run.
    provenance_id = uuid.uuid4()

    # get files to upload
    files = []
    index = 0
    for f in outputs_path.rglob("*"):
        if f.is_file():
            files.append({
                "index": index,
                "name": f.name,
                "path": f.as_posix(),
                "contentType": mimetypes.guess_type(f)[0],
                "contentLength": f.stat().st_size,
            })
            index += 1

    # create bucket
    # NOTE(review): hard-coded local MinIO endpoint and demo credentials —
    # confirm these are intended only for the local/dev deployment.
    minio_client = Minio(
        "172.17.0.1:8060",
        access_key="AKIAIOSFODNN7EXAMPLE",
        secret_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
        secure=False,
    )
    bucket_name = f"pod-publishing-{str(uuid.uuid4())}"
    minio_client.make_bucket(bucket_name, location="eu-central-1")
    logging.info(f"Created bucket {bucket_name}")
    minio_client.set_bucket_policy(bucket_name, s3_readonly_policy(bucket_name))
    logging.info(f"Set bucket {bucket_name} policy to READ_ONLY")

    # Record that this compute activity used the workflow asset.
    nevermined.provenance.used(
        provenance_id=Web3.toBytes(provenance_id.bytes),
        did=convert_to_bytes(workflow.did),
        agent_id=convert_to_bytes(workflow_owner),
        activity_id=convert_to_bytes(nevermined._web3.keccak(text='compute')),
        signature=nevermined.keeper.sign_hash(add_ethereum_prefix_and_hash_msg(
            str(provenance_id)), account=account),
        account=account,
        attributes='compute')

    # upload files
    for f in files:
        minio_client.fput_object(bucket_name, f["name"], f["path"])
        logging.info(f"Uploaded file {f['path']}")
        # The local path is replaced by a presigned download URL in the DDO.
        del f["path"]
        f["url"] = minio_client.presigned_get_object(bucket_name, f["name"])
        logging.info(f"File url {f['url']}")

    # Create ddo
    publishing_date = datetime.utcnow().isoformat(timespec="seconds") + "Z"
    metadata = {
        "main": {
            "dateCreated": publishing_date,
            "datePublished": publishing_date,
            "author": "pod-publishing",
            "license": "No License Specified",
            "price": "1",
            "metadata": {
                "workflow": workflow.metadata,
                "executionId": os.getenv("EXECUTION_ID"),
            },
            "files": files,
            "type": "dataset",
        }
    }

    # publish the ddo (retry up to 3 times, waiting 30s between attempts)
    ddo = None
    retry = 0
    while ddo is None:
        try:
            ddo = nevermined.assets.create(
                metadata,
                account,
                providers=[account.address],
            )
            # Provenance: the new outputs asset derives from the workflow.
            nevermined.provenance.was_derived_from(
                provenance_id=Web3.toBytes(provenance_id.bytes),
                new_entity_did=convert_to_bytes(ddo.did),
                used_entity_did=convert_to_bytes(workflow.did),
                agent_id=convert_to_bytes(workflow_owner),
                activity_id=convert_to_bytes(
                    nevermined._web3.keccak(text='published')),
                account=account,
                attributes='published')
        except ValueError:
            if retry == 3:
                raise
            logging.warning("retrying creation of asset")
            retry += 1
            time.sleep(30)
    logging.info(f"Publishing {ddo.did}")
    logging.debug(f"Publishing ddo: {ddo}")

    # transfer ownership to the owner of the workflow (same retry policy)
    retry = 0
    while True:
        try:
            nevermined.assets.transfer_ownership(ddo.did, workflow_owner,
                                                 account)
            nevermined.provenance.was_associated_with(
                provenance_id=Web3.toBytes(provenance_id.bytes),
                did=workflow.did,
                agent_id=workflow_owner,
                activity_id=convert_to_bytes(
                    nevermined._web3.keccak(text='transferOwnership')),
                account=account,
                attributes='transferOwnership')
        except ValueError:
            if retry == 3:
                raise
            logging.warning("retrying transfer of ownership")
            retry += 1
            time.sleep(30)
        else:
            break
    logging.info(
        f"Transfered ownership of {workflow.did} from {account.address} to {workflow_owner}"
    )