Exemplo n.º 1
0
def get_status(execution_id):
    """
    Get the status for an execution id.
    swagger_from_file: docs/status.yml
    """
    try:
        api_workflow = v1alpha1.get_namespaced_workflow(
            namespace, execution_id)
    except ApiException as e:
        logging.error(
            f"Exception when calling v1alpha1.get_namespaced_workflow: {e}")
        return f'Error getting workflow {execution_id}', 400

    # The node whose display name equals the execution id is the workflow
    # itself; every other node is one of its pods.
    result = {}
    pods = []
    for (node_id, status) in api_workflow.status.nodes.items():
        pod_name = status.display_name
        if pod_name == execution_id:
            result = {
                "status": status.phase,
                "startedAt": status.started_at.isoformat(timespec="seconds") + "Z",
                "finishedAt": status.finished_at.isoformat(timespec="seconds") + "Z" \
                    if status.finished_at else None,
                "did": None,
                "pods": []
            }
        else:
            status_message = {
                "podName": pod_name,
                "status": status.phase,
                "startedAt": status.started_at.isoformat(timespec="seconds") + "Z",
                "finishedAt": status.finished_at.isoformat(timespec="seconds") + "Z" \
                    if status.finished_at else None,
            }
            pods.append(status_message)

    result["pods"] = pods

    # Use .get(): if no node matched the execution id, `result` has no
    # "status" key and plain indexing would raise KeyError here.
    if result.get("status") == "Succeeded":
        options = {
            "resources": {
                "metadata.url": "http://172.17.0.1:5000",
            },
            "keeper-contracts": {
                "keeper.url": "http://172.17.0.1:8545"
            }
        }
        config = Config(options_dict=options)
        nevermined = Nevermined(config)
        # Look up the output asset published under this execution id.
        ddo = nevermined.assets.search(f'"{execution_id}"')[0]
        result["did"] = ddo.did

    return jsonify(result), 200
Exemplo n.º 2
0
def main():
    """Main routine that calls the demo and waits for the results of the compute
    jobs.

    """
    jobs = demo()

    acc = Account(Web3.toChecksumAddress(PARITY_ADDRESS), PARITY_PASSWORD,
                  PARITY_KEYFILE)
    nevermined = Nevermined(Config("config.ini"))

    print("Waiting for compute jobs...\n")
    try:
        dids = wait_for_compute_jobs(nevermined, acc, jobs)
    except ValueError:
        # Stop here: without `dids` the download below would raise NameError,
        # and the success message must not be printed after a failure.
        print("Some jobs have failed!")
        return

    print("All jobs finished successfully!\n")
    print("Downloading data assets...")
    download(nevermined, acc, dids)
Exemplo n.º 3
0
def test_e2e_demo():
    """End-to-end check: run the demo, wait for all jobs, download outputs."""
    compute_jobs = demo()
    assert len(compute_jobs) == 3

    sdk = Nevermined(Config("config.ini"))
    account = Account(
        Web3.toChecksumAddress(PARITY_ADDRESS), PARITY_PASSWORD, PARITY_KEYFILE
    )
    job_dids = wait_for_compute_jobs(sdk, account, compute_jobs)
    assert len(job_dids) == 3

    with tempfile.TemporaryDirectory() as download_dir:
        download(sdk, account, job_dids, downloads_path=download_dir)
        names = [
            entry.name
            for entry in Path(download_dir).rglob("*")
            if entry.is_file()
        ]

    # check that we get the output of the participants
    assert names.count("perf.txt") == 2

    # check that we get the models from the coordinator
    for round_index in range(10):
        assert names.count(f"model_{round_index}.npy") == 1
Exemplo n.º 4
0
 def get_config():
     """Return a Config built from the example configuration dictionary."""
     environment = ExampleConfig.get_config_net()
     logging.debug(f"Configuration loaded for environment '{environment}'")
     return Config(options_dict=ExampleConfig.get_config_dict())
Exemplo n.º 5
0
def run(args):
    """Prepare a compute job: resolve the workflow DDO and download its input
    and transformation assets into the job's working directories.

    Args:
        args: Parsed CLI arguments. Reads ``node``, ``secretstore_url``,
            ``path`` (a ``pathlib.Path``), ``metadata_url``, ``gateway_url``,
            ``credentials`` (parsed keyfile dict), ``password`` and
            ``workflow`` (the workflow DID).
    """
    # Fixed typo in the log message ("callef" -> "called"); matches the
    # sibling run() in this project.
    logging.debug(f"script called with args: {args}")

    # setup config
    options = {
        "keeper-contracts": {
            "keeper.url": args.node,
            "secret_store.url": args.secretstore_url,
        },
        "resources": {
            "downloads.path": args.path.as_posix(),
            "metadata.url": args.metadata_url,
            "gateway.url": args.gateway_url,
        },
    }
    config = Config(options_dict=options)
    logging.debug(f"nevermined config: {config}")

    # setup paths
    inputs_path = args.path / "inputs"
    inputs_path.mkdir()
    outputs_path = args.path / "outputs"
    outputs_path.mkdir()
    transformations_path = args.path / "transformations"
    transformations_path.mkdir()

    # setup nevermined
    nevermined = Nevermined(config)
    # NOTE(review): `keeper` is not referenced below — kept in case
    # Keeper.get_instance() has required singleton-initialisation side
    # effects; confirm before removing.
    keeper = Keeper.get_instance()

    # setup consumer
    # here we need to create a temporary key file from the credentials
    key_file = NamedTemporaryFile("w", delete=False)
    json.dump(args.credentials, key_file)
    key_file.flush()
    key_file.close()
    consumer = Account(
        Web3.toChecksumAddress(args.credentials["address"]),
        password=args.password,
        key_file=key_file.name,
    )

    # resolve workflow
    workflow = nevermined.assets.resolve(args.workflow)
    logging.info(f"resolved workflow {args.workflow}")
    logging.debug(f"workflow ddo {workflow.as_dictionary()}")

    # get stages
    stages = workflow.get_service("metadata").main["workflow"]["stages"]
    logging.debug(f"stages {stages}")

    # get inputs and transformations
    inputs = []
    transformations = []
    for stage in stages:
        inputs += [input_["id"] for input_ in stage["input"]]
        if "transformation" in stage:
            transformations.append(stage["transformation"]["id"])
    logging.debug(f"inputs: {inputs}")
    logging.debug(f"transformations: {transformations}")

    # download assets: inputs come from the "compute" service,
    # transformations from the "access" service
    for did in inputs:
        ddo = nevermined.assets.resolve(did)
        service_agreement = ddo.get_service("compute")

        logging.info(f"downloading asset {ddo.did}")
        nevermined.assets.download(ddo.did, service_agreement.index, consumer,
                                   inputs_path.as_posix())

    for did in transformations:
        ddo = nevermined.assets.resolve(did)
        service_agreement = ddo.get_service("access")

        logging.info(f"downloading asset {ddo.did}")
        nevermined.assets.download(ddo.did, service_agreement.index, consumer,
                                   transformations_path.as_posix())
Exemplo n.º 6
0
def demo():
    """The Nevermined Federated Learning demo.

    This demo showcases the nevermined Federated Learning capabilities.
    FLow:
        1. Setup nevermined
        2. Setup accounts
        3. Publish compute to the data assets
        4. Publish algorithm
        5. Publish workflows
        6. Order computations
        7. Execute workflows

    Returns:
        list: ``(agreement_id, execution_id)`` tuples, one per started job
        (coordinator, asset0, asset1).
    """

    print("Setting up...\n")

    date_created = dates_generator()

    # 1. Setup nevermined
    nevermined = Nevermined(Config("config.ini"))
    keeper = Keeper.get_instance()
    # NOTE(review): hard-coded provider (gateway) address — presumably the
    # local test gateway; confirm it matches the running environment.
    provider = "0x068Ed00cF0441e4829D9784fCBe7b9e26D4BD8d0"

    # 2. Setup accounts
    # A single test account plays every role (both data providers, the
    # coordinator provider and the consumer).
    acc = Account(Web3.toChecksumAddress(PARITY_ADDRESS), PARITY_PASSWORD,
                  PARITY_KEYFILE)
    nevermined.accounts.request_tokens(acc, 100)
    provider_data0 = acc
    provider_data1 = acc
    provider_coordinator = acc
    consumer = acc

    # 3. Publish compute to the data
    # Each metadata file gets a fresh creation date from the generator.
    with open("resources/metadata/metadata0.json") as f:
        metadata_data0 = json.load(f)
        metadata_data0["main"]["dateCreated"] = next(date_created)
    with open("resources/metadata/metadata1.json") as f:
        metadata_data1 = json.load(f)
        metadata_data1["main"]["dateCreated"] = next(date_created)

    ddo_compute0 = nevermined.assets.create_compute(
        metadata_data0,
        provider_data0,
        providers=[provider],
    )
    assert ddo_compute0 is not None, "Creating asset compute0 on-chain failed"
    print(
        f"[DATA_PROVIDER0 --> NEVERMINED] Publishing compute to the data asset for asset0: {ddo_compute0.did}"
    )

    ddo_compute1 = nevermined.assets.create_compute(
        metadata_data1,
        provider_data1,
        providers=[provider],
    )
    assert ddo_compute1 is not None, "Creating asset compute1 on-chain failed"
    print(
        f"[DATA_PROVIDER1 --> NEVERMINED] Publishing compute to the data asset for asset1: {ddo_compute1.did}"
    )

    with open("resources/metadata/metadata_compute_coordinator.json") as f:
        metadata_compute_coordinator = json.load(f)
        metadata_compute_coordinator["main"]["dateCreated"] = next(
            date_created)

    ddo_compute_coordinator = nevermined.assets.create_compute(
        metadata_compute_coordinator,
        provider_coordinator,
        providers=[provider],
    )
    assert (ddo_compute_coordinator
            is not None), "Creating asset compute_coordinator on-chain failed"
    print(
        f"[COORDINATOR_PROVIDER --> NEVERMINED] Publishing coordinator compute asset: {ddo_compute_coordinator.did}"
    )

    # 4. Publish algorithm
    with open("resources/metadata/metadata_transformation.json") as f:
        metadata_transformation = json.load(f)
        metadata_transformation["main"]["dateCreated"] = next(date_created)

    ddo_transformation = nevermined.assets.create(
        metadata_transformation,
        consumer,
        providers=[provider],
    )
    assert (ddo_transformation
            is not None), "Creating asset transformation on-chain failed"
    print(
        f"[DATA_SCIENTIST --> NEVERMINED] Publishing algorithm asset: {ddo_transformation.did}"
    )

    # 5. Publish the workflows
    with open("resources/metadata/metadata_workflow.json") as f:
        metadata_workflow = json.load(f)
    with open("resources/metadata/metadata_workflow_coordinator.json") as f:
        metadata_workflow_coordinator = json.load(f)

    # Deep-copy the template so each workflow can point at its own input
    # DID while sharing the same transformation DID.
    metadata_workflow0 = copy.deepcopy(metadata_workflow)
    metadata_workflow0["main"]["workflow"]["stages"][0]["input"][0][
        "id"] = ddo_compute0.did
    metadata_workflow0["main"]["workflow"]["stages"][0]["transformation"][
        "id"] = ddo_transformation.did

    metadata_workflow1 = copy.deepcopy(metadata_workflow)
    metadata_workflow1["main"]["workflow"]["stages"][0]["input"][0][
        "id"] = ddo_compute1.did
    metadata_workflow1["main"]["workflow"]["stages"][0]["transformation"][
        "id"] = ddo_transformation.did

    metadata_workflow_coordinator["main"]["dateCreated"] = next(date_created)

    ddo_workflow0 = nevermined.assets.create(
        metadata_workflow0,
        consumer,
        providers=[provider],
    )
    assert ddo_workflow0 is not None, "Creating asset workflow0 on-chain failed"
    print(
        f"[DATA_SCIENTIST --> NEVERMINED] Publishing compute workflow for asset0: {ddo_workflow0.did}"
    )

    ddo_workflow1 = nevermined.assets.create(
        metadata_workflow1,
        consumer,
        providers=[provider],
    )
    assert ddo_workflow1 is not None, "Creating asset workflow1 on-chain failed"
    print(
        f"[DATA_SCIENTIST --> NEVERMINED] Publishing compute workflow for asset1: {ddo_workflow1.did}"
    )

    ddo_workflow_coordinator = nevermined.assets.create(
        metadata_workflow_coordinator,
        consumer,
        providers=[provider],
    )
    assert (ddo_workflow_coordinator
            is not None), "Creating asset workflow_coordinator on-chain failed"
    print(
        f"[DATA_SCIENTIST --> NEVERMINED] Publishing compute workflow for coordinator: {ddo_workflow_coordinator.did}"
    )

    # 6. Order computations
    # For each asset: order the compute service, then block (up to 60s) on
    # the on-chain lock-reward and compute-execution condition events.
    service0 = ddo_compute0.get_service(
        service_type=ServiceTypes.CLOUD_COMPUTE)
    service_agreement0 = ServiceAgreement.from_service_dict(
        service0.as_dictionary())
    agreement_id0 = nevermined.assets.order(ddo_compute0.did,
                                            service_agreement0.index, consumer,
                                            consumer)
    print(
        f"[DATA_SCIENTIST --> DATA_PROVIDER0] Requesting an agreement for compute to the data for asset0: {agreement_id0}"
    )

    event = keeper.lock_reward_condition.subscribe_condition_fulfilled(
        agreement_id0, 60, None, (), wait=True)
    assert event is not None, "Reward condition is not found"

    event = keeper.compute_execution_condition.subscribe_condition_fulfilled(
        agreement_id0, 60, None, (), wait=True)
    assert event is not None, "Execution condition not found"

    service1 = ddo_compute1.get_service(
        service_type=ServiceTypes.CLOUD_COMPUTE)
    service_agreement1 = ServiceAgreement.from_service_dict(
        service1.as_dictionary())
    agreement_id1 = nevermined.assets.order(ddo_compute1.did,
                                            service_agreement1.index, consumer,
                                            consumer)
    print(
        f"[DATA_SCIENTIST --> DATA_PROVIDER1] Requesting an agreement for compute to the data for asset1: {agreement_id1}"
    )

    event = keeper.lock_reward_condition.subscribe_condition_fulfilled(
        agreement_id1, 60, None, (), wait=True)
    assert event is not None, "Reward condition is not found"

    event = keeper.compute_execution_condition.subscribe_condition_fulfilled(
        agreement_id1, 60, None, (), wait=True)
    assert event is not None, "Execution condition not found"

    service_coordinator = ddo_compute_coordinator.get_service(
        service_type=ServiceTypes.CLOUD_COMPUTE)
    service_agreement_coordinator = ServiceAgreement.from_service_dict(
        service_coordinator.as_dictionary())
    agreement_id_coordinator = nevermined.assets.order(
        ddo_compute_coordinator.did, service_agreement_coordinator.index,
        consumer, consumer)
    print(
        f"[DATA_SCIENTIST --> COORDINATOR_PROVIDER] Requesting an agreement for coordinator compute: {agreement_id_coordinator}"
    )

    event = keeper.lock_reward_condition.subscribe_condition_fulfilled(
        agreement_id_coordinator, 60, None, (), wait=True)
    assert event is not None, "Reward condition is not found"

    event = keeper.compute_execution_condition.subscribe_condition_fulfilled(
        agreement_id_coordinator, 60, None, (), wait=True)
    assert event is not None, "Execution condition not found"

    # 7. Execute workflows
    # The coordinator is started first; the participants then connect to it.
    compute_coordinator_id = nevermined.assets.execute(
        agreement_id_coordinator,
        ddo_compute_coordinator.did,
        service_agreement_coordinator.index,
        consumer,
        ddo_workflow_coordinator.did,
    )
    print(
        f"[DATA_SCIENTIST --> COORDINATOR_PROVIDER] Requesting execution for coordinator compute: {compute_coordinator_id}"
    )

    compute_asset0_id = nevermined.assets.execute(
        agreement_id0,
        ddo_compute0.did,
        service_agreement0.index,
        consumer,
        ddo_workflow0.did,
    )
    print(
        f"[DATA_SCIENTIST --> DATA_PROVIDER0] Requesting execution for compute to data for asset0: {compute_asset0_id}"
    )

    compute_asset1_id = nevermined.assets.execute(
        agreement_id1,
        ddo_compute1.did,
        service_agreement1.index,
        consumer,
        ddo_workflow1.did,
    )
    print(
        f"[DATA_SCIENTIST --> DATA_PROVIDER1] Requesting execution for compute to data for asset1: {compute_asset1_id}"
    )

    jobs = [
        (agreement_id_coordinator, compute_coordinator_id),
        (agreement_id0, compute_asset0_id),
        (agreement_id1, compute_asset1_id),
    ]
    return jobs
Exemplo n.º 7
0
def demo():
    """Publish a dataset, an algorithm and a workflow, then order and run an
    in-situ compute job, polling until it finishes and downloading the outputs.
    """
    nevermined = Nevermined(Config(CONFIG_FILE))
    provider_account = Account(PROVIDER_ADDRESS, PROVIDER_PASSWORD,
                               PROVIDER_KEYFILE)

    # publish asset
    metadata_compute = {
        "main": {
            "name":
            "CIFAR-10 Part 1",
            "dateCreated":
            date_now(),
            "author":
            "Nevermined Provider",
            "license":
            "",
            "price":
            "1",
            "files": [{
                "index":
                0,
                "contentType":
                "image/png",
                "checksum":
                "0x52b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3bbc3377",
                "checksumType":
                "MD5",
                "contentLength":
                "12057507",
                "url":
                "https://ck2a37sxobgcdarvr7jewxvrlvde6kehhoy6lmfuks4uabuavtiq.arweave.net/ErQN_ldwTCGCNY_SS16xXUZPKIc7seWwtFS5QAaArNE",
            }, {
                "index":
                1,
                "contentType":
                "application/json",
                "checksum":
                "0x52b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3bbc3377",
                "checksumType":
                "MD5",
                "contentLength":
                "12057507",
                "url":
                "https://raw.githubusercontent.com/keyko-io/eth-nft-hack/rod/artsgenerator-demo/config.json"
            }],
            "type":
            "dataset",
        }
    }

    ddo_compute = nevermined.assets.create_compute(metadata_compute,
                                                   provider_account)
    print(f"Published asset with DID: {ddo_compute.did}")

    # publish algorithm
    metadata_algorithm = {
        "main": {
            "name":
            "Generative artist",
            "dateCreated":
            date_now(),
            "author":
            "Gene Kogan",
            "license":
            "",
            "price":
            "0",
            # This file will not be used but there is a bug on the sdk that
            # expects a least one file to exist in an algorithm
            "files": [
                {
                    "index":
                    0,
                    "contentType":
                    "text/text",
                    "checksum":
                    "0x52b5c93b82dd9e7ecc3d9fdf4755f7f69a54484941897dc517b4adfe3bbc3377",
                    "checksumType":
                    "MD5",
                    "contentLength":
                    "12057507",
                    "url":
                    "https://github.com/nevermined-io/tools/raw/master/README.md",
                },
            ],
            "type":
            "algorithm",
            "algorithm": {
                "language": "python",
                "format": "py",
                "version": "0.1.0",
                "entrypoint":
                "pwd && ls -lR && cat /data/inputs/**/config.json && python /nevermined-demo/run.py",
                "requirements": {
                    "container": {
                        "image":
                        "neverminedio/artgenerator",
                        "tag":
                        "latest",
                        "checksum":
                        "sha256:53ad3a03b2fb240b6c494339821e6638cd44c989bcf26ec4d51a6a52f7518c1d",
                    }
                },
            },
        }
    }

    ddo_algorithm = nevermined.assets.create(metadata_algorithm,
                                             provider_account)
    print(f"Published algorithm with DID: {ddo_algorithm.did}")

    # The workflow wires the dataset (input) to the algorithm (transformation)
    # in a single stage.
    metadata_workflow = {
        "main": {
            "name": "Mint my NFT",
            "dateCreated": date_now(),
            "author": "Nevermined Consumer",
            "license": "",
            "price": "0",
            "type": "workflow",
            "workflow": {
                "stages": [{
                    "index": 0,
                    "input": [{
                        "index": 0,
                        "id": ddo_compute.did
                    }],
                    "transformation": {
                        "id": ddo_algorithm.did
                    },
                }]
            },
        }
    }

    ddo_workflow = nevermined.assets.create(metadata_workflow,
                                            provider_account)
    print(f"Published workflow with DID: {ddo_workflow.did}")

    # order the asset
    keeper = Keeper.get_instance()

    service_agreement_id = nevermined.assets.order(
        ddo_compute.did,
        ServiceTypesIndices.DEFAULT_COMPUTING_INDEX,
        provider_account,
        provider_account,
    )
    print()
    print("Ordering Data In-Situ Compute")
    print(f"Service Agreement ID: {service_agreement_id}")
    wait_for_event(keeper, service_agreement_id)

    # execute workflow
    execution_id = nevermined.assets.execute(
        service_agreement_id,
        ddo_compute.did,
        ServiceTypesIndices.DEFAULT_COMPUTING_INDEX,
        provider_account,
        ddo_workflow.did,
    )
    print("Firing up the GPUs to mine some Art...")
    print(f"Execution ID: {execution_id}")
    print("This will take a about 1h...")

    print()
    print('Monitoring compute status:')
    # wait for compute job
    # Poll once a minute until the job either fails (raise) or succeeds
    # (capture the DID of the published outputs asset).
    outputs_did = None
    while True:
        status = nevermined.assets.compute_status(service_agreement_id,
                                                  execution_id,
                                                  provider_account)
        if status["status"] == "Failed":
            raise ValueError("The job failed")
        elif status["status"] == "Succeeded":
            outputs_did = status["did"]
            break

        print(f"{execution_id}: {status['status']}")
        time.sleep(60)

    print(f"Outputs DID: {outputs_did}")

    # download the output assets
    print()
    print('Downloading outputs...')
    nevermined.assets.download(outputs_did,
                               ServiceTypesIndices.DEFAULT_ACCESS_INDEX,
                               provider_account, "./")

    print("Finished!")
Exemplo n.º 8
0
def create_arguments(ddo):
    """Build the argument list to append to the argo workflow template.

    Args:
        ddo (:py:class:`common_utils_py.ddo.ddo.DDO`): The workflow DDO.

    Returns:
        list: The list of arguments to be appended to the argo workflow

    """
    entrypoint_args = ''
    container_image = ''
    container_tag = ''

    # Only non-coordinator workflows carry a transformation whose container
    # and entrypoint must be resolved from the metadata service.
    if ddo.metadata["main"]["type"] != "fl-coordinator":
        workflow = ddo.metadata["main"]["workflow"]

        nevermined = Nevermined(Config(options_dict={
            "resources": {
                "metadata.url": "http://172.17.0.1:5000",
            },
            "keeper-contracts": {
                "keeper.url": "http://172.17.0.1:8545"
            }
        }))

        # TODO: Currently this only supports one stage
        first_stage = workflow["stages"][0]
        transformation_ddo = nevermined.assets.resolve(
            first_stage["transformation"]["id"])
        algorithm = transformation_ddo.get_service(
            "metadata").main["algorithm"]

        # get args and container
        entrypoint_args = algorithm["entrypoint"]
        container = algorithm["requirements"]["container"]
        container_image = container["image"]
        container_tag = container["tag"]

    return [
        {
            "name": "credentials",
            # remove white spaces
            "value": json.dumps(KEYFILE, separators=(",", ":"))
        },
        {
            "name": "password",
            "value": os.getenv("PROVIDER_PASSWORD")
        },
        {
            "name": "metadata_url",
            "value": "http://172.17.0.1:5000"
        },
        {
            "name": "gateway_url",
            "value": "http://172.17.0.1:8030"
        },
        {
            "name": "node",
            "value": "http://172.17.0.1:8545"
        },
        {
            "name": "secret_store_url",
            "value": "http://172.17.0.1:12001"
        },
        {
            "name": "workflow",
            "value": f"did:nv:{ddo.asset_id[2:]}"
        },
        {
            "name": "verbose",
            "value": "false"
        },
        {
            "name": "transformation_container_image",
            "value": f"{container_image}:{container_tag}"
        },
        {
            "name": "transformation_arguments",
            "value": entrypoint_args
        }
    ]
Exemplo n.º 9
0
def run(args):
    """Publish a compute job's outputs: upload the files in ``outputs/`` to a
    MinIO bucket, register them as a new asset, record provenance, and
    transfer ownership of the new asset to the workflow owner.

    Args:
        args: Parsed CLI arguments. Reads ``node``, ``secretstore_url``,
            ``path`` (a ``pathlib.Path``), ``metadata_url``, ``gateway_url``,
            ``credentials`` (parsed keyfile dict), ``password`` and
            ``workflow`` (the workflow DID).
    """
    logging.debug(f"script called with args: {args}")

    # setup config
    options = {
        "keeper-contracts": {
            "keeper.url": args.node,
            "secret_store.url": args.secretstore_url,
        },
        "resources": {
            "downloads.path": args.path.as_posix(),
            "metadata.url": args.metadata_url,
            "gateway.url": args.gateway_url,
        },
    }
    config = Config(options_dict=options)
    logging.debug(f"nevermined config: {config}")

    # setup paths
    outputs_path = args.path / "outputs"

    # setup nevermined
    nevermined = Nevermined(config)

    # setup consumer
    # here we need to create a temporary key file from the credentials
    key_file = NamedTemporaryFile("w", delete=False)
    json.dump(args.credentials, key_file)
    key_file.flush()
    key_file.close()
    account = Account(
        Web3.toChecksumAddress(args.credentials["address"]),
        password=args.password,
        key_file=key_file.name,
    )

    # resolve workflow
    workflow = nevermined.assets.resolve(args.workflow)
    logging.info(f"resolved workflow {args.workflow}")
    logging.debug(f"workflow ddo {workflow.as_dictionary()}")

    workflow_owner = nevermined.assets.owner(workflow.did)
    # One provenance id ties together the used / was_derived_from /
    # was_associated_with records below.
    provenance_id = uuid.uuid4()

    # get files to upload
    files = []
    index = 0
    for f in outputs_path.rglob("*"):
        if f.is_file():
            files.append({
                "index": index,
                "name": f.name,
                "path": f.as_posix(),
                "contentType": mimetypes.guess_type(f)[0],
                "contentLength": f.stat().st_size,
            })
            index += 1

    # create bucket
    # NOTE(review): hard-coded local MinIO endpoint and demo credentials —
    # presumably only valid in the test environment.
    minio_client = Minio(
        "172.17.0.1:8060",
        access_key="AKIAIOSFODNN7EXAMPLE",
        secret_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
        secure=False,
    )
    bucket_name = f"pod-publishing-{str(uuid.uuid4())}"
    minio_client.make_bucket(bucket_name, location="eu-central-1")
    logging.info(f"Created bucket {bucket_name}")
    minio_client.set_bucket_policy(bucket_name,
                                   s3_readonly_policy(bucket_name))
    logging.info(f"Set bucket {bucket_name} policy to READ_ONLY")
    # Record that this compute activity used the workflow asset.
    nevermined.provenance.used(
        provenance_id=Web3.toBytes(provenance_id.bytes),
        did=convert_to_bytes(workflow.did),
        agent_id=convert_to_bytes(workflow_owner),
        activity_id=convert_to_bytes(nevermined._web3.keccak(text='compute')),
        signature=nevermined.keeper.sign_hash(add_ethereum_prefix_and_hash_msg(
            str(provenance_id)),
                                              account=account),
        account=account,
        attributes='compute')

    # upload files
    # After upload, replace the local path with a presigned GET url so the
    # published metadata points at the bucket.
    for f in files:
        minio_client.fput_object(bucket_name, f["name"], f["path"])
        logging.info(f"Uploaded file {f['path']}")

        del f["path"]
        f["url"] = minio_client.presigned_get_object(bucket_name, f["name"])
        logging.info(f"File url {f['url']}")

    # Create ddo
    publishing_date = datetime.utcnow().isoformat(timespec="seconds") + "Z"
    metadata = {
        "main": {
            "dateCreated": publishing_date,
            "datePublished": publishing_date,
            "author": "pod-publishing",
            "license": "No License Specified",
            "price": "1",
            "metadata": {
                "workflow": workflow.metadata,
                "executionId": os.getenv("EXECUTION_ID"),
            },
            "files": files,
            "type": "dataset",
        }
    }

    # publish the ddo
    # Retry up to 3 times (30s apart) on ValueError before giving up.
    ddo = None
    retry = 0
    while ddo is None:
        try:
            ddo = nevermined.assets.create(
                metadata,
                account,
                providers=[account.address],
            )
            nevermined.provenance.was_derived_from(
                provenance_id=Web3.toBytes(provenance_id.bytes),
                new_entity_did=convert_to_bytes(ddo.did),
                used_entity_did=convert_to_bytes(workflow.did),
                agent_id=convert_to_bytes(workflow_owner),
                activity_id=convert_to_bytes(
                    nevermined._web3.keccak(text='published')),
                account=account,
                attributes='published')
        except ValueError:
            if retry == 3:
                raise
            logging.warning("retrying creation of asset")
            retry += 1
            time.sleep(30)
    logging.info(f"Publishing {ddo.did}")
    logging.debug(f"Publishing ddo: {ddo}")

    # transfer ownership to the owner of the workflow
    # Same retry scheme as above; `else: break` exits only on success.
    retry = 0
    while True:
        try:
            nevermined.assets.transfer_ownership(ddo.did, workflow_owner,
                                                 account)
            # NOTE(review): unlike the other provenance calls, `did` and
            # `agent_id` are passed raw here (no convert_to_bytes) — confirm
            # whether this is intentional.
            nevermined.provenance.was_associated_with(
                provenance_id=Web3.toBytes(provenance_id.bytes),
                did=workflow.did,
                agent_id=workflow_owner,
                activity_id=convert_to_bytes(
                    nevermined._web3.keccak(text='transferOwnership')),
                account=account,
                attributes='transferOwnership')
        except ValueError:
            if retry == 3:
                raise
            logging.warning("retrying transfer of ownership")
            retry += 1
            time.sleep(30)
        else:
            break
    logging.info(
        f"Transfered ownership of {workflow.did} from {account.address} to {workflow_owner}"
    )