Example no. 1
def _update_data_price():
    Ebb = cfg.Ebb
    if not Ebb.does_provider_exist(env.PROVIDER_ID):
        log(f"warning: Provider {env.PROVIDER_ID} is not registered.\n")
        raise QuietExit

    source_code_hash = "b6aaf03752dc68d625fc57b451faa2bf"
    new_data_price = 21
    commitment_block_duration = 600
    source_code_hash_bytes = cfg.w3.toBytes(text=source_code_hash)
    try:
        (price, _commitment_block_duration) = cfg.Ebb.get_registered_data_prices(
            env.PROVIDER_ID, source_code_hash_bytes, 0
        )
        if price == new_data_price and _commitment_block_duration == commitment_block_duration:
            log(f"## data([green]{source_code_hash}[/green]) already registerered with the given values")
            raise QuietExit
    except Exception:
        raise QuietExit

    try:
        tx = Ebb.update_data_price(source_code_hash_bytes, new_data_price, commitment_block_duration)
        get_tx_status(Ebb.tx_id(tx))
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)
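
# A minimal __main__ wrapper sketch (an assumption, mirroring the pattern used by the
# other examples); cfg, env, log, QuietExit, get_tx_status and print_tb are assumed to
# be imported from the broker modules:
if __name__ == "__main__":
    try:
        _update_data_price()
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)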
Example no. 2
def deposit_storage(eth_address, is_provider=False):
    """Deposit storage balance.

    :param str eth_address: Ethereum address of the provider or the job owner
    :param bool is_provider: Checks if the caller is the provider
    """
    from_block = Ebb.get_deployed_block_number()
    if is_provider:
        event_filter = Ebb._eBlocBroker.events.LogJob.createFilter(
            fromBlock=int(from_block),
            argument_filters={"provider": eth_address},
            toBlock="latest",
        )
    else:  # should be owner of the job
        event_filter = Ebb._eBlocBroker.events.LogJob.createFilter(
            fromBlock=int(from_block),
            argument_filters={"owner": eth_address},
            toBlock="latest",
        )

    for job in event_filter.get_all_entries():
        job_info = job.args
        flag_check = []
        deposit = 0  # keep defined even if no storage info is fetched below
        for idx, code_hash in enumerate(job_info["sourceCodeHash"]):
            main_cloud_storage_id = job_info["cloudStorageID"][idx]
            if main_cloud_storage_id in (StorageID.IPFS, StorageID.IPFS_GPG):
                _hash = bytes32_to_ipfs(code_hash)
                _type = "ipfs_hash"
            else:
                _hash = cfg.w3.toText(code_hash)
                _type = "md5sum"

            log(br(f"{idx}, {_type}"), "bold cyan", end="")
            if len(code_hash) <= 32:
                log(f" {_hash} bytes={code_hash}", "bold")
            else:
                log(f" {_hash}\n\t{code_hash}", "bold")

            provider = Ebb.w3.toChecksumAddress(job_info["provider"])
            if is_provider and eth_address.lower() == provider.lower():
                data_owner = Ebb.w3.toChecksumAddress(job_info["owner"])
                deposit, output = Ebb.get_storage_info(provider, data_owner,
                                                       code_hash)
                flag_check.append(output[3])
                log(f"deposit={deposit}, {output}", "bold")

        if deposit > 0 and not any(flag_check):  # if not any(i for i in flag_check):
            is_verified_list = [True, True]
            tx = Ebb._data_received(
                job_info["jobKey"],
                job_info["index"],
                job_info["sourceCodeHash"],
                job_info["cacheType"],
                is_verified_list,
            )
            get_tx_status(Ebb.tx_id(tx))
        else:
            log("warning: already all data files are are verifid")
Example no. 3
def main():
    owner_address = Ebb.get_owner()
    for user in users:
        try:
            tx_hash = Ebb.authenticate_orc_id(user, "0000-0001-7642-0552", owner_address)
            if tx_hash:
                get_tx_status(tx_hash)
        except Exception as e:
            print_tb(e)
Example no. 4
def main():
    yaml_fn = "~/ebloc-broker/broker/test_setup/requester.yaml"
    for user in users:
        yaml_user = Yaml(yaml_fn)
        yaml_user["config"]["account"] = user
        with suppress(Exception):
            tx_hash = Ebb.register_requester(yaml_fn, is_question=False)
            if tx_hash:
                get_tx_status(tx_hash)
Example no. 5
def main():
    provider = env.PROVIDER_ID
    pre_check_data(provider)
    Ebb = cfg.Ebb
    data_hash = b"f13d75bc60898f0823566347e380a34d"
    try:
        if is_data_registered(provider, data_hash):
            tx = Ebb.remove_registered_data(data_hash)
            get_tx_status(Ebb.tx_id(tx))
        else:
            log(f"## data({data_hash}) is alread deleted or not registered")
    except Exception as e:
        print_tb(e)
Example no. 6
def pre_submit(storage_ids, provider_address):
    is_pass = True
    required_confs = 0
    yaml_fn = Path.home() / "ebloc-broker" / "broker" / "test_setup" / "nas" / "job_nas.yaml"
    yaml_cfg = Yaml(yaml_fn)
    yaml_cfg["config"]["provider_address"] = provider_address
    for storage_id in storage_ids:
        yaml_cfg["config"]["source_code"]["storage_id"] = storage_id
        benchmark_name = create_nas_job_script(is_small=True)
        submit_base = SubmitBase(yaml_cfg.path)
        tx_hash = submit_base.submit(is_pass, required_confs)
        if required_confs >= 1:
            tx_receipt = get_tx_status(tx_hash, is_silent=True)
            if tx_receipt["status"] == 1:
                processed_logs = Ebb._eBlocBroker.events.LogJob().processReceipt(
                    tx_receipt, errors=DISCARD
                )
                try:
                    if processed_logs:
                        job_result = vars(processed_logs[0].args)
                        job_result["tx_hash"] = tx_hash
                        job_result["submitted_job_kind"] = f"nas_{benchmark_name}"
                        log(job_result)
                except IndexError:
                    log(f"E: Tx({tx_hash}) is reverted")
Example no. 7
def register_requester(yaml_fn):
    """Return provider info.

    :param str yaml_fn: Full file path of Yaml file that contains the requester info
    """
    t1.join()
    try:
        tx_hash = Ebb.register_requester(yaml_fn)
        if tx_hash:
            get_tx_status(tx_hash)
        else:
            log()
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)
Example no. 8
def main():
    Ebb = cfg.Ebb
    if len(sys.argv) == 3:
        address = str(sys.argv[1])
        orc_id = str(sys.argv[2])
    else:
        log("E: Please provide the address and its orc_id as argument")
        log("[bold]   ./authenticate_orc_id.py <address> <orc_id>", "mangenta")
        sys.exit(1)

    try:
        owner_address = Ebb.get_owner()
        tx_hash = Ebb.authenticate_orc_id(address, orc_id, owner_address)
        if tx_hash:
            get_tx_status(tx_hash)
    except Exception as e:
        print_tb(e)
Example no. 9
def _withdraw():
    Ebb = cfg.Ebb
    if len(sys.argv) == 2:
        account = str(sys.argv[1])
    else:
        log("## provide an ethereum account as an argument")
        sys.exit(1)

    try:
        balance = Ebb.get_balance(account)
        if balance > 0:
            log(f"account_balance={balance}", "bold")
            get_tx_status(Ebb.withdraw(account))
        else:
            log("warning: account balance is empty nothing to do")
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)
Example no. 10
def submit_gdrive(job: Job, is_pass=False, required_confs=1):
    log("==> Submitting source code through [blue]GDRIVE[/blue]")
    pre_check()
    Ebb = cfg.Ebb
    tx_hash = None  # keep the final return defined even if the submission fails
    job.folders_to_share = job.paths
    check_link_folders(job.data_paths,
                       job.registered_data_files,
                       is_pass=is_pass)
    _git.generate_git_repo(job.folders_to_share)
    job.clean_before_submit()
    requester = Ebb.w3.toChecksumAddress(job.requester_addr)
    provider = Ebb.w3.toChecksumAddress(job.provider_addr)
    job = gdrive.submit(provider, requester, job)
    for folder_to_share in job.folders_to_share:
        if isinstance(folder_to_share, bytes):
            code_hash = folder_to_share
            job.code_hashes.append(code_hash)
            job.code_hashes_str.append(code_hash.decode("utf-8"))
        else:
            tar_hash = job.foldername_tar_hash[folder_to_share]
            #: required to send string as bytes == str_data.encode('utf-8')
            code_hash = Ebb.w3.toBytes(text=tar_hash)
            job.code_hashes.append(code_hash)
            job.code_hashes_str.append(code_hash.decode("utf-8"))

    tar_hash = job.foldername_tar_hash[job.folders_to_share[0]]
    key = job.keys[tar_hash]
    job.price, *_ = job.cost(provider, requester)
    try:
        tx_hash = Ebb.submit_job(provider,
                                 key,
                                 job,
                                 requester=requester,
                                 required_confs=required_confs)
        tx_receipt = get_tx_status(tx_hash)
        if tx_receipt["status"] == 1:
            processed_logs = Ebb._eBlocBroker.events.LogJob().processReceipt(
                tx_receipt, errors=DISCARD)
            log(vars(processed_logs[0].args))
            try:
                log(f"{ok()} [bold]job_index={processed_logs[0].args['index']}"
                    )
            except IndexError:
                log(f"E: Tx({tx_hash}) is reverted")
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)

    log()
    for k, v in job.tar_hashes.items():
        log(f"{k} [blue]=>[/blue] {v}")

    return tx_hash
Example no. 11
def _register_data(source_code_hash, data_price, commitment_blk_duration):
    Ebb = cfg.Ebb
    is_exit = False
    price = None
    if not Ebb.does_provider_exist(env.PROVIDER_ID):
        log(f"warning: provider [green]{env.PROVIDER_ID}[/green] is not registered")
        raise QuietExit

    if not Ebb.is_orcid_verified(env.PROVIDER_ID):
        log(f"warning: provider [green]{env.PROVIDER_ID}[/green]'s orcid id is not authenticated yet")
        raise QuietExit

    source_code_hash_bytes = cfg.w3.toBytes(text=source_code_hash)
    try:
        (price, _commitment_blk_duration) = cfg.Ebb.get_registered_data_prices(
            env.PROVIDER_ID, source_code_hash_bytes, 0
        )
        if price == data_price and _commitment_blk_duration == commitment_blk_duration:
            log(f"## data([green]{source_code_hash}[/green]) already registered with the given values")
        else:
            log(
                f"## data([green]{source_code_hash}[/green]) is already registered.\n"
                "Use [blue]./update_data_price.py[/blue] to update its price"
            )

        is_exit = True
    except Exception:
        pass

    if is_exit:
        raise QuietExit

    try:
        tx = Ebb.register_data(source_code_hash_bytes, data_price, commitment_blk_duration)
        get_tx_status(Ebb.tx_id(tx))
    except QuietExit as e:
        raise e
    except Exception as e:
        print_tb(e)
Example no. 12
    def analyze_tx_status(self, tx_hash) -> bool:
        try:
            tx_receipt = get_tx_status(tx_hash)
            try:
                if not self.Ebb:
                    log("warning: self.Ebb is empty object")

                processed_logs = self.Ebb.eBlocBroker.events.LogJob(
                ).processReceipt(tx_receipt, errors=self.w3.DISCARD)
                log(vars(processed_logs[0].args))
                log(f"==> job_index={processed_logs[0].args['index']}")
            except IndexError:
                log("E: Transaction is reverted")
            return True
        except Exception as e:
            print_tb(e)
            return False
Example no. 13
def submit(provider, requester, job, required_confs=1):
    tx_hash = None  # keep the final return defined even if _submit raises
    try:
        tx_hash = _submit(provider, requester, job, required_confs)
        if required_confs >= 1:
            tx_receipt = get_tx_status(tx_hash)
            if tx_receipt["status"] == 1:
                processed_logs = Ebb._eBlocBroker.events.LogJob().processReceipt(tx_receipt, errors=DISCARD)
                log(vars(processed_logs[0].args))
                try:
                    log(f"{ok()} [bold]job_index={processed_logs[0].args['index']}")
                except IndexError:
                    log(f"E: Tx({tx_hash}) is reverted")
        else:
            log(f"tx_hash={tx_hash}", "bold")
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)

    return tx_hash
Example no. 14
    if not available_core:
        raise Exception("Please enter positive value for the available core number")

    if not commitment_blk:
        raise Exception("Please enter positive value for the commitment block number")

    try:
        tx = self._update_provider_prices(available_core, commitment_blk, prices)
        return self.tx_id(tx)
    except Exception as e:
        print_tb(e)
        raise e


if __name__ == "__main__":
    Ebb = cfg.Ebb
    available_core = 4
    commitment_blk = 600
    price_core_min = 100
    price_data_transfer = 1
    price_storage = 1
    price_cache = 1
    prices = [price_core_min, price_data_transfer, price_storage, price_cache]
    try:
        tx_hash = Ebb.update_provider_prices(available_core, commitment_blk, prices)
        receipt = get_tx_status(tx_hash)
    except Exception as e:
        print_tb(e)
        sys.exit(1)
Example no. 15
def submit_ipfs(job: Job, is_pass=False, required_confs=1):
    Ebb = cfg.Ebb
    tx_hash = None  # keep the final return defined even if the submission fails
    requester = Ebb.w3.toChecksumAddress(job.requester_addr)
    provider = Ebb.w3.toChecksumAddress(job.provider_addr)
    pre_check(job, requester)
    log("==> Attemptting to submit a job")
    main_storage_id = job.storage_ids[0]
    job.folders_to_share = job.paths
    check_link_folders(job.data_paths, job.registered_data_files, is_pass=is_pass)
    if main_storage_id == StorageID.IPFS:
        log("==> Submitting source code through [blue]IPFS[/blue]")
    elif main_storage_id == StorageID.IPFS_GPG:
        log("==> Submitting source code through [blue]IPFS_GPG[/blue]")
    else:
        log("E: Please provide IPFS or IPFS_GPG storage type for the source code")
        sys.exit(1)

    targets = []
    try:
        provider_info = Ebb.get_provider_info(provider)
    except Exception as e:
        print_tb(e)
        sys.exit(1)

    for idx, folder in enumerate(job.folders_to_share):
        if isinstance(folder, Path):
            target = folder
            if job.storage_ids[idx] == StorageID.IPFS_GPG:
                provider_gpg_fingerprint = provider_info["gpg_fingerprint"]
                if not provider_gpg_fingerprint:
                    log("E: Provider did not register any GPG fingerprint")
                    sys.exit(1)

                log(f"==> provider_gpg_fingerprint={provider_gpg_fingerprint}")
                try:
                    # target is updated
                    target = cfg.ipfs.gpg_encrypt(provider_gpg_fingerprint, target)
                    log(f"==> gpg_file={target}")
                except Exception as e:
                    print_tb(e)
                    sys.exit(1)

            try:
                ipfs_hash = cfg.ipfs.add(target)
                # ipfs_hash = ipfs.add(folder, True)  # True includes .git/
                run(["ipfs", "refs", ipfs_hash])
            except Exception as e:
                print_tb(e)
                sys.exit(1)

            if idx == 0:
                key = ipfs_hash

            job.code_hashes.append(ipfs_to_bytes32(ipfs_hash))
            job.code_hashes_str.append(ipfs_hash)
            log(f"==> ipfs_hash={ipfs_hash} | md5sum={generate_md5sum(target)}")
            if main_storage_id == StorageID.IPFS_GPG:
                # the created gpg file will be removed since it's already in ipfs
                targets.append(target)
        else:
            code_hash = folder
            if isinstance(code_hash, bytes):
                job.code_hashes.append(code_hash)
                job.code_hashes_str.append(code_hash.decode("utf-8"))

            # TODO: if its ipfs
            # if isinstance(code_hash, bytes):
            #     code_hash = code_hash.decode("utf-8")

            # if len(code_hash) == 32:
            #     value = cfg.w3.toBytes(text=code_hash)
            #     job.code_hashes.append(value)
            #     job.code_hashes_str.append(value.decode("utf-8"))
            # else:
            #     job.code_hashes.append(ipfs_to_bytes32(code_hash))
            #     job.code_hashes_str.append(code_hash)

        # if idx != len(job.folders_to_share) - 1:
        #     log("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-", "cyan")

    # requester inputs for testing purpose
    job.price, *_ = job.cost(provider, requester)
    try:
        tx_hash = Ebb.submit_job(provider, key, job, requester=requester, required_confs=required_confs)
        if required_confs >= 1:
            tx_receipt = get_tx_status(tx_hash)
            if tx_receipt["status"] == 1:
                processed_logs = Ebb._eBlocBroker.events.LogJob().processReceipt(tx_receipt, errors=DISCARD)
                try:
                    if processed_logs:
                        log("job_info:", "bold yellow")
                        log(vars(processed_logs[0].args))

                    for target in targets:
                        if ".tar.gz.gpg" in str(target):
                            _remove(target)
                except IndexError:
                    log(f"E: Tx={tx_hash} is reverted")
        else:
            pass
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)

    return tx_hash
Example no. 16
def register_provider_wrapper(yaml_fn):
    """Register provider."""
    yaml_fn = os.path.expanduser(yaml_fn)
    if not os.path.exists(yaml_fn):
        log(f"E: yaml_fn({yaml_fn}) does not exist")
        raise QuietExit

    args = Yaml(yaml_fn)
    # @b2drop.eudat.eu

    federation_cloud_id = args["cfg"]["oc_user"]
    email = args["cfg"]["gmail"]
    available_core = args["cfg"]["provider"]["available_core"]
    commitment_blk = args["cfg"]["provider"]["prices"]["commitment_blk"]
    price_core_min = args["cfg"]["provider"]["prices"]["price_core_min"]
    price_data_transfer = args["cfg"]["provider"]["prices"]["price_data_transfer"]
    price_storage = args["cfg"]["provider"]["prices"]["price_storage"]
    price_cache = args["cfg"]["provider"]["prices"]["price_cache"]
    exit_flag = False

    if not federation_cloud_id:
        log(f"E: [blue]federation_cloud_id[/blue] is empty in [magenta]{yaml_fn}"
            )
        exit_flag = True

    if not available_core:
        log(f"E: [blue]available_core[/blue] is empty in [magenta]{yaml_fn}")
        exit_flag = True

    if not commitment_blk:
        log(f"E: [blue]commitment_blk[/blue] is empty in [magenta]{yaml_fn}")
        exit_flag = True

    if not price_core_min:
        log(f"E: [blue]price_core_min[/blue] is empty in [magenta]{yaml_fn}")
        exit_flag = True

    if not price_data_transfer:
        log(f"E: [blue]price_data_transfer[/blue] is empty in [magenta]{yaml_fn}"
            )
        exit_flag = True

    if not price_storage:
        log(f"E: [blue]price_storage[/blue] is empty in [magenta]{yaml_fn}")
        exit_flag = True

    if not price_cache:
        log(f"E: [blue]price_cache[/blue] is empty in [magenta]{yaml_fn}")
        exit_flag = True

    if not email:
        log(f"E: [blue]email[/blue] is empty in [magenta]{yaml_fn}")
        exit_flag = True

    if exit_flag:
        sys.exit(1)

    ipfs_id = get_ipfs_id()
    ip_address = get_ip()
    if ip_address not in ipfs_id:
        # the public IP should exist in the ipfs id
        ipfs_address = re.sub("ip4.*?tcp",
                              f"ip4/{ip_address}/tcp",
                              ipfs_id,
                              flags=re.DOTALL)
        log(f"==> ipfs_address={ipfs_address}")
    else:
        ipfs_address = ipfs_id

    try:
        email = env.GMAIL
        gpg_fingerprint = get_gpg_fingerprint(email)
        is_gpg_published(gpg_fingerprint)
    except Exception as e:
        raise e

    if not email:
        log("E: Please provide a valid e-mail")
        sys.exit(1)

    prices = [price_core_min, price_data_transfer, price_storage, price_cache]
    args = (gpg_fingerprint, email, federation_cloud_id, ipfs_address,
            available_core, prices, commitment_blk)
    kwargs = {
        "email": email,
        "federation_cloud_id": federation_cloud_id,
        "commitment_blk": commitment_blk,
    }
    try:
        tx_hash = Ebb._register_provider(*args, **kwargs)
        if tx_hash:
            get_tx_status(tx_hash)
        else:
            log()
    except QuietExit:
        pass
    except Exception as e:
        raise e
Example no. 17
    def _get_tx_status(self, tx_hash):
        get_tx_status(tx_hash)
Example no. 18
def main():
    check_gdrive_user()
    console_ruler(f"NEW_TEST {Ebb.get_block_number()}")
    log(f" * {datetime.now().strftime('%Y-%m-%d %H:%M')}")
    if not is_process_on("mongod", "mongod"):
        raise Exception("mongodb is not running in the background")

    storage_ids = ["eudat", "gdrive", "ipfs"]
    ipfs_ids = ["ipfs_gpg", "ipfs"]
    # for provider_address in provider_addresses:
    #     pre_submit(storage_ids, provider_address)

    benchmarks = ["nas", "cppr"]
    test_dir = Path.home() / "ebloc-broker" / "broker" / "test_setup" / "nas"
    nas_yaml_fn = test_dir / "job_nas.yaml"
    cppr_yaml_fn = test_dir / "job_cppr.yaml"
    yaml_cfg = None
    counter = 0
    for _ in range(60):
        for _ in range(2):  # submitted as batch is faster
            for idx, provider_address in enumerate(provider_addresses):
                # yaml_cfg["config"]["data"]["data3"]["storage_id"] = random.choice(storage_ids)
                storage_id = (idx + counter) % len(storage_ids)
                selected_benchmark = random.choice(benchmarks)
                storage = storage_ids[storage_id]
                if storage == "ipfs":
                    storage = random.choice(ipfs_ids)

                if selected_benchmark == "nas":
                    log(
                        f" * Submitting job from NAS Benchmark to [green]{provider_address}",
                        "blue")
                    yaml_cfg = Yaml(nas_yaml_fn)
                    benchmark_name = create_nas_job_script()
                elif selected_benchmark == "cppr":
                    log(
                        f" * Submitting job with cppr datasets to [green]{provider_address}",
                        "blue")
                    yaml_cfg = Yaml(cppr_yaml_fn)
                    hash_small_data, hash_med_data = create_cppr_job_script()
                    yaml_cfg["config"]["data"]["data1"][
                        "hash"] = hash_small_data
                    yaml_cfg["config"]["data"]["data2"]["hash"] = hash_med_data
                    yaml_cfg["config"]["data"]["data3"]["storage_id"] = storage
                    small_datasets = Path.home() / "test_eblocbroker" / "dataset_zip" / "small"
                    dirs = [
                        d for d in os.listdir(small_datasets)
                        if os.path.isdir(os.path.join(small_datasets, d))
                    ]
                    dir_name = random.choice(dirs)
                    yaml_cfg["config"]["data"]["data3"]["path"] = str(
                        small_datasets / dir_name)

                yaml_cfg["config"]["source_code"]["storage_id"] = storage
                yaml_cfg["config"]["provider_address"] = provider_address
                try:
                    submit_base = SubmitBase(yaml_cfg.path)
                    submission_date = _date()
                    submission_timestamp = _timestamp()
                    requester_address = random.choice(users).lower()
                    yaml_cfg["config"]["requester_address"] = requester_address
                    log(f"requester={requester_address}", "bold")
                    tx_hash = submit_base.submit(is_pass=True)
                    log(f"tx_hash={tx_hash}", "bold")
                    tx_receipt = get_tx_status(tx_hash, is_silent=True)
                    if tx_receipt["status"] == 1:
                        processed_logs = Ebb._eBlocBroker.events.LogJob().processReceipt(
                            tx_receipt, errors=DISCARD
                        )
                        job_result = vars(processed_logs[0].args)
                        job_result["submit_date"] = submission_date
                        job_result["submit_timestamp"] = submission_timestamp
                        job_result["tx_hash"] = tx_hash
                        if selected_benchmark == "nas":
                            job_result["submitted_job_kind"] = (
                                f"{selected_benchmark}_{benchmark_name}"
                            )
                        elif selected_benchmark == "cppr":
                            job_result["submitted_job_kind"] = (
                                f"{selected_benchmark}_{hash_small_data}_{hash_med_data}"
                            )

                        ebb_mongo.add_item(tx_hash, job_result)
                        log(job_result)

                    countdown(seconds=5, is_silent=True)
                except Exception as e:
                    print_tb(e)

            counter += 1

        sleep_time = randint(200, 400)
        countdown(sleep_time)
Example no. 19
            "email": email,
            "federation_cloud_id": federation_cloud_id,
            "gpg_fingerprint": gpg_fingerprint,
            "ipfs_id": ipfs_id,
        }
        log("==> [bold yellow]new_requester_info:")
        log(_requester_info)
        if is_question and not question_yes_no(
                "#> Would you like to update requester info?"):
            return

    try:
        tx = self._register_requester(account, gpg_fingerprint, email,
                                      federation_cloud_id, ipfs_id)
        return self.tx_id(tx)
    except Exception as e:
        print_tb(e)
        raise e


if __name__ == "__main__":
    try:
        yaml_fn = "~/ebloc-broker/broker/yaml_files/register_requester.yaml"
        tx_hash = Ebb.register_requester(yaml_fn)
        if tx_hash:
            get_tx_status(tx_hash)
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)