Example #1
def is_geth_on():
    """Check whether geth runs on the background."""
    process_name = f"geth@{env.RPC_PORT}"
    if not is_process_on(process_name, "Geth", process_count=0):
        log(f"E: geth is not running on the background, {process_name}. Please run:")
        log("sudo ~/eBlocPOA/server.sh", "bold yellow")
        raise QuietExit
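
Every example on this page calls `is_process_on` without showing its body. Below is a minimal sketch of such a helper, assuming it simply greps the process table with `pgrep -f`; the parameter names mirror the calls above, but the real ebloc-broker implementation may differ (for instance in how `port` and logging are handled).

import subprocess


def is_process_on(process_name, name="", process_count=0, port=None, is_print=True) -> bool:
    """Return True if more than `process_count` processes match the given pattern.

    Hypothetical sketch: `port` is accepted only for signature compatibility and is
    ignored here; the real helper may also filter by port and log via `log()`.
    """
    p = subprocess.run(["pgrep", "-f", process_name], capture_output=True, text=True)
    pids = p.stdout.split()
    if len(pids) > process_count:
        return True

    if is_print and name:
        print(f"{name} is not running in the background")

    return False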
Example #2
def main():
    check_gdrive_user()
    console_ruler(f"NEW_TEST {Ebb.get_block_number()}")
    log(f" * {datetime.now().strftime('%Y-%m-%d %H:%M')}")
    if not is_process_on("mongod", "mongod"):
        raise Exception("mongodb is not running in the background")

    storage_ids = ["eudat", "gdrive", "ipfs"]
    ipfs_ids = ["ipfs_gpg", "ipfs"]
    # for provider_address in provider_addresses:
    #     pre_submit(storage_ids, provider_address)

    benchmarks = ["nas", "cppr"]
    test_dir = Path.home() / "ebloc-broker" / "broker" / "test_setup" / "nas"
    nas_yaml_fn = test_dir / "job_nas.yaml"
    cppr_yam_fn = test_dir / "job_cppr.yaml"
    yaml_cfg = None
    counter = 0
    for _ in range(60):
        for _ in range(2):  # submitting as a batch is faster
            for idx, provider_address in enumerate(provider_addresses):
                # yaml_cfg["config"]["data"]["data3"]["storage_id"] = random.choice(storage_ids)
                storage_id = (idx + counter) % len(storage_ids)
                selected_benchmark = random.choice(benchmarks)
                storage = storage_ids[storage_id]
                if storage == "ipfs":
                    storage = random.choice(ipfs_ids)

                if selected_benchmark == "nas":
                    log(f" * Submitting job from NAS Benchmark to [green]{provider_address}", "blue")
                    yaml_cfg = Yaml(nas_yaml_fn)
                    benchmark_name = create_nas_job_script()
                elif selected_benchmark == "cppr":
                    log(f" * Submitting job with cppr datasets to [green]{provider_address}", "blue")
                    yaml_cfg = Yaml(cppr_yam_fn)
                    hash_small_data, hash_med_data = create_cppr_job_script()
                    yaml_cfg["config"]["data"]["data1"][
                        "hash"] = hash_small_data
                    yaml_cfg["config"]["data"]["data2"]["hash"] = hash_med_data
                    yaml_cfg["config"]["data"]["data3"]["storage_id"] = storage
                    small_datasets = Path.home(
                    ) / "test_eblocbroker" / "dataset_zip" / "small"
                    dirs = [
                        d for d in os.listdir(small_datasets)
                        if os.path.isdir(os.path.join(small_datasets, d))
                    ]
                    dir_name = random.choice(dirs)
                    yaml_cfg["config"]["data"]["data3"]["path"] = str(
                        small_datasets / dir_name)

                yaml_cfg["config"]["source_code"]["storage_id"] = storage
                yaml_cfg["config"]["provider_address"] = provider_address
                try:
                    submit_base = SubmitBase(yaml_cfg.path)
                    submission_date = _date()
                    submission_timestamp = _timestamp()
                    requester_address = random.choice(users).lower()
                    yaml_cfg["config"]["requester_address"] = requester_address
                    log(f"requester={requester_address}", "bold")
                    tx_hash = submit_base.submit(is_pass=True)
                    log(f"tx_hash={tx_hash}", "bold")
                    tx_receipt = get_tx_status(tx_hash, is_silent=True)
                    if tx_receipt["status"] == 1:
                        processed_logs = Ebb._eBlocBroker.events.LogJob().processReceipt(tx_receipt, errors=DISCARD)
                        job_result = vars(processed_logs[0].args)
                        job_result["submit_date"] = submission_date
                        job_result["submit_timestamp"] = submission_timestamp
                        job_result["tx_hash"] = tx_hash
                        if selected_benchmark == "nas":
                            job_result[
                                "submitted_job_kind"] = f"{selected_benchmark}_{benchmark_name}"
                        elif selected_benchmark == "cppr":
                            job_result[
                                "submitted_job_kind"] = f"{selected_benchmark}_{hash_small_data}_{hash_med_data}"

                        ebb_mongo.add_item(tx_hash, job_result)
                        log(job_result)

                    countdown(seconds=5, is_silent=True)
                except Exception as e:
                    print_tb(e)

            counter += 1

        sleep_time = randint(200, 400)
        countdown(sleep_time)
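
Note how Example #2 picks the storage backend: `(idx + counter) % len(storage_ids)` walks each provider through all backends across successive batches instead of drawing purely at random. A standalone illustration of that rotation (the provider addresses here are placeholders):

storage_ids = ["eudat", "gdrive", "ipfs"]
provider_addresses = ["0xaaa", "0xbbb", "0xccc"]  # placeholder addresses

for counter in range(3):  # one increment per pass over the providers
    for idx, provider_address in enumerate(provider_addresses):
        storage = storage_ids[(idx + counter) % len(storage_ids)]
        print(f"batch={counter} provider={provider_address} storage={storage}")

# batch=0: 0xaaa->eudat   0xbbb->gdrive  0xccc->ipfs
# batch=1: 0xaaa->gdrive  0xbbb->ipfs    0xccc->eudat
# batch=2: 0xaaa->ipfs    0xbbb->eudat   0xccc->gdrive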
Example #3
def is_ganache_on(port) -> bool:
    """Check whether Ganache CLI runs on the background."""
    return is_process_on("node.*[g]anache-cli", "Ganache CLI", process_count=0, port=port)
Example #4
def is_driver_on(process_count=0, is_print=True):
    """Check whether driver runs on the background."""
    if is_process_on("python.*[D]river", "Driver", process_count, is_print=is_print):
        log(f"## Track output using:\n[blue]tail -f {_log.DRIVER_LOG}")
        raise QuietExit
Example #5
def is_ipfs_on(is_print=False) -> bool:
    """Check whether ipfs runs on the background."""
    return is_process_on("[i]pfs\ daemon", "IPFS", process_count=0, is_print=is_print)
Example #6
def _tools(block_continue):  # noqa
    """Check whether the required functions are running or not.

    :param block_continue: Continue from given block number
    """
    session_start_msg(env.SLURMUSER, block_continue, pid)
    try:
        if not is_internet_on():
            raise Terminate("Network connection is down. Please try again")

        if not check_ubuntu_packages():
            raise Terminate()

        if not env.IS_BLOXBERG:
            is_geth_on()
        else:
            log(":beer:  Connected into [green]BLOXBERG[/green]", "bold")

        if not Contract.Ebb.is_orcid_verified(env.PROVIDER_ID):
            log(f"warning: provider [green]{env.PROVIDER_ID}[/green]'s orcid id is not authenticated yet")
            raise QuietExit

        slurm.is_on()
        if not is_process_on("mongod"):
            raise Exception("mongodb is not running in the background")

        # run_driver_cancel()  # TODO: uncomment
        if env.IS_EUDAT_USE:
            if not env.OC_USER:
                raise Terminate(f"OC_USER is not set in {env.LOG_PATH.joinpath('.env')}")

            eudat.login(env.OC_USER, env.LOG_PATH.joinpath(".eudat_client.txt"), env.OC_CLIENT)

        if env.IS_GDRIVE_USE:
            is_program_valid(["gdrive", "version"])
            if env.GDRIVE == "":
                raise Terminate(f"E: Path for gdrive='{env.GDRIVE}' please set a valid path in the .env file")

            provider_info = Contract.Ebb.get_provider_info(env.PROVIDER_ID)
            email = provider_info["email"]
            try:
                output, gdrive_email = gdrive.check_user(email)
            except Exception as e:
                print_tb(e)
                raise QuietExit from e

            if not output:
                log(
                    f"E: provider's registered email=[magenta]{email}[/magenta] does not match\n"
                    f"   with the set gdrive's email=[magenta]{gdrive_email}[/magenta]"
                )
                raise QuietExit

            log(f"==> provider_email=[magenta]{email}")

        if env.IS_IPFS_USE:
            if not os.path.isfile(env.GPG_PASS_FILE):
                log(f"E: Please store your gpg password in the {env.GPG_PASS_FILE}\nfile for decrypting using ipfs")
                raise QuietExit

            if not os.path.isdir(env.IPFS_REPO):
                log(f"E: {env.IPFS_REPO} does not exist")
                raise QuietExit

            run_ipfs_daemon()
    except QuietExit as e:
        raise e
    except Exception as e:
        print_tb(e)
        raise Terminate from e
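
`_tools` distinguishes two failure modes: `QuietExit` for conditions that were already reported via `log`, and `Terminate` for fatal errors that should surface a message. A hedged sketch of how a caller might handle that split (the exception classes are stubbed locally here; in ebloc-broker they come from its own utility modules):

import sys


class QuietExit(Exception):
    """Stub: the error was already reported to the user, exit without extra noise."""


class Terminate(Exception):
    """Stub: fatal error, print it and stop the driver."""


def startup(block_continue):
    try:
        _tools(block_continue)  # the preflight checks from Example #6
    except QuietExit:
        sys.exit(1)  # message was already logged inside _tools
    except Terminate as e:
        print(f"E: {e}")
        sys.exit(1)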
Example #7
def run_driver_cancel():
    """Run driver_cancel daemon on the background."""
    if not is_process_on("python.*[d]river_cancel", "driver_cancel"):
        # run driver_cancel.py in the background if it is not already running
        config.driver_cancel_process = subprocess.Popen(["python3", "driver_cancel.py"])