Code example #1
0
    def process_logged_job(self, idx):
        """Process a single logged job event.

        Fetches the job's on-chain info, attaches storage metadata
        (received block, storage duration, cache type) and, for workflow
        submissions, the info of each follow-up job, then hands control
        to ``check_requested_job``.

        :param idx: index of the logged job, used only for the console ruler
        """
        self.received_block = []
        self.storage_duration = []
        wait_until_idle_core_available()
        self.is_provider_received_job = True
        console_ruler(idx, character="-")
        job_key = self.logged_job.args["jobKey"]
        index = self.logged_job.args["index"]
        self.job_block_number = self.logged_job["blockNumber"]
        self.cloud_storage_id = self.logged_job.args["cloudStorageID"]
        log(f"## job_key=[magenta]{job_key}[/magenta] | index={index}")
        log(
            f"received_block_number={self.job_block_number} \n"
            f"transactionHash={self.logged_job['transactionHash'].hex()} | "
            f"log_index={self.logged_job['logIndex']} \n"
            f"provider={self.logged_job.args['provider']} \n"
            f"received={self.logged_job.args['received']}",
            "bold yellow",
        )
        # track the highest block number seen so the driver can resume from it
        if self.logged_job["blockNumber"] > self.latest_block_number:
            self.latest_block_number = self.logged_job["blockNumber"]

        try:
            run([env.BASH_SCRIPTS_PATH / "is_str_valid.sh", job_key])
        except Exception:  # narrowed from a bare `except:`, which would also swallow SystemExit/KeyboardInterrupt
            logging.error("E: Filename contains an invalid character")
            return

        try:
            job_id = 0  # main job_id
            self.job_info = eblocbroker_function_call(
                partial(self.Ebb.get_job_info, env.PROVIDER_ID, job_key, index, job_id, self.job_block_number),
                max_retries=10,
            )
            cfg.Ebb.get_job_code_hashes(env.PROVIDER_ID, job_key, index, self.job_block_number)
            self.requester_id = self.job_info["job_owner"]
            self.job_info.update({"received_block": self.received_block})
            self.job_info.update({"storage_duration": self.storage_duration})
            self.job_info.update({"cacheType": self.logged_job.args["cacheType"]})
            cfg.Ebb.analyze_data(job_key, env.PROVIDER_ID)
            self.job_infos.append(self.job_info)
            log(f"==> requester={self.requester_id}")
            log("==> [yellow]job_info:", "bold")
            log(self.job_info)
        except Exception as e:
            print_tb(e)
            return

        # if a workflow is given then add the follow-up jobs into the list;
        # failures for individual follow-up jobs are deliberately ignored
        for job in range(1, len(self.job_info["core"])):
            with suppress(Exception):
                self.job_infos.append(
                    self.Ebb.get_job_info(env.PROVIDER_ID, job_key, index, job, self.job_block_number)
                )

        self.check_requested_job()
Code example #2
0
def main(args):
    """Entry point: resolve the starting block number and launch ``_main``.

    Exits with status 1 on Ctrl-C.
    """
    block_number = 0
    try:
        if args.bn:
            block_number = args.bn
        elif args.latest:
            block_number = cfg.Ebb.get_block_number()

        if args.is_thread is False:
            cfg.IS_THREADING_ENABLED = False

        console_ruler("provider session starts")
        session_start = datetime.now().strftime("%Y-%m-%d %H:%M")
        log(f" * {session_start}")
        # drop into ipdb automatically if an exception escapes _main
        with launch_ipdb_on_exception():
            _main(block_number)
    except KeyboardInterrupt:
        sys.exit(1)
Code example #3
0
def handle_event(logged_jobs):
    """Log the details of each received job event.

    :param logged_jobs: iterable of web3 event logs for submitted jobs
    """
    for job in logged_jobs:
        cloud_storage_id = job.args["cloudStorageID"]
        log(f"transaction_hash={job['transactionHash'].hex()} | log_index={job['logIndex']}")
        log(f"block_number={job['blockNumber']}")
        log(f"provider={job.args['provider']}")
        log(f"job_key={job.args['jobKey']}")
        log(f"index={job.args['index']}")
        log(f"cloud_storage_id={StorageID(cloud_storage_id).name}")
        # fix: convert the raw value into the enum first, then take its name;
        # the original called `.name` on the raw value before constructing CacheType
        log(f"cache_type={CacheType(job.args['cacheType']).name}")
        log(f"received={job.args['received']}")
        # fix: enumerate the hashes; the original indexed the list with its own
        # elements (`job.args["sourceCodeHash"][value]`), which is invalid for
        # a list of bytes32 hashes
        for idx, source_code_hash in enumerate(job.args["sourceCodeHash"]):
            log(f"source_code_hash{br(idx)} => {bytes32_to_ipfs(source_code_hash)}")

        console_ruler()
Code example #4
0
File: test_overlap.py — Project: ebloc/ebloc-broker
def submit_receipt(index,
                   cores,
                   start_time,
                   completion_time,
                   elapsed_time,
                   is_print=True):
    """Submit a job, mark it running, then process its payment.

    Returns the ``processPayment`` transaction.
    """
    window = f"{start_time},{completion_time}"
    log(f"==> {br(window)} cores={cores}")
    job = Job()
    job.code_hashes = [b"8b3e98abb65d0c1aceea8d606fc55403"]
    job.key = job.code_hashes[0]
    job.index = index
    job._id = 0
    job.cores = cores
    job.run_time = [1]
    job.data_transfer_ins = [1]
    job.data_transfer_out = 1
    job.storage_ids = [StorageID.EUDAT.value]
    job.cache_types = [CacheType.PUBLIC.value]
    job.storage_hours = [0]
    job.data_prices_set_block_numbers = [0]
    job_price, _cost = job.cost(provider, requester)
    submit_args = [
        provider,
        ebb.getProviderSetBlockNumbers(provider)[-1],
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]
    ebb.submitJob(
        job.key,
        job.data_transfer_ins,
        submit_args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    ebb.setJobStatusRunning(job.key, job.index, job._id, start_time,
                            {"from": provider})
    rpc.sleep(60)
    mine(5)

    # no data transferred in or out for this receipt
    payment_args = [
        job.index, job._id, completion_time, 0, 0, job.cores, [1], True
    ]
    tx = ebb.processPayment(job.key, payment_args, elapsed_time, "",
                            {"from": provider})
    if is_print:
        log(f"==> process_payment received_gas_used={tx.__dict__['gas_used']}")

    check_list(is_print)
    if is_print:
        console_ruler(character="-=")

    return tx
Code example #5
0
File: run_jobs.py — Project: ebloc/ebloc-broker
def main():
    """Continuously submit randomized benchmark jobs to each provider.

    Runs 60 outer rounds; in each round submits two batches, one job per
    provider address, alternating storage back-ends and randomly picking
    a NAS or cppr benchmark, then sleeps 200-400 seconds between rounds.
    """
    check_gdrive_user()
    console_ruler(f"NEW_TEST {Ebb.get_block_number()}")
    log(f" * {datetime.now().strftime('%Y-%m-%d %H:%M')}")
    if not is_process_on("mongod", "mongod"):
        raise Exception("mongodb is not running in the background")

    storage_ids = ["eudat", "gdrive", "ipfs"]
    ipfs_ids = ["ipfs_gpg", "ipfs"]
    # for provider_address in provider_addresses:
    #     pre_submit(storage_ids, provider_address)

    benchmarks = ["nas", "cppr"]
    test_dir = Path.home() / "ebloc-broker" / "broker" / "test_setup" / "nas"
    nas_yaml_fn = test_dir / "job_nas.yaml"
    cppr_yam_fn = test_dir / "job_cppr.yaml"
    yaml_cfg = None
    counter = 0
    for _ in range(60):
        for _ in range(2):  # submitted as batch is faster
            for idx, provider_address in enumerate(provider_addresses):
                # yaml_cfg["config"]["data"]["data3"]["storage_id"] = random.choice(storage_ids)
                # rotate the storage back-end per provider and per batch
                storage_id = (idx + counter) % len(storage_ids)
                selected_benchmark = random.choice(benchmarks)
                storage = storage_ids[storage_id]
                if storage == "ipfs":
                    # randomly use plain or gpg-encrypted ipfs
                    storage = random.choice(ipfs_ids)

                if selected_benchmark == "nas":
                    log(
                        f" * Submitting job from NAS Benchmark to [green]{provider_address}",
                        "blue")
                    yaml_cfg = Yaml(nas_yaml_fn)
                    benchmark_name = create_nas_job_script()
                elif selected_benchmark == "cppr":
                    log(
                        f" * Submitting job with cppr datasets to [green]{provider_address}",
                        "blue")
                    yaml_cfg = Yaml(cppr_yam_fn)
                    hash_small_data, hash_med_data = create_cppr_job_script()
                    yaml_cfg["config"]["data"]["data1"][
                        "hash"] = hash_small_data
                    yaml_cfg["config"]["data"]["data2"]["hash"] = hash_med_data
                    yaml_cfg["config"]["data"]["data3"]["storage_id"] = storage
                    # pick a random small dataset directory as data3's path
                    small_datasets = Path.home(
                    ) / "test_eblocbroker" / "dataset_zip" / "small"
                    dirs = [
                        d for d in os.listdir(small_datasets)
                        if os.path.isdir(os.path.join(small_datasets, d))
                    ]
                    dir_name = random.choice(dirs)
                    yaml_cfg["config"]["data"]["data3"]["path"] = str(
                        small_datasets / dir_name)

                yaml_cfg["config"]["source_code"]["storage_id"] = storage
                yaml_cfg["config"]["provider_address"] = provider_address
                try:
                    submit_base = SubmitBase(yaml_cfg.path)
                    submission_date = _date()
                    submission_timestamp = _timestamp()
                    requester_address = random.choice(users).lower()
                    yaml_cfg["config"]["requester_address"] = requester_address
                    log(f"requester={requester_address}", "bold")
                    tx_hash = submit_base.submit(is_pass=True)
                    log(f"tx_hash={tx_hash}", "bold")
                    tx_receipt = get_tx_status(tx_hash, is_silent=True)
                    if tx_receipt["status"] == 1:
                        # decode the LogJob event and record the submission in mongo
                        processed_logs = Ebb._eBlocBroker.events.LogJob(
                        ).processReceipt(tx_receipt, errors=DISCARD)
                        job_result = vars(processed_logs[0].args)
                        job_result["submit_date"] = submission_date
                        job_result["submit_timestamp"] = submission_timestamp
                        job_result["tx_hash"] = tx_hash
                        if selected_benchmark == "nas":
                            job_result[
                                "submitted_job_kind"] = f"{selected_benchmark}_{benchmark_name}"
                        elif selected_benchmark == "cppr":
                            job_result[
                                "submitted_job_kind"] = f"{selected_benchmark}_{hash_small_data}_{hash_med_data}"

                        ebb_mongo.add_item(tx_hash, job_result)
                        log(job_result)

                    countdown(seconds=5, is_silent=True)
                except Exception as e:
                    # a failed submission must not stop the whole campaign
                    print_tb(e)

            counter += 1

        sleep_time = randint(200, 400)
        countdown(sleep_time)
Code example #6
0
def run_driver(given_bn):
    """Run the main driver script for eblocbroker on the background.

    Validates the environment and provider registration, determines the
    block number to resume event-reading from (``given_bn`` when positive,
    otherwise the saved ``block_continue`` config value, falling back to
    the contract's deployed block number), then loops forever fetching and
    processing logged jobs.

    :param given_bn: block number to start from; 0 means "use saved state"
    """
    # dummy sudo command to get the password when session starts for only to
    # create users and submit the slurm job under another user
    run(["sudo", "printf", "hello"])
    kill_process_by_name("gpg-agent")
    config.logging = setup_logger(_log.DRIVER_LOG)
    try:
        from broker.imports import connect

        connect()
        Ebb: "Contract.Contract" = cfg.Ebb
        driver = Driver()
    except Exception as e:
        raise Terminate from e

    if not env.PROVIDER_ID:
        raise Terminate(f"PROVIDER_ID is None in {env.LOG_PATH}/.env")

    if not env.WHOAMI or not env.EBLOCPATH or not env.PROVIDER_ID:
        raise Terminate(f"Please run: {env.BASH_SCRIPTS_PATH}/folder_setup.sh")

    if not env.SLURMUSER:
        raise Terminate(f"SLURMUSER is not set in {env.LOG_PATH}/.env")

    # fetched once; used for the block_continue fallback and logged below
    # (the original wrapped this in `try/except Exception as e: raise e`,
    # which is a no-op and has been removed)
    deployed_block_number = Ebb.get_deployed_block_number()
    if not env.config["block_continue"]:
        env.config["block_continue"] = deployed_block_number

    if given_bn > 0:
        block_number_saved = int(given_bn)
    else:
        block_number_saved = env.config["block_continue"]
        if not isinstance(env.config["block_continue"], int):
            log("E: block_continue variable is empty or contains an invalid character")
            if not question_yes_no("#> Would you like to read from the contract's deployed block number?"):
                terminate()

            block_number_saved = deployed_block_number
            if deployed_block_number:
                env.config["block_continue"] = deployed_block_number
            else:
                raise Terminate(f"deployed_block_number={deployed_block_number} is invalid")

    _tools(block_number_saved)
    try:
        Ebb.is_contract_exists()
    except Exception:  # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt pass through
        terminate(
            "Contract address does not exist on the blockchain, is the blockchain sync?\n"
            f"block_number={Ebb.get_block_number()}",
            is_traceback=False,
        )

    if cfg.IS_THREADING_ENABLED:
        log(f"## is_threading={cfg.IS_THREADING_ENABLED}")

    Ebb.is_eth_account_locked(env.PROVIDER_ID)
    log(f"==> whoami={env.WHOAMI}")
    log(f"==> log_file={_log.DRIVER_LOG}")
    log(f"==> rootdir={os.getcwd()}")
    log(f"==> is_web3_connected={Ebb.is_web3_connected()}")
    if not Ebb.does_provider_exist(env.PROVIDER_ID):
        # updated since cluster is not registered
        env.config["block_continue"] = Ebb.get_block_number()
        terminate(
            textwrap.fill(
                f"Your Ethereum address {env.PROVIDER_ID} "
                "does not match with any provider in eBlocBroker. Please register your "
                "provider using your Ethereum Address in to the eBlocBroker. You can "
                "use eblocbroker/register_provider.py script to register your provider."
            ),
            is_traceback=False,
        )

    if not Ebb.is_orcid_verified(env.PROVIDER_ID):
        raise QuietExit(f"provider's ({env.PROVIDER_ID}) ORCID is not verified")

    blk_read = block_number_saved
    balance_temp = Ebb.get_balance(env.PROVIDER_ID)
    eth_balance = Ebb.eth_balance(env.PROVIDER_ID)
    log(f"==> deployed_block_number={deployed_block_number}")
    log(f"==> account_balance={eth_balance} gwei | {cfg.w3.fromWei(eth_balance, 'ether')} eth")
    log(f"==> Ebb_balance={balance_temp}")
    while True:  # main event loop: read logged jobs from blk_read onward
        wait_until_idle_core_available()
        time.sleep(0.2)
        if not str(blk_read).isdigit():
            raise Terminate(f"block_read_from={blk_read}")

        balance = Ebb.get_balance(env.PROVIDER_ID)
        if cfg.IS_THREADING_ENABLED:
            _squeue()

        console_ruler()
        if isinstance(balance, int):
            # report earnings accumulated since the driver started
            value = int(balance) - int(balance_temp)
            if value > 0:
                log(f"==> Since Driver started provider_gained_wei={value}")

        current_bn = Ebb.get_block_number()
        log(f" * {get_date()} waiting new job to come since block_number={blk_read}")
        log(f"==> current_block={current_bn} | sync_from={blk_read}")
        flag = True
        # wait for the chain to catch up with the block we want to read from
        while current_bn < int(blk_read):
            current_bn = Ebb.get_block_number()
            if flag:
                log(f"## Waiting block number to be updated, it remains constant at {current_bn}")

            flag = False
            time.sleep(2)

        log(f"#> [bold yellow]Passed incremented block number... Watching from block_number=[cyan]{blk_read}")
        blk_read = str(blk_read)  # reading events' block number has been updated
        slurm.pending_jobs_check()
        try:
            driver.logged_jobs_to_process = Ebb.run_log_job(blk_read, env.PROVIDER_ID)
            driver.process_logged_jobs()
            if len(driver.logged_jobs_to_process) > 0 and driver.latest_block_number > 0:
                # updates the latest read block number
                blk_read = driver.latest_block_number + 1
                env.config["block_continue"] = blk_read
            if not driver.is_provider_received_job:
                blk_read = env.config["block_continue"] = current_bn
        except Exception as e:
            log()
            log(f"E: {e}")
            if "Filter not found" in str(e) or "Read timed out" in str(e):
                # HTTPSConnectionPool(host='core.bloxberg.org', port=443): Read timed out. (read timeout=10)
                log("## sleeping for 60 seconds...", end="")
                time.sleep(60)
                log(ok())
            else:
                print_tb(e)
Code example #7
0
def test_workflow():
    """End-to-end workflow test: register data, submit a 3-job workflow,
    set jobs running, process payments and verify the payout sums.
    """
    job = Job()
    provider = accounts[0]
    requester = accounts[1]
    register_provider()
    register_requester(requester)
    job_key = "QmQv4AAL8DZNxZeK3jfJGJi63v1msLMZGan7vSsCDXzZud"
    code_hash = ipfs_to_bytes32(job_key)
    # updating the price of data that was never registered must revert
    with brownie.reverts():
        ebb.updataDataPrice(code_hash, 20, 100, {"from": provider})

    ebb.registerData(code_hash, 20, cfg.BLOCK_DURATION_1_HOUR,
                     {"from": provider})
    ebb.removeRegisteredData(
        code_hash,
        {"from": provider})  # should submitJob fail if it is not removed

    code_hash1 = "0x68b8d8218e730fc2957bcb12119cb204"
    # "web3.toBytes(hexstr=ipfs_to_bytes32("QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve"))
    ebb.registerData(code_hash1, 20, cfg.BLOCK_DURATION_1_HOUR,
                     {"from": provider})
    mine(6)

    # re-registering already-registered data must revert
    with brownie.reverts():
        ebb.registerData(code_hash1, 20, 1000, {"from": provider})

    ebb.updataDataPrice(code_hash1, 250, cfg.BLOCK_DURATION_1_HOUR + 1,
                        {"from": provider})

    data_block_numbers = ebb.getRegisteredDataBlockNumbers(
        provider, code_hash1)
    log(f"get_registered_data_block_numbers={data_block_numbers[1]}", "bold")
    get_block_number()
    # price update is not yet in effect: the old price (20) still applies
    data_prices = ebb.getRegisteredDataPrice(provider, code_hash1, 0)
    log(f"register_data_price={data_prices}", "bold")
    assert data_prices[0] == 20

    res = ebb.getRegisteredDataPrice(provider, code_hash1,
                                     data_block_numbers[1])
    log(f"register_data_price={res}", "bold")
    assert res[0] == 250
    mine(cfg.BLOCK_DURATION_1_HOUR - 9)

    res = ebb.getRegisteredDataPrice(provider, code_hash1, 0)
    log(f"register_data_price={res}", "bold")
    assert res[0] == 20
    mine(1)

    # after the commitment duration passes, the new price (250) takes effect
    res = ebb.getRegisteredDataPrice(provider, code_hash1, 0)
    log(f"register_data_price={res}", "bold")
    assert res[0] == 250

    job.code_hashes = [code_hash,
                       code_hash1]  # Hashed of the data file in array
    job.storage_hours = [0, 0]
    job.data_transfer_ins = [100, 0]
    job.data_transfer_out = 100

    # job.data_prices_set_block_numbers = [0, 253]  # TODO: check this ex 253 exists or not
    job.data_prices_set_block_numbers = [
        0, data_block_numbers[1]
    ]  # TODO: check this ex 253 exists or not
    check_price_keys(job.data_prices_set_block_numbers, provider, code_hash1)
    # a 3-job workflow: cores and run times per job
    job.cores = [2, 4, 2]
    job.run_time = [10, 15, 20]
    job.storage_ids = [StorageID.IPFS.value, StorageID.NONE.value]
    job.cache_types = [CacheType.PUBLIC.value, CacheType.PUBLIC.value]
    args = [
        provider,
        ebb.getProviderSetBlockNumbers(accounts[0])[-1],
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]

    job_price, _cost = job.cost(provider, requester)
    tx = ebb.submitJob(  # first submit
        job_key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    for idx in range(0, 3):
        log(ebb.getJobInfo(provider, job_key, 0, idx))

    console_ruler(character="-=")
    assert (
        tx.events["LogRegisteredDataRequestToUse"][0]["registeredDataHash"] ==
        "0x0000000000000000000000000000000068b8d8218e730fc2957bcb12119cb204"
    ), "registered data should be used"

    # out-of-range (index, job_id) lookups must revert
    with brownie.reverts():
        log(ebb.getJobInfo(provider, job_key, 1, 2))
        log(ebb.getJobInfo(provider, job_key, 0, 3))

    # setJobStatus for the workflow:
    index = 0
    job_id = 0
    start_time = 10
    tx = ebb.setJobStatusRunning(job_key, index, job_id, start_time,
                                 {"from": accounts[0]})
    index = 0
    job_id = 1
    start_time = 20
    tx = ebb.setJobStatusRunning(job_key, index, job_id, start_time,
                                 {"from": accounts[0]})
    # process_payment for the workflow
    index = 0
    job_id = 0
    execution_time = 10
    data_transfer = [100, 0]
    end_time = 20
    result_ipfs_hash = ipfs_to_bytes32(
        "QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve")

    # accumulate payouts across all three jobs for the final balance check
    received_sums = []
    refunded_sums = []
    received_sum = 0
    refunded_sum = 0
    args = [
        index, job_id, end_time, data_transfer[0], data_transfer[1], job.cores,
        job.run_time, False
    ]
    tx = ebb.processPayment(job_key, args, execution_time, result_ipfs_hash,
                            {"from": accounts[0]})
    # log(tx.events['LogProcessPayment'])
    received_sums.append(tx.events["LogProcessPayment"]["receivedWei"])
    refunded_sums.append(tx.events["LogProcessPayment"]["refundedWei"])
    received_sum += tx.events["LogProcessPayment"]["receivedWei"]
    refunded_sum += tx.events["LogProcessPayment"]["refundedWei"]
    log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}"
        )
    # -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
    index = 0
    job_id = 1
    execution_time = 15
    data_transfer = [0, 0]
    end_time = 39
    result_ipfs_hash = ipfs_to_bytes32(
        "QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve")

    args = [
        index, job_id, end_time, data_transfer[0], data_transfer[1], job.cores,
        job.run_time, False
    ]
    tx = ebb.processPayment(job_key, args, execution_time, result_ipfs_hash,
                            {"from": accounts[0]})
    received_sums.append(tx.events["LogProcessPayment"]["receivedWei"])
    refunded_sums.append(tx.events["LogProcessPayment"]["refundedWei"])
    received_sum += tx.events["LogProcessPayment"]["receivedWei"]
    refunded_sum += tx.events["LogProcessPayment"]["refundedWei"]
    log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}"
        )
    # -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
    index = 0
    job_id = 2
    execution_time = 20
    data_transfer = [0, 100]
    end_time = 39
    result_ipfs_hash = ipfs_to_bytes32(
        "QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve")
    with brownie.reverts(
    ):  # processPayment should revert, setRunning is not called for the job=2
        args = [
            index,
            job_id,
            end_time,
            data_transfer[0],
            data_transfer[1],
            job.cores,
            job.run_time,
            False,
        ]
        tx = ebb.processPayment(job_key, args, execution_time,
                                result_ipfs_hash, {"from": accounts[0]})

    index = 0
    job_id = 2
    start_time = 20
    tx = ebb.setJobStatusRunning(job_key, index, job_id, start_time,
                                 {"from": accounts[0]})

    # last job of the workflow: the final flag is True
    args = [
        index, job_id, end_time, data_transfer[0], data_transfer[1], job.cores,
        job.run_time, True
    ]
    tx = ebb.processPayment(job_key, args, execution_time, result_ipfs_hash,
                            {"from": accounts[0]})
    # log(tx.events['LogProcessPayment'])
    received_sums.append(tx.events["LogProcessPayment"]["receivedWei"])
    refunded_sums.append(tx.events["LogProcessPayment"]["refundedWei"])
    received_sum += tx.events["LogProcessPayment"]["receivedWei"]
    refunded_sum += tx.events["LogProcessPayment"]["refundedWei"]
    log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}"
        )
    log(received_sums)
    log(refunded_sums)
    # everything paid except the storage cost must come back as received+refunded
    assert job_price - _cost["storage"] == received_sum + refunded_sum
    withdraw(accounts[0], received_sum)
    withdraw(requester, refunded_sum)
Code example #8
0
def test_storage_refund():
    """Test storage-deposit refunds: refund after job cancellation, refund of
    unused storage deposits after the deadline, and provider's claim of the
    deposit once the data is marked received.
    """
    job = Job()
    provider = accounts[0]
    requester = accounts[1]

    register_provider()
    register_requester(requester)

    job_key = "QmQv4AAL8DZNxZeK3jfJGJi63v1msLMZGan7vSsCDXzZud"
    job.code_hashes.append(ipfs_to_bytes32(job_key))
    job.storage_hours.append(1)

    job_key_2 = "QmVqtWxuBdZQdLnLce6XCBMuqoazAcbmuxoJHQbfbuqDu2"
    job.code_hashes.append(ipfs_to_bytes32(job_key_2))
    job.storage_hours.append(1)
    job.data_transfer_ins = [100, 100]
    job.data_transfer_out = 100
    job.data_prices_set_block_numbers = [0, 0]
    job.cores = [2]
    job.run_time = [10]
    job.provider_price_block_number = ebb.getProviderSetBlockNumbers(
        accounts[0])[-1]
    job.storage_ids = [StorageID.EUDAT.value, StorageID.IPFS.value]
    job.cache_types = [CacheType.PRIVATE.value, CacheType.PUBLIC.value]

    # provider's registered data won't be used
    job.data_prices_set_block_numbers = [0, 0]

    job_price, _cost = job.cost(provider, requester)
    job_price += 1  # for test 1 wei extra is paid
    args = [
        provider,
        job.provider_price_block_number,
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]
    tx = ebb.submitJob(
        job_key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )

    # the 1 wei overpayment should be reported back as refunded
    refunded = tx.events["LogJob"]["refunded"]
    log(f"==> job_index={tx.events['LogJob']['index']}")
    log(f"refunded={refunded}", "bold")
    log(tx.events["LogJob"]["jobKey"])
    assert requester == tx.events["LogJob"]["owner"]
    withdraw(requester, refunded)  # check for extra payment is checked
    index = 0
    job_id = 0
    # provider cancels the job; requester should get the non-storage costs back
    tx = ebb.refund(provider, job_key, index, job_id, job.cores, job.run_time,
                    {"from": provider})
    log(ebb.getJobInfo(provider, job_key, index, job_id))
    refundedWei = tx.events["LogRefundRequest"]["refundedWei"]
    log(f"refunded_wei={refundedWei}", "bold")
    withdraw(requester, refundedWei)
    # VM Exception while processing transaction: invalid opcode
    with brownie.reverts():
        ebb.getJobInfo(provider, job_key, 5, job_id)

    storage_cost_sum = 0
    for code_hash in job.code_hashes:
        _storage_cost_sum, *_ = ebb.getStorageInfo(provider, requester,
                                                   code_hash)
        storage_cost_sum += _storage_cost_sum

    # storage deposit stays with the contract; the rest was refunded
    assert _cost["storage"] == storage_cost_sum
    assert _cost["computational"] + _cost["data_transfer"] + _cost[
        "cache"] == refundedWei
    mine(cfg.BLOCK_DURATION_1_HOUR)
    # after the storage deadline passes, the requester can reclaim the deposit
    tx = ebb.refundStorageDeposit(provider, requester, job.code_hashes[0], {
        "from": requester,
        "gas": 4500000
    })
    refundedWei = tx.events["LogDepositStorage"]["payment"]
    log(f"refunded_wei={refundedWei}", "bold")
    withdraw(requester, refundedWei)
    # a second refund attempt for the same hash must revert
    with brownie.reverts():
        tx = ebb.refundStorageDeposit(provider, requester, job.code_hashes[0],
                                      {
                                          "from": requester,
                                          "gas": 4500000
                                      })

    tx = ebb.refundStorageDeposit(provider, requester, job.code_hashes[1], {
        "from": requester,
        "gas": 4500000
    })
    refundedWei = tx.events["LogDepositStorage"]["payment"]
    paid_address = tx.events["LogDepositStorage"]["paidAddress"]
    withdraw(requester, refundedWei)
    with brownie.reverts():
        tx = ebb.refundStorageDeposit(provider, requester, job.code_hashes[0],
                                      {
                                          "from": requester,
                                          "gas": 4500000
                                      })

    assert requester == paid_address
    assert ebb.balanceOf(provider) == 0
    console_ruler("same job submitted after full refund", color="blue")
    tx = ebb.submitJob(
        job_key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    log(f"job_index={tx.events['LogJob']['index']}", "bold")
    log(tx.events["LogJob"]["jobKey"])
    index = 1
    job_id = 0
    tx = ebb.refund(provider, job_key, index, job_id, job.cores, job.run_time,
                    {"from": provider})
    log(ebb.getJobInfo(provider, job_key, index, job_id))
    refundedWei = tx.events["LogRefundRequest"]["refundedWei"]
    log(f"refunded_wei={refundedWei}", "bold")
    assert _cost["computational"] + _cost["data_transfer"] + _cost[
        "cache"] == refundedWei
    storage_cost_sum = 0
    storage_payment = []
    for code_hash in job.code_hashes:
        deposit, *_ = ebb.getStorageInfo(provider, requester, code_hash)
        storage_payment.append(deposit)

    job.is_verified = [True, True]
    ebb.dataReceived(  # called by the provider
        job_key, index, job.code_hashes, job.cache_types, job.is_verified, {
            "from": provider,
            "gas": 4500000
        })
    for code_hash in job.code_hashes:
        *_, output = ebb.getStorageInfo(provider, cfg.ZERO_ADDRESS, code_hash)
        log(output, "bold")

    with brownie.reverts(
    ):  # refundStorageDeposit should revert, because it is already used by the provider
        for code_hash in job.code_hashes:
            tx = ebb.refundStorageDeposit(provider, requester, code_hash, {
                "from": requester,
                "gas": 4500000
            })

        tx = ebb.depositStorage(requester, job.code_hashes[0], {
            "from": provider,
            "gas": 4500000
        })

    mine(cfg.BLOCK_DURATION_1_HOUR)
    # after deadline (1 hr) is completed to store the data, provider could obtain the money
    for idx, code_hash in enumerate(job.code_hashes):
        tx = ebb.depositStorage(requester, code_hash, {
            "from": provider,
            "gas": 4500000
        })
        amount = tx.events["LogDepositStorage"]["payment"]
        withdraw(provider, amount)
        assert storage_payment[idx] == amount
Code example #9
0
def test_submit_job():
    """End-to-end job lifecycle test: submit, set running, process payment.

    Reads one job spec per line from ``files/test.txt`` (fields:
    start, end, core count), submits each as an IPFS-backed job, then
    replays the file to mark each job running and to process its payment,
    finally dumping the provider's receipt list and storage-deposit info.
    """
    job = Job()
    provider = accounts[0]
    requester = accounts[1]
    register_provider()
    register_requester(requester)
    fname = f"{cwd}/files/test.txt"
    # fname = f"{cwd}/files/test_.txt"
    log(f"==> registered_provider_addresses={ebb.getProviders()}")
    provider_price_info = ebb.getProviderInfo(accounts[0], 0)
    # block_read_from = provider_price_info[0]
    # second element is the packed price tuple; indices documented below
    _provider_price_info = provider_price_info[1]
    # availableCoreNum = _provider_price_info[0]
    # commitmentBlockDuration = _provider_price_info[1]
    price_core_min = _provider_price_info[2]
    # price_data_transfer = _provider_price_info[3]
    # price_storage = _provider_price_info[4]
    # price_cache = _provider_price_info[5]
    # NOTE(review): `available_core` is not defined in this function — it is
    # presumably a module-level value set up by register_provider(); verify.
    log(f"provider_available_core={available_core}")
    log(f"provider_price_core_min={price_core_min}")
    log(provider_price_info)
    job_price_sum = 0
    job_id = 0
    index = 0
    # Pass 1: submit one job per fixture line.
    with open(fname) as f:
        for line in f:
            arguments = line.rstrip("\n").split(" ")
            storage_hour = 1
            # run time in minutes = end - start (fixture fields 1 and 0)
            core_min = int(arguments[1]) - int(arguments[0])
            core = int(arguments[2])
            job.cores = [core]
            job.run_time = [core_min]
            # time.sleep(1)
            # rpc.mine(int(arguments[0]))

            job_key = "QmQv4AAL8DZNxZeK3jfJGJi63v1msLMZGan7vSsCDXzZud"
            data_key = "QmQv4AAL8DZNxZeK3jfJGJi63v1msLMZGan7vSsCDXzZud"
            code_hash = ipfs_to_bytes32(data_key)
            # log("Client Balance before: " + str(web3.eth.balanceOf(account)))
            # log("Contract Balance before: " + str(web3.eth.balanceOf(accounts[0])))
            job.code_hashes = [code_hash]
            job.storage_hours = [storage_hour]
            job.data_transfer_ins = [100]
            job.data_transfer_out = 100
            job.data_prices_set_block_numbers = [0]
            job.storage_ids = [StorageID.IPFS.value]
            job.cache_types = [CacheType.PUBLIC.value]
            args = [
                provider,
                ebb.getProviderSetBlockNumbers(accounts[0])[-1],
                job.storage_ids,
                job.cache_types,
                job.data_prices_set_block_numbers,
                job.cores,
                job.run_time,
                job.data_transfer_out,
            ]

            # log(code_hashes[0])
            job_price, _cost = job.cost(provider, requester)
            job_price_sum += job_price
            data_transfer_ins = [100]
            # NOTE(review): this overwrites the IPFS job_key string above with
            # storage_hours[0] (the int 1); all later status/payment calls use
            # this value as the key — confirm that is intentional.
            job_key = job.storage_hours[0]
            tx = ebb.submitJob(
                job_key,
                data_transfer_ins,
                args,
                job.storage_hours,
                job.code_hashes,
                {
                    "from": requester,
                    "value": web3.toWei(job_price, "wei")
                },
            )
            # log('submitJob => GasUsed:' + str(tx.__dict__['gas_used']) + '| blockNumber=' + str(tx.block_number))
            log(f"job_index={tx.events['LogJob']['index']}", "bold")
            # log("Contract Balance after: " + str(web3.eth.balanceOf(accouts[0])))
            # log("Client Balance after: " + str(web3.eth.balanceOf(accounts[8])))
            # sys.stdout.write('jobInfo: ')
            # sys.stdout.flush()
            log(ebb.getJobInfo(provider, job_key, index, job_id))
            index += 1

    log(f"total_paid={job_price_sum}")
    # log(block_read_from)
    # rpc.mine(100)
    # log(web3.eth.blockNumber)
    job_id = 0
    # Pass 2: mark each submitted job as running at its fixture start time.
    with open(fname) as f:
        for index, line in enumerate(f):
            arguments = line.rstrip("\n").split(" ")
            tx = ebb.setJobStatusRunning(job_key, index, job_id,
                                         int(arguments[0]),
                                         {"from": accounts[0]})
            if index == 0:
                # Setting the running status a second time for the same job
                # (even with a different start time) must revert.
                with brownie.reverts():
                    tx = ebb.setJobStatusRunning(job_key, index, job_id,
                                                 int(arguments[0]) + 1,
                                                 {"from": accounts[0]})

    console_ruler()
    result_ipfs_hash = ipfs_to_bytes32(
        "QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve")
    # Pass 3: process payment for each job and withdraw the resulting
    # received/refunded amounts.
    with open(fname) as f:
        for index, line in enumerate(f):
            arguments = line.rstrip("\n").split(" ")
            if index == 0:
                # First job reports only 90 of the 100 units of
                # data-transfer-in requested at submit time.
                data_transfer_in_sum = 90
                job.data_transfer_out = 100
            else:
                data_transfer_in_sum = 0
                job.data_transfer_out = 100

            core_min = int(arguments[1]) - int(arguments[0])
            core = int(arguments[2])
            job.cores = [core]
            job.run_time = [core_min]
            log(f"contract_balance={ebb.getContractBalance()}", "bold")
            job_id = 0
            execution_time = int(arguments[1]) - int(arguments[0])
            end_time = int(arguments[1])
            args = [
                index,
                job_id,
                end_time,
                data_transfer_in_sum,
                job.data_transfer_out,
                job.cores,
                job.run_time,
                True,
            ]
            tx = ebb.processPayment(job_key, args, execution_time,
                                    result_ipfs_hash, {"from": accounts[0]})
            # code_hashes
            received = tx.events["LogProcessPayment"]["receivedWei"]
            refunded = tx.events["LogProcessPayment"]["refundedWei"]
            withdraw(accounts[0], received)
            withdraw(requester, refunded)
            log(f"received={received} | refunded={refunded}", "bold")

    log(f"contract_balance={ebb.getContractBalance()}", "bold")
    for idx in range(0, ebb.getProviderReceiptSize(provider)):
        # prints finalize version of the linked list
        log(ebb.getProviderReceiptNode(provider, idx))

    console_ruler()
    log(f"==> storage_duration for job={job_key}")
    # Last tuple element of getStorageInfo is the per-job storage record.
    *_, job_storage_info = ebb.getStorageInfo(provider, cfg.ZERO_ADDRESS,
                                              code_hash)
    ds = DataStorage(job_storage_info)
    log(f"receivedBlockNumber={ds.received_block} |"
        f"storage_duration(block numbers)={ds.storage_duration} | "
        f"is_private={ds.is_private} |"
        f"is_verified_Used={ds.is_verified_used}")
    # First tuple element is the requester's storage deposit for this hash.
    received_storage_deposit, *_ = ebb.getStorageInfo(provider, requester,
                                                      code_hash)
    log(f"received_storage_deposit={received_storage_deposit}")
    console_ruler("DONE")