Example #1
def eudat_submit(job: Job, is_pass=False, required_confs=1):
    log("==> Submitting source code through [blue]EUDAT[/blue]")
    Ebb = cfg.Ebb
    requester = Ebb.w3.toChecksumAddress(job.requester_addr)
    oc_client = "059ab6ba-4030-48bb-b81b-12115f531296"
    connect()
    try:
        job.check_account_status(requester)
    except Exception as e:
        print_tb(e)
        raise e

    login(oc_client, env.LOG_PATH.joinpath(".eudat_client.txt"), env.OC_CLIENT)
    if len(sys.argv) == 3:
        provider = str(sys.argv[1])
        tar_hash = sys.argv[2]
        log(f"==> provided_hash={tar_hash}")
    else:
        provider = Ebb.w3.toChecksumAddress(job.provider_addr)

    job.folders_to_share = job.paths
    check_link_folders(job.data_paths,
                       job.registered_data_files,
                       is_pass=is_pass)
    return submit(provider, requester, job, required_confs=required_confs)
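
For reference, a minimal driver sketch for eudat_submit, mirroring the __main__ block shown in Example #5; the job.yaml path is illustrative.

import sys

if __name__ == "__main__":
    try:
        job = Job()
        job.set_config("job.yaml")  # illustrative path to the job description
        eudat_submit(job)
    except KeyboardInterrupt:
        sys.exit(1)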
Example #2
def submit_gdrive(job: Job, is_pass=False, required_confs=1):
    log("==> Submitting source code through [blue]GDRIVE[/blue]")
    pre_check()
    Ebb = cfg.Ebb
    job.folders_to_share = job.paths
    check_link_folders(job.data_paths,
                       job.registered_data_files,
                       is_pass=is_pass)
    _git.generate_git_repo(job.folders_to_share)
    job.clean_before_submit()
    requester = Ebb.w3.toChecksumAddress(job.requester_addr)
    provider = Ebb.w3.toChecksumAddress(job.provider_addr)
    job = gdrive.submit(provider, requester, job)
    for folder_to_share in job.folders_to_share:
        if isinstance(folder_to_share, bytes):
            code_hash = folder_to_share
            job.code_hashes.append(code_hash)
            job.code_hashes_str.append(code_hash.decode("utf-8"))
        else:
            tar_hash = job.foldername_tar_hash[folder_to_share]
            #: required to send string as bytes == str_data.encode('utf-8')
            code_hash = Ebb.w3.toBytes(text=tar_hash)
            job.code_hashes.append(code_hash)
            job.code_hashes_str.append(code_hash.decode("utf-8"))

    tar_hash = job.foldername_tar_hash[job.folders_to_share[0]]
    key = job.keys[tar_hash]
    job.price, *_ = job.cost(provider, requester)
    tx_hash = None  # stays None if submission is skipped or fails
    try:
        tx_hash = Ebb.submit_job(provider,
                                 key,
                                 job,
                                 requester=requester,
                                 required_confs=required_confs)
        tx_receipt = get_tx_status(tx_hash)
        if tx_receipt["status"] == 1:
            processed_logs = Ebb._eBlocBroker.events.LogJob().processReceipt(
                tx_receipt, errors=DISCARD)
            log(vars(processed_logs[0].args))
            try:
                log(f"{ok()} [bold]job_index={processed_logs[0].args['index']}"
                    )
            except IndexError:
                log(f"E: Tx({tx_hash}) is reverted")
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)

    log()
    for k, v in job.tar_hashes.items():
        log(f"{k} [blue]=>[/blue] {v}")

    return tx_hash
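
A minimal invocation sketch for submit_gdrive, following the same pattern as the IPFS driver in Example #15; the YAML path is an assumption.

if __name__ == "__main__":
    job = Job()
    job.set_config("job.yaml")  # illustrative path
    submit_gdrive(job)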
Example #3
def main():
    job = Job()
    Ebb = cfg.Ebb
    job.base_dir = f"{env.HOME}/test_eblocbroker"
    job.source_code_dir = f"{job.base_dir}/source_code"
    job.data_1_dir = f"{job.base_dir}/datasets/BL06-camel-sml"

    job.folders_to_share.append(job.source_code_dir)
    job.folders_to_share.append(job.data_1_dir)

    path_from = f"{job.base_dir}/datasets"
    path_to = f"{env.LINK_PATH}/base/data_link"
    check_linked_data(path_from, path_to, job.folders_to_share[1:])

    # IMPORTANT: consider not pushing .git into the submitted folder
    # job.generate_git_repos()
    job.clean_before_submit()

    provider = "0xD118b6EF83ccF11b34331F1E7285542dDf70Bc49"  # home2-vm
    account_id = 1
    _from = Ebb.w3.toChecksumAddress(Ebb.w3.eth.accounts[account_id])
    job = gdrive.submit(provider, _from, job)
    job.run_time = [5]
    job.cores = [1]
    job.data_transfer_ins = [1, 1]
    job.data_transfer_out = 1

    job.storage_ids = [StorageID.GDRIVE, StorageID.GDRIVE]
    job.cache_types = [CacheType.PRIVATE, CacheType.PUBLIC]
    job.storage_hours = [1, 1]
    job.data_prices_set_block_numbers = [0, 0]

    for folder_to_share in job.folders_to_share:
        tar_hash = job.foldername_tar_hash[folder_to_share]
        # required to send string as bytes == str_data.encode('utf-8')
        job.code_hashes.append(Ebb.w3.toBytes(text=tar_hash))

    tar_hash = job.foldername_tar_hash[job.folders_to_share[0]]
    job_key = job.keys[tar_hash]
    try:
        job_price, _cost = job.cost(provider, _from)
        tx_hash = Ebb.submit_job(provider,
                                 job_key,
                                 job_price,
                                 job,
                                 requester=_from)
    except Exception as e:
        raise e

    for k, v in job.tar_hashes.items():
        log(f"{k} => {v}")

    log(f"==> code_hashes={job.code_hashes}")
    if job.analyze_tx_status(tx_hash):
        log("SUCCESS")
    else:
        log("FAILED")
Example #4
def submit_receipt(index,
                   cores,
                   start_time,
                   completion_time,
                   elapsed_time,
                   is_print=True):
    text = f"{start_time},{completion_time}"
    log(f"==> {br(text)} cores={cores}")
    job = Job()
    job.code_hashes = [b"8b3e98abb65d0c1aceea8d606fc55403"]
    job.key = job.code_hashes[0]
    job.index = index
    job._id = 0
    job.cores = cores
    job.run_time = [1]
    job.data_transfer_ins = [1]
    job.data_transfer_out = 1
    job.storage_ids = [StorageID.EUDAT.value]
    job.cache_types = [CacheType.PUBLIC.value]
    job.storage_hours = [0]
    job.data_prices_set_block_numbers = [0]
    job_price, _cost = job.cost(provider, requester)
    provider_price_block_number = ebb.getProviderSetBlockNumbers(provider)[-1]
    args = [
        provider,
        provider_price_block_number,
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]
    tx = ebb.submitJob(
        job.key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )

    tx = ebb.setJobStatusRunning(job.key, job.index, job._id, start_time,
                                 {"from": provider})
    rpc.sleep(60)

    mine(5)
    data_transfer_in = 0
    data_transfer_out = 0

    args = [
        job.index, job._id, completion_time, data_transfer_in,
        data_transfer_out, job.cores, [1], True
    ]
    tx = ebb.processPayment(job.key, args, elapsed_time, "",
                            {"from": provider})
    if is_print:
        log(f"==> process_payment received_gas_used={tx.__dict__['gas_used']}")
    # received_sum = tx.events["LogProcessPayment"]["receivedWei"]
    # refunded_sum = tx.events["LogProcessPayment"]["refundedWei"]
    # withdraw(provider, received_sum)
    # withdraw(requester, refunded_sum)
    check_list(is_print)
    if is_print:
        console_ruler(character="-=")

    return tx
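
submit_receipt bundles submitJob, setJobStatusRunning, and processPayment into one helper; a usage sketch with illustrative timestamps (it relies on the module-level provider, requester, and ebb fixtures the helper itself uses):

tx = submit_receipt(index=0,
                    cores=[1],
                    start_time=10,
                    completion_time=20,
                    elapsed_time=1)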
Example #5
def eudat_submit(job: Job, is_pass=False, required_confs=1):
    log("==> Submitting source code through [blue]EUDAT[/blue]")
    Ebb = cfg.Ebb
    requester = Ebb.w3.toChecksumAddress(job.requester_addr)
    oc_client = "059ab6ba-4030-48bb-b81b-12115f531296"
    connect()
    try:
        job.check_account_status(requester)
    except Exception as e:
        print_tb(e)
        raise e

    login(oc_client, env.LOG_PATH.joinpath(".eudat_client.txt"), env.OC_CLIENT)
    if len(sys.argv) == 3:
        provider = str(sys.argv[1])
        tar_hash = sys.argv[2]
        log(f"==> provided_hash={tar_hash}")
    else:
        provider = Ebb.w3.toChecksumAddress(job.provider_addr)

    job.folders_to_share = job.paths
    check_link_folders(job.data_paths,
                       job.registered_data_files,
                       is_pass=is_pass)
    return submit(provider, requester, job, required_confs=required_confs)


if __name__ == "__main__":
    try:
        job = Job()
        fn = "job.yaml"
        job.set_config(fn)
        eudat_submit(job)
    except KeyboardInterrupt:
        sys.exit(1)
    except Exception as e:
        print_tb(e)
Example #6
def test_workflow():
    job = Job()
    provider = accounts[0]
    requester = accounts[1]
    register_provider()
    register_requester(requester)
    job_key = "QmQv4AAL8DZNxZeK3jfJGJi63v1msLMZGan7vSsCDXzZud"
    code_hash = ipfs_to_bytes32(job_key)
    with brownie.reverts():
        ebb.updataDataPrice(code_hash, 20, 100, {"from": provider})

    ebb.registerData(code_hash, 20, cfg.BLOCK_DURATION_1_HOUR,
                     {"from": provider})
    ebb.removeRegisteredData(
        code_hash,
        {"from": provider})  # submitJob should fail if the data is not removed

    code_hash1 = "0x68b8d8218e730fc2957bcb12119cb204"
    # "web3.toBytes(hexstr=ipfs_to_bytes32("QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve"))
    ebb.registerData(code_hash1, 20, cfg.BLOCK_DURATION_1_HOUR,
                     {"from": provider})
    mine(6)

    with brownie.reverts():
        ebb.registerData(code_hash1, 20, 1000, {"from": provider})

    ebb.updataDataPrice(code_hash1, 250, cfg.BLOCK_DURATION_1_HOUR + 1,
                        {"from": provider})

    data_block_numbers = ebb.getRegisteredDataBlockNumbers(
        provider, code_hash1)
    log(f"get_registered_data_block_numbers={data_block_numbers[1]}", "bold")
    get_block_number()
    data_prices = ebb.getRegisteredDataPrice(provider, code_hash1, 0)
    log(f"register_data_price={data_prices}", "bold")
    assert data_prices[0] == 20

    res = ebb.getRegisteredDataPrice(provider, code_hash1,
                                     data_block_numbers[1])
    log(f"register_data_price={res}", "bold")
    assert res[0] == 250
    mine(cfg.BLOCK_DURATION_1_HOUR - 9)

    res = ebb.getRegisteredDataPrice(provider, code_hash1, 0)
    log(f"register_data_price={res}", "bold")
    assert res[0] == 20
    mine(1)

    res = ebb.getRegisteredDataPrice(provider, code_hash1, 0)
    log(f"register_data_price={res}", "bold")
    assert res[0] == 250

    job.code_hashes = [code_hash,
                       code_hash1]  # hashes of the data files, as an array
    job.storage_hours = [0, 0]
    job.data_transfer_ins = [100, 0]
    job.data_transfer_out = 100

    # TODO: check whether the example block number 253 exists
    # job.data_prices_set_block_numbers = [0, 253]
    job.data_prices_set_block_numbers = [0, data_block_numbers[1]]
    check_price_keys(job.data_prices_set_block_numbers, provider, code_hash1)
    job.cores = [2, 4, 2]
    job.run_time = [10, 15, 20]
    job.storage_ids = [StorageID.IPFS.value, StorageID.NONE.value]
    job.cache_types = [CacheType.PUBLIC.value, CacheType.PUBLIC.value]
    args = [
        provider,
        ebb.getProviderSetBlockNumbers(accounts[0])[-1],
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]

    job_price, _cost = job.cost(provider, requester)
    tx = ebb.submitJob(  # first submit
        job_key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    for idx in range(0, 3):
        log(ebb.getJobInfo(provider, job_key, 0, idx))

    console_ruler(character="-=")
    assert (
        tx.events["LogRegisteredDataRequestToUse"][0]["registeredDataHash"] ==
        "0x0000000000000000000000000000000068b8d8218e730fc2957bcb12119cb204"
    ), "registered data should be used"

    # each call must revert in its own context; otherwise the second is never executed
    with brownie.reverts():
        log(ebb.getJobInfo(provider, job_key, 1, 2))

    with brownie.reverts():
        log(ebb.getJobInfo(provider, job_key, 0, 3))

    # setJobStatus for the workflow:
    index = 0
    job_id = 0
    start_time = 10
    tx = ebb.setJobStatusRunning(job_key, index, job_id, start_time,
                                 {"from": accounts[0]})
    index = 0
    job_id = 1
    start_time = 20
    tx = ebb.setJobStatusRunning(job_key, index, job_id, start_time,
                                 {"from": accounts[0]})
    # process_payment for the workflow
    index = 0
    job_id = 0
    execution_time = 10
    data_transfer = [100, 0]
    end_time = 20
    result_ipfs_hash = ipfs_to_bytes32(
        "QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve")

    received_sums = []
    refunded_sums = []
    received_sum = 0
    refunded_sum = 0
    args = [
        index, job_id, end_time, data_transfer[0], data_transfer[1], job.cores,
        job.run_time, False
    ]
    tx = ebb.processPayment(job_key, args, execution_time, result_ipfs_hash,
                            {"from": accounts[0]})
    # log(tx.events['LogProcessPayment'])
    received_sums.append(tx.events["LogProcessPayment"]["receivedWei"])
    refunded_sums.append(tx.events["LogProcessPayment"]["refundedWei"])
    received_sum += tx.events["LogProcessPayment"]["receivedWei"]
    refunded_sum += tx.events["LogProcessPayment"]["refundedWei"]
    log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}"
        )
    # -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
    index = 0
    job_id = 1
    execution_time = 15
    data_transfer = [0, 0]
    end_time = 39
    result_ipfs_hash = ipfs_to_bytes32(
        "QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve")

    args = [
        index, job_id, end_time, data_transfer[0], data_transfer[1], job.cores,
        job.run_time, False
    ]
    tx = ebb.processPayment(job_key, args, execution_time, result_ipfs_hash,
                            {"from": accounts[0]})
    received_sums.append(tx.events["LogProcessPayment"]["receivedWei"])
    refunded_sums.append(tx.events["LogProcessPayment"]["refundedWei"])
    received_sum += tx.events["LogProcessPayment"]["receivedWei"]
    refunded_sum += tx.events["LogProcessPayment"]["refundedWei"]
    log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}"
        )
    # -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
    index = 0
    job_id = 2
    execution_time = 20
    data_transfer = [0, 100]
    end_time = 39
    result_ipfs_hash = ipfs_to_bytes32(
        "QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve")
    # processPayment should revert: setJobStatusRunning was not called for job_id=2
    with brownie.reverts():
        args = [
            index,
            job_id,
            end_time,
            data_transfer[0],
            data_transfer[1],
            job.cores,
            job.run_time,
            False,
        ]
        tx = ebb.processPayment(job_key, args, execution_time,
                                result_ipfs_hash, {"from": accounts[0]})

    index = 0
    job_id = 2
    start_time = 20
    tx = ebb.setJobStatusRunning(job_key, index, job_id, start_time,
                                 {"from": accounts[0]})

    args = [
        index, job_id, end_time, data_transfer[0], data_transfer[1], job.cores,
        job.run_time, True
    ]
    tx = ebb.processPayment(job_key, args, execution_time, result_ipfs_hash,
                            {"from": accounts[0]})
    # log(tx.events['LogProcessPayment'])
    received_sums.append(tx.events["LogProcessPayment"]["receivedWei"])
    refunded_sums.append(tx.events["LogProcessPayment"]["refundedWei"])
    received_sum += tx.events["LogProcessPayment"]["receivedWei"]
    refunded_sum += tx.events["LogProcessPayment"]["refundedWei"]
    log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}"
        )
    log(received_sums)
    log(refunded_sums)
    assert job_price - _cost["storage"] == received_sum + refunded_sum
    withdraw(accounts[0], received_sum)
    withdraw(requester, refunded_sum)
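
The eight-element args list handed to ebb.submitJob recurs throughout these tests; a small helper sketch that pins down the positional layout (the helper name is ours, the ordering is taken from the calls above):

def build_submit_args(provider, price_block_number, job):
    # positional layout expected by ebb.submitJob in these tests
    return [
        provider,
        price_block_number,
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]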
Example #7
def test_multiple_data():
    job = Job()
    provider = accounts[0]
    requester = accounts[1]
    requester_1 = accounts[2]
    register_provider()
    register_requester(requester)
    register_requester(requester_1)

    job_key = "QmQv4AAL8DZNxZeK3jfJGJi63v1msLMZGan7vSsCDXzZud"
    job.code_hashes.append(ipfs_to_bytes32(job_key))

    job_key_2 = "QmVqtWxuBdZQdLnLce6XCBMuqoazAcbmuxoJHQbfbuqDu2"
    job.code_hashes.append(ipfs_to_bytes32(job_key_2))

    job.data_transfer_ins = [100, 100]
    job.data_transfer_out = 100
    # provider's registered data won't be used
    job.storage_hours = [1, 1]
    job.data_prices_set_block_numbers = [0, 0]
    job.cores = [2]
    job.run_time = [10]
    provider_price_block_number = ebb.getProviderSetBlockNumbers(
        accounts[0])[-1]
    job.storage_ids = [StorageID.EUDAT.value, StorageID.IPFS.value]
    job.cache_types = [CacheType.PRIVATE.value, CacheType.PUBLIC.value]
    args = [
        provider,
        provider_price_block_number,
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]

    job_price, _cost = job.cost(provider, requester)
    # first time job is submitted with the data files
    tx = ebb.submitJob(
        job_key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    log(f"==> job_index={tx.events['LogJob']['index']}")
    log(tx.events["LogJob"]["jobKey"])
    assert _cost["storage"] == 200, "not verified yet, so the storage cost should be 200"

    # the same user submits the job a second time with the same data files
    job_price, _cost = job.cost(provider, requester)
    assert _cost["storage"] == 0, "storage is already paid by this user, so it should be 0"

    # a different user submits the job with the same data files
    job_price, _cost = job.cost(provider, requester_1)
    log(f"==> cost={_cost}")
    assert _cost["storage"] == 200, "not verified yet, so the storage cost should be 200"
    # cluster verifies the given data files for the related job
    index = 0
    is_verified_list = [True, True]
    tx = ebb.dataReceived(
        job_key,
        index,
        job.code_hashes,
        job.cache_types,
        is_verified_list,
        {
            "from": provider,
            "gas": 4500000
        },
    )
    # the same user asks for the cost again with the same data files
    job_price, _cost = job.cost(provider, requester)
    assert _cost["storage"] == 0, "data is verified, so the storage cost should be 0"
    # a different user asks for the cost with the same data files
    job_price, _cost = job.cost(provider, requester_1)
    assert _cost["storage"] == 100, "data1 is verified and public, so only data0's storage (100) is charged"
    # ds = scripts.DataStorage(provider, code_hashes[1], True)
    job_price, _cost = job.cost(provider, requester)
    assert _cost["storage"] == 0, "already paid on the first job submission, so it should be 0"
    assert _cost["data_transfer"] == job.data_transfer_out, "data_transfer cost should cover only data_transfer_out"
    tx = ebb.submitJob(
        job_key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    log(f"job_index={tx.events['LogJob']['index']}", "bold")
    # ===== provider side =====
    index = 0
    job_id = 0
    start_time = get_block_timestamp()
    execution_time = 10
    result_ipfs_hash = "0xabcd"
    tx = ebb.setJobStatusRunning(job_key, index, job_id, start_time,
                                 {"from": accounts[0]})
    mine(60 * execution_time // cfg.BLOCK_DURATION)
    end_time = start_time + 60 * execution_time
    block_timestamp = get_block_timestamp()
    assert (
        end_time <= block_timestamp
    ), f"block timestamp is ahead of completion time, difference={block_timestamp - end_time}"
    args = [
        index,
        job_id,
        end_time,
        sum(job.data_transfer_ins),
        job.data_transfer_out,
        job.cores,
        job.run_time,
        False,
    ]
    tx = ebb.processPayment(job_key, args, execution_time, result_ipfs_hash,
                            {"from": accounts[0]})
    received_sum = tx.events["LogProcessPayment"]["receivedWei"]
    refunded_sum = tx.events["LogProcessPayment"]["refundedWei"]
    log(f"received_sum={received_sum} refunded_sum={refunded_sum}", "bold")
    assert received_sum == 320 and refunded_sum == 0
    withdraw(accounts[0], received_sum)
    withdraw(requester, refunded_sum)
    data_transfer_in = 0  # already requested on index==0
    data_transfer_out = 100
    data_transfer = [data_transfer_in, data_transfer_out]
    index = 1
    job_id = 0
    start_time = get_block_timestamp()
    execution_time = 10
    result_ipfs_hash = "0xabcd"
    tx = ebb.setJobStatusRunning(job_key, index, job_id, start_time,
                                 {"from": accounts[0]})
    mine(60 * execution_time // cfg.BLOCK_DURATION)
    end_time = start_time + 60 * execution_time
    args = [
        index, job_id, end_time, data_transfer[0], data_transfer[1], job.cores,
        job.run_time, False
    ]
    tx = ebb.processPayment(job_key, args, execution_time, result_ipfs_hash,
                            {"from": accounts[0]})
    # log(tx.events['LogProcessPayment'])
    received_sum = tx.events["LogProcessPayment"]["receivedWei"]
    refunded_sum = tx.events["LogProcessPayment"]["refundedWei"]
    log(f"received_sum={received_sum} refunded_sum={refunded_sum}", "bold")
    assert received_sum == 120 and refunded_sum == 0
    withdraw(accounts[0], received_sum)
    withdraw(requester, refunded_sum)
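
Extracting receivedWei/refundedWei from LogProcessPayment and withdrawing both sides repeats in several of these tests; a convenience sketch (the helper name is ours; withdraw is the test helper used above):

def settle_payment(tx, provider, requester):
    # pull the payment split out of the LogProcessPayment event
    received = tx.events["LogProcessPayment"]["receivedWei"]
    refunded = tx.events["LogProcessPayment"]["refundedWei"]
    withdraw(provider, received)
    withdraw(requester, refunded)
    return received, refunded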
Example #8
def test_storage_refund():
    job = Job()
    provider = accounts[0]
    requester = accounts[1]

    register_provider()
    register_requester(requester)

    job_key = "QmQv4AAL8DZNxZeK3jfJGJi63v1msLMZGan7vSsCDXzZud"
    job.code_hashes.append(ipfs_to_bytes32(job_key))
    job.storage_hours.append(1)

    job_key_2 = "QmVqtWxuBdZQdLnLce6XCBMuqoazAcbmuxoJHQbfbuqDu2"
    job.code_hashes.append(ipfs_to_bytes32(job_key_2))
    job.storage_hours.append(1)
    job.data_transfer_ins = [100, 100]
    job.data_transfer_out = 100
    job.data_prices_set_block_numbers = [0, 0]
    job.cores = [2]
    job.run_time = [10]
    job.provider_price_block_number = ebb.getProviderSetBlockNumbers(
        accounts[0])[-1]
    job.storage_ids = [StorageID.EUDAT.value, StorageID.IPFS.value]
    job.cache_types = [CacheType.PRIVATE.value, CacheType.PUBLIC.value]

    # provider's registered data won't be used
    job.data_prices_set_block_numbers = [0, 0]

    job_price, _cost = job.cost(provider, requester)
    job_price += 1  # pay 1 wei extra to test the refund path
    args = [
        provider,
        job.provider_price_block_number,
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]
    tx = ebb.submitJob(
        job_key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )

    refunded = tx.events["LogJob"]["refunded"]
    log(f"==> job_index={tx.events['LogJob']['index']}")
    log(f"refunded={refunded}", "bold")
    log(tx.events["LogJob"]["jobKey"])
    assert requester == tx.events["LogJob"]["owner"]
    withdraw(requester, refunded)  # verify the extra payment is refunded
    index = 0
    job_id = 0
    tx = ebb.refund(provider, job_key, index, job_id, job.cores, job.run_time,
                    {"from": provider})
    log(ebb.getJobInfo(provider, job_key, index, job_id))
    refundedWei = tx.events["LogRefundRequest"]["refundedWei"]
    log(f"refunded_wei={refundedWei}", "bold")
    withdraw(requester, refundedWei)
    # VM Exception while processing transaction: invalid opcode
    with brownie.reverts():
        ebb.getJobInfo(provider, job_key, 5, job_id)

    storage_cost_sum = 0
    for code_hash in job.code_hashes:
        _storage_cost_sum, *_ = ebb.getStorageInfo(provider, requester,
                                                   code_hash)
        storage_cost_sum += _storage_cost_sum

    assert _cost["storage"] == storage_cost_sum
    assert _cost["computational"] + _cost["data_transfer"] + _cost[
        "cache"] == refundedWei
    mine(cfg.BLOCK_DURATION_1_HOUR)
    tx = ebb.refundStorageDeposit(provider, requester, job.code_hashes[0], {
        "from": requester,
        "gas": 4500000
    })
    refundedWei = tx.events["LogDepositStorage"]["payment"]
    log(f"refunded_wei={refundedWei}", "bold")
    withdraw(requester, refundedWei)
    with brownie.reverts():
        tx = ebb.refundStorageDeposit(provider, requester, job.code_hashes[0],
                                      {
                                          "from": requester,
                                          "gas": 4500000
                                      })

    tx = ebb.refundStorageDeposit(provider, requester, job.code_hashes[1], {
        "from": requester,
        "gas": 4500000
    })
    refundedWei = tx.events["LogDepositStorage"]["payment"]
    paid_address = tx.events["LogDepositStorage"]["paidAddress"]
    withdraw(requester, refundedWei)
    with brownie.reverts():
        tx = ebb.refundStorageDeposit(provider, requester, job.code_hashes[0],
                                      {
                                          "from": requester,
                                          "gas": 4500000
                                      })

    assert requester == paid_address
    assert ebb.balanceOf(provider) == 0
    console_ruler("same job submitted after full refund", color="blue")
    tx = ebb.submitJob(
        job_key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    log(f"job_index={tx.events['LogJob']['index']}", "bold")
    log(tx.events["LogJob"]["jobKey"])
    index = 1
    job_id = 0
    tx = ebb.refund(provider, job_key, index, job_id, job.cores, job.run_time,
                    {"from": provider})
    log(ebb.getJobInfo(provider, job_key, index, job_id))
    refundedWei = tx.events["LogRefundRequest"]["refundedWei"]
    log(f"refunded_wei={refundedWei}", "bold")
    assert _cost["computational"] + _cost["data_transfer"] + _cost[
        "cache"] == refundedWei
    storage_cost_sum = 0
    storage_payment = []
    for code_hash in job.code_hashes:
        deposit, *_ = ebb.getStorageInfo(provider, requester, code_hash)
        storage_payment.append(deposit)

    job.is_verified = [True, True]
    ebb.dataReceived(  # called by the provider
        job_key, index, job.code_hashes, job.cache_types, job.is_verified, {
            "from": provider,
            "gas": 4500000
        })
    for code_hash in job.code_hashes:
        *_, output = ebb.getStorageInfo(provider, cfg.ZERO_ADDRESS, code_hash)
        log(output, "bold")

    # refundStorageDeposit should revert: the deposit is already used by the provider
    with brownie.reverts():
        for code_hash in job.code_hashes:
            tx = ebb.refundStorageDeposit(provider, requester, code_hash, {
                "from": requester,
                "gas": 4500000
            })

        tx = ebb.depositStorage(requester, job.code_hashes[0], {
            "from": provider,
            "gas": 4500000
        })

    mine(cfg.BLOCK_DURATION_1_HOUR)
    # after the storage deadline (1 hour) passes, the provider can collect the payment
    for idx, code_hash in enumerate(job.code_hashes):
        tx = ebb.depositStorage(requester, code_hash, {
            "from": provider,
            "gas": 4500000
        })
        amount = tx.events["LogDepositStorage"]["payment"]
        withdraw(provider, amount)
        assert storage_payment[idx] == amount
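
The per-hash deposit loop above can be collapsed into one expression, since the deposit is the first element returned by getStorageInfo:

storage_cost_sum = sum(
    ebb.getStorageInfo(provider, requester, code_hash)[0]
    for code_hash in job.code_hashes)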
Example #9
def test_computational_refund():
    job = Job()
    provider = accounts[0]
    requester = accounts[1]
    register_provider(100)
    register_requester(requester)
    job.code_hashes = [
        b"9b3e9babb65d9c1aceea8d606fc55403",
        b"9a4c0c1c9aadb203daf9367bd4df930b"
    ]
    job.cores = [1]
    job.run_time = [5]
    job.data_transfer_ins = [1, 1]
    job.data_transfer_out = 1
    job.storage_ids = [StorageID.EUDAT.value, StorageID.EUDAT.value]
    job.cache_types = [CacheType.PUBLIC.value, CacheType.PUBLIC.value]
    job.storage_hours = [0, 0]
    job.data_prices_set_block_numbers = [0, 0]
    job_price, _cost = job.cost(provider, requester)
    provider_price_block_number = ebb.getProviderSetBlockNumbers(
        accounts[0])[-1]
    args = [
        provider,
        provider_price_block_number,
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]
    tx = ebb.submitJob(
        job.code_hashes[0],
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    index = 0
    job_id = 0
    start_time = 1579524978
    tx = ebb.setJobStatusRunning(job.code_hashes[0], index, job_id, start_time,
                                 {"from": accounts[0]})
    rpc.sleep(60)
    mine(5)
    args = [index, job_id, 1579524998, 2, 0, job.cores, [5], True]
    run_time = 1
    tx = ebb.processPayment(job.code_hashes[0], args, run_time, zero_bytes32,
                            {"from": accounts[0]})
    received_sum = tx.events["LogProcessPayment"]["receivedWei"]
    refunded_sum = tx.events["LogProcessPayment"]["refundedWei"]
    log(f"{received_sum} {refunded_sum}")
    assert received_sum + refunded_sum == 505
    assert received_sum == 104 and refunded_sum == 401
    withdraw(accounts[0], received_sum)
    withdraw(requester, refunded_sum)
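
A hedged cross-check, assuming the same settlement identity asserted in Example #6 (everything paid beyond storage comes back as received plus refunded):

assert received_sum + refunded_sum == job_price - _cost["storage"]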
Example #10
def test_data_info():
    job = Job()
    provider = accounts[0]
    requester = accounts[1]
    register_provider(100)
    register_requester(requester)
    job_key = b"9b3e9babb65d9c1aceea8d606fc55403"
    job.code_hashes = [job_key, b"9a4c0c1c9aadb203daf9367bd4df930b"]
    job.cores = [1]
    job.run_time = [5]
    job.data_transfer_ins = [0, 1]
    job.data_transfer_out = 1
    job.storage_ids = [StorageID.IPFS.value, StorageID.IPFS.value]
    job.cache_types = [CacheType.PUBLIC.value, CacheType.PUBLIC.value]
    job.storage_hours = [0, 1]
    job.data_prices_set_block_numbers = [0, 0]
    job_price, _cost = job.cost(provider, requester)
    provider_price_block_number = ebb.getProviderSetBlockNumbers(
        accounts[0])[-1]
    args = [
        provider,
        provider_price_block_number,
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]
    ebb.submitJob(
        job_key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    provider_price_info = ebb.getProviderInfo(provider, 0)
    price_cache = provider_price_info[1][4]
    storage_payment = []
    for idx, code_hash in enumerate(job.code_hashes):
        deposit, *_ = ebb.getStorageInfo(provider, requester, code_hash)
        storage_payment.append(deposit)
        assert storage_payment[idx] == job.storage_hours[idx] * price_cache

    job.is_verified = [False, True]
    ebb.dataReceived(  # called by the provider
        job_key,
        0,
        job.code_hashes,
        job.cache_types,
        job.is_verified,
        {
            "from": provider,
            "gas": 4500000
        },
    )
    for idx, code_hash in enumerate(job.code_hashes):
        *_, output = ebb.getStorageInfo(provider, cfg.ZERO_ADDRESS, code_hash)
        assert output[3] == job.is_verified[idx]
        # requester is data_owner

    for idx, code_hash in enumerate(job.code_hashes):
        with brownie.reverts():
            tx = ebb.depositStorage(requester, code_hash, {
                "from": provider,
                "gas": 4500000
            })

    mine(cfg.BLOCK_DURATION_1_HOUR)
    for idx, code_hash in enumerate(job.code_hashes):
        *_, output = ebb.getStorageInfo(provider, cfg.ZERO_ADDRESS, code_hash)
        if output[3]:
            tx = ebb.depositStorage(requester, code_hash, {
                "from": provider,
                "gas": 4500000
            })
            log(tx.events["LogDepositStorage"])
Example #11
def test_stored_data_usage():
    job = Job()
    provider = accounts[0]
    requester = accounts[1]
    requester_1 = accounts[2]
    register_provider(100)
    register_requester(requester)
    register_requester(requester_1)
    job_key = "1v12W1CJwSKE-SPFiq86pGpF74WPNRBD2"
    job.code_hashes.append(b"050e6cc8dd7e889bf7874689f1e1ead6")
    job.code_hashes.append(b"b6aaf03752dc68d625fc57b451faa2bf")
    job.data_transfer_ins = [1, 1]
    job.data_transfer_out = 1
    # provider's registered data won't be used
    job.storage_hours = [1, 1]
    job.data_prices_set_block_numbers = [0, 0]
    job.cores = [1]
    job.run_time = [5]
    job.provider_price_block_number = ebb.getProviderSetBlockNumbers(
        accounts[0])[-1]
    job.storage_ids = [StorageID.GDRIVE.value, StorageID.GDRIVE.value]
    job.cache_types = [CacheType.PUBLIC.value, CacheType.PRIVATE.value]
    args = [
        provider,
        job.provider_price_block_number,
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]

    job_price, _cost = job.cost(provider, requester)
    # first time job is submitted with the data files
    # https://stackoverflow.com/a/12468284/2402577
    tx = ebb.submitJob(
        job_key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    log(tx.events["LogDataStorageRequest"]["owner"])
    log(f"==> job_index={tx.events['LogJob']['index']}")
    log(tx.events["LogJob"]["jobKey"])
    assert _cost["storage"] == 2
    job_price, _cost = job.cost(provider, requester)
    log(f"==> job_index={tx.events['LogJob']['index']}")
    log(tx.events["LogJob"]["jobKey"])
    assert _cost["storage"] == 0, "storage is already paid, so its cost should be 0"
    assert _cost["data_transfer"] == 1
    with brownie.reverts():
        job_price_revert = 500  # intentionally underpaid; does not cover data_transfer_in
        tx = ebb.submitJob(
            job.code_hashes[0],
            job.data_transfer_ins,
            args,
            job.storage_hours,
            job.code_hashes,
            {
                "from": requester,
                "value": web3.toWei(job_price_revert, "wei")
            },
        )

    tx = ebb.submitJob(
        job.code_hashes[0],
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )

    assert "LogDataStorageRequest" not in tx.events
    mine(cfg.BLOCK_DURATION_1_HOUR)
    job_price, _cost = job.cost(provider, requester)
    # storage duration has expired; data files must be paid for again
    tx = ebb.submitJob(
        job.code_hashes[0],
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )

    log(tx.events["LogDataStorageRequest"]["owner"])
Example #12
def test_submit_job():
    job = Job()
    provider = accounts[0]
    requester = accounts[1]
    register_provider()
    register_requester(requester)
    fname = f"{cwd}/files/test.txt"
    # fname = f"{cwd}/files/test_.txt"
    log(f"==> registered_provider_addresses={ebb.getProviders()}")
    provider_price_info = ebb.getProviderInfo(accounts[0], 0)
    # block_read_from = provider_price_info[0]
    _provider_price_info = provider_price_info[1]
    available_core = _provider_price_info[0]  # availableCoreNum; used by the log below
    # commitmentBlockDuration = _provider_price_info[1]
    price_core_min = _provider_price_info[2]
    # price_data_transfer = _provider_price_info[3]
    # price_storage = _provider_price_info[4]
    # price_cache = _provider_price_info[5]
    log(f"provider_available_core={available_core}")
    log(f"provider_price_core_min={price_core_min}")
    log(provider_price_info)
    job_price_sum = 0
    job_id = 0
    index = 0
    with open(fname) as f:
        for line in f:
            arguments = line.rstrip("\n").split(" ")
            storage_hour = 1
            core_min = int(arguments[1]) - int(arguments[0])
            core = int(arguments[2])
            job.cores = [core]
            job.run_time = [core_min]
            # time.sleep(1)
            # rpc.mine(int(arguments[0]))

            job_key = "QmQv4AAL8DZNxZeK3jfJGJi63v1msLMZGan7vSsCDXzZud"
            data_key = "QmQv4AAL8DZNxZeK3jfJGJi63v1msLMZGan7vSsCDXzZud"
            code_hash = ipfs_to_bytes32(data_key)
            # log("Client Balance before: " + str(web3.eth.balanceOf(account)))
            # log("Contract Balance before: " + str(web3.eth.balanceOf(accounts[0])))
            job.code_hashes = [code_hash]
            job.storage_hours = [storage_hour]
            job.data_transfer_ins = [100]
            job.data_transfer_out = 100
            job.data_prices_set_block_numbers = [0]
            job.storage_ids = [StorageID.IPFS.value]
            job.cache_types = [CacheType.PUBLIC.value]
            args = [
                provider,
                ebb.getProviderSetBlockNumbers(accounts[0])[-1],
                job.storage_ids,
                job.cache_types,
                job.data_prices_set_block_numbers,
                job.cores,
                job.run_time,
                job.data_transfer_out,
            ]

            # log(code_hashes[0])
            job_price, _cost = job.cost(provider, requester)
            job_price_sum += job_price
            data_transfer_ins = [100]
            job_key = job.storage_hours[0]
            tx = ebb.submitJob(
                job_key,
                data_transfer_ins,
                args,
                job.storage_hours,
                job.code_hashes,
                {
                    "from": requester,
                    "value": web3.toWei(job_price, "wei")
                },
            )
            # log('submitJob => GasUsed:' + str(tx.__dict__['gas_used']) + '| blockNumber=' + str(tx.block_number))
            log(f"job_index={tx.events['LogJob']['index']}", "bold")
            # log("Contract Balance after: " + str(web3.eth.balanceOf(accouts[0])))
            # log("Client Balance after: " + str(web3.eth.balanceOf(accounts[8])))
            # sys.stdout.write('jobInfo: ')
            # sys.stdout.flush()
            log(ebb.getJobInfo(provider, job_key, index, job_id))
            index += 1

    log(f"total_paid={job_price_sum}")
    # log(block_read_from)
    # rpc.mine(100)
    # log(web3.eth.blockNumber)
    job_id = 0
    with open(fname) as f:
        for index, line in enumerate(f):
            arguments = line.rstrip("\n").split(" ")
            tx = ebb.setJobStatusRunning(job_key, index, job_id,
                                         int(arguments[0]),
                                         {"from": accounts[0]})
            if index == 0:
                with brownie.reverts():
                    tx = ebb.setJobStatusRunning(job_key, index, job_id,
                                                 int(arguments[0]) + 1,
                                                 {"from": accounts[0]})

    console_ruler()
    result_ipfs_hash = ipfs_to_bytes32(
        "QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve")
    with open(fname) as f:
        for index, line in enumerate(f):
            arguments = line.rstrip("\n").split(" ")
            if index == 0:
                data_transfer_in_sum = 90
                job.data_transfer_out = 100
            else:
                data_transfer_in_sum = 0
                job.data_transfer_out = 100

            core_min = int(arguments[1]) - int(arguments[0])
            core = int(arguments[2])
            job.cores = [core]
            job.run_time = [core_min]
            log(f"contract_balance={ebb.getContractBalance()}", "bold")
            job_id = 0
            execution_time = int(arguments[1]) - int(arguments[0])
            end_time = int(arguments[1])
            args = [
                index,
                job_id,
                end_time,
                data_transfer_in_sum,
                job.data_transfer_out,
                job.cores,
                job.run_time,
                True,
            ]
            tx = ebb.processPayment(job_key, args, execution_time,
                                    result_ipfs_hash, {"from": accounts[0]})
            # code_hashes
            received = tx.events["LogProcessPayment"]["receivedWei"]
            refunded = tx.events["LogProcessPayment"]["refundedWei"]
            withdraw(accounts[0], received)
            withdraw(requester, refunded)
            log(f"received={received} | refunded={refunded}", "bold")

    log(f"contract_balance={ebb.getContractBalance()}", "bold")
    for idx in range(0, ebb.getProviderReceiptSize(provider)):
        # prints finalize version of the linked list
        log(ebb.getProviderReceiptNode(provider, idx))

    console_ruler()
    log(f"==> storage_duration for job={job_key}")
    *_, job_storage_info = ebb.getStorageInfo(provider, cfg.ZERO_ADDRESS,
                                              code_hash)
    ds = DataStorage(job_storage_info)
    log(f"receivedBlockNumber={ds.received_block} |"
        f"storage_duration(block numbers)={ds.storage_duration} | "
        f"is_private={ds.is_private} |"
        f"is_verified_Used={ds.is_verified_used}")
    received_storage_deposit, *_ = ebb.getStorageInfo(provider, requester,
                                                      code_hash)
    log(f"received_storage_deposit={received_storage_deposit}")
    console_ruler("DONE")
Example #13
def test_simple_submit():
    job = Job()
    provider = accounts[0]
    requester = accounts[1]
    price_core_min = 100
    register_provider(price_core_min)
    register_requester(requester)
    job.code_hashes = [
        b"9b3e9babb65d9c1aceea8d606fc55403",
        b"9a4c0c1c9aadb203daf9367bd4df930b"
    ]
    job.key = job.code_hashes[0]
    job.cores = [2]
    job.run_time = [1]
    job.data_transfer_ins = [1, 1]
    job.data_transfer_out = 1
    job.storage_ids = [StorageID.EUDAT.value, StorageID.EUDAT.value]
    job.cache_types = [CacheType.PUBLIC.value, CacheType.PUBLIC.value]
    job.storage_hours = [0, 0]
    job.data_prices_set_block_numbers = [0, 0]

    job_price, _cost = job.cost(provider, requester)
    provider_price_block_number = ebb.getProviderSetBlockNumbers(
        accounts[0])[-1]

    args = [
        provider,
        provider_price_block_number,
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]

    tx = ebb.submitJob(
        job.key,
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    log(f"submitJob_gas_used={tx.__dict__['gas_used']}")
    index = 0
    job_id = 0
    start_time = 1579524978
    tx = ebb.setJobStatusRunning(job.key, index, job_id, start_time,
                                 {"from": provider})
    rpc.sleep(60)
    mine(5)

    completion_time = 1579524998
    data_transfer_in = 0
    data_transfer_out = 0.01
    args = [
        index, job_id, completion_time, data_transfer_in, data_transfer_out,
        job.cores, [1], True
    ]
    elapsed_time = 1
    out_hash = b"[46\x17\x98r\xc2\xfc\xe7\xfc\xb8\xdd\n\xd6\xe8\xc5\xca$fZ\xebVs\xec\xff\x06[\x1e\xd4f\xce\x99"
    tx = ebb.processPayment(job.key, args, elapsed_time, out_hash,
                            {"from": accounts[0]})
    # tx = ebb.processPayment(job.code_hashes[0], args, elapsed_time, zero_bytes32, {"from": accounts[0]})
    received_sum = tx.events["LogProcessPayment"]["receivedWei"]
    refunded_sum = tx.events["LogProcessPayment"]["refundedWei"]
    # log(str(received_sum) + " " + str(refunded_sum))
    assert received_sum == job.cores[0] * price_core_min and refunded_sum == 5
    withdraw(accounts[0], received_sum)
    withdraw(requester, refunded_sum)
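
Worked arithmetic behind the final assertion: the received side is the computational charge alone, cores[0] * run_time[0] * price_core_min = 2 * 1 * 100 = 200 wei; since run_time[0] == 1, this equals job.cores[0] * price_core_min.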
Example #14
def submit_ipfs(job: Job, is_pass=False, required_confs=1):
    Ebb = cfg.Ebb
    requester = Ebb.w3.toChecksumAddress(job.requester_addr)
    provider = Ebb.w3.toChecksumAddress(job.provider_addr)
    pre_check(job, requester)
    log("==> Attemptting to submit a job")
    main_storage_id = job.storage_ids[0]
    job.folders_to_share = job.paths
    check_link_folders(job.data_paths, job.registered_data_files, is_pass=is_pass)
    if main_storage_id == StorageID.IPFS:
        log("==> Submitting source code through [blue]IPFS[/blue]")
    elif main_storage_id == StorageID.IPFS_GPG:
        log("==> Submitting source code through [blue]IPFS_GPG[/blue]")
    else:
        log("E: Please provide IPFS or IPFS_GPG storage type for the source code")
        sys.exit(1)

    targets = []
    try:
        provider_info = Ebb.get_provider_info(provider)
    except Exception as e:
        print_tb(e)
        sys.exit(1)

    for idx, folder in enumerate(job.folders_to_share):
        if isinstance(folder, Path):
            target = folder
            if job.storage_ids[idx] == StorageID.IPFS_GPG:
                provider_gpg_fingerprint = provider_info["gpg_fingerprint"]
                if not provider_gpg_fingerprint:
                    log("E: Provider did not register any GPG fingerprint")
                    sys.exit(1)

                log(f"==> provider_gpg_fingerprint={provider_gpg_fingerprint}")
                try:
                    # target is updated to point at the encrypted archive
                    target = cfg.ipfs.gpg_encrypt(provider_gpg_fingerprint, target)
                    log(f"==> gpg_file={target}")
                except Exception as e:
                    print_tb(e)
                    sys.exit(1)

            try:
                ipfs_hash = cfg.ipfs.add(target)
                # ipfs_hash = ipfs.add(folder, True)  # True includes .git/
                run(["ipfs", "refs", ipfs_hash])
            except Exception as e:
                print_tb(e)
                sys.exit(1)

            if idx == 0:
                key = ipfs_hash

            job.code_hashes.append(ipfs_to_bytes32(ipfs_hash))
            job.code_hashes_str.append(ipfs_hash)
            log(f"==> ipfs_hash={ipfs_hash} | md5sum={generate_md5sum(target)}")
            if main_storage_id == StorageID.IPFS_GPG:
                # the created gpg file will be removed since it's already in ipfs
                targets.append(target)
        else:
            code_hash = folder
            if isinstance(code_hash, bytes):
                job.code_hashes.append(code_hash)
                job.code_hashes_str.append(code_hash.decode("utf-8"))

            # TODO: if its ipfs
            # if isinstance(code_hash, bytes):
            #     code_hash = code_hash.decode("utf-8")

            # if len(code_hash) == 32:
            #     value = cfg.w3.toBytes(text=code_hash)
            #     job.code_hashes.append(value)
            #     job.code_hashes_str.append(value.decode("utf-8"))
            # else:
            #     job.code_hashes.append(ipfs_to_bytes32(code_hash))
            #     job.code_hashes_str.append(code_hash)

        # if idx != len(job.folders_to_share) - 1:
        #     log("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-", "cyan")

    # requester inputs for testing purposes
    job.price, *_ = job.cost(provider, requester)
    tx_hash = None  # stays None if submission is skipped or fails
    try:
        tx_hash = Ebb.submit_job(provider, key, job, requester=requester, required_confs=required_confs)
        if required_confs >= 1:
            tx_receipt = get_tx_status(tx_hash)
            if tx_receipt["status"] == 1:
                processed_logs = Ebb._eBlocBroker.events.LogJob().processReceipt(tx_receipt, errors=DISCARD)
                try:
                    if processed_logs:
                        log("job_info:", "bold yellow")
                        log(vars(processed_logs[0].args))

                    for target in targets:
                        if ".tar.gz.gpg" in str(target):
                            _remove(target)
                except IndexError:
                    log(f"E: Tx={tx_hash} is reverted")
        else:
            pass
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)

    return tx_hash
Example #15
def main():
    job = Job()
    job.set_config(Path.home() / "ebloc-broker" / "broker" / "ipfs" / "job_simple.yaml")
    submit_ipfs(job)