Ejemplo n.º 1
0
def _generate_git_repo(folder):
    """Initialize *folder* as a git repository and commit its current state.

    Re-raises any failure from the underlying git helpers so the caller
    can decide how to recover.
    """
    log(folder, "green")
    try:
        initialize_check(folder)
        commit_changes(folder)
    except Exception:
        raise  # bare `raise` keeps the original traceback (was `raise e`)
Ejemplo n.º 2
0
def _dump_dict_to_file(filename, job_keys):
    """Serialize the *job_keys* mapping into *filename*.

    Prints the traceback and re-raises on failure so the caller sees the
    original exception.
    """
    try:
        log("==> meta_data.json file is updated in the parent folder")
        dump_dict_to_file(filename, job_keys)
    except Exception as e:
        print_tb(e)
        raise  # bare `raise` preserves the original traceback (was `raise e`)
Ejemplo n.º 3
0
def appy_patch(base_dir, patch_fn):
    r"""Apply path file.

    cmd: perl -pe 's/\x1b.*?[mGKH]//g' alper.patch > good.patch

    NOTE(review): the function name has a typo (`appy` -> `apply`) but is
    kept as-is for external callers.
    """
    # full path of the (possibly gzipped) patch and of its extracted form
    patch_file = f"{base_dir}/{patch_fn}"
    base_name = patch_fn.replace(".gz", "")
    diff_file_name = f"{base_dir}/{base_name}"
    if not os.path.isfile(diff_file_name):
        if not os.path.isfile(patch_file):
            print(f"E: {patch_file} file does not exist")
            sys.exit(1)

        if patch_file.endswith(".diff.gz"):
            extract_gzip(patch_file)
    else:
        log(f"==> [magenta]{diff_file_name}[/magenta] exists")

    try:
        git.apply_patch(base_dir, patch_file.replace(".gz", ""), is_gpg=False)
    except Exception as e:
        print_tb(e)
        try:
            # fallback: strip ANSI escape sequences from the patch into
            # good.patch (see docstring) and retry the apply once
            good_patch = f"{base_dir}/good.patch"
            sep = "~"
            # NOTE(review): the backslash-escaping of `sep` looks like a
            # leftover from a shell invocation; popen_communicate gets a
            # list (no shell), so the escape likely has no effect — confirm
            popen_communicate(
                [
                    "perl", "-pe", "s/\x1b.*?[mGKH]//g",
                    str(Path(patch_file)).replace(f"{sep}", f"\{sep}")
                ],
                stdout_fn=good_patch,
            )
            git.apply_patch(base_dir, good_patch, is_gpg=False)
        except Exception as e1:
            print_tb(e1)
Ejemplo n.º 4
0
def test_computational_refund():
    """Submit a job, finish it earlier than requested and check the refund split."""
    job = Job()
    provider = accounts[0]
    requester = accounts[1]
    register_provider(100)
    register_requester(requester)
    # two data files referenced by their hashes, sent as bytes
    job.code_hashes = [
        b"9b3e9babb65d9c1aceea8d606fc55403",
        b"9a4c0c1c9aadb203daf9367bd4df930b"
    ]
    job.cores = [1]
    job.run_time = [5]
    job.data_transfer_ins = [1, 1]
    job.data_transfer_out = 1
    job.storage_ids = [StorageID.EUDAT.value, StorageID.EUDAT.value]
    job.cache_types = [CacheType.PUBLIC.value, CacheType.PUBLIC.value]
    job.storage_hours = [0, 0]
    job.data_prices_set_block_numbers = [0, 0]
    job_price, _cost = job.cost(provider, requester)
    provider_price_block_number = ebb.getProviderSetBlockNumbers(
        accounts[0])[-1]
    args = [
        provider,
        provider_price_block_number,
        job.storage_ids,
        job.cache_types,
        job.data_prices_set_block_numbers,
        job.cores,
        job.run_time,
        job.data_transfer_out,
    ]
    tx = ebb.submitJob(
        job.code_hashes[0],
        job.data_transfer_ins,
        args,
        job.storage_hours,
        job.code_hashes,
        {
            "from": requester,
            "value": web3.toWei(job_price, "wei")
        },
    )
    index = 0
    job_id = 0
    start_time = 1579524978
    tx = ebb.setJobStatusRunning(job.code_hashes[0], index, job_id, start_time,
                                 {"from": accounts[0]})
    rpc.sleep(60)
    mine(5)
    # completed run_time=1 is below the requested run_time=5, so part of the
    # payment should come back to the requester
    args = [index, job_id, 1579524998, 2, 0, job.cores, [5], True]
    run_time = 1
    tx = ebb.processPayment(job.code_hashes[0], args, run_time, zero_bytes32,
                            {"from": accounts[0]})
    received_sum = tx.events["LogProcessPayment"]["receivedWei"]
    refunded_sum = tx.events["LogProcessPayment"]["refundedWei"]
    log(f"{received_sum} {refunded_sum}")
    # provider income + requester refund must equal the total charged amount
    assert received_sum + refunded_sum == 505
    assert received_sum == 104 and refunded_sum == 401
    withdraw(accounts[0], received_sum)
    withdraw(requester, refunded_sum)
Ejemplo n.º 5
0
def is_contract_exists():
    """Log whether the deployed eBlocBroker contract exists on the chain."""
    try:
        Ebb = cfg.Ebb
    except Exception as e:
        print_tb(e)
        # without a valid `Ebb` the log below would fail with NameError;
        # propagate the real error instead of masking it
        raise

    log(f"==> is_contract_exists={Ebb.is_contract_exists()}", "bold")
Ejemplo n.º 6
0
def get_data_key_ids(results_folder_prev):
    """Read and return the job's ``meta_data.json`` from *results_folder_prev*.

    Raises on a read/parse failure instead of falling through with an
    unbound local.
    """
    filename = f"{results_folder_prev}/meta_data.json"
    # was `f"==> meta_data_path=(unknown)"` — an f-string with no
    # placeholder that logged a literal "(unknown)"; log the actual path
    log(f"==> meta_data_path={filename}")
    try:
        meta_data = read_json(filename)
    except Exception as e:
        print_tb(e)
        raise  # `meta_data` would be unbound below; do not fall through

    return meta_data
Ejemplo n.º 7
0
def is_repo(folders):
    """Ensure each path-like entry of *folders* is an initialized git repo.

    Entries given as ``bytes`` are code hashes, not paths, and are skipped.
    """
    for path in folders:
        if isinstance(path, bytes):
            continue

        with cd(path):
            if is_initialized(path):
                continue

            log(f"warning: .git does not exits in {path}. Applying: git init ",
                end="")
            run(["git", "init", "--initial-branch=master"])
            log(ok())
Ejemplo n.º 8
0
def username_check(check):
    """Return True when no system user named *check* exists, else False."""
    try:
        pwd.getpwnam(check)
    except KeyError:
        # lookup failed => the username is free
        log("user %s does not exist. Continuing... %s" % (check, check))
        return True

    log("## user %s exists" % (check))
    return False
Ejemplo n.º 9
0
def _submit(provider, requester, job, required_confs=1):
    """Share the job's folders with the provider and submit the job.

    Returns whatever ``job.Ebb.submit_job`` returns; exits the process on
    any unrecoverable error.
    """
    job.Ebb.is_requester_valid(requester)
    job.Ebb.is_eth_account_locked(requester)
    provider = cfg.w3.toChecksumAddress(provider)
    provider_info = job.Ebb.get_provider_info(provider)
    # short per-requester suffix used to keep shared folder names unique
    requester_name = hashlib.md5(requester.lower().encode("utf-8")).hexdigest()[:16]
    log(f"==> provider_fid=[magenta]{provider_info['f_id']}")
    try:
        _git.is_repo(job.folders_to_share)
    except Exception as e:  # was a bare `except:`; keep the traceback visible
        print_tb(e)
        sys.exit(1)

    for idx, folder in enumerate(job.folders_to_share):
        if not isinstance(folder, bytes):
            if idx != 0:
                print("")

            log(f"==> folder_to_share={folder}")
            try:
                _git.initialize_check(folder)
                _git.commit_changes(folder)
                folder_hash = initialize_folder(folder, requester_name)
            except Exception as e:
                print_tb(e)
                sys.exit(1)

            # NOTE(review): assumes the first folder is a path, not bytes;
            # otherwise `job_key` stays unbound until submit_job — confirm
            if idx == 0:
                job_key = folder_hash

            # required to send string as bytes
            value = cfg.w3.toBytes(text=folder_hash)
            job.code_hashes.append(value)
            job.code_hashes_str.append(value.decode("utf-8"))
            _folder = f"{folder_hash}_{requester_name}"
            if not share_single_folder(_folder, provider_info["f_id"]):
                sys.exit(1)

            time.sleep(0.25)
        else:
            # bytes entries are precomputed code hashes, used as-is
            code_hash = folder
            job.code_hashes.append(code_hash)
            job.code_hashes_str.append(code_hash.decode("utf-8"))

    job.price, *_ = job.cost(provider, requester)
    try:
        return job.Ebb.submit_job(provider, job_key, job, requester, required_confs=required_confs)
    except QuietExit:
        sys.exit(1)
    except Exception as e:
        print_tb(e)
        sys.exit(1)
Ejemplo n.º 10
0
def upload(folder_to_share, tmp_dir, job_key_flag=False):
    """Compress *folder_to_share* and upload it, reusing an existing upload.

    Returns a tuple ``(key, is_already_uploaded, tar_hash, tar_hashes)``.
    """
    tar_hashes = {}
    is_already_uploaded = False
    log(f"==> job_key_flag={job_key_flag}, tar.gz file is inside the base folder"
        )
    parent_dir = os.path.dirname(folder_to_share)
    tar_hash, _ = compress_folder(folder_to_share, is_exclude_git=True)
    tar_hashes[folder_to_share] = tar_hash
    target_dir = f"{parent_dir}/{tar_hash}"
    src_tar = f"{parent_dir}/{tar_hash}.tar.gz"
    dst_tar = f"{target_dir}/{tar_hash}.tar.gz"
    mkdir(target_dir)
    shutil.move(src_tar, dst_tar)
    if job_key_flag:
        # the job-key folder also carries the job's meta data file
        shutil.copyfile(f"{tmp_dir}/meta_data.json",
                        f"{target_dir}/meta_data.json")

    is_file_exist = _list(tar_hash, is_folder=True)
    if is_file_exist:
        log(f"## requested folder {tar_hash} is already uploaded", "bold blue")
        log(is_file_exist, "bold green")
        key = is_file_exist.partition("\n")[0].split()[0]
        is_already_uploaded = True
    else:
        key = _upload(parent_dir, tar_hash, is_folder=True)
        log(f"{_list(tar_hash)}", "bold green")

    _remove(f"{parent_dir}/{tar_hash}")  # created .tar.gz file is removed
    return key, is_already_uploaded, tar_hash, tar_hashes
Ejemplo n.º 11
0
def commit_changes(path):
    """Stage and commit any working-tree changes of the git repo at *path*."""
    with cd(path):
        repo = git.Repo(".", search_parent_directories=True)
        try:
            # `.git/refs/heads` listing is "total 0" when no commit exists yet
            output = run(["ls", "-l", ".git/refs/heads"])
        except Exception as e:
            raise Exception("E: Problem on git.commit_changes()") from e

        if output == "total 0":
            logging.warning("There is no first commit")
        else:
            # unstaged modifications relative to the index
            changed_files = [item.a_path for item in repo.index.diff(None)]
            if len(changed_files) > 0:
                log("==> adding changed files:")
                for _file in changed_files:
                    log(_file, "bold")

                repo.git.add(A=True)

            # nothing staged against HEAD => working tree already committed
            if len(repo.index.diff("HEAD")) == 0:
                log(f"==> {path}\n    is committed with the given changes using git"
                    )

        try:
            # stage everything and commit if anything is staged
            add_all(repo)
        except Exception as e:
            log(f"E: {e}")
            raise e
Ejemplo n.º 12
0
def add_all(repo=None):
    """Stage every change in the working tree and commit if anything is staged.

    When *repo* is None the repository enclosing the current directory is
    used. Prints the traceback and re-raises on failure.
    """
    try:
        if not repo:
            repo = git.Repo(".", search_parent_directories=True)

        log("all files in the entire working tree are updated in the Git repository ",
            end="")
        repo.git.add(A=True)
        log(ok())
        try:
            #: git diff HEAD --name-only | wc -l
            changed_file_len = len(
                repo.index.diff("HEAD",
                                ignore_blank_lines=True,
                                ignore_space_at_eol=True))
        except Exception:  # was a bare `except:`
            # if it is the first commit HEAD might not exist
            changed_file_len = len(
                repo.git.diff("--cached", "--ignore-blank-lines",
                              "--ignore-space-at-eol",
                              "--name-only").split("\n"))

        if changed_file_len > 0:
            log("Record changes to the repository ", end="")
            repo.git.commit("-m", "update")
            log(ok())
    except Exception as e:
        print_tb(e)
        raise  # keep the original traceback (was `raise e`)
Ejemplo n.º 13
0
def share_single_folder(folder_name, f_id) -> bool:
    """Share *folder_name* over owncloud with user *f_id*.

    Returns True on success (or when already shared), False on any error.
    """
    try:
        if config.oc.is_shared(folder_name):
            log("## Requester folder is already shared")
            return True

        # grant full permissions (perms=31) to the remote user
        config.oc.share_file_with_user(folder_name, f_id, remote_user=True, perms=31)
        log(f"sharing with [yellow]{f_id}[/yellow] {ok()}", "bold")
        return True
    except Exception as e:
        print_tb(e)
        return False
Ejemplo n.º 14
0
def main():
    """Start the IPFS daemon unless one is already running."""
    try:
        config.env = config.ENV()
    except Exception as e:
        print_tb(e)
        log("E: env.IPFS_LOG is not set")
        sys.exit(1)

    if is_ipfs_on():
        # exit code 100 signals "already running" to the caller
        log(f"## [green]IPFS[/green] daemon is already running {ok()}")
        sys.exit(100)

    cfg.ipfs.remove_lock_files()
    run()
Ejemplo n.º 15
0
def main():
    """Submit a sample job over gdrive to a hard-coded provider."""
    job = Job()
    Ebb = cfg.Ebb
    job.base_dir = f"{env.HOME}/test_eblocbroker"
    job.source_code_dir = f"{job.base_dir}/source_code"
    job.data_1_dir = f"{job.base_dir}/datasets/BL06-camel-sml"

    job.folders_to_share.append(job.source_code_dir)
    job.folders_to_share.append(job.data_1_dir)

    path_from = f"{job.base_dir}/datasets"
    path_to = f"{env.LINK_PATH}/base/data_link"
    # link every data folder (all entries after the source code) into data_link
    check_linked_data(path_from, path_to, job.folders_to_share[1:])

    # IMPORTANT: consider ignoring to push .git into the submitted folder
    # job.generate_git_repos()
    job.clean_before_submit()

    provider = "0xD118b6EF83ccF11b34331F1E7285542dDf70Bc49"  # home2-vm
    account_id = 1
    _from = Ebb.w3.toChecksumAddress(Ebb.w3.eth.accounts[account_id])
    job = gdrive.submit(provider, _from, job)
    job.run_time = [5]
    job.cores = [1]
    job.data_transfer_ins = [1, 1]
    job.data_transfer_out = 1

    job.storage_ids = [StorageID.GDRIVE, StorageID.GDRIVE]
    job.cache_types = [CacheType.PRIVATE, CacheType.PUBLIC]
    job.storage_hours = [1, 1]
    job.data_prices_set_block_numbers = [0, 0]

    for folder_to_share in job.folders_to_share:
        tar_hash = job.foldername_tar_hash[folder_to_share]
        # required to send string as bytes == str_data.encode('utf-8')
        job.code_hashes.append(Ebb.w3.toBytes(text=tar_hash))

    # the job key is the key of the first (source code) folder
    tar_hash = job.foldername_tar_hash[job.folders_to_share[0]]
    job_key = job.keys[tar_hash]
    try:
        job_price, _cost = job.cost(provider, _from)
        tx_hash = Ebb.submit_job(provider,
                                 job_key,
                                 job_price,
                                 job,
                                 requester=_from)
    except Exception as e:
        raise e

    for k, v in job.tar_hashes.items():
        log(f"{k} => {v}")

    log(f"==> code_hashes={job.code_hashes}")
    if job.analyze_tx_status(tx_hash):
        log("SUCCESS")
    else:
        log("FAILED")
Ejemplo n.º 16
0
def diff_patch(path: Path, source_code_hash, index, target_path):
    """Apply diff patch.

    "git diff HEAD" for detecting all the changes:
    Shows all the changes between the working directory and HEAD (which includes changes in the index).
    This shows all the changes since the last commit, whether or not they have been staged for commit
    or not.

    NOTE(review): returns ``False`` on git errors but a 3-tuple
    ``(patch_upload_name, patch_file, is_file_empty)`` on success — callers
    must handle both shapes.
    """
    is_file_empty = False
    with cd(path):
        log(f"==> Navigate to {path}")
        """TODO
        if not is_initialized(path):
            upload everything, changed files!
        """
        repo = git.Repo(".", search_parent_directories=True)
        try:
            repo.git.config("core.fileMode",
                            "false")  # git config core.fileMode false
            # first ignore deleted files not to be added into git
            run([env.BASH_SCRIPTS_PATH / "git_ignore_deleted.sh"])
            head_commit_id = repo.rev_parse("HEAD")
            sep = "~"  # separator in between the string infos
            patch_name = f"patch{sep}{head_commit_id}{sep}{source_code_hash}{sep}{index}.diff"
        except:
            return False

        patch_upload_name = f"{patch_name}.gz"  # file to be uploaded as zip
        patch_file = f"{target_path}/{patch_upload_name}"
        logging.info(f"patch_path={patch_upload_name}")
        try:
            repo.git.add(A=True)
            diff_and_gzip(patch_file)
        except:
            return False

    time.sleep(0.25)
    if is_gzip_file_empty(patch_file):
        log("==> Created patch file is empty, nothing to upload")
        with suppress(Exception):
            # NOTE(review): `patch_upload_name` is a bare filename removed
            # relative to the CWD after leaving cd(path) — confirm intended
            os.remove(patch_upload_name)

        os.remove(patch_file)
        is_file_empty = True

    return patch_upload_name, patch_file, is_file_empty
Ejemplo n.º 17
0
def initialize_folder(folder_to_share, requester_name) -> str:
    """Compress *folder_to_share* and upload the tarball into EUDAT B2DROP.

    Returns the tar hash, which prefixes the remote folder name.
    """
    dir_path = os.path.dirname(folder_to_share)
    tar_hash, *_ = compress_folder(folder_to_share)
    tar_source = f"{dir_path}/{tar_hash}.tar.gz"
    try:
        config.oc.mkdir(f"{tar_hash}_{requester_name}")
    except Exception as e:
        # presumably a "405" response means the remote folder already
        # exists — TODO confirm against the owncloud client behavior
        if "405" not in str(e):
            if not os.path.exists(f"{env.OWNCLOUD_PATH}/{tar_hash}_{requester_name}"):
                try:
                    # fall back to creating it in the locally-mounted path
                    os.makedirs(f"{env.OWNCLOUD_PATH}/{tar_hash}_{requester_name}")
                except Exception as e:
                    raise e
            else:
                log("==> folder is already created")
        else:
            log("==> folder is already created")

    try:
        tar_dst = f"{tar_hash}_{requester_name}/{tar_hash}.tar.gz"
        log("## uploading into [green]EUDAT B2DROP[/green] this may take some time depending on the file size...")
        is_already_uploaded = False
        with suppress(Exception):
            # File is first time created
            file_info = config.oc.file_info(f"./{tar_dst}")
            size = calculate_size(tar_source, _type="bytes")
            log(file_info, "bold")
            if float(file_info.attributes["{DAV:}getcontentlength"]) == size:
                # check is it already uploaded or not via its file size
                log(f"## {tar_source} is already uploaded into [green]EUDAT B2DROP")
                is_already_uploaded = True

        if not is_already_uploaded:
            config.oc.put_file(f"./{tar_dst}", tar_source)

        os.remove(tar_source)
    except Exception as e:
        if type(e).__name__ == "HTTPResponseError":
            try:
                # fall back to a direct copy into the mounted owncloud path
                shutil.copyfile(tar_source, f"{env.OWNCLOUD_PATH}/{tar_dst}")
            except Exception as e:
                raise e
        else:
            raise Exception("oc could not connected in order to upload the file")  # noqa

    return tar_hash
Ejemplo n.º 18
0
def test_update_provider():
    """Provider price updates must only take effect per commitment window."""
    mine(5)
    provider_registered_bn = register_provider()
    fid = "*****@*****.**"
    ebb.updateProviderInfo(GPG_FINGERPRINT, provider_email, fid, ipfs_address,
                           {"from": accounts[0]})
    log(ebb.getUpdatedProviderPricesBlocks(accounts[0]))
    available_core = 64
    prices = [2, 2, 2, 2]
    ebb.updateProviderPrices(available_core, COMMITMENT_BLOCK_NUM, prices,
                             {"from": accounts[0]})
    prices_set_block_number = ebb.getUpdatedProviderPricesBlocks(
        accounts[0])[1]
    provider_info = ebb.getProviderInfo(accounts[0], prices_set_block_number)
    # all four prices were set to 2 above
    assert 2 == provider_info[1][2] == provider_info[1][3] == provider_info[1][
        4] == provider_info[1][5]
    available_core = 128
    ebb.updateProviderPrices(available_core, COMMITMENT_BLOCK_NUM, prices,
                             {"from": accounts[0]})

    # repeated updates within the same window overwrite the pending prices
    prices_set_block_number = ebb.getUpdatedProviderPricesBlocks(
        accounts[0])[1]
    assert ebb.getProviderInfo(accounts[0],
                               prices_set_block_number)[1][0] == 128

    available_core = 16
    ebb.updateProviderPrices(available_core, COMMITMENT_BLOCK_NUM, prices,
                             {"from": accounts[0]})

    prices_set_block_number = ebb.getUpdatedProviderPricesBlocks(
        accounts[0])[1]
    assert ebb.getProviderInfo(accounts[0],
                               prices_set_block_number)[1][0] == 16
    # advance past the commitment window so the next update lands in a new slot
    mine(cfg.BLOCK_DURATION_1_HOUR)

    available_core = 32
    ebb.updateProviderPrices(available_core, COMMITMENT_BLOCK_NUM, prices,
                             {"from": accounts[0]})

    log(ebb.getUpdatedProviderPricesBlocks(accounts[0]))
    assert ebb.getUpdatedProviderPricesBlocks(
        accounts[0])[2] == COMMITMENT_BLOCK_NUM * 2 + provider_registered_bn

    provider_price_info = ebb.getProviderInfo(accounts[0], 0)
    block_read_from = provider_price_info[0]
    assert block_read_from == COMMITMENT_BLOCK_NUM + provider_registered_bn
Ejemplo n.º 19
0
def initialize_check(path):
    """Validate if .git/ folder exist within the target folder.

    Runs ``git init`` (master as the initial branch) and stages everything
    when the folder is not yet a git repository.
    """
    with cd(path):
        if is_initialized(path):
            return

        try:
            log(f"## git_repo={path}")
            log("Creating an empty Git repository ", end="")
            run(["git", "init", "--initial-branch=master"])
            log(ok())
            add_all()
        except Exception as e:
            log(f"E: {e}")
            raise e
Ejemplo n.º 20
0
def _upload_results(encoded_share_token, output_file_name):
    r"""Upload results into Eudat using curl.

    * How to upload files into shared b2drop.eudat(owncloud) repository using curl?
    __ https://stackoverflow.com/a/44556541/2402577

    * commands(s):
    curl -X PUT -H \'Content-Type: text/plain\' -H \'Authorization: Basic \'$encoded_share_token\'==\' \
        --data-binary \'@result-\'$providerID\'-\'$index\'.tar.gz\' \
        https://b2drop.eudat.eu/public.php/webdav/result-$providerID-$index.tar.gz

    curl --fail -X PUT -H 'Content-Type: text/plain' -H 'Authorization: Basic \
        'SjQzd05XM2NNcFoybk.Write'==' --data-binary '@0b2fe6dd7d8e080e84f1aa14ad4c9a0f_0.txt' \
        https://b2drop.eudat.eu/public.php/webdav/result.txt
    """
    cmd = [
        "curl",
        "--fail",
        "-X",
        "PUT",
        "-H",
        "Content-Type: text/plain",
        "-H",
        f"Authorization: Basic {encoded_share_token}",
        "--data-binary",
        f"@{output_file_name}",
        f"https://b2drop.eudat.eu/public.php/webdav/{output_file_name}",
        "-w",
        "%{http_code}\n"
        # "-v"  # verbose
    ]

    # some arguments requires "" for curl to work
    # NOTE: cmd_temp is a pretty-printed copy used only for logging a
    # copy-pasteable command; the unmodified `cmd` list is what runs
    cmd_temp = cmd.copy()
    cmd_temp[5] = f'"{cmd[5]}" \   \n    '
    cmd_temp[7] = f'"{cmd[7]}" \   \n    '
    cmd_temp[9] = f'"{cmd[9]}" \   \n    '
    cmd_temp[10] = f'"{cmd[10]}" \ \n    '
    cmd_str = " ".join(cmd_temp)
    log(f"==> cmd:\n{cmd_str}")
    return popen_communicate(cmd)
Ejemplo n.º 21
0
def login(user, password_path: Path, fname: str) -> None:
    """Log into owncloud, reusing a pickled session object when available.

    Falls back to a fresh login via ``_login`` when *fname* does not exist
    or the dumped session is no longer valid.
    """
    if not user:
        log("E: Given user is empty string")
        terminate()

    if not os.path.isfile(fname):
        _login(fname, user, password_path)
        return

    # the original left this file handle open; close it deterministically
    with open(fname, "rb") as f:
        config.oc = pickle.load(f)

    try:
        status_str = (
            f"[bold]Login into owncloud from the dumped_object=[magenta]{fname}[/magenta] [yellow]...[/yellow] "
        )
        with cfg.console.status(status_str):
            # probe the restored session; raises when it has expired
            config.oc.get_config()

        log(f" {status_str} {ok()}")
    except subprocess.CalledProcessError as e:
        logging.error(f"FAILED. {e.output.decode('utf-8').strip()}")
        _login(fname, user, password_path)
Ejemplo n.º 22
0
def is_initialized(path) -> bool:
    """Check whether given the path is initialized with git.

    __ https://stackoverflow.com/a/16925062/2402577
    """
    with cd(path):
        try:
            *_, output, err = popen_communicate(
                ["git", "rev-parse", "--is-inside-work-tree"])  # noqa
            if output == "true":
                # confirm the path itself hosts the repository root
                git.Repo(".", search_parent_directories=False)
                return True

            return output == "true"
        except InvalidGitRepositoryError as e:
            log(f"warning: InvalidGitRepositoryError at path {e}")
            return False
        except Exception as e:
            log(f"warning: {e}")
            return False
Ejemplo n.º 23
0
def _login(fname, user, password_path) -> None:
    """Log into EUDAT B2DROP (owncloud) and pickle the session into *fname*.

    Retries on connection timeouts; terminates the process when credentials
    are rejected or all attempts are exhausted.
    """
    sleep_duration = 15
    config.oc = owncloud.Client("https://b2drop.eudat.eu/")
    with open(password_path, "r") as content_file:
        password = content_file.read().strip()

    for _ in range(config.RECONNECT_ATTEMPTS):
        try:
            status_str = f"Trying to login into owncloud user={user} ..."
            with cfg.console.status(status_str):
                # may take few minutes to connect
                config.oc.login(user, password)

            password = ""  # drop the clear-text password as soon as possible
            # context manager instead of manual open/close
            with open(fname, "wb") as f:
                pickle.dump(config.oc, f)

            log(f"  {status_str} {ok()}")
            return
        except Exception as e:
            log(str(e))
            if "Errno 110" in str(e) or "Connection timed out" in str(e):
                log(f"warning: sleeping for {sleep_duration} seconds to overcome the max retries that exceeded")
                sleep_timer(sleep_duration)
            else:
                terminate("Could not connect into [blue]eudat using config.oc.login()[/blue]")

    logging.error("E: user is None object")
    terminate()
Ejemplo n.º 24
0
def register_provider(price_core_min=1):
    """Register accounts[0] as a provider and verify its ORCID.

    Returns the block number at which the provider was registered.
    """
    ebb = config.ebb
    mine(1)
    web3.eth.defaultAccount = accounts[0]
    prices = [price_core_min, price_data_transfer, price_storage, price_cache]
    tx = config.ebb.registerProvider(
        GPG_FINGERPRINT,
        provider_email,
        fid,
        ipfs_address,
        available_core,
        prices,
        COMMITMENT_BLOCK_NUM,
        {"from": accounts[0]},
    )
    provider_registered_bn = tx.block_number
    log(
        f"block number when the provider is registered={provider_registered_bn}",
        "bold")
    # the emitted fingerprint carries zero padding; strip before comparing
    gpg_fingerprint = remove_zeros_gpg_fingerprint(
        tx.events["LogProviderInfo"]["gpgFingerprint"])
    assert gpg_fingerprint == GPG_FINGERPRINT
    log(f"==> gpg_fingerprint={gpg_fingerprint}")
    orc_id = "0000-0001-7642-0442"
    orc_id_as_bytes = str.encode(orc_id)
    assert not ebb.isOrcIDVerified(
        accounts[0]), "orc_id initial value should be false"
    ebb.authenticateOrcID(accounts[0], orc_id_as_bytes, {"from": accounts[0]})
    assert ebb.isOrcIDVerified(accounts[0]), "isOrcIDVerified is failed"

    # orc_id should only set once for the same user
    with brownie.reverts():
        ebb.authenticateOrcID(accounts[0], orc_id_as_bytes,
                              {"from": accounts[0]})

    # stored orc_id is null-padded bytes; strip padding before comparing
    assert orc_id == ebb.getOrcID(accounts[0]).decode("utf-8").replace(
        "\x00", ""), "orc_id set false"
    return provider_registered_bn
Ejemplo n.º 25
0
def run():
    """Run ipfs daemon.

    cmd: ipfs daemon  # --mount
    __ https://stackoverflow.com/a/8375012/2402577
    __ https://gist.github.com/SomajitDey/25f2f7f2aae8ef722f77a7e9ea40cc7c#gistcomment-4022998
    """
    IPFS_BIN = "/usr/local/bin/ipfs"
    log("==> Running [green]IPFS[/green] daemon")
    # make sure the log file exists before the daemon starts appending to it
    if not os.path.isfile(config.env.IPFS_LOG):
        open(config.env.IPFS_LOG, "a").close()

    with daemon.DaemonContext():
        if cfg.IS_PRIVATE_IPFS:
            # LIBP2P_FORCE_PNET=1 makes the daemon refuse to start without
            # a private-network key
            _env = {
                "LIBP2P_FORCE_PNET": "1",
                "IPFS_PATH": Path.home().joinpath(".ipfs")
            }
        else:
            _env = {"IPFS_PATH": Path.home().joinpath(".ipfs")}

        popen_communicate([IPFS_BIN, "daemon", "--routing=none"],
                          stdout_fn=config.env.IPFS_LOG,
                          _env=_env)
Ejemplo n.º 26
0
def submit(provider, requester, job, required_confs=1):
    """Submit *job* to *provider* and report the resulting transaction.

    Returns the tx hash, or None when the submission itself failed.
    """
    tx_hash = None  # was unbound when _submit() raised -> NameError on return
    try:
        tx_hash = _submit(provider, requester, job, required_confs)
        if required_confs >= 1:
            tx_receipt = get_tx_status(tx_hash)
            if tx_receipt["status"] == 1:
                processed_logs = Ebb._eBlocBroker.events.LogJob().processReceipt(tx_receipt, errors=DISCARD)
                log(vars(processed_logs[0].args))
                try:
                    log(f"{ok()} [bold]job_index={processed_logs[0].args['index']}")
                except IndexError:
                    # no LogJob event in the receipt => tx was reverted
                    log(f"E: Tx({tx_hash}) is reverted")
        else:
            log(f"tx_hash={tx_hash}", "bold")
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)

    return tx_hash
Ejemplo n.º 27
0
def share_folder(folder_to_share,
                 provider_to_share,
                 tmp_dir,
                 job_key_flag=False):
    """Upload *folder_to_share* and grant the provider write access on gdrive.

    Returns ``(key, tar_hash, tar_hashes)`` from the upload step.
    """
    log(f"## folder_to_share={folder_to_share}")
    log(f"## provider_to_share=[magenta]{provider_to_share}")
    key, *_, tar_hash, tar_hashes = upload(folder_to_share, tmp_dir,
                                           job_key_flag)
    share_cmd = [
        "gdrive", "share", key, "--role", "writer", "--type", "user",
        "--email", provider_to_share
    ]
    log(f"share_output=[magenta]{run(share_cmd)}", "bold")
    return key, tar_hash, tar_hashes
Ejemplo n.º 28
0
def upload_results(encoded_share_token, output_file_name, path, max_retries=1):
    """Implement wrapper for the _upload_results function.

    Retries the curl upload up to *max_retries* times; returns True on
    success, otherwise raises.
    """
    with cd(path):
        for _attempt in range(max_retries):
            p, output, error = _upload_results(encoded_share_token, output_file_name)
            if error:
                log(error)

            if "warning: Couldn't read data from file" in error:
                raise Exception("E: EUDAT repository did not successfully uploaded")

            failed = p.returncode != 0 or "<d:error" in output
            if not failed:  # success on upload
                return True

            log("E: EUDAT repository did not successfully uploaded")
            log(f"   curl is failed. {p.returncode} => {br(error)} {output}")
            time.sleep(1)  # wait 1 second for next step retry to upload

        raise Exception(f"Upload results into cloud failed after {max_retries} tries")
Ejemplo n.º 29
0
def apply_patch(git_folder, patch_file, is_gpg=False):
    """Apply git patch.

    output = repo.git.apply("--reject", "--whitespace=fix",
               "--ignore-space-change", "--ignore-whitespace", "--verbose", patch_file)

    __ https://stackoverflow.com/a/15375869/2402577
    """
    if is_gpg:
        cfg.ipfs.decrypt_using_gpg(patch_file)

    with cd(git_folder):
        base_name = path_leaf(patch_file)
        log(f"==> [magenta]{base_name}")
        apply_cmd = [
            "git",
            "apply",
            "--reject",
            "--whitespace=fix",
            "--ignore-space-change",
            "--ignore-whitespace",
            "--verbose",
            patch_file,
        ]  # ,is_quiet=True,
        # run a dry "--summary" pass first, then the real apply
        summary_cmd = apply_cmd[:3] + ["--summary"] + apply_cmd[3:]
        log(run(summary_cmd))
        log(run(apply_cmd))
Ejemplo n.º 30
0
#!/usr/bin/env python3

import sys

from web3.logs import DISCARD

from broker import cfg
from broker.utils import log

if __name__ == "__main__":
    # tx hash may be given on the command line (decoded as LogJob);
    # otherwise fall back to a hard-coded example tx decoded as LogReceipt
    if len(sys.argv) == 2:
        tx_hash = str(sys.argv[1])
        event = "LogJob"
    else:
        tx_hash = "0xe7f0bdc249458d36105120cf1a0fa5036a9368c5fd13aa37448dae5993d92a33"
        event = "LogReceipt"

    tx_receipt = cfg.w3.eth.get_transaction_receipt(tx_hash)
    if event == "LogJob":
        # DISCARD suppresses warnings for logs that do not match the event ABI
        processed_logs = cfg.Ebb.eBlocBroker.events.LogJob().processReceipt(
            tx_receipt, errors=DISCARD)
        log(vars(processed_logs[0].args))
        log("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-")

    if event == "LogReceipt":
        processed_logs = cfg.Ebb.eBlocBroker.events.LogReceipt(
        ).processReceipt(tx_receipt, errors=DISCARD)
        log(vars(processed_logs[0].args))
        log("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-")