Example #1
def set_folder_permission(path, user_name, slurm_user):
    # block group members and others from read/write/execute on the folder
    run(["sudo", "chmod", "700", path])

    # give read/write/execute access to the user on the given folder
    give_rwe_access(user_name, path)

    # give read/write/execute access to the slurm user on the given folder
    give_rwe_access(slurm_user, path)

    # insert the user into the eblocbroker group and lock its password (-L)
    # cmd: sudo usermod -L -a -G eblocbroker ebdf86b0ad4765fda68158489cec9908
    run(["sudo", "usermod", "-L", "-a", "-G", "eblocbroker", user_name])
Example #2
    def eudat_download_folder(self, results_folder_prev, folder_name):
        """Download corresponding folder from the EUDAT.

        Always assumes the job is sent as a .tar.gz file.
        """
        # TODO: check whether the hash of the downloaded file is correct
        cached_tar_file = f"{results_folder_prev}/{folder_name}.tar.gz"
        log("#> downloading [green]output.zip[/green] for:", end="")
        log(f"{folder_name} => {cached_tar_file} ", "bold")
        key = folder_name
        share_key = f"{folder_name}_{self.requester_id[:16]}"
        for attempt in range(1):
            try:
                log("## Trying [blue]wget[/blue] approach...")
                token = self.share_id[share_key]["share_token"]
                if token:
                    download_fn = f"{cached_tar_file.replace('.tar.gz', '')}_{self.requester_id}.download"
                    cmd = [
                        "wget",
                        "-O",
                        download_fn,
                        "-c",
                        f"https://b2drop.eudat.eu/s/{token}/download",
                        "-q",
                        "--show-progres",
                        "--progress=bar:force",
                    ]
                    log(" ".join(cmd), is_code=True, color="yellow")
                    run(cmd)
                    with cd(results_folder_prev):
                        run(["unzip", "-o", "-j", download_fn])

                    _remove(download_fn)
                    self.tar_downloaded_path[folder_name] = cached_tar_file
                    log(f"## download file from eudat {ok()}")
                    return
            except Exception:
                log("E: Failed to download eudat file via wget.\nTrying config.oc.get_file() approach...")
                if config.oc.get_file(f"/{key}/{folder_name}.tar.gz", cached_tar_file):
                    self.tar_downloaded_path[folder_name] = cached_tar_file
                    log(ok())
                    return
                else:
                    logging.error(
                        f"E: Something is wrong, oc could not retrieve the file [attempt:{attempt}]"
                    )

        raise Exception("Eudat download error")
Example #3
    def remove_source_code(self):
        """Client's initial downloaded files are removed."""
        timestamp_file = f"{self.results_folder_prev}/timestamp.txt"
        try:
            cmd = [
                "find", self.results_folder, "-type", "f", "!", "-newer",
                timestamp_file
            ]
            files_to_remove = run(cmd)
            if files_to_remove:
                log(f"## Files to be removed: \n{files_to_remove}\n")
        except Exception as e:
            print_tb(e)
            sys.exit()

        run([
            "find", self.results_folder, "-type", "f", "!", "-newer",
            timestamp_file, "-delete"
        ])
Example #4
def remove_user(user_name, user_dir=None):
    """Remove user from Slurm.

    # for test purposes
    sudo userdel $USERNAME
    sudo rm -rf $BASEDIR/$USERNAME
    sacctmgr remove user where user=$USERNAME --immediate
    """
    run(["sudo", "userdel", "--force", user_name])
    cmd = [
        "sacctmgr", "remove", "user", "where", f"user={user_name}",
        "--immediate"
    ]
    p, output, *_ = popen_communicate(cmd)
    if p.returncode != 0 and "Nothing deleted" not in output:
        raise Exception(f"E: sacctmgr remove error: {output}")

    # remove_user(user)
    if user_dir:
        _remove(user_dir)
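popen_communicate() is imported from elsewhere in the code base; the call above only relies on it returning the finished process object and its decoded output. A minimal sketch under that assumption:

import subprocess

def popen_communicate(cmd):
    # assumed behavior only: run cmd, wait for it to finish, and return
    # the process object plus its decoded stdout and stderr
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    return p, stdout.decode(), stderr.decode()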
Example #5
def is_oc_mounted() -> bool:
    output = None
    try:
        output = run(["findmnt", "--noheadings", "-lo", "source", env.OWNCLOUD_PATH])
    except Exception:
        return False

    if "b2drop.eudat.eu/remote.php/webdav/" not in output:
        print(
            "Mount a folder in order to access EUDAT(https://b2drop.eudat.eu/remote.php/webdav/).\n"
            "Please do: \n"
            "sudo mkdir -p /oc \n"
            "sudo mount.davfs https://b2drop.eudat.eu/remote.php/webdav/ /oc"
        )
        return False
    else:
        return True
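A possible call site (illustrative only), aborting when the B2DROP WebDAV share is not mounted at env.OWNCLOUD_PATH:

import sys

if not is_oc_mounted():
    sys.exit(1)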
Example #6
def user_add(user_address, basedir, slurm_user):
    user_address = user_address.lower()
    log(f"#> adding user=[magenta]{user_address}[/magenta]", end="")
    try:  # convert the ethereum user address into a 32-character md5 hash
        user_name = hashlib.md5(user_address.encode("utf-8")).hexdigest()
        log(f" | user_name={user_name}", "bold")
    except Exception as e:
        log()
        log(f"warning: user_address={user_address}")
        raise e

    user_dir = f"{basedir}/{user_name}"
    add_user_to_slurm(user_name)
    if username_check(user_name):
        run([
            "sudo", "useradd", "-d", user_dir, "-m", user_name, "--shell",
            "/bin/bash"
        ])
        log(f"{user_address} => {user_name}) is added as user", "yellow")
        try:
            set_folder_permission(user_dir, user_name, slurm_user)
            add_user_to_slurm(user_name)
            mkdir(f"{user_dir}/cache")
        except Exception:
            run(["sudo", "userdel", "--force", user_name])
    else:
        if not os.path.isdir(user_dir):
            log(f"{user_address} => {user_name} does not exist. Attempting to re-add the user",
                "bold yellow")
            run(["sudo", "userdel", "--force", user_name])
            run(["sudo", "useradd", "-d", user_dir, "-m", user_name])
            set_folder_permission(user_dir, user_name, slurm_user)
            log(f"{user_address} => {user_name} is created", "yellow")
            add_user_to_slurm(user_name)  # force to add user to slurm
            mkdir(f"{user_dir}/cache")
        else:
            log(f"{user_address} => {user_name} has already been created",
                "bold yellow")
Example #7
def _list(tar_hash, is_folder=False):
    r"""Query list from gdrive.

    cmd: run(['gdrive', 'list', '--query', 'name contains \'' + tar_hash + '.tar.gz' + '\'', '--no-header'])
    https://developers.google.com/drive/api/v3/reference/query-ref
    """
    if is_folder:
        filename = f"name='{tar_hash}'"
    else:
        filename = f"name='{tar_hash}.tar.gz'"

    output = run([
        "gdrive",
        "list",
        "--query",
        f"{filename} and trashed=false",
        "--no-header",
    ])

    return output
Example #8
    def get_data(self, key, _id, is_job_key=False):
        try:
            mime_type, name = self.get_data_init(key, _id, is_job_key)
        except Exception as e:
            print_tb(e)
            raise e

        if is_job_key:
            gdrive_info = self.pre_data_check(key)
            name = gdrive.get_file_info(gdrive_info, "Name")
            mime_type = gdrive.get_file_info(gdrive_info, "Mime")

        # folder is already stored by its source_code_hash
        source_code_hash = name.replace(".tar.gz", "")
        log(f"==> name={name}")
        log(f"==> mime_type=[magenta]{mime_type}")
        if _id == 0:
            # source code folder, ignore downloading result-*
            name = f"{name}.tar.gz"
            try:
                output = gdrive.get_file_id(key)
            except Exception as e:
                print_tb(e)
                raise e

            key = echo_grep_awk(output, name, "1")
            mime_type = "gzip"

        if "gzip" in mime_type:
            try:
                cmd = ["gdrive", "info", "--bytes", key, "-c", env.GDRIVE_METADATA]
                gdrive_info = subprocess_call(cmd, 10)
            except Exception as e:
                print_tb(e)
                raise e

            source_code_hash = gdrive.get_file_info(gdrive_info, "Md5sum")
            self.md5sum_dict[key] = source_code_hash
            log(f"==> md5sum={self.md5sum_dict[key]}")

            # received job is in a tar.gz archive
            self.folder_type_dict[source_code_hash] = "gzip"
            try:
                self.cache(_id, name, source_code_hash, key, is_job_key)
            except Exception as e:
                print_tb(e)
                raise e

            if is_job_key:
                target = self.results_folder
            else:
                target = f"{self.results_data_folder}/{source_code_hash}"
                mkdir(target)

            try:
                cache_folder = self.folder_path_to_download[source_code_hash]
                untar(f"{cache_folder}/{name}", target)
                self.remove_downloaded_file(source_code_hash, _id, f"{cache_folder}/{name}")
                return target
            except Exception as e:
                print_tb(e)
                raise e
        elif "folder" in mime_type:
            #: received job is in folder format
            self.folder_type_dict[source_code_hash] = "folder"
            try:
                self.cache(_id, name, source_code_hash, key, is_job_key)
            except Exception as e:
                raise e

            cache_folder = self.folder_path_to_download[source_code_hash]
            cmd = [
                "rsync",
                "-avq",
                "--partial-dir",
                "--omit-dir-times",
                f"{cache_folder}/{name}/",
                self.results_folder,
            ]
            try:
                output = run(cmd)
            except Exception as e:
                print_tb(e)
                raise e

            self.remove_downloaded_file(source_code_hash, _id, f"{cache_folder}/{name}/")
            tar_file = f"{self.results_folder}/{name}.tar.gz"
            try:
                untar(tar_file, self.results_folder)
                _remove(tar_file)
                return self.results_folder
            except Exception as e:
                print_tb(e)
                raise e
        else:
            raise Exception("Neither folder or gzip type given.")
Example #9
#!/usr/bin/env python3

import time
from contextlib import suppress

from broker import cfg
from broker.config import env
from broker.lib import run

if __name__ == "__main__":
    run(["python3", f"{env.EBLOCPATH}/_daemons/ganache_cli.py"])
    for count in range(10):
        time.sleep(2)
        with suppress(Exception):
            print(cfg.ipfs.connect_to_bootstrap_node())
            break
Example #10
def give_rwe_access(user, path):
    """Give Read/Write/Execute access to the given user on the give folder."""
    run(["sudo", "setfacl", "-R", "-m", f"user:{user}:rwx", path])
Example #11
def submit_ipfs(job: Job, is_pass=False, required_confs=1):
    Ebb = cfg.Ebb
    requester = Ebb.w3.toChecksumAddress(job.requester_addr)
    provider = Ebb.w3.toChecksumAddress(job.provider_addr)
    pre_check(job, requester)
    log("==> Attemptting to submit a job")
    main_storage_id = job.storage_ids[0]
    job.folders_to_share = job.paths
    check_link_folders(job.data_paths, job.registered_data_files, is_pass=is_pass)
    if main_storage_id == StorageID.IPFS:
        log("==> Submitting source code through [blue]IPFS[/blue]")
    elif main_storage_id == StorageID.IPFS_GPG:
        log("==> Submitting source code through [blue]IPFS_GPG[/blue]")
    else:
        log("E: Please provide IPFS or IPFS_GPG storage type for the source code")
        sys.exit(1)

    targets = []
    try:
        provider_info = Ebb.get_provider_info(provider)
    except Exception as e:
        print_tb(e)
        sys.exit(1)

    for idx, folder in enumerate(job.folders_to_share):
        if isinstance(folder, Path):
            target = folder
            if job.storage_ids[idx] == StorageID.IPFS_GPG:
                provider_gpg_fingerprint = provider_info["gpg_fingerprint"]
                if not provider_gpg_fingerprint:
                    log("E: Provider did not register any GPG fingerprint")
                    sys.exit(1)

                log(f"==> provider_gpg_finderprint={provider_gpg_finderprint}")
                try:
                    # target is updated
                    target = cfg.ipfs.gpg_encrypt(provider_gpg_fingerprint, target)
                    log(f"==> gpg_file={target}")
                except Exception as e:
                    print_tb(e)
                    sys.exit(1)

            try:
                ipfs_hash = cfg.ipfs.add(target)
                # ipfs_hash = ipfs.add(folder, True)  # True includes .git/
                run(["ipfs", "refs", ipfs_hash])
            except Exception as e:
                print_tb(e)
                sys.exit(1)

            if idx == 0:
                key = ipfs_hash

            job.code_hashes.append(ipfs_to_bytes32(ipfs_hash))
            job.code_hashes_str.append(ipfs_hash)
            log(f"==> ipfs_hash={ipfs_hash} | md5sum={generate_md5sum(target)}")
            if main_storage_id == StorageID.IPFS_GPG:
                # the created gpg file will be removed since it is already in ipfs
                targets.append(target)
        else:
            code_hash = folder
            if isinstance(code_hash, bytes):
                job.code_hashes.append(code_hash)
                job.code_hashes_str.append(code_hash.decode("utf-8"))

            # TODO: if its ipfs
            # if isinstance(code_hash, bytes):
            #     code_hash = code_hash.decode("utf-8")

            # if len(code_hash) == 32:
            #     value = cfg.w3.toBytes(text=code_hash)
            #     job.code_hashes.append(value)
            #     job.code_hashes_str.append(value.decode("utf-8"))
            # else:
            #     job.code_hashes.append(ipfs_to_bytes32(code_hash))
            #     job.code_hashes_str.append(code_hash)

        # if idx != len(job.folders_to_share) - 1:
        #     log("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-", "cyan")

    # requester inputs for testing purpose
    job.price, *_ = job.cost(provider, requester)
    try:
        tx_hash = Ebb.submit_job(provider, key, job, requester=requester, required_confs=required_confs)
        if required_confs >= 1:
            tx_receipt = get_tx_status(tx_hash)
            if tx_receipt["status"] == 1:
                processed_logs = Ebb._eBlocBroker.events.LogJob().processReceipt(tx_receipt, errors=DISCARD)
                try:
                    if processed_logs:
                        log("job_info:", "bold yellow")
                        log(vars(processed_logs[0].args))

                    for target in targets:
                        if ".tar.gz.gpg" in str(target):
                            _remove(target)
                except IndexError:
                    log(f"E: Tx={tx_hash} is reverted")
        else:
            pass
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)

    return tx_hash
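ipfs_to_bytes32() is imported from the broker utilities and is not shown here. A rough sketch of the usual CIDv0 conversion, assuming the helper strips the two-byte multihash prefix and keeps the 32-byte digest (the real helper may return a hex string instead):

import base58  # third-party package

def ipfs_to_bytes32(ipfs_hash: str) -> bytes:
    # a CIDv0 hash is base58-encoded; decoding yields 34 bytes whose first
    # two bytes (0x12, 0x20) are the multihash prefix before the sha256 digest
    return base58.b58decode(ipfs_hash)[2:]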
Example #12
def update_meta_data_gdrive(key, path):
    output = get_file_id(key)
    meta_data_key = fetch_grive_output(output, "meta_data.json")
    log(f"`gdrive update {meta_data_key} {path}` ", end="")
    run(["gdrive", "update", meta_data_key, path])
Example #13
def get_file_id(key):
    return run(
        ["gdrive", "list", "--query", f"'{key}' in parents and trashed=false"])
Example #14
def check_user(_user):
    output = run(["gdrive", "about"])
    user = output.partition("\n")[0].split(", ")[1]
    return user == _user, user
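Illustrative usage (hypothetical address), verifying that the authenticated gdrive account matches the expected one before proceeding:

is_match, current_user = check_user("alice@example.org")
if not is_match:
    print(f"E: gdrive is logged in as {current_user}")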