Example #1
def get_job_code_hashes(self,
                        provider,
                        job_key,
                        index,
                        received_block_number=0):
    """code_hashes of the completed job is obtained from its event."""
    # job_info["received_block_number"]
    self.set_job_received_block_number(received_block_number)
    try:
        event_filter = self._eBlocBroker.events.LogJob.createFilter(
            argument_filters={"provider": str(provider)},
            fromBlock=int(self.job_info["received_block_number"]),
            toBlock=self.to_block,
        )
        for logged_job in event_filter.get_all_entries():
            if logged_job.args["jobKey"] == job_key and logged_job.args[
                    "index"] == int(index):
                self.job_info.update(
                    {"code_hashes": logged_job.args["sourceCodeHash"]})
                break

        return self.job_info
    except Exception as e:
        logging.error(f"E: Failed to run get_job_source_code_hash(): {e}")
        raise e
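
A minimal usage sketch for the method above; `Ebb`, the provider address, and the job key are placeholders for an initialized contract wrapper and a real logged job:

# hypothetical usage; the values below are placeholders
job_info = Ebb.get_job_code_hashes(
    provider="0x...",  # provider address that emitted the LogJob event
    job_key="Qm...",   # job key of the submitted job
    index=0,
    received_block_number=0,
)
print(job_info["code_hashes"])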
Example #2
def _login(fname, user, password_path) -> None:
    sleep_duration = 15
    config.oc = owncloud.Client("https://b2drop.eudat.eu/")
    with open(password_path, "r") as content_file:
        password = content_file.read().strip()

    for _ in range(config.RECONNECT_ATTEMPTS):
        try:
            status_str = f"Trying to login into owncloud user={user} ..."
            with cfg.console.status(status_str):
                # may take a few minutes to connect
                config.oc.login(user, password)

            password = ""
            with open(fname, "wb") as f:
                pickle.dump(config.oc, f)
            log(f"  {status_str} {ok()}")
            return
        except Exception as e:
            log(str(e))
            if "Errno 110" in str(e) or "Connection timed out" in str(e):
                log(f"warning: sleeping for {sleep_duration} seconds to overcome the max retries that exceeded")
                sleep_timer(sleep_duration)
            else:
                terminate("Could not connect into [blue]eudat using config.oc.login()[/blue]")

    logging.error("E: user is None object")
    terminate()
Example #3
    def process_logged_job(self, idx):
        """Process logged job one by one."""
        self.received_block = []
        self.storage_duration = []
        wait_until_idle_core_available()
        self.is_provider_received_job = True
        console_ruler(idx, character="-")
        # sourceCodeHash = binascii.hexlify(logged_job.args['sourceCodeHash'][0]).decode("utf-8")[0:32]
        job_key = self.logged_job.args["jobKey"]
        index = self.logged_job.args["index"]
        self.job_block_number = self.logged_job["blockNumber"]
        self.cloud_storage_id = self.logged_job.args["cloudStorageID"]
        log(f"## job_key=[magenta]{job_key}[/magenta] | index={index}")
        log(
            f"received_block_number={self.job_block_number} \n"
            f"transactionHash={self.logged_job['transactionHash'].hex()} | "
            f"log_index={self.logged_job['logIndex']} \n"
            f"provider={self.logged_job.args['provider']} \n"
            f"received={self.logged_job.args['received']}",
            "bold yellow",
        )
        if self.logged_job["blockNumber"] > self.latest_block_number:
            self.latest_block_number = self.logged_job["blockNumber"]

        try:
            run([env.BASH_SCRIPTS_PATH / "is_str_valid.sh", job_key])
        except:
            logging.error("E: Filename contains an invalid character")
            return

        try:
            job_id = 0  # main job_id
            self.job_info = eblocbroker_function_call(
                partial(self.Ebb.get_job_info, env.PROVIDER_ID, job_key, index, job_id, self.job_block_number),
                max_retries=10,
            )
            cfg.Ebb.get_job_code_hashes(env.PROVIDER_ID, job_key, index, self.job_block_number)
            self.requester_id = self.job_info["job_owner"]
            self.job_info.update({"received_block": self.received_block})
            self.job_info.update({"storage_duration": self.storage_duration})
            self.job_info.update({"cacheType": self.logged_job.args["cacheType"]})
            cfg.Ebb.analyze_data(job_key, env.PROVIDER_ID)
            self.job_infos.append(self.job_info)
            log(f"==> requester={self.requester_id}")
            log("==> [yellow]job_info:", "bold")
            log(self.job_info)
        except Exception as e:
            print_tb(e)
            return

        for job in range(1, len(self.job_info["core"])):
            with suppress(Exception):
                self.job_infos.append(  # if workflow is given then add jobs into list
                    self.Ebb.get_job_info(env.PROVIDER_ID, job_key, index, job, self.job_block_number)
                )

        self.check_requested_job()
Example #4
    def eudat_download_folder(self, results_folder_prev, folder_name):
        """Download corresponding folder from the EUDAT.

        Always assumes job is sent as .tar.gz file
        """
        # TODO: check hash of the downloaded file is correct or not
        cached_tar_file = f"{results_folder_prev}/{folder_name}.tar.gz"
        log("#> downloading [green]output.zip[/green] for:", end="")
        log(f"{folder_name} => {cached_tar_file} ", "bold")
        key = folder_name
        share_key = f"{folder_name}_{self.requester_id[:16]}"
        for attempt in range(1):
            try:
                log("## Trying [blue]wget[/blue] approach...")
                token = self.share_id[share_key]["share_token"]
                if token:
                    download_fn = f"{cached_tar_file.replace('.tar.gz', '')}_{self.requester_id}.download"
                    cmd = [
                        "wget",
                        "-O",
                        download_fn,
                        "-c",
                        f"https://b2drop.eudat.eu/s/{token}/download",
                        "-q",
                        "--show-progres",
                        "--progress=bar:force",
                    ]
                    log(" ".join(cmd), is_code=True, color="yellow")
                    run(cmd)
                    with cd(results_folder_prev):
                        run(["unzip", "-o", "-j", download_fn])

                    _remove(download_fn)
                    self.tar_downloaded_path[folder_name] = cached_tar_file
                    log(f"## download file from eudat {ok()}")
                    return
            except:
                log("E: Failed to download eudat file via wget.\nTrying config.oc.get_file() approach...")
                if config.oc.get_file(f"/{key}/{folder_name}.tar.gz", cached_tar_file):
                    self.tar_downloaded_path[folder_name] = cached_tar_file
                    log(ok())
                    return
                else:
                    logging.error(f"E: Something is wrong, oc could not retrieve the file [attempt:{attempt}]")

        raise Exception("Eudat download error")
Example #5
    def search_token(self, f_id, share_list, folder_name, is_silent=False) -> bool:
        """Search for the share_token from the shared folder."""
        share_key = f"{folder_name}_{self.requester_id[:16]}"
        if not is_silent:
            log(f"## searching share tokens for the related source_code_folder={folder_name}"
                )

        for idx in range(len(share_list) - 1, -1, -1):
            # starts iterating from last item to the first one
            input_folder_name = share_list[idx]["name"]
            input_folder_name = input_folder_name[1:]  # removes '/' at the beginning
            share_id = share_list[idx]["id"]
            # input_owner = share_list[i]['owner']
            input_user = f"{share_list[idx]['user']}@b2drop.eudat.eu"
            if input_folder_name == share_key and input_user == f_id:
                self.share_token = str(share_list[idx]["share_token"])
                self.share_id[share_key] = {
                    "share_id": int(share_id),
                    "share_token": self.share_token,
                }
                if Ebb.mongo_broker.add_item_share_id(share_key, share_id, self.share_token):
                    # adding into mongoDB for future uses
                    log(f"#> Added into mongoDB {ok()}")
                else:
                    logging.error("E: Something is wrong, not added into mongoDB")

                log(f"==> name={folder_name} | share_id={share_id} | share_token={self.share_token} {ok()}"
                    )
                try:
                    config.oc.accept_remote_share(int(share_id))
                    log(f"## share_id={share_id} is accepted")
                except Exception as e:
                    print_tb(e)

                self.accept_flag += 1
                return True

        return False
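
A minimal usage sketch for the method above, assuming pyocclient's list_open_remote_share() returns the pending incoming shares with the "name", "id", "user", and "share_token" fields read in the loop; `driver`, the f_id, and the folder name are placeholders:

# hypothetical usage; `driver` stands for an instance of the surrounding class
share_list = config.oc.list_open_remote_share()  # pending incoming EUDAT shares
if driver.search_token("alice@b2drop.eudat.eu", share_list, "my_folder", is_silent=True):
    print(f"share_token={driver.share_token}")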
Example #6
def login(user, password_path: Path, fname: str) -> None:
    if not user:
        log("E: Given user is empty string")
        terminate()

    if os.path.isfile(fname):
        f = open(fname, "rb")
        config.oc = pickle.load(f)
        try:
            status_str = (
                f"[bold]Login into owncloud from the dumped_object=[magenta]{fname}[/magenta] [yellow]...[/yellow] "
            )
            with cfg.console.status(status_str):
                config.oc.get_config()

            log(f" {status_str} {ok()}")
        except subprocess.CalledProcessError as e:
            logging.error(f"FAILED. {e.output.decode('utf-8').strip()}")
            _login(fname, user, password_path)
    else:
        _login(fname, user, password_path)
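
A hedged usage sketch for login(); the account, password file, and pickle path below are placeholders:

# hypothetical values; the pickled owncloud.Client is reused on later runs
from pathlib import Path

login(
    user="alice@b2drop.eudat.eu",  # placeholder EUDAT/B2DROP account
    password_path=Path("/home/alice/.owncloud_password"),  # placeholder password file
    fname="/tmp/owncloud_client.pkl",  # where the authenticated client is pickled
)
# after a successful call, config.oc is a ready-to-use owncloud.Client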
Example #7
    def add(self, path: str, is_hidden=False) -> str:
        """Add file or folder into ipfs.

        :param is_hidden: if True, hidden files/folders such as .git are included
        """
        if os.path.isdir(path):
            cmd = ["ipfs", "add", "-r", "--quieter", "--progress", "--offline", path]
            if is_hidden:
                # include files that are hidden such as .git/.
                # Only takes effect on recursive add
                cmd.insert(3, "--hidden")
        elif os.path.isfile(path):
            cmd = ["ipfs", "add", "--quiet", "--progress", path]
        else:
            raise_error(f"E: Requested path {path} does not exist")

        for attempt in range(10):
            try:
                result_ipfs_hash = run_with_output(cmd)
                if not result_ipfs_hash:
                    logging.error(f"E: Generated new hash returned empty. Trying again. Try count: {attempt}")
                    time.sleep(5)
                    continue

                if not self.is_valid(result_ipfs_hash):
                    logging.error(f"E: Generated new hash is not valid. Trying again. Try count: {attempt}")
                    time.sleep(5)
                    continue

                break
            except:
                logging.error(f"E: Failed to generate ipfs hash. Trying again. Try count: {attempt}")
                time.sleep(5)
        else:
            raise Exception("Failed all the attempts to generate ipfs hash")

        return result_ipfs_hash
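
A minimal usage sketch for add(); the class name Ipfs and the target path are placeholders for the surrounding class and a real folder:

# hypothetical usage
ipfs = Ipfs()  # placeholder name for the class that defines add()
ipfs_hash = ipfs.add("/var/ebloc-broker/my_source_code", is_hidden=True)  # placeholder path
print(f"==> ipfs_hash={ipfs_hash}")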
Example #8
    def gpg_encrypt(self, user_gpg_fingerprint, target):
        is_delete = False
        if os.path.isdir(target):
            try:
                *_, encrypt_target = compress_folder(target)
                encrypted_file_target = f"{encrypt_target}.gpg"
                is_delete = True
            except Exception as e:
                print_tb(e)
                sys.exit(1)
        else:
            if not os.path.isfile(target):
                logging.error(f"{target} does not exist")
                sys.exit(1)
            else:
                encrypt_target = target
                encrypted_file_target = f"{target}.gpg"
                is_delete = True

        if os.path.isfile(encrypted_file_target):
            log(f"## gpg_file: {encrypted_file_target} is already created")
            return encrypted_file_target

        for attempt in range(5):
            try:
                cmd = ["gpg", "--keyserver", "hkps://keyserver.ubuntu.com", "--recv-key", user_gpg_fingerprint]
                log(f"{br(attempt)} cmd: [magenta]{' '.join(cmd)}", "bold")
                run(cmd)  # may fail if the keyserver is queried too often within a short time
                break
            except Exception as e:
                log(f"warning: {e}")
                time.sleep(30)
        try:
            cmd = [
                "gpg",
                "--batch",
                "--yes",
                "--recipient",
                user_gpg_fingerprint,
                "--trust-model",
                "always",
                "--output",
                encrypted_file_target,
                "--encrypt",
                encrypt_target,
            ]
            run(cmd)
            log(f"==> gpg_file=[magenta]{encrypted_file_target}")
            return encrypted_file_target
        except Exception as e:
            print_tb(e)
            if "encryption failed: Unusable public key" in str(e):
                log("#> Check solution: https://stackoverflow.com/a/34132924/2402577"
                    )
        finally:
            if is_delete:
                _remove(encrypt_target)
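
For completeness, a hedged sketch of the decryption counterpart the recipient would run with their private key; `run` is the same subprocess helper used above and the file names are placeholders:

# hypothetical decryption step, executed by the owner of the matching private key
cmd = ["gpg", "--batch", "--yes", "--output", "my_folder.tar.gz", "--decrypt", "my_folder.tar.gz.gpg"]
run(cmd)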
Example #9
    def check_run_sh(self) -> bool:
        if not os.path.isfile(self.run_path):
            logging.error(f"E: {self.run_path} file does not exist")
            return False

        return True
Example #10
def is_dir(path) -> bool:
    if not os.path.isdir(path):
        logging.error(f"{path} folder does not exist")
        return False

    return True
Example #11
        cores = sys.argv[6]  # type: List[str]  # noqa
        elapsed_time = sys.argv[7]  # type: List[str]  # noqa
    else:
        provider = Ebb.w3.toChecksumAddress(env.PROVIDER_ID)
        _from = Ebb.w3.toChecksumAddress(env.PROVIDER_ID)
        job_key = "QmXFVGtxUBLfR2cYPNQtUjRxMv93yzUdej6kYwV1fqUD3U"
        index = 0
        job_id = 0
        cores = ["1"]
        elapsed_time = ["5"]

    try:
        tx_hash = Ebb.refund(provider, _from, job_key, index, job_id, cores, elapsed_time)
        receipt = get_tx_status(tx_hash)
        if receipt["status"] == 1:
            processed_logs = Ebb._eBlocBroker.events.LogRefundRequest().processReceipt(receipt, errors=DISCARD)
            log(vars(processed_logs[0].args))
            try:
                logging.info(f"refunded_wei={processed_logs[0].args['refundedWei']}")
                log("SUCCESS", "green")
            except Exception as e:
                logging.error(f"E: Transaction is reverted. {e}")
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)
        sys.exit(1)