Example #1
def get_data_key_ids(results_folder_prev) -> dict:
    """Read the data key IDs from the job's meta_data.json file."""
    filename = f"{results_folder_prev}/meta_data.json"
    log(f"==> meta_data_path={filename}")
    try:
        meta_data = read_json(filename)
    except Exception as e:
        print_tb(e)
        raise e  # re-raise so `meta_data` is never referenced unbound

    return meta_data
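
read_json is a project helper that these examples rely on; a minimal sketch of what the calls assume it does (the is_dict keyword appears in Example #2; the body below is an assumption, not the project's implementation):

import json

def read_json(path, is_dict=True):
    """Minimal sketch: parse a JSON file, optionally requiring a dict result."""
    with open(path) as f:
        data = json.load(f)

    if is_dict and not isinstance(data, dict):
        raise TypeError(f"{path} does not contain a JSON object")

    return data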
Example #2
def connect_into_eblocbroker() -> None:
    """Connect to the ebloc-broker contract on the given blockchain."""
    if config.ebb:
        return

    if not cfg.w3:
        connect_into_web3()

    if not env.EBLOCPATH:
        log("E: EBLOCPATH variable is empty")
        raise QuietExit

    try:
        abi_file = env.EBLOCPATH / "broker" / "eblocbroker_scripts" / "abi.json"
        abi = read_json(abi_file, is_dict=False)
    except Exception as e:
        raise Exception(
            f"E: could not read the abi.json file: {abi_file}") from e

    try:
        if env.IS_BLOXBERG:
            if not cfg.IS_BROWNIE_TEST:
                from brownie import network, project

                try:
                    network.connect("bloxberg")
                except Exception as e:
                    print_tb(e)
                    add_bloxberg_into_network_config.main()
                    try:
                        log(
                            "warning: [green]bloxberg[/green] key is added into the "
                            "[magenta]~/.brownie/network-config.yaml[/magenta] file; retrying the connection"
                        )
                        network.connect("bloxberg")
                    except KeyError:
                        sys.exit(1)

                project = project.load(env.CONTRACT_PROJECT_PATH)
                config.ebb = project.eBlocBroker.at(env.CONTRACT_ADDRESS)
                config.ebb.contract_address = cfg.w3.toChecksumAddress(
                    env.CONTRACT_ADDRESS)
                #: for the contract's events
                config._eBlocBroker = cfg.w3.eth.contract(env.CONTRACT_ADDRESS,
                                                          abi=abi)
        elif env.IS_EBLOCPOA:
            config.ebb = cfg.w3.eth.contract(env.CONTRACT_ADDRESS, abi=abi)
            config._eBlocBroker = config.ebb
            config.ebb.contract_address = cfg.w3.toChecksumAddress(
                env.CONTRACT_ADDRESS)
    except Exception as e:
        print_tb(e)
        raise e
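
For context, a minimal web3.py sketch of the contract-handle pattern both branches rely on; the RPC endpoint, ABI path, and address below are placeholders rather than project values, and the v5-style toChecksumAddress mirrors the example:

import json

from web3 import Web3

w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # placeholder RPC endpoint
with open("abi.json") as f:  # placeholder ABI path
    abi = json.load(f)

address = w3.toChecksumAddress("0x" + "00" * 20)  # placeholder address
contract = w3.eth.contract(address=address, abi=abi)  # handle used for calls and events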
Example #3
def main():
    fn = os.path.join(env.LOG_PATH, "cachingRecord.json")
    print(fn)
    if not os.path.isfile(fn):
        data = {}
    else:
        try:
            data = read_json(fn)
        except Exception:
            data = {}  # fall back to an empty dict so `data` is always defined

    add_element(data, "jobKey", ["local", "userName", "timestamp", "keepTime"])
    add_element(data, "ipfs_hash", "timestamp")

    with open("data.json", "w") as outfile:
        json.dump(data, outfile)

    if "jobKey" in data:
        print(data["jobKey"][0])
        print(data["jobKey"])

    remove_element(data, "ipfs_hash")
    with open(fn, "w") as data_file:
        json.dump(data, data_file)
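
add_element and remove_element are not defined in this snippet; a plausible minimal sketch inferred from the calls above (both bodies are assumptions):

def add_element(data, key, value):
    """Sketch: store value under key in the caching record."""
    data[key] = value


def remove_element(data, key):
    """Sketch: drop key from the caching record if present."""
    data.pop(key, None)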
Example #4
    def get_shared_tokens(self):
        """Fetch the share tokens and keep their base64-encoded forms."""
        share_ids = {}  # stay defined even if the json file cannot be read
        with suppress(Exception):
            share_ids = read_json(f"{self.private_dir}/{self.job_key}_share_id.json")

        for source_code_hash in self.code_hashes_to_process:
            try:
                share_token = share_ids[source_code_hash]["share_token"]
            except KeyError:
                try:
                    shared_id = Ebb.mongo_broker.find_shareid_item(
                        f"{self.job_key}_{self.requester_id_address[:16]}")
                    share_token = shared_id["share_token"]
                except Exception as e:
                    log(f"E: share_id cannot be detected from key={self.job_key}")
                    raise e

            self.share_tokens[source_code_hash] = share_token
            self.encoded_share_tokens[source_code_hash] = base64.b64encode(
                f"{share_token}:".encode("utf-8")).decode("utf-8")

        for key, value in share_ids.items():
            try:
                encoded_value = self.encoded_share_tokens[key]
            except KeyError:
                _share_token = value["share_token"]
                encoded_value = base64.b64encode(
                    f"{_share_token}:".encode("utf-8")).decode("utf-8")

            log(f"## shared_tokens: {key} => {value['share_token']} | encoded={encoded_value}")
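
The base64 pattern above (token followed by a colon) matches how HTTP Basic-auth credentials are built, with the share token as the username and an empty password; a standalone sketch with a made-up token:

import base64

share_token = "abc123"  # hypothetical token, for illustration only
encoded = base64.b64encode(f"{share_token}:".encode("utf-8")).decode("utf-8")
print(encoded)  # YWJjMTIzOg== -> usable as an Authorization: Basic header value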
Example #5
    def _sbatch_call(self) -> None:
        """Run sbatch on the cluster.

        * unshare: works fine for terminal programs.
        cmd: `unshare -r -n ping google.com`

        __ https://askubuntu.com/a/600996/660555
        """
        job_key = self.logged_job.args["jobKey"]
        index = self.logged_job.args["index"]
        source_code_idx = 0  # 0 maps to the source code folder
        main_cloud_storage_id = self.logged_job.args["cloudStorageID"][source_code_idx]
        job_info = self.job_infos[0]
        job_id = 0  # base job_id for the workflow
        job_block_number = self.logged_job.blockNumber
        # cmd: date --date="1 seconds" "+%b %d %k:%M:%S %Y"
        date = (
            subprocess.check_output(
                ["date", "--date=" + "1 seconds", "+%b %d %k:%M:%S %Y"],
                env={"LANG": "en_us_88591"},
            )
            .decode("utf-8")
            .strip()
        )
        log(f" * {date} ", end="")
        write_to_file(self.results_folder_prev / "modified_date.txt", date)
        # convert the recorded date into a unix timestamp; cmd: date --date="<date>" +%s
        timestamp = (
            subprocess.check_output(["date", "--date=" + date, "+%s"])
            .decode("utf-8")
            .strip()
        )
        log(f"timestamp={timestamp}, ", "bold", end="")
        write_to_file(self.results_folder_prev / "timestamp.txt", timestamp)
        log(f"job_received_block_number={job_block_number}", "bold")
        log("## Adding received job into the mongoDB database")
        self.Ebb.mongo_broker.add_item(
            job_key,
            self.index,
            self.code_hashes_str,
            self.requester_id,
            timestamp,
            main_cloud_storage_id,
            job_info,
        )
        # TODO: update as used_data_transfer_in value
        data_transfer_in_json = self.results_folder_prev / "data_transfer_in.json"
        try:
            data = read_json(data_transfer_in_json)
        except Exception:
            data = {"data_transfer_in": self.data_transfer_in_to_download_mb}
            with open(data_transfer_in_json, "w") as outfile:
                json.dump(data, outfile)

            time.sleep(0.25)

        # separator character is ~
        run_file = f"{self.results_folder}/run.sh"
        sbatch_file_path = self.results_folder / f"{job_key}~{index}~{job_block_number}.sh"  # separator(~)
        with open(f"{self.results_folder}/run_wrapper.sh", "w") as f:
            f.write("#!/bin/bash\n")
            f.write("#SBATCH -o slurm.out  # STDOUT\n")
            f.write("#SBATCH -e slurm.err  # STDERR\n")
            f.write("#SBATCH --mail-type=ALL\n\n")
            f.write(f"/usr/bin/unshare -r -n {run_file}")

        copyfile(f"{self.results_folder}/run_wrapper.sh", sbatch_file_path)
        job_core_num = str(job_info["core"][job_id])
        # client's requested seconds to run the job; 1 extra minute is granted
        execution_time_second = timedelta(seconds=int((job_info["run_time"][job_id] + 1) * 60))
        d = datetime(1, 1, 1) + execution_time_second
        time_limit = f"{d.day - 1}-{d.hour}:{d.minute}"  # Slurm D-H:M format
        log(f"## time_limit={time_limit} | requested_core_num={job_core_num}")
        # give permission to user that will send jobs to Slurm
        subprocess.check_output(
            ["sudo", "chown", "-R", self.requester_id, self.results_folder])
        slurm_job_id = self.scontrol_update(job_core_num, sbatch_file_path,
                                            time_limit)
        if not slurm_job_id.isdigit():
            log("E: sbatch failed; slurm_job_id is not a digit")

        with suppress(Exception):
            _squeue()
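
For reference, a pure-Python sketch of the same date/timestamp computation without shelling out to GNU date (the hour here is zero-padded via %H, whereas %k pads with a space; illustration only, not the project's code):

from datetime import datetime, timedelta

d = datetime.now() + timedelta(seconds=1)
date_str = d.strftime("%b %d %H:%M:%S %Y")  # e.g. Oct 05 14:30:00 2021
timestamp = int(d.timestamp())  # unix epoch seconds of the same moment
print(date_str, timestamp)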
Example #6
    def run(self):
        try:
            data = read_json(f"{self.results_folder_prev}/data_transfer_in.json")
            self.data_transfer_in = data["data_transfer_in"]
            log(f"==> data_transfer_in={self.data_transfer_in} MB -> rounded={int(self.data_transfer_in)} MB")
        except Exception:
            log("E: data_transfer_in.json file does not exist")

        try:
            self.modified_date = read_file(f"{self.results_folder_prev}/modified_date.txt")
            log(f"==> modified_date={self.modified_date}")
        except Exception:
            log("E: modified_date.txt file could not be read")

        self.requester_gpg_fingerprint = self.requester_info["gpg_fingerprint"]
        log("\njob_owner's info\n================", "bold green")
        log(f"==> email=[white]{self.requester_info['email']}")
        log(f"==> gpg_fingerprint={self.requester_gpg_fingerprint}")
        log(f"==> ipfs_id={self.requester_info['ipfs_id']}")
        log(f"==> f_id={self.requester_info['f_id']}")
        if self.job_info["stateCode"] == str(state.code["COMPLETED"]):
            self.get_job_info()
            log(":beer: job is already completed and its money is received",
                "bold green")
            raise QuietExit

        run_time = self.job_info["run_time"]
        log(f"==> requested_run_time={run_time[self.job_id]} minutes")
        try:
            if self.job_status_running_tx:
                Ebb._wait_for_transaction_receipt(self.job_status_running_tx)
            else:
                log("warning: job_status_running_tx is empty")

            self.get_job_info(is_log_print=False)  # re-fetch job info
            self.attemp_get_job_info()
        except Exception as e:
            print_tb(e)
            raise e

        log("## Received running job status successfully", "bold green")
        try:
            self.job_info = eblocbroker_function_call(
                lambda: Ebb.get_job_code_hashes(
                    env.PROVIDER_ID,
                    self.job_key,
                    self.index,
                    # self.job_id,
                    self.received_block_number,
                ),
                max_retries=10,
            )
        except Exception as e:
            print_tb(e)
            sys.exit(1)

        self.code_hashes = self.job_info["code_hashes"]
        self.set_code_hashes_to_process()
        self.sacct_result()
        self.end_time_stamp = slurm.get_job_end_time(self.slurm_job_id)
        self.elapsed_time = slurm.get_elapsed_time(self.slurm_job_id)
        if self.elapsed_time > int(run_time[self.job_id]):
            self.elapsed_time = run_time[self.job_id]

        log(f"finalized_elapsed_time={self.elapsed_time}", "green")
        _job_info = pprint.pformat(self.job_info)
        log("## job_info:", "bold magenta")
        log(_job_info, "bold")
        try:
            self.get_cloud_storage_class(0).initialize(self)
            self.upload_driver()
        except Exception as e:
            print_tb(e)
            sys.exit(1)

        data_transfer_sum = self.data_transfer_in + self.data_transfer_out
        log(f"==> data_transfer_in={self.data_transfer_in} MB -> rounded={int(self.data_transfer_in)} MB")
        log(f"==> data_transfer_out={self.data_transfer_out} MB -> rounded={int(self.data_transfer_out)} MB")
        log(f"==> data_transfer_sum={data_transfer_sum} MB -> rounded={int(data_transfer_sum)} MB")
        tx_hash = self.process_payment_tx()
        time.sleep(1)
        self._get_tx_status(tx_hash)
        self.get_job_info()
        log("SUCCESS")
Example #7
def submit(provider, _from, job):
    job.check_account_status(_from)
    job.Ebb.is_provider_valid(provider)
    job.Ebb.is_requester_valid(_from)
    try:
        provider_info = job.Ebb.get_provider_info(provider)
        log(f"==> Provider's available_core_num={provider_info['available_core_num']}")
        log(f"==> Provider's price_core_min={provider_info['price_core_min']}")
    except Exception as e:
        raise QuietExit from e

    provider = job.Ebb.w3.toChecksumAddress(provider)
    provider_to_share = provider_info["email"]
    data_files_json_path = f"{job.tmp_dir}/meta_data.json"
    try:
        if len(job.folders_to_share) > 1:
            # starting from the second element, ignoring the source folder,
            # attempt to share each data folder
            for folder_to_share in job.folders_to_share[1:]:
                if not isinstance(folder_to_share, bytes):
                    job_key, tar_hash, job.tar_hashes = share_folder(
                        folder_to_share, provider_to_share, job.tmp_dir)
                    job.foldername_tar_hash[folder_to_share] = tar_hash
                    job.keys[tar_hash] = job_key

            if job.tmp_dir == "":
                print_tb("job.tmp_dir is empty")
                sys.exit(1)

            _dump_dict_to_file(data_files_json_path, job.keys)
            data_json = read_json(data_files_json_path)
            if data_json:
                log("## meta_data:")
                log(data_json)

            with suppress(Exception):
                data_json = read_json(data_files_json_path)
                if job.keys == data_json:
                    log(f"## meta_data.json file matches the given data keys {ok()}")
                else:
                    log("warning: meta_data.json file does not match the given data keys")

        folder_to_share = job.folders_to_share[0]
        if not isinstance(folder_to_share, bytes):
            job_key, tar_hash, job.tar_hashes = share_folder(folder_to_share,
                                                             provider_to_share,
                                                             job.tmp_dir,
                                                             job_key_flag=True)
            job.foldername_tar_hash[folder_to_share] = tar_hash
            # since Python 3.7 dicts preserve insertion order, so insert the
            # source folder's key first, then append the data-folder keys
            job.keys_final[tar_hash] = job_key
            job.keys_final.update(job.keys)
            job.keys = job.keys_final

        return job
    except Exception as e:
        print_tb(e)
        raise e
    finally:
        _dump_dict_to_file(data_files_json_path, job.keys)
        data_json = read_json(data_files_json_path)
        if data_json:
            log("## meta_data:")
            log(data_json)

        _id = None
        for value in data_json.values():  # take the first value, if any
            _id = value
            break

        if _id:
            log("## updating meta_data ", end="")
            update_meta_data_gdrive(_id, data_files_json_path)
            log(ok())
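
_dump_dict_to_file is called above but not shown; a plausible minimal sketch (the implementation is an assumption):

import json


def _dump_dict_to_file(path, data):
    """Sketch: serialize the given dict as JSON into path."""
    with open(path, "w") as f:
        json.dump(data, f)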
Example #8
    def eudat_get_share_token(self, f_id):
        """Check whether the key is already shared."""
        folder_token_flag = {}
        if not os.path.isdir(self.private_dir):
            raise Exception(f"{self.private_dir} does not exist")

        share_id_file = f"{self.private_dir}/{self.job_key}_share_id.json"
        for idx, source_code_hash_text in enumerate(self.code_hashes_to_process):
            if self.cloudStorageID[idx] != StorageID.NONE:
                folder_name = source_code_hash_text
                self.folder_type_dict[folder_name] = None
                source_fn = f"{folder_name}/{folder_name}.tar.gz"
                if os.path.isdir(env.OWNCLOUD_PATH / f"{folder_name}"):
                    log(f"## eudat shared folder({folder_name}) is already accepted and "
                        "exists in eudat's mounted folder")
                    if os.path.isfile(f"{env.OWNCLOUD_PATH}/{source_fn}"):
                        self.folder_type_dict[folder_name] = "tar.gz"
                    else:
                        self.folder_type_dict[folder_name] = "folder"

                try:
                    info = config.oc.file_info(f"/{source_fn}")
                    logging.info("shared folder is already accepted")
                    size = info.attributes["{DAV:}getcontentlength"]
                    folder_token_flag[folder_name] = True
                    log(f"==> index={br(idx)}: /{source_fn} => {size} bytes")
                except Exception:
                    log(f"warning: shared_folder{br(source_code_hash_text, 'green')} is not accepted yet")
                    folder_token_flag[folder_name] = False

        try:  # TODO: add pass on template
            data = read_json(share_id_file)
            if isinstance(data, dict) and data:
                self.share_id = data
        except Exception:
            pass

        if self.share_id:
            log("==> share_id:")
            log(self.share_id, "bold")

        for share_key, value in self.share_id.items():  # there is only a single item
            try:
                # TODO: if it was added before, do nothing
                if Ebb.mongo_broker.add_item_share_id(share_key, value["share_id"],
                                                      value["share_token"]):
                    # adding into mongoDB for future usage
                    log(f"#> [green]{share_key}[/green] is added into mongoDB {ok()}")
            except Exception as e:
                print_tb(e)
                log(f"E: {e}")

        for attempt in range(config.RECONNECT_ATTEMPTS):
            try:
                share_list = config.oc.list_open_remote_share()
                break
            except Exception as e:
                log(f"E: Failed to list_open_remote_share eudat [attempt={attempt}]")
                print_tb(e)
                time.sleep(1)
        else:
            return False

        self.accept_flag = 0
        for idx, source_code_hash_text in enumerate(self.code_hashes_to_process):
            if self.cloudStorageID[idx] == StorageID.NONE:
                self.accept_flag += 1
            else:
                # the folder should not be registered data on the provider
                #: search_token has priority
                if not self.search_token(f_id, share_list, source_code_hash_text):
                    try:
                        share_key = f"{source_code_hash_text}_{self.requester_id[:16]}"
                        shared_id = Ebb.mongo_broker.find_shareid_item(key=share_key)
                        self.share_id[share_key] = {
                            "share_id": shared_id["share_id"],
                            "share_token": shared_id["share_token"],
                        }
                        self.accept_flag += 1
                    except Exception as e:
                        if "warning: " not in str(e):
                            log(f"E: {e}")
                        else:
                            log(str(e))

                        if folder_token_flag[source_code_hash_text] and self.share_id:
                            self.accept_flag += 1
                        else:
                            self.search_token(f_id, share_list, source_code_hash_text)

                if self.accept_flag == len(self.code_hashes):
                    break
        else:
            if self.accept_flag == len(self.code_hashes):
                logging.info("shared token already exists on mongoDB")
            # else:
            #     raise Exception(f"E: could not find a shared file. Found ones are:\n{self.share_id}")

        if self.share_id:
            with open(share_id_file, "w") as f:
                json.dump(self.share_id, f)
        else:
            raise Exception("E: share_id is empty")