def sent_job_to_storage_class(self):
    """Submit the job's information into the related storage thread."""
    user_add(self.requester_id, env.PROGRAM_PATH, env.SLURMUSER)
    requester_md5_id = eth_address_to_md5(self.requester_id)
    slurm.pending_jobs_check()
    main_cloud_storage_id = self.logged_job.args["cloudStorageID"][0]
    kwargs = {
        "logged_job": self.logged_job,
        "job_infos": self.job_infos,
        "requester_id": requester_md5_id,
        "is_cached": self.is_cached,
    }
    if main_cloud_storage_id in (StorageID.IPFS, StorageID.IPFS_GPG):
        storage_class = IpfsClass(**kwargs)
    elif main_cloud_storage_id == StorageID.EUDAT:
        if not config.oc:
            try:
                eudat.login(env.OC_USER, f"{env.LOG_PATH}/.eudat_client.txt", env.OC_CLIENT)
            except Exception as e:
                print_tb(e)
                sys.exit(1)

        storage_class = EudatClass(**kwargs)
    elif main_cloud_storage_id == StorageID.GDRIVE:
        storage_class = GdriveClass(**kwargs)

    # run_storage_process(storage_class)
    if cfg.IS_THREADING_ENABLED:
        run_storage_thread(storage_class)
    else:
        storage_class.run()
def timeout_wrapper(self, method, *args):
    for _ in range(self.max_retries):
        self.ops = {
            "from": self._from,
            "gas": self.gas,
            "gas_price": f"{self.gas_price} gwei",
            "allow_revert": True,
            "required_confs": self.required_confs,
        }
        try:
            return self.timeout(method, *args)
        except ValueError as e:
            log(f"E: Tx: {e}")
            if "Execution reverted" in str(e) or "Insufficient funds" in str(e):
                print_tb(e)
                raise QuietExit from e

            if "Request has been rejected because of queue limit" in str(e):
                if self.ops["allow_revert"]:
                    self.ops["allow_revert"] = False
                    try:
                        return self.timeout(method, *args)
                    except Exception as e1:
                        log(str(e1), is_align=True)
                        raise QuietExit from e1

                raise QuietExit from e

            if "Transaction cost exceeds current gas limit" in str(e):
                self.gas -= 10000  # lower the requested gas before retrying

        except KeyboardInterrupt:
            log("warning: Timeout Awaiting Transaction in the mempool")

        self.gas_price *= 1.13  # escalate the gas price before the next attempt
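# A minimal standalone sketch of the retry idea used by `timeout_wrapper`: retry on
# node rejection while bumping the gas price ~13% per attempt. All names below
# (`retry_with_gas_bump`, `send_tx`) are illustrative assumptions, not part of the
# broker code base.
def retry_with_gas_bump(send_tx, max_retries=3, gas_price=1.0, bump=1.13):
    last_error = None
    for _ in range(max_retries):
        try:
            return send_tx(gas_price)
        except ValueError as e:  # the node rejected or dropped the transaction
            last_error = e
            gas_price *= bump  # escalate the gas price before the next attempt

    raise Exception("all retry attempts failed") from last_error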
def appy_patch(base_dir, patch_fn):
    r"""Apply patch file.

    cmd: perl -pe 's/\x1b.*?[mGKH]//g' alper.patch > good.patch
    """
    patch_file = f"{base_dir}/{patch_fn}"
    base_name = patch_fn.replace(".gz", "")
    diff_file_name = f"{base_dir}/{base_name}"
    if not os.path.isfile(diff_file_name):
        if not os.path.isfile(patch_file):
            print(f"E: {patch_file} file does not exist")
            sys.exit(1)

        if patch_file.endswith(".diff.gz"):
            extract_gzip(patch_file)
    else:
        log(f"==> [magenta]{diff_file_name}[/magenta] exists")

    try:
        git.apply_patch(base_dir, patch_file.replace(".gz", ""), is_gpg=False)
    except Exception as e:
        print_tb(e)
        try:
            good_patch = f"{base_dir}/good.patch"
            sep = "~"
            popen_communicate(
                [
                    "perl",
                    "-pe",
                    "s/\x1b.*?[mGKH]//g",
                    str(Path(patch_file)).replace(f"{sep}", f"\\{sep}"),
                ],
                stdout_fn=good_patch,
            )
            git.apply_patch(base_dir, good_patch, is_gpg=False)
        except Exception as e1:
            print_tb(e1)
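# The perl one-liner in the docstring above strips ANSI escape sequences (color
# codes) from a patch file before re-applying it. A pure-Python sketch of the same
# clean-up step, shown here only for illustration (it is not called by the broker
# code):
import re


def strip_ansi_escapes(text: str) -> str:
    """Remove ANSI escape sequences such as color codes from the given text."""
    return re.sub(r"\x1b.*?[mGKH]", "", text)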
def check_account_status(self, _from):
    try:
        if isinstance(_from, int):
            _from = self.Ebb.account_id_to_address(_from)

        if not env.IS_BLOXBERG and is_geth_account_locked(_from):
            log(f"E: Account({_from}) is locked")
            raise QuietExit

        if not self.Ebb.does_requester_exist(_from):
            log(f"E: Requester's Ethereum address {_from} is not registered")
            sys.exit(1)

        *_, orcid = self.Ebb.get_requester_info(_from)
        if not self.Ebb.is_orcid_verified(_from):
            if orcid != empty_bytes32:
                log(f"E: Requester({_from})'s orcid: {orcid.decode('UTF')} is not verified")
            else:
                log(f"E: Requester({_from})'s orcid is not registered")

            raise QuietExit
    except QuietExit:
        sys.exit(1)
    except Exception:
        print_tb()
        sys.exit(1)
def swarm_connect(self, ipfs_id: str):
    """Swarm connect into the ipfs node."""
    if not is_ipfs_on():
        raise IpfsNotConnected

    # TODO: check whether the given IPFS id is valid
    try:
        log(f" * trying to connect into {ipfs_id}")
        cmd = ["/usr/local/bin/ipfs", "swarm", "connect", ipfs_id]
        p, output, e = popen_communicate(cmd)
        if p.returncode != 0:
            log()
            e = e.replace("[/", "/").replace("]", "").replace("e: ", "").rstrip()
            if "failure: dial to self attempted" in e:
                log(f"E: {e}")
                if not cfg.IS_FULL_TEST and not question_yes_no("#> Would you like to continue?"):
                    raise QuietExit
            else:
                log("E: could not connect into the provider's IPFS node via swarm")
                raise Exception(e)
        else:
            log(f"{output} {ok()}")
    except Exception as e:
        print_tb(e)
        raise e
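# Usage sketch for `swarm_connect`: the argument is expected to be a full IPFS
# multiaddress of the peer to dial. Assuming the surrounding class is exposed as
# `cfg.ipfs` (an assumption made here only for illustration), a call could look like:
#
#   cfg.ipfs.swarm_connect("/ip4/192.0.2.10/tcp/4001/p2p/QmExamplePeerID")
#
# where both the IP address and the peer ID above are placeholders, not a real
# provider node.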
def update_job_cores(self, provider, job_key, index=0, received_bn=0) -> int:
    """Update job cores."""
    self.set_job_received_block_number(received_bn)
    try:
        event_filter = self._eBlocBroker.events.LogJob.createFilter(
            argument_filters={"provider": str(provider)},
            fromBlock=int(self.job_info["received_block_number"]),
            toBlock=self.to_block,
        )
        for logged_job in event_filter.get_all_entries():
            if logged_job.args["jobKey"] == job_key and logged_job.args["index"] == int(index):
                self.job_info["received_block_number"] = received_bn = int(logged_job["blockNumber"])
                self.job_info.update({"core": logged_job.args["core"]})
                self.job_info.update({"run_time": logged_job.args["runTime"]})
                self.job_info.update({"cloudStorageID": logged_job.args["cloudStorageID"]})
                self.job_info.update({"cacheType": logged_job.args["cacheType"]})
                break
        else:
            log(f"E: failed to find job({job_key}) to update")

        return received_bn
    except Exception as e:
        print_tb(f"E: Failed to update_job_cores.\n{e}")
        raise e
def get_requester_info(self, requester):
    """Return requester information."""
    try:
        requester = self.w3.toChecksumAddress(requester)
        if not self.does_requester_exist(requester):
            log(
                f"E: Requester({requester}) is not registered.\n"
                "Please try again with a registered Ethereum address as requester.\n"
                "You can register your requester using: [blue]./broker/eblocbroker_scripts/register_requester.py",
            )
            raise QuietExit

        block_read_from, orc_id = self._get_requester_info(requester)
        event_filter = self._eBlocBroker.events.LogRequester.createFilter(
            fromBlock=int(block_read_from), toBlock=int(block_read_from) + 1
        )
        entries = event_filter.get_all_entries()
        gpg_fingerprint = entries[0].args["gpgFingerprint"].rstrip(b"\x00").hex()[24:].upper()
        requester_info = {
            "address": requester.lower(),
            "block_read_from": block_read_from,
            "email": entries[0].args["email"],
            "gpg_fingerprint": gpg_fingerprint,
            "ipfs_id": entries[0].args["ipfsID"],
            "f_id": entries[0].args["fID"],
            "is_orcid_verified": self.is_orcid_verified(requester),
        }
        if not is_byte_str_zero(orc_id):
            requester_info["orc_id"] = orc_id.decode("utf-8").replace("\x00", "")

        return requester_info
    except Exception as e:
        print_tb(e)
        raise e
def _main(given_bn):
    lock = None
    try:
        is_driver_on(process_count=1, is_print=False)
        try:
            lock = zc.lockfile.LockFile(env.DRIVER_LOCKFILE, content_template=str(pid))
        except PermissionError:
            print_tb("E: PermissionError is generated for the locked file")
            give_rwe_access(env.WHOAMI, "/tmp/run")
            lock = zc.lockfile.LockFile(env.DRIVER_LOCKFILE, content_template=str(pid))

        run_driver(given_bn)
    except HandlerException:
        pass
    except QuietExit as e:
        log(e, is_err=True)
    except zc.lockfile.LockError:
        log(f"E: Driver cannot lock the file {env.DRIVER_LOCKFILE}, the pid file is in use")
    except Terminate as e:
        terminate(str(e), lock)
    except Exception as e:
        print_tb(e)
        breakpoint()  # DEBUG: end of program pressed CTRL-c
    finally:
        with suppress(Exception):
            if lock:
                lock.close()
def main():
    try:
        test_1()
        test_2()
        test_3()
        test_4()
    except Exception as e:
        print_tb(e)
def balance():
    from broker._utils._log import log

    try:
        balance = cfg.Ebb.get_balance(args.eth_address)
        log(f"## balance={balance}")
    except Exception as e:
        print_tb(e)
def main():
    owner_address = Ebb.get_owner()
    for user in users:
        try:
            tx_hash = Ebb.authenticate_orc_id(user, "0000-0001-7642-0552", owner_address)
            if tx_hash:
                get_tx_status(tx_hash)
        except Exception as e:
            print_tb(e)
def process_logged_job(self, idx):
    """Process the logged jobs one by one."""
    self.received_block = []
    self.storage_duration = []
    wait_until_idle_core_available()
    self.is_provider_received_job = True
    console_ruler(idx, character="-")
    # sourceCodeHash = binascii.hexlify(logged_job.args['sourceCodeHash'][0]).decode("utf-8")[0:32]
    job_key = self.logged_job.args["jobKey"]
    index = self.logged_job.args["index"]
    self.job_block_number = self.logged_job["blockNumber"]
    self.cloud_storage_id = self.logged_job.args["cloudStorageID"]
    log(f"## job_key=[magenta]{job_key}[/magenta] | index={index}")
    log(
        f"received_block_number={self.job_block_number} \n"
        f"transactionHash={self.logged_job['transactionHash'].hex()} | "
        f"log_index={self.logged_job['logIndex']} \n"
        f"provider={self.logged_job.args['provider']} \n"
        f"received={self.logged_job.args['received']}",
        "bold yellow",
    )
    if self.logged_job["blockNumber"] > self.latest_block_number:
        self.latest_block_number = self.logged_job["blockNumber"]

    try:
        run([env.BASH_SCRIPTS_PATH / "is_str_valid.sh", job_key])
    except Exception:
        logging.error("E: Filename contains an invalid character")
        return

    try:
        job_id = 0  # main job_id
        self.job_info = eblocbroker_function_call(
            partial(self.Ebb.get_job_info, env.PROVIDER_ID, job_key, index, job_id, self.job_block_number),
            max_retries=10,
        )
        cfg.Ebb.get_job_code_hashes(env.PROVIDER_ID, job_key, index, self.job_block_number)
        self.requester_id = self.job_info["job_owner"]
        self.job_info.update({"received_block": self.received_block})
        self.job_info.update({"storage_duration": self.storage_duration})
        self.job_info.update({"cacheType": self.logged_job.args["cacheType"]})
        cfg.Ebb.analyze_data(job_key, env.PROVIDER_ID)
        self.job_infos.append(self.job_info)
        log(f"==> requester={self.requester_id}")
        log("==> [yellow]job_info:", "bold")
        log(self.job_info)
    except Exception as e:
        print_tb(e)
        return

    for job in range(1, len(self.job_info["core"])):
        with suppress(Exception):
            self.job_infos.append(  # if a workflow is given then add its jobs into the list
                self.Ebb.get_job_info(env.PROVIDER_ID, job_key, index, job, self.job_block_number)
            )

    self.check_requested_job()
def submit():
    from broker.submit_base import SubmitBase

    try:
        base = SubmitBase(args.path)
        base.submit()
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)
def main():  # noqa
    try:
        globals()[args.command]()
    except KeyError:
        print(
            f"ebloc-broker v{cfg.__version__} - Blockchain based autonomous computational resource broker\n"
        )
        parser.print_help()
    except Exception as e:
        print_tb(e)
def list_registered_data_files(eth_address):
    """Return registered data files of the given provider.

    :param str eth_address: Ethereum address of the provider
    """
    t1.join()
    try:
        cfg.Ebb.get_data_info(eth_address)
    except Exception as e:
        print_tb(e)
def eblocbroker_function_call(func, max_retries):
    for _ in range(max_retries):
        try:
            return func()
        except Web3NotConnected:
            time.sleep(1)
        except Exception as e:
            print_tb(e)
            raise e

    raise Exception("E: eblocbroker_function_call exhausted all retry attempts")
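# Usage sketch for `eblocbroker_function_call`: because it takes a zero-argument
# callable, arguments are usually bound with `functools.partial` first (mirroring
# the call in `process_logged_job`); the parameter names below are placeholders:
#
#   from functools import partial
#
#   job_info = eblocbroker_function_call(
#       partial(Ebb.get_job_info, provider, job_key, index, job_id, block_number),
#       max_retries=10,
#   )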
def read_file(fname):
    try:
        # a context manager closes the file even if an exception is raised,
        # which the original try/else version could not guarantee
        with open(fname, "r") as file:
            return file.read().rstrip()
    except IOError as e:
        print_tb(e)
        raise e
def register_provider(yaml_fn):
    """Register the provider using the given YAML file.

    :param str yaml_fn: Full file path of the YAML file that contains the provider info
    """
    t1.join()
    try:
        register_provider_wrapper(yaml_fn)
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)
def connect_into_eblocbroker() -> None:
    """Connect into ebloc-broker contract in the given blockchain."""
    if config.ebb:
        return

    if not cfg.w3:
        connect_into_web3()

    if not env.EBLOCPATH:
        log("E: EBLOCPATH variable is empty")
        raise QuietExit

    try:
        abi_file = env.EBLOCPATH / "broker" / "eblocbroker_scripts" / "abi.json"
        abi = read_json(abi_file, is_dict=False)
    except Exception as e:
        raise Exception(f"E: could not read the abi.json file: {abi_file}") from e

    try:
        if env.IS_BLOXBERG:
            if not cfg.IS_BROWNIE_TEST:
                from brownie import network, project

                try:
                    network.connect("bloxberg")
                except Exception as e:
                    print_tb(e)
                    add_bloxberg_into_network_config.main()
                    # network.connect("bloxberg")
                    try:
                        log(
                            "warning: [green]bloxberg[/green] key is added into the "
                            "[magenta]~/.brownie/network-config.yaml[/magenta] file. Please try again."
                        )
                        network.connect("bloxberg")
                    except KeyError:
                        sys.exit(1)

                project = project.load(env.CONTRACT_PROJECT_PATH)
                config.ebb = project.eBlocBroker.at(env.CONTRACT_ADDRESS)
                config.ebb.contract_address = cfg.w3.toChecksumAddress(env.CONTRACT_ADDRESS)
                #: for the contract's events
                config._eBlocBroker = cfg.w3.eth.contract(env.CONTRACT_ADDRESS, abi=abi)
        elif env.IS_EBLOCPOA:
            config.ebb = cfg.w3.eth.contract(env.CONTRACT_ADDRESS, abi=abi)
            config._eBlocBroker = config.ebb
            config.ebb.contract_address = cfg.w3.toChecksumAddress(env.CONTRACT_ADDRESS)
    except Exception as e:
        print_tb(e)
        raise e
def check(self):
    try:
        assert len(self.cores) == len(self.run_time)
        assert len(self.code_hashes) == len(self.storage_hours)
        assert len(self.storage_hours) == len(self.storage_ids)
        assert len(self.cache_types) == len(self.storage_ids)
        for idx, storage_id in enumerate(self.storage_ids):
            assert storage_id <= 4
            if storage_id == StorageID.IPFS:
                assert self.cache_types[idx] == CacheType.PUBLIC
    except Exception as e:
        print_tb(e)
        raise e
def get_deployed_block_number(self) -> int:
    """Return contract's deployed block number."""
    try:
        contract = self._get_contract_yaml()
    except Exception as e:
        print_tb(e)
        return False

    block_number = self.w3.eth.get_transaction(contract["tx_hash"]).blockNumber
    if block_number is None:
        raise Exception("E: Contract is not available on the blockchain, is it synced?")

    return block_number
def submit_job(yaml_fn):
    """Submit the job using the given YAML file.

    :param str yaml_fn: Full file path of the YAML file that contains the job info
    """
    t1.join()
    try:
        base = SubmitBase(yaml_fn)
        base.submit()
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)
def _setup(self, is_brownie=False):
    if is_brownie:
        from brownie import web3

        self.w3 = web3
    else:
        try:
            from broker.imports import connect

            self.eBlocBroker, self.w3, self._eBlocBroker = connect()
        except Exception as e:
            print_tb(e)
            sys.exit(1)
def main():
    try:
        config.env = config.ENV()
    except Exception as e:
        print_tb(e)
        log("E: env.IPFS_LOG is not set")
        sys.exit(1)

    if not is_ipfs_on():
        cfg.ipfs.remove_lock_files()
        run()
    else:
        log(f"## [green]IPFS[/green] daemon is already running {ok()}")
        sys.exit(100)
def add_to_ipfs(results_folder):
    """Add result folder into ipfs repo."""
    try:
        result_ipfs_hash = cfg.ipfs.add(results_folder)
        print(result_ipfs_hash)
    except Exception as e:
        print_tb(e)
        sys.exit()

    if os.path.isdir(results_folder):
        basename = os.path.basename(os.path.normpath(results_folder))
        filepath = os.path.dirname(results_folder)
        print(filepath)
        print(basename)
def _try(func):
    """Call the given function inside try and except.

    Example call:

        _try(lambda: f())

    Returns the output of the given function.

    :param func: yield function
    :raises Exception: re-raises the exception raised by the given function
    """
    try:
        return func()
    except Exception as e:
        print_tb(e)
        raise e
def connect():
    """Connect into web3 and ebloc_broker objects."""
    if config.ebb and cfg.w3:
        return config.ebb, cfg.w3, config._eBlocBroker

    try:
        if not cfg.w3.isConnected():
            connect_into_web3()

        if not config.ebb:
            connect_into_eblocbroker()
    except Exception as e:
        print_tb(e)

    return config.ebb, cfg.w3, config._eBlocBroker
def decrypt_using_gpg(self, gpg_file, extract_target=None):
    r"""Decrypt the compressed file using gpg.

    This function is specific for using on driver.ipfs to decrypt the tar file,
    specific for "tar.gz" file types.

    cmd: gpg --verbose --output={tar_file} --pinentry-mode loopback \
         --passphrase-file=f"{env.LOG_PATH}/gpg_pass.txt" \
         --decrypt {gpg_file_link}
    """
    if not os.path.isfile(f"{gpg_file}.gpg"):
        os.symlink(gpg_file, f"{gpg_file}.gpg")

    gpg_file_link = f"{gpg_file}.gpg"
    tar_file = f"{gpg_file}.tar.gz"
    cmd = [
        "gpg",
        "--verbose",
        "--batch",
        "--yes",
        f"--output={tar_file}",
        "--pinentry-mode",
        "loopback",
        f"--passphrase-file={env.GPG_PASS_FILE}",
        "--decrypt",
        gpg_file_link,
    ]
    try:
        run(cmd)
        log(f"==> GPG decrypt {ok()}")
        _remove(gpg_file)
        os.unlink(gpg_file_link)
    except Exception as e:
        print_tb(e)
        raise e
    # finally:
    #     os.unlink(gpg_file_link)

    if extract_target:
        try:
            untar(tar_file, extract_target)
        except Exception as e:
            raise Exception("E: Could not extract the given tar file") from e
        finally:
            cmd = None
            _remove(f"{extract_target}/.git")
            _remove(tar_file)
def register_requester(yaml_fn):
    """Register the requester using the given YAML file.

    :param str yaml_fn: Full file path of the YAML file that contains the requester info
    """
    t1.join()
    try:
        tx_hash = Ebb.register_requester(yaml_fn)
        if tx_hash:
            get_tx_status(tx_hash)
        else:
            log()
    except QuietExit:
        pass
    except Exception as e:
        print_tb(e)
def analyze_tx_status(self, tx_hash) -> bool:
    try:
        tx_receipt = get_tx_status(tx_hash)
        try:
            if not self.Ebb:
                log("warning: self.Ebb is empty object")

            processed_logs = self.Ebb.eBlocBroker.events.LogJob().processReceipt(
                tx_receipt, errors=self.w3.DISCARD
            )
            log(vars(processed_logs[0].args))
            log(f"==> job_index={processed_logs[0].args['index']}")
        except IndexError:
            log("E: Transaction is reverted")

        return True
    except Exception as e:
        print_tb(e)
        return False