def appy_patch(base_dir, patch_fn):
    r"""Apply patch file.

    Falls back to cleaning ANSI escape codes out of the patch when the
    first `git apply` fails, equivalent to:

        cmd: perl -pe 's/\x1b.*?[mGKH]//g' alper.patch > good.patch

    :param base_dir: directory the patch is applied in
    :param patch_fn: patch file name, optionally gzip'ed (``*.diff.gz``)
    """
    patch_file = f"{base_dir}/{patch_fn}"
    base_name = patch_fn.replace(".gz", "")
    diff_file_name = f"{base_dir}/{base_name}"
    if not os.path.isfile(diff_file_name):
        if not os.path.isfile(patch_file):
            print(f"E: {patch_file} file does not exist")
            sys.exit(1)

        if patch_file.endswith(".diff.gz"):
            extract_gzip(patch_file)
    else:
        log(f"==> [magenta]{diff_file_name}[/magenta] exists")

    try:
        git.apply_patch(base_dir, patch_file.replace(".gz", ""), is_gpg=False)
    except Exception as e:
        print_tb(e)
        try:
            # strip ANSI color/control sequences, then retry the apply
            good_patch = f"{base_dir}/good.patch"
            sep = "~"
            popen_communicate(
                [
                    "perl",
                    "-pe",
                    "s/\x1b.*?[mGKH]//g",
                    # escape `~` so perl does not expand it; `\\{sep}` was
                    # previously written as the invalid escape `\{sep}`
                    str(Path(patch_file)).replace(f"{sep}", f"\\{sep}"),
                ],
                stdout_fn=good_patch,
            )
            git.apply_patch(base_dir, good_patch, is_gpg=False)
        except Exception as e1:
            print_tb(e1)
def get_data_init(self, key, _id, is_job_key=False):
    """Query gdrive metadata for `key` and return (mime_type, folder_name).

    When `is_job_key` is True also computes the job's download size and
    populates ``self.data_transfer_in_to_download`` and ``self.job_key_list``.

    :param key: gdrive key of the file/folder
    :param _id: identifier of the requester (unused here, kept for interface)
    :param is_job_key: True when `key` refers to the sourceCode tar.gz file
    :raises Exception: when `gdrive info` fails or the size calculation fails
    """
    cmd = ["gdrive", "info", "--bytes", key, "-c", env.GDRIVE_METADATA]
    _p, gdrive_output, *_ = popen_communicate(cmd)
    if _p.returncode != 0:
        raise Exception(gdrive_output)

    mime_type = gdrive.get_file_info(gdrive_output, _type="Mime")
    folder_name = gdrive.get_file_info(gdrive_output, _type="Name")
    log(f"==> mime_type=[magenta]{mime_type}")
    if is_job_key:
        # key for the sourceCode tar.gz file is obtained
        try:
            self.data_transfer_in_to_download, self.job_key_list, key = gdrive.size(
                key,
                mime_type,
                folder_name,
                gdrive_output,
                self.results_folder_prev,
                self.code_hashes,
                self.job_infos[0]["is_cached"],
            )
        except Exception as e:
            print_tb(e)
            raise  # bare raise keeps the original traceback intact

    return mime_type, folder_name
def subprocess_call(cmd, attempt=1, sleep_time=1):
    """Execute `cmd`, retrying up to `attempt` times on a non-zero exit.

    :param cmd: command argument list (items are stringified)
    :param attempt: maximum number of tries
    :param sleep_time: seconds slept between retries
    :return: the command's stdout on the first successful run
    :raises Exception: with the last error message when every attempt fails,
        or the original exception when launching the process itself blows up
    """
    error_msg = ""
    cmd = [str(arg) for arg in cmd]  # subprocess arguments must be strings
    for idx in range(attempt):
        try:
            p, output, error_msg = popen_communicate(cmd)
            if p.returncode == 0:
                return output

            if idx == 0:
                # echo the failing command and its error only once
                _cmd = " ".join(cmd)
                log(f"\n$ {_cmd}", "bold red")
                log(f"{error_msg} ", "bold", end="")
                log(WHERE())

            if attempt > 1 and idx + 1 != attempt:
                log(f"{br(f'attempt={idx}')} ", end="")
                time.sleep(sleep_time)
        except Exception as e:
            # https://stackoverflow.com/a/1156048/2402577
            for frame in traceback.format_stack():
                log(frame.strip())

            raise e

    raise Exception(error_msg)
def swarm_connect(self, ipfs_id: str):
    """Swarm connect into the ipfs node.

    :param ipfs_id: peer multiaddress to dial
        (presumably ``/ip4/.../tcp/4001/p2p/<id>`` — TODO confirm)
    :raises IpfsNotConnected: when the local ipfs daemon is not running
    :raises QuietExit: when the user declines to continue after a
        dial-to-self failure
    :raises Exception: when the swarm connection cannot be established
    """
    if not is_ipfs_on():
        raise IpfsNotConnected

    # TODO: check is valid IPFS id
    try:
        log(f" * trying to connect into {ipfs_id}")
        cmd = ["/usr/local/bin/ipfs", "swarm", "connect", ipfs_id]
        p, output, e = popen_communicate(cmd)
        if p.returncode != 0:
            log()
            # normalize stderr: drop rich-markup brackets and the "e: " prefix
            e = e.replace("[/", "/").replace("]", "").replace("e: ", "").rstrip()
            if "failure: dial to self attempted" in e:
                # dialing our own node is recoverable; let the user decide
                # (skipped in full-test mode)
                log(f"E: {e}")
                if not cfg.IS_FULL_TEST and not question_yes_no(
                        "#> Would you like to continue?"):
                    raise QuietExit
            else:
                log("E: connection into provider's IPFS node via swarm is not accomplished"
                    )
                raise Exception(e)
        else:
            log(f"{output} {ok()}")
    except Exception as e:
        print_tb(e)
        raise e
def run_stdout_to_file(cmd, path, mode="w") -> None:
    """Run command and pipe its stdout into the given file.

    :param cmd: command argument list
    :param path: file that stdout is redirected into
    :param mode: open mode for `path` ("w" truncate, "a" append)
    :raises Exception: when the command exits non-zero or stderr contains
        an ``error:`` marker
    """
    p, output, error = popen_communicate(cmd, stdout_fn=path, mode=mode)
    if p.returncode != 0 or (isinstance(error, str) and "error:" in error):
        _cmd = " ".join(cmd)
        log(f"\n{_cmd}", "red")
        # stdout went into `path`, so `output` is typically empty; include
        # stderr too (the old message also wrongly blamed "scontrol" even
        # though this helper is generic)
        raise Exception(f"E: command failed:\n{output}\n{error}")

    # log(f"## writing into path({path}) is completed")
    run(["sed", "-i", "s/[ \t]*$//", path])  # remove trailing whitespaces with sed
def run(port=8547, hardfork_name="istanbul"):
    """Run ganache daemon on the background.

    https://stackoverflow.com/a/8375012/2402577

    :param port: TCP port ganache-cli listens on
    :param hardfork_name: ethereum hardfork rules to emulate
    """
    print(f"## Running Ganache CLI on port={port}")
    with daemon.DaemonContext():
        cmd = [
            "ganache-cli",
            "--port",
            str(port),  # subprocess argv entries must be strings, not int
            "--hardfork",
            hardfork_name,
            "--gasLimit",
            "6721975",
            "--accounts",
            "10",
            # "--blockTime",
            # cfg.BLOCK_DURATION,
            "--allowUnlimitedContractSize",
        ]
        popen_communicate(cmd, env.GANACHE_LOG)
def run():
    """Run ipfs daemon.

    cmd: ipfs daemon  # --mount

    __ https://stackoverflow.com/a/8375012/2402577
    __ https://gist.github.com/SomajitDey/25f2f7f2aae8ef722f77a7e9ea40cc7c#gistcomment-4022998
    """
    IPFS_BIN = "/usr/local/bin/ipfs"
    log("==> Running [green]IPFS[/green] daemon")
    if not os.path.isfile(config.env.IPFS_LOG):
        open(config.env.IPFS_LOG, "a").close()

    with daemon.DaemonContext():
        # subprocess env values should be plain strings; build the mapping
        # once and add the private-network flag only when required
        _env = {"IPFS_PATH": str(Path.home().joinpath(".ipfs"))}
        if cfg.IS_PRIVATE_IPFS:
            _env["LIBP2P_FORCE_PNET"] = "1"

        popen_communicate([IPFS_BIN, "daemon", "--routing=none"], stdout_fn=config.env.IPFS_LOG, _env=_env)
def remove_user(user_name, user_dir=None):
    """Delete a user account from the system and from Slurm's accounting.

    Equivalent manual steps (for test purposes):
        sudo userdel $USERNAME
        sudo rm -rf $BASEDIR/$USERNAME
        sacctmgr remove user where user=$USERNAME --immediate

    :param user_name: account name to delete
    :param user_dir: optional home/base directory to remove afterwards
    :raises Exception: when `sacctmgr` fails for a reason other than the
        user already being absent ("Nothing deleted")
    """
    run(["sudo", "userdel", "--force", user_name])
    sacctmgr_cmd = [
        "sacctmgr", "remove", "user", "where", f"user={user_name}", "--immediate"
    ]
    proc, output, *_ = popen_communicate(sacctmgr_cmd)
    # a missing user reports "Nothing deleted" — treat that as success
    if proc.returncode != 0 and "Nothing deleted" not in output:
        raise Exception(f"E: sacctmgr remove error: {output}")

    if user_dir:
        _remove(user_dir)
def _upload_results(encoded_share_token, output_file_name):
    r"""Upload results into Eudat using curl.

    * How to upload files into shared b2drop.eudat(owncloud) repository using curl?
    __ https://stackoverflow.com/a/44556541/2402577

    * commands(s):
    curl -X PUT -H \'Content-Type: text/plain\' -H \'Authorization: Basic \'$encoded_share_token\'==\' \
        --data-binary \'@result-\'$providerID\'-\'$index\'.tar.gz\' \
        https://b2drop.eudat.eu/public.php/webdav/result-$providerID-$index.tar.gz

    curl --fail -X PUT -H 'Content-Type: text/plain' -H 'Authorization: Basic \
        'SjQzd05XM2NNcFoybk.Write'==' --data-binary '@0b2fe6dd7d8e080e84f1aa14ad4c9a0f_0.txt' \
        https://b2drop.eudat.eu/public.php/webdav/result.txt
    """
    cmd = [
        "curl",
        "--fail",
        "-X",
        "PUT",
        "-H",
        "Content-Type: text/plain",
        "-H",
        f"Authorization: Basic {encoded_share_token}",
        "--data-binary",
        f"@{output_file_name}",
        f"https://b2drop.eudat.eu/public.php/webdav/{output_file_name}",
        "-w",
        "%{http_code}\n"
        # "-v"  # verbose
    ]
    # when echoing the command, wrap the arguments containing spaces or
    # specials in double quotes so the logged line is copy-paste runnable
    cmd_temp = cmd.copy()
    for idx in (5, 7, 9, 10):
        cmd_temp[idx] = f'"{cmd[idx]}" \ \n '

    cmd_str = " ".join(cmd_temp)
    log(f"==> cmd:\n{cmd_str}")
    return popen_communicate(cmd)
def is_initialized(path) -> bool:
    """Return True when the given path lies inside a git working tree.

    __ https://stackoverflow.com/a/16925062/2402577
    """
    with cd(path):
        try:
            *_, output, err = popen_communicate(
                ["git", "rev-parse", "--is-inside-work-tree"])  # noqa
            if output != "true":
                return False

            #: checks is the give path top git folder
            git.Repo(".", search_parent_directories=False)
            return True
        except InvalidGitRepositoryError as e:
            log(f"warning: InvalidGitRepositoryError at path {e}")
            return False
        except Exception as e:
            log(f"warning: {e}")
            return False
def start_call(job_key, index, slurm_job_id):
    """Run when slurm job launches.

    Verifies the slurm job exists, extracts its StartTime, converts it to
    a unix timestamp, and then tries (up to 10 attempts) to mark the job
    as RUNNING both on-chain and in mongo.

    * cmd1: scontrol show job slurm_job_id | \
           grep 'StartTime'| grep -o -P '(?<=StartTime=).*(?= E)'

    * cmd2: date -d 2018-09-09T18:38:29 +"%s"

    :param job_key: key identifying the job
    :param index: job index
    :param slurm_job_id: slurm's numeric job id
    """
    Ebb = cfg.Ebb
    # redirect this run's logging into a per-job transaction file
    _log.ll.LOG_FILENAME = env.LOG_PATH / "transactions" / env.PROVIDER_ID / f"{job_key}_{index}.txt"
    _log.ll.IS_PRINT = False
    log(f"~/ebloc-broker/broker/start_code.py {job_key} {index} {slurm_job_id}")
    job_id = 0  # TODO: should be obtained from the user's input
    # bail out early when slurm does not know this job id
    _, _, error = popen_communicate(["scontrol", "show", "job", slurm_job_id])
    if "slurm_load_jobs error: Invalid job id specified" in str(error):
        log(f"E: {error}")
        sys.exit(1)

    # shell-pipeline equivalent of cmd1 above: scontrol | grep | grep -oP
    p1 = Popen(["scontrol", "show", "job", slurm_job_id], stdout=PIPE)
    p2 = Popen(["grep", "StartTime"], stdin=p1.stdout, stdout=PIPE)
    p1.stdout.close()
    p3 = Popen(
        ["grep", "-o", "-P", "(?<=StartTime=).*(?= E)"],
        stdin=p2.stdout,
        stdout=PIPE,
    )
    p2.stdout.close()
    date = p3.communicate()[0].decode("utf-8").strip()
    # cmd2: convert the ISO timestamp into unix epoch seconds; `date`
    # wraps the value in single quotes, hence the strip("'")
    start_time = check_output(["date", "-d", date, "+'%s'"]).strip().decode("utf-8").strip("'")
    log(f"{env.EBLOCPATH}/broker/eblocbroker_scripts/set_job_status_running.py {job_key} {index} {job_id} {start_time}")
    for attempt in range(10):
        if attempt > 0:
            # back off between retries; chain state may need time to settle
            log(f"warning: sleeping for {cfg.BLOCK_DURATION * 2} ...")
            time.sleep(cfg.BLOCK_DURATION * 2)

        try:
            tx = Ebb.set_job_status_running(job_key, index, job_id, start_time)
            tx_hash = Ebb.tx_id(tx)
            log(f"tx_hash={tx_hash}", "bold")
            d = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            log(f"==> set_job_status_running_started {start_time} | attempt_date={d}")
            # mirror the tx hash into mongo so other components can track it
            log("## mongo.set_job_status_running_tx ", end="")
            if Ebb.mongo_broker.set_job_status_running_tx(str(job_key), int(index), str(tx_hash)):
                log(ok())
            else:
                log(br("FAILED"))

            return
        except Exception as e:
            log(f"## attempt={attempt}: {e}")
            # these errors are permanent (already set / reverted) — retrying
            # would never succeed, so abort immediately
            if (
                "Execution reverted" in str(e)
                or "Transaction with the same hash was already imported" in str(e)
                or "If you wish to broadcast, include `allow_revert:True`" in str(e)
            ):
                log(f"warning: {e}")
                sys.exit(1)

    log("E: start_code.py failed at all the attempts, abort")
    sys.exit(1)