def create_nas_job_script(is_small=False):
    """Create NPB3.3-SER slurm job script to be submitted.

    Picks a random NPB benchmark, writes a bash batch script that builds
    and times it for CLASS=A (plus CLASS=B and CLASS=C unless *is_small*),
    then strips NULL characters from the generated file.

    Args:
        is_small: when True, only the CLASS=A build/run is emitted.

    Returns:
        The chosen benchmark name (one of bt, cg, ep, is, lu, sp, ua).
    """
    benchmark_names = ["bt", "cg", "ep", "is", "lu", "sp", "ua"]
    benchmark_name = random.choice(benchmark_names)
    output_fn = "output.log"
    # random tag appended as a trailing comment so consecutive scripts differ
    hash_str = random.getrandbits(128)
    fn = Path.home() / "test_eblocbroker" / "NPB3.3-SER_source_code" / "run.sh"
    # context manager guarantees the handle is closed even if a write fails
    with open(fn, "w+") as f:
        f.write("#!/bin/bash\n")
        f.write("#SBATCH -o slurm.out # STDOUT\n")
        f.write("#SBATCH -e slurm.err # STDERR\n")
        f.write("#SBATCH --mail-type=ALL\n\n")
        f.write(f"make {benchmark_name} CLASS=A > {output_fn}\n")
        f.write(f"/usr/bin/time -v bin/{benchmark_name}.A.x >> {output_fn}\n")
        if not is_small:
            f.write(f"make {benchmark_name} CLASS=B >> {output_fn}\n")
            f.write(f"/usr/bin/time -v bin/{benchmark_name}.B.x >> {output_fn}\n")
            f.write(f"make {benchmark_name} CLASS=C >> {output_fn}\n")
            f.write(f"/usr/bin/time -v bin/{benchmark_name}.C.x >> {output_fn}\n")

        f.write(f"# {hash_str}\n")

    run(["sed", "-i", r"s/\x0//g", fn])  # remove NULL characters from the SBATCH file
    return benchmark_name
def extract_gzip(filename):
    """Decompress the given ``.gz`` file in place.

    First tries ``gunzip --force <filename>``; if that fails, falls back
    to ``zcat <filename>`` and redirects its stdout into a file next to
    the input with the ``.gz`` suffix stripped.

    NOTE(review): the original source contained the literal placeholder
    ``(unknown)`` inside both f-strings, which made the commands no-ops;
    restored to ``{filename}``, which both branches clearly intend.
    """
    try:
        args = shlex.split(f"gunzip --force {filename}")
        run(args)
    except Exception:  # narrowed from bare except; fall back to zcat
        args = shlex.split(f"zcat {filename}")
        base_dir = os.path.dirname(filename)
        base_name = os.path.basename(filename).replace(".gz", "")
        popen_communicate(args, f"{base_dir}/{base_name}")
def umount(self, data_hashes):
    """Force-unmount the mount point of each given data hash, best effort.

    Hashes may arrive as ``bytes`` or ``str``; non-existent mount
    directories are skipped, and any unmount failure is suppressed.
    """
    for entry in data_hashes:
        hash_str = entry.decode("utf-8") if isinstance(entry, bytes) else entry
        mount_point = f"{self.path_to}/{hash_str}"
        if not os.path.isdir(mount_point):
            continue

        with suppress(Exception):
            run(["sudo", "umount", "-f", mount_point], is_quiet=True)
def generate_md5sum(path: str) -> str:
    """Return the md5 checksum of the file or folder at *path*.

    Folders are delegated to the project's ``generate_md5sum_for_folder.sh``
    helper script; files are hashed directly with :mod:`hashlib`, removing
    the dependency on the external ``md5sum`` binary and its output parsing.

    Raises:
        Exception: if *path* does not exist.
    """
    import hashlib  # local import: keeps the file's top-level imports untouched

    if os.path.isdir(path):
        return run([env.BASH_SCRIPTS_PATH / "generate_md5sum_for_folder.sh", path])

    if os.path.isfile(path):
        digest = hashlib.md5()
        with open(path, "rb") as f:
            # stream in chunks so large files are not loaded into memory at once
            for chunk in iter(lambda: f.read(65536), b""):
                digest.update(chunk)

        return digest.hexdigest()

    raise Exception(f"{path} does not exist")
def untar(tar_file, extract_to):
    """Untar given tar file.

    umask can be ignored by using the -p (--preserve) option;
    --no-overwrite-dir preserves metadata of existing directories.

    tar interprets the next argument after -f as the file name of the
    tar file, so the p must come before the f.
    """
    tar_name = os.path.basename(tar_file)
    # the tar file itself and `.git` may legitimately sit in the target dir
    expected = [".git", tar_name]
    if not is_dir_empty(extract_to):
        leftovers = [entry for entry in os.listdir(extract_to) if entry not in expected]
        if leftovers:
            log(f"==> {tar_file} is already extracted into\n {extract_to}")
            return

    cmd = [
        "tar",
        "--warning=no-timestamp",  # suppress "timestamp in the future" noise
        "-xvpf",
        tar_file,
        "-C",
        extract_to,
        "--no-overwrite-dir",
        "--strip",
        "1",
    ]
    run(cmd)
def terminate_killall(msg="", is_traceback=True, lock=None):
    """Terminate the Driver python script and all the dependent python programs to it.

    Args:
        msg: optional message to log before terminating.
        is_traceback: when True, print the current traceback.
        lock: optional lock object to close before exiting.
    """
    if msg:
        log(f"{WHERE(1)} Terminated: ", "bold red", end="")
        log(msg, "bold")

    if is_traceback:
        print_tb()

    if lock:
        with suppress(Exception):
            lock.close()

    if config.driver_cancel_process:
        # Following line is added, in case ./killall.sh does not work due to
        # sudo. It sends the kill signal to all the process groups, pid is
        # obtained from the global variable
        os.killpg(os.getpgid(config.driver_cancel_process.pid), signal.SIGTERM)

    try:
        # kill all the dependent processes and exit
        run([env.BASH_SCRIPTS_PATH / "killall.sh"])
    except Exception:  # narrowed from bare except so SystemExit/KeyboardInterrupt propagate
        sys.exit(1)
def run_ipfs_daemon(_is_print=False):
    """Check that does IPFS run on the background or not.

    If IPFS is not already running, launches the project's
    ``run_ipfs_daemon.py`` helper, echoes the IPFS log, and re-checks.
    Returns True when IPFS is (or becomes) reachable, False otherwise.
    """
    if is_ipfs_on(_is_print):
        return True

    log("warning: [green]IPFS[/green] does not work on the background")
    log("#> Starting [green]IPFS daemon[/green] on the background")
    # helper script is expected to daemonize ipfs; its stdout is captured
    output = run(["python3", env.EBLOCPATH / "broker" / "python_scripts" / "run_ipfs_daemon.py"])
    # NOTE(review): the loop body unconditionally returns on its first
    # iteration (True or False), so `while True` runs at most once — looks
    # like a retry loop was intended; confirm against upstream history.
    while True:
        time.sleep(1)
        with open(env.IPFS_LOG, "r") as content_file:
            log(content_file.read().rstrip(), "bold blue")

        time.sleep(5)  # in case sleep for 5 seconds
        if output:
            log(output.rstrip(), "bold blue")

        if is_ipfs_on(is_print=True):
            return True

        return False
def link(self, path, destination, is_read_only=False):
    """Make links between folders.

    You can create a read-only bind-mount(https://lwn.net/Articles/281157/).

    mount --bind /path/to/source/ /path/to/dest/
    mount -o bind,remount,ro /path/to/dest

    __ https://askubuntu.com/a/243390/660555
    """
    if is_read_only:
        mkdir(destination)
        # bind-mount first, then remount the destination read-only
        for cmd in (
            ["sudo", "mount", "--bind", path, destination],
            ["sudo", "mount", "-o", "bind,remount,ro", destination],
        ):
            run(cmd)
    else:
        run(["ln", "-sfn", path, destination])

    log(f" ┌── {path}", "bold green")
    log(f" └─> {destination}", "bold yellow")
def create_cppr_job_script():
    """Create cppr slurm job script to be submitted.

    Picks one small and one medium registered data hash, writes a bash
    batch script that runs ``cppr -a pr`` over the ``*.max`` files in the
    corresponding data folders, then strips NULL characters from the
    generated file.

    Returns:
        Tuple of (hash_small_data, hash_med_data), the chosen hashes.
    """
    registered_data_hashes_small = [
        "b6aaf03752dc68d625fc57b451faa2bf",
        "f1de03edab51f281815c3c1e5ecb88c6",
        "082d2a71d86a64250f06be14c55ca27e",
        "03919732a417cb1d14049844b9de0f47",
        "983b9fe8a85b543dd5a4a75d031f1091",
        "f71df9d36cd519d80a3302114779741d",
        "c0fee5472f3c956ba759fd54f1fe843e",
        "63ffd1da6122e3fe9f63b1e7fcac1ff5",
        "9e8918ff9903e3314451bf2943296d31",
        "eaf488aea87a13a0bea5b83a41f3d49a",
        "e62593609805db0cd3a028194afb43b1",
        "3b0f75445e662dc87e28d60a5b13cd43",
        "ebe53bd498a9f6446cd77d9252a9847c",
        "f82aa511f8631bfc9a82fe6fa30f4b52",
        "761691119cedfb9836a78a08742b14cc",
        "f93b9a9f63447e0e086322b8416d4a39",
    ]
    registered_data_hashes_medium = [
        "050e6cc8dd7e889bf7874689f1e1ead6",
        "9d5d892a63b5758090258300a59eb389",
        "779745f315060d1bc0cd44b7266fb4da",
        "fe801973c5b22ef6861f2ea79dc1eb9c",
        "0d6c3288ef71d89fb93734972d4eb903",
        "4613abc322e8f2fdeae9a5dd10f17540",
        "dd0fbccccf7a198681ab838c67b68fbf",
        "45281dfec4618e5d20570812dea38760",
        "fa64e96bcee96dbc480a1495bddbf53c",
        "8f6faf6cfd245cae1b5feb11ae9eb3cf",
        "1bfca57fe54bc46ba948023f754521d6",
    ]
    hash_small_data = random.choice(registered_data_hashes_small)
    hash_med_data = random.choice(registered_data_hashes_medium)
    fn = Path.home() / "test_eblocbroker" / "run_cppr" / "run.sh"
    # context manager guarantees the handle is closed even if a write fails
    with open(fn, "w+") as f:
        f.write("#!/bin/bash\n")
        f.write("#SBATCH -o slurm.out # STDOUT\n")
        f.write("#SBATCH -e slurm.err # STDERR\n")
        f.write("#SBATCH --mail-type=ALL\n\n")
        f.write("export OMP_NUM_THREADS=1\n")
        f.write("current_date=$(LANG=en_us_88591; date)\n")
        f.write(f"DATA_HASH='{hash_small_data}'\n")
        f.write("DATA1_DIR='../data_link/'$DATA_HASH'/'\n")
        f.write("echo ' * '$current_date > output.log\n")
        f.write("find $DATA1_DIR -name '*.max' -print0 | while read -d $'\\0' file\n")
        f.write("do\n")
        f.write("    echo $file >> output.log\n")
        f.write("    (/usr/bin/time -v cppr -a pr $file) >> output.log 2>&1\n")
        f.write("done\n")
        f.write(f"DATA_HASH='{hash_med_data}'\n")
        f.write("DATA2_DIR='../data_link/'$DATA_HASH'/'\n")
        f.write("echo ' * '$current_date >> output.log\n")
        f.write("find $DATA2_DIR -name '*.max' -print0 | while read -d $'\\0' file\n")
        f.write("do\n")
        f.write("    echo $file >> output.log\n")
        f.write("    (/usr/bin/time -v cppr -a pr $file) >> output.log 2>&1\n")
        f.write("done\n")
        # adding cppr to run with data hashes
        f.write("DATA_HASH='change_folder_hash'\n")
        f.write("if [[ '$DATA_HASH' != 'change_folder_hash' ]]; then\n")
        f.write("    DATA3_DIR='../data_link/'$DATA_HASH'/'\n")
        f.write("    echo ' * '$current_date >> output.log\n")
        f.write("    find $DATA3_DIR -name '*.max' -print0 | while read -d $'\\0' file\n")
        f.write("    do\n")
        f.write("        echo $file >> output.log\n")
        f.write("        (/usr/bin/time -v cppr -a pr $file) >> output.log 2>&1\n")
        f.write("    done\n")
        f.write("fi\n")
        f.write("echo ' [ DONE ] ' >> output.log\n")

    run(["sed", "-i", r"s/\x0//g", fn])  # remove NULL characters from the SBATCH file
    return hash_small_data, hash_med_data
def is_program_valid(cmd):
    """Terminate the program if the given command cannot be executed."""
    try:
        run(cmd)
    except Exception as exc:
        terminate(f"Please install {cmd[0]} or check its path.\n{exc}", is_traceback=False)
def is_dpkg_installed(package_name) -> bool:
    """Return True if the given debian package is installed (``dpkg -s`` succeeds)."""
    try:
        run(["dpkg", "-s", package_name])
    except Exception:  # narrowed from bare except so SystemExit/KeyboardInterrupt propagate
        return False

    return True
def is_npm_installed(package_name) -> bool:
    """Return True if *package_name* appears in the global npm package listing."""
    listing = run(["npm", "list", "-g", "--depth=0"])
    return package_name in listing
def is_bin_installed(name):
    """Log and re-raise if the binary *name* is not found on PATH (via ``which``)."""
    try:
        run(["which", name])
    except Exception as e:
        # fixed typo in user-facing message: "instelled" -> "installed"
        log(f"E: [green]{name}[/green] is not installed")
        raise e
def run(self):
    """Launch the broker's ``run.py`` under python3 via the ``tools.run`` wrapper."""
    tools.run(["python3", env.EBLOCPATH / "broker" / "run.py"])