def __call__(self, *args, **kwargs):
    # use a raw string so \d and \s are taken as regex escapes
    progress_comment_re = re.compile(r""".+prog_num=(?P<progress>\d+).+\s+$""")
    py_batch_with_timings = self.path_to_py_batch.with_suffix(".timings.py")
    last_progress_reported = 0
    with utils.utf8_open_for_read(self.path_to_py_batch) as rfd, utils.utf8_open_for_write(py_batch_with_timings, "w") as wfd:
        for line in rfd.readlines():
            line_to_print = line
            match = progress_comment_re.fullmatch(line)
            if match:
                progress_num = int(match.group("progress"))
                if progress_num > last_progress_reported:  # some items have the same progress num, so report only the first
                    last_progress_reported = progress_num
                    progress_time = pybatch.PythonBatchCommandBase.runtime_duration_by_progress.get(progress_num, None)
                    if progress_time is not None:
                        progress_time_str = convertSeconds(progress_time)
                    else:
                        progress_time_str = '?'
                    line_to_print = f"""{line.rstrip()} # {progress_time_str}\n"""
            wfd.write(line_to_print)

        sync_timing_config_var_name = "__TIMING_SYNC_SEC__"
        if sync_timing_config_var_name in config_vars:
            bytes_to_download = config_vars['__NUM_BYTES_TO_DOWNLOAD__'].int()
            if bytes_to_download:
                download_time_sec = config_vars[sync_timing_config_var_name].float()
                bytes_per_second = int(bytes_to_download / download_time_sec)
                sync_timing_line = f"# downloaded {bytes_to_download} bytes in {convertSeconds(download_time_sec)}, {bytes_per_second} bytes per second\n"
                wfd.write(sync_timing_line)

        for stage in ('copy', 'remove', 'doit'):
            stage_timing_config_var_name = f"__TIMING_{stage}_SEC__".upper()
            if stage_timing_config_var_name in config_vars:
                stage_time_sec = config_vars[stage_timing_config_var_name].float()
                stage_timing_line = f"# {stage} time {convertSeconds(stage_time_sec)}\n"
                wfd.write(stage_timing_line)
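# Illustration (not from the source; the exact batch-line format is an assumption): a line carrying
# a "prog_num=<N>" marker gets its measured duration appended as a trailing comment, so a line that
# originally ended with "... prog_num=123 ..." might be rewritten as:
#     <original line> # 0:00:02
# and the summary lines appended after the loop might read:
#     # downloaded 1048576 bytes in 0:00:08, 131072 bytes per second
#     # copy time 0:01:12
# Durations and byte counts above are invented; the duration formatting comes from convertSeconds().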
def run_batch_file(self):
    if self.out_file_realpath.endswith(".py"):
        with utils.utf8_open_for_read(self.out_file_realpath, 'r') as rfd:
            py_text = rfd.read()
            py_compiled = compile(py_text, os.fspath(self.out_file_realpath), mode='exec', flags=0, dont_inherit=False, optimize=2)
            exec(py_compiled, globals())
    else:
        from subprocess import Popen
        p = Popen([self.out_file_realpath], executable=self.out_file_realpath, shell=False)
        stdout, stderr = p.communicate()
        if stdout:
            print(stdout)
        if stderr:
            print(stderr, file=sys.stderr)
        return_code = p.returncode
        if return_code != 0:
            raise SystemExit(self.out_file_realpath + " returned exit code " + str(return_code))
def read_help_file(self, help_file_path):
    with utils.utf8_open_for_read(help_file_path) as open_file:
        for a_node in yaml.compose_all(open_file):
            if a_node.isMapping():
                for topic_name, topic_items_node in a_node.items():
                    for item_name, item_value_node in topic_items_node.items():
                        new_item = HelpItemYaml(item_name, item_value_node)
                        self.add_item(new_item, topic_name)
def exec_script_file(self, file_name):
    with self.transaction("exec_script_file_" + file_name) as curs:
        if os.path.isfile(file_name):
            script_file_path = Path(file_name)
        else:
            script_file_path = self.ddl_files_dir.joinpath(file_name)
        with utils.utf8_open_for_read(script_file_path, "r") as rfd:
            ddl_text = rfd.read()
        curs.executescript(ddl_text)
def __call__(self, *args, **kwargs) -> None:
    pybatch.PythonBatchCommandBase.__call__(self, *args, **kwargs)
    if self.config_files is not None:
        reader = ConfigVarYamlReader(config_vars)
        for config_file in self.config_files:
            reader.read_yaml_file(config_file)
    with utils.utf8_open_for_read(self.unresolved_file, "r") as rfd:
        text_to_resolve = rfd.read()
    resolved_text = config_vars.resolve_str(text_to_resolve)
    with utils.utf8_open_for_write(self.resolved_file, "w") as wfd:
        wfd.write(resolved_text)
def __call__(self, *args, **kwargs):
    PythonBatchCommandBase.__call__(self, *args, **kwargs)
    if self.config_files:
        for config_file in self.config_files:
            config_file = utils.ExpandAndResolvePath(config_file)
            self.read_yaml_file(config_file)
    self.python_file = utils.ExpandAndResolvePath(self.python_file)
    with utils.utf8_open_for_read(self.python_file, 'r') as rfd:
        py_text = rfd.read()
        py_compiled = compile(py_text, os.fspath(self.python_file), mode='exec', flags=0, dont_inherit=False, optimize=2)
        exec(py_compiled, globals())
def __call__(self, *args, **kwargs) -> None:
    # fill the iid_to_svn_item_t table
    self.info_map_table.populate_IIDToSVNItem()

    # get the list of info map file names
    info_map_to_item = dict()
    all_info_map_names = self.items_table.get_unique_detail_values('info_map')
    for infomap_file_name in all_info_map_names:
        info_map_file_path = self.work_folder.joinpath(infomap_file_name)
        if info_map_file_path.is_file():
            log.info(f"{infomap_file_name} was found so no need to create it")
            # file already exists, probably copied from the "Common" repository
            # just checking that the file is also zipped
            zip_infomap_file_name = config_vars.resolve_str(infomap_file_name+"$(WZLIB_EXTENSION)")
            zip_info_map_file_path = self.work_folder.joinpath(zip_infomap_file_name)
            if not zip_info_map_file_path.is_file():
                raise FileNotFoundError(f"found {info_map_file_path} but not {zip_info_map_file_path}")
        else:
            self.info_map_table.mark_items_required_by_infomap(infomap_file_name)
            info_map_items = self.info_map_table.get_required_items()
            info_map_to_item[infomap_file_name] = info_map_items

    files_to_add_to_default_info_map = list()  # the named info_map files and their wzip version should be added to the default info_map
    # write each info map to file
    for infomap_file_name, info_map_items in info_map_to_item.items():
        if info_map_items:  # could be that no items are linked to the info map file
            info_map_file_path = self.work_folder.joinpath(infomap_file_name)
            self.info_map_table.write_to_file(in_file=info_map_file_path, items_list=info_map_items, field_to_write=self.fields_relevant_to_info_map)
            files_to_add_to_default_info_map.append(info_map_file_path)

            zip_infomap_file_name = config_vars.resolve_str(infomap_file_name+"$(WZLIB_EXTENSION)")
            zip_info_map_file_path = self.work_folder.joinpath(zip_infomap_file_name)
            with Wzip(info_map_file_path, self.work_folder, own_progress_count=0) as wzipper:
                wzipper()
            files_to_add_to_default_info_map.append(zip_info_map_file_path)

    # add the default info map
    default_info_map_file_name = str(config_vars["MAIN_INFO_MAP_FILE_NAME"])
    default_info_map_file_path = self.work_folder.joinpath(default_info_map_file_name)
    info_map_items = self.info_map_table.get_items_for_default_infomap()
    self.info_map_table.write_to_file(in_file=default_info_map_file_path, items_list=info_map_items, field_to_write=self.fields_relevant_to_info_map)
    with Wzip(default_info_map_file_path, self.work_folder, own_progress_count=0) as wzipper:
        wzipper()

    # add a line to default info map for each non default info_map created above
    # the file is appended to, so open it for writing in "a" mode
    with utils.utf8_open_for_write(default_info_map_file_path, "a") as wfd:
        for file_to_add in files_to_add_to_default_info_map:
            file_checksum = utils.get_file_checksum(file_to_add)
            file_size = file_to_add.stat().st_size
            # todo: make path relative
            line_for_main_info_map = f"instl/{file_to_add.name}, f, {config_vars['TARGET_REPO_REV'].str()}, {file_checksum}, {file_size}\n"
            wfd.write(line_for_main_info_map)
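# Illustration (not from the source): each line appended to the default info map follows the
# "path, flags, repo-rev, checksum, size" layout of the f-string above. With invented values
# (the file name, repo rev, checksum and size are all made up) one such line might read:
#     instl/Audio_info_map.txt, f, 42, da39a3ee5e6b4b0d3255bfef95601890afd80709, 18324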
def prepare_command_list_from_file(self):
    command_lines = list()
    for config_file in config_vars["__CONFIG_FILE__"].list():
        with utils.utf8_open_for_read(os.fspath(config_file), "r") as rfd:
            command_lines.extend(rfd.readlines())

    command_list = list()
    for command_line in command_lines:
        resolved_command_line = config_vars.resolve_str(command_line.strip())
        argv = shlex.split(resolved_command_line)
        command_list.append(argv)
    return command_list
def plugin_bundle(in_os, in_path: Path):
    retVal = None
    xml_path = in_path.joinpath('Contents', 'Info.xml')
    if xml_path.exists():
        with utils.utf8_open_for_read(xml_path, "r") as rfd:
            info_xml = rfd.read()
        match = plugin_version_and_guid_re.match(info_xml)
        if match:
            retVal = (in_path, match['version'], match['guid'])
    else:
        if in_os == 'Mac':
            retVal = Mac_bundle(in_os, in_path)
        elif in_os == 'Win':
            retVal = Win_bundle(in_os, in_path)
    return retVal
def __call__(self, *args, **kwargs):
    PythonBatchCommandBase.__call__(self, *args, **kwargs)
    commands = list()
    resolved_config_file = utils.ExpandAndResolvePath(self.config_file)
    self.doing = f"""ParallelRun reading config file '{resolved_config_file}'"""
    with utils.utf8_open_for_read(resolved_config_file, "r") as rfd:
        for line in rfd:
            line = line.strip()
            if line and line[0] != "#":
                args = shlex.split(line)
                commands.append(args)
    try:
        self.doing = f"""ParallelRun, config file '{resolved_config_file}', running with {len(commands)} processes in parallel"""
        utils.run_processes_in_parallel(commands, self.shell)
    except SystemExit as sys_exit:
        if sys_exit.code != 0:
            raise
def __call__(self, *args, **kwargs):
    PythonBatchCommandBase.__call__(self, *args, **kwargs)
    commands = list()
    resolved_config_file = utils.ExpandAndResolvePath(self.config_file)
    self.doing = f"""{self.get_action_name()} reading config file '{resolved_config_file}'"""
    with utils.utf8_open_for_read(resolved_config_file, "r") as rfd:
        for line in rfd:
            line = line.strip()
            if line and line[0] != "#":
                args = shlex.split(line)
                commands.append(args)
    try:
        self.doing = f"""{self.get_action_name()}, config file '{resolved_config_file}', running with {len(commands)} processes in parallel"""
        utils.run_processes_in_parallel(commands, self.shell)
    except SystemExit as sys_exit:
        if sys_exit.code != 0:
            if "curl" in commands[0]:
                err_msg = utils.get_curl_err_msg(sys_exit.code)
                raise Exception(err_msg)
            else:
                raise
    finally:
        self.increment_progress()
def __call__(self, *args, **kwargs) -> None:
    pybatch.PythonBatchCommandBase.__call__(self, *args, **kwargs)
    with config_vars.push_scope_context() as scope_context:
        if self.temp_config_vars:
            config_vars.update(self.temp_config_vars)
        if self.config_files is not None:
            reader = ConfigVarYamlReader(config_vars)
            for config_file in self.config_files:
                reader.read_yaml_file(config_file)
        with utils.utf8_open_for_read(self.unresolved_file, "r") as rfd:
            text_to_resolve = rfd.read()
        resolved_text = config_vars.resolve_str(text_to_resolve)
        if self.raise_if_unresolved:
            # any $(VAR_NAME) style reference left after resolution is an unresolved config var
            unresolved_re = re.compile(r"""\$\(.*?\)""")
            all_unresolved = unresolved_re.findall(resolved_text)
            if all_unresolved:
                unresolved_references = ", ".join(list(set(all_unresolved)))
                raise ValueError(f"unresolved config_vars in {self.unresolved_file}: {unresolved_references}")
        with utils.utf8_open_for_write(self.resolved_file, "w") as wfd:
            wfd.write(resolved_text)
def __call__(self, *args, **kwargs) -> None:
    pybatch.PythonBatchCommandBase.__call__(self, *args, **kwargs)
    with utils.utf8_open_for_read(self.file_path_to_read, 'r') as f:
        value = f.read()
    value = value.strip()
    config_vars[self.var_name] = value
def do_run_process(self):
    """ Run the list of processes specified in the input file.
        The input file can contain two kinds of processes:
        1. a command line, e.g. ls /etc
        2. an echo statement, e.g. echo "a message"
        Each line can also be followed by ">" or ">>" and a path to a file, in which case output
        from the process or echo goes to that file; ">" opens the file in "w" mode, ">>" in "a" mode.
        If the --abort-file argument is passed to run-process, the specified file is watched, and
        if and when it no longer exists, the currently running subprocess is aborted and the
        remaining processes are not launched.
    """
    self.setup_abort_file_monitoring()

    list_of_argv = list()
    if "__MAIN_INPUT_FILE__" in config_vars:  # read commands from a file
        file_with_commands = config_vars["__MAIN_INPUT_FILE__"]
        with utils.utf8_open_for_read(file_with_commands, "r") as rfd:
            for line in rfd.readlines():
                list_of_argv.append(shlex.split(line))
    else:  # read a command from argv
        list_of_argv.append(config_vars["RUN_PROCESS_ARGUMENTS"].list())

    RunProcessInfo = namedtuple('RunProcessInfo', ['process_name', 'argv', 'redirect_open_mode', 'redirect_path', 'stderr_means_err'])
    list_of_process_to_run_with_redirects = list()

    # find redirects
    for run_process_info in list_of_argv:
        stderr_means_err = True
        if "2>&1" in run_process_info:
            stderr_means_err = False
            run_process_info.remove("2>&1")
        if len(run_process_info) >= 3 and run_process_info[-2] in (">", ">>"):
            list_of_process_to_run_with_redirects.append(
                RunProcessInfo(process_name=run_process_info[0].strip(),
                               argv=run_process_info[1:-2],
                               redirect_open_mode={">": "w", ">>": "a"}[run_process_info[-2]],
                               redirect_path=run_process_info[-1],
                               stderr_means_err=stderr_means_err))
        else:
            list_of_process_to_run_with_redirects.append(
                RunProcessInfo(process_name=run_process_info[0].strip(),
                               argv=run_process_info[1:],
                               redirect_open_mode=None,
                               redirect_path=None,
                               stderr_means_err=stderr_means_err))

    for run_process_info in list_of_process_to_run_with_redirects:
        redirect_file = None
        if run_process_info.redirect_path:
            redirect_file = open(run_process_info.redirect_path, run_process_info.redirect_open_mode)
        print(run_process_info)
        if run_process_info.process_name.lower() == "echo":
            str_to_echo = " ".join(run_process_info.argv)
            if redirect_file:
                redirect_file.write(f"{str_to_echo}\n")
            else:
                sys.stdout.write(f"{str_to_echo}\n")
        else:
            log.info(f"Start running {run_process_info.process_name} with argv {run_process_info.argv}")
            with Subprocess(run_process_info.process_name, *run_process_info.argv,
                            out_file=redirect_file,
                            stderr_means_err=run_process_info.stderr_means_err,
                            own_progress_count=0) as sub_proc:
                sub_proc()
            log.info(f"Done running {run_process_info.process_name} with argv {run_process_info.argv}")
        if redirect_file:
            redirect_file.close()
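# Illustration (not from the source): a hypothetical input file for run-process, exercising the
# parsing above -- plain command lines, an echo statement, ">"/">>" redirects and the "2>&1"
# marker (commands and file names are made up):
#
#     ls /etc > /tmp/etc_listing.txt
#     echo "copy phase done" >> /tmp/run_log.txt
#     some_tool --verbose 2>&1 > /tmp/tool_output.txt
#
# Each line is tokenized with shlex.split(), so quoting follows shell-like rules; the third line
# would run with stderr_means_err=False because the "2>&1" token is stripped before the redirect
# is detected.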