def __call__(self, *args, **kwargs):
    """Write a ".timings.py" twin of the batch script.

    Each line that carries a "prog_num=N" comment is annotated with the
    measured duration for that progress number; summary lines for sync
    (bytes/second) and for the copy/remove/doit stages are appended at the end.
    Durations come from PythonBatchCommandBase.runtime_duration_by_progress.
    """
    # raw string: the original non-raw literal relied on '\d'/'\s' surviving as
    # literal backslash escapes, which is a DeprecationWarning in modern Python
    progress_comment_re = re.compile(r""".+prog_num=(?P<progress>\d+).+\s+$""")
    py_batch_with_timings = self.path_to_py_batch.with_suffix(".timings.py")
    last_progress_reported = 0
    with utils.utf8_open_for_read(self.path_to_py_batch) as rfd, utils.utf8_open_for_write(py_batch_with_timings, "w") as wfd:
        for line in rfd:  # iterate the file lazily; no need to materialize readlines()
            line_to_print = line
            match = progress_comment_re.fullmatch(line)
            if match:
                progress_num = int(match.group("progress"))
                if progress_num > last_progress_reported:  # some items have the same progress num, so report only the first
                    last_progress_reported = progress_num
                    progress_time = pybatch.PythonBatchCommandBase.runtime_duration_by_progress.get(progress_num, None)
                    if progress_time is not None:
                        progress_time_str = convertSeconds(progress_time)
                    else:
                        progress_time_str = '?'  # progress was declared but never timed
                    line_to_print = f"""{line.rstrip()} # {progress_time_str}\n"""
            wfd.write(line_to_print)
        # was an f-string with no placeholders (ruff F541) - plain literal is clearer
        sync_timing_config_var_name = "__TIMING_SYNC_SEC__"
        if sync_timing_config_var_name in config_vars:
            bytes_to_download = config_vars['__NUM_BYTES_TO_DOWNLOAD__'].int()
            if bytes_to_download:
                download_time_sec = config_vars[sync_timing_config_var_name].float()
                bytes_per_second = int(bytes_to_download / download_time_sec)
                sync_timing_line = f"# downloaded {bytes_to_download} bytes in {convertSeconds(download_time_sec)}, {bytes_per_second} bytes per second\n"
                wfd.write(sync_timing_line)
        for stage in ('copy', 'remove', 'doit'):
            stage_timing_config_var_name = f"__TIMING_{stage}_SEC__".upper()
            if stage_timing_config_var_name in config_vars:
                stage_time_sec = config_vars[stage_timing_config_var_name].float()
                stage_timing_line = f"# {stage} time {convertSeconds(stage_time_sec)}\n"
                wfd.write(stage_timing_line)
def write_history(self):
    """Persist the current GUI state (selected tab + GUI config vars) to the
    instl GUI config file as a YAML !define document."""
    # remember which notebook tab was active so it can be restored next run
    config_vars["SELECTED_TAB"] = self.notebook.tab(self.notebook.select(), option='text')
    vars_to_dump = config_vars.get("__GUI_CONFIG_FILE_VARS__", []).list()
    yaml_ready_values = config_vars.repr_for_yaml(which_vars=vars_to_dump, resolve=False, ignore_unknown_vars=True)
    wrapped_doc = aYaml.YamlDumpDocWrap(yaml_ready_values, '!define', "Definitions", explicit_start=True, sort_mappings=True)
    history_file = config_vars["INSTL_GUI_CONFIG_FILE_NAME"].str()
    with utils.utf8_open_for_write(history_file, "w") as wfd:
        aYaml.writeAsYaml(wrapped_doc, wfd)
def __call__(self, *args, **kwargs) -> None:
    """Create the repo-rev YAML file for upload.

    Collects the config vars named in REPO_REV_FILE_VARS, computes checksums
    and URLs for info_map, index and short-index files, and writes everything
    as a YAML !define document to UPLOAD_REVISION_REPO_REV_FILE.

    Raises ValueError when REPO_REV_FILE_VARS is undefined, contains secret
    variable names, or references vars that are not defined.
    """
    if "REPO_REV_FILE_VARS" not in config_vars:
        # must have a list of variable names to write to the repo-rev file
        raise ValueError("REPO_REV_FILE_VARS must be defined")
    repo_rev_vars = list(config_vars["REPO_REV_FILE_VARS"])  # list of configVars to write to the repo-rev file
    # check that the variable names from REPO_REV_FILE_VARS do not contain
    # names that must not be made public
    dangerous_intersection = set(repo_rev_vars).intersection(
        {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "PRIVATE_KEY", "PRIVATE_KEY_FILE"})
    if dangerous_intersection:
        # BUG FIX: logging does not accept print-style positional strings; extra
        # args are %-format parameters, so the old call mangled the message.
        log.warning("found %s in REPO_REV_FILE_VARS, aborting", dangerous_intersection)
        raise ValueError(f"file REPO_REV_FILE_VARS {dangerous_intersection} and so is forbidden to upload")

    # should we consider zipped files or not
    # NOTE(review): bool() of config_vars.get(...) presumably follows the config
    # var object's own truthiness, not bool("False") - confirm against ConfigVar
    use_zlib = bool(config_vars.get("USE_ZLIB", "False"))
    zip_extension = ""
    if use_zlib:
        zip_extension = config_vars.get("WZLIB_EXTENSION", ".wzip").str()

    revision_instl_folder_path = Path(config_vars["UPLOAD_REVISION_INSTL_FOLDER"])

    # create checksum for the main info_map file, either wzipped or not
    main_info_map_file_name = "info_map.txt"+zip_extension
    main_info_map_file = revision_instl_folder_path.joinpath(main_info_map_file_name)
    main_info_map_checksum = utils.get_file_checksum(main_info_map_file)

    config_vars["INFO_MAP_FILE_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+main_info_map_file_name
    config_vars["INFO_MAP_CHECKSUM"] = main_info_map_checksum

    # create checksum for the main index.yaml file, either wzipped or not
    index_file_name = "index.yaml"+zip_extension
    index_file_path = revision_instl_folder_path.joinpath(index_file_name)
    config_vars["INDEX_CHECKSUM"] = utils.get_file_checksum(index_file_path)
    config_vars["INDEX_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+index_file_name

    # short-index is never zipped
    short_index_file_name = "short-index.yaml"
    short_index_file_path = revision_instl_folder_path.joinpath(short_index_file_name)
    config_vars["SHORT_INDEX_CHECKSUM"] = utils.get_file_checksum(short_index_file_path)
    config_vars["SHORT_INDEX_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+short_index_file_name

    config_vars["INSTL_FOLDER_BASE_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl"
    config_vars["REPO_REV_FOLDER_HIERARCHY"] = "$(__CURR_REPO_FOLDER_HIERARCHY__)"

    # check that all variables are present
    # <class 'list'>: ['INSTL_FOLDER_BASE_URL', 'REPO_REV_FOLDER_HIERARCHY', 'SYNC_BASE_URL']
    missing_vars = [var for var in repo_rev_vars if var not in config_vars]
    if missing_vars:
        raise ValueError(f"{missing_vars} are missing cannot write repo rev file")

    # create yaml out of the variables
    variables_as_yaml = config_vars.repr_for_yaml(repo_rev_vars)
    repo_rev_yaml_doc = aYaml.YamlDumpDocWrap(variables_as_yaml, '!define', "", explicit_start=True, sort_mappings=True)
    repo_rev_file_path = config_vars["UPLOAD_REVISION_REPO_REV_FILE"]
    with utils.utf8_open_for_write(repo_rev_file_path, "w") as wfd:
        aYaml.writeAsYaml(repo_rev_yaml_doc, out_stream=wfd, indentor=None, sort=True)
    log.info(f"""create {repo_rev_file_path}""")
def write_copy_debug_info(self) -> None:
    """Best-effort dump of the sync folder manifest next to the echo log.

    Writes a disk listing of COPY_SOURCES_ROOT_DIR to
    "sync-folder-manifest.txt" in the ECHO_LOG_FILE folder. Any failure is
    deliberately swallowed - this is debug info only.
    """
    try:
        if not config_vars.defined('ECHO_LOG_FILE'):
            return
        echo_log_path = config_vars["ECHO_LOG_FILE"].str()
        manifest_folder, _ = os.path.split(echo_log_path)
        manifest_path = os.path.join(manifest_folder, "sync-folder-manifest.txt")
        sources_root = config_vars["COPY_SOURCES_ROOT_DIR"].str()
        with utils.utf8_open_for_write(manifest_path, "w") as wfd:
            wfd.write(utils.disk_item_listing(sources_root))
    except Exception:
        pass  # if it did not work - forget it
def __call__(self, *args, **kwargs) -> None:
    """Resolve config-var references in self.unresolved_file and write the
    result to self.resolved_file, after loading optional extra config files."""
    pybatch.PythonBatchCommandBase.__call__(self, *args, **kwargs)
    # load additional variable definitions, when supplied
    if self.config_files is not None:
        yaml_reader = ConfigVarYamlReader(config_vars)
        for one_config_file in self.config_files:
            yaml_reader.read_yaml_file(one_config_file)
    with utils.utf8_open_for_read(self.unresolved_file, "r") as source_fd:
        raw_text = source_fd.read()
    final_text = config_vars.resolve_str(raw_text)
    with utils.utf8_open_for_write(self.resolved_file, "w") as target_fd:
        target_fd.write(final_text)
def __call__(self, *args, **kwargs) -> None:
    """Build the short index YAML file: a !define doc with
    SHORT_INDEX_FILE_VARS followed by an !index doc mapping each non-builtin
    IID to its name, version(s) and guid(s)."""
    # rows hold: iid, name, version_mac, version_win, install_guid, remove_guid
    rows = self.items_table.get_data_for_short_index()
    short_index = defaultdict(dict)
    builtin_iids = list(config_vars["SPECIAL_BUILD_IN_IIDS"])
    for row in rows:
        fields = dict(row)
        iid = fields['iid']
        if iid in builtin_iids:
            continue  # builtins are never listed in the short index
        entry = short_index[iid]
        if fields['name']:
            entry['name'] = fields['name']
        mac_version = fields['version_mac']
        win_version = fields['version_win']
        if mac_version == win_version:
            # same version on both platforms - one flat 'version' field
            entry['version'] = mac_version
        else:
            if mac_version:
                entry['Mac'] = {'version': mac_version}
            if win_version:
                entry['Win'] = {'version': win_version}
        install_guid = fields['install_guid']
        if install_guid:
            remove_guid = fields['remove_guid']
            if remove_guid != install_guid:  # found uninstall gui
                entry['guid'] = [install_guid, remove_guid]
            else:
                entry['guid'] = install_guid
    defines_dict = config_vars.repr_for_yaml(
        which_vars=list(config_vars['SHORT_INDEX_FILE_VARS']),
        resolve=True,
        ignore_unknown_vars=False)
    defines_yaml_doc = aYaml.YamlDumpDocWrap(
        defines_dict, '!define', "Definitions",
        explicit_start=True, sort_mappings=True)
    index_yaml_doc = aYaml.YamlDumpDocWrap(
        value=short_index, tag="!index",
        explicit_start=True, explicit_end=False,
        sort_mappings=True, include_comments=False)
    with utils.utf8_open_for_write(self.short_index_yaml_path, "w") as wfd:
        aYaml.writeAsYaml(defines_yaml_doc, wfd)
        aYaml.writeAsYaml(index_yaml_doc, wfd)
def get_run_args(self, run_args) -> None:
    """Materialize self.shell_command_list into an executable batch file and
    append that file's path to run_args.

    The file gets a platform-appropriate extension and, on macOS, a bash
    shebang; it is made user read/write/executable.
    """
    the_lines = self.shell_command_list
    if isinstance(the_lines, str):
        the_lines = [the_lines]
    else:
        # BUG FIX: copy before mutating - the original insert(0, ...) on darwin
        # modified self.shell_command_list in place, so a second call would
        # prepend another shebang line.
        the_lines = list(the_lines)
    if sys.platform == 'darwin':
        the_lines.insert(0, "#!/usr/bin/env bash")
        batch_extension = ".command"
    elif sys.platform == "win32":
        batch_extension = ".bat"
    else:
        # BUG FIX: batch_extension was unbound on other platforms (NameError);
        # fall back to a generic shell-script extension.
        batch_extension = ".sh"
    commands_text = "\n".join(the_lines)
    batch_file_path = Path(self.dir, self.var_name + batch_extension)
    with utils.utf8_open_for_write(batch_file_path, "w") as batch_file:
        batch_file.write(commands_text)
    # owner read/write/execute so the script can actually be run
    os.chmod(batch_file.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
    run_args.append(batch_file.name)
def write_require_file(self, file_path, require_dict):
    """Write a require file: a !define doc pinning REQUIRE_REPO_REV to
    MAX_REPO_REV, followed by a !require doc with the given requirements."""
    with utils.utf8_open_for_write(file_path, "w") as wfd:
        definitions_doc = aYaml.YamlDumpDocWrap(
            {"REQUIRE_REPO_REV": config_vars["MAX_REPO_REV"].str()},
            '!define',
            "definitions",
            explicit_start=True,
            sort_mappings=True,
        )
        requirements_doc = aYaml.YamlDumpDocWrap(
            require_dict,
            '!require',
            "requirements",
            explicit_start=True,
            sort_mappings=True,
        )
        aYaml.writeAsYaml((definitions_doc, requirements_doc), wfd)
def __call__(self, *args, **kwargs) -> None:
    """Resolve config-var references in self.unresolved_file inside a pushed
    config-var scope (optionally extended by temp vars and config files) and
    write the result to self.resolved_file.

    When self.raise_if_unresolved is set, any surviving "$(...)" reference
    raises ValueError.
    """
    pybatch.PythonBatchCommandBase.__call__(self, *args, **kwargs)
    # everything below runs in its own scope so temp vars / config files
    # do not leak into the caller's config_vars
    with config_vars.push_scope_context() as scope_context:
        if self.temp_config_vars:
            config_vars.update(self.temp_config_vars)
        if self.config_files is not None:
            yaml_reader = ConfigVarYamlReader(config_vars)
            for one_config_file in self.config_files:
                yaml_reader.read_yaml_file(one_config_file)
        with utils.utf8_open_for_read(self.unresolved_file, "r") as source_fd:
            raw_text = source_fd.read()
        resolved_text = config_vars.resolve_str(raw_text)
        if self.raise_if_unresolved:
            leftover_refs = re.compile(r"""\$\(.*?\)""").findall(resolved_text)
            if leftover_refs:
                unresolved_references = ", ".join(list(set(leftover_refs)))
                raise ValueError(f"unresolved config_vars in {self.unresolved_file}: {unresolved_references}")
        with utils.utf8_open_for_write(self.resolved_file, "w") as target_fd:
            target_fd.write(resolved_text)
def __call__(self, *args, **kwargs):
    """ Normally list of arguments are calculated by calling self.get_run_args, unless kwargs["run_args"] exists. """
    PythonBatchCommandBase.__call__(self, *args, **kwargs)
    # assemble the subprocess argument list - either caller-supplied or
    # produced by self.get_run_args (which appends into the list in place)
    run_args = list()
    if "run_args" in kwargs:
        run_args.extend(kwargs["run_args"])
    else:
        self.get_run_args(run_args)
    run_args = list(map(str, run_args))  # Path objects etc. -> plain strings
    self.doing = f"""calling subprocess '{" ".join(run_args)}'"""
    if self.detach:
        # fire-and-forget: spawn and do not wait, capture nothing
        pid = os.spawnlp(os.P_NOWAIT, *run_args)
        # in https://docs.python.org/3.6/library/subprocess.html#replacing-the-os-spawn-family
        # the recommended way to replace os.spawnlp(os.P_NOWAIT,.. is by using subprocess.Popen,
        # but it does not work properly
        #pid = subprocess.Popen(run_args).pid
    else:
        if self.script:
            # a script is always run through the shell and must be one argument
            self.shell = True
            assert len(run_args) == 1
        elif self.shell and len(run_args) == 1:
            # shell=True wants a single command string on these platforms
            if sys.platform == 'darwin':  # MacOS needs help with spaces in paths
                #run_args = shlex.split(run_args[0])
                #run_args = [p.replace(" ", r"\ ") for p in run_args]
                #run_args = " ".join(run_args)
                run_args = run_args[0]
            elif sys.platform == 'win32':
                run_args = run_args[0]
        # decide where the child's stdout goes
        out_stream = None
        need_to_close_out_file = False
        if self.out_file:
            if isinstance(self.out_file, (str, os.PathLike, bytes)):
                out_stream = utils.utf8_open_for_write(self.out_file, "w")
                need_to_close_out_file = True  # we opened it, we close it
            elif hasattr(self.out_file, "write"):  # out_file is already an open file
                out_stream = self.out_file
        elif self.capture_stdout:
            # this will capture stdout in completed_process.stdout instead of writing directly to stdout
            # so objects overriding handle_completed_process will have access to stdout
            out_stream = subprocess.PIPE
        in_stream = None
        err_stream = subprocess.PIPE  # stderr is always captured for inspection below
        completed_process = subprocess.run(run_args, check=False, stdin=in_stream, stdout=out_stream, stderr=err_stream, shell=self.shell, bufsize=0)
        if need_to_close_out_file:
            out_stream.close()
        if completed_process.stderr:
            self.stderr = utils.unicodify(completed_process.stderr)
            if self.ignore_all_errors:
                # in case of ignore_all_errors redirect stderr to stdout so we know there was an error
                # but it will not be interpreted as an error by whoever is running instl
                log.info(self.stderr)
            else:
                if self.stderr_means_err:
                    log.error(self.stderr)
                    # stderr output counts as failure even on exit code 0:
                    # force a non-zero code so check_returncode() raises below
                    if completed_process.returncode == 0:
                        completed_process.returncode = 123
                else:
                    log.info(self.stderr)
        else:
            pass
        if self.ignore_all_errors:
            completed_process.returncode = 0  # neutralize any failure
        completed_process.check_returncode()  # raises CalledProcessError on non-zero
        self.handle_completed_process(completed_process)
def create_config_files(self, curl_config_file_path, num_config_files):
    """Split the pending downloads into up to num_config_files curl config
    files (plus one extra file for self.urls_to_download_last).

    Returns the list of config file paths; a None entry before the last path
    means "wait for previous downloads before starting the last file".
    Returns an empty list when there is nothing to download.
    """
    file_name_list = list()
    if self.get_num_urls_to_download() > 0:
        connect_time_out = str(config_vars.setdefault("CURL_CONNECT_TIMEOUT", "16"))
        max_time = str(config_vars.setdefault("CURL_MAX_TIME", "180"))
        retries = str(config_vars.setdefault("CURL_RETRIES", "2"))
        retry_delay = str(config_vars.setdefault("CURL_RETRY_DELAY", "8"))
        sync_urls_cookie = str(config_vars.get("COOKIE_FOR_SYNC_URLS", ""))

        actual_num_config_files = int(max(0, min(len(self.urls_to_download), num_config_files)))
        if self.urls_to_download_last:
            actual_num_config_files += 1  # one dedicated file for the "download last" urls
        num_digits = len(str(actual_num_config_files))
        file_name_list = ["-".join((os.fspath(curl_config_file_path), str(file_i).zfill(num_digits))) for file_i in range(actual_num_config_files)]

        # open the files make sure they have r/w permissions and are utf-8
        wfd_list = list()
        for file_name in file_name_list:
            wfd = utils.utf8_open_for_write(file_name, "w")
            wfd_list.append(wfd)

        # write the header in each file
        for wfd in wfd_list:
            basename = os.path.basename(wfd.name)
            if sync_urls_cookie:
                cookie_text = f"cookie = {sync_urls_cookie}\n"
            else:
                cookie_text = ""
            curl_write_out_str = CUrlHelper.curl_write_out_str
            file_header_text = f"""
insecure
raw
fail
silent
show-error
compressed
create-dirs
connect-timeout = {connect_time_out}
max-time = {max_time}
retry = {retries}
retry-delay = {retry_delay}
{cookie_text}
write-out = "Progress: ... of ...; {basename}: {curl_write_out_str}
"""
            wfd.write(file_header_text)

        last_file = None
        if self.urls_to_download_last:
            last_file = wfd_list.pop()

        # smaller files should be downloaded first so the progress bar gets
        # moving early; sort ascending by size (item[2]) - simpler than the
        # original cmp_to_key comparator with identical ordering
        sorted_by_size = sorted(self.urls_to_download, key=lambda url_item: url_item[2])
        wfd_cycler = itertools.cycle(wfd_list)
        for url, path, size in sorted_by_size:
            fixed_path = self.fix_path(path)
            wfd = next(wfd_cycler)
            wfd.write(f'''url = "{url}"\noutput = "{fixed_path}"\n\n''')

        for wfd in wfd_list:
            wfd.close()

        for url, path, size in self.urls_to_download_last:
            fixed_path = self.fix_path(path)
            last_file.write(f'''url = "{url}"\noutput = "{fixed_path}"\n\n''')
        # BUG FIX: last_file was written to after the close loop above but was
        # never closed, leaking the descriptor and risking unflushed content
        if last_file is not None:
            last_file.close()

        # insert None which means "wait" before the config file that downloads urls_to_download_last.
        # but only if there were actually download files other than urls_to_download_last.
        # it might happen that there are only urls_to_download_last - so no need to "wait".
        if last_file and len(wfd_list) > 0:
            file_name_list.insert(-1, None)
    return file_name_list
        # NOTE(review): this 'elif' continues a format-dispatch chain whose start
        # is outside this chunk - presumably iterating format chars of ls_format
        elif format_char == 'P' or format_char == 'p':
            # path column: decorate the item name per the flags in ls_format
            path_to_return = item.name
            if item.isdir() and 'D' in ls_format:
                path_to_return += '/'  # 'D' flag: trailing slash on directories
            if 'E' in ls_format:
                # 'E' flag: classify indicator, like `ls -F`
                if item.issym():
                    path_to_return += '@'
                elif item.isfifo():
                    path_to_return += '|'
            the_parts[format_char] = path_to_return
    return the_parts


if __name__ == "__main__":
    # ad-hoc manual test: list two sample paths in every output format
    path_list = ('/Users/shai/Desktop/wlc.app', '/p4client/dev_main/ProAudio/Products/Release/Plugins/CODEX.bundle/Contents/sample.tar.PAX_FORMAT.wtar.aa')
    ls_format = "WMIRLUGSTCpE"  # 'MIRLUGSTCPE'
    for out_format in ('text', 'dicts', 'json'):
        for a_path in path_list:
            listing = disk_item_listing(a_path, ls_format=ls_format, output_format=out_format)
            # one output file per format, e.g. "ls.text", "ls.json"
            with utils.utf8_open_for_write("ls." + out_format, "w") as wfd:
                print(listing, file=wfd)
                print(os.path.realpath(wfd.name))