def run_commands_from_file(initial_vars, options):
    """Execute a list of instl commands as given in a config file.

    Currently limited only to commands of mode "do_something", e.g. commands
    implemented by InstMisc.
    """
    # make sure the dynamic-progress counters exist before any command runs
    for progress_var in ("__START_DYNAMIC_PROGRESS__", "__TOTAL_DYNAMIC_PROGRESS__"):
        config_vars.setdefault(progress_var, "0")

    run_in_parallel = "__RUN_COMMAND_LIST_IN_PARALLEL__" in config_vars
    CommandListRunner(initial_vars, options).run(parallel=run_in_parallel)
def init_sync_vars(self):
    """Set up sync-related config vars: repo revision and relative paths."""
    super().init_sync_vars()
    config_vars.setdefault("REPO_REV", "HEAD")

    sync_base = config_vars["SYNC_BASE_URL"].str()
    config_vars["REL_BOOKKEEPING_PATH"] = utils.relative_url(
        sync_base, config_vars["BOOKKEEPING_DIR_URL"].str())
    # NOTE(review): source and target are the same URL here, so the relative
    # path is trivially empty/"." — looks intentional but worth confirming.
    config_vars["REL_SRC_PATH"] = utils.relative_url(sync_base, sync_base)
def __init__(self, config_var_name, master=None, value=None, debug_var=False):
    """Bind a Tk variable to the ConfigVar named *config_var_name*.

    Creates the ConfigVar if it does not exist yet and wires change
    notifications in both directions (ConfigVar -> Tk and Tk -> ConfigVar).
    """
    TkBase.__init__(self, master, value, config_var_name)
    self.config_var_name = config_var_name
    self.debug_var = debug_var
    # presumably a module-level converter function — TODO confirm where
    # convert_type_func is defined
    self.convert_type_func = convert_type_func
    self.__internal_update = False
    self._our_trace_write_callback = None

    # create a ConfigVar if one does not exist yet; setdefault will not
    # assign the callback when the ConfigVar already exists, so set it
    # explicitly afterwards
    config_vars.setdefault(self.config_var_name, value)
    config_vars[self.config_var_name].set_callback_when_value_is_set(
        self._config_var_set_value_callback)

    # forward Tk-side writes back to the ConfigVar
    self.trace("w", self._internal_trace_write_callback)
    if self.debug_var:
        print(f"TkConfigVar.__init__({self.config_var_name})")
def resolve_defined_paths(self):
    """Resolve each config var listed in PATHS_TO_RESOLVE to a real file path.

    SEARCH_PATHS entries are added to the path searcher first; unresolvable
    paths keep their original value.
    """
    search_paths = list(config_vars.setdefault("SEARCH_PATHS", []))
    self.path_searcher.add_search_paths(search_paths)

    for var_name in list(config_vars.get("PATHS_TO_RESOLVE", [])):
        if var_name not in config_vars:
            continue  # nothing to resolve for undefined vars
        config_vars[var_name] = self.path_searcher.find_file(
            str(config_vars[var_name]),
            return_original_if_not_found=True)
def init_specific_doc_readers(self):
    """Register the YAML document-tag readers this class accepts.

    Always registers !define (and the deprecated !define_const, read as
    non-const), then registers any extra tags listed in
    ACCEPTABLE_YAML_DOC_TAGS plus a compiled/uncompiled-specific tag.
    """
    IndexYamlReaderBase.init_specific_doc_readers(self)
    for unwanted in ("__no_tag__", "__unknown_tag__"):
        self.specific_doc_readers.pop(unwanted, None)

    self.specific_doc_readers["!define"] = self.read_defines
    # !define_const is deprecated and read as non-const
    self.specific_doc_readers["!define_const"] = self.read_defines

    extra_tags = list(config_vars.setdefault("ACCEPTABLE_YAML_DOC_TAGS", []))
    if "__INSTL_COMPILED__" in config_vars:
        compiled = config_vars["__INSTL_COMPILED__"].str() == "True"
        extra_tags.append("define_Compiled" if compiled else "define_Uncompiled")

    for tag in extra_tags:
        # more specific prefix first: define_if_not_exist vs plain define
        if tag.startswith("define_if_not_exist"):
            self.specific_doc_readers["!" + tag] = self.read_defines_if_not_exist
        elif tag.startswith("define"):
            self.specific_doc_readers["!" + tag] = self.read_defines
def set_sync_locations_for_active_items(self):
    """Compute and persist the download locations for all active items.

    For each file item in each source this sets the full path where the file
    should be downloaded (item.download_path) and the top folder common to all
    items of a single source (item.download_root), then writes all of them to
    the info-map table in one update_downloads() call.
    """
    # get_sync_folders_and_sources_for_active_iids returns:
    #   [(iid, direct_sync_indicator, source, source_tag, install_folder), ...]
    # direct_sync_indicator will be None unless the item has a "direct_sync"
    #   section in index.yaml
    # source is the relative path as it appears in index.yaml
    # source_tag is one of '!dir', '!dir_cont', '!file'
    # install_folder is where the sources should be copied to OR, in case of
    #   direct sync, where they should be synced to; it is None for items that
    #   require only sync, not copy (such as Icons)
    sync_and_source = self.items_table.get_sync_folders_and_sources_for_active_iids()
    items_to_update = list()
    local_repo_sync_dir = os.fspath(config_vars["LOCAL_REPO_SYNC_DIR"])
    config_vars.setdefault("ALL_SYNC_DIRS", local_repo_sync_dir)
    for iid, direct_sync_indicator, source, source_tag, install_folder in sync_and_source:
        direct_sync = self.get_direct_sync_status_from_indicator(direct_sync_indicator)
        resolved_source_parts = source.split("/")
        if install_folder:
            resolved_install_folder = config_vars.resolve_str(install_folder)
        else:
            # install_folder may be None (sync-only items); keep it as-is
            resolved_install_folder = install_folder
        if source_tag in ('!dir', '!dir_cont'):
            if direct_sync:
                # for a direct-sync source, if one of the sources is Info.xml
                # and it exists on disk AND source & file have the same
                # checksum, then no sync is needed at all. All the above is
                # not relevant in repair (update) mode.
                need_to_sync = True
                if not self.update_mode:
                    info_xml_item = self.info_map_table.get_file_item("/".join((source, "Info.xml")))
                    if info_xml_item:
                        info_xml_of_target = config_vars.resolve_str("/".join((resolved_install_folder, resolved_source_parts[-1], "Info.xml")))
                        need_to_sync = not utils.check_file_checksum(info_xml_of_target, info_xml_item.checksum)
                if need_to_sync:
                    config_vars["ALL_SYNC_DIRS"].append(resolved_install_folder)
                    item_paths = self.info_map_table.get_recursive_paths_in_dir(dir_path=source, what="any")
                    self.progress(f"mark for download {len(item_paths)} files of {iid}/{source}")
                    if source_tag == '!dir':
                        # '!dir': the source folder itself is copied, so paths
                        # are made relative to the source's parent folder
                        source_parent = "/".join(resolved_source_parts[:-1])
                        for item in item_paths:
                            items_to_update.append({
                                "_id": item['_id'],
                                "download_path": config_vars.resolve_str("/".join((resolved_install_folder, item['path'][len(source_parent) + 1:]))),
                                "download_root": config_vars.resolve_str("/".join((resolved_install_folder, resolved_source_parts[-1])))})
                    else:  # !dir_cont: only the folder's contents are copied
                        source_parent = source
                        for item in item_paths:
                            items_to_update.append({
                                "_id": item['_id'],
                                "download_path": config_vars.resolve_str("/".join((resolved_install_folder, item['path'][len(source_parent) + 1:]))),
                                "download_root": resolved_install_folder})
                else:
                    # Info.xml matched: mark the whole dir's files as ignored
                    num_ignored_files = self.info_map_table.ignore_file_paths_of_dir(dir_path=source)
                    if num_ignored_files < 1:
                        num_ignored_files = ""  # sqlite curs.rowcount does not always returns the number of effected rows
                    self.progress(f"avoid download {num_ignored_files} files of {iid}, Info.xml has not changed")
            else:
                # not direct-sync: everything goes under the local repo sync dir
                item_paths = self.info_map_table.get_recursive_paths_in_dir(dir_path=source)
                self.progress(f"mark for download {len(item_paths)} files of {iid}/{source}")
                for item in item_paths:
                    items_to_update.append({
                        "_id": item['_id'],
                        "download_path": config_vars.resolve_str("/".join((local_repo_sync_dir, item['path']))),
                        "download_root": None})
        elif source_tag == '!file':
            # if the file was wtarred and split it would have multiple items
            items_for_file = self.info_map_table.get_required_paths_for_file(source)
            self.progress(f"mark for download {len(items_for_file)} files of {iid}/{source}")
            if direct_sync:
                config_vars["ALL_SYNC_DIRS"].append(resolved_install_folder)
                for item in items_for_file:
                    items_to_update.append({
                        "_id": item['_id'],
                        "download_path": config_vars.resolve_str("/".join((resolved_install_folder, item['leaf']))),
                        "download_root": config_vars.resolve_str(item.download_path)})
            else:
                for item in items_for_file:
                    items_to_update.append({
                        "_id": item['_id'],
                        "download_path": config_vars.resolve_str("/".join((local_repo_sync_dir, item['path']))),
                        "download_root": None})  # no need to set item.download_root here - it will not be used
    self.info_map_table.update_downloads(items_to_update)
def create_config_files(self, curl_config_file_path, num_config_files):
    """Write curl config files for all pending downloads.

    Splits self.urls_to_download across up to *num_config_files* curl config
    files (named "<curl_config_file_path>-NN"), smaller files first so the
    progress bar moves early. If self.urls_to_download_last is non-empty, one
    extra config file is created for those URLs and a None sentinel (meaning
    "wait for previous downloads to finish") is inserted before it in the
    returned list.

    Returns the list of config file names (possibly containing the None
    sentinel), or an empty list when there is nothing to download.
    """
    file_name_list = list()
    if self.get_num_urls_to_download() > 0:
        # curl tuning knobs, with defaults written back into config_vars
        connect_time_out = str(config_vars.setdefault("CURL_CONNECT_TIMEOUT", "16"))
        max_time = str(config_vars.setdefault("CURL_MAX_TIME", "180"))
        retries = str(config_vars.setdefault("CURL_RETRIES", "2"))
        retry_delay = str(config_vars.setdefault("CURL_RETRY_DELAY", "8"))
        sync_urls_cookie = str(config_vars.get("COOKIE_FOR_SYNC_URLS", ""))

        actual_num_config_files = int(max(0, min(len(self.urls_to_download), num_config_files)))
        if self.urls_to_download_last:
            actual_num_config_files += 1  # dedicated file for the must-be-last urls
        num_digits = len(str(actual_num_config_files))
        file_name_list = ["-".join((os.fspath(curl_config_file_path), str(file_i).zfill(num_digits)))
                          for file_i in range(actual_num_config_files)]

        # open the files; make sure they have r/w permissions and are utf-8
        wfd_list = list()
        for file_name in file_name_list:
            wfd = utils.utf8_open_for_write(file_name, "w")
            wfd_list.append(wfd)

        # write the header in each file
        for wfd in wfd_list:
            basename = os.path.basename(wfd.name)
            if sync_urls_cookie:
                cookie_text = f"cookie = {sync_urls_cookie}\n"
            else:
                cookie_text = ""
            curl_write_out_str = CUrlHelper.curl_write_out_str
            file_header_text = f"""
insecure
raw
fail
silent
show-error
compressed
create-dirs
connect-timeout = {connect_time_out}
max-time = {max_time}
retry = {retries}
retry-delay = {retry_delay}
{cookie_text}
write-out = "Progress: ... of ...; {basename}: {curl_write_out_str}
"""
            wfd.write(file_header_text)

        # the last config file is reserved for urls_to_download_last
        last_file = None
        if self.urls_to_download_last:
            last_file = wfd_list.pop()

        def url_sorter(l, r):
            """smaller files should be downloaded first so the progress bar
            gets moving early."""
            return l[2] - r[2]  # non Info.xml files are sorted by size

        # round-robin the sorted urls across the remaining config files
        wfd_cycler = itertools.cycle(wfd_list)
        sorted_by_size = sorted(self.urls_to_download, key=functools.cmp_to_key(url_sorter))
        for url, path, size in sorted_by_size:
            fixed_path = self.fix_path(path)
            wfd = next(wfd_cycler)
            wfd.write(f'''url = "{url}"\noutput = "{fixed_path}"\n\n''')
        for wfd in wfd_list:
            wfd.close()

        for url, path, size in self.urls_to_download_last:
            fixed_path = self.fix_path(path)
            last_file.write(f'''url = "{url}"\noutput = "{fixed_path}"\n\n''')
        if last_file:
            # fix: last_file was popped out of wfd_list before the close loop
            # above and was previously never closed (file-handle leak)
            last_file.close()

        # insert None which means "wait" before the config file that downloads
        # urls_to_download_last. But only if there were actually download files
        # other than urls_to_download_last. It might happen that there are only
        # urls_to_download_last - so no need to "wait".
        if last_file and len(wfd_list) > 0:
            file_name_list.insert(-1, None)

    return file_name_list