def resolve_defined_paths(self):
    """Feed SEARCH_PATHS into the path searcher, then resolve every variable
    named in PATHS_TO_RESOLVE to a found file path (keeping the original
    value when the file cannot be located)."""
    search_paths = var_stack.ResolveVarToList("SEARCH_PATHS", default=[])
    self.path_searcher.add_search_paths(search_paths)
    for var_name in var_stack.ResolveVarToList("PATHS_TO_RESOLVE", default=[]):
        if var_name not in var_stack:
            continue
        found_path = self.path_searcher.find_file(
            var_stack.ResolveVarToStr(var_name),
            return_original_if_not_found=True)
        var_stack.set_var(var_name, "resolve_defined_paths").append(found_path)
def do_set(self, params):
    """Interactive 'set' command: first token of params is the variable name,
    remaining tokens are its values. Lists the variable afterwards."""
    if params:
        tokens = shlex.split(params)
        name = tokens[0]
        values = tokens[1:]
        var_stack.set_var(name, "set interactively").extend(values)
        self.do_list(name)
    # returning False keeps the interactive loop running
    return False
def do_copy(self, params):
    """Interactive 'copy' command: run the client's copy, writing output to
    params if given, otherwise to stdout."""
    destination = params if params else "stdout"
    var_stack.set_var("__MAIN_OUT_FILE__").append(destination)
    var_stack.set_var("__MAIN_COMMAND__").append("copy")
    self.client_prog_inst.do_command()
    # returning False keeps the interactive loop running
    return False
def update_client_input_file_combo(self, *args):
    """GUI callback: when the chosen input path is an existing file, repopulate
    the combobox with all sibling files of that file, then record the chosen
    path in CLIENT_GUI_IN_FILE."""
    chosen_path = self.client_input_path_var.get()
    if os.path.isfile(chosen_path):
        chosen_dir, _ = os.path.split(chosen_path)
        sibling_files = [
            os.path.join(chosen_dir, entry)
            for entry in os.listdir(chosen_dir)
            if os.path.isfile(os.path.join(chosen_dir, entry))
        ]
        self.client_input_combobox.configure(values=sibling_files)
    var_stack.set_var("CLIENT_GUI_IN_FILE").append(self.client_input_path_var.get())
def calculate_all_uninstall_items(self):
    """Decide which iids should actually be uninstalled.

    Uses the require-translate table: each (iid, require_by) row means
    require_by depends on iid. An iid is uninstallable only when its
    require_by count drops to zero. Results are written to
    __FULL_LIST_OF_INSTALL_TARGETS__ (items to uninstall) and
    __ORPHAN_INSTALL_TARGETS__ (requested items that must stay).
    """
    # if true all main install items will be uninstalled regardless if they are indeed installed
    # and regardless if some other item depends on them
    force_uninstall_of_main_items = "FORCE_UNINSTALL_OF_MAIN_ITEMS" in var_stack
    iid_candidates_for_uninstall = var_stack.ResolveVarToList("__MAIN_INSTALL_IIDS__")
    req_trans_items = self.items_table.get_all_require_translate_items()
    # create a count of how many require_by each item has
    how_many_require_by = defaultdict( lambda: 0 )
    for rt in req_trans_items:
        how_many_require_by[rt.iid] += 1
    if not force_uninstall_of_main_items:
        # some main uninstall items might be required by other items (that are not uninstalled),
        # and so should not be uninstalled.
        # First pass: remove the candidates' own contribution to the counts;
        # rt.status marks rows already consumed so each row is counted once.
        for candi in iid_candidates_for_uninstall:
            for req_trans in req_trans_items:
                if req_trans.status == 0:
                    if req_trans.require_by == candi:
                        req_trans.status += 1
                        how_many_require_by[req_trans.iid] -= 1
        items_required_by_no_one = [iid for iid, count in how_many_require_by.items() if count == 0]
        # only candidates nobody else requires survive to the dependency stage
        should_be_uninstalled = list(set(iid_candidates_for_uninstall) & set(items_required_by_no_one))
        # zero status and count for next stage
        how_many_require_by = defaultdict( lambda: 0 )
        for rt in req_trans_items:
            how_many_require_by[rt.iid] +=1
            rt.status = 0
    else:
        should_be_uninstalled = iid_candidates_for_uninstall
    # now calculate dependencies for main items that should be uninstalled:
    # BFS over the require graph, decrementing counts; an iid whose count
    # reaches zero is no longer required and is itself queued for uninstall
    candi_que = deque(should_be_uninstalled)
    while len(candi_que) > 0:
        candi = candi_que.popleft()
        for req_trans in req_trans_items:
            if req_trans.status == 0:
                if req_trans.require_by == candi:
                    req_trans.status += 1
                    how_many_require_by[req_trans.iid] -= 1
                    if how_many_require_by[req_trans.iid] == 0 and req_trans.iid != candi:
                        candi_que.append(req_trans.iid)
    # items whose count is 0 should be uninstalled
    all_uninstall_items = [iid for iid, count in how_many_require_by.items() if count == 0]
    if force_uninstall_of_main_items:
        # forced mode: the main candidates are uninstalled no matter what
        all_uninstall_items = list(set(all_uninstall_items+iid_candidates_for_uninstall))
    var_stack.set_var("__FULL_LIST_OF_INSTALL_TARGETS__").extend(sorted(all_uninstall_items))
    # candidates that are still required by someone else are reported as orphans
    iids_that_should_not_be_uninstalled = list(set(iid_candidates_for_uninstall)-set(all_uninstall_items))
    var_stack.set_var("__ORPHAN_INSTALL_TARGETS__").extend(iids_that_should_not_be_uninstalled)
    self.items_table.change_status_of_iids_to_another_status(0, -1, all_uninstall_items)
    self.sort_all_items_by_target_folder()
def read_defines_if_not_exist(self, a_node, *args, **kwargs):
    """Read a !define_if_not_exist YAML mapping node, defining only those
    identifiers that are not already present in var_stack.

    :param a_node: YAML node; scalar nodes (empty documents) are ignored
    :raises ValueError: if the node contains __include__ or
                        __include_if_exist__, which are not allowed here
    """
    # if document is empty we get a scalar node, hence the mapping check
    if a_node.isMapping():
        for identifier, contents in a_node.items():
            if identifier in ("__include__", "__include_if_exist__"):
                # message fixed: "except" -> "accept"
                raise ValueError("!define_if_not_exist doc cannot accept __include__ and __include_if_exist__")
            if self.allow_reading_of_internal_vars or not internal_identifier_re.match(identifier):  # do not read internal state identifiers
                if identifier not in var_stack:
                    var_stack.set_var(identifier, str(contents.start_mark)).extend([item.value for item in contents])
def write_history(self):
    """Persist GUI state (currently selected tab plus all GUI config vars)
    to the instl GUI config YAML file."""
    current_tab = self.notebook.tab(self.notebook.select(), option='text')
    var_stack.set_var("SELECTED_TAB").append(current_tab)
    gui_vars = var_stack.ResolveVarToList("__GUI_CONFIG_FILE_VARS__", default=[])
    vars_as_yaml = var_stack.repr_for_yaml(which_vars=gui_vars,
                                           include_comments=False,
                                           resolve=False,
                                           ignore_unknown_vars=True)
    yaml_doc = aYaml.YamlDumpDocWrap(vars_as_yaml, '!define', "Definitions",
                                     explicit_start=True, sort_mappings=True)
    config_file_name = var_stack.ResolveVarToStr("INSTL_GUI_CONFIG_FILE_NAME")
    with open(config_file_name, "w", encoding='utf-8') as wfd:
        utils.make_open_file_read_write_for_all(wfd)
        aYaml.writeAsYaml(yaml_doc, wfd)
def init_sync_vars(self):
    """Prepare variables needed for svn-based sync: default REPO_REV,
    REL_BOOKKEEPING_PATH and REL_SRC_PATH."""
    super().init_sync_vars()
    var_description = "InstlInstanceSync_svn.init_sync_vars"
    # default to HEAD unless the caller pinned a specific revision
    var_stack.set_value_if_var_does_not_exist("REPO_REV", "HEAD", description=var_description)
    bookkeeping_relative_path = utils.relative_url(var_stack.ResolveVarToStr("SYNC_BASE_URL"), var_stack.ResolveVarToStr("BOOKKEEPING_DIR_URL"))
    var_stack.set_var("REL_BOOKKEEPING_PATH", var_description).append(bookkeeping_relative_path)
    # NOTE(review): both arguments are SYNC_BASE_URL, so this computes the URL
    # relative to itself — presumably yields an empty/trivial relative path on
    # purpose (sources live at the sync base), but confirm against utils.relative_url
    rel_sources = utils.relative_url(var_stack.ResolveVarToStr("SYNC_BASE_URL"), var_stack.ResolveVarToStr("SYNC_BASE_URL"))
    var_stack.set_var("REL_SRC_PATH", var_description).append(rel_sources)
def get_cookie_for_sync_urls(self, sync_base_url):
    """ get the cookie for sync_base_url and set config var COOKIE_FOR_SYNC_URLS
        to the text of the cookie
    """
    host = urllib.parse.urlparse(sync_base_url).netloc
    the_cookie = connectionBase.connection_factory().get_cookie(host)
    # the_cookie is a ('Cookie', cookie_text) tuple; only the text is stored
    if the_cookie:
        var_stack.set_var("COOKIE_FOR_SYNC_URLS").append(the_cookie[1])
def provision_public_key_text(self):
    """Return the resolved public key text.

    If PUBLIC_KEY is not yet defined it is read from the file/url named by
    PUBLIC_KEY_FILE. Raises ValueError when neither variable is defined.
    """
    if "PUBLIC_KEY" not in var_stack:
        if "PUBLIC_KEY_FILE" not in var_stack:
            raise ValueError("No public key, variables PUBLIC_KEY & PUBLIC_KEY_FILE are not defined")
        key_file = var_stack.ResolveVarToStr("PUBLIC_KEY_FILE")
        with utils.open_for_read_file_or_url(key_file, connectionBase.translate_url, self.path_searcher) as file_fd:
            key_text = file_fd.read()
        var_stack.set_var("PUBLIC_KEY", "from " + key_file).append(key_text)
    return var_stack.ResolveVarToStr("PUBLIC_KEY")
def calculate_all_install_items(self):
    """Mark main install iids and their recursive dependencies, optionally
    filter out IGNORED_IIDS, and publish the result to
    __FULL_LIST_OF_INSTALL_TARGETS__, then sort items by target folder."""
    # status 0 -> 1: mark the requested main install items
    self.items_table.change_status_of_iids_to_another_status(0, 1, var_stack.ResolveVarToList("__MAIN_INSTALL_IIDS__"))
    all_items_from_table = self.items_table.get_recursive_dependencies(look_for_status=1)
    var_stack.set_var("__FULL_LIST_OF_INSTALL_TARGETS__").extend(sorted(all_items_from_table))
    # status 0 -> 2: mark the dependencies just discovered
    self.items_table.change_status_of_iids_to_another_status(0, 2, all_items_from_table)
    if "IGNORED_IIDS" in var_stack:
        ignored_iids = var_stack.ResolveVarToList("IGNORED_IIDS")
        # NOTE(review): sibling methods use change_status_of_iids_to_another_status;
        # confirm change_status_of_iids(0, ...) is the intended API/signature here
        self.items_table.change_status_of_iids(0, ignored_iids)
        all_items_from_table_except_ignored = list(set(all_items_from_table) - set(ignored_iids))
        # NOTE(review): assumes set_var replaces the previous value so the
        # filtered list overwrites the unfiltered one set above — confirm
        var_stack.set_var("__FULL_LIST_OF_INSTALL_TARGETS__").extend(sorted(all_items_from_table_except_ignored))
    self.sort_all_items_by_target_folder()
def init_sync_vars(self):
    """ Prepares variables for sync. Will raise ValueError if a mandatory variable is not defined.
    """
    required_vars = var_stack.ResolveVarToList("__SYNC_PREREQUISITE_VARIABLES__")
    self.instlObj.check_prerequisite_var_existence(required_vars)
    # provision PUBLIC_KEY from PUBLIC_KEY_FILE when possible
    if "PUBLIC_KEY" not in var_stack and "PUBLIC_KEY_FILE" in var_stack:
        key_file = var_stack.ResolveVarToStr("PUBLIC_KEY_FILE")
        with utils.open_for_read_file_or_url(key_file, connectionBase.translate_url, self.instlObj.path_searcher) as file_fd:
            key_text = file_fd.read()
        var_stack.set_var("PUBLIC_KEY", "from " + key_file).append(key_text)
    self.instlObj.calc_user_cache_dir_var()  # this will set USER_CACHE_DIR if it was not explicitly defined
def do_resolve(self):
    """Resolve $(...) variable references in __MAIN_INPUT_FILE__ using the
    definitions from __CONFIG_FILE__ and write the result to __MAIN_OUT_FILE__.

    Raises FileNotFoundError when the config or input file does not exist.
    """
    var_stack.set_var("PRINT_COMMAND_TIME").append("no")  # do not print time report
    config_file = var_stack.ResolveVarToStr("__CONFIG_FILE__")
    if not os.path.isfile(config_file):
        raise FileNotFoundError(config_file, var_stack.unresolved_var("__CONFIG_FILE__"))
    input_file = var_stack.ResolveVarToStr("__MAIN_INPUT_FILE__")
    if not os.path.isfile(input_file):
        raise FileNotFoundError(input_file, var_stack.unresolved_var("__MAIN_INPUT_FILE__"))
    output_file = var_stack.ResolveVarToStr("__MAIN_OUT_FILE__")
    self.read_yaml_file(config_file)
    with utils.utf8_open(input_file, "r") as rfd:
        raw_text = rfd.read()
    with utils.utf8_open(output_file, "w") as wfd:
        wfd.write(var_stack.ResolveStrToStr(raw_text))
def create_instl_history_file(self):
    """Dump all variable definitions to the history temp file and queue a
    batch instruction appending it to the permanent history file."""
    var_stack.set_var("__BATCH_CREATE_TIME__").append(time.strftime("%Y/%m/%d %H:%M:%S"))
    defines_doc = aYaml.YamlDumpDocWrap(var_stack, '!define', "Definitions",
                                        explicit_start=True, sort_mappings=True)
    # write the history file, but only if variable LOCAL_REPO_BOOKKEEPING_DIR is defined
    # and the folder actually exists.
    history_path = var_stack.ResolveVarToStr("INSTL_HISTORY_TEMP_PATH")
    history_folder = os.path.split(history_path)[0]
    if os.path.isdir(history_folder):
        with open(history_path, "w", encoding='utf-8') as wfd:
            utils.make_open_file_read_write_for_all(wfd)
            aYaml.writeAsYaml(defines_doc, wfd)
        self.batch_accum += self.platform_helper.append_file_to_file("$(INSTL_HISTORY_TEMP_PATH)", "$(INSTL_HISTORY_PATH)")
def read_defines(self, a_node, *args, **kwargs):
    """Read a !define YAML mapping node: dispatch __include__,
    __include_if_exist__ and __environment__ directives, and define every
    other (non-internal) identifier as a variable."""
    # if document is empty we get a scalar node, so only mappings are processed
    if not a_node.isMapping():
        return
    for identifier, contents in a_node.items():
        if identifier == '__include__':
            self.read_include_node(contents, *args, **kwargs)
        elif identifier == "__include_if_exist__":
            kwargs.update({'ignore_if_not_exist': True})
            self.read_include_node(contents, *args, **kwargs)
        elif identifier == "__environment__":
            for item in contents:
                var_stack.read_environment(item.value)
        elif self.allow_reading_of_internal_vars or not internal_identifier_re.match(identifier):
            # do not read internal state identifiers
            values = [item.value for item in contents]
            var_stack.set_var(identifier, str(contents.start_mark)).extend(values)
def calculate_default_doit_item_set(self):
    """Calculate the set of iids to do from the "MAIN_DOIT_ITEMS" variable.
    Full set of doit iids and orphan iids are written to
    __FULL_LIST_OF_DOIT_TARGETS__ and __ORPHAN_DOIT_TARGETS__.

    Raises ValueError when MAIN_DOIT_ITEMS is not defined.
    """
    if "MAIN_DOIT_ITEMS" not in var_stack:
        raise ValueError("'MAIN_DOIT_ITEMS' was not defined")
    active_oses = var_stack.ResolveVarToList("TARGET_OS_NAMES")
    self.items_table.begin_get_for_specific_oses(active_oses)
    for os_name in active_oses:
        InstallItem.begin_get_for_specific_os(os_name)
    self.installState.root_doit_items.extend(var_stack.ResolveVarToList("MAIN_DOIT_ITEMS"))
    # drop falsy (empty) entries
    self.installState.root_doit_items = [item for item in self.installState.root_doit_items if item]
    self.installState.calculate_full_doit_items_set(self)
    var_stack.set_var("__FULL_LIST_OF_DOIT_TARGETS__").extend(self.installState.full_doit_items)
    var_stack.set_var("__ORPHAN_DOIT_TARGETS__").extend(self.installState.orphan_doit_items)
def find_cmd_tool(self, tool_to_find_var_name):
    """ locate the path to a cmd.exe tool on windows, if found put the full path in variable
    :param tool_to_find_var_name: variable name of tool or full path to tool
    :return: the path to the tool, or None when the variable is undefined
             or the tool could not be located
    """
    tool_path = None
    if tool_to_find_var_name in var_stack:
        original_tool_value = var_stack.ResolveVarToStr(tool_to_find_var_name)
        # first try the variable, could be that the tool was already found
        if os.path.isfile(original_tool_value):
            tool_path = original_tool_value

        if tool_path is None:
            # next try to ask the system using the where command
            try:
                where_tool_path = subprocess.check_output("where " + original_tool_value).strip()
                where_tool_path = utils.unicodify(where_tool_path)
                if os.path.isfile(where_tool_path):
                    tool_path = where_tool_path
                    # cache the discovered path back into the variable
                    var_stack.set_var(tool_to_find_var_name, "find_cmd_tool").append(tool_path)
            except Exception:
                pass  # never mind, we'll try on our own

        if tool_path is None:
            # last resort: scan PATH (plus known system folders) ourselves
            win_paths = utils.unique_list()
            # try to find the tool in the PATH variable
            if "PATH" in os.environ:
                # remove newline characters that might lurk in the path (see tech support case 143589);
                # '?' is an invalid path char on Windows so the damaged entry simply won't match
                adjusted_path = re.sub('[\r\n]', "?", utils.unicodify(os.environ["PATH"]))
                win_paths.extend(adjusted_path.split(";"))
            else:
                print("PATH was not found in environment variables")
            # also add some known location in case user's PATH variable was altered
            if "SystemRoot" in os.environ:
                system_root = utils.unicodify(os.environ["SystemRoot"])
                know_locations = (os.path.join(system_root, "System32"),
                                  os.path.join(system_root, "SysWOW64"))
                win_paths.extend(know_locations)
            for win_path in win_paths:
                tool_path = os.path.join(win_path, original_tool_value)
                if os.path.isfile(tool_path):
                    var_stack.set_var(tool_to_find_var_name, "find_cmd_tool ").append(tool_path)
                    break
            else:  # break was not called, tool was not found
                tool_path = None
    return tool_path
def calc_user_cache_dir_var(self, make_dir=True):
    """Define USER_CACHE_DIR for the current OS family if it is not already
    defined; optionally create the folder.

    :param make_dir: when True, create the cache folder if missing
    :raises RuntimeError: for an unrecognized OS family
    """
    if "USER_CACHE_DIR" not in var_stack:
        os_family_name = var_stack.ResolveVarToStr("__CURRENT_OS__")
        # Mac and Linux share the same single-argument layout; Windows
        # splits app name and company into separate arguments
        if os_family_name in ("Mac", "Linux"):
            user_cache_dir = appdirs.user_cache_dir("$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)")
        elif os_family_name == "Win":
            user_cache_dir = appdirs.user_cache_dir("$(INSTL_EXEC_DISPLAY_NAME)", "$(COMPANY_NAME)")
        else:
            raise RuntimeError("Unknown operating system "+os_family_name)
        var_stack.set_var("USER_CACHE_DIR", "from InstlInstanceBase.get_user_cache_dir").append(user_cache_dir)
    if make_dir:
        resolved_dir = var_stack.ResolveVarToStr("USER_CACHE_DIR")
        os.makedirs(resolved_dir, exist_ok=True)
def calculate_all_install_items(self):
    """Drive iids through the install-status state machine: mark main items
    and their dependants, then update items and their dependants, and publish
    the main+depend set to __FULL_LIST_OF_INSTALL_TARGETS__."""
    # marked ignored iids, all subsequent operations not act on these iids
    if "MAIN_IGNORED_TARGETS" in var_stack:
        ignored_iids = var_stack.ResolveVarToList("MAIN_IGNORED_TARGETS")
        self.items_table.set_ignore_iids(ignored_iids)
    # mark main install items
    main_iids = var_stack.ResolveVarToList("__MAIN_INSTALL_IIDS__")
    self.items_table.change_status_of_iids_to_another_status(
        self.items_table.install_status["none"],
        self.items_table.install_status["main"],
        main_iids)
    # find dependant of main install items
    main_iids_and_dependents = self.items_table.get_recursive_dependencies(look_for_status=self.items_table.install_status["main"])
    # mark dependants of main items, but only if they are not already in main items
    # (status transitions only apply to items still in status "none")
    self.items_table.change_status_of_iids_to_another_status(
        self.items_table.install_status["none"],
        self.items_table.install_status["depend"],
        main_iids_and_dependents)
    # mark update install items, but only those not already marked as main or depend
    update_iids = var_stack.ResolveVarToList("__MAIN_UPDATE_IIDS__")
    self.items_table.change_status_of_iids_to_another_status(
        self.items_table.install_status["none"],
        self.items_table.install_status["update"],
        update_iids)
    # find dependants of update install items
    update_iids_and_dependents = self.items_table.get_recursive_dependencies(look_for_status=self.items_table.install_status["update"])
    # mark dependants of update items, but only if they are not already marked
    self.items_table.change_status_of_iids_to_another_status(
        self.items_table.install_status["none"],
        self.items_table.install_status["depend"],
        update_iids_and_dependents)
    # NOTE: only "main" and "depend" statuses are collected here — "update"
    # items without a main/depend marking are intentionally excluded
    all_items_to_install = self.items_table.get_iids_by_status(
        self.items_table.install_status["main"],
        self.items_table.install_status["depend"])
    var_stack.set_var("__FULL_LIST_OF_INSTALL_TARGETS__").extend(sorted(all_items_to_install))
    self.sort_all_items_by_target_folder(consider_direct_sync=True)
    self.calc_iid_to_name_and_version()
def calculate_full_doit_order(self):
    """Calculate the full, dependency-resolved order of iids to do from the
    "MAIN_DOIT_ITEMS" variable. The full list is written to
    __FULL_LIST_OF_DOIT_TARGETS__; requested items unknown to the index are
    removed and reported in __ORPHAN_DOIT_TARGETS__.

    Raises ValueError when MAIN_DOIT_ITEMS is not defined.

    (Docstring previously referred to MAIN_INSTALL_TARGETS — corrected.)
    """
    if "MAIN_DOIT_ITEMS" not in var_stack:
        raise ValueError("'MAIN_DOIT_ITEMS' was not defined")
    for iid in var_stack.ResolveVarToList("MAIN_DOIT_ITEMS"):
        self.resolve_dependencies_for_iid(iid)
    all_iis_set = set(self.items_table.get_all_iids())
    # anything in the doit order that the index does not know is an orphan
    orphan_iids = list(set(self.full_doit_order)-all_iis_set)
    if orphan_iids:
        # message fixed: was "Don't know to do with these orphan items::"
        print("Don't know what to do with these orphan items:", orphan_iids)
        var_stack.set_var("__ORPHAN_DOIT_TARGETS__").extend(sorted(orphan_iids))
        for o_iid in orphan_iids:
            self.full_doit_order.remove(o_iid)
    # print("doit order:", self.full_doit_order)
    var_stack.set_var("__FULL_LIST_OF_DOIT_TARGETS__").extend(self.full_doit_order)
def write_batch_file(self):
    """Finalize the accumulated batch instructions and write them to
    __MAIN_OUT_FILE__ (defaulting to a name derived from the input file),
    translating unresolved $(VAR) references to native shell syntax."""
    # derive a default output name next to the input file when none was given
    if "__MAIN_OUT_FILE__" not in var_stack and "__MAIN_INPUT_FILE__" in var_stack:
        var_stack.add_const_config_variable("__MAIN_OUT_FILE__", "from write_batch_file",
                                            "$(__MAIN_INPUT_FILE__)-$(__MAIN_COMMAND__).$(BATCH_EXT)")
    self.batch_accum.set_current_section('pre')
    self.batch_accum += self.platform_helper.get_install_instructions_prefix()
    self.batch_accum.set_current_section('post')
    var_stack.set_var("TOTAL_ITEMS_FOR_PROGRESS_REPORT").append(str(self.platform_helper.num_items_for_progress_report))
    self.batch_accum += self.platform_helper.get_install_instructions_postfix()
    lines = self.batch_accum.finalize_list_of_lines()
    # sanity: every finalized line must already be a string
    for line in lines:
        if type(line) != str:
            raise TypeError("Not a string", type(line), line)

    # replace unresolved var references to native OS var references, e.g. $(HOME) would be %HOME% on Windows and ${HOME} one Mac
    lines_after_var_replacement = [value_ref_re.sub(self.platform_helper.var_replacement_pattern, line) for line in lines]
    output_text = "\n".join(lines_after_var_replacement)

    out_file = var_stack.ResolveVarToStr("__MAIN_OUT_FILE__")
    out_file = os.path.abspath(out_file)
    d_path, f_name = os.path.split(out_file)
    os.makedirs(d_path, exist_ok=True)
    with utils.write_to_file_or_stdout(out_file) as fd:
        fd.write(output_text)
        fd.write('\n')

    # NOTE(review): out_file was passed through os.path.abspath above, so the
    # literal comparison to "stdout" presumably never matches after abspath —
    # confirm whether the abspath call should be skipped for "stdout"
    if out_file != "stdout":
        self.out_file_realpath = os.path.realpath(out_file)
        # chmod to 0777 so that file created under sudo, can be re-written under regular user.
        # However regular user cannot chmod for file created under sudo, hence the try/except
        try:
            os.chmod(self.out_file_realpath, 0o777)
        except Exception:
            pass
    else:
        self.out_file_realpath = "stdout"
    msg = " ".join((self.out_file_realpath, str(self.platform_helper.num_items_for_progress_report), "progress items"))
    print(msg)
def calculate_main_install_items(self):
    """Calculate the set of iids to install from the "MAIN_INSTALL_TARGETS"
    variable. Writes the resolved iids to __MAIN_INSTALL_IIDS__ and items that
    could not be resolved to __ORPHAN_INSTALL_TARGETS__.

    Raises ValueError when MAIN_INSTALL_TARGETS is not defined.
    """
    if "MAIN_INSTALL_TARGETS" not in var_stack:
        raise ValueError("'MAIN_INSTALL_TARGETS' was not defined")
    # legacy, to be removed when InstallItem is no longer in use
    active_oses = var_stack.ResolveVarToList("TARGET_OS_NAMES")
    for os_name in active_oses:
        InstallItem.begin_get_for_specific_os(os_name)
    targets = var_stack.ResolveVarToList("MAIN_INSTALL_TARGETS")
    iids, guids = utils.separate_guids_from_iids(targets)
    iids_from_guids, orphaned_guids = self.items_table.iids_from_guids(guids)
    iids.extend(iids_from_guids)
    iids = self.resolve_special_build_in_iids(iids)
    iids, orphaned_iids = self.items_table.iids_from_iids(iids)
    var_stack.set_var("__MAIN_INSTALL_IIDS__").extend(sorted(iids))
    var_stack.set_var("__ORPHAN_INSTALL_TARGETS__").extend(sorted(orphaned_guids + orphaned_iids))
def create_remove_instructions(self):
    """Build the batch instructions that remove all installed targets.

    Reads the 'have' info map, then walks target folders in reverse-sorted
    order, emitting per-folder and per-item pre/post remove actions plus
    progress messages into self.batch_accum.
    """
    # prefer the local have-map; fall back to the site-wide one
    have_info_path = var_stack.ResolveVarToStr("HAVE_INFO_MAP_PATH")
    if not os.path.isfile(have_info_path):
        have_info_path = var_stack.ResolveVarToStr("SITE_HAVE_INFO_MAP_PATH")
    self.read_info_map_from_file(have_info_path)
    self.batch_accum.set_current_section("remove")
    self.batch_accum += self.platform_helper.progress("Starting remove")
    # reverse order so nested folders are removed before their parents
    sorted_target_folder_list = sorted(
        self.all_items_by_target_folder,
        key=lambda fold: var_stack.ResolveStrToStr(fold),
        reverse=True
    )
    # print(sorted_target_folder_list)
    self.accumulate_unique_actions("pre_remove", var_stack.ResolveVarToList("__FULL_LIST_OF_INSTALL_TARGETS__"))
    for folder_name in sorted_target_folder_list:
        self.batch_accum += self.platform_helper.progress("Removing from {0}".format(folder_name))
        var_stack.set_var("__TARGET_DIR__").append(os.path.normpath(folder_name))
        items_in_folder = self.all_items_by_target_folder[folder_name]
        self.batch_accum += self.platform_helper.new_line()
        self.accumulate_unique_actions("pre_remove_from_folder", items_in_folder)
        for IID in items_in_folder:
            # push the item's own variables onto the stack for the duration
            with self.install_definitions_index[IID].push_var_stack_scope() as installi:
                self.batch_accum += self.platform_helper.progress("Removing {installi.name}...".format(**locals()))
                for source_var in var_stack.get_configVar_obj("iid_source_var_list"):
                    source = var_stack.ResolveVarToList(source_var)
                    self.batch_accum += self.platform_helper.progress("Removing {source[0]}...".format(**locals()))
                    self.batch_accum += var_stack.ResolveVarToList("iid_action_list_pre_remove_item", default=[])
                    self.create_remove_instructions_for_source(folder_name, source)
                    self.batch_accum += var_stack.ResolveVarToList("iid_action_list_post_remove_item", default=[])
                    self.batch_accum += self.platform_helper.progress("Remove {source[0]} done".format(**locals()))
                self.batch_accum += self.platform_helper.progress("Remove {installi.name} done".format(**locals()))
        self.accumulate_unique_actions("post_remove_from_folder", items_in_folder)
        self.batch_accum += self.platform_helper.progress("Remove from {0} done".format(folder_name))
    self.accumulate_unique_actions("post_remove", var_stack.ResolveVarToList("__FULL_LIST_OF_INSTALL_TARGETS__"))
def init_default_doit_vars(self):
    """Derive doit defaults: SYNC_BASE_URL_MAIN_ITEM from SYNC_BASE_URL,
    cross-OS TARGET_OS_NAMES / TARGET_OS_SECOND_NAME, and the
    no-progress-messages flag."""
    if "SYNC_BASE_URL" in var_stack:
        base_url = var_stack.ResolveVarToStr("SYNC_BASE_URL")
        var_stack.set_var("SYNC_BASE_URL_MAIN_ITEM", description="from init_default_doit_vars").append(utils.main_url_item(base_url))
    if var_stack.ResolveVarToStr("TARGET_OS") != var_stack.ResolveVarToStr("__CURRENT_OS__"):
        os_names = var_stack.ResolveVarToList(var_stack.ResolveStrToStr("$(TARGET_OS)_ALL_OS_NAMES"))
        var_stack.set_var("TARGET_OS_NAMES").extend(os_names)
        second_name = os_names[1] if len(os_names) > 1 else var_stack.ResolveVarToStr("TARGET_OS")
        var_stack.set_var("TARGET_OS_SECOND_NAME").append(second_name)
    self.platform_helper.no_progress_messages = "NO_PROGRESS_MESSAGES" in var_stack
def init_default_client_vars(self):
    """Derive client defaults: SYNC_BASE_URL_MAIN_ITEM, cross-OS
    TARGET_OS_NAMES / TARGET_OS_SECOND_NAME, repo-type defaults and
    P4_SYNC_DIR for P4 repositories."""
    if "SYNC_BASE_URL" in var_stack:
        #raise ValueError("'SYNC_BASE_URL' was not defined")
        base_url = var_stack.ResolveVarToStr("SYNC_BASE_URL")
        var_stack.set_var("SYNC_BASE_URL_MAIN_ITEM", description="from init_default_client_vars").append(utils.main_url_item(base_url))
    # TARGET_OS_NAMES defaults to __CURRENT_OS_NAMES__, which is not what we want
    # if syncing to an OS which is not the current
    if var_stack.ResolveVarToStr("TARGET_OS") != var_stack.ResolveVarToStr("__CURRENT_OS__"):
        os_names = var_stack.ResolveVarToList(var_stack.ResolveStrToStr("$(TARGET_OS)_ALL_OS_NAMES"))
        var_stack.set_var("TARGET_OS_NAMES").extend(os_names)
        second_name = os_names[1] if len(os_names) > 1 else var_stack.ResolveVarToStr("TARGET_OS")
        var_stack.set_var("TARGET_OS_SECOND_NAME").append(second_name)
    self.read_repo_type_defaults()
    if var_stack.ResolveVarToStr("REPO_TYPE", default="URL") == "P4":
        if "P4_SYNC_DIR" not in var_stack and "SYNC_BASE_URL" in var_stack:
            p4_sync_dir = utils.P4GetPathFromDepotPath(var_stack.ResolveVarToStr("SYNC_BASE_URL"))
            var_stack.set_var("P4_SYNC_DIR", "from SYNC_BASE_URL").append(p4_sync_dir)
def read_defines(self, a_node, *args, **kwargs):
    """Read a !define YAML mapping node: dispatch conditional (__if*),
    include and environment directives; every other (non-internal)
    identifier is defined as a variable. A !non_freeze tag on the value
    marks the variable's non_freeze flag."""
    # if document is empty we get a scalar node, hence the mapping check
    if a_node.isMapping():
        for identifier, contents in a_node.items():
            if identifier.startswith("__if"):  # __if__, __ifdef__, __ifndef__
                self.read_conditional_node(identifier, contents, *args, **kwargs)
            elif identifier == '__include__':
                self.read_include_node(contents, *args, **kwargs)
            elif identifier == "__include_if_exist__":
                kwargs.update({'ignore_if_not_exist': True})
                self.read_include_node(contents, *args, **kwargs)
            elif identifier == "__environment__":
                contents_list = [c.value for c in contents]
                var_stack.read_environment(contents_list)
            elif self.allow_reading_of_internal_vars or not internal_identifier_re.match(identifier):  # do not read internal state identifiers
                new_var = var_stack.set_var(identifier, str(contents.start_mark))
                if contents.tag == '!non_freeze':
                    new_var.non_freeze = True
                new_var.extend([item.value for item in contents])
def calculate_main_install_items(self):
    """Calculate the install and update iid sets from "MAIN_INSTALL_TARGETS".
    Writes __MAIN_INSTALL_IIDS__, __MAIN_UPDATE_IIDS__ and
    __ORPHAN_INSTALL_TARGETS__ (targets that could not be resolved).

    Raises ValueError when MAIN_INSTALL_TARGETS is not defined.
    """
    if "MAIN_INSTALL_TARGETS" not in var_stack:
        raise ValueError("'MAIN_INSTALL_TARGETS' was not defined")
    self.main_install_targets.extend(var_stack.ResolveVarToList("MAIN_INSTALL_TARGETS"))
    iids, guids = utils.separate_guids_from_iids(self.main_install_targets)
    iids_from_guids, orphaned_guids = self.items_table.iids_from_guids(guids)
    iids.extend(iids_from_guids)
    # special build-in iids may expand into plain install iids and update iids
    iids, update_candidates = self.resolve_special_build_in_iids(iids)
    iids, orphaned_iids = self.items_table.iids_from_iids(iids)
    update_iids, orphaned_update_iids = self.items_table.iids_from_iids(update_candidates)
    var_stack.set_var("__MAIN_INSTALL_IIDS__").extend(sorted(iids))
    var_stack.set_var("__MAIN_UPDATE_IIDS__").extend(sorted(update_iids))
    var_stack.set_var("__ORPHAN_INSTALL_TARGETS__").extend(sorted(orphaned_guids + orphaned_iids + orphaned_update_iids))
def do_help(self):
    """Show help for __HELP_SUBJECT__ using the help files bundled under the
    instl data folder."""
    import pyinstl.helpHelper
    var_stack.set_var("PRINT_COMMAND_TIME").append("no")  # do not print time report
    help_folder = os.path.join(var_stack.ResolveVarToStr("__INSTL_DATA_FOLDER__"), "help")
    pyinstl.helpHelper.do_help(var_stack.ResolveVarToStr("__HELP_SUBJECT__"), help_folder, self)
def do_version(self):
    """Print the program's version string."""
    # suppress the time report for this trivial command
    var_stack.set_var("PRINT_COMMAND_TIME").append("no")
    print(self.get_version_str())
if __name__ == "__main__":
    # Smoke test: log in to the Waves Central service, fetch the instl URL
    # combo collection and verify repo_rev / index.yaml are reachable.
    import re
    from configVar import var_stack

    # raw strings so \s and \d are regex escapes, not (deprecated) string escapes
    repo_rev_re = re.compile(r"^(REPO_REV:\s+\d+)", re.MULTILINE)
    index_yaml_re = re.compile(r"^(NUMBER_OF_BITS:\s+.+)", re.MULTILINE)
    domain = "betanlb.waves.com"
    reqs = WavesCentralRequester(domain)
    login_data = reqs.request("Login", {"Password": "******", "Username": "******"})
    # GetInstlUrlComboCollection
    combo_data = reqs.request("GetInstlUrlComboCollection", {'repositoryRevision': '-1', "repositoryVersions": [9]})
    #print("combo_data:\n", combo_data)
    InstlUrlAccessParameters = combo_data['oResult'][0]['InstlUrlAccessParameters']
    repo_rev_yaml_url = "https://" + InstlUrlAccessParameters['ResourceRootUrl'] + "/admin/V10_repo_rev.yaml"
    index_yaml_url = "https://" + InstlUrlAccessParameters['ResourceRootUrl'] + "/V10/795/instl/index.yaml"
    # install the cookies so subsequent url reads are authenticated
    netloc_and_cookies = translate_cookies_from_GetInstlUrlComboCollection(InstlUrlAccessParameters)
    var_stack.set_var("COOKIE_JAR").append(netloc_and_cookies)
    the_text = utils.read_file_or_url(repo_rev_yaml_url)
    # NOTE(review): the_text.name implies read_file_or_url returns a named,
    # string-like object rather than a plain str — confirm
    print(the_text.name, repo_rev_re.search(the_text).groups(1)[0])
    the_text = utils.read_file_or_url(index_yaml_url)
    print("index.yaml:", index_yaml_re.search(the_text).groups(1)[0])
    local_index = "/Volumes/BonaFide/installers/betainstl/V10/svn/instl/index.yaml"
    the_text = utils.read_file_or_url(local_index)
    print("local index.yaml:", index_yaml_re.search(the_text).groups(1)[0])