def resolve_defined_paths(self):
    """Resolve each variable named in $(PATHS_TO_RESOLVE) to an actual file
    location using the path searcher seeded with $(SEARCH_PATHS)."""
    self.path_searcher.add_search_paths(var_stack.resolve_to_list("$(SEARCH_PATHS)"))
    for path_var in var_stack.resolve_to_list("$(PATHS_TO_RESOLVE)"):
        if path_var not in var_stack:
            continue  # nothing to resolve for undefined variables
        found_path = self.path_searcher.find_file(var_stack.resolve_var(path_var),
                                                  return_original_if_not_found=True)
        var_stack.set_var(path_var, "resolve_defined_paths").append(found_path)
def init_default_vars(self, initial_vars):
    """Seed the variable list: constants from initial_vars, the defaults/*.yaml
    files, compilation-time info and the log file locations.
    """
    if initial_vars:
        var_description = "from initial_vars"
        for var, value in initial_vars.iteritems():
            # a plain string is a single value; anything else is a value sequence
            if isinstance(value, basestring):
                var_list.add_const_config_variable(var, var_description, value)
            else:
                var_list.add_const_config_variable(var, var_description, *value)

    var_description = "from InstlInstanceBase.init_default_vars"

    # read defaults/main.yaml (mandatory)
    main_defaults_file_path = os.path.join(var_list.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults", "main.yaml")
    self.read_yaml_file(main_defaults_file_path)

    # read defaults/compile-info.yaml (present only in compiled distributions)
    compile_info_file_path = os.path.join(var_list.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults", "compile-info.yaml")
    if os.path.isfile(compile_info_file_path):
        self.read_yaml_file(compile_info_file_path)
    if "__COMPILATION_TIME__" not in var_list:
        if var_list.resolve("$(__INSTL_COMPILED__)") == "True":
            compilation_time = "unknown compilation time"
        else:
            compilation_time = "(not compiled)"
        var_list.add_const_config_variable("__COMPILATION_TIME__", var_description, compilation_time)

    # read class specific defaults/<ClassName>.yaml, if present
    class_specific_defaults_file_path = os.path.join(var_list.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults", type(self).__name__ + ".yaml")
    if os.path.isfile(class_specific_defaults_file_path):
        self.read_yaml_file(class_specific_defaults_file_path)

    exec_name = var_list.resolve("$(INSTL_EXEC_DISPLAY_NAME)")
    var_list.set_var("LOG_FILE", var_description).append(
        pyinstl.log_utils.get_log_file_path(exec_name, exec_name, debug=False))
    var_list.set_var("LOG_FILE_DEBUG", var_description).extend((
        pyinstl.log_utils.get_log_file_path(exec_name, exec_name, debug=True),
        logging.getLevelName(pyinstl.log_utils.debug_logging_level),
        pyinstl.log_utils.debug_logging_started))
def do_set(self, params):
    """Interactively set a variable: first token is the name, remaining tokens
    are its values; echo the result via do_list."""
    if params:
        tokens = shlex.split(params)
        var_name, var_values = tokens[0], tokens[1:]
        var_list.set_var(var_name, "set interactively").extend(var_values)
        self.do_list(var_name)
    return False
def read_include_node(self, i_node):
    """Process an __include__ node.

    A scalar node is a file path/url to read; a sequence is processed
    recursively; a mapping with url/checksum/sig keys is a signed remote file
    that is downloaded to the cache folder, verified, and read. A 'copy' key
    additionally queues a copy of the cached file in the 'post-sync' section.
    """
    if i_node.isScalar():
        resolved_file_name = var_list.resolve(i_node.value)
        self.read_yaml_file(resolved_file_name)
    elif i_node.isSequence():
        for sub_i_node in i_node:
            self.read_include_node(sub_i_node)
    elif i_node.isMapping():
        if "url" in i_node and "checksum" in i_node and "sig" in i_node:
            resolved_file_url = var_list.resolve(i_node["url"].value)
            resolved_checksum = var_list.resolve(i_node["checksum"].value)
            resolved_signature = var_list.resolve(i_node["sig"].value)
            # the cached copy is named after its checksum
            cached_files_dir = self.get_default_sync_dir(continue_dir="cache", mkdir=True)
            cached_file = os.path.join(cached_files_dir, resolved_checksum)
            # lazily load PUBLIC_KEY from PUBLIC_KEY_FILE when not already defined
            if "PUBLIC_KEY" not in var_list:
                if "PUBLIC_KEY_FILE" in var_list:
                    public_key_file = var_list.resolve("$(PUBLIC_KEY_FILE)")
                    with open_for_read_file_or_url(public_key_file, self.path_searcher) as file_fd:
                        public_key_text = file_fd.read()
                        var_list.set_var("PUBLIC_KEY", "from "+public_key_file).append(public_key_text)
            public_key_text = var_list.resolve("$(PUBLIC_KEY)")
            # download (or reuse cache), verifying signature and checksum
            download_from_file_or_url(resolved_file_url, cached_file, cache=True,
                                      public_key=public_key_text,
                                      textual_sig=resolved_signature,
                                      expected_checksum=resolved_checksum)
            self.read_yaml_file(cached_file)
            if "copy" in i_node:
                self.batch_accum.set_current_section('post-sync')
                self.batch_accum += self.platform_helper.copy_tool.copy_file_to_file(cached_file, var_list.resolve(i_node["copy"].value), link_dest=True)
def write_batch_file(self): self.batch_accum.set_current_section('pre') self.batch_accum += self.platform_helper.get_install_instructions_prefix() self.batch_accum.set_current_section('post') var_stack.set_var("TOTAL_ITEMS_FOR_PROGRESS_REPORT").append( str(self.platform_helper.num_items_for_progress_report)) self.batch_accum += self.platform_helper.get_install_instructions_postfix() lines = self.batch_accum.finalize_list_of_lines() lines_after_var_replacement = '\n'.join( [value_ref_re.sub(self.platform_helper.var_replacement_pattern, line) for line in lines]) from utils import write_to_file_or_stdout out_file = var_stack.resolve("$(__MAIN_OUT_FILE__)", raise_on_fail=True) with write_to_file_or_stdout(out_file) as fd: fd.write(lines_after_var_replacement) fd.write('\n') if out_file != "stdout": self.out_file_realpath = os.path.realpath(out_file) # chmod to 0777 so that file created under sudo, can be re-written under regular user. # However regular user cannot chmod for file created under sudo, hence the try/except try: os.chmod(self.out_file_realpath, 0777) except: pass else: self.out_file_realpath = "stdout" msg = " ".join( (self.out_file_realpath, str(self.platform_helper.num_items_for_progress_report), "progress items")) print(msg) logging.info(msg)
def do_up_repo_rev(self):
    """Build and write a batch that uploads the repo-rev file to S3.

    With --just-with-number N > 0 only the versioned copy
    $(REPO_REV_FILE_NAME).$(REPO_REV) is uploaded; with N == 0 the
    un-versioned $(REPO_REV_FILE_NAME) is refreshed as well.
    """
    self.batch_accum.set_current_section('admin')
    just_with_number = int(var_list.resolve("$(__JUST_WITH_NUMBER__)"))
    if just_with_number > 0:
        var_list.set_var("REPO_REV").append("$(__JUST_WITH_NUMBER__)")
    if just_with_number == 0:
        # refresh the un-versioned repo-rev file
        self.batch_accum += " ".join(["aws", "s3", "cp",
                                      "\"$(ROOT_LINKS_FOLDER)/admin/$(REPO_REV_FILE_NAME).$(REPO_REV)\"",
                                      "\"s3://$(S3_BUCKET_NAME)/admin/$(REPO_REV_FILE_NAME)\"",
                                      "--acl", "public-read",
                                      "--content-type", 'text/plain'])
        # message now matches the un-versioned destination (was swapped with the versioned one)
        self.batch_accum += self.platform_helper.progress("Uploaded '$(ROOT_LINKS_FOLDER)/admin/$(REPO_REV_FILE_NAME).$(REPO_REV)' to 's3://$(S3_BUCKET_NAME)/admin/$(REPO_REV_FILE_NAME)'")
    # always upload the versioned repo-rev file
    self.batch_accum += " ".join(["aws", "s3", "cp",
                                  "\"$(ROOT_LINKS_FOLDER)/admin/$(REPO_REV_FILE_NAME).$(REPO_REV)\"",
                                  "\"s3://$(S3_BUCKET_NAME)/admin/$(REPO_REV_FILE_NAME).$(REPO_REV)\"",
                                  "--acl", "public-read",
                                  "--content-type", 'text/plain'])
    # message now matches the versioned destination
    self.batch_accum += self.platform_helper.progress("Uploaded '$(ROOT_LINKS_FOLDER)/admin/$(REPO_REV_FILE_NAME).$(REPO_REV)' to 's3://$(S3_BUCKET_NAME)/admin/$(REPO_REV_FILE_NAME).$(REPO_REV)'")
    self.create_variables_assignment()
    self.write_batch_file()
    if "__RUN_BATCH_FILE__" in var_list:
        self.run_batch_file()
def do_copy(self, params):
    """Run the client's 'copy' command; output goes to params or to stdout."""
    out_file = params if params else "stdout"
    var_list.set_var("__MAIN_OUT_FILE__").append(out_file)
    var_list.set_var("__MAIN_COMMAND__").append("copy")
    self.client_prog_inst.do_command()
    return False
def write_history(self):
    """Persist the GUI configuration variables (plus the currently selected
    notebook tab) to the GUI config yaml file."""
    active_tab = self.notebook.tab(self.notebook.select(), option='text')
    var_stack.set_var("SELECTED_TAB").append(active_tab)

    vars_to_save = var_stack.resolve_var_to_list("__GUI_CONFIG_FILE_VARS__")
    yaml_ready_list = var_stack.repr_for_yaml(which_vars=vars_to_save,
                                              include_comments=False,
                                              resolve=False,
                                              ignore_unknown_vars=True)
    yaml_doc = augmentedYaml.YamlDumpDocWrap(yaml_ready_list, '!define', "Definitions",
                                             explicit_start=True, sort_mappings=True)
    with open(var_stack.resolve_var("INSTL_GUI_CONFIG_FILE_NAME"), "w") as wfd:
        make_open_file_read_write_for_all(wfd)
        augmentedYaml.writeAsYaml(yaml_doc, wfd)
def read_defines(self, a_node):
    """Read a !define document node: every mapping entry becomes a variable;
    an '__include__' entry pulls in additional files via read_include_node.
    """
    # if document is empty we get a scalar node
    if a_node.isMapping():
        for identifier, contents in a_node:
            logging.debug("%s: %s", identifier, str(contents))
            if self.allow_reading_of_internal_vars or not self.internal_identifier_re.match(identifier):  # do not read internal state identifiers
                var_stack.set_var(identifier, str(contents.start_mark)).extend([item.value for item in contents])
            elif identifier == '__include__':
                # NOTE(review): when allow_reading_of_internal_vars is True the first
                # branch captures '__include__' as a plain variable instead of
                # including the files -- confirm this is intended
                self.read_include_node(contents)
def update_client_input_file_combo(self, *args):
    """Refresh the input-file combobox with the regular files that live next
    to the chosen input file, and record the choice in CLIENT_GUI_IN_FILE.

    Fix: removed the unused locals prev_input_file / new_input_file_name.
    """
    new_input_file = self.client_input_path_var.get()
    if os.path.isfile(new_input_file):
        new_input_file_dir = os.path.dirname(new_input_file)
        items_in_dir = os.listdir(new_input_file_dir)
        # offer every regular file (not sub-folder) from the same directory
        dir_items = [os.path.join(new_input_file_dir, item) for item in items_in_dir
                     if os.path.isfile(os.path.join(new_input_file_dir, item))]
        self.client_input_combobox.configure(values=dir_items)
    var_stack.set_var("CLIENT_GUI_IN_FILE").append(self.client_input_path_var.get())
def init_sync_vars(self):
    """Prepare the relative bookkeeping/source path variables needed for sync."""
    var_description = "from InstlInstanceBase.init_sync_vars"
    self.check_prerequisite_var_existence(("SYNC_BASE_URL", "SVN_CLIENT_PATH"))
    var_list.set_value_if_var_does_not_exist("REPO_REV", "HEAD", description=var_description)

    sync_base_url = var_list.resolve("$(SYNC_BASE_URL)")
    bookkeeping_relative_path = relative_url(sync_base_url, var_list.resolve("$(BOOKKEEPING_DIR_URL)"))
    var_list.set_var("REL_BOOKKIPING_PATH", var_description).append(bookkeeping_relative_path)

    rel_sources = relative_url(sync_base_url, var_list.resolve("$(SYNC_BASE_URL)/$(SOURCE_PREFIX)"))
    var_list.set_var("REL_SRC_PATH", var_description).append(rel_sources)
def init_sync_vars(self):
    """Prepare sync variables for svn-based sync, on top of the base class work."""
    super(InstlInstanceSync_svn, self).init_sync_vars()
    var_description = "InstlInstanceSync_svn.init_sync_vars"
    var_stack.set_value_if_var_does_not_exist("REPO_REV", "HEAD", description=var_description)
    bookkeeping_relative_path = relative_url(var_stack.resolve("$(SYNC_BASE_URL)"), var_stack.resolve("$(BOOKKEEPING_DIR_URL)"))
    # "REL_BOOKKIPING_PATH" (sic) is the spelling used project-wide
    var_stack.set_var("REL_BOOKKIPING_PATH", var_description).append(bookkeeping_relative_path)
    # NOTE(review): both arguments to relative_url are $(SYNC_BASE_URL), so
    # rel_sources is the URL relative to itself; a sibling implementation
    # appends a source prefix here -- confirm this is intended
    rel_sources = relative_url(var_stack.resolve("$(SYNC_BASE_URL)"), var_stack.resolve("$(SYNC_BASE_URL)"))
    var_stack.set_var("REL_SRC_PATH", var_description).append(rel_sources)
def do_up2s3(self):
    """Build a batch that uploads to S3 every revision folder that has a
    create-links stamp but no up-2-s3 stamp yet, then write (and optionally
    run) the batch file.

    A per-revision sub-accumulator template is rendered with __CURR_REPO_REV__
    set to each revision number before being appended to the main accumulator.
    """
    root_links_folder = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)")  # NOTE(review): assigned but unused here
    # call svn info and to find out the last repo revision
    base_repo_rev = int(var_list.resolve("$(BASE_REPO_REV)"))
    last_repo_rev = self.get_last_repo_rev()
    revision_list = range(base_repo_rev, last_repo_rev+1)
    dirs_to_upload = list()
    no_need_upload_nums = list()
    yes_need_upload_nums = list()
    for dir_as_int in revision_list:
        dir_name = str(dir_as_int)
        if not os.path.isdir(var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/"+dir_name)):
            print("revision dir", dir_name, "is missing, run create-links to create this folder")
        else:
            # a revision may be uploaded only after create-links stamped it...
            create_links_done_stamp_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/"+dir_name+"/$(CREATE_LINKS_STAMP_FILE_NAME)")
            if not os.path.isfile(create_links_done_stamp_file):
                print("revision dir", dir_name, "does not have create-links stamp file:", create_links_done_stamp_file)
            else:
                # ...and is skipped when already uploaded (up-2-s3 stamp present)
                up_2_s3_done_stamp_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/"+dir_name+"/$(UP_2_S3_STAMP_FILE_NAME)")
                if os.path.isfile(up_2_s3_done_stamp_file):
                    no_need_upload_nums.append(dir_name)
                else:
                    yes_need_upload_nums.append(dir_name)
                    dirs_to_upload.append(dir_name)
    # report what will and will not be uploaded
    if yes_need_upload_nums:
        if no_need_upload_nums:
            no_need_upload__str = ", ".join(no_need_upload_nums)
            msg = " ".join( ("Revisions already uploaded to S3:", no_need_upload__str) )
            print(msg)
        yes_need_upload_str = ", ".join(yes_need_upload_nums)
        msg = " ".join( ("Revisions will be uploaded to S3:", yes_need_upload_str) )
        print(msg)
    else:
        msg = " ".join( ("All revisions already uploaded to S3:", str(base_repo_rev), "...", str(last_repo_rev)) )
        print(msg)
    self.batch_accum.set_current_section('upload')
    for dir_name in dirs_to_upload:
        accum = BatchAccumulator()  # sub-accumulator serves as a template for each version
        accum.set_current_section('upload')
        save_dir_var = "REV_"+dir_name+"_SAVE_DIR"
        self.batch_accum += self.platform_helper.save_dir(save_dir_var)
        # template lines resolve against __CURR_REPO_REV__, so set it per revision
        var_list.set_var("__CURR_REPO_REV__").append(dir_name)
        self.do_upload_to_s3_aws_for_revision(accum)
        revision_lines = accum.finalize_list_of_lines()  # will resolve with current __CURR_REPO_REV__
        self.batch_accum += revision_lines
        self.batch_accum += self.platform_helper.restore_dir(save_dir_var)
        self.batch_accum += self.platform_helper.new_line()
    self.create_variables_assignment()
    self.write_batch_file()
    if "__RUN_BATCH_FILE__" in var_list:
        self.run_batch_file()
def provision_public_key_text(self):
    """Return the public key text, loading it from $(PUBLIC_KEY_FILE) on first use.

    Raises ValueError when neither PUBLIC_KEY nor PUBLIC_KEY_FILE is defined.
    """
    if "PUBLIC_KEY" not in var_stack:
        if "PUBLIC_KEY_FILE" not in var_stack:
            raise ValueError("No public key, variables PUBLIC_KEY & PUBLIC_KEY_FILE are not defined")
        public_key_file = var_stack.resolve("$(PUBLIC_KEY_FILE)")
        with open_for_read_file_or_url(public_key_file, self.path_searcher) as file_fd:
            public_key_text = file_fd.read()
        # cache the key so subsequent calls skip the file read
        var_stack.set_var("PUBLIC_KEY", "from " + public_key_file).append(public_key_text)
    return var_stack.resolve("$(PUBLIC_KEY)")
def create_instl_history_file(self):
    """Dump all variable definitions to the instl history temp file and queue
    appending it to the permanent history file -- but only when the local
    bookkeeping folder exists."""
    var_stack.set_var("__BATCH_CREATE_TIME__").append(time.strftime("%Y/%m/%d %H:%M:%S"))
    defines_doc = augmentedYaml.YamlDumpDocWrap(var_stack, '!define', "Definitions",
                                                explicit_start=True, sort_mappings=True)
    # write the history file, but only if variable LOCAL_REPO_BOOKKEEPING_DIR is
    # defined and the folder actually exists.
    bookkeeping_dir = var_stack.resolve("$(LOCAL_REPO_BOOKKEEPING_DIR)", default="")
    if os.path.isdir(bookkeeping_dir):
        with open(var_stack.resolve("$(INSTL_HISTORY_TEMP_PATH)"), "w") as wfd:
            make_open_file_read_write_for_all(wfd)
            augmentedYaml.writeAsYaml(defines_doc, wfd)
        self.batch_accum += self.platform_helper.append_file_to_file("$(INSTL_HISTORY_TEMP_PATH)",
                                                                     "$(INSTL_HISTORY_PATH)")
def calculate_default_install_item_set(self):
    """ calculate the set of iids to install from the "MAIN_INSTALL_TARGETS" variable.
        Full set of install iids and orphan iids are also writen to variable.
    """
    if "MAIN_INSTALL_TARGETS" not in var_list:
        raise ValueError("'MAIN_INSTALL_TARGETS' was not defined")
    # restrict item lookup to the OSes we are installing for
    for target_os_name in var_list.resolve_to_list("$(TARGET_OS_NAMES)"):
        InstallItem.begin_get_for_specific_os(target_os_name)
    main_targets = var_list.resolve_to_list("$(MAIN_INSTALL_TARGETS)")
    self.installState.root_install_items.extend(main_targets)
    # drop empty entries
    self.installState.root_install_items = filter(bool, self.installState.root_install_items)
    self.installState.calculate_full_install_items_set(self)
    var_list.set_var("__FULL_LIST_OF_INSTALL_TARGETS__").extend(self.installState.full_install_items)
    var_list.set_var("__ORPHAN_INSTALL_TARGETS__").extend(self.installState.orphan_install_items)
def do_create_links(self):
    """Build a batch that creates revision link folders for every revision
    between $(BASE_REPO_REV) and the last repo revision that does not have
    links yet, then write (and optionally run) the batch file.
    """
    self.check_prerequisite_var_existence(("REPO_NAME", "SVN_REPO_URL", "ROOT_LINKS_FOLDER_REPO"))
    self.batch_accum.set_current_section('links')
    info_as_io = None  # NOTE(review): assigned but never used in this method
    # call svn info and to find out the last repo revision
    last_repo_rev = self.get_last_repo_rev()
    self.batch_accum += self.platform_helper.mkdir("$(ROOT_LINKS_FOLDER_REPO)/Base")
    accum = BatchAccumulator()  # sub-accumulator serves as a template for each version
    accum.set_current_section('links')
    self.create_links_for_revision(accum)
    no_need_link_nums = list()
    yes_need_link_nums = list()
    base_rev = int(var_list.resolve("$(BASE_REPO_REV)"))
    if base_rev > last_repo_rev:
        raise ValueError("base_rev "+str(base_rev)+" > last_repo_rev "+str(last_repo_rev))
    for revision in range(base_rev, last_repo_rev+1):
        if self.needToCreatelinksForRevision(revision):
            yes_need_link_nums.append(str(revision))
            save_dir_var = "REV_"+str(revision)+"_SAVE_DIR"
            self.batch_accum += self.platform_helper.save_dir(save_dir_var)
            # template lines resolve against __CURR_REPO_REV__, so set it per revision
            var_list.set_var("__CURR_REPO_REV__").append(str(revision))
            revision_lines = accum.finalize_list_of_lines()  # will resolve with current __CURR_REPO_REV__
            self.batch_accum += revision_lines
            self.batch_accum += self.platform_helper.restore_dir(save_dir_var)
            self.batch_accum += self.platform_helper.new_line()
        else:
            no_need_link_nums.append(str(revision))
    # report which revisions get links and which already have them
    if yes_need_link_nums:
        if no_need_link_nums:
            no_need_links_str = ", ".join(no_need_link_nums)
            msg = " ".join( ("Links already created for revisions:", no_need_links_str) )
            print(msg)
        yes_need_links_str = ", ".join(yes_need_link_nums)
        msg = " ".join( ("Need to create links for revisions:", yes_need_links_str) )
        print(msg)
    else:
        msg = " ".join( ("Links already created for all revisions:", str(base_rev), "...", str(last_repo_rev)) )
        print(msg)
    self.create_variables_assignment()
    self.write_batch_file()
    if "__RUN_BATCH_FILE__" in var_list:
        self.run_batch_file()
def init_sync_vars(self):
    """ Prepares variables for sync. Will raise ValueError if a mandatory variable is not defined. """
    var_description = "from InstlInstanceBase.init_sync_vars"
    self.instlObj.check_prerequisite_var_existence(("SYNC_BASE_URL", "DOWNLOAD_TOOL_PATH", "REPO_REV"))
    # lazily load the public key from $(PUBLIC_KEY_FILE) when not already defined
    if "PUBLIC_KEY" not in var_list and "PUBLIC_KEY_FILE" in var_list:
        public_key_file = var_list.resolve("$(PUBLIC_KEY_FILE)")
        with open_for_read_file_or_url(public_key_file, self.instlObj.path_searcher) as file_fd:
            public_key_text = file_fd.read()
        var_list.set_var("PUBLIC_KEY", "from "+public_key_file).append(public_key_text)
    self.local_sync_dir = var_list.resolve("$(LOCAL_REPO_SYNC_DIR)")
def init_from_cmd_line_options(self, cmd_line_options_obj):
    """ turn command line options into variables """
    # options whose values become immutable (const) variables; the second tuple
    # member is a default applied when the option was not given (None = no default)
    const_attrib_to_var = {
        "input_file": ("__MAIN_INPUT_FILE__", None),
        "output_file": ("__MAIN_OUT_FILE__", None),
        "props_file": ("__PROPS_FILE__", None),
        "config_file": ("__CONFIG_FILE__", None),
        "sh1_checksum": ("__SHA1_CHECKSUM__", None),
        "rsa_signature": ("__RSA_SIGNATURE__", None),
        "start_progress": ("__START_DYNAMIC_PROGRESS__", "0"),
        "total_progress": ("__TOTAL_DYNAMIC_PROGRESS__", "0"),
        "just_with_number": ("__JUST_WITH_NUMBER__", "0"),
        "limit_command_to": ("__LIMIT_COMMAND_TO__", None),
    }
    for attrib, var in const_attrib_to_var.iteritems():
        attrib_value = getattr(cmd_line_options_obj, attrib)
        if attrib_value:
            var_list.add_const_config_variable(var[0], "from command line options", *attrib_value)
        elif var[1] is not None:  # there's a default; 'is not None' (matching the
            # sibling implementation) so a falsy default such as "" is still applied
            var_list.add_const_config_variable(var[0], "from default", var[1])

    # options that become regular (overridable) variables
    non_const_attrib_to_var = {
        "filter_in": "__FILTER_IN_VERSION__",
        "target_repo_rev": "TARGET_REPO_REV",
        "base_repo_rev": "BASE_REPO_REV",
    }
    for attrib, var in non_const_attrib_to_var.iteritems():
        attrib_value = getattr(cmd_line_options_obj, attrib)
        if attrib_value:
            var_list.set_var(var, "from command line options").append(attrib_value[0])

    if cmd_line_options_obj.command:
        var_list.set_var("__MAIN_COMMAND__", "from command line options").append(cmd_line_options_obj.command)

    # __HELP_SUBJECT__ is always defined, possibly empty
    if hasattr(cmd_line_options_obj, "subject") and cmd_line_options_obj.subject is not None:
        var_list.add_const_config_variable("__HELP_SUBJECT__", "from command line options", cmd_line_options_obj.subject)
    else:
        var_list.add_const_config_variable("__HELP_SUBJECT__", "from command line options", "")

    if cmd_line_options_obj.state_file:
        var_list.add_const_config_variable("__MAIN_STATE_FILE__", "from command line options", cmd_line_options_obj.state_file)
    if cmd_line_options_obj.filter_out:
        var_list.add_const_config_variable("__FILTER_OUT_PATHS__", "from command line options",
                                           *cmd_line_options_obj.filter_out)
    if cmd_line_options_obj.run:
        var_list.add_const_config_variable("__RUN_BATCH_FILE__", "from command line options", "yes")
def calc_user_cache_dir_var(self, make_dir=True):
    """Define USER_CACHE_DIR (the per-user, per-OS cache folder) if it is not
    already defined, and optionally create the folder on disk.

    Fix: an unknown OS family previously fell through all branches and raised
    NameError on the unbound user_cache_dir; now raises a clear ValueError.
    """
    if "USER_CACHE_DIR" not in var_stack:
        os_family_name = var_stack.resolve("$(__CURRENT_OS__)")
        if os_family_name == "Mac":
            user_cache_dir_param = "$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)"
            user_cache_dir = appdirs.user_cache_dir(user_cache_dir_param)
        elif os_family_name == "Win":
            user_cache_dir = appdirs.user_cache_dir("$(INSTL_EXEC_DISPLAY_NAME)", "$(COMPANY_NAME)")
        elif os_family_name == "Linux":
            user_cache_dir_param = "$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)"
            user_cache_dir = appdirs.user_cache_dir(user_cache_dir_param)
        else:
            raise ValueError("cannot calculate user cache dir for unknown OS family: " + os_family_name)
        var_description = "from InstlInstanceBase.get_user_cache_dir"
        var_stack.set_var("USER_CACHE_DIR", var_description).append(user_cache_dir)
    if make_dir:
        user_cache_dir_resolved = var_stack.resolve("$(USER_CACHE_DIR)", raise_on_fail=True)
        safe_makedirs(user_cache_dir_resolved)
def get_last_repo_rev(self):
    """Query svn for the head revision of $(SVN_REPO_URL), store it in
    __LAST_REPO_REV__ and return it.

    Raises ValueError when 'svn info' fails or no Revision line is found.
    Fix: the pattern is now a raw string so \\s and \\d reach the regex engine
    as regex escapes rather than relying on Python leaving unknown string
    escapes alone.
    """
    retVal = 0
    revision_line_re = re.compile(r"^Revision:\s+(?P<last_rev>\d+)$")
    repo_url = var_list.resolve("$(SVN_REPO_URL)")
    if os.path.isdir(repo_url):
        # local working copy: cd into it (below) and ask about "."
        svn_info_command = [var_list.resolve("$(SVN_CLIENT_PATH)"), "info", "."]
    else:
        svn_info_command = [var_list.resolve("$(SVN_CLIENT_PATH)"), "info", repo_url]
    with ChangeDirIfExists(repo_url):
        proc = subprocess.Popen(svn_info_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        my_stdout, my_stderr = proc.communicate()
        if proc.returncode != 0 or my_stderr != "":
            raise ValueError("Could not read info from svn: "+my_stderr)
        info_as_io = StringIO.StringIO(my_stdout)
        for line in info_as_io:
            match = revision_line_re.match(line)
            if match:
                retVal = int(match.group("last_rev"))
                break
    if retVal <= 0:
        raise ValueError("Could not find last repo rev for "+repo_url)
    var_list.set_var("__LAST_REPO_REV__").append(str(retVal))
    return retVal
def create_remove_instructions(self):
    """Build the 'remove' batch section: for every target folder (deepest
    resolved paths first) emit the pre/post remove actions and per-source
    remove instructions, based on the have-info map of installed files.
    """
    self.have_map = svnTree.SVNTree()
    have_info_path = var_stack.resolve("$(HAVE_INFO_MAP_PATH)")
    # fall back to the site-wide have-info map when the local one is absent
    if not os.path.isfile(have_info_path):
        have_info_path = var_stack.resolve("$(SITE_HAVE_INFO_MAP_PATH)")
    self.have_map.read_info_map_from_file(have_info_path, a_format="text")
    self.batch_accum.set_current_section('remove')
    self.batch_accum += self.platform_helper.progress("Starting remove")
    # reverse-sort by resolved path so deeper folders are removed before their parents
    sorted_target_folder_list = sorted(self.installState.install_items_by_target_folder, key=lambda fold: var_stack.resolve(fold), reverse=True)
    # print(sorted_target_folder_list)
    self.accumulate_unique_actions('pre_remove', self.installState.full_install_items)
    for folder_name in sorted_target_folder_list:
        var_stack.set_var("__TARGET_DIR__").append(os.path.normpath(folder_name))
        items_in_folder = self.installState.install_items_by_target_folder[folder_name]
        logging.info("folder %s", var_stack.resolve(folder_name))
        self.batch_accum += self.platform_helper.new_line()
        self.accumulate_unique_actions('pre_remove_from_folder', items_in_folder)
        for IID in items_in_folder:
            # NOTE(review): entering the item presumably exposes its iid_* variables
            # to var_stack for the duration of the with block -- confirm
            with self.install_definitions_index[IID] as installi:
                for source_var in var_stack.get_configVar_obj("iid_source_var_list"):
                    source = var_stack.resolve_var_to_list(source_var)
                    self.batch_accum += var_stack.resolve_var_to_list_if_exists("iid_action_list_pre_remove_item")
                    self.create_remove_instructions_for_source(folder_name, source)
                    self.batch_accum += var_stack.resolve_var_to_list_if_exists("iid_action_list_post_remove_item")
                    self.batch_accum += self.platform_helper.progress("Remove {installi.name}".format(**locals()))
        self.accumulate_unique_actions('post_remove_from_folder', items_in_folder)
    self.accumulate_unique_actions('post_remove', self.installState.full_install_items)
def init_default_client_vars(self):
    """Derive client-side defaults: the sync URL main item, the target-OS name
    variables and, for P4 repositories, the P4 sync folder."""
    if "SYNC_BASE_URL" in var_stack:
        #raise ValueError("'SYNC_BASE_URL' was not defined")
        url_main_item = main_url_item(var_stack.resolve("$(SYNC_BASE_URL)"))
        var_stack.set_var("SYNC_BASE_URL_MAIN_ITEM", description="from init_default_client_vars").append(url_main_item)
    # TARGET_OS_NAMES defaults to __CURRENT_OS_NAMES__, which is not what we want
    # if syncing to an OS which is not the current
    if var_stack.resolve("$(TARGET_OS)") != var_stack.resolve("$(__CURRENT_OS__)"):
        target_os_names = var_stack.resolve_var_to_list(var_stack.resolve("$(TARGET_OS)_ALL_OS_NAMES"))
        var_stack.set_var("TARGET_OS_NAMES").extend(target_os_names)
        # the "second name" is the alternate OS name when one exists
        second_name = target_os_names[1] if len(target_os_names) > 1 else var_stack.resolve("$(TARGET_OS)")
        var_stack.set_var("TARGET_OS_SECOND_NAME").append(second_name)
    self.read_repo_type_defaults()
    if var_stack.resolve("$(REPO_TYPE)") == "P4":
        if "P4_SYNC_DIR" not in var_stack and "SYNC_BASE_URL" in var_stack:
            p4_sync_dir = P4GetPathFromDepotPath(var_stack.resolve("$(SYNC_BASE_URL)"))
            var_stack.set_var("P4_SYNC_DIR", "from SYNC_BASE_URL").append(p4_sync_dir)
def set_default_variables(self):
    """Read the optional config file and best-effort load the public/private
    key texts from their *_FILE variables.

    Fixes: 'resove' typos (both key branches always raised AttributeError and
    were silently swallowed, so the keys were never loaded); the private-key
    branch stored its text under "PUBLIC_KEY" by copy-paste mistake; file
    handles are now closed; the bare excepts are narrowed to Exception.
    """
    if "__CONFIG_FILE__" in var_list:
        config_file_resolved = self.path_searcher.find_file(var_list.resolve("$(__CONFIG_FILE__)"),
                                                            return_original_if_not_found=True)
        var_list.set_var("__CONFIG_FILE_PATH__").append(config_file_resolved)
        self.read_yaml_file(config_file_resolved)
        self.resolve_defined_paths()
    if "PUBLIC_KEY" not in var_list:
        if "PUBLIC_KEY_FILE" in var_list:
            try:
                public_key_file = var_list.resolve("$(PUBLIC_KEY_FILE)")
                with open(public_key_file, "rb") as rfd:
                    public_key_text = rfd.read()
                var_list.set_var("PUBLIC_KEY", "from "+public_key_file).append(public_key_text)
            except Exception:
                pass  # best effort: a missing/unreadable key file is not fatal here
    if "PRIVATE_KEY" not in var_list:
        if "PRIVATE_KEY_FILE" in var_list:
            try:
                private_key_file = var_list.resolve("$(PRIVATE_KEY_FILE)")
                with open(private_key_file, "rb") as rfd:
                    private_key_text = rfd.read()
                var_list.set_var("PRIVATE_KEY", "from "+private_key_file).append(private_key_text)
            except Exception:
                pass  # best effort: a missing/unreadable key file is not fatal here
def create_instl_history_file(self):
    """Write all variable definitions to the history temp file and queue a
    batch instruction that appends it to the permanent history file."""
    var_list.set_var("__BATCH_CREATE_TIME__").append(time.strftime("%Y/%m/%d %H:%M:%S"))
    defines_doc = augmentedYaml.YamlDumpDocWrap(var_list, '!define', "Definitions",
                                                explicit_start=True, sort_mappings=True)
    with open(var_list.resolve("$(INSTL_HISTORY_TEMP_PATH)"), "w") as wfd:
        augmentedYaml.writeAsYaml(defines_doc, wfd)
    self.batch_accum += self.platform_helper.append_file_to_file("$(INSTL_HISTORY_TEMP_PATH)",
                                                                 "$(INSTL_HISTORY_PATH)")
def init_from_cmd_line_options(self, cmd_line_options_obj):
    """ turn command line options into variables """
    # options whose values become immutable (const) variables; the second tuple
    # member is a default applied when the option was not given (None = no default)
    const_attrib_to_var = {
        "input_file": ("__MAIN_INPUT_FILE__", None),
        "output_file": ("__MAIN_OUT_FILE__", None),
        "props_file": ("__PROPS_FILE__", None),
        "config_file": ("__CONFIG_FILE__", None),
        "sh1_checksum": ("__SHA1_CHECKSUM__", None),
        "rsa_signature": ("__RSA_SIGNATURE__", None),
        "start_progress": ("__START_DYNAMIC_PROGRESS__", "0"),
        "total_progress": ("__TOTAL_DYNAMIC_PROGRESS__", "0"),
        "just_with_number": ("__JUST_WITH_NUMBER__", "0"),
        "limit_command_to": ("__LIMIT_COMMAND_TO__", None),
        "shortcut_path": ("__SHORTCUT_PATH__", None),
        "target_path": ("__SHORTCUT_TARGET_PATH__", None),
        "credentials": ("__CREDENTIALS__", None),
        "base_url": ("__BASE_URL__", None),
        "file_sizes_file": ("__FILE_SIZES_FILE__", None)
    }
    for attrib_name, (var_name, default_value) in const_attrib_to_var.iteritems():
        option_value = getattr(cmd_line_options_obj, attrib_name)
        if option_value:
            var_stack.add_const_config_variable(var_name, "from command line options", *option_value)
        elif default_value is not None:  # there's a default
            var_stack.add_const_config_variable(var_name, "from default", default_value)

    # options that become regular (overridable) variables
    non_const_attrib_to_var = {
        "filter_in": "__FILTER_IN_VERSION__",
        "target_repo_rev": "TARGET_REPO_REV",
        "base_repo_rev": "BASE_REPO_REV",
    }
    for attrib_name, var_name in non_const_attrib_to_var.iteritems():
        option_value = getattr(cmd_line_options_obj, attrib_name)
        if option_value:
            var_stack.set_var(var_name, "from command line options").append(option_value[0])

    if cmd_line_options_obj.command:
        var_stack.set_var("__MAIN_COMMAND__", "from command line options").append(cmd_line_options_obj.command)

    # __HELP_SUBJECT__ is always defined, possibly empty
    if hasattr(cmd_line_options_obj, "subject") and cmd_line_options_obj.subject is not None:
        var_stack.add_const_config_variable("__HELP_SUBJECT__", "from command line options",
                                            cmd_line_options_obj.subject)
    else:
        var_stack.add_const_config_variable("__HELP_SUBJECT__", "from command line options", "")

    if cmd_line_options_obj.state_file:
        var_stack.add_const_config_variable("__MAIN_STATE_FILE__", "from command line options", cmd_line_options_obj.state_file)
    if cmd_line_options_obj.filter_out:
        var_stack.add_const_config_variable("__FILTER_OUT_PATHS__", "from command line options", *cmd_line_options_obj.filter_out)
    if cmd_line_options_obj.run:
        var_stack.add_const_config_variable("__RUN_BATCH__", "from command line options", "yes")
    if cmd_line_options_obj.no_wtar_artifacts:
        var_stack.add_const_config_variable("__NO_WTAR_ARTIFACTS__", "from command line options", "yes")

    # if credentials were given, hand them to the connection factory
    credentials = None
    if "__CREDENTIALS__" in var_stack:
        credentials = var_stack.resolve_var("__CREDENTIALS__", default=None)
    connection_factory(credentials)
def set_default_variables(self):
    """Pick the first client/admin command as the GUI default and cache the
    list of commands that support the --run option."""
    first_client_command = var_stack.resolve_var_to_list("__CLIENT_GUI_CMD_LIST__")[0]
    var_stack.set_var("CLIENT_GUI_CMD").append(first_client_command)
    first_admin_command = var_stack.resolve_var_to_list("__ADMIN_GUI_CMD_LIST__")[0]
    var_stack.set_var("ADMIN_GUI_CMD").append(first_admin_command)
    self.commands_with_run_option_list = var_stack.resolve_var_to_list("__COMMANDS_WITH_RUN_OPTION__")
def update_admin_state(self, *args):
    """Mirror the admin GUI widget values into variables, re-read the admin
    config file when its path changed, then refresh dependent widgets and the
    displayed command line.
    """
    var_stack.set_var("ADMIN_GUI_CMD").append(self.admin_command_name_var.get())
    current_config_path = var_stack.resolve_var("ADMIN_GUI_CONFIG_FILE", default="")
    new_config_path = self.admin_config_path_var.get()
    if current_config_path != new_config_path:
        # path changed -> remember to re-read the config file
        self.admin_config_file_dirty = True
        var_stack.set_var("ADMIN_GUI_CONFIG_FILE").append(new_config_path)
    if self.admin_config_file_dirty:
        self.read_admin_config_file()
    _, input_file_base_name = os.path.split(var_stack.unresolved_var("ADMIN_GUI_CONFIG_FILE"))
    var_stack.set_var("ADMIN_GUI_CONFIG_FILE_NAME").append(input_file_base_name)
    var_stack.set_var("ADMIN_GUI_OUT_BATCH_FILE").append(self.admin_output_path_var.get())
    var_stack.set_var("ADMIN_GUI_RUN_BATCH").append(bool_int_to_str(self.run_admin_batch_file_var.get()))
    var_stack.set_var("ADMIN_GUI_LIMIT").append(self.admin_limit_var.get())
    # reflect values from the (possibly just re-read) config file back into the widgets
    self.admin_stage_index_var.set(var_stack.resolve("$(__STAGING_INDEX_FILE__)"))
    self.admin_svn_repo_var.set(var_stack.resolve("$(SVN_REPO_URL), REPO_REV: $(REPO_REV)"))
    sync_url = var_stack.resolve("$(SYNC_BASE_URL)")
    self.admin_sync_url_var.set(sync_url)
    # the limit entry is enabled only for commands that accept a limit
    if self.admin_command_name_var.get() in self.commands_that_accept_limit_option:
        self.limit_path_entry_widget.configure(state='normal')
    else:
        self.limit_path_entry_widget.configure(state='disabled')
    # the run checkbox is enabled only for commands that support --run
    if self.admin_command_name_var.get() in self.commands_with_run_option_list:
        self.admin_run_batch_file_checkbox.configure(state='normal')
    else:
        self.admin_run_batch_file_checkbox.configure(state='disabled')
    command_line = " ".join(self.create_admin_command_line())
    self.admin_command_line_var.set(var_stack.resolve(command_line))
def update_client_state(self, *args):
    """Mirror the client GUI widget values into variables and refresh the
    dependent widget states and the displayed command line."""
    var_stack.set_var("CLIENT_GUI_CMD").append(self.client_command_name_var.get())
    self.update_client_input_file_combo()

    input_file_base_name = os.path.basename(var_stack.unresolved_var("CLIENT_GUI_IN_FILE"))
    var_stack.set_var("CLIENT_GUI_IN_FILE_NAME").append(input_file_base_name)
    var_stack.set_var("CLIENT_GUI_OUT_FILE").append(self.client_output_path_var.get())
    var_stack.set_var("CLIENT_GUI_RUN_BATCH").append(bool_int_to_str(self.run_client_batch_file_var.get()))
    var_stack.set_var("CLIENT_GUI_CREDENTIALS").append(self.client_credentials_var.get())
    var_stack.set_var("CLIENT_GUI_CREDENTIALS_ON").append(self.client_credentials_on_var.get())

    # the run checkbox is enabled only for commands that support --run
    if self.client_command_name_var.get() in self.commands_with_run_option_list:
        run_checkbox_state = 'normal'
    else:
        run_checkbox_state = 'disabled'
    self.client_run_batch_file_checkbox.configure(state=run_checkbox_state)

    command_line = " ".join(self.create_client_command_line())
    self.client_command_line_var.set(var_stack.resolve(command_line))
def do_create_repo_rev_file(self):
    """Create the repo-rev yaml file for $(TARGET_REPO_REV): sign and checksum
    the revision's info_map.txt and index.yaml, then write the variables
    listed in REPO_REV_FILE_VARS to $(ROOT_LINKS_FOLDER)/admin/.
    """
    if "REPO_REV_FILE_VARS" not in var_list:
        raise ValueError("REPO_REV_FILE_VARS must be defined")
    repo_rev_vars = var_list.resolve_to_list("$(REPO_REV_FILE_VARS)")
    var_list.set_var("REPO_REV").append("$(TARGET_REPO_REV)")  # override the repo rev from the config file
    # refuse to write secrets into a file that is meant to be uploaded
    dangerous_intersection = set(repo_rev_vars).intersection(set(("AWS_ACCESS_KEY_ID","AWS_SECRET_ACCESS_KEY", "PRIVATE_KEY", "PRIVATE_KEY_FILE")))
    if dangerous_intersection:
        print("found", str(dangerous_intersection), "in REPO_REV_FILE_VARS, aborting")
        raise ValueError("file REPO_REV_FILE_VARS "+str(dangerous_intersection)+" and so is forbidden to upload")
    # signature + checksum for the revision's info map
    info_map_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/$(TARGET_REPO_REV)/instl/info_map.txt")
    info_map_sigs = self.create_sig_for_file(info_map_file)
    var_list.set_var("INFO_MAP_SIG").append(info_map_sigs["SHA-512_rsa_sig"])
    var_list.set_var("INFO_MAP_CHECKSUM").append(info_map_sigs["sha1_checksum"])
    var_list.set_var("INDEX_URL").append("$(SYNC_BASE_URL)/$(REPO_REV)/instl/index.yaml")
    # signature + checksum for the revision's index
    index_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/$(TARGET_REPO_REV)/instl/index.yaml")
    index_file_sigs = self.create_sig_for_file(index_file)
    var_list.set_var("INDEX_SIG").append(index_file_sigs["SHA-512_rsa_sig"])
    var_list.set_var("INDEX_CHECKSUM").append(index_file_sigs["sha1_checksum"])
    # every variable destined for the file must exist before dumping
    for var in repo_rev_vars:
        if var not in var_list:
            raise ValueError(var+" is missing cannot write repo rev file")
    repo_rev_yaml = YamlDumpDocWrap(var_list.repr_for_yaml(repo_rev_vars, include_comments=False), '!define', "", explicit_start=True, sort_mappings=True)
    safe_makedirs(var_list.resolve("$(ROOT_LINKS_FOLDER)/admin"))
    local_file = var_list.resolve("$(ROOT_LINKS_FOLDER)/admin/$(REPO_REV_FILE_NAME).$(TARGET_REPO_REV)")
    with open(local_file, "w") as wfd:
        writeAsYaml(repo_rev_yaml, out_stream=wfd, indentor=None, sort=True)
    print("created", local_file)