def do_check_sig(self):
    """Verify the checksum and/or RSA signature of the main input file.

    Reads $(__MAIN_INPUT_FILE__); if $(__SHA1_CHECKSUM__) resolves to a
    non-empty value the file's checksum is verified, and if
    $(__RSA_SIGNATURE__) resolves (and PUBLIC_KEY_FILE is defined) the
    signature is verified as well. Results are printed, nothing is returned.
    """
    file_to_check = self.path_searcher.find_file(
        var_list.resolve("$(__MAIN_INPUT_FILE__)"),
        return_original_if_not_found=True)
    # was: open(...).read() without closing the handle - use 'with' so the
    # file descriptor is released deterministically
    with open(file_to_check, "rb") as rfd:
        file_contents = rfd.read()

    sha1_checksum = var_list.resolve("$(__SHA1_CHECKSUM__)")
    if sha1_checksum:
        checksumOk = check_buffer_checksum(file_contents, sha1_checksum)
        if checksumOk:
            print("Checksum OK")
        else:
            print("Bad checksum, should be:", get_buffer_checksum(file_contents))

    rsa_signature = var_list.resolve("$(__RSA_SIGNATURE__)")
    if rsa_signature:
        if "PUBLIC_KEY_FILE" in var_list:
            public_key_file = self.path_searcher.find_file(
                var_list.resolve("$(PUBLIC_KEY_FILE)"),
                return_original_if_not_found=True)
            with open(public_key_file, "rb") as pk_fd:
                public_key_text = pk_fd.read()
            signatureOk = check_buffer_signature(file_contents, rsa_signature, public_key_text)
            if signatureOk:
                print("Signature OK")
            else:
                print("Bad Signature")
def download_url_to_file(self, src_url, trg_file):
    """ Create command to download a single file.
        src_url is expected to be already escaped (spaces as %20...)
        Returns the full curl command line as a single string.
    """
    connect_time_out = var_stack.resolve("$(CURL_CONNECT_TIMEOUT)", raise_on_fail=True)
    max_time = var_stack.resolve("$(CURL_MAX_TIME)", raise_on_fail=True)
    retries = var_stack.resolve("$(CURL_RETRIES)", raise_on_fail=True)
    download_command_parts = [
        "$(DOWNLOAD_TOOL_PATH)",
        "--insecure",
        "--fail",
        "--raw",
        "--silent",
        "--show-error",
        "--compressed",
        "--connect-timeout", connect_time_out,
        "--max-time", max_time,
        "--retry", retries,
        # BUG FIX: was "write-out" (missing the leading dashes), which curl
        # would have treated as an extra URL argument instead of the
        # --write-out option
        "--write-out", DownloadToolBase.curl_write_out_str,
        "-o", quoteme_double(trg_file),
        quoteme_double(src_url),
    ]
    return " ".join(download_command_parts)
def do_stage2svn(self):
    """Queue batch commands that mirror the staging folder back into the
    svn checkout folder, optionally limited to the folders listed in
    $(__LIMIT_COMMAND_TO__), then write (and optionally run) the batch file.
    """
    self.batch_accum.set_current_section('admin')
    # resolve __LIMIT_COMMAND_TO__ once instead of checking/resolving it twice
    limit_list = None
    if var_list.defined("__LIMIT_COMMAND_TO__"):
        limit_list = var_list.resolve_to_list("$(__LIMIT_COMMAND_TO__)")
        print("limiting to ", "; ".join(limit_list))
    else:
        print("no limiting to specific folder")
    stage_folder = var_list.resolve("$(STAGING_FOLDER)")
    svn_folder = var_list.resolve("$(SVN_CHECKOUT_FOLDER)")
    self.batch_accum += self.platform_helper.unlock(stage_folder, recursive=True)
    self.batch_accum += self.platform_helper.progress("chflags -R nouchg "+stage_folder)
    self.batch_accum += self.platform_helper.new_line()
    self.batch_accum += self.platform_helper.cd(svn_folder)
    if limit_list is not None:
        stage_folder_svn_folder_pairs = [
            (os.path.join(stage_folder, limit), os.path.join(svn_folder, limit))
            for limit in limit_list]
    else:
        stage_folder_svn_folder_pairs = [(stage_folder, svn_folder)]
    for stage_path, svn_path in stage_folder_svn_folder_pairs:
        # "Icon\015" is the classic Mac icon file (name ends with CR)
        comparer = filecmp.dircmp(stage_path, svn_path, ignore=[".svn", ".DS_Store", "Icon\015"])
        self.stage2svn_for_folder(comparer)
    self.create_variables_assignment()
    self.write_batch_file()
    if "__RUN_BATCH_FILE__" in var_list:
        self.run_batch_file()
def do_create_repo_rev_file(self):
    """Write the repo-rev yaml file for $(TARGET_REPO_REV).

    Collects the variables listed in REPO_REV_FILE_VARS (refusing to
    proceed if any secret-bearing variable is among them), computes
    checksums/signatures for the revision's info_map and index files,
    and dumps everything as a !define yaml document under
    $(ROOT_LINKS_FOLDER)/admin.
    """
    if "REPO_REV_FILE_VARS" not in var_list:
        raise ValueError("REPO_REV_FILE_VARS must be defined")
    repo_rev_vars = var_list.resolve_to_list("$(REPO_REV_FILE_VARS)")
    # override the repo rev from the config file
    var_list.set_var("REPO_REV").append("$(TARGET_REPO_REV)")

    # never allow credentials or private keys into the uploaded file
    forbidden_vars = {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "PRIVATE_KEY", "PRIVATE_KEY_FILE"}
    dangerous_intersection = forbidden_vars.intersection(repo_rev_vars)
    if dangerous_intersection:
        print("found", str(dangerous_intersection), "in REPO_REV_FILE_VARS, aborting")
        raise ValueError("file REPO_REV_FILE_VARS "+str(dangerous_intersection)+" and so is forbidden to upload")

    info_map_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/$(TARGET_REPO_REV)/instl/info_map.txt")
    info_map_sigs = self.create_sig_for_file(info_map_file)
    var_list.set_var("INFO_MAP_SIG").append(info_map_sigs["SHA-512_rsa_sig"])
    var_list.set_var("INFO_MAP_CHECKSUM").append(info_map_sigs["sha1_checksum"])

    var_list.set_var("INDEX_URL").append("$(SYNC_BASE_URL)/$(REPO_REV)/instl/index.yaml")
    index_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/$(TARGET_REPO_REV)/instl/index.yaml")
    index_sigs = self.create_sig_for_file(index_file)
    var_list.set_var("INDEX_SIG").append(index_sigs["SHA-512_rsa_sig"])
    var_list.set_var("INDEX_CHECKSUM").append(index_sigs["sha1_checksum"])

    for var in repo_rev_vars:
        if var not in var_list:
            raise ValueError(var+" is missing cannot write repo rev file")

    repo_rev_yaml = YamlDumpDocWrap(var_list.repr_for_yaml(repo_rev_vars, include_comments=False),
                                    '!define', "", explicit_start=True, sort_mappings=True)
    safe_makedirs(var_list.resolve("$(ROOT_LINKS_FOLDER)/admin"))
    local_file = var_list.resolve("$(ROOT_LINKS_FOLDER)/admin/$(REPO_REV_FILE_NAME).$(TARGET_REPO_REV)")
    with open(local_file, "w") as wfd:
        writeAsYaml(repo_rev_yaml, out_stream=wfd, indentor=None, sort=True)
    print("created", local_file)
def create_sig_for_file(self, file_to_sig):
    """Create the signatures dict for file_to_sig.

    The repo's private key is expected to live next to the config file as
    <REPO_NAME>.private_key. Returns the dict produced by
    create_file_signatures (checksums + RSA signature).
    """
    config_dir, _ = os.path.split(var_list.resolve("$(__CONFIG_FILE_PATH__)"))
    private_key_file = os.path.join(config_dir, var_list.resolve("$(REPO_NAME)")+".private_key")
    with open(private_key_file, "rb") as private_key_fd:
        # the previous dead 'retVal = None' initialization was removed:
        # the 'with' body always produces the return value
        return create_file_signatures(file_to_sig, private_key_fd.read())
def mark_required_items_for_source(self, source):
    """ source is a tuple (source_folder, tag), where tag is one of
        !file, !files, !dir, !dir_cont.
        Marks the matching remote info-map item (or its wtar parts) as
        required; raises ValueError when the item is missing or its type
        does not match the tag.
    """
    path, tag = source[0], source[1]
    remote_sub_item = self.work_info_map.get_item_at_path(path)
    if remote_sub_item is None:
        # item not found - it might have been wtared, so look for wtar
        # parts and mark those instead
        if not self.mark_wtar_items_for_source(source):
            raise ValueError(path, var_stack.resolve("does not exist in remote map, IID: $(iid_iid)"))
        return
    if tag == "!file":
        if not remote_sub_item.isFile():
            raise ValueError(path, "has type", tag, var_stack.resolve("but is not a file, IID: $(iid_iid)"))
        remote_sub_item.set_user_data_non_recursive(True)
    elif tag == "!files":
        if not remote_sub_item.isDir():
            raise ValueError(path, "has type", tag, var_stack.resolve("but is not a dir, IID: $(iid_iid)"))
        remote_sub_item.set_user_data_files_recursive(True)
    elif tag in ("!dir", "!dir_cont"):
        # !dir and !dir_cont are only different when copying
        if not remote_sub_item.isDir():
            raise ValueError(path, "has type", tag, var_stack.resolve("but is not a dir, IID: $(iid_iid)"))
        remote_sub_item.set_user_data_all_recursive(True)
def needToCreatelinksForRevision(self, revision):
    """ Need to create links if the create_links_done_stamp_file was not found.

        If the file was found there is still one situation where we would like
        to re-create the links: If the links are for a revision that was not
        the base revision and now this revision is the base revision. In which
        case the whole revision will need to be uploaded.

        Returns True when links should be (re)created for `revision`.
    """
    current_base_repo_rev = int(var_list.resolve("$(BASE_REPO_REV)"))
    retVal = True
    revision_links_folder = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/"+str(revision))
    create_links_done_stamp_file = var_list.resolve(revision_links_folder+"/$(CREATE_LINKS_STAMP_FILE_NAME)")
    if os.path.isfile(create_links_done_stamp_file):
        if revision == current_base_repo_rev:  # revision is the new base_repo_rev
            try:
                # the stamp file records the base repo rev at link-creation time;
                # use 'with' so the handle is closed (was a leaked open())
                with open(create_links_done_stamp_file, "r") as rfd:
                    previous_base_repo_rev = int(rfd.read())
                if previous_base_repo_rev == current_base_repo_rev:
                    retVal = False
                else:
                    msg = " ".join(("new base revision", str(current_base_repo_rev),
                                    "(was", str(previous_base_repo_rev), ") need to refresh links"))
                    self.batch_accum += self.platform_helper.echo(msg)
                    print(msg)
                    # since we need to re-create links, remove the upload stamp
                    # in order to force upload
                    try:
                        os.remove(var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/"+str(revision)+"/$(UP_2_S3_STAMP_FILE_NAME)"))
                    except OSError:
                        pass  # stamp may not exist - removal is best-effort
            except (IOError, OSError, ValueError):
                # no readable previous base-repo-rev indication was found, so
                # keep retVal True to re-create the links (was a bare except)
                pass
        else:
            retVal = False
    return retVal
def do_command(self):
    """Top-level client command driver: read the input yaml, prepare state,
    dispatch to the matching do_<command> method, then write the batch file
    and optionally run it."""
    # command names use dashes on the command line; method names use underscores
    the_command = var_stack.resolve("$(__MAIN_COMMAND__)")
    fixed_command_name = the_command.replace('-', '_')
    # print("client_commands", fixed_command_name)
    self.installState = InstallInstructionsState()
    self.read_yaml_file(var_stack.resolve("$(__MAIN_INPUT_FILE__)"))
    self.init_default_client_vars()
    self.resolve_defined_paths()
    self.batch_accum.set_current_section('begin')
    self.batch_accum += self.platform_helper.setup_echo()
    self.platform_helper.init_download_tool()
    # after reading variable COPY_TOOL from yaml, we might need to re-init the copy tool.
    self.platform_helper.init_copy_tool()
    self.resolve_index_inheritance()
    self.add_default_items()
    self.calculate_default_install_item_set()
    self.platform_helper.num_items_for_progress_report = int(var_stack.resolve("$(LAST_PROGRESS)"))
    # dynamic dispatch: e.g. "sync-copy" -> self.do_sync_copy
    do_command_func = getattr(self, "do_" + fixed_command_name)
    do_command_func()
    self.create_instl_history_file()
    self.create_variables_assignment()
    self.write_batch_file()
    if "__RUN_BATCH__" in var_stack:
        self.run_batch_file()
def do_command(self):
    """Minimal command driver: set defaults, prime the platform helper,
    then dispatch $(__MAIN_COMMAND__) to its do_<command> method."""
    the_command = var_list.resolve("$(__MAIN_COMMAND__)")
    self.set_default_variables()
    self.platform_helper.num_items_for_progress_report = int(var_list.resolve("$(LAST_PROGRESS)"))
    self.platform_helper.init_copy_tool()
    # dashes in the command name map to underscores in the method name
    handler_name = "do_" + the_command.replace('-', '_')
    getattr(self, handler_name)()
def do_create_rsa_keys(self):
    """Generate a 4096-bit RSA key pair and write both keys as PEM files to
    the paths given by $(PUBLIC_KEY_FILE) and $(PRIVATE_KEY_FILE)."""
    public_key_file = var_list.resolve("$(PUBLIC_KEY_FILE)")
    private_key_file = var_list.resolve("$(PRIVATE_KEY_FILE)")
    pubkey, privkey = rsa.newkeys(4096, poolsize=8)
    # write public key first, then private key - same order as before
    for key_obj, key_path, announce in ((pubkey, public_key_file, "public key created:"),
                                        (privkey, private_key_file, "private key created:")):
        with open(key_path, "wb") as wfd:
            wfd.write(key_obj.save_pkcs1(format='PEM'))
        print(announce, key_path)
def init_sync_vars(self):
    """Prepare sync variables: default REPO_REV to HEAD and derive
    REL_BOOKKIPING_PATH / REL_SRC_PATH relative to $(SYNC_BASE_URL)."""
    var_description = "from InstlInstanceBase.init_sync_vars"
    self.check_prerequisite_var_existence(("SYNC_BASE_URL", "SVN_CLIENT_PATH"))
    var_list.set_value_if_var_does_not_exist("REPO_REV", "HEAD", description=var_description)
    base_url = var_list.resolve("$(SYNC_BASE_URL)")
    bookkeeping_rel = relative_url(base_url, var_list.resolve("$(BOOKKEEPING_DIR_URL)"))
    var_list.set_var("REL_BOOKKIPING_PATH", var_description).append(bookkeeping_rel)
    sources_rel = relative_url(base_url, var_list.resolve("$(SYNC_BASE_URL)/$(SOURCE_PREFIX)"))
    var_list.set_var("REL_SRC_PATH", var_description).append(sources_rel)
def do_win_shortcut(self):
    """Create a Windows shortcut (.lnk) at $(__SHORTCUT_PATH__) pointing to
    $(__SHORTCUT_TARGET_PATH__), via the WScript.Shell COM object."""
    from win32com.client import Dispatch
    shortcut_path = var_stack.resolve("$(__SHORTCUT_PATH__)", raise_on_fail=True)
    target_path = var_stack.resolve("$(__SHORTCUT_TARGET_PATH__)", raise_on_fail=True)
    # working directory is the folder containing the target
    working_directory, _target_name = os.path.split(target_path)
    shell = Dispatch("WScript.Shell")
    shortcut = shell.CreateShortCut(shortcut_path)
    shortcut.Targetpath = target_path
    shortcut.WorkingDirectory = working_directory
    shortcut.save()
def init_sync_vars(self):
    """Prepare sync variables for the svn flavor of syncing."""
    super(InstlInstanceSync_svn, self).init_sync_vars()
    var_description = "InstlInstanceSync_svn.init_sync_vars"
    var_stack.set_value_if_var_does_not_exist("REPO_REV", "HEAD", description=var_description)
    bookkeeping_relative_path = relative_url(var_stack.resolve("$(SYNC_BASE_URL)"), var_stack.resolve("$(BOOKKEEPING_DIR_URL)"))
    var_stack.set_var("REL_BOOKKIPING_PATH", var_description).append(bookkeeping_relative_path)
    # NOTE(review): both arguments resolve to $(SYNC_BASE_URL), so rel_sources
    # is the base url relative to itself; the base-class init_sync_vars appends
    # $(SOURCE_PREFIX) to the second argument - confirm this difference is
    # intentional for the svn flavor
    rel_sources = relative_url(var_stack.resolve("$(SYNC_BASE_URL)"), var_stack.resolve("$(SYNC_BASE_URL)"))
    var_stack.set_var("REL_SRC_PATH", var_description).append(rel_sources)
def do_wtar(self):
    """Queue batch commands that wtar (tar + split) items in the staging
    folder that match $(WTAR_REGEX) or exceed $(MAX_FILE_SIZE), then write
    (and optionally run) the batch file."""
    self.batch_accum.set_current_section('admin')
    regex_list = var_list.resolve_to_list("$(WTAR_REGEX)")
    compiled_regex_list = list()
    for regex in regex_list:
        compiled_regex_list.append(re.compile(regex))
    self.batch_accum += self.platform_helper.split_func()
    max_file_size = int(var_list.resolve(("$(MAX_FILE_SIZE)")))
    stage_folder = var_list.resolve(("$(STAGING_FOLDER)"))
    self.batch_accum += self.platform_helper.unlock(stage_folder, recursive=True)
    self.batch_accum += self.platform_helper.progress("chflags -R nouchg "+stage_folder)
    self.batch_accum += self.platform_helper.new_line()
    # iterative depth-first walk over the staging tree; folders whose items
    # are not tarred are pushed for further descent
    folders_to_check = [stage_folder]
    while len(folders_to_check) > 0:
        folder_to_check = folders_to_check.pop()
        dir_items = os.listdir(folder_to_check)
        items_to_tar = list()
        for dir_item in dir_items:
            dir_item_full_path = os.path.join(folder_to_check, dir_item)
            # symlinks are never tarred or descended into
            if not os.path.islink(dir_item_full_path):
                to_tar = self.should_wtar(dir_item_full_path, compiled_regex_list, max_file_size)
                if to_tar:
                    items_to_tar.append(dir_item)
                else:
                    if os.path.isdir(dir_item_full_path):
                        folders_to_check.append(dir_item_full_path)
        if items_to_tar:
            self.batch_accum += self.platform_helper.cd(folder_to_check)
            for item_to_tar in items_to_tar:
                item_to_tar_full_path = os.path.join(folder_to_check, item_to_tar)
                if item_to_tar.endswith(".wtar"):
                    # already a wtar: remove stale split parts (*.??) and re-split
                    for delete_file in dir_items:
                        if fnmatch.fnmatch(delete_file, item_to_tar+'.??'):
                            self.batch_accum += self.platform_helper.rmfile(delete_file)
                    self.batch_accum += self.platform_helper.split(item_to_tar)
                else:
                    # fresh item: remove any stale .wtar products, then tar, split,
                    # and delete the original file or folder
                    for delete_file in dir_items:
                        if fnmatch.fnmatch(delete_file, item_to_tar+'.wtar*'):
                            self.batch_accum += self.platform_helper.rmfile(delete_file)
                    self.prepare_permissions_for_wtar(item_to_tar_full_path)
                    self.batch_accum += self.platform_helper.tar(item_to_tar)
                    self.batch_accum += self.platform_helper.split(item_to_tar+".wtar")
                    if os.path.isdir(item_to_tar_full_path):
                        self.batch_accum += self.platform_helper.rmdir(item_to_tar, recursive=True)
                    elif os.path.isfile(item_to_tar_full_path):
                        self.batch_accum += self.platform_helper.rmfile(item_to_tar)
                self.batch_accum += self.platform_helper.progress(item_to_tar_full_path)
                self.batch_accum += self.platform_helper.new_line()
    self.create_variables_assignment()
    self.write_batch_file()
    if "__RUN_BATCH_FILE__" in var_list:
        self.run_batch_file()
def do_verify_index(self):
    """Cross-check the index against the info map: verify inherits/depends
    references exist and that each iid's sources exist with the right type,
    printing any problems found plus summary statistics."""
    self.read_yaml_file(var_list.resolve("$(__MAIN_INPUT_FILE__)"))
    info_map = svnTree.SVNTree()
    with open_for_read_file_or_url(var_list.resolve("$(INFO_MAP_FILE_URL)")) as rfd:
        info_map.read_from_text(rfd)
    # for each iid get full paths to its sources
    iid_to_sources = defaultdict(list)
    InstallItem.begin_get_for_all_oses()
    for iid in sorted(self.install_definitions_index):
        with self.install_definitions_index[iid]:
            for source_var in var_list.get_configVar_obj("iid_source_var_list"):
                # source is (path, tag, os-spec); expand to both OS roots as needed
                source = var_list.resolve_var_to_list(source_var)
                if source[2] in ("common", "Mac"):
                    iid_to_sources[iid].append( ("/".join( ("Mac", source[0])), source[1]))
                if source[2] in ("common", "Win", "Win32", "Win64"):
                    iid_to_sources[iid].append( ("/".join( ("Win", source[0])), source[1]))
    for iid in sorted(iid_to_sources):
        with self.install_definitions_index[iid]:
            iid_problem_messages = list()
            # check inherits
            for inheritee in var_list.resolve_var_to_list("iid_inherite"):
                if inheritee not in self.install_definitions_index:
                    iid_problem_messages.append(" ".join( ("inherits from non existing", inheritee ) ))
            # check depends
            for dependee in var_list.resolve_var_to_list("iid_depend_list"):
                if dependee not in self.install_definitions_index:
                    iid_problem_messages.append(" ".join( ("depends on non existing", dependee ) ))
            # check sources: each must exist in the info map and match its tag
            for source in iid_to_sources[iid]:
                map_item = info_map.get_item_at_path(source[0])
                if map_item is None:
                    iid_problem_messages.append(" ".join( ("source", quoteme_single(source[0]), "does not exist") ))
                else:
                    if source[1] in ("!dir", "!dir_cont", "!files"):
                        if map_item.isFile():
                            iid_problem_messages.append(" ".join( ("source", quoteme_single(source[0]), "is a file but type is", source[1]) ))
                        else:
                            file_list, dir_list = map_item.unsorted_sub_items()
                            if source[1] == "!files" and len(file_list) == 0:
                                iid_problem_messages.append(" ".join( ("source", quoteme_single(source[0]), "has no files but type is", source[1]) ))
                            if source[1] in ("!dir", "!dir_cont") and len(file_list)+len(dir_list) == 0:
                                iid_problem_messages.append(" ".join( ("source", quoteme_single(source[0]), "has no files or dirs but type is", source[1]) ))
                    if source[1] == "!file" and not map_item.isFile():
                        iid_problem_messages.append(" ".join( ("source", quoteme_single(source[0]), "is a dir but type is", source[1]) ))
            if iid_problem_messages:
                print(iid+":")
                for problem_message in sorted(iid_problem_messages):
                    print(" ", problem_message)
    self.find_cycles()
    print("index:", len(self.install_definitions_index), "iids")
    num_files = info_map.num_subs_in_tree(what="file")
    num_dirs = info_map.num_subs_in_tree(what="dir")
    print("info map:", num_files, "files in", num_dirs, "folders")
def do_make_sig(self):
    """Print the sha1 checksum of the main input file and, when
    PRIVATE_KEY_FILE is defined, its SHA-512 RSA signature."""
    private_key = None
    if "PRIVATE_KEY_FILE" in var_list:
        private_key_file = self.path_searcher.find_file(
            var_list.resolve("$(PRIVATE_KEY_FILE)"),
            return_original_if_not_found=True)
        # was: open(...).read() leaking the handle - use 'with'
        with open(private_key_file, "rb") as pk_fd:
            private_key = pk_fd.read()
    file_to_sign = self.path_searcher.find_file(
        var_list.resolve("$(__MAIN_INPUT_FILE__)"),
        return_original_if_not_found=True)
    file_sigs = create_file_signatures(file_to_sign, private_key_text=private_key)
    print("sha1:\n", file_sigs["sha1_checksum"])
    print("SHA-512_rsa_sig:\n", file_sigs.get("SHA-512_rsa_sig", "no private key"))
def do_up2s3(self):
    """Queue upload-to-S3 batch commands for every revision between
    $(BASE_REPO_REV) and the last repo revision that has created links but
    has not yet been uploaded, then write (and optionally run) the batch."""
    # NOTE(review): root_links_folder is assigned but never used below
    root_links_folder = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)")
    # call svn info and to find out the last repo revision
    base_repo_rev = int(var_list.resolve("$(BASE_REPO_REV)"))
    last_repo_rev = self.get_last_repo_rev()
    revision_list = range(base_repo_rev, last_repo_rev+1)
    dirs_to_upload = list()
    no_need_upload_nums = list()
    yes_need_upload_nums = list()
    for dir_as_int in revision_list:
        dir_name = str(dir_as_int)
        if not os.path.isdir(var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/"+dir_name)):
            print("revision dir", dir_name, "is missing, run create-links to create this folder")
        else:
            # a revision is uploadable only after create-links stamped it
            create_links_done_stamp_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/"+dir_name+"/$(CREATE_LINKS_STAMP_FILE_NAME)")
            if not os.path.isfile(create_links_done_stamp_file):
                print("revision dir", dir_name, "does not have create-links stamp file:", create_links_done_stamp_file)
            else:
                # an up-2-s3 stamp means this revision was already uploaded
                up_2_s3_done_stamp_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/"+dir_name+"/$(UP_2_S3_STAMP_FILE_NAME)")
                if os.path.isfile(up_2_s3_done_stamp_file):
                    no_need_upload_nums.append(dir_name)
                else:
                    yes_need_upload_nums.append(dir_name)
                    dirs_to_upload.append(dir_name)
    if yes_need_upload_nums:
        if no_need_upload_nums:
            no_need_upload__str = ", ".join(no_need_upload_nums)
            msg = " ".join( ("Revisions already uploaded to S3:", no_need_upload__str) )
            print(msg)
        yes_need_upload_str = ", ".join(yes_need_upload_nums)
        msg = " ".join( ("Revisions will be uploaded to S3:", yes_need_upload_str) )
        print(msg)
    else:
        msg = " ".join( ("All revisions already uploaded to S3:", str(base_repo_rev), "...", str(last_repo_rev)) )
        print(msg)
    self.batch_accum.set_current_section('upload')
    for dir_name in dirs_to_upload:
        accum = BatchAccumulator()  # sub-accumulator serves as a template for each version
        accum.set_current_section('upload')
        save_dir_var = "REV_"+dir_name+"_SAVE_DIR"
        self.batch_accum += self.platform_helper.save_dir(save_dir_var)
        var_list.set_var("__CURR_REPO_REV__").append(dir_name)
        self.do_upload_to_s3_aws_for_revision(accum)
        revision_lines = accum.finalize_list_of_lines()  # will resolve with current __CURR_REPO_REV__
        self.batch_accum += revision_lines
        self.batch_accum += self.platform_helper.restore_dir(save_dir_var)
        self.batch_accum += self.platform_helper.new_line()
    self.create_variables_assignment()
    self.write_batch_file()
    if "__RUN_BATCH_FILE__" in var_list:
        self.run_batch_file()
def provision_public_key_text(self):
    """Return the public key text.

    Loads it from $(PUBLIC_KEY_FILE) into the PUBLIC_KEY variable on first
    use; raises ValueError when neither PUBLIC_KEY nor PUBLIC_KEY_FILE is
    defined.
    """
    if "PUBLIC_KEY" not in var_stack:
        # guard clause: without a key file there is nothing to load from
        if "PUBLIC_KEY_FILE" not in var_stack:
            raise ValueError("No public key, variables PUBLIC_KEY & PUBLIC_KEY_FILE are not defined")
        public_key_file = var_stack.resolve("$(PUBLIC_KEY_FILE)")
        with open_for_read_file_or_url(public_key_file, self.path_searcher) as file_fd:
            public_key_text = file_fd.read()
        var_stack.set_var("PUBLIC_KEY", "from " + public_key_file).append(public_key_text)
    return var_stack.resolve("$(PUBLIC_KEY)")
def create_instl_history_file(self):
    """Dump the current variable definitions to the instl history temp file
    and queue a batch command appending it to the persistent history file.
    Only acts when $(LOCAL_REPO_BOOKKEEPING_DIR) resolves to an existing dir."""
    var_stack.set_var("__BATCH_CREATE_TIME__").append(time.strftime("%Y/%m/%d %H:%M:%S"))
    defines_doc = augmentedYaml.YamlDumpDocWrap(var_stack, '!define', "Definitions",
                                                explicit_start=True, sort_mappings=True)
    # write the history file, but only if variable LOCAL_REPO_BOOKKEEPING_DIR
    # is defined and the folder actually exists.
    bookkeeping_dir = var_stack.resolve("$(LOCAL_REPO_BOOKKEEPING_DIR)", default="")
    if os.path.isdir(bookkeeping_dir):
        with open(var_stack.resolve("$(INSTL_HISTORY_TEMP_PATH)"), "w") as wfd:
            make_open_file_read_write_for_all(wfd)
            augmentedYaml.writeAsYaml(defines_doc, wfd)
        self.batch_accum += self.platform_helper.append_file_to_file("$(INSTL_HISTORY_TEMP_PATH)", "$(INSTL_HISTORY_PATH)")
def do_svn2stage(self):
    """Queue batch commands that check out $(SVN_REPO_URL) into the svn
    checkout folder and copy its contents into the staging folder, then
    write (and optionally run) the batch file."""
    self.batch_accum.set_current_section('admin')
    stage_folder = var_list.resolve("$(STAGING_FOLDER)")
    svn_folder = var_list.resolve("$(SVN_CHECKOUT_FOLDER)")
    checkout_command = " ".join(['"$(SVN_CLIENT_PATH)"', "checkout", '"$(SVN_REPO_URL)"',
                                 '"'+svn_folder+'"', "--depth", "infinity"])
    self.batch_accum += checkout_command
    self.batch_accum += self.platform_helper.progress("Checkout $(SVN_REPO_URL) to $(SVN_CHECKOUT_FOLDER)")
    self.batch_accum += self.platform_helper.copy_tool.copy_dir_contents_to_dir(
        svn_folder, stage_folder, link_dest=False, ignore=(".svn", ".DS_Store"))
    self.batch_accum += self.platform_helper.progress("rsync $(SVN_CHECKOUT_FOLDER) to $(STAGING_FOLDER)")
    self.create_variables_assignment()
    self.write_batch_file()
    if "__RUN_BATCH_FILE__" in var_list:
        self.run_batch_file()
def init_sync_vars(self):
    """ Prepares variables for sync. Will raise ValueError if a mandatory
        variable is not defined.
        Loads PUBLIC_KEY from PUBLIC_KEY_FILE when needed and caches the
        local sync dir on self.
    """
    # removed unused local 'var_description' (was assigned but never read)
    self.instlObj.check_prerequisite_var_existence(("SYNC_BASE_URL", "DOWNLOAD_TOOL_PATH", "REPO_REV"))
    if "PUBLIC_KEY" not in var_list:
        if "PUBLIC_KEY_FILE" in var_list:
            public_key_file = var_list.resolve("$(PUBLIC_KEY_FILE)")
            with open_for_read_file_or_url(public_key_file, self.instlObj.path_searcher) as file_fd:
                public_key_text = file_fd.read()
            var_list.set_var("PUBLIC_KEY", "from "+public_key_file).append(public_key_text)
    self.local_sync_dir = var_list.resolve("$(LOCAL_REPO_SYNC_DIR)")
def do_command(self):
    """Dispatch $(__MAIN_COMMAND__) to its do_<command> method, initializing
    the progress counters first and reporting elapsed time afterwards
    (except for the help/version commands)."""
    the_command = var_stack.resolve("$(__MAIN_COMMAND__)", raise_on_fail=True)
    fixed_command = the_command.replace('-', '_')
    # progress bookkeeping used by dynamic_progress during command execution
    self.curr_progress = int(var_stack.resolve("$(__START_DYNAMIC_PROGRESS__)")) + 1
    self.total_progress = int(var_stack.resolve("$(__TOTAL_DYNAMIC_PROGRESS__)"))
    self.progress_staccato_period = int(var_stack.resolve("$(PROGRESS_STACCATO_PERIOD)"))
    self.progress_staccato_count = 0
    self.actual_progress = 1
    self.progress_staccato_command = False
    handler = getattr(self, "do_" + fixed_command)
    before_time = time.clock()
    handler()
    after_time = time.clock()
    if the_command not in ("help", "version"):
        print(the_command, "time:", round(after_time - before_time, 2), "sec.")
def use_copy_tool(self, tool_name):
    """Select the Windows copy tool implementation by name.
    Accepts "robocopy" or "xcopy"; raises ValueError otherwise."""
    tool_classes = {
        "robocopy": CopyTool_win_robocopy,
        "xcopy": CopyTool_win_xcopy,
    }
    if tool_name not in tool_classes:
        raise ValueError(tool_name, "is not a valid copy tool for", var_stack.resolve("$(TARGET_OS)"))
    self.copy_tool = tool_classes[tool_name](self)
def do_sync(self):
    """Create sync instructions using the syncer class that matches
    $(REPO_TYPE) (URL, SVN or P4)."""
    logging.info("Creating sync instructions")
    repo_type = var_list.resolve("$(REPO_TYPE)")
    # imports are local so only the needed flavor is loaded
    if repo_type == "URL":
        from instlInstanceSync_url import InstlInstanceSync_url
        syncer = InstlInstanceSync_url(self)
    elif repo_type == "SVN":
        from instlInstanceSync_svn import InstlInstanceSync_svn
        syncer = InstlInstanceSync_svn(self)
    elif repo_type == "P4":
        from instlInstanceSync_p4 import InstlInstanceSync_p4
        syncer = InstlInstanceSync_p4(self)
    else:
        raise ValueError('REPO_TYPE is not defined in input file')
    syncer.init_sync_vars()
    syncer.create_sync_instructions(self.installState)
    self.batch_accum += self.platform_helper.progress("Done sync")
def write_batch_file(self): self.batch_accum.set_current_section('pre') self.batch_accum += self.platform_helper.get_install_instructions_prefix() self.batch_accum.set_current_section('post') var_stack.set_var("TOTAL_ITEMS_FOR_PROGRESS_REPORT").append( str(self.platform_helper.num_items_for_progress_report)) self.batch_accum += self.platform_helper.get_install_instructions_postfix() lines = self.batch_accum.finalize_list_of_lines() lines_after_var_replacement = '\n'.join( [value_ref_re.sub(self.platform_helper.var_replacement_pattern, line) for line in lines]) from utils import write_to_file_or_stdout out_file = var_stack.resolve("$(__MAIN_OUT_FILE__)", raise_on_fail=True) with write_to_file_or_stdout(out_file) as fd: fd.write(lines_after_var_replacement) fd.write('\n') if out_file != "stdout": self.out_file_realpath = os.path.realpath(out_file) # chmod to 0777 so that file created under sudo, can be re-written under regular user. # However regular user cannot chmod for file created under sudo, hence the try/except try: os.chmod(self.out_file_realpath, 0777) except: pass else: self.out_file_realpath = "stdout" msg = " ".join( (self.out_file_realpath, str(self.platform_helper.num_items_for_progress_report), "progress items")) print(msg) logging.info(msg)
def calc_user_cache_dir_var(self, make_dir=True):
    """Ensure USER_CACHE_DIR is defined (per-OS appdirs cache location) and,
    when make_dir is True, create the directory.

    Raises ValueError for an unknown $(__CURRENT_OS__); previously an
    unrecognized OS left user_cache_dir unbound and crashed with NameError.
    """
    if "USER_CACHE_DIR" not in var_stack:
        os_family_name = var_stack.resolve("$(__CURRENT_OS__)")
        if os_family_name in ("Mac", "Linux"):
            # the Mac and Linux branches were identical - merged
            user_cache_dir_param = "$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)"
            user_cache_dir = appdirs.user_cache_dir(user_cache_dir_param)
        elif os_family_name == "Win":
            user_cache_dir = appdirs.user_cache_dir("$(INSTL_EXEC_DISPLAY_NAME)", "$(COMPANY_NAME)")
        else:
            raise ValueError("unknown OS for USER_CACHE_DIR: " + os_family_name)
        var_description = "from InstlInstanceBase.get_user_cache_dir"
        var_stack.set_var("USER_CACHE_DIR", var_description).append(user_cache_dir)
    if make_dir:
        user_cache_dir_resolved = var_stack.resolve("$(USER_CACHE_DIR)", raise_on_fail=True)
        safe_makedirs(user_cache_dir_resolved)
def read_user_config(self):
    """Read the user config yaml file if it exists, temporarily allowing
    internal variables to be set from it."""
    user_config_path = var_stack.resolve("$(__USER_CONFIG_FILE_PATH__)")
    if not os.path.isfile(user_config_path):
        return
    # user config may legitimately define internal (__X__) variables
    saved_allow_flag = self.allow_reading_of_internal_vars
    self.allow_reading_of_internal_vars = True
    self.read_yaml_file(user_config_path)
    self.allow_reading_of_internal_vars = saved_allow_flag
def download_from_config_files(self, parallel_run_config_file_path, config_files):
    """Write a parallel-run config file containing one download-tool
    invocation per curl config file, and return the pair
    (parallel-run command, exit-if-error command)."""
    with open(parallel_run_config_file_path, "w") as wfd:
        for config_file in config_files:
            wfd.write(var_list.resolve("\"$(DOWNLOAD_TOOL_PATH)\" --config \""+config_file+"\"\n"))
    command_parts = (self.platform_helper.run_instl(), "parallel-run",
                     "--in", quoteme_double(parallel_run_config_file_path))
    return (" ".join(command_parts), self.platform_helper.exit_if_error())
def do_trans(self):
    """Translate the info map: read the main (and optional props) input,
    filter out unwanted paths, floor last_rev values at $(BASE_REPO_REV),
    optionally keep only a specific version, then write the result."""
    self.read_info_map_file(var_list.resolve("$(__MAIN_INPUT_FILE__)"))
    if "__PROPS_FILE__" in var_list:
        self.read_info_map_file(var_list.resolve("$(__PROPS_FILE__)"))
    self.filter_out_info_map(var_list.resolve_to_list("$(__FILTER_OUT_PATHS__)"))
    base_rev = int(var_list.resolve("$(BASE_REPO_REV)"))
    if base_rev > 0:
        # raise every item's last_rev to at least the base revision
        for map_item in self.svnTree.walk_items():
            map_item.set_last_rev(max(map_item.last_rev(), base_rev))
    if "__FILTER_IN_VERSION__" in var_list:
        self.filter_in_specific_version(var_list.resolve("$(__FILTER_IN_VERSION__)"))
    self.write_info_map_file()
    if "__RUN_BATCH_FILE__" in var_list:
        self.run_batch_file()
def do_create_folders(self):
    """Create every directory listed in the info map given by
    $(__MAIN_INPUT_FILE__), reporting progress per folder."""
    self.progress_staccato_command = True
    self.read_info_map_file(var_stack.resolve("$(__MAIN_INPUT_FILE__)", raise_on_fail=True))
    for folder_item in self.svnTree.walk_items(what="dir"):
        dir_path = folder_item.full_path()
        safe_makedirs(dir_path)
        self.dynamic_progress("Create folder {dir_path}".format(**locals()))