def read_const_defines(self, a_node):
    """ Read a !define_const sub-doc. All variables will be made const.
        Reading of internal state identifiers is allowed.
        __include__ is not allowed.

        a_node: yaml-like node; must answer isMapping() and iterate
                (identifier, contents) pairs where contents items carry .value.
        Raises ValueError if the doc tries to use __include__.
        Non-mapping nodes are silently ignored.
    """
    if a_node.isMapping():
        for identifier, contents in a_node:
            if identifier == "__include__":
                # fixed error message: "except" -> "accept"
                raise ValueError("!define_const doc cannot accept __include__")
            logging.debug("%s: %s", identifier, str(contents))
            var_list.add_const_config_variable(identifier, "from !define_const section",
                                               *[item.value for item in contents])
def init_default_vars(self, initial_vars):
    """ Seed the global var_list with constants and defaults.

        initial_vars: optional mapping of variable name -> value; a string
            value is added as a single const value, any other value is
            unpacked (*value) as multiple values.
        Side effects: reads defaults/main.yaml, defaults/compile-info.yaml
        (if present) and a class-specific defaults yaml (if present) from
        $(__INSTL_DATA_FOLDER__), and sets LOG_FILE / LOG_FILE_DEBUG.
    """
    if initial_vars:
        var_description = "from initial_vars"
        for var, value in initial_vars.iteritems():
            # strings must not be splatted char-by-char, so special-case them
            if isinstance(value, basestring):
                var_list.add_const_config_variable(var, var_description, value)
            else:
                var_list.add_const_config_variable(var, var_description, *value)
    var_description = "from InstlInstanceBase.init_default_vars"
    # read defaults/main.yaml
    main_defaults_file_path = os.path.join(var_list.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults", "main.yaml")
    self.read_yaml_file(main_defaults_file_path)
    # read defaults/compile-info.yaml
    compile_info_file_path = os.path.join(var_list.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults", "compile-info.yaml")
    if os.path.isfile(compile_info_file_path):
        self.read_yaml_file(compile_info_file_path)
    # compile-info.yaml normally supplies __COMPILATION_TIME__; fall back to
    # a placeholder depending on whether we run as a compiled executable
    if "__COMPILATION_TIME__" not in var_list:
        if var_list.resolve("$(__INSTL_COMPILED__)") == "True":
            var_list.add_const_config_variable("__COMPILATION_TIME__", var_description, "unknown compilation time")
        else:
            var_list.add_const_config_variable("__COMPILATION_TIME__", var_description, "(not compiled)")
    # read class specific defaults/*.yaml (file named after the concrete class)
    class_specific_defaults_file_path = os.path.join(var_list.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults", type(self).__name__+".yaml")
    if os.path.isfile(class_specific_defaults_file_path):
        self.read_yaml_file(class_specific_defaults_file_path)
    log_file = pyinstl.log_utils.get_log_file_path(var_list.resolve("$(INSTL_EXEC_DISPLAY_NAME)"), var_list.resolve("$(INSTL_EXEC_DISPLAY_NAME)"), debug=False)
    var_list.set_var("LOG_FILE", var_description).append(log_file)
    debug_log_file = pyinstl.log_utils.get_log_file_path(var_list.resolve("$(INSTL_EXEC_DISPLAY_NAME)"), var_list.resolve("$(INSTL_EXEC_DISPLAY_NAME)"), debug=True)
    # LOG_FILE_DEBUG carries the path plus the debug level name and whether
    # debug logging was already started
    var_list.set_var("LOG_FILE_DEBUG", var_description).extend( (debug_log_file, logging.getLevelName(pyinstl.log_utils.debug_logging_level), pyinstl.log_utils.debug_logging_started) )
def create_download_instructions(self):
    """ Build the "sync" section of the batch script: folder creation,
        parallel curl download, checksum verification.

        Side effects: appends instructions to self.instlObj.batch_accum,
        writes curl config files under $(LOCAL_REPO_BOOKKEEPING_DIR)/curl,
        defines __NUM_FILES_TO_DOWNLOAD__, prints a summary to stdout.
    """
    self.instlObj.batch_accum.set_current_section("sync")
    file_list, dir_list = self.work_info_map.sorted_sub_items()
    # nothing to sync - report and bail out early
    if len(file_list) + len(dir_list) == 0:
        print("0 files to sync")
        print("0 bytes to sync")
        return
    self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Starting sync from $(SYNC_BASE_URL)")
    self.instlObj.batch_accum += self.instlObj.platform_helper.mkdir("$(LOCAL_REPO_SYNC_DIR)")
    self.instlObj.batch_accum += self.instlObj.platform_helper.pushd("$(LOCAL_REPO_SYNC_DIR)")
    self.sync_base_url = var_stack.resolve("$(SYNC_BASE_URL)")
    self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
    # collect per-item prefix instructions separately so the "Pre download
    # processing" progress line is only emitted when there are any
    prefix_accum = BatchAccumulator()  # sub-accumulator for prefix instructions
    prefix_accum.set_current_section("sync")
    for need_item in file_list + dir_list:
        self.create_prefix_instructions_for_item(prefix_accum, need_item)
    if len(prefix_accum) > 0:
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Pre download processing")
        self.instlObj.batch_accum.merge_with(prefix_accum)
        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
    num_dirs_to_create = self.work_info_map.num_subs_in_tree(what="dir")
    logging.info("Num directories to create: %d", num_dirs_to_create)
    if num_dirs_to_create > 0:
        self.instlObj.batch_accum += self.instlObj.platform_helper.create_folders("$(TO_SYNC_INFO_MAP_PATH)")
        self.instlObj.platform_helper.num_items_for_progress_report += num_dirs_to_create
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Create folders")
        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
    self.work_info_map.set_user_data_all_recursive(False)  # items that need checksum will be marked True
    for need_item in file_list + dir_list:
        self.create_download_instructions_for_item(need_item)
    var_stack.add_const_config_variable(
        "__NUM_FILES_TO_DOWNLOAD__",
        "create_download_instructions",
        self.instlObj.platform_helper.dl_tool.get_num_urls_to_download(),
    )
    print(self.instlObj.platform_helper.dl_tool.get_num_urls_to_download(), "files to sync")
    bytes_to_sync = self.work_info_map.safe_size
    # backward compatibility for info_maps that do not have sizes
    if -1 == bytes_to_sync:
        print("Unknown number of bytes to sync")
    else:
        print(bytes_to_sync, "bytes to sync")
    # warn (but do not abort) if the target disk lacks space for the sync
    if -1 != bytes_to_sync:
        free_bytes = get_disk_free_space(var_stack.resolve("$(LOCAL_REPO_SYNC_DIR)"))
        if bytes_to_sync > free_bytes:
            print("not enough disk space for sync:", bytes_to_sync - free_bytes, "bytes missing")
    logging.info("Num files to sync: %d", self.instlObj.platform_helper.dl_tool.get_num_urls_to_download())
    # write curl config files; PARALLEL_SYNC caps how many parallel processes
    curl_config_folder = var_stack.resolve("$(LOCAL_REPO_BOOKKEEPING_DIR)/curl", raise_on_fail=True)
    safe_makedirs(curl_config_folder)
    curl_config_file_path = var_stack.resolve(
        os.path.join(curl_config_folder, "$(CURL_CONFIG_FILE_NAME)"), raise_on_fail=True
    )
    num_config_files = int(var_stack.resolve("$(PARALLEL_SYNC)"))
    config_file_list = self.instlObj.platform_helper.dl_tool.create_config_files(
        curl_config_file_path, num_config_files
    )
    logging.info("Num parallel syncs: %d", len(config_file_list))
    if len(config_file_list) > 0:
        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress(
            "Downloading with " + str(len(config_file_list)) + " processes in parallel"
        )
        parallel_run_config_file_path = var_stack.resolve(
            os.path.join(curl_config_folder, "$(CURL_CONFIG_FILE_NAME).parallel-run")
        )
        self.instlObj.batch_accum += self.instlObj.platform_helper.dl_tool.download_from_config_files(
            parallel_run_config_file_path, config_file_list
        )
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress(
            "Downloading " + str(self.files_to_download) + " files done", self.files_to_download
        )
        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
    # checksum verification for everything that was marked during download
    num_files_to_check = self.work_info_map.num_subs_in_tree(what="file")
    logging.info("Num files to checksum check: %d",
                 num_files_to_check)
    if num_files_to_check > 0:
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Checking checksum...")
        self.instlObj.batch_accum += self.instlObj.platform_helper.check_checksum_for_folder(
            "$(TO_SYNC_INFO_MAP_PATH)"
        )
        self.instlObj.platform_helper.num_items_for_progress_report += num_files_to_check
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Check checksum done")
        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
    # balances the pushd("$(LOCAL_REPO_SYNC_DIR)") above
    self.instlObj.batch_accum += self.instlObj.platform_helper.popd()
def init_from_cmd_line_options(self, cmd_line_options_obj):
    """ turn command line options into variables """
    # options that become const variables; each maps to
    # (variable name, default value or None when there is no default)
    const_attrib_to_var = {
        "input_file": ("__MAIN_INPUT_FILE__", None),
        "output_file": ("__MAIN_OUT_FILE__", None),
        "props_file": ("__PROPS_FILE__", None),
        "config_file": ("__CONFIG_FILE__", None),
        "sh1_checksum": ("__SHA1_CHECKSUM__", None),
        "rsa_signature": ("__RSA_SIGNATURE__", None),
        "start_progress": ("__START_DYNAMIC_PROGRESS__", "0"),
        "total_progress": ("__TOTAL_DYNAMIC_PROGRESS__", "0"),
        "just_with_number": ("__JUST_WITH_NUMBER__", "0"),
        "limit_command_to": ("__LIMIT_COMMAND_TO__", None),
        "shortcut_path": ("__SHORTCUT_PATH__", None),
        "target_path": ("__SHORTCUT_TARGET_PATH__", None),
        "credentials": ("__CREDENTIALS__", None),
        "base_url": ("__BASE_URL__", None),
        "file_sizes_file": ("__FILE_SIZES_FILE__", None)
    }
    for opt_name, (var_name, default_value) in const_attrib_to_var.iteritems():
        opt_value = getattr(cmd_line_options_obj, opt_name)
        if opt_value:
            # option given on the command line - its value(s) win
            var_stack.add_const_config_variable(var_name, "from command line options", *opt_value)
        elif default_value is not None:
            # option absent but has a declared default
            var_stack.add_const_config_variable(var_name, "from default", default_value)

    # options that become plain (non-const) variables; only set when given
    non_const_attrib_to_var = {
        "filter_in": "__FILTER_IN_VERSION__",
        "target_repo_rev": "TARGET_REPO_REV",
        "base_repo_rev": "BASE_REPO_REV",
    }
    for opt_name, var_name in non_const_attrib_to_var.iteritems():
        opt_value = getattr(cmd_line_options_obj, opt_name)
        if opt_value:
            var_stack.set_var(var_name, "from command line options").append(opt_value[0])

    if cmd_line_options_obj.command:
        var_stack.set_var("__MAIN_COMMAND__", "from command line options").append(cmd_line_options_obj.command)

    # __HELP_SUBJECT__ is always defined, empty when no subject was requested
    if hasattr(cmd_line_options_obj, "subject") and cmd_line_options_obj.subject is not None:
        var_stack.add_const_config_variable("__HELP_SUBJECT__", "from command line options", cmd_line_options_obj.subject)
    else:
        var_stack.add_const_config_variable("__HELP_SUBJECT__", "from command line options", "")

    if cmd_line_options_obj.state_file:
        var_stack.add_const_config_variable("__MAIN_STATE_FILE__", "from command line options", cmd_line_options_obj.state_file)
    if cmd_line_options_obj.filter_out:
        var_stack.add_const_config_variable("__FILTER_OUT_PATHS__", "from command line options", *cmd_line_options_obj.filter_out)
    if cmd_line_options_obj.run:
        var_stack.add_const_config_variable("__RUN_BATCH__", "from command line options", "yes")
    if cmd_line_options_obj.no_wtar_artifacts:
        var_stack.add_const_config_variable("__NO_WTAR_ARTIFACTS__", "from command line options", "yes")

    # if credentials were given...
    credentials = var_stack.resolve_var("__CREDENTIALS__", default=None) if "__CREDENTIALS__" in var_stack else None
    connection_factory(credentials)
def create_download_instructions(self):
    """ Build the "sync" section of the batch script: folder creation,
        parallel curl download, checksum check and unwtar of the synced
        folder.

        Side effects: appends instructions to self.instlObj.batch_accum,
        writes curl config files under $(LOCAL_REPO_SYNC_DIR)/curl,
        defines __NUM_FILES_TO_DOWNLOAD__, prints a summary to stdout.
    """
    self.instlObj.batch_accum.set_current_section('sync')
    self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Starting sync from $(SYNC_BASE_URL)/$(SOURCE_PREFIX)")
    self.instlObj.batch_accum += self.instlObj.platform_helper.mkdir("$(LOCAL_REPO_SYNC_DIR)")
    self.instlObj.batch_accum += self.instlObj.platform_helper.cd("$(LOCAL_REPO_SYNC_DIR)")
    self.sync_base_url = var_list.resolve("$(SYNC_BASE_URL)")
    self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
    file_list, dir_list = self.work_info_map.sorted_sub_items()
    # collect per-item prefix instructions separately so the "Pre download
    # processing" progress line is only emitted when there are any
    prefix_accum = BatchAccumulator()  # sub-accumulator for prefix instructions
    prefix_accum.set_current_section('sync')
    for need_item in file_list + dir_list:
        self.create_prefix_instructions_for_item(prefix_accum, need_item)
    if len(prefix_accum) > 0:
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Pre download processing")
        self.instlObj.batch_accum.merge_with(prefix_accum)
        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
    # NOTE(review): unlike the var_stack variant of this method, folder
    # creation here is emitted unconditionally, even when num_dirs_to_create
    # is 0 - confirm this is intended
    num_dirs_to_create = self.work_info_map.num_subs_in_tree(what="dir")
    logging.info("Num directories to create: %d", num_dirs_to_create)
    self.instlObj.batch_accum += self.instlObj.platform_helper.create_folders("$(TO_SYNC_INFO_MAP_PATH)")
    self.instlObj.platform_helper.num_items_for_progress_report += num_dirs_to_create
    self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Create folders")
    self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
    self.work_info_map.set_user_data_all_recursive(False)  # items that need checksum will be marked True
    for need_item in file_list + dir_list:
        self.create_download_instructions_for_item(need_item)
    var_list.add_const_config_variable("__NUM_FILES_TO_DOWNLOAD__", "create_download_instructions", self.instlObj.platform_helper.dl_tool.get_num_urls_to_download())
    print(self.instlObj.platform_helper.dl_tool.get_num_urls_to_download(), "files to sync")
    logging.info("Num files to sync: %d", self.instlObj.platform_helper.dl_tool.get_num_urls_to_download())
    # write curl config files; PARALLEL_SYNC caps how many parallel processes
    curl_config_folder = var_list.resolve(os.path.join("$(LOCAL_REPO_SYNC_DIR)", "curl"))
    safe_makedirs(curl_config_folder)
    curl_config_file_path = var_list.resolve(os.path.join(curl_config_folder, "$(CURL_CONFIG_FILE_NAME)"))
    num_config_files = int(var_list.resolve("$(PARALLEL_SYNC)"))
    config_file_list = self.instlObj.platform_helper.dl_tool.create_config_files(curl_config_file_path, num_config_files)
    logging.info("Num parallel syncs: %d", len(config_file_list))
    if len(config_file_list) > 0:
        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Downloading with "+str(len(config_file_list))+" processes in parallel")
        parallel_run_config_file_path = var_list.resolve(os.path.join(curl_config_folder, "$(CURL_CONFIG_FILE_NAME).parallel-run"))
        self.instlObj.batch_accum += self.instlObj.platform_helper.dl_tool.download_from_config_files(parallel_run_config_file_path, config_file_list)
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Downloading "+str(self.files_to_download)+" files done", self.files_to_download)
        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
    # checksum verification for everything that was marked during download
    num_files_to_check = self.work_info_map.num_subs_in_tree(what="file")
    logging.info("Num files to checksum check: %d", num_files_to_check)
    if num_files_to_check > 0:
        self.instlObj.batch_accum += self.instlObj.platform_helper.check_checksum_for_folder("$(TO_SYNC_INFO_MAP_PATH)")
        self.instlObj.platform_helper.num_items_for_progress_report += num_files_to_check
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Check checksum done")
        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
    # unpack wtar archives in the synced folder; count is only an estimate
    # used for progress reporting
    num_files_to_unwtar_estimation = self.estimate_num_unwtar_actions()
    logging.info("Num files to unwtar: %d", num_files_to_unwtar_estimation)
    self.instlObj.batch_accum += self.instlObj.platform_helper.unwtar_current_folder()
    self.instlObj.platform_helper.num_items_for_progress_report += num_files_to_unwtar_estimation
    self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Unwtar done")
    self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
def init_from_cmd_line_options(self, cmd_line_options_obj):
    """ turn command line options into variables

        cmd_line_options_obj: parsed command line options object; attributes
            named in the tables below are read with getattr, plus .command,
            .subject (optional), .state_file, .filter_out and .run.
        Side effects: defines const and non-const variables in the global
        var_list.
    """
    # options that become const variables; each maps to
    # (variable name, default value or None when there is no default)
    const_attrib_to_var = {
        "input_file": ("__MAIN_INPUT_FILE__", None),
        "output_file": ("__MAIN_OUT_FILE__", None),
        "props_file": ("__PROPS_FILE__", None),
        "config_file": ("__CONFIG_FILE__", None),
        "sh1_checksum": ("__SHA1_CHECKSUM__", None),
        "rsa_signature": ("__RSA_SIGNATURE__", None),
        "start_progress": ("__START_DYNAMIC_PROGRESS__", "0"),
        "total_progress": ("__TOTAL_DYNAMIC_PROGRESS__", "0"),
        "just_with_number": ("__JUST_WITH_NUMBER__", "0"),
        "limit_command_to": ("__LIMIT_COMMAND_TO__", None),
    }
    for attrib, var in const_attrib_to_var.iteritems():
        attrib_value = getattr(cmd_line_options_obj, attrib)
        if attrib_value:
            var_list.add_const_config_variable(var[0], "from command line options", *attrib_value)
        elif var[1] is not None:  # there's a default; "is not None" so falsy
                                  # defaults such as "" are not skipped
                                  # (consistent with the var_stack variant)
            var_list.add_const_config_variable(var[0], "from default", var[1])
    # options that become plain (non-const) variables; only set when given
    non_const_attrib_to_var = {
        "filter_in": "__FILTER_IN_VERSION__",
        "target_repo_rev": "TARGET_REPO_REV",
        "base_repo_rev": "BASE_REPO_REV",
    }
    for attrib, var in non_const_attrib_to_var.iteritems():
        attrib_value = getattr(cmd_line_options_obj, attrib)
        if attrib_value:
            var_list.set_var(var, "from command line options").append(attrib_value[0])
    if cmd_line_options_obj.command:
        var_list.set_var("__MAIN_COMMAND__", "from command line options").append(cmd_line_options_obj.command)
    # __HELP_SUBJECT__ is always defined, empty when no subject was requested
    if hasattr(cmd_line_options_obj, "subject") and cmd_line_options_obj.subject is not None:
        var_list.add_const_config_variable("__HELP_SUBJECT__", "from command line options", cmd_line_options_obj.subject)
    else:
        var_list.add_const_config_variable("__HELP_SUBJECT__", "from command line options", "")
    if cmd_line_options_obj.state_file:
        var_list.add_const_config_variable("__MAIN_STATE_FILE__", "from command line options", cmd_line_options_obj.state_file)
    if cmd_line_options_obj.filter_out:
        var_list.add_const_config_variable("__FILTER_OUT_PATHS__", "from command line options",
                                           *cmd_line_options_obj.filter_out)
    if cmd_line_options_obj.run:
        var_list.add_const_config_variable("__RUN_BATCH_FILE__", "from command line options", "yes")