def read_include_node(self, i_node, *args, **kwargs):
    """Recursively read a yaml !include node.

    A scalar node names a file/url to read directly; a sequence is processed
    element by element; a mapping with a "url" key is downloaded (optionally
    verified against "checksum" and "sig") and, if a "copy" key is present,
    copy commands for the cached file are queued in the batch accumulator.
    """
    if i_node.isScalar():
        resolved_file_name = var_stack.ResolveStrToStr(i_node.value)
        self.read_yaml_file(resolved_file_name, *args, **kwargs)
    elif i_node.isSequence():
        for sub_i_node in i_node:
            self.read_include_node(sub_i_node, *args, **kwargs)
    elif i_node.isMapping():
        if "url" in i_node:
            # guards the "copy" step: only copy a file that was actually read
            file_was_downloaded_and_read = False
            cached_files_dir = self.get_default_sync_dir(continue_dir="cache", make_dir=True)
            resolved_file_url = var_stack.ResolveStrToStr(i_node["url"].value)
            cached_file_path = None
            expected_checksum = None
            if "checksum" in i_node:
                expected_checksum = var_stack.ResolveStrToStr(i_node["checksum"].value)
                # cache file is named after its checksum
                cached_file_path = os.path.join(cached_files_dir, expected_checksum)
            expected_signature = None
            public_key_text = None
            if "sig" in i_node:
                expected_signature = var_stack.ResolveStrToStr(i_node["sig"].value)
                public_key_text = self.provision_public_key_text()
            if expected_checksum is None:
                # no checksum: read the url directly, caching is not possible
                self.read_yaml_file(resolved_file_url, *args, **kwargs)
                cached_file_path = resolved_file_url
                file_was_downloaded_and_read = True
            else:
                try:
                    utils.download_from_file_or_url(resolved_file_url, cached_file_path,
                                                    connectionBase.translate_url, cache=True,
                                                    public_key=public_key_text,
                                                    textual_sig=expected_signature,
                                                    expected_checksum=expected_checksum)
                    self.read_yaml_file(cached_file_path, *args, **kwargs)
                    file_was_downloaded_and_read = True
                except (FileNotFoundError, urllib.error.URLError):
                    ignore = kwargs.get('ignore_if_not_exist', False)
                    if ignore:
                        print("'ignore_if_not_exist' specified, ignoring FileNotFoundError for", resolved_file_url)
                    else:
                        raise
            # BUGFIX: previously the copy step ran even when the download had
            # failed and the error was ignored, queuing a copy of a file that
            # was never fetched.
            if "copy" in i_node and file_was_downloaded_and_read:
                self.batch_accum.set_current_section('post')
                for copy_destination in i_node["copy"]:
                    need_to_copy = True
                    destination_file_resolved_path = var_stack.ResolveStrToStr(copy_destination.value)
                    if os.path.isfile(destination_file_resolved_path) and expected_checksum is not None:
                        # skip the copy when an identical file is already in place
                        checksums_match = utils.check_file_checksum(file_path=destination_file_resolved_path,
                                                                    expected_checksum=expected_checksum)
                        need_to_copy = not checksums_match
                    if need_to_copy:
                        # BUGFIX: derive the folder from the *resolved* path; the
                        # original split the raw node value, so mkdir could get a
                        # path still containing unresolved $(VAR) placeholders.
                        destination_folder = os.path.dirname(destination_file_resolved_path)
                        self.batch_accum += self.platform_helper.mkdir(destination_folder)
                        self.batch_accum += self.platform_helper.copy_tool.copy_file_to_file(cached_file_path,
                                                                                             destination_file_resolved_path,
                                                                                             link_dest=True)
def read_include_node(self, i_node, *args, **kwargs):
    """Recursively process a yaml !include node.

    Scalars name a file to read, sequences are handled one element at a
    time, and mappings carrying a "url" key are downloaded (optionally
    checksum-verified), read, and — when a "copy" key is present and the
    download succeeded — queued for copying to each listed destination.
    """
    if i_node.isScalar():
        kwargs['original-path-to-file'] = i_node.value
        self.read_yaml_file(config_vars.resolve_str(i_node.value), *args, **kwargs)
    elif i_node.isSequence():
        for sub_node in i_node:
            self.read_include_node(sub_node, *args, **kwargs)
    elif i_node.isMapping() and "url" in i_node:
        download_succeeded = False
        kwargs['original-path-to-file'] = i_node["url"].value
        resolved_file_url = config_vars.resolve_str(i_node["url"].value)
        expected_checksum = config_vars.resolve_str(i_node["checksum"].value) if "checksum" in i_node else None
        try:
            downloaded_path = utils.download_from_file_or_url(
                in_url=resolved_file_url,
                config_vars=config_vars,
                in_target_path=None,
                translate_url_callback=connectionBase.translate_url,
                cache_folder=self.get_aux_cache_dir(make_dir=True),
                expected_checksum=expected_checksum)
            self.read_yaml_file(downloaded_path, *args, **kwargs)
            download_succeeded = True
        except (FileNotFoundError, urllib.error.URLError):
            if kwargs.get('ignore_if_not_exist', False):
                self.progress(f"'ignore_if_not_exist' specified, ignoring FileNotFoundError for {resolved_file_url}")
            else:
                raise
        if download_succeeded and "copy" in i_node:
            self.batch_accum.set_current_section('post')
            for copy_destination in i_node["copy"]:
                dest_path = utils.ExpandAndResolvePath(config_vars.resolve_str(copy_destination.value))
                # a copy is redundant only when the destination exists and its
                # checksum matches the expected one (short-circuit evaluation
                # preserves the original call order)
                already_there = (dest_path.is_file()
                                 and expected_checksum is not None
                                 and utils.check_file_checksum(file_path=dest_path,
                                                               expected_checksum=expected_checksum))
                if not already_there:
                    self.batch_accum += MakeDir(dest_path.parent, chowner=True)
                    self.batch_accum += CopyFileToFile(downloaded_path, dest_path,
                                                       hard_links=False, copy_owner=True)
def read_remote_info_map(self):
    """Download the remote info map (the index of static files available
    for syncing), load it, and write a copy to the local bookkeeping
    folder for reference and debugging.
    """
    url = None
    try:
        # make sure both bookkeeping folders exist before downloading
        for dir_var in ("LOCAL_REPO_BOOKKEEPING_DIR", "LOCAL_REPO_REV_BOOKKEEPING_DIR"):
            os.makedirs(var_stack.ResolveVarToStr(dir_var), exist_ok=True)
        url = var_stack.ResolveVarToStr("INFO_MAP_FILE_URL")
        local_path = var_stack.ResolveVarToStr("LOCAL_COPY_OF_REMOTE_INFO_MAP_PATH")
        utils.download_from_file_or_url(url, local_path,
                                        connectionBase.translate_url, cache=True,
                                        expected_checksum=var_stack.ResolveVarToStr("INFO_MAP_FILE_URL_CHECKSUM"))
        self.instlObj.read_info_map_from_file(local_path)
        self.instlObj.info_map_table.write_to_file(
            var_stack.ResolveVarToStr("NEW_HAVE_INFO_MAP_PATH"),
            field_to_write=('path', 'flags', 'revision', 'checksum', 'size'))
    except Exception:
        print("Exception reading info_map:", url)
        raise
def read_remote_info_map(self):
    """ Reads the info map of the static files available for syncing.
        Writes the map to local sync folder for reference and debugging.
    """
    # kept for the error message below; set once the url is known
    info_map_file_url = None
    try:
        with self.instlObj.info_map_table.reading_files_context():
            # ensure the local bookkeeping folders exist before any download
            os.makedirs(os.fspath(config_vars["LOCAL_REPO_BOOKKEEPING_DIR"]), exist_ok=True)
            os.makedirs(os.fspath(config_vars["LOCAL_REPO_REV_BOOKKEEPING_DIR"]), exist_ok=True)
            # derive INSTL_FOLDER_BASE_URL / INFO_MAP_FILE_URL when not given,
            # building the repo-rev folder hierarchy on demand
            if "INSTL_FOLDER_BASE_URL" not in config_vars:
                if "REPO_REV_FOLDER_HIERARCHY" not in config_vars:
                    config_vars["REPO_REV_FOLDER_HIERARCHY"] = self.instlObj.repo_rev_to_folder_hierarchy(config_vars["REPO_REV"].str())
                config_vars["INSTL_FOLDER_BASE_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(REPO_REV_FOLDER_HIERARCHY)/instl"
            if "INFO_MAP_FILE_URL" not in config_vars:
                config_vars["INFO_MAP_FILE_URL"] = config_vars.resolve_str("$(INSTL_FOLDER_BASE_URL)/info_map.txt")
            info_map_file_url = config_vars["INFO_MAP_FILE_URL"].str()
            info_map_file_expected_checksum = None
            if "INFO_MAP_CHECKSUM" in config_vars:
                info_map_file_expected_checksum = config_vars["INFO_MAP_CHECKSUM"].str()
            local_copy_of_info_map_in = os.fspath(config_vars["LOCAL_COPY_OF_REMOTE_INFO_MAP_PATH"])
            # download may return a different path than requested (e.g. cache hit)
            local_copy_of_info_map_out = utils.download_from_file_or_url(
                in_url=info_map_file_url,
                config_vars=config_vars,
                in_target_path=local_copy_of_info_map_in,
                translate_url_callback=connectionBase.translate_url,
                cache_folder=self.instlObj.get_default_sync_dir(continue_dir="cache", make_dir=True),
                expected_checksum=info_map_file_expected_checksum)
            self.instlObj.info_map_table.read_from_file(local_copy_of_info_map_out)
            self.instlObj.progress(f"read info_map {info_map_file_url}")
            # active items may declare additional info maps; fetch and merge each
            additional_info_maps = self.instlObj.items_table.get_details_for_active_iids("info_map", unique_values=True)
            for additional_info_map in additional_info_maps:
                # try to get the zipped info_map
                additional_info_map_file_name = config_vars.resolve_str(f"{additional_info_map}$(WZLIB_EXTENSION)")
                path_in_main_info_map = config_vars.resolve_str(f"instl/{additional_info_map_file_name}")
                additional_info_map_item = self.instlObj.info_map_table.get_file_item(path_in_main_info_map)
                if not additional_info_map_item:  # zipped not found try the unzipped inf_map
                    additional_info_map_file_name = additional_info_map
                    path_in_main_info_map = config_vars.resolve_str(f"instl/{additional_info_map}")
                    additional_info_map_item = self.instlObj.info_map_table.get_file_item(path_in_main_info_map)
                # checksum comes from the main info map's entry, when present
                checksum = additional_info_map_item.checksum if additional_info_map_item else None
                info_map_file_url = config_vars.resolve_str(f"$(INSTL_FOLDER_BASE_URL)/{additional_info_map_file_name}")
                local_copy_of_info_map_in = config_vars.resolve_str(f"$(LOCAL_REPO_REV_BOOKKEEPING_DIR)/{additional_info_map}")
                local_copy_of_info_map_out = utils.download_from_file_or_url(
                    in_url=info_map_file_url,
                    config_vars=config_vars,
                    in_target_path=local_copy_of_info_map_in,
                    translate_url_callback=connectionBase.translate_url,
                    cache_folder=self.instlObj.get_default_sync_dir("cache", make_dir=True),
                    expected_checksum=checksum)
                self.instlObj.info_map_table.read_from_file(local_copy_of_info_map_out)
                self.instlObj.progress(f"read info_map {info_map_file_url}")
            # write the merged map locally for reference and debugging
            new_have_info_map_path = os.fspath(config_vars["NEW_HAVE_INFO_MAP_PATH"])
            self.instlObj.info_map_table.write_to_file(new_have_info_map_path,
                                                       field_to_write=('path', 'flags', 'revision', 'checksum', 'size'))
    except Exception:
        log.error(f"""Exception reading info_map: {info_map_file_url}""")
        raise
def read_remote_info_map(self):
    """ Reads the info map of the static files available for syncing.
        Writes the map to local sync folder for reference and debugging.
    """
    # kept for the error message below; set once the url is known
    info_map_file_url = None
    try:
        with self.instlObj.info_map_table.reading_files_context():
            # ensure the local bookkeeping folders exist before any download
            os.makedirs(var_stack.ResolveVarToStr("LOCAL_REPO_BOOKKEEPING_DIR"), exist_ok=True)
            os.makedirs(var_stack.ResolveVarToStr("LOCAL_REPO_REV_BOOKKEEPING_DIR"), exist_ok=True)
            # derive INSTL_FOLDER_BASE_URL / INFO_MAP_FILE_URL when not given,
            # building the repo-rev folder hierarchy on demand
            if "INSTL_FOLDER_BASE_URL" not in var_stack:
                if "REPO_REV_FOLDER_HIERARCHY" not in var_stack:
                    var_stack.set_var("REPO_REV_FOLDER_HIERARCHY").append(self.instlObj.repo_rev_to_folder_hierarchy(var_stack.ResolveVarToStr("REPO_REV")))
                var_stack.set_var("INSTL_FOLDER_BASE_URL").append("$(BASE_LINKS_URL)/$(REPO_NAME)/$(REPO_REV_FOLDER_HIERARCHY)/instl")
            if "INFO_MAP_FILE_URL" not in var_stack:
                var_stack.set_var("INFO_MAP_FILE_URL").append(var_stack.ResolveStrToStr("$(INSTL_FOLDER_BASE_URL)/info_map.txt"))
            info_map_file_url = var_stack.ResolveVarToStr("INFO_MAP_FILE_URL")
            info_map_file_expected_checksum = None
            if "INFO_MAP_CHECKSUM" in var_stack:
                info_map_file_expected_checksum = var_stack.ResolveVarToStr("INFO_MAP_CHECKSUM")
            local_copy_of_info_map_in = var_stack.ResolveVarToStr("LOCAL_COPY_OF_REMOTE_INFO_MAP_PATH")
            # download may return a different path than requested (e.g. cache hit)
            local_copy_of_info_map_out = utils.download_from_file_or_url(in_url=info_map_file_url,
                                                                         in_target_path=local_copy_of_info_map_in,
                                                                         translate_url_callback=connectionBase.translate_url,
                                                                         cache_folder=self.instlObj.get_default_sync_dir(continue_dir="cache", make_dir=True),
                                                                         expected_checksum=info_map_file_expected_checksum)
            #assert local_copy_of_info_map_in == local_copy_of_info_map_out, local_copy_of_info_map_in +" != "+ local_copy_of_info_map_out
            self.instlObj.read_info_map_from_file(local_copy_of_info_map_out)
            self.instlObj.progress("read info_map {}".format(info_map_file_url))
            # active items may declare additional info maps; fetch and merge each
            additional_info_maps = self.instlObj.items_table.get_details_for_active_iids("info_map", unique_values=True)
            for additional_info_map in additional_info_maps:
                # try to get the zipped info_map
                additional_info_map_file_name = var_stack.ResolveStrToStr("{}$(WZLIB_EXTENSION)".format(additional_info_map))
                path_in_main_info_map = var_stack.ResolveStrToStr("instl/{}".format(additional_info_map_file_name))
                additional_info_map_item = self.instlObj.info_map_table.get_file_item(path_in_main_info_map)
                if not additional_info_map_item:  # zipped not found try the unzipped inf_map
                    additional_info_map_file_name = additional_info_map
                    path_in_main_info_map = var_stack.ResolveStrToStr("instl/{}".format(additional_info_map))
                    additional_info_map_item = self.instlObj.info_map_table.get_file_item(path_in_main_info_map)
                # checksum comes from the main info map's entry, when present
                checksum = additional_info_map_item.checksum if additional_info_map_item else None
                info_map_file_url = var_stack.ResolveStrToStr("$(INSTL_FOLDER_BASE_URL)/{}".format(additional_info_map_file_name))
                local_copy_of_info_map_in = var_stack.ResolveStrToStr("$(LOCAL_REPO_REV_BOOKKEEPING_DIR)/{}".format(additional_info_map))
                local_copy_of_info_map_out = utils.download_from_file_or_url(in_url=info_map_file_url,
                                                                             in_target_path=local_copy_of_info_map_in,
                                                                             translate_url_callback=connectionBase.translate_url,
                                                                             cache_folder=self.instlObj.get_default_sync_dir("cache", make_dir=True),
                                                                             expected_checksum=checksum)
                #assert local_copy_of_info_map_in == local_copy_of_info_map_out, local_copy_of_info_map_in +" != "+ local_copy_of_info_map_out
                self.instlObj.read_info_map_from_file(local_copy_of_info_map_out)
                self.instlObj.progress("read info_map {}".format(info_map_file_url))
            # write the merged map locally for reference and debugging
            new_have_info_map_path = var_stack.ResolveVarToStr("NEW_HAVE_INFO_MAP_PATH")
            self.instlObj.info_map_table.write_to_file(new_have_info_map_path, field_to_write=('path', 'flags', 'revision', 'checksum', 'size'))
    except Exception:
        print("Exception reading info_map:", info_map_file_url)
        raise