def resolve_defined_paths(self):
    """Resolve each variable named in $(PATHS_TO_RESOLVE) through the path
    searcher (seeded with $(SEARCH_PATHS)) and record the resolved path back
    into var_stack."""
    search_paths = var_stack.resolve_to_list("$(SEARCH_PATHS)")
    self.path_searcher.add_search_paths(search_paths)
    for var_name in var_stack.resolve_to_list("$(PATHS_TO_RESOLVE)"):
        if var_name not in var_stack:
            continue  # nothing to resolve for undefined variables
        found_path = self.path_searcher.find_file(var_stack.resolve_var(var_name),
                                                  return_original_if_not_found=True)
        var_stack.set_var(var_name, "resolve_defined_paths").append(found_path)
def do_stage2svn(self):
    """Build batch instructions that mirror the staging folder into the svn
    checkout folder. When __LIMIT_COMMAND_TO__ is defined, only the listed
    sub-folders are compared; otherwise the whole staging folder is. The
    actual per-folder copy is delegated to stage2svn_for_folder()."""
    self.batch_accum.set_current_section('admin')
    if var_list.defined("__LIMIT_COMMAND_TO__"):
        print("limiting to ", "; ".join(var_list.resolve_to_list("$(__LIMIT_COMMAND_TO__)")))
    else:
        print ("no limiting to specific folder")
    stage_folder = var_list.resolve(("$(STAGING_FOLDER)"))
    svn_folder = var_list.resolve(("$(SVN_CHECKOUT_FOLDER)"))
    # unlock staging files first so the comparison/copy is not blocked by uchg flags
    self.batch_accum += self.platform_helper.unlock(stage_folder, recursive=True)
    self.batch_accum += self.platform_helper.progress("chflags -R nouchg "+stage_folder)
    self.batch_accum += self.platform_helper.new_line()
    self.batch_accum += self.platform_helper.cd(svn_folder)
    # build (staging-subfolder, svn-subfolder) pairs to compare
    stage_folder_svn_folder_pairs = []
    if var_list.defined("__LIMIT_COMMAND_TO__"):
        limit_list = var_list.resolve_to_list("$(__LIMIT_COMMAND_TO__)")
        for limit in limit_list:
            stage_folder_svn_folder_pairs.append( (os.path.join(stage_folder,limit) , os.path.join(svn_folder, limit) ) )
    else:
        stage_folder_svn_folder_pairs.append( (stage_folder , svn_folder) )
    for pair in stage_folder_svn_folder_pairs:
        # "Icon\015" is the classic Mac folder-icon file (trailing CR in its name)
        comparer = filecmp.dircmp(pair[0], pair[1], ignore=[".svn", ".DS_Store", "Icon\015"])
        self.stage2svn_for_folder(comparer)
    self.create_variables_assignment()
    self.write_batch_file()
    if "__RUN_BATCH_FILE__" in var_list:
        self.run_batch_file()
def check_version_compatibility(self):
    """Check that the running instl version satisfies INSTL_MINIMAL_VERSION.

    Returns:
        bool: True when INSTL_MINIMAL_VERSION is not defined, or when the
        current version ($(__INSTL_VERSION__)) is >= the required minimal
        version, comparing version components numerically, left to right.
    """
    retVal = True
    if "INSTL_MINIMAL_VERSION" in var_stack:
        # Materialize as lists so the comparison is a lexicographic compare
        # of ints; bare map() objects are not orderable under Python 3.
        inst_ver = list(map(int, var_stack.resolve_to_list("$(__INSTL_VERSION__)")))
        required_ver = list(map(int, var_stack.resolve_to_list("$(INSTL_MINIMAL_VERSION)")))
        retVal = inst_ver >= required_ver
    return retVal
def calculate_default_install_item_set(self):
    """Calculate the set of iids to install from the "MAIN_INSTALL_TARGETS" variable.
    The full set of install iids and orphan iids are also written to variables
    (__FULL_LIST_OF_INSTALL_TARGETS__ / __ORPHAN_INSTALL_TARGETS__).

    Raises:
        ValueError: if MAIN_INSTALL_TARGETS was not defined.
    """
    if "MAIN_INSTALL_TARGETS" not in var_list:
        raise ValueError("'MAIN_INSTALL_TARGETS' was not defined")
    for os_name in var_list.resolve_to_list("$(TARGET_OS_NAMES)"):
        InstallItem.begin_get_for_specific_os(os_name)
    self.installState.root_install_items.extend(var_list.resolve_to_list("$(MAIN_INSTALL_TARGETS)"))
    # Drop empty/None entries. A list comprehension keeps this a real list on
    # both Python 2 and Python 3; filter() returns a lazy iterator under 3.
    self.installState.root_install_items = [item for item in self.installState.root_install_items if item]
    self.installState.calculate_full_install_items_set(self)
    var_list.set_var("__FULL_LIST_OF_INSTALL_TARGETS__").extend(self.installState.full_install_items)
    var_list.set_var("__ORPHAN_INSTALL_TARGETS__").extend(self.installState.orphan_install_items)
def __init__(self, initial_vars):
    """Build the instl GUI: create the Tk root window, wire quit handlers,
    and allocate the Tk variables backing the client and admin tabs."""
    super(InstlGui, self).__init__(initial_vars)
    self.master = Tk()
    self.master.createcommand('exit', self.quit_app) # exit from quit menu or Command-Q
    self.master.protocol('WM_DELETE_WINDOW', self.quit_app) # exit from closing the window
    # commands for which a --limit option field is relevant
    self.commands_that_accept_limit_option = var_stack.resolve_to_list("$(__COMMANDS_WITH_LIMIT_OPTION__)")
    # --- client tab state ---
    self.client_command_name_var = StringVar()
    self.client_input_path_var = StringVar()
    self.client_input_combobox = None  # created later when the widgets are laid out
    self.client_output_path_var = StringVar()
    self.run_client_batch_file_var = IntVar()
    # --- admin tab state ---
    self.admin_command_name_var = StringVar()
    self.admin_config_path_var = StringVar()
    self.admin_output_path_var = StringVar()
    self.admin_stage_index_var = StringVar()
    self.admin_sync_url_var = StringVar()
    self.admin_svn_repo_var = StringVar()
    self.admin_config_file_dirty = True  # presumably forces a (re)read of the config file on first use -- TODO confirm
    self.run_admin_batch_file_var = IntVar()
    self.admin_limit_var = StringVar()
    self.limit_path_entry_widget = None  # created later when the widgets are laid out
    # --- client credentials state ---
    self.client_credentials_var = StringVar()
    self.client_credentials_on_var = IntVar()
def do_create_repo_rev_file(self):
    """Write the repo-rev yaml file for $(TARGET_REPO_REV).

    Collects the variables named in REPO_REV_FILE_VARS, adds computed
    signatures/checksums for the revision's info_map.txt and index.yaml,
    and dumps everything as a !define yaml document under
    $(ROOT_LINKS_FOLDER)/admin.

    Raises ValueError when REPO_REV_FILE_VARS is undefined, when it names a
    secret variable that must not be uploaded, or when a listed variable is
    missing.
    """
    if "REPO_REV_FILE_VARS" not in var_list:
        raise ValueError("REPO_REV_FILE_VARS must be defined")
    repo_rev_vars = var_list.resolve_to_list("$(REPO_REV_FILE_VARS)")
    var_list.set_var("REPO_REV").append("$(TARGET_REPO_REV)") # override the repo rev from the config file
    # refuse to write credentials/keys into a file that will be uploaded
    dangerous_intersection = set(repo_rev_vars).intersection(set(("AWS_ACCESS_KEY_ID","AWS_SECRET_ACCESS_KEY", "PRIVATE_KEY", "PRIVATE_KEY_FILE")))
    if dangerous_intersection:
        print("found", str(dangerous_intersection), "in REPO_REV_FILE_VARS, aborting")
        raise ValueError("file REPO_REV_FILE_VARS "+str(dangerous_intersection)+" and so is forbidden to upload")
    # signature + checksum of this revision's info_map
    info_map_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/$(TARGET_REPO_REV)/instl/info_map.txt")
    info_map_sigs = self.create_sig_for_file(info_map_file)
    var_list.set_var("INFO_MAP_SIG").append(info_map_sigs["SHA-512_rsa_sig"])
    var_list.set_var("INFO_MAP_CHECKSUM").append(info_map_sigs["sha1_checksum"])
    var_list.set_var("INDEX_URL").append("$(SYNC_BASE_URL)/$(REPO_REV)/instl/index.yaml")
    # signature + checksum of this revision's index.yaml
    index_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/$(TARGET_REPO_REV)/instl/index.yaml")
    index_file_sigs = self.create_sig_for_file(index_file)
    var_list.set_var("INDEX_SIG").append(index_file_sigs["SHA-512_rsa_sig"])
    var_list.set_var("INDEX_CHECKSUM").append(index_file_sigs["sha1_checksum"])
    # every variable destined for the repo-rev file must be defined
    for var in repo_rev_vars:
        if var not in var_list:
            raise ValueError(var+" is missing cannot write repo rev file")
    repo_rev_yaml = YamlDumpDocWrap(var_list.repr_for_yaml(repo_rev_vars, include_comments=False), '!define', "", explicit_start=True, sort_mappings=True)
    safe_makedirs(var_list.resolve("$(ROOT_LINKS_FOLDER)/admin"))
    local_file = var_list.resolve("$(ROOT_LINKS_FOLDER)/admin/$(REPO_REV_FILE_NAME).$(TARGET_REPO_REV)")
    with open(local_file, "w") as wfd:
        writeAsYaml(repo_rev_yaml, out_stream=wfd, indentor=None, sort=True)
    print("created", local_file)
def is_acceptable_yaml_doc(self, doc_node):
    """Return True when doc_node carries one of the yaml document tags this
    reader accepts ($(ACCEPTABLE_YAML_DOC_TAGS) plus the built-in tags,
    with a compiled/uncompiled variant when __INSTL_COMPILED__ is set)."""
    allowed = var_stack.resolve_to_list("$(ACCEPTABLE_YAML_DOC_TAGS)") + ["define", "define_const", "index", 'require']
    if "__INSTL_COMPILED__" in var_stack:
        is_compiled = var_stack.resolve("$(__INSTL_COMPILED__)") == "True"
        allowed.append("define_Compiled" if is_compiled else "define_Uncompiled")
    # yaml tags carry a leading '!'
    return doc_node.tag in ["!" + tag_name for tag_name in allowed]
def do_wtar(self):
    """Build batch instructions that wtar (tar + split) items in the staging
    folder that match $(WTAR_REGEX) or exceed $(MAX_FILE_SIZE), as decided by
    should_wtar(). Stale .wtar parts are deleted first and the original item
    is removed after it has been tarred."""
    self.batch_accum.set_current_section('admin')
    regex_list = var_list.resolve_to_list("$(WTAR_REGEX)")
    compiled_regex_list = list()
    for regex in regex_list:
        compiled_regex_list.append(re.compile(regex))
    self.batch_accum += self.platform_helper.split_func()
    max_file_size = int(var_list.resolve(("$(MAX_FILE_SIZE)")))
    stage_folder = var_list.resolve(("$(STAGING_FOLDER)"))
    # unlock so files can be deleted/replaced
    self.batch_accum += self.platform_helper.unlock(stage_folder, recursive=True)
    self.batch_accum += self.platform_helper.progress("chflags -R nouchg "+stage_folder)
    self.batch_accum += self.platform_helper.new_line()
    # iterative depth-first walk; folders whose items are not tarred are descended into
    folders_to_check = [stage_folder]
    while len(folders_to_check) > 0:
        folder_to_check = folders_to_check.pop()
        dir_items = os.listdir(folder_to_check)
        items_to_tar = list()
        for dir_item in dir_items:
            dir_item_full_path = os.path.join(folder_to_check, dir_item)
            if not os.path.islink(dir_item_full_path):  # symlinks are never tarred or descended
                to_tar = self.should_wtar(dir_item_full_path, compiled_regex_list, max_file_size)
                if to_tar:
                    items_to_tar.append(dir_item)
                else:
                    if os.path.isdir(dir_item_full_path):
                        folders_to_check.append(dir_item_full_path)
        if items_to_tar:
            self.batch_accum += self.platform_helper.cd(folder_to_check)
            for item_to_tar in items_to_tar:
                item_to_tar_full_path = os.path.join(folder_to_check, item_to_tar)
                if item_to_tar.endswith(".wtar"):
                    # already a wtar: delete previous split parts (name.wtar.aa ...) and re-split
                    for delete_file in dir_items:
                        if fnmatch.fnmatch(delete_file, item_to_tar+'.??'):
                            self.batch_accum += self.platform_helper.rmfile(delete_file)
                    self.batch_accum += self.platform_helper.split(item_to_tar)
                else:
                    # delete any stale wtar artifacts for this item, then tar and split it
                    for delete_file in dir_items:
                        if fnmatch.fnmatch(delete_file, item_to_tar+'.wtar*'):
                            self.batch_accum += self.platform_helper.rmfile(delete_file)
                    self.prepare_permissions_for_wtar(item_to_tar_full_path)
                    self.batch_accum += self.platform_helper.tar(item_to_tar)
                    self.batch_accum += self.platform_helper.split(item_to_tar+".wtar")
                # remove the original now that the wtar replaces it
                if os.path.isdir(item_to_tar_full_path):
                    self.batch_accum += self.platform_helper.rmdir(item_to_tar, recursive=True)
                elif os.path.isfile(item_to_tar_full_path):
                    self.batch_accum += self.platform_helper.rmfile(item_to_tar)
                self.batch_accum += self.platform_helper.progress(item_to_tar_full_path)
                self.batch_accum += self.platform_helper.new_line()
    self.create_variables_assignment()
    self.write_batch_file()
    if "__RUN_BATCH_FILE__" in var_list:
        self.run_batch_file()
def should_file_be_exec(self, file_path):
    """Return True if file_path matches any regex in $(EXEC_PROP_REGEX).

    Best-effort: any failure (e.g. the variable is undefined or a pattern
    is malformed) is treated as "not executable" and returns False, which
    preserves the original silent-fallback behavior. The original used
    `raise Exception` as a loop-break plus a bare `except: pass`; this
    version expresses the same logic with any().
    """
    try:
        regex_list = var_list.resolve_to_list("$(EXEC_PROP_REGEX)")
        return any(re.search(regex, file_path) for regex in regex_list)
    except Exception:
        return False
def create_copy_instructions_for_source(self, source):
    """ source is a tuple (source_path, tag), where tag is either !file or !dir.
    Emits copy-tool instructions into batch_accum for the given source. When a
    !file or !dir source is not present in have_map, falls back to copying its
    wtar split parts found next to it (via WtarFilter on the parent folder). """
    source_path = os.path.normpath("$(LOCAL_REPO_SYNC_DIR)/" + source[0])
    ignore_list = var_stack.resolve_to_list("$(COPY_IGNORE_PATTERNS)")
    if source[1] == '!file':
        # get a single file, not recommended
        source_item = self.have_map.get_item_at_path(source[0])
        if source_item:
            self.batch_accum += self.platform_helper.copy_tool.copy_file_to_dir(source_path, ".", link_dest=True, ignore=ignore_list)
        else:
            # file itself not in have_map: copy its wtar parts instead
            source_folder, source_name = os.path.split(source[0])
            source_folder_item = self.have_map.get_item_at_path(source_folder)
            if source_folder_item:
                for wtar_item in source_folder_item.walk_items_with_filter(svnTree.WtarFilter(source_name), what="file"):
                    source_path = os.path.normpath("$(LOCAL_REPO_SYNC_DIR)/" + wtar_item.full_path())
                    self.batch_accum += self.platform_helper.copy_tool.copy_file_to_dir(source_path, ".", link_dest=True, ignore=ignore_list)
    elif source[1] == '!dir_cont':
        # get all files and folders from a folder
        self.batch_accum += self.platform_helper.copy_tool.copy_dir_contents_to_dir(source_path, ".", link_dest=True, ignore=ignore_list, preserve_dest_files=True) # preserve files already in destination
    elif source[1] == '!files':
        # get all files from a folder
        self.batch_accum += self.platform_helper.copy_tool.copy_dir_files_to_dir(source_path, ".", link_dest=True, ignore=ignore_list)
    else:
        # !dir
        source_item = self.have_map.get_item_at_path(source[0])
        if source_item:
            self.batch_accum += self.platform_helper.copy_tool.copy_dir_to_dir(source_path, ".", link_dest=True, ignore=ignore_list)
        else:
            # folder itself not in have_map: copy its wtar parts instead
            source_folder, source_name = os.path.split(source[0])
            source_folder_item = self.have_map.get_item_at_path(source_folder)
            if source_folder_item:
                for wtar_item in source_folder_item.walk_items_with_filter(svnTree.WtarFilter(source_name), what="file"):
                    source_path = os.path.normpath("$(LOCAL_REPO_SYNC_DIR)/" + wtar_item.full_path())
                    self.batch_accum += self.platform_helper.copy_tool.copy_file_to_dir(source_path, ".", link_dest=True, ignore=ignore_list)
    logging.debug("%s; (%s - %s)", source_path, var_stack.resolve(source_path), source[1])
def do_trans(self):
    """Read an info map ($(__MAIN_INPUT_FILE__)), optionally merge a props
    file, filter paths/versions, clamp last_rev to $(BASE_REPO_REV), and
    write the transformed info map back out."""
    self.read_info_map_file(var_list.resolve("$(__MAIN_INPUT_FILE__)"))
    if "__PROPS_FILE__" in var_list:
        self.read_info_map_file(var_list.resolve("$(__PROPS_FILE__)"))
    self.filter_out_info_map(var_list.resolve_to_list("$(__FILTER_OUT_PATHS__)"))
    base_rev = int(var_list.resolve("$(BASE_REPO_REV)"))
    if base_rev > 0:
        # no item may report a last_rev below the base repo rev
        for item in self.svnTree.walk_items():
            item.set_last_rev(max(item.last_rev(), base_rev))
    if "__FILTER_IN_VERSION__" in var_list:
        self.filter_in_specific_version(var_list.resolve("$(__FILTER_IN_VERSION__)"))
    self.write_info_map_file()
    if "__RUN_BATCH_FILE__" in var_list:
        self.run_batch_file()
def create_copy_instructions_for_source(self, source):
    """ source is a tuple (source_folder, tag), where tag is either !file or !dir """
    folder, tag = source[0], source[1]
    source_path = os.path.normpath("$(LOCAL_REPO_SOURCES_DIR)/" + folder)
    ignore_list = var_list.resolve_to_list("$(COPY_IGNORE_PATTERNS)")
    copy_tool = self.platform_helper.copy_tool
    if tag == '!file':
        # get a single file, not recommended
        instruction = copy_tool.copy_file_to_dir(source_path, ".", link_dest=True, ignore=ignore_list)
    elif tag == '!dir_cont':
        # get all files and folders from a folder
        instruction = copy_tool.copy_dir_contents_to_dir(source_path, ".", link_dest=True, ignore=ignore_list)
    elif tag == '!files':
        # get all files from a folder
        instruction = copy_tool.copy_dir_files_to_dir(source_path, ".", link_dest=True, ignore=ignore_list)
    else:
        # !dir
        instruction = copy_tool.copy_dir_to_dir(source_path, ".", link_dest=True, ignore=ignore_list)
    self.batch_accum += instruction
    logging.debug("%s; (%s - %s)", source_path, var_list.resolve(source_path), tag)
def do_remove_empty_folders(self):
    """Remove, bottom-up, every folder under $(__MAIN_INPUT_FILE__) that is
    empty except for files named in $(REMOVE_EMPTY_FOLDERS_IGNORE_FILES);
    those ignorable files are deleted together with their folder."""
    root_folder = var_stack.resolve("$(__MAIN_INPUT_FILE__)")
    ignorable_names = var_stack.resolve_to_list("$(REMOVE_EMPTY_FOLDERS_IGNORE_FILES)")
    for current_path, sub_dirs, file_names in os.walk(root_folder, topdown=False, onerror=None, followlinks=False):
        # with topdown=False os.walk pre-computes sub_dirs and has no knowledge
        # of directories we already deleted, so re-check what still exists
        surviving_dirs = [d for d in sub_dirs if os.path.isdir(os.path.join(current_path, d))]
        if surviving_dirs:
            continue
        if all(name in ignorable_names for name in file_names):
            # only remove the ignored files if the folder is to be removed
            for name in file_names:
                os.remove(os.path.join(current_path, name))
            os.rmdir(current_path)
def create_client_command_line(self):
    """Assemble the instl client command line from the current GUI state."""
    command_line = [
        var_stack.resolve_var("__INSTL_EXE_PATH__"),
        var_stack.resolve_var("CLIENT_GUI_CMD"),
        "--in", var_stack.resolve_var("CLIENT_GUI_IN_FILE"),
        "--out", var_stack.resolve_var("CLIENT_GUI_OUT_FILE"),
    ]
    if self.client_credentials_on_var.get():
        credentials = self.client_credentials_var.get()
        if credentials != "":
            command_line.extend(("--credentials", credentials))
    if self.run_client_batch_file_var.get() == 1:
        command_line.append("--run")
    # on Windows, when not running as a frozen executable, launch through the interpreter
    if 'Win' in var_stack.resolve_to_list("$(__CURRENT_OS_NAMES__)"):
        if not getattr(sys, 'frozen', False):
            command_line.insert(0, sys.executable)
    return command_line
def accumulate_unique_actions(self, action_type, iid_list):
    """ accumulate action_type actions from iid_list, eliminating duplicates.
    A progress message is appended after each newly-seen action; the message
    carries a running per-item counter when an item contributes more than one
    new action. The collected actions are appended to batch_accum. """
    unique_actions = unique_list()  # unique_list will eliminate identical actions while keeping the order
    for IID in iid_list:
        with self.install_definitions_index[IID] as installi:
            item_actions = var_list.resolve_to_list("$(iid_action_list_"+action_type+")")
            num_unique_actions = 0
            for an_action in item_actions:
                # unique_list silently drops duplicates, so compare lengths
                # before/after the append to detect whether this action is new
                len_before = len(unique_actions)
                unique_actions.append(an_action)
                len_after = len(unique_actions)
                if len_before < len_after:  # add progress only for the first same action
                    num_unique_actions += 1
                    action_description = self.action_type_to_progress_message[action_type]
                    if num_unique_actions > 1:
                        action_description = " ".join( (action_description, str(num_unique_actions)) )
                    unique_actions.append(self.platform_helper.progress("{installi.name} {action_description}".format(**locals())))
    self.batch_accum += unique_actions
    logging.info("... %s actions: %d", action_type, len(unique_actions))
def create_admin_command_line(self):
    """Assemble the instl admin command line from the command template and GUI state."""
    command_name = var_stack.resolve_var("ADMIN_GUI_CMD")
    command_line = var_stack.resolve_var_to_list(admin_command_template_variables[command_name])
    # some special handling of command line parameters cannot yet be expressed in the command template
    if command_name != 'depend' and self.admin_command_name_var.get() in self.commands_that_accept_limit_option:
        limit_path = self.admin_limit_var.get()
        if limit_path != "":
            command_line.append("--limit")
            command_line.extend(shlex.split(limit_path))  # there might be space separated paths
    if self.run_admin_batch_file_var.get() == 1 and command_name in self.commands_with_run_option_list:
        command_line.append("--run")
    # on Windows, when not running as a frozen executable, launch through the interpreter
    if 'Win' in var_stack.resolve_to_list("$(__CURRENT_OS_NAMES__)"):
        if not getattr(sys, 'frozen', False):
            command_line.insert(0, sys.executable)
    return command_line
def is_acceptable_yaml_doc(self, doc_node):
    """Return True when doc_node carries one of the yaml document tags this
    reader accepts ($(ACCEPTABLE_YAML_DOC_TAGS) plus the built-in tags)."""
    tag_names = var_list.resolve_to_list("$(ACCEPTABLE_YAML_DOC_TAGS)") + ["define", "define_const", "index"]
    # yaml tags carry a leading '!'
    return doc_node.tag in ["!" + tag_name for tag_name in tag_names]
def create_copy_instructions(self):
    """Build the full 'copy' section of the batch file: create target folders,
    run pre/post actions, emit per-folder copy instructions for each install
    item, handle no-copy items, and report orphan iids."""
    # copy and actions instructions for sources
    self.batch_accum.set_current_section('copy')
    self.batch_accum += self.platform_helper.progress("Starting copy from $(LOCAL_REPO_SOURCES_DIR)")
    sorted_target_folder_list = sorted(self.installState.install_items_by_target_folder, key=lambda fold: var_list.resolve(fold))
    # first create all target folders so to avoid dependency order problems such as creating links between folders
    for folder_name in sorted_target_folder_list:
        self.batch_accum += self.platform_helper.mkdir_with_owner(folder_name)
    self.batch_accum += self.platform_helper.progress("Make directories done")
    self.accumulate_unique_actions('copy_in', self.installState.full_install_items)
    # Mac-only: resolve .symlink placeholder files before copying
    if 'Mac' in var_list.resolve_to_list("$(__CURRENT_OS_NAMES__)") and 'Mac' in var_list.resolve_to_list("$(TARGET_OS)"):
        self.batch_accum += self.platform_helper.resolve_symlink_files(in_dir="$(LOCAL_REPO_SOURCES_DIR)")
        self.batch_accum += self.platform_helper.progress("Resolve .symlink files")
    # restore executable bits according to the have-info map, if one exists
    have_map = svnTree.SVNTree()
    have_info_path = var_list.resolve("$(NEW_HAVE_INFO_MAP_PATH)") # in case we're in synccopy command
    if not os.path.isfile(have_info_path):
        have_info_path = var_list.resolve("$(HAVE_INFO_MAP_PATH)") # in case we're in copy command
    if os.path.isfile(have_info_path):
        have_map.read_info_map_from_file(have_info_path, format="text")
        num_files_to_set_exec = have_map.num_subs_in_tree(what="file", predicate=lambda in_item: in_item.isExecutable())
        logging.info("Num files to set exec: %d", num_files_to_set_exec)
        if num_files_to_set_exec > 0:
            self.batch_accum += self.platform_helper.pushd("$(LOCAL_REPO_SYNC_DIR)")
            self.batch_accum += self.platform_helper.set_exec_for_folder(have_info_path)
            self.platform_helper.num_items_for_progress_report += num_files_to_set_exec
            self.batch_accum += self.platform_helper.progress("Set exec done")
            self.batch_accum += self.platform_helper.new_line()
            self.batch_accum += self.platform_helper.popd()
    # per-target-folder copy instructions
    for folder_name in sorted_target_folder_list:
        items_in_folder = self.installState.install_items_by_target_folder[folder_name]
        logging.info("folder %s", var_list.resolve(folder_name))
        self.batch_accum += self.platform_helper.new_line()
        self.batch_accum += self.platform_helper.cd(folder_name)
        # accumulate folder_in actions from all items, eliminating duplicates
        self.accumulate_unique_actions('folder_in', items_in_folder)
        batch_accum_len_before = len(self.batch_accum)
        self.batch_accum += self.platform_helper.copy_tool.begin_copy_folder()
        for IID in items_in_folder:
            with self.install_definitions_index[IID] as installi:
                for source_var in var_list.get_configVar_obj("iid_source_var_list"):
                    source = var_list.resolve_var_to_list(source_var)
                    self.batch_accum += var_list.resolve_to_list("$(iid_action_list_before)")
                    self.create_copy_instructions_for_source(source)
                    self.batch_accum += var_list.resolve_to_list("$(iid_action_list_after)")
                self.batch_accum += self.platform_helper.progress("Copy {installi.name}".format(**locals()))
        self.batch_accum += self.platform_helper.copy_tool.end_copy_folder()
        logging.info("... copy actions: %d", len(self.batch_accum) - batch_accum_len_before)
        # accumulate folder_out actions from all items, eliminating duplicates
        self.accumulate_unique_actions('folder_out', items_in_folder)
        self.batch_accum.indent_level -= 1
    # actions instructions for sources that do not need copying, here folder_name is the sync folder
    for folder_name, items_in_folder in self.installState.no_copy_items_by_sync_folder.iteritems():
        # calculate total number of actions for all items relating to folder_name, if 0 we can skip this folder altogether
        num_actions_for_folder = reduce(lambda x, y: x+len(self.install_definitions_index[y].all_action_list()), items_in_folder, 0)
        logging.info("%d non-copy items folder %s (%s)", num_actions_for_folder, folder_name, var_list.resolve(folder_name))
        if 0 == num_actions_for_folder:
            continue
        self.batch_accum += self.platform_helper.new_line()
        self.batch_accum += self.platform_helper.cd(folder_name)
        self.batch_accum.indent_level += 1
        # accumulate folder_in actions from all items, eliminating duplicates
        self.accumulate_unique_actions('folder_in', items_in_folder)
        for IID in items_in_folder:
            with self.install_definitions_index[IID]:
                self.batch_accum += var_list.resolve_to_list("$(iid_action_list_before)")
                self.batch_accum += var_list.resolve_to_list("$(iid_action_list_after)")
        # accumulate folder_out actions from all items, eliminating duplicates
        self.accumulate_unique_actions('folder_out', items_in_folder)
        self.batch_accum += self.platform_helper.progress("{folder_name}".format(**locals()))
        self.batch_accum.indent_level -= 1
    self.accumulate_unique_actions('copy_out', self.installState.full_install_items)
    self.platform_helper.copy_tool.finalize()
    # messages about orphan iids
    for iid in self.installState.orphan_install_items:
        logging.info("Orphan item: %s", iid)
        self.batch_accum += self.platform_helper.echo("Don't know how to install "+iid)
    self.batch_accum += self.platform_helper.progress("Done copy")
def create_copy_instructions(self):
    """Build the full 'copy' section of the batch file using the have-info
    map: create target folders, run pre/post actions, emit per-folder copy
    and unwtar instructions, handle no-copy items, copy the have-info file
    to the site bookkeeping location, and report orphan iids."""
    self.have_map = svnTree.SVNTree()
    # read NEW_HAVE_INFO_MAP_PATH and not HAVE_INFO_MAP_PATH. Copy might be called after the sync batch file was created
    # but before it was executed. HAVE_INFO_MAP_PATH is only created
    # when the sync batch file is executed.
    have_info_path = var_stack.resolve("$(NEW_HAVE_INFO_MAP_PATH)")
    self.have_map.read_info_map_from_file(have_info_path, a_format="text")
    # copy and actions instructions for sources
    self.batch_accum.set_current_section('copy')
    self.batch_accum += self.platform_helper.progress("Starting copy from $(LOCAL_REPO_SYNC_DIR)")
    sorted_target_folder_list = sorted(self.installState.install_items_by_target_folder, key=lambda fold: var_stack.resolve(fold))
    # first create all target folders so to avoid dependency order problems such as creating links between folders
    if len(sorted_target_folder_list) > 0:
        self.batch_accum += self.platform_helper.progress("Creating folders...")
    for folder_name in sorted_target_folder_list:
        self.batch_accum += self.platform_helper.mkdir_with_owner(folder_name)
    self.batch_accum += self.platform_helper.progress("Create folders done")
    self.accumulate_unique_actions('pre_copy', self.installState.full_install_items)
    # Mac-only preparation before copy (e.g. symlink handling)
    if 'Mac' in var_stack.resolve_to_list("$(__CURRENT_OS_NAMES__)") and 'Mac' in var_stack.resolve_to_list("$(TARGET_OS)"):
        self.pre_copy_mac_handling()
    # per-target-folder copy instructions
    for folder_name in sorted_target_folder_list:
        items_in_folder = self.installState.install_items_by_target_folder[folder_name]
        logging.info("folder %s", var_stack.resolve(folder_name))
        self.batch_accum += self.platform_helper.new_line()
        self.batch_accum += self.platform_helper.cd(folder_name)
        # accumulate pre_copy_to_folder actions from all items, eliminating duplicates
        self.accumulate_unique_actions('pre_copy_to_folder', items_in_folder)
        batch_accum_len_before = len(self.batch_accum)
        self.batch_accum += self.platform_helper.copy_tool.begin_copy_folder()
        for IID in items_in_folder:
            with self.install_definitions_index[IID] as installi:
                for source_var in var_stack.get_configVar_obj("iid_source_var_list"):
                    source = var_stack.resolve_var_to_list(source_var)
                    self.batch_accum += var_stack.resolve_var_to_list_if_exists("iid_action_list_pre_copy_item")
                    self.create_copy_instructions_for_source(source)
                    self.batch_accum += var_stack.resolve_var_to_list_if_exists("iid_action_list_post_copy_item")
                self.batch_accum += self.platform_helper.progress("Copy {installi.name}".format(**locals()))
        self.batch_accum += self.platform_helper.copy_tool.end_copy_folder()
        logging.info("... copy actions: %d", len(self.batch_accum) - batch_accum_len_before)
        # expand wtar archives that were copied into this folder
        self.batch_accum += self.platform_helper.progress("Expanding files...")
        self.batch_accum += self.platform_helper.unwtar_current_folder(no_artifacts=True)
        self.batch_accum += self.platform_helper.progress("Expand files done")
        if 'Mac' in var_stack.resolve_to_list("$(__CURRENT_OS_NAMES__)") and 'Mac' in var_stack.resolve_to_list("$(TARGET_OS)"):
            self.batch_accum += self.platform_helper.progress("Resolving symlinks...")
            self.batch_accum += self.platform_helper.resolve_symlink_files()
            self.batch_accum += self.platform_helper.progress("Resolve symlinks done")
        # accumulate post_copy_to_folder actions from all items, eliminating duplicates
        self.accumulate_unique_actions('post_copy_to_folder', items_in_folder)
        self.batch_accum.indent_level -= 1
    # actions instructions for sources that do not need copying, here folder_name is the sync folder
    for folder_name, items_in_folder in self.installState.no_copy_items_by_sync_folder.iteritems():
        self.batch_accum += self.platform_helper.new_line()
        self.batch_accum += self.platform_helper.cd(folder_name)
        self.batch_accum.indent_level += 1
        # accumulate pre_copy_to_folder actions from all items, eliminating duplicates
        self.accumulate_unique_actions('pre_copy_to_folder', items_in_folder)
        for IID in items_in_folder:
            with self.install_definitions_index[IID]:
                for source_var in var_stack.resolve_var_to_list_if_exists("iid_source_var_list"):
                    source = var_stack.resolve_var_to_list(source_var)
                    source_folder, source_name = os.path.split(source[0])
                    to_untar = os.path.join(folder_name, source_name)
                    self.batch_accum += self.platform_helper.unwtar_something(to_untar)
                self.batch_accum += var_stack.resolve_var_to_list_if_exists("iid_action_list_pre_copy_item")
                self.batch_accum += var_stack.resolve_var_to_list_if_exists("iid_action_list_post_copy_item")
        # accumulate post_copy_to_folder actions from all items, eliminating duplicates
        self.accumulate_unique_actions('post_copy_to_folder', items_in_folder)
        self.batch_accum += self.platform_helper.progress("{folder_name}".format(**locals()))
        self.batch_accum.indent_level -= 1
    self.accumulate_unique_actions('post_copy', self.installState.full_install_items)
    self.batch_accum.set_current_section('post-copy')
    # Copy have_info file to "site" (e.g. /Library/Application support/... or c:\ProgramData\...)
    # for reference. But when preparing offline installers the site location is the same as the sync location
    # so copy should be avoided.
    if var_stack.resolve("$(HAVE_INFO_MAP_PATH)") != var_stack.resolve("$(SITE_HAVE_INFO_MAP_PATH)"):
        self.batch_accum += self.platform_helper.mkdir_with_owner("$(SITE_REPO_BOOKKEEPING_DIR)")
        self.batch_accum += self.platform_helper.copy_file_to_file("$(HAVE_INFO_MAP_PATH)", "$(SITE_HAVE_INFO_MAP_PATH)")
    self.platform_helper.copy_tool.finalize()
    self.create_require_file_instructions()
    # messages about orphan iids
    for iid in self.installState.orphan_install_items:
        logging.info("Orphan item: %s", iid)
        self.batch_accum += self.platform_helper.echo("Don't know how to install " + iid)
    self.batch_accum += self.platform_helper.progress("Done copy")
def do_upload_to_s3_aws_for_revision(self, accum):
    """Accumulate shell instructions that upload one repo revision folder to
    S3 with `aws s3 sync`: prune files/folders not belonging to
    $(__CURR_REPO_REV__), drop broken symlinks, sync, then write the
    up-repo-rev and stamp files.

    Raises ValueError when the info map contains an item whose last_rev is
    greater than the revision being uploaded.
    """
    map_file_path = 'instl/info_map.txt'
    info_map_path = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/$(__CURR_REPO_REV__)/"+map_file_path)
    repo_rev = int(var_list.resolve("$(__CURR_REPO_REV__)"))
    self.svnTree.clear_subs()
    self.read_info_map_file(info_map_path)
    accum += self.platform_helper.cd("$(ROOT_LINKS_FOLDER_REPO)/$(__CURR_REPO_REV__)")
    if 'Mac' in var_list.resolve_to_list("$(__CURRENT_OS_NAMES__)"):
        accum += "find . -name .DS_Store -delete"
    # Files a folders that do not belong to __CURR_REPO_REV__ should not be uploaded.
    # Since aws sync command uploads the whole folder, we delete from disk all files
    # and folders that should not be uploaded.
    # To save delete instructions for every file we rely on the fact that each folder
    # has last_rev which is the maximum last_rev of it's sub-items.
    self.svnTree.remove_item_at_path('instl') # never remove the instl folder
    from collections import deque
    # breadth-first walk of the info map tree
    dir_queue = deque()
    dir_queue.append(self.svnTree)
    while len(dir_queue) > 0:
        curr_item = dir_queue.popleft()
        files, dirs = curr_item.unsorted_sub_items()
        for file_item in files:
            if file_item.last_rev() > repo_rev:
                raise ValueError(str(file_item)+" last_rev > repo_rev "+str(repo_rev))
            elif file_item.last_rev() < repo_rev:
                accum += self.platform_helper.rmfile(file_item.full_path())
                accum += self.platform_helper.progress("rmfile "+file_item.full_path())
        for dir_item in dirs:
            if dir_item.last_rev() > repo_rev:
                raise ValueError(str(dir_item)+" last_rev > repo_rev "+str(repo_rev))
            elif dir_item.last_rev() < repo_rev: # whole folder should be removed
                accum += self.platform_helper.rmdir(dir_item.full_path(), recursive=True)
                accum += self.platform_helper.progress("rmdir "+dir_item.full_path())
            else:
                dir_queue.append(dir_item) # need to check inside the folder
    # remove broken links, aws cannot handle them
    accum += " ".join( ("find", ".", "-type", "l", "!", "-exec", "test", "-e", "{}", "\;", "-exec", "rm", "-f", "{}", "\;") )
    # sync the cleaned folder to the bucket; stamp files are excluded
    accum += " ".join(["aws", "s3", "sync", ".", "s3://$(S3_BUCKET_NAME)/$(REPO_NAME)/$(__CURR_REPO_REV__)", "--acl", "public-read", "--exclude", '"*.DS_Store"', "--exclude", '"$(UP_2_S3_STAMP_FILE_NAME)"', "--exclude", '"$(CREATE_LINKS_STAMP_FILE_NAME)"' ])
    up_repo_rev_file_command_parts = [self.platform_helper.run_instl(), "up-repo-rev", "--config-file", '"$(__CONFIG_FILE_PATH__)"', "--out", "up_repo_rev.$(__CURR_REPO_REV__)", "--just-with-number", "$(__CURR_REPO_REV__)", "--run"]
    accum += " ".join(up_repo_rev_file_command_parts)
    accum += self.platform_helper.progress("up-repo-rev file - just with number")
    # write the stamp file that marks this revision as uploaded
    accum += " ".join(["echo", "-n", "$(BASE_REPO_REV)", ">", "$(UP_2_S3_STAMP_FILE_NAME)"])
    accum += self.platform_helper.progress("Uploaded $(ROOT_LINKS_FOLDER_REPO)/$(__CURR_REPO_REV__)")
    accum += self.platform_helper.echo("done up2s3 revision $(__CURR_REPO_REV__)")