Example #1
    def read_include_node(self, i_node, *args, **kwargs):
        if i_node.isScalar():
            kwargs['original-path-to-file'] = i_node.value
            resolved_file_name = config_vars.resolve_str(i_node.value)
            self.read_yaml_file(resolved_file_name, *args, **kwargs)
        elif i_node.isSequence():
            for sub_i_node in i_node:
                self.read_include_node(sub_i_node, *args, **kwargs)
        elif i_node.isMapping():
            if "url" in i_node:
                file_was_downloaded_and_read = False
                kwargs['original-path-to-file'] = i_node["url"].value
                resolved_file_url = config_vars.resolve_str(
                    i_node["url"].value)
                expected_checksum = None
                if "checksum" in i_node:
                    expected_checksum = config_vars.resolve_str(
                        i_node["checksum"].value)

                try:
                    file_path = utils.download_from_file_or_url(
                        in_url=resolved_file_url,
                        config_vars=config_vars,
                        in_target_path=None,
                        translate_url_callback=connectionBase.translate_url,
                        cache_folder=self.get_aux_cache_dir(make_dir=True),
                        expected_checksum=expected_checksum)
                    self.read_yaml_file(file_path, *args, **kwargs)
                    file_was_downloaded_and_read = True
                except (FileNotFoundError, urllib.error.URLError):
                    ignore = kwargs.get('ignore_if_not_exist', False)
                    if ignore:
                        self.progress(
                            f"'ignore_if_not_exist' specified, ignoring FileNotFoundError for {resolved_file_url}"
                        )
                    else:
                        raise

                if "copy" in i_node and file_was_downloaded_and_read:
                    self.batch_accum.set_current_section('post')
                    for copy_destination in i_node["copy"]:
                        need_to_copy = True
                        destination_file_resolved_path = utils.ExpandAndResolvePath(
                            config_vars.resolve_str(copy_destination.value))
                        if destination_file_resolved_path.is_file() and expected_checksum is not None:
                            checksums_match = utils.check_file_checksum(
                                file_path=destination_file_resolved_path,
                                expected_checksum=expected_checksum)
                            need_to_copy = not checksums_match
                        if need_to_copy:
                            self.batch_accum += MakeDir(
                                destination_file_resolved_path.parent,
                                chowner=True)
                            self.batch_accum += CopyFileToFile(
                                file_path,
                                destination_file_resolved_path,
                                hard_links=False,
                                copy_owner=True)
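
The include mechanism above leans on plain $(VAR) substitution by config_vars.resolve_str. A minimal sketch of that substitution, with hypothetical variable names and values:

    # hypothetical values; resolve_str substitutes $(VAR) references in place
    config_vars["BASE_LINKS_URL"] = "https://example.com/links"
    resolved = config_vars.resolve_str("$(BASE_LINKS_URL)/index.yaml")
    # resolved == "https://example.com/links/index.yaml"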
Example #2
 def create_create_folders_instructions(self, folder_list: List[str]) -> None:
     with self.batch_accum.sub_accum(Stage("create folders")) as create_folders_section:
         kwargs_defaults = {'remove_obstacles': True, 'chowner': False, 'recursive_chmod': False}
         third_party_folders = [utils.ExpandAndResolvePath(config_vars.resolve_str(os.fspath(p))) for p in config_vars.get("THIRD_PARTY_FOLDERS", []).list()]
         for target_folder_path in folder_list:
             target_folder_path = utils.ExpandAndResolvePath(config_vars.resolve_str(os.fspath(target_folder_path)))
             our_folder = target_folder_path not in third_party_folders  # chown only folders that are ours, not third-party
             create_folders_section += MakeDir(target_folder_path, chowner=our_folder, recursive_chmod=False)
Example #3
    def create_remove_instructions(self):

        have_info_path = config_vars["HAVE_INFO_MAP_PATH"].Path()
        if not have_info_path or not have_info_path.is_file():
            have_info_path = config_vars["SITE_HAVE_INFO_MAP_PATH"].Path()
        self.info_map_table.read_from_file(have_info_path,
                                           disable_indexes_during_read=True)
        self.calc_iid_to_name_and_version()

        self.batch_accum.set_current_section('remove')
        self.batch_accum += Progress("Start remove")
        sorted_target_folder_list = sorted(
            self.all_iids_by_target_folder,
            key=lambda fold: config_vars.resolve_str(fold),
            reverse=True)

        self.batch_accum += self.accumulate_unique_actions_for_active_iids(
            'pre_remove',
            list(config_vars["__FULL_LIST_OF_INSTALL_TARGETS__"]))

        for folder_name in sorted_target_folder_list:
            with self.batch_accum.sub_accum(
                    Stage("Remove from folder",
                          folder_name)) as folder_accum_transaction:
                folder_accum_transaction += self.create_remove_previous_sources_instructions_for_target_folder(
                    folder_name)
                config_vars["__TARGET_DIR__"] = Path(folder_name)
                items_in_folder = self.all_iids_by_target_folder[folder_name]

                folder_accum_transaction += self.accumulate_unique_actions_for_active_iids(
                    'pre_remove_from_folder', items_in_folder)

                for IID in items_in_folder:
                    name_for_iid = self.name_for_iid(iid=IID)
                    with folder_accum_transaction.sub_accum(
                            Stage("Remove",
                                  name_for_iid)) as iid_accum_transaction:
                        sources_for_iid = self.items_table.get_sources_for_iid(
                            IID)
                        resolved_sources_for_iid = [
                            (config_vars.resolve_str(s[0]), s[1])
                            for s in sources_for_iid
                        ]
                        for source in resolved_sources_for_iid:
                            _, source_leaf = os.path.split(source[0])
                            iid_accum_transaction += self.accumulate_actions_for_iid(
                                iid=IID, detail_name="pre_remove_item")
                            iid_accum_transaction += self.create_remove_instructions_for_source(
                                IID, folder_name, source)
                            iid_accum_transaction += self.accumulate_actions_for_iid(
                                iid=IID, detail_name="post_remove_item")

                folder_accum_transaction += self.accumulate_unique_actions_for_active_iids(
                    'post_remove_from_folder', items_in_folder)

        self.batch_accum += self.accumulate_unique_actions_for_active_iids(
            'post_remove',
            list(config_vars["__FULL_LIST_OF_INSTALL_TARGETS__"]))
Example #4
 def test_array(self):
     config_vars["PUSHKIN"] = "1", "2", "3"
     self.assertEqual("123", config_vars["PUSHKIN"].str())
     self.assertEqual("123", config_vars.resolve_str("$(PUSHKIN)"))
     self.assertEqual("1", config_vars.resolve_str("$(PUSHKIN[0])"))
     self.assertEqual("2", config_vars.resolve_str("$(PUSHKIN[1])"))
     self.assertEqual("3", config_vars.resolve_str("$(PUSHKIN[2])"))
     self.assertEqual(
         "321",
         config_vars.resolve_str("$(PUSHKIN[2])$(PUSHKIN[1])$(PUSHKIN[0])"))
Example #5
    def __call__(self, *args, **kwargs) -> None:
        # fill the iid_to_svn_item_t table
        self.info_map_table.populate_IIDToSVNItem()

        # get the list of info map file names
        info_map_to_item = dict()
        all_info_map_names = self.items_table.get_unique_detail_values('info_map')
        for infomap_file_name in all_info_map_names:
            info_map_file_path = self.work_folder.joinpath(infomap_file_name)
            if info_map_file_path.is_file():
                log.info(f"{infomap_file_name} was found so no need to create it")
                # file already exists, probably copied from the "Common" repository
                # just check that the file is also zipped
                zip_infomap_file_name = config_vars.resolve_str(infomap_file_name+"$(WZLIB_EXTENSION)")
                zip_info_map_file_path = self.work_folder.joinpath(zip_infomap_file_name)
                if not zip_info_map_file_path.is_file():
                    raise FileNotFoundError(f"found {info_map_file_path} but not {zip_info_map_file_path}")
            else:
                self.info_map_table.mark_items_required_by_infomap(infomap_file_name)
                info_map_items = self.info_map_table.get_required_items()
                info_map_to_item[infomap_file_name] = info_map_items

        files_to_add_to_default_info_map = list()  # the named info_map files and their wzip version should be added to the default info_map
        # write each info map to file
        for infomap_file_name, info_map_items in info_map_to_item.items():
            if info_map_items:  # could be that no items are linked to the info map file
                info_map_file_path = self.work_folder.joinpath(infomap_file_name)
                self.info_map_table.write_to_file(in_file=info_map_file_path, items_list=info_map_items, field_to_write=self.fields_relevant_to_info_map)
                files_to_add_to_default_info_map.append(info_map_file_path)

                zip_infomap_file_name = config_vars.resolve_str(infomap_file_name+"$(WZLIB_EXTENSION)")
                zip_info_map_file_path = self.work_folder.joinpath(zip_infomap_file_name)
                with Wzip(info_map_file_path, self.work_folder, own_progress_count=0) as wzipper:
                    wzipper()
                files_to_add_to_default_info_map.append(zip_info_map_file_path)

        # add the default info map
        default_info_map_file_name = str(config_vars["MAIN_INFO_MAP_FILE_NAME"])
        default_info_map_file_path = self.work_folder.joinpath(default_info_map_file_name)
        info_map_items = self.info_map_table.get_items_for_default_infomap()
        self.info_map_table.write_to_file(in_file=default_info_map_file_path, items_list=info_map_items, field_to_write=self.fields_relevant_to_info_map)
        with Wzip(default_info_map_file_path, self.work_folder, own_progress_count=0) as wzipper:
            wzipper()

        # add a line to default info map for each non default info_map created above
        with utils.utf8_open_for_write(default_info_map_file_path, "a") as wfd:
            for file_to_add in files_to_add_to_default_info_map:
                file_checksum = utils.get_file_checksum(file_to_add)
                file_size = file_to_add.stat().st_size
                # todo: make path relative
                line_for_main_info_map = f"instl/{file_to_add.name}, f, {config_vars['TARGET_REPO_REV'].str()}, {file_checksum}, {file_size}\n"
                wfd.write(line_for_main_info_map)
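
The line appended above follows the same comma-separated field order written by write_to_file elsewhere in these examples (see field_to_write in Example #28): path, flags, revision, checksum, size. An illustrative line, with made-up values:

    # illustrative only: path, flags, revision, checksum, size
    line = "instl/V12_info_map.txt.wzip, f, 345, 2f7a09c41ab, 10342\n"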
Example #6
 def activate_repo_rev(self):
     try:
         if self.redis_conn:
             current_items = self.tree.get_children()
             domain_repo = self.tk_vars["DOMAIN_REPO_TO_ACTIVATE"].get()
             if domain_repo in current_items:
                 host = self.redis_conn.host
                 repo_rev = self.tk_vars["REPO_REV_TO_ACTIVATE"].get()
                 redis_value = config_vars.resolve_str(":".join(('activate', domain_repo, str(repo_rev))))
                 redis_key   = config_vars.resolve_str(":".join(("$(REDIS_KEYS_PREFIX)", host, "waiting_list")))
                 answer = messagebox.askyesno("Activate repo-rev", f"Activate repo-rev {repo_rev} on {domain_repo} ?")
                 if answer:
                     self.redis_conn.lpush(redis_key, redis_value)
     except Exception as ex:
         print(f"activate_repo_rev exception {ex}")
Example #7
 def test_Plist_for_native_instruments(self):
     config_vars["Plist_for_native_instruments"] = r'''ShellCommand('"$(LOCAL_REPO_SYNC_DIR)/Mac/Utilities/plist/plist_creator.sh" $(__Plist_for_native_instruments_1__) $(__Plist_for_native_instruments_2__)', ignore_all_errors=True)'''
     o = config_vars.resolve_str(
         '$(Plist_for_native_instruments<"Aphex Vintage Exciter", "/Applications/Waves/Data/NKS FX/">)'
     )
     print(o)
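
The test exercises parameterized config vars: the arguments inside $(name<...>) appear to bind to $(__name_1__), $(__name_2__), ... placeholders in the variable's value. A smaller sketch of the same pattern, with a hypothetical variable:

    # hypothetical variable; assumes positional args bind to $(__<name>_<n>__)
    config_vars["GREETING"] = "Hello $(__GREETING_1__) and $(__GREETING_2__)"
    config_vars.resolve_str('$(GREETING<"Alice", "Bob">)')
    # expected: 'Hello Alice and Bob'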
Example #8
    def init_default_client_vars(self):

        if "SYNC_BASE_URL" in config_vars:
            resolved_sync_base_url = config_vars["SYNC_BASE_URL"].str()
            url_main_item = utils.main_url_item(resolved_sync_base_url)
            config_vars["SYNC_BASE_URL_MAIN_ITEM"] = url_main_item
        # TARGET_OS_NAMES defaults to __CURRENT_OS_NAMES__, which is not what we want when
        # syncing to an OS other than the current one
        if config_vars["TARGET_OS"].str() != config_vars["__CURRENT_OS__"].str(
        ):
            target_os_names = list(config_vars[config_vars.resolve_str(
                "$(TARGET_OS)_ALL_OS_NAMES")])
            config_vars["TARGET_OS_NAMES"] = target_os_names
            second_name: str = config_vars["TARGET_OS"].str()
            if len(target_os_names) > 1:
                second_name = target_os_names[1]
            config_vars["TARGET_OS_SECOND_NAME"] = second_name

        if "REPO_TYPE" in config_vars:  # some commands do not need to have REPO_TYPE
            self.read_defaults_file(str(config_vars["REPO_TYPE"]))

        # AUXILIARY_IIDS are iids that are not real products such as UNINSTALL_AS_... iids
        if "AUXILIARY_IIDS" not in config_vars:
            log.warning(f"could not find configVar 'AUXILIARY_IIDS'")
        self.auxiliary_iids.extend(list(config_vars.get("AUXILIARY_IIDS", [])))

        config_vars["__MAIN_DRIVE_NAME__"] = utils.get_main_drive_name()
Example #9
    def create_copy_instructions(self) -> None:
        self.progress("create copy instructions ...")
        # If we got here while in synccopy command, there is no need to read the info map again.
        # If we got here while in copy command, read HAVE_INFO_MAP_COPY_PATH which defaults to NEW_HAVE_INFO_MAP_PATH.
        # Copy might be called after the sync batch file was created but before it was executed
        if len(self.info_map_table.files_read_list) == 0:
            have_info_path = os.fspath(config_vars["HAVE_INFO_MAP_COPY_PATH"])
            self.info_map_table.read_from_file(have_info_path, disable_indexes_during_read=True)

        self.avoid_copy_markers = list(config_vars.get('AVOID_COPY_MARKERS', []))

        # copy and actions instructions for sources
        self.batch_accum.set_current_section('copy')
        self.batch_accum += self.create_sync_folder_manifest_command("before-copy", back_ground=True)
        self.batch_accum += Progress("Start copy from $(COPY_SOURCES_ROOT_DIR)")

        sorted_target_folder_list = sorted(self.all_iids_by_target_folder,
                                           key=lambda fold: config_vars.resolve_str(fold))

        # first create all target folders, to avoid dependency-order problems such as creating links between folders
        self.create_create_folders_instructions(sorted_target_folder_list)

        self.batch_accum += self.accumulate_unique_actions_for_active_iids('pre_copy')

        if self.mac_current_and_target:
            self.pre_copy_mac_handling()

        remove_previous_sources = bool(config_vars.get("REMOVE_PREVIOUS_SOURCES", True))
        for target_folder_path in sorted_target_folder_list:
            if remove_previous_sources:
                with self.batch_accum.sub_accum(Stage("remove_previous_sources_instructions_for_target_folder", target_folder_path)) as seb_sec:
                    seb_sec += self.create_remove_previous_sources_instructions_for_target_folder(target_folder_path)
            self.create_copy_instructions_for_target_folder(target_folder_path)

        # actions instructions for sources that do not need copying, here folder_name is the sync folder
        for sync_folder_name in sorted(self.no_copy_iids_by_sync_folder.keys()):
            with self.batch_accum.sub_accum(CdStage("create_copy_instructions_for_no_copy_folder", sync_folder_name)) as folder_accum:
                folder_accum += self.create_copy_instructions_for_no_copy_folder(sync_folder_name)

        self.progress(self.bytes_to_copy, "bytes to copy")

        self.batch_accum += self.accumulate_unique_actions_for_active_iids('post_copy')

        self.batch_accum.set_current_section('post-copy')
        # Copy have_info file to "site" (e.g. /Library/Application support/... or c:\ProgramData\...)
        # for reference. But when preparing offline installers the site location is the same as the sync location
        # so copy should be avoided.
        if os.fspath(config_vars["HAVE_INFO_MAP_PATH"]) != os.fspath(config_vars["SITE_HAVE_INFO_MAP_PATH"]):
            self.batch_accum += MakeDir("$(SITE_REPO_BOOKKEEPING_DIR)", chowner=True)
            self.batch_accum += CopyFileToFile("$(HAVE_INFO_MAP_PATH)", "$(SITE_HAVE_INFO_MAP_PATH)", hard_links=False, copy_owner=True)

        self.create_require_file_instructions()

        # messages about orphan iids
        for iid in sorted(list(config_vars["__ORPHAN_INSTALL_TARGETS__"])):
            self.batch_accum += Echo(f"Don't know how to install {iid}")
        self.batch_accum += Progress("Done copy")
        self.progress("create copy instructions done")
        self.progress("")
Example #10
    def create_copy_instructions_for_target_folder(self,
                                                   target_folder_path) -> None:
        with self.batch_accum.sub_accum(
                CdStage("copy_to_folder",
                        target_folder_path)) as copy_to_folder_accum:
            self.current_destination_folder = target_folder_path
            num_items_copied_to_folder = 0
            items_in_folder = sorted(
                self.all_iids_by_target_folder[target_folder_path])

            # accumulate pre_copy_to_folder actions from all items, eliminating duplicates
            copy_to_folder_accum += self.accumulate_unique_actions_for_active_iids(
                'pre_copy_to_folder', items_in_folder)

            num_symlink_items: int = 0
            for IID in items_in_folder:
                name_and_version = self.name_and_version_for_iid(iid=IID)
                with copy_to_folder_accum.sub_accum(
                        Stage("copy", name_and_version)) as iid_accum:
                    self.current_iid = IID
                    sources_for_iid = self.items_table.get_sources_for_iid(IID)
                    resolved_sources_for_iid = [(config_vars.resolve_str(s[0]),
                                                 s[1])
                                                for s in sources_for_iid]
                    for source in resolved_sources_for_iid:
                        self.progress(
                            f"create copy instructions of {source[0]} to {config_vars.resolve_str(target_folder_path)}"
                        )
                        with iid_accum.sub_accum(
                                Stage("copy source",
                                      source[0])) as source_accum:
                            num_items_copied_to_folder += 1
                            source_accum += self.accumulate_actions_for_iid(
                                iid=IID,
                                detail_name="pre_copy_item",
                                message=None)
                            source_accum += self.create_copy_instructions_for_source(
                                source, name_and_version)
                            source_accum += self.accumulate_actions_for_iid(
                                iid=IID,
                                detail_name="post_copy_item",
                                message=None)
                            if self.mac_current_and_target:
                                num_symlink_items += self.info_map_table.count_symlinks_in_dir(
                                    source[0])
            self.current_iid = None

            # resolve symlinks (Mac only), but only if items were actually copied
            if self.mac_current_and_target:
                if num_items_copied_to_folder > 0 and num_symlink_items > 0:
                    copy_to_folder_accum += ResolveSymlinkFilesInFolder(
                        target_folder_path,
                        own_progress_count=num_symlink_items)

            # accumulate post_copy_to_folder actions from all items, eliminating duplicates
            if copy_to_folder_accum.is_essential():
                copy_to_folder_accum += self.accumulate_unique_actions_for_active_iids(
                    'post_copy_to_folder', items_in_folder)
            self.current_destination_folder = None
Example #11
 def pre_resolve_path(self, path_to_resolve) -> str:
     """ for some paths we cannot wait for resolution in the batch file"""
     resolved_path = config_vars.resolve_str(path_to_resolve)
     try:
         resolved_path = str(Path(resolved_path).resolve())
     except Exception:
         pass  # if the path cannot be resolved on disk, keep the config-var-resolved string
     return resolved_path
Example #12
 def get_direct_sync_status_from_indicator(self, direct_sync_indicator):
     retVal = False
     if direct_sync_indicator is not None:
         try:
             retVal = utils.str_to_bool_int(config_vars.resolve_str(direct_sync_indicator))
         except Exception:
             pass  # a malformed indicator counts as "not direct sync"
     return retVal
Example #13
 def get_version_str(self, short=False):
     instl_ver_str = ".".join(list(config_vars["__INSTL_VERSION__"]))
     if not short:
         if "__PLATFORM_NODE__" not in config_vars:
             config_vars.update({"__PLATFORM_NODE__": platform.node()})
         instl_ver_str = config_vars.resolve_str(
             "$(INSTL_EXEC_DISPLAY_NAME) version " + instl_ver_str +
             " $(__COMPILATION_TIME__) $(__PLATFORM_NODE__)")
     return instl_ver_str
Example #14
 def __call__(self, *args, **kwargs) -> None:
     pybatch.PythonBatchCommandBase.__call__(self, *args, **kwargs)
     if self.config_files is not None:
         reader = ConfigVarYamlReader(config_vars)
         for config_file in self.config_files:
             reader.read_yaml_file(config_file)
     with utils.utf8_open_for_read(self.unresolved_file, "r") as rfd:
         text_to_resolve = rfd.read()
     resolved_text = config_vars.resolve_str(text_to_resolve)
     with utils.utf8_open_for_write(self.resolved_file, "w") as wfd:
         wfd.write(resolved_text)
Example #15
    def prepare_command_list_from_file(self):
        command_lines = list()
        for config_file in config_vars["__CONFIG_FILE__"].list():
            with utils.utf8_open_for_read(os.fspath(config_file), "r") as rfd:
                command_lines.extend(rfd.readlines())

        command_list = list()
        for command_line in command_lines:
            resolved_command_line = config_vars.resolve_str(
                command_line.strip())
            argv = shlex.split(resolved_command_line)
            command_list.append(argv)
        return command_list
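
Each line is resolved before shlex.split, so quoting written in the config file survives variable expansion, and expanded values containing spaces stay as a single argument. For example (hypothetical line and values):

    import shlex
    # a config line like: instl sync --in "$(THE_INPUT_FILE)"
    # after resolution:
    shlex.split('instl sync --in "/tmp/my input.yaml"')
    # -> ['instl', 'sync', '--in', '/tmp/my input.yaml']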
Example #16
    def create_remove_previous_sources_instructions_for_target_folder(self, target_folder_path):
        retVal = AnonymousAccum()
        target_folder_path_resolved = utils.ExpandAndResolvePath(config_vars.resolve_str(target_folder_path))
        if target_folder_path_resolved.is_dir():  # no need to remove previous sources if folder does not exist
            iids_in_folder = self.all_iids_by_target_folder[target_folder_path]
            previous_sources = self.items_table.get_details_and_tag_for_active_iids("previous_sources", unique_values=True, limit_to_iids=iids_in_folder)

            if len(previous_sources) > 0:
                with retVal.sub_accum(Cd(target_folder_path)) as remove_prev_section:
                    remove_prev_section += Progress(f"remove previous versions {target_folder_path}")

                    for previous_source in previous_sources:
                        remove_prev_section += self.create_remove_previous_sources_instructions_for_source(target_folder_path, previous_source)
        return retVal
Example #17
 def read_previous_requirements(self):
     require_file_path = config_vars["SITE_REQUIRE_FILE_PATH"].Path()
     with Chmod(require_file_path, "a+rw", ignore_all_errors=True) as chmoder:
         chmoder()
     try:
         self.read_yaml_file(require_file_path, ignore_if_not_exist=True)
     except Exception as ex:
         log.warning(f"Exception reading {require_file_path}: {ex}")
         renamed_require_file_path = Path(require_file_path.parent, config_vars.resolve_str("$(SITE_REQUIRE_FILE_NAME).failed_to_read"))
         try:
             require_file_path.rename(renamed_require_file_path)
             log.warning(f"moved require.yaml to {renamed_require_file_path}")
         except Exception as ex_in_ex:
             log.warning(f"failed to moved require.yaml to {renamed_require_file_path}: {ex_in_ex}")
Example #18
    def get_default_db_file(self):
        if "__MAIN_DB_FILE__" not in config_vars:
            db_base_path = None
            if "__MAIN_OUT_FILE__" in config_vars:
                # try to set the db file next to the output file
                db_base_path = config_vars["__MAIN_OUT_FILE__"].Path()
            elif "__MAIN_INPUT_FILE__" in config_vars:
                # if no output file try next to the input file
                db_base_path = Path(
                    config_vars.resolve_str(
                        "$(__MAIN_INPUT_FILE__)-$(__MAIN_COMMAND__)"))
            else:
                # as last resort try the Logs folder
                logs_dir = utils.get_system_log_folder_path()
                if logs_dir.is_dir():
                    db_base_path = logs_dir.joinpath(
                        config_vars.resolve_str("instl-$(__MAIN_COMMAND__)"))

            if db_base_path:
                # set the proper extension
                db_base_path = db_base_path.parent.joinpath(
                    db_base_path.name +
                    config_vars.resolve_str(".$(DB_FILE_EXT)"))
                config_vars["__MAIN_DB_FILE__"] = db_base_path
        log.info(f'DB FILE: {config_vars["__MAIN_DB_FILE__"].str()}')
        if self._owner.refresh_db_file:
            if config_vars["__MAIN_DB_FILE__"].str() != ":memory:":
                db_base_path = config_vars["__MAIN_DB_FILE__"].Path()
                if db_base_path.is_file():
                    utils.safe_remove_file(db_base_path)
                    log.info(f'DB FILE REMOVED: {config_vars["__MAIN_DB_FILE__"].str()}')
                else:
                    log.info(f'DB FILE DOES NOT EXIST: {config_vars["__MAIN_DB_FILE__"].str()}')
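
Putting the pieces together: with hypothetical values __MAIN_INPUT_FILE__ == "/tmp/index.yaml", __MAIN_COMMAND__ == "sync" and DB_FILE_EXT == "sqlite", the computed path would be:

    from pathlib import Path
    # hypothetical values as above
    db_base_path = Path("/tmp/index.yaml-sync")
    db_base_path.parent.joinpath(db_base_path.name + ".sqlite")
    # -> Path("/tmp/index.yaml-sync.sqlite")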
Example #19
    def get_default_db_file(self):
        if "__MAIN_DB_FILE__" not in config_vars:
            db_base_path = None
            if "__MAIN_OUT_FILE__" in config_vars:
                # try to set the db file next to the output file
                db_base_path = config_vars["__MAIN_OUT_FILE__"].Path()
            elif "__MAIN_INPUT_FILE__" in config_vars:
                # if no output file try next to the input file
                db_base_path = Path(config_vars.resolve_str("$(__MAIN_INPUT_FILE__)-$(__MAIN_COMMAND__)"))
            else:
                # as last resort try the Logs folder on desktop if one exists
                logs_dir = Path(os.path.expanduser("~"), "Desktop", "Logs")
                if logs_dir.is_dir():
                    db_base_path = logs_dir.joinpath(config_vars.resolve_str("instl-$(__MAIN_COMMAND__)"))

            if db_base_path:
                # set the proper extension
                db_base_path = db_base_path.parent.joinpath(db_base_path.name + config_vars.resolve_str(".$(DB_FILE_EXT)"))
                config_vars["__MAIN_DB_FILE__"] = db_base_path

        if self._owner.refresh_db_file:
            db_base_path = config_vars["__MAIN_DB_FILE__"].Path()
            if db_base_path.is_file():
                utils.safe_remove_file(db_base_path)
Example #20
    def update_state(self, *args, **kwargs):  # ClientFrameController
        super().update_state(*args, **kwargs)
        self.update_client_input_file_combo()

        input_file_base_name = config_vars["CLIENT_GUI_IN_FILE"].Path().name
        config_vars["CLIENT_GUI_IN_FILE_NAME"] = input_file_base_name

        if self.tk_vars["CLIENT_GUI_CMD"].get() in list(config_vars["__COMMANDS_WITH_RUN_OPTION__"]):
            self.client_run_batch_file_checkbox.configure(state='normal')
        else:
            self.client_run_batch_file_checkbox.configure(state='disabled')

        command_line = " ".join(self.create_client_command_line())
        self.text_widget.configure(state='normal')
        self.text_widget.delete(1.0, END)
        self.text_widget.insert(END, config_vars.resolve_str(command_line))
        self.text_widget.configure(state='disabled')
Example #21
 def should_copy_source(self, source, target_folder_path):
     retVal = True
     reason_not_to_copy = None
     if not self.update_mode:
         top_src = config_vars["COPY_SOURCES_ROOT_DIR"].Path(
             resolve=True).joinpath(source[0])
         top_trg = Path(config_vars.resolve_str(target_folder_path),
                        top_src.name)
         if top_trg.exists():
             if source[1] == "!dir":
                 trg = top_trg.joinpath("Contents")
                 if trg.exists():
                     # look for Info.xml or Info.plist under Contents
                     src = top_src.joinpath("Contents")
                     for avoid_copy_marker in self.avoid_copy_markers:
                         src_marker = src.joinpath(avoid_copy_marker)
                         dst_marker = trg.joinpath(avoid_copy_marker)
                         same_checksums = utils.compare_files_by_checksum(
                             dst_marker, src_marker)
                         if same_checksums:
                             reason_not_to_copy = f"same checksum Contents/{avoid_copy_marker}"
                             retVal = False
                             break
                 else:
                     # look for Info.xml or Info.plist at the top level
                     for avoid_copy_marker in self.avoid_copy_markers:
                         src_marker = top_src.joinpath(avoid_copy_marker)
                         dst_marker = top_trg.joinpath(avoid_copy_marker)
                         same_checksums = utils.compare_files_by_checksum(
                             dst_marker, src_marker)
                         if same_checksums:
                             reason_not_to_copy = f"same checksum {avoid_copy_marker} in top level"
                             retVal = False
                             break
             elif source[1] == "!file":
                 try:
                     if top_src.stat().st_ino == top_trg.stat().st_ino:
                         retVal = False
                         reason_not_to_copy = f"same inode"
                 except:
                     pass
     return retVal, reason_not_to_copy
Example #22
    def handle_yaml_read_error(self, **kwargs):
        try:
            the_node_stack = kwargs.get('node-stack', "unknown")
            position_in_file = getattr(the_node_stack, "start_mark", "unknown")
            original_path_to_file = utils.ExpandAndResolvePath(
                config_vars.resolve_str(kwargs.get('original-path-to-file', '')))
            yaml_read_errors = list()
            yaml_read_errors.append("yaml_read_error:")
            if os.fspath(original_path_to_file) not in position_in_file:
                yaml_read_errors.append(
                    f"""    path-to-file: {original_path_to_file}""")
            yaml_read_errors.append(
                f"""    position-in-file: {position_in_file}""")
            yaml_read_errors.append(
                f"""    permissions: {utils.single_disk_item_listing(original_path_to_file)}"""
            )
            yaml_read_errors.append(
                f"""    exception: {kwargs.get('exception', '')}""")

            log.error("\n".join(yaml_read_errors))
        except Exception:
            pass  # error reporting must never raise
Example #23
    def update_state(self, *args, **kwargs):  # AdminFrameController
        super().update_state(*args, **kwargs)
        self.read_admin_config_files()

        input_file_base_name = Path(config_vars["ADMIN_GUI_LOCAL_CONFIG_FILE"].raw()).name
        config_vars["ADMIN_GUI_CONFIG_FILE_NAME"] = input_file_base_name

        if self.tk_vars["ADMIN_GUI_CMD"].get() in list(config_vars["__COMMANDS_WITH_LIMIT_OPTION__"]):
            self.limit_path_entry_widget.configure(state='normal')
        else:
            self.limit_path_entry_widget.configure(state='disabled')

        if self.tk_vars["ADMIN_GUI_CMD"].get() in list(config_vars["__COMMANDS_WITH_RUN_OPTION__"]):
            self.admin_run_batch_file_checkbox.configure(state='normal')
        else:
            self.admin_run_batch_file_checkbox.configure(state='disabled')

        command_line = " ".join([shlex.quote(p) for p in self.create_admin_command_line()])

        self.text_widget.configure(state='normal')
        self.text_widget.delete(1.0, END)
        self.text_widget.insert(END, config_vars.resolve_str(command_line))
        self.text_widget.configure(state='disabled')
Example #24
    def __call__(self, *args, **kwargs) -> None:
        pybatch.PythonBatchCommandBase.__call__(self, *args, **kwargs)
        with config_vars.push_scope_context() as scope_context:
            if self.temp_config_vars:
                config_vars.update(self.temp_config_vars)
            if self.config_files is not None:
                reader = ConfigVarYamlReader(config_vars)
                for config_file in self.config_files:
                    reader.read_yaml_file(config_file)
            with utils.utf8_open_for_read(self.unresolved_file, "r") as rfd:
                text_to_resolve = rfd.read()
            resolved_text = config_vars.resolve_str(text_to_resolve)

            if self.raise_if_unresolved:
                unresolved_re = re.compile(r"""\$\(.*?\)""")
                all_unresolved = unresolved_re.findall(resolved_text)
                if all_unresolved:
                    unresolved_references = ", ".join(list(
                        set(all_unresolved)))
                    raise ValueError(
                        f"unresolved config_vars in {self.unresolved_file}: {unresolved_references}"
                    )
            with utils.utf8_open_for_write(self.resolved_file, "w") as wfd:
                wfd.write(resolved_text)
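
The leftover-reference check is a plain regex scan: anything still shaped like $(...) after resolution is reported. For instance:

    import re
    re.findall(r"\$\(.*?\)", "all good, but $(MISSING_VAR) remains")
    # -> ['$(MISSING_VAR)']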
Example #25
    def __repr__(self):
        single_indent = "    "
        running_progress_count = self.initial_progress
        PythonBatchCommandBase.config_vars_for_repr = config_vars  # so __repr__ of object derived from PythonBatchCommandBase will resolve config_vars values

        def _create_unique_obj_name(obj, prog_count):
            try:
                _create_unique_obj_name.instance_counter += 1
            except AttributeError:
                _create_unique_obj_name.instance_counter = 1
            obj_name = camel_to_snake_case(
                f"{obj.__class__.__name__}_{_create_unique_obj_name.instance_counter:03}_{prog_count}"
            )
            return obj_name

        def _remark_helper(*the_remarks):
            retVal = ", ".join(
                str(remark) for remark in filter(None, the_remarks))
            if retVal:
                retVal = f"""  # {retVal}"""
            return retVal

        def _repr_helper(batch_items, io_str, indent):
            nonlocal running_progress_count
            indent_str = single_indent * indent
            if isinstance(batch_items, list):
                for item in batch_items:
                    _repr_helper(item, io_str, indent)
            else:
                running_progress_count += batch_items.own_progress_count
                batch_items.prog_num = running_progress_count
                if batch_items.call__call__ is False and batch_items.is_context_manager is False:
                    text_to_write = f"""{indent_str}{repr(batch_items)}\n"""
                    io_str.write(text_to_write)
                    _repr_helper(batch_items.child_batch_commands, io_str,
                                 indent)
                elif batch_items.call__call__ is False and batch_items.is_context_manager is True:
                    text_to_write = f"""{indent_str}with {repr(batch_items)}:\n"""
                    io_str.write(text_to_write)
                    if batch_items.child_batch_commands:
                        _repr_helper(batch_items.child_batch_commands, io_str,
                                     indent + 1)
                    else:
                        text_to_write = f"""{indent_str}{single_indent}pass\n"""
                        io_str.write(text_to_write)
                elif batch_items.call__call__ is True and batch_items.is_context_manager is False:
                    text_to_write = f"""{indent_str}{repr(batch_items)}()\n"""
                    io_str.write(text_to_write)
                    _repr_helper(batch_items.child_batch_commands, io_str,
                                 indent)
                elif batch_items.call__call__ is True and batch_items.is_context_manager is True:
                    obj_name = _create_unique_obj_name(batch_items,
                                                       running_progress_count)
                    text_to_write = f"""{indent_str}with {repr(batch_items)} as {obj_name}:\n"""
                    io_str.write(text_to_write)

                    text_to_write = f"""{indent_str}{single_indent}{obj_name}("""
                    text_to_write += ")\n"
                    io_str.write(text_to_write)
                    _repr_helper(batch_items.child_batch_commands, io_str,
                                 indent + 1)

        self.set_current_section('epilog')
        self += PatchPyBatchWithTimings(config_vars['__MAIN_OUT_FILE__'])

        PythonBatchCommandBase.total_progress = 0
        for name, section in self.sections.items():
            progress_count_for_section = section.total_progress_count()
            PythonBatchCommandBase.total_progress += progress_count_for_section
        PythonBatchCommandBase.total_progress += 1  # count the PythonBatchRuntime, todo: a better way to add PythonBatchRuntime's progress count to the total

        prolog_str = io.StringIO()
        prolog_str.write(self._python_opening_code())
        if 'assign' in self.sections:
            _repr_helper(self.sections['assign'], prolog_str, 0)

        main_str = io.StringIO()
        the_command = config_vars.get("__MAIN_COMMAND__", "woolly mammoth")
        runtimer = PythonBatchRuntime(the_command)
        for section_name in PythonBatchCommandAccum.section_order:
            if section_name in self.sections:
                if section_name not in PythonBatchCommandAccum.special_sections:
                    runtimer += self.sections[section_name]
        main_str.write("\n")
        _repr_helper(runtimer, main_str, 0)

        epilog_str = io.StringIO()
        if 'epilog' in self.sections:
            main_str.write("\n")
            _repr_helper(self.sections['epilog'], epilog_str, 0)

        epilog_str.write(self._python_closing_code())

        main_str_resolved = config_vars.resolve_str(main_str.getvalue())
        main_str_resolved = config_vars.replace_unresolved_with_native_var_pattern(
            main_str_resolved,
            list(config_vars["__CURRENT_OS_NAMES__"])[0])

        the_whole_repr = prolog_str.getvalue() + main_str_resolved + epilog_str.getvalue()

        PythonBatchCommandBase.config_vars_for_repr = None

        return the_whole_repr
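
camel_to_snake_case is not shown in this example; a minimal sketch of what a helper by that name presumably does (the real implementation may differ):

    import re

    def camel_to_snake_case(name: str) -> str:
        # sketch only: "CopyFileToFile_001_42" -> "copy_file_to_file_001_42"
        return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()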
Example #26
 def do_resolve(self, param):
     if param:
         print(config_vars.resolve_str(param))
     return False
Example #27
 def test_var_in_var_simple(self):
     config_vars["A"] = "$(B)"
     config_vars["B"] = "$(C)"
     config_vars["C"] = "ali baba"
     self.assertEqual("ali baba", config_vars["A"].str())
     self.assertEqual("ali baba", config_vars.resolve_str("$(A)"))
Example #28
    def read_remote_info_map(self):
        """ Reads the info map of the static files available for syncing.
            Writes the map to local sync folder for reference and debugging.
        """
        info_map_file_url = None
        try:
            with self.instlObj.info_map_table.reading_files_context():
                os.makedirs(os.fspath(
                    config_vars["LOCAL_REPO_BOOKKEEPING_DIR"]),
                            exist_ok=True)
                os.makedirs(os.fspath(
                    config_vars["LOCAL_REPO_REV_BOOKKEEPING_DIR"]),
                            exist_ok=True)

                if "INSTL_FOLDER_BASE_URL" not in config_vars:
                    if "REPO_REV_FOLDER_HIERARCHY" not in config_vars:
                        config_vars[
                            "REPO_REV_FOLDER_HIERARCHY"] = self.instlObj.repo_rev_to_folder_hierarchy(
                                config_vars["REPO_REV"].str())
                    config_vars[
                        "INSTL_FOLDER_BASE_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(REPO_REV_FOLDER_HIERARCHY)/instl"

                if "INFO_MAP_FILE_URL" not in config_vars:
                    config_vars["INFO_MAP_FILE_URL"] = config_vars.resolve_str(
                        "$(INSTL_FOLDER_BASE_URL)/info_map.txt")

                info_map_file_url = config_vars["INFO_MAP_FILE_URL"].str()
                info_map_file_expected_checksum = None
                if "INFO_MAP_CHECKSUM" in config_vars:
                    info_map_file_expected_checksum = config_vars[
                        "INFO_MAP_CHECKSUM"].str()
                local_copy_of_info_map_in = os.fspath(
                    config_vars["LOCAL_COPY_OF_REMOTE_INFO_MAP_PATH"])
                local_copy_of_info_map_out = utils.download_from_file_or_url(
                    in_url=info_map_file_url,
                    config_vars=config_vars,
                    in_target_path=local_copy_of_info_map_in,
                    translate_url_callback=connectionBase.translate_url,
                    cache_folder=self.instlObj.get_default_sync_dir(
                        continue_dir="cache", make_dir=True),
                    expected_checksum=info_map_file_expected_checksum)

                self.instlObj.info_map_table.read_from_file(
                    local_copy_of_info_map_out)
                self.instlObj.progress(f"read info_map {info_map_file_url}")

                additional_info_maps = self.instlObj.items_table.get_details_for_active_iids(
                    "info_map", unique_values=True)
                for additional_info_map in additional_info_maps:
                    # try to get the zipped info_map
                    additional_info_map_file_name = config_vars.resolve_str(
                        f"{additional_info_map}$(WZLIB_EXTENSION)")
                    path_in_main_info_map = config_vars.resolve_str(
                        f"instl/{additional_info_map_file_name}")
                    additional_info_map_item = self.instlObj.info_map_table.get_file_item(
                        path_in_main_info_map)
                    if not additional_info_map_item:  # zipped not found, try the unzipped info_map
                        additional_info_map_file_name = additional_info_map
                        path_in_main_info_map = config_vars.resolve_str(
                            f"instl/{additional_info_map}")
                        additional_info_map_item = self.instlObj.info_map_table.get_file_item(
                            path_in_main_info_map)

                    checksum = additional_info_map_item.checksum if additional_info_map_item else None

                    info_map_file_url = config_vars.resolve_str(
                        f"$(INSTL_FOLDER_BASE_URL)/{additional_info_map_file_name}"
                    )
                    local_copy_of_info_map_in = config_vars.resolve_str(
                        f"$(LOCAL_REPO_REV_BOOKKEEPING_DIR)/{additional_info_map}"
                    )
                    local_copy_of_info_map_out = utils.download_from_file_or_url(
                        in_url=info_map_file_url,
                        config_vars=config_vars,
                        in_target_path=local_copy_of_info_map_in,
                        translate_url_callback=connectionBase.translate_url,
                        cache_folder=self.instlObj.get_default_sync_dir(
                            "cache", make_dir=True),
                        expected_checksum=checksum)

                    self.instlObj.info_map_table.read_from_file(
                        local_copy_of_info_map_out)
                    self.instlObj.progress(
                        f"read info_map {info_map_file_url}")

                new_have_info_map_path = os.fspath(
                    config_vars["NEW_HAVE_INFO_MAP_PATH"])
                self.instlObj.info_map_table.write_to_file(
                    new_have_info_map_path,
                    field_to_write=('path', 'flags', 'revision', 'checksum',
                                    'size'))
        except Exception:
            log.error(f"""Exception reading info_map: {info_map_file_url}""")
            raise
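
With hypothetical values, the default info map URL built above resolves like this:

    # hypothetical values for illustration
    config_vars["BASE_LINKS_URL"] = "https://cdn.example.com/links"
    config_vars["REPO_NAME"] = "V12"
    config_vars["REPO_REV_FOLDER_HIERARCHY"] = "03/45"
    config_vars["INSTL_FOLDER_BASE_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(REPO_REV_FOLDER_HIERARCHY)/instl"
    config_vars.resolve_str("$(INSTL_FOLDER_BASE_URL)/info_map.txt")
    # -> "https://cdn.example.com/links/V12/03/45/instl/info_map.txt"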
Example #29
    def set_sync_locations_for_active_items(self):
        # get_sync_folders_and_sources_for_active_iids returns:
        #     [(iid, direct_sync_indicator, source, source_tag, install_folder), ...]
        # direct_sync_indicator will be None unless the item has a "direct_sync" section in index.yaml
        # source is the relative path as it appears in index.yaml
        # adjusted source is the source prefixed with $(SOURCE_PREFIX), if needed
        # source_tag is one of '!dir', '!dir_cont', '!file'
        # install_folder is where the sources should be copied to OR, for direct sync, where they should be synced to
        # install_folder will be None for items that require only sync, not copy (such as Icons)
        #
        # for each file item in the source this function sets the full download path: item.download_path
        # and the top folder common to all items in a single source: item.download_root
        sync_and_source = self.items_table.get_sync_folders_and_sources_for_active_iids()

        items_to_update = list()
        local_repo_sync_dir = os.fspath(config_vars["LOCAL_REPO_SYNC_DIR"])
        config_vars.setdefault("ALL_SYNC_DIRS", local_repo_sync_dir)
        for iid, direct_sync_indicator, source, source_tag, install_folder in sync_and_source:
            direct_sync = self.get_direct_sync_status_from_indicator(
                direct_sync_indicator)
            resolved_source_parts = source.split("/")
            if install_folder:
                resolved_install_folder = config_vars.resolve_str(install_folder)
            else:
                resolved_install_folder = install_folder

            if source_tag in ('!dir', '!dir_cont'):
                if direct_sync:
                    # for a direct-sync source: if the source contains an Info.xml that exists on disk
                    # AND both copies have the same checksum, no sync is needed at all.
                    # None of this applies in repair (update) mode.
                    need_to_sync = True
                    if not self.update_mode:
                        info_xml_item = self.info_map_table.get_file_item(
                            "/".join((source, "Info.xml")))
                        if info_xml_item:
                            info_xml_of_target = config_vars.resolve_str(
                                "/".join(
                                    (resolved_install_folder,
                                     resolved_source_parts[-1], "Info.xml")))
                            need_to_sync = not utils.check_file_checksum(
                                info_xml_of_target, info_xml_item.checksum)
                    if need_to_sync:
                        config_vars["ALL_SYNC_DIRS"].append(
                            resolved_install_folder)
                        item_paths = self.info_map_table.get_recursive_paths_in_dir(
                            dir_path=source, what="any")
                        self.progress(
                            f"mark for download {len(item_paths)} files of {iid}/{source}"
                        )
                        if source_tag == '!dir':
                            source_parent = "/".join(
                                resolved_source_parts[:-1])
                            for item in item_paths:
                                items_to_update.append({
                                    "_id": item['_id'],
                                    "download_path": config_vars.resolve_str(
                                        "/".join((resolved_install_folder,
                                                  item['path'][len(source_parent) + 1:]))),
                                    "download_root": config_vars.resolve_str(
                                        "/".join((resolved_install_folder,
                                                  resolved_source_parts[-1]))),
                                })
                        else:  # !dir_cont
                            source_parent = source
                            for item in item_paths:
                                items_to_update.append({
                                    "_id": item['_id'],
                                    "download_path": config_vars.resolve_str(
                                        "/".join((resolved_install_folder,
                                                  item['path'][len(source_parent) + 1:]))),
                                    "download_root": resolved_install_folder,
                                })
                    else:
                        num_ignored_files = self.info_map_table.ignore_file_paths_of_dir(
                            dir_path=source)
                        if num_ignored_files < 1:
                            num_ignored_files = ""  # sqlite cursor.rowcount does not always return the number of affected rows
                        self.progress(
                            f"avoid download {num_ignored_files} files of {iid}, Info.xml has not changed"
                        )

                else:
                    item_paths = self.info_map_table.get_recursive_paths_in_dir(
                        dir_path=source)
                    self.progress(
                        f"mark for download {len(item_paths)} files of {iid}/{source}"
                    )
                    for item in item_paths:
                        items_to_update.append({
                            "_id": item['_id'],
                            "download_path": config_vars.resolve_str(
                                "/".join((local_repo_sync_dir, item['path']))),
                            "download_root": None,
                        })
            elif source_tag == '!file':
                # if the file was wtarred and split it would have multiple items
                items_for_file = self.info_map_table.get_required_paths_for_file(
                    source)
                self.progress(
                    f"mark for download {len(items_for_file)} files of {iid}/{source}"
                )
                if direct_sync:
                    config_vars["ALL_SYNC_DIRS"].append(
                        resolved_install_folder)
                    for item in items_for_file:
                        items_to_update.append({
                            "_id": item['_id'],
                            "download_path": config_vars.resolve_str(
                                "/".join((resolved_install_folder, item['leaf']))),
                            "download_root": config_vars.resolve_str(item.download_path),
                        })
                else:
                    for item in items_for_file:
                        # no need to set item.download_root here - it will not be used
                        items_to_update.append({
                            "_id": item['_id'],
                            "download_path": config_vars.resolve_str(
                                "/".join((local_repo_sync_dir, item['path']))),
                            "download_root": None,
                        })

        self.info_map_table.update_downloads(items_to_update)
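
The difference between '!dir' and '!dir_cont' is which prefix is stripped from each item path: '!dir' keeps the source directory itself, '!dir_cont' drops it. A small sketch with hypothetical paths:

    # hypothetical paths illustrating the prefix stripping above
    source = "Common/Plugins/Alpha"
    item_path = "Common/Plugins/Alpha/data/a.dll"
    source_parent = "/".join(source.split("/")[:-1])    # "Common/Plugins"
    item_path[len(source_parent) + 1:]   # '!dir'      -> "Alpha/data/a.dll"
    item_path[len(source) + 1:]          # '!dir_cont' -> "data/a.dll"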