Example #1
    def error_dict(self, exc_type, exc_val, exc_tb) -> Dict:
        self.error_dict_self(exc_type, exc_val, exc_tb)
        if not self.doing:
            self.doing = self.progress_msg_self()
        self._error_dict.update({
            'instl_version': config_vars.get("__INSTL_VERSION_STR_LONG__", "Unknown version").str(),
            'python_version': ".".join(str(v) for v in sys.version_info),
            'doing': self.doing,
            'major_stage': self.major_stage_str(),
            'stage': ".".join(filter(None, (stage.stage_str() for stage in PythonBatchCommandBase.stage_stack))),
            'instl_class': repr(self),
            'obj__dict__': self.representative_dict(),
            'local_time': time.strftime("%Y-%m-%d_%H.%M.%S"),
            'progress_counter': PythonBatchCommandBase.running_progress,
            'current_working_dir': self.current_working_dir,
            'operating_system': utils.get_os_description(),
        })

        if "INSTL_MINIMAL_VERSION" in config_vars:
            min_version_as_list = [int(v) for v in config_vars["INSTL_MINIMAL_VERSION"].list()]
            cur_version_as_list = [int(v) for v in config_vars["__INSTL_VERSION__"].list()]
            if cur_version_as_list < min_version_as_list:
                self._error_dict['minimal_instl_version'] = min_version_as_list

        for cv in config_vars.get("CONFIG_VARS_FOR_ERROR_REPORT", []).list():
            self._error_dict[cv] = str(config_vars.get(cv, "unknown"))

        if exc_val:
            self._error_dict.update({
                'exception_type': str(type(exc_val).__name__),
                'exception_str': str(exc_val),
            })
        if exc_tb:
            self._error_dict.update({
                "batch_file": exc_tb.tb_frame.f_code.co_filename,
                "batch_line": exc_tb.tb_lineno
            })
        return self._error_dict
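error_dict() is evidently called while an exception is in flight, folding everything needed for a crash report into one serializable dict. A minimal, self-contained sketch of how such a method can be wired into a context manager's __exit__ (this class and the json dump are illustrative assumptions, not instl's actual PythonBatchCommandBase):

import json
import time

class FailureReportingCommand:
    """Illustrative sketch only; the real base class collects far more context."""
    def __init__(self):
        self._error_dict = {}

    def error_dict(self, exc_type, exc_val, exc_tb) -> dict:
        self._error_dict.update({
            'local_time': time.strftime("%Y-%m-%d_%H.%M.%S"),
            'exception_type': type(exc_val).__name__ if exc_val else None,
            'exception_str': str(exc_val) if exc_val else None,
        })
        return self._error_dict

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_val is not None:
            # dump the diagnostics, then let the exception propagate
            print(json.dumps(self.error_dict(exc_type, exc_val, exc_tb), default=str))
        return False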
Example #2
    def create_copy_instructions(self) -> None:
        self.progress("create copy instructions ...")
        # If we got here while in synccopy command, there is no need to read the info map again.
        # If we got here while in copy command, read HAVE_INFO_MAP_COPY_PATH which defaults to NEW_HAVE_INFO_MAP_PATH.
        # Copy might be called after the sync batch file was created but before it was executed
        if len(self.info_map_table.files_read_list) == 0:
            have_info_path = os.fspath(config_vars["HAVE_INFO_MAP_COPY_PATH"])
            self.info_map_table.read_from_file(have_info_path, disable_indexes_during_read=True)

        self.avoid_copy_markers = list(config_vars.get('AVOID_COPY_MARKERS', []))

        # copy and actions instructions for sources
        self.batch_accum.set_current_section('copy')
        self.batch_accum += self.create_sync_folder_manifest_command("before-copy", back_ground=True)
        self.batch_accum += Progress("Start copy from $(COPY_SOURCES_ROOT_DIR)")

        sorted_target_folder_list = sorted(self.all_iids_by_target_folder,
                                           key=lambda fold: config_vars.resolve_str(fold))

        # first create all target folders, to avoid dependency-order problems such as creating links between folders
        self.create_create_folders_instructions(sorted_target_folder_list)

        self.batch_accum += self.accumulate_unique_actions_for_active_iids('pre_copy')

        if self.mac_current_and_target:
            self.pre_copy_mac_handling()

        remove_previous_sources = bool(config_vars.get("REMOVE_PREVIOUS_SOURCES", True))
        for target_folder_path in sorted_target_folder_list:
            if remove_previous_sources:
                with self.batch_accum.sub_accum(Stage("remove_previous_sources_instructions_for_target_folder", target_folder_path)) as sub_sec:
                    sub_sec += self.create_remove_previous_sources_instructions_for_target_folder(target_folder_path)
            self.create_copy_instructions_for_target_folder(target_folder_path)

        # actions instructions for sources that do not need copying, here folder_name is the sync folder
        for sync_folder_name in sorted(self.no_copy_iids_by_sync_folder.keys()):
            with self.batch_accum.sub_accum(CdStage("create_copy_instructions_for_no_copy_folder", sync_folder_name)) as folder_accum:
                folder_accum += self.create_copy_instructions_for_no_copy_folder(sync_folder_name)

        self.progress(self.bytes_to_copy, "bytes to copy")

        self.batch_accum += self.accumulate_unique_actions_for_active_iids('post_copy')

        self.batch_accum.set_current_section('post-copy')
        # Copy have_info file to "site" (e.g. /Library/Application support/... or c:\ProgramData\...)
        # for reference. But when preparing offline installers the site location is the same as the sync location
        # so copy should be avoided.
        if os.fspath(config_vars["HAVE_INFO_MAP_PATH"]) != os.fspath(config_vars["SITE_HAVE_INFO_MAP_PATH"]):
            self.batch_accum += MakeDir("$(SITE_REPO_BOOKKEEPING_DIR)", chowner=True)
            self.batch_accum += CopyFileToFile("$(HAVE_INFO_MAP_PATH)", "$(SITE_HAVE_INFO_MAP_PATH)", hard_links=False, copy_owner=True)

        self.create_require_file_instructions()

        # messages about orphan iids
        for iid in sorted(list(config_vars["__ORPHAN_INSTALL_TARGETS__"])):
            self.batch_accum += Echo(f"Don't know how to install {iid}")
        self.batch_accum += Progress("Done copy")
        self.progress("create copy instructions done")
        self.progress("")
Example #3
    def __call__(self, *args, **kwargs) -> None:
        if "REPO_REV_FILE_VARS" not in config_vars:
            # must have a list of variable names to write to the repo-rev file
            raise ValueError("REPO_REV_FILE_VARS must be defined")
        repo_rev_vars = list(config_vars["REPO_REV_FILE_VARS"])  # list of configVars to write to the repo-rev file
        # check that the variable names from REPO_REV_FILE_VARS do not contain
        # names that must not be made public
        dangerous_intersection = set(repo_rev_vars).intersection(
            {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "PRIVATE_KEY", "PRIVATE_KEY_FILE"})
        if dangerous_intersection:
            log.warning("found", str(dangerous_intersection), "in REPO_REV_FILE_VARS, aborting")
            raise ValueError(f"file REPO_REV_FILE_VARS {dangerous_intersection} and so is forbidden to upload")

        use_zlib = bool(config_vars.get("USE_ZLIB", "False"))  # should we consider zipped files or not
        zip_extension = ""
        if use_zlib:
            zip_extension = config_vars.get("WZLIB_EXTENSION", ".wzip").str()

        revision_instl_folder_path = Path(config_vars["UPLOAD_REVISION_INSTL_FOLDER"])

        # create checksum for the main info_map file, either wzipped or not
        main_info_map_file_name = "info_map.txt"+zip_extension
        main_info_map_file = revision_instl_folder_path.joinpath(main_info_map_file_name)
        main_info_map_checksum = utils.get_file_checksum(main_info_map_file)

        config_vars["INFO_MAP_FILE_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+main_info_map_file_name
        config_vars["INFO_MAP_CHECKSUM"] = main_info_map_checksum

        # create checksum for the main index.yaml file, either wzipped or not
        index_file_name = "index.yaml"+zip_extension
        index_file_path = revision_instl_folder_path.joinpath(index_file_name)

        config_vars["INDEX_CHECKSUM"] = utils.get_file_checksum(index_file_path)
        config_vars["INDEX_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+index_file_name

        short_index_file_name = "short-index.yaml"
        short_index_file_path = revision_instl_folder_path.joinpath(short_index_file_name)
        config_vars["SHORT_INDEX_CHECKSUM"] = utils.get_file_checksum(short_index_file_path)
        config_vars["SHORT_INDEX_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+short_index_file_name

        config_vars["INSTL_FOLDER_BASE_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl"
        config_vars["REPO_REV_FOLDER_HIERARCHY"] = "$(__CURR_REPO_FOLDER_HIERARCHY__)"

        # check that all variables are present
        # <class 'list'>: ['INSTL_FOLDER_BASE_URL', 'REPO_REV_FOLDER_HIERARCHY', 'SYNC_BASE_URL']
        missing_vars = [var for var in repo_rev_vars if var not in config_vars]
        if missing_vars:
            raise ValueError(f"{missing_vars} are missing cannot write repo rev file")

        # create yaml out of the variables
        variables_as_yaml = config_vars.repr_for_yaml(repo_rev_vars)
        repo_rev_yaml_doc = aYaml.YamlDumpDocWrap(variables_as_yaml, '!define', "",
                                                  explicit_start=True, sort_mappings=True)
        repo_rev_file_path = config_vars["UPLOAD_REVISION_REPO_REV_FILE"]
        with utils.utf8_open_for_write(repo_rev_file_path, "w") as wfd:
            aYaml.writeAsYaml(repo_rev_yaml_doc, out_stream=wfd, indentor=None, sort=True)
            log.info(f"""create {repo_rev_file_path}""")
Example #4
    def compile_exclude_regexi(self):
        forbidden_folder_regex_list = list(config_vars.get("FOLDER_EXCLUDE_REGEX", [".*"]))
        self.compiled_forbidden_folder_regex = utils.compile_regex_list_ORed(forbidden_folder_regex_list)
        forbidden_file_regex_list = list(config_vars.get("FILE_EXCLUDE_REGEX", [".*"]))
        self.compiled_forbidden_file_regex = utils.compile_regex_list_ORed(forbidden_file_regex_list)
Example #5
    def test_Glober(self):
        file_to_remove_1: Path = self.pbt.path_inside_test_folder("file_to_remove_1")
        file_to_remove_2: Path = self.pbt.path_inside_test_folder("file_to_remove_2")
        file_to_stay_1: Path = self.pbt.path_inside_test_folder("file_to_stay_1")
        file_to_stay_2: Path = self.pbt.path_inside_test_folder("file_to_stay_2")
        united_file: Path = self.pbt.path_inside_test_folder("united_file")

        # create the files
        self.pbt.batch_accum.clear(section_name="doit")
        self.pbt.batch_accum += Touch(file_to_remove_1)
        self.pbt.batch_accum += Touch(file_to_remove_2)
        self.pbt.batch_accum += MakeRandomDataFile(file_to_stay_1, 68)
        self.pbt.batch_accum += MakeRandomDataFile(file_to_stay_2, 32)
        self.pbt.exec_and_capture_output()
        # check files were created
        self.assertTrue(file_to_remove_1.exists(), f"file not created {file_to_remove_1}")
        self.assertTrue(file_to_remove_2.exists(), f"file not created {file_to_remove_2}")
        self.assertTrue(file_to_stay_1.exists(), f"file not created {file_to_stay_1}")
        self.assertTrue(file_to_stay_2.exists(), f"file not created {file_to_stay_2}")
        self.assertFalse(united_file.exists(), f"file should not exist {united_file}")

        # the test will:
        # 1) remove some files with a glob pattern
        # 2) unite some files with another pattern and check the expected size of the new file
        # 3) run a glob that matches nothing, to make sure Glober can handle this as well
        self.pbt.batch_accum.clear(section_name="doit")
        to_stay_glob = os.fspath(self.pbt.test_folder.joinpath("file_to_stay*"))
        to_remove_glob = os.fspath(self.pbt.test_folder.joinpath("file_to_remove*"))
        no_match_glob = os.fspath(self.pbt.test_folder.joinpath("no_match*"))

        self.pbt.batch_accum += Glober(to_stay_glob,
                                       AppendFileToFile,
                                       None,
                                       united_file)
        self.pbt.batch_accum += Glober(to_remove_glob,
                                       RmFile,
                                       "path")
        self.pbt.batch_accum += Glober(no_match_glob,
                                       RmFile,
                                       "path")
        self.pbt.batch_accum += Glober(r"""$(NATIVE_INSTRUMENTS_SERVICE_CENTER_DIR)/Waves-*""", Chmod, None, "a+rw")
        self.pbt.batch_accum += Glober(r"""$(NATIVE_INSTRUMENTS_SERVICE_CENTER_DIR)/Waves-*""", Chown, "path",
                 user_id=int(config_vars.get("ACTING_UID", -1)), group_id=int(config_vars.get("ACTING_GID", -1)))
        self.pbt.batch_accum += Glober(r"""$(WAVESHELL_AAX_DIR)/WaveShell*.aaxplugin""", Chown, "path",
                 user_id=int(config_vars.get("ACTING_UID", -1)), group_id=int(config_vars.get("ACTING_GID", -1)),
                 recursive=True)
        self.pbt.batch_accum += Glober(r"""$(WAVESHELL_AAX_DIR)/WaveShell*.aaxplugin""", Chmod, None, "a+rwX", recursive=True)

        self.pbt.exec_and_capture_output()
        self.assertFalse(file_to_remove_1.exists(), f"file not removed {file_to_remove_1}")
        self.assertFalse(file_to_remove_2.exists(), f"file not removed {file_to_remove_2}")
        self.assertTrue(file_to_stay_1.exists(), f"file unexpectedly removed {file_to_stay_1}")
        self.assertTrue(file_to_stay_2.exists(), f"file unexpectedly removed {file_to_stay_2}")
        self.assertEqual(united_file.stat().st_size, 100, f"united file, wrong size {united_file.stat().st_size}")
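From the calls above, Glober's signature appears to be Glober(glob_pattern, command_class, path_argument_name, *args, **kwargs): the third argument names the keyword that receives each matched path, with None meaning the match is passed as the first positional argument. A rough pure-Python equivalent, inferred from this test rather than from Glober's source:

import glob

def glober_sketch(glob_pattern, command_class, path_argument_name, *args, **kwargs):
    # expand the pattern and instantiate command_class once per match;
    # an empty match list simply runs nothing, which is what the
    # no_match_glob case above relies on
    for match in glob.glob(glob_pattern):
        if path_argument_name is None:
            command = command_class(match, *args, **kwargs)
        else:
            command = command_class(*args, **{path_argument_name: match}, **kwargs)
        with command as cmd:
            cmd()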
Example #6
    def test_defaults(self):
        empty_list = config_vars.get("MAMBO_JUMBO", []).list()
        self.assertEqual([], empty_list)

        full_list = config_vars.get("MAMBO_JUMBO", ["mambo", "jumbo"]).list()
        self.assertEqual(["mambo", "jumbo"], full_list)

        empty_str = config_vars.get("MAMBO_JUMBO", "").str()
        self.assertEqual("", empty_str)

        full_str = config_vars.get("MAMBO_JUMBO", "mambo jumbo").str()
        self.assertEqual("mambo jumbo", full_str)
Example #7
    def create_copy_instructions_for_dir(self, source_path: str, name_for_progress_message: str) -> PythonBatchCommandBase:
        dir_item: svnTree.SVNRow = self.info_map_table.get_dir_item(source_path)
        if dir_item is not None:
            retVal = AnonymousAccum()
            source_items: List[svnTree.SVNRow] = self.info_map_table.get_items_in_dir(dir_path=source_path)
            wtar_base_names = {source_item.unwtarred.split("/")[-1] for source_item in source_items if source_item.wtarFlag}
            ignores = list(wtar_base_names)
            source_path_abs = os.path.normpath("$(COPY_SOURCES_ROOT_DIR)/" + source_path)
            retVal += CopyDirToDir(source_path_abs, os.curdir, link_dest=True, ignore_patterns=ignores)
            self.bytes_to_copy += functools.reduce(lambda total, item: total + self.calc_size_of_file_item(item), source_items, 0)

            source_path_dir, source_path_name = os.path.split(source_path)

            if self.mac_current_and_target:
                # all copied files and folders should be rw
                retVal += ChmodAndChown(path=source_path_name,
                                        mode="a+rw",
                                        user_id=int(config_vars.get("ACTING_UID", -1)),
                                        group_id=int(config_vars.get("ACTING_GID", -1)),
                                        recursive=True,
                                        ignore_all_errors=True)
                for source_item in source_items:
                    if not source_item.is_wtar_file() and source_item.isExecutable():
                        source_path_relative_to_current_dir = source_item.path_starting_from_dir(source_path_dir)
                        # executable files should also get the exec bit
                        retVal += Chmod(source_path_relative_to_current_dir, source_item.chmod_spec())

            if len(wtar_base_names) > 0:
                retVal += Unwtar(source_path_abs, os.curdir)
        else:
            # it might be a dir that was wtarred
            retVal = self.create_copy_instructions_for_file(source_path, name_for_progress_message)
        return retVal
Example #8
    def init_default_client_vars(self):

        if "SYNC_BASE_URL" in config_vars:
            resolved_sync_base_url = config_vars["SYNC_BASE_URL"].str()
            url_main_item = utils.main_url_item(resolved_sync_base_url)
            config_vars["SYNC_BASE_URL_MAIN_ITEM"] = url_main_item
        # TARGET_OS_NAMES defaults to __CURRENT_OS_NAMES__, which is not what we want if syncing to
        # an OS which is not the current one
        if config_vars["TARGET_OS"].str() != config_vars["__CURRENT_OS__"].str():
            target_os_names = list(config_vars[config_vars.resolve_str("$(TARGET_OS)_ALL_OS_NAMES")])
            config_vars["TARGET_OS_NAMES"] = target_os_names
            second_name: str = config_vars["TARGET_OS"].str()
            if len(target_os_names) > 1:
                second_name = target_os_names[1]
            config_vars["TARGET_OS_SECOND_NAME"] = second_name

        if "REPO_TYPE" in config_vars:  # some commands do not need to have REPO_TYPE
            self.read_defaults_file(str(config_vars["REPO_TYPE"]))

        # AUXILIARY_IIDS are iids that are not real products, such as UNINSTALL_AS_... iids
        if "AUXILIARY_IIDS" not in config_vars:
            log.warning("could not find configVar 'AUXILIARY_IIDS'")
        self.auxiliary_iids.extend(list(config_vars.get("AUXILIARY_IIDS", [])))

        config_vars["__MAIN_DRIVE_NAME__"] = utils.get_main_drive_name()
Example #9
    def write_batch_file(self, in_batch_accum, file_name_post_fix=""):
        assert "__MAIN_OUT_FILE__" in config_vars

        config_vars["TOTAL_ITEMS_FOR_PROGRESS_REPORT"] = in_batch_accum.total_progress_count()

        in_batch_accum.initial_progress = self.internal_progress
        self.create_variables_assignment(in_batch_accum)
        self.init_python_batch(in_batch_accum)

        exit_on_errors = self.the_command != 'uninstall'  # in case of uninstall, go on with batch file even if some operations failed

        final_repr = repr(in_batch_accum)

        out_file: Path = config_vars.get("__MAIN_OUT_FILE__", None).Path()
        if out_file:
            out_file = out_file.parent.joinpath(out_file.name + file_name_post_fix)
            with MakeDir(out_file.parent, report_own_progress=False) as md:
                md()
            self.out_file_realpath = os.fspath(out_file)
        else:
            self.out_file_realpath = "stdout"

        with utils.write_to_file_or_stdout(out_file) as fd:
            fd.write(final_repr)
            fd.write('\n')

        msg = " ".join(
            (self.out_file_realpath,
             str(in_batch_accum.total_progress_count()), "progress items"))
        log.info(msg)
Example #10
    def __call__(self, *args, **kwargs) -> None:
        PythonBatchCommandBase.__call__(self, *args, **kwargs)
        resolved_what_to_zip = utils.ExpandAndResolvePath(self.what_to_wzip)

        if self.where_to_put_wzip:
            target_wzip_file = utils.ExpandAndResolvePath(self.where_to_put_wzip)
        else:
            target_wzip_file = resolved_what_to_zip.parent
            if not target_wzip_file:  # os.path.split might return an empty string
                target_wzip_file = Path.cwd()
        if not target_wzip_file.is_file():
            # assuming it's a folder
            with MakeDir(target_wzip_file.parent, report_own_progress=False) as md:
                md()
            target_wzip_file = target_wzip_file.joinpath(resolved_what_to_zip.name + ".wzip")

        self.doing = f"""wzipping '{resolved_what_to_zip}' to '{target_wzip_file}'"""
        zlib_compression_level = int(config_vars.get("ZLIB_COMPRESSION_LEVEL", "8"))
        with open(target_wzip_file, "wb") as wfd, open(resolved_what_to_zip, "rb") as rfd:
            wfd.write(zlib.compress(rfd.read(), zlib_compression_level))
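Since the compression above is a single zlib.compress over the whole file, reading a .wzip back is the symmetric call. A sketch of the inverse, assuming a .wzip is a raw zlib stream with no extra container format:

import zlib
from pathlib import Path

def unwzip_sketch(wzip_path: Path, out_path: Path) -> None:
    # inverse of the wzip step: decompress the raw zlib stream back to the original bytes
    out_path.write_bytes(zlib.decompress(wzip_path.read_bytes()))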
Example #11
    def check_yaml(self, path_to_yaml=None, config_var_containing_path_to_file=None):

        if not path_to_yaml:
            path_to_yaml = config_vars.get(config_var_containing_path_to_file, "").str()

        if path_to_yaml:

            command_line = [os.fspath(config_vars["__INSTL_EXE_PATH__"]), "read-yaml",
                            "--in", path_to_yaml, "--silent"]

            try:
                if getattr(os, "setsid", None):
                    check_yaml_process = subprocess.Popen(command_line, executable=command_line[0], shell=False, preexec_fn=os.setsid)  # Unix
                else:
                    check_yaml_process = subprocess.Popen(command_line, executable=command_line[0], shell=False)  # Windows
            except OSError:
                log.info(f"""Cannot run: {command_line}""")
                return

            unused_stdout, unused_stderr = check_yaml_process.communicate()
            return_code = check_yaml_process.returncode
            if return_code != 0:
                log.info(f"""{" ".join(command_line)} returned exit code {return_code}""")
            else:
                log.info(f"""{path_to_yaml} read OK""")
Example #12
    def __call__(self, *args, **kwargs) -> None:

        PythonBatchCommandBase.__call__(self, *args, **kwargs)
        ignore_files = list(config_vars.get("WTAR_IGNORE_FILES", []))

        what_to_unwtar: Path = utils.ExpandAndResolvePath(self.what_to_unwtar)

        if what_to_unwtar.is_file():
            if utils.is_first_wtar_file(what_to_unwtar):
                if self.where_to_unwtar:
                    destination_folder: Path = utils.ExpandAndResolvePath(self.where_to_unwtar)
                else:
                    destination_folder = what_to_unwtar.parent

                self.unwtar_a_file(what_to_unwtar,
                                   destination_folder,
                                   no_artifacts=self.no_artifacts,
                                   ignore=ignore_files,
                                   copy_owner=self.copy_owner)

        elif what_to_unwtar.is_dir():
            if self.where_to_unwtar:
                destination_folder: Path = Path(utils.ExpandAndResolvePath(self.where_to_unwtar), what_to_unwtar.name)
            else:
                destination_folder = what_to_unwtar
            self.doing = f"""unwtar folder '{what_to_unwtar}' to '{destination_folder}'"""
            if not can_skip_unwtar(what_to_unwtar, destination_folder):
                for root, dirs, files in os.walk(what_to_unwtar, followlinks=False):
                    # a hack to prevent unwtarring of the sync folder. Copy command might copy something
                    # to the top level of the sync folder.
                    if "bookkeeping" in dirs:
                        dirs[:] = []
                        log.debug(f"skipping {root} because bookkeeping folder was found")
                        continue

                    root_Path = Path(root)
                    tail_folder = root_Path.relative_to(what_to_unwtar)
                    for a_file in files:
                        a_file_path = root_Path.joinpath(a_file)
                        if utils.is_first_wtar_file(a_file_path):
                            where_to_unwtar_the_file = destination_folder.joinpath(tail_folder)
                            self.unwtar_a_file(a_file_path,
                                               where_to_unwtar_the_file,
                                               no_artifacts=self.no_artifacts,
                                               ignore=ignore_files,
                                               copy_owner=self.copy_owner)
            else:
                log.debug(f"unwtar {what_to_unwtar} to {self.where_to_unwtar}: skipping unwtarring because both folders have the same Info.xml file")

        else:
            raise FileNotFoundError(what_to_unwtar)
Example #13
    def get_version_of_installed_binaries(self):
        binaries_version_list = list()
        try:

            ignore_regexes_filter = utils.check_binaries_versions_filter_with_ignore_regexes()

            if "CHECK_BINARIES_VERSION_FOLDER_EXCLUDE_REGEX" in config_vars:
                ignore_folder_regex_list = list(config_vars["CHECK_BINARIES_VERSION_FOLDER_EXCLUDE_REGEX"])
                ignore_regexes_filter.set_folder_ignore_regexes(ignore_folder_regex_list)

            if "CHECK_BINARIES_VERSION_FILE_EXCLUDE_REGEX" in config_vars:
                ignore_file_regex_list = list(config_vars["CHECK_BINARIES_VERSION_FILE_EXCLUDE_REGEX"])
                ignore_regexes_filter.set_file_ignore_regexes(ignore_file_regex_list)

            current_os = config_vars["__CURRENT_OS__"].str()
            path_to_search = list(config_vars.get('CHECK_BINARIES_VERSION_FOLDERS', []))
            for a_path in path_to_search:
                binaries_version_from_folder = utils.check_binaries_versions_in_folder(current_os, Path(a_path), ignore_regexes_filter)
                binaries_version_list.extend(binaries_version_from_folder)

            self.items_table.insert_binary_versions(binaries_version_list)

        except Exception as ex:
            log.warning(f"""exception while in check_binaries_versions {ex}""")
        return binaries_version_list
Example #14
    def update_state(self, *args, **kwargs):
        super().update_state(*args, **kwargs)
        self.read_activate_config_files()

        host = config_vars.get("REDIS_HOST", "").str()
        port = config_vars.get("REDIS_PORT", 0).int()

        if self.redis_conn is not None:
            if self.redis_conn.host != host or self.redis_conn.port != port:
                self.stop_update_redis_table()
                log.info(f"disconnected from redis host: {self.redis_conn.host}, port: {self.redis_conn.port}")
                self.redis_conn.close()
                self.redis_conn = None
        if self.redis_conn is None and host and port:
            self.redis_conn = utils.RedisClient(host, port)
            log.info(f"connected to redis host: {self.redis_conn.host}, port: {self.redis_conn.port}")
            self.start_update_redis_table()
Example #15
    def write_history(self):
        selected_tab = self.notebook.tab(self.notebook.select(), option='text')
        config_vars["SELECTED_TAB"] = selected_tab

        which_vars_for_yaml = config_vars.get("__GUI_CONFIG_FILE_VARS__", []).list()
        the_list_yaml_ready = config_vars.repr_for_yaml(which_vars=which_vars_for_yaml, resolve=False, ignore_unknown_vars=True)
        the_doc_yaml_ready = aYaml.YamlDumpDocWrap(the_list_yaml_ready, '!define', "Definitions", explicit_start=True, sort_mappings=True)
        with utils.utf8_open_for_write(config_vars["INSTL_GUI_CONFIG_FILE_NAME"].str(), "w") as wfd:
            aYaml.writeAsYaml(the_doc_yaml_ready, wfd)
Example #16
    def read_activate_config_files(self):
        for config_file_var in ("ACTIVATE_CONFIG_FILE",):
            config_path = config_vars.get(config_file_var, None).Path()
            if config_path:
                if config_path.is_file():
                    config_vars["__SEARCH_PATHS__"].clear()  # so __include__ file will not be found on old paths
                    self.instl_obj.read_yaml_file(config_path)
                else:
                    log.info(f"""File not found: {config_path}""")
Example #17
    def read_admin_config_files(self, *args, **kwargs):
        for config_file_var in ("ADMIN_GUI_TARGET_CONFIG_FILE", "ADMIN_GUI_LOCAL_CONFIG_FILE"):
            config_path = config_vars.get(config_file_var, None).Path()
            if config_path:
                if config_path.is_file():
                    config_vars["__SEARCH_PATHS__"].clear()  # so __include__ file will not be found on old paths
                    self.instl_obj.read_yaml_file(config_path)
                else:
                    log.info(f"""File not found: {config_path}""")
Example #18
    def do_report_versions(self):
        self.guids_to_ignore = set(list(config_vars.get("MAIN_IGNORED_TARGETS", [])))

        report_only_installed = bool(config_vars["__REPORT_ONLY_INSTALLED__"])
        report_data = self.items_table.versions_report(report_only_installed=report_only_installed)

        self.output_data.extend(report_data)
Example #19
    def error_dict(self, exc_type, exc_val, exc_tb) -> Dict:
        if self._error_dict is None:
            self._error_dict = dict()
        self.error_dict_self(exc_type, exc_val, exc_tb)
        if not self.doing:
            self.doing = self.progress_msg_self()
        self._error_dict.update({
            'doing': self.doing,
            'major_stage': self.major_stage_str(),
            'stage': ".".join(filter(None, (stage.stage_str() for stage in PythonBatchCommandBase.stage_stack))),
            'instl_class': repr(self),
            'obj__dict__': self.representative_dict(),
            'local_time': time.strftime("%Y-%m-%d_%H.%M.%S"),
            'progress_counter': PythonBatchCommandBase.running_progress,
            'current_working_dir': self.current_working_dir,
        })

        for cv in config_vars.get("CONFIG_VARS_FOR_ERROR_REPORT", []).list():
            self._error_dict[cv] = str(config_vars.get(cv, "unknown"))

        if exc_val:
            self._error_dict.update({
                'exception_type': str(type(exc_val).__name__),
                'exception_str': str(exc_val),
            })
        if exc_tb:
            self._error_dict.update({
                "batch_file": exc_tb.tb_frame.f_code.co_filename,
                "batch_line": exc_tb.tb_lineno
            })
        return self._error_dict
Example #20
    def create_copy_instructions_for_file(self, source_path: str, name_for_progress_message: str) -> PythonBatchCommandBase:
        retVal = AnonymousAccum()
        source_files = self.info_map_table.get_required_for_file(source_path)
        if not source_files:
            log.warning(f"""no source files for {source_path}""")
            return retVal
        num_wtars: int = functools.reduce(lambda total, item: total + item.wtarFlag, source_files, 0)
        assert (len(source_files) == 1 and num_wtars == 0) or num_wtars == len(source_files)

        if num_wtars == 0:
            source_file = source_files[0]
            source_file_full_path = os.path.normpath("$(COPY_SOURCES_ROOT_DIR)/" + source_file.path)

            retVal += CopyFileToDir(source_file_full_path, os.curdir, link_dest=True)

            if self.mac_current_and_target:
                if not source_file.path.endswith(".symlink"):
                    retVal += ChmodAndChown(
                        path=source_file.name(),
                        mode=source_file.chmod_spec(),
                        user_id=int(config_vars.get("ACTING_UID", -1)),
                        group_id=int(config_vars.get("ACTING_GID", -1)),
                        recursive=False)

            self.bytes_to_copy += self.calc_size_of_file_item(source_file)
        else:  # one or more wtar files
            # do not increment retVal - unwtar_instructions will add its own instructions
            first_wtar_item = None
            for source_wtar in source_files:
                self.bytes_to_copy += self.calc_size_of_file_item(source_wtar)
                if source_wtar.is_first_wtar_file():
                    first_wtar_item = source_wtar
            assert first_wtar_item is not None
            first_wtar_full_path = os.path.normpath("$(COPY_SOURCES_ROOT_DIR)/" + first_wtar_item.path)
            retVal += Unwtar(first_wtar_full_path, os.curdir)
        return retVal
Example #21
    def get_run_args(self, run_args) -> None:
        run_args.append(config_vars.get("SVN_CLIENT_PATH", "svn").str())
        run_args.append(self.command)
        if self.url_with_repo_rev():
            run_args.append(self.url_with_repo_rev())
        if self.depth:
            run_args.append("--depth")
            run_args.append(self.depth)
        if self.working_copy_path is not None:
            run_args.append(self.working_copy_path)
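get_run_args only assembles the argv list; executing it is left to the caller. A sketch of how such a list could be run (whether instl's base class actually uses subprocess.run, and with which options, is an assumption here):

import subprocess

def run_svn_command(command_obj) -> subprocess.CompletedProcess:
    run_args: list = []
    command_obj.get_run_args(run_args)
    # check=True raises CalledProcessError on a non-zero exit code
    return subprocess.run(run_args, check=True, capture_output=True, text=True)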
Example #22
    def resolve_defined_paths(self):
        self.path_searcher.add_search_paths(list(config_vars.setdefault("SEARCH_PATHS", [])))
        for path_var_to_resolve in list(config_vars.get("PATHS_TO_RESOLVE", [])):
            if path_var_to_resolve in config_vars:
                resolved_path = self.path_searcher.find_file(str(config_vars[path_var_to_resolve]),
                                                             return_original_if_not_found=True)
                config_vars[path_var_to_resolve] = resolved_path
Example #23
    def should_check_for_binary_versions(self):
        """ Checking versions inside binaries is a heavy task.
            should_check_for_binary_versions returns whether it is needed.
            True is returned if the check was explicitly requested
            or if an update of installed items was requested.
        """
        explicitly_asked_for_binaries_check = 'CHECK_BINARIES_VERSIONS' in config_vars
        update_was_requested = "__UPDATE_INSTALLED_ITEMS__" in config_vars.get("MAIN_INSTALL_TARGETS", []).list()
        retVal = explicitly_asked_for_binaries_check or update_was_requested
        return retVal
Example #24
    def init_copy_vars(self) -> None:
        self.action_type_to_progress_message: Dict[str, str] = {'pre_copy': "pre-install step",
                                                'post_copy': "post-install step",
                                                'pre_copy_to_folder': "pre-copy step",
                                                'post_copy_to_folder': "post-copy step"}
        self.bytes_to_copy = 0
        # ratio between a wtar file and its uncompressed contents
        self.wtar_ratio = float(config_vars.get("WTAR_RATIO", "1.3"))

        # when running on MacOS AND the installation targets MacOS, some special cases need to be considered
        self.mac_current_and_target = 'Mac' in list(config_vars["__CURRENT_OS_NAMES__"]) and 'Mac' in list(config_vars["TARGET_OS"])
        self.win_current_and_target = 'Win' in list(config_vars["__CURRENT_OS_NAMES__"]) and 'Win' in list(config_vars["TARGET_OS"])
Example #25
    def __call__(self, *args, **kwargs):
        PythonBatchCommandBase.__call__(self, *args, **kwargs)
        session = kwargs['session']
        with MakeDir(self.path.parent, report_own_progress=False) as dir_maker:
            dir_maker()
        with open(self.path, "wb") as fo:
            timeout_seconds = int(config_vars.get("CURL_MAX_TIME", 480))
            read_data = session.get(self.url, timeout=timeout_seconds)
            read_data.raise_for_status()  # must raise in case of an error; the server might return json/xml with error details, we do not want that
            fo.write(read_data.content)
        checksum_ok = utils.check_file_checksum(self.path, self.checksum)
        if not checksum_ok:
            raise ValueError(f"bad checksum for {self.path} even after re-download")
Example #26
    def __call__(self, *args, **kwargs):
        PythonBatchCommandBase.__call__(self, *args, **kwargs)
        for self.cur_path in self.paths_to_make:
            self.cur_path = utils.ExpandAndResolvePath(self.cur_path)
            if self.remove_obstacles:
                if self.cur_path.is_file():
                    self.doing = f"""removing file that should be a folder '{self.cur_path}'"""
                    self.cur_path.unlink()
            self.doing = f"""creating a folder '{self.cur_path}'"""
            self.cur_path.mkdir(parents=True, mode=0o777, exist_ok=True)
            if self.remove_obstacles:
                if sys.platform == 'win32':
                    with FullACLForEveryone(self.cur_path, own_progress_count=0) as grant_permissions:
                        grant_permissions()
                elif sys.platform == 'darwin':
                    with Chown(path=self.cur_path,
                               user_id=int(config_vars.get("ACTING_UID", -1)),
                               group_id=int(config_vars.get("ACTING_GID", -1)),
                               recursive=False,
                               own_progress_count=0) as grant_permissions:
                        grant_permissions()
Example #27
    def open_file_for_edit(self, path_to_file=None, config_var_containing_path_to_file=None):
        if not path_to_file:
            path_to_file = config_vars.get(config_var_containing_path_to_file, "").str()
        if path_to_file:
            path_to_file = Path(path_to_file).resolve()
            if not path_to_file.is_file():
                log.info(f"""File not found:{path_to_file}""")
                return

            try:
                # noinspection PyUnresolvedReferences
                os.startfile(os.fspath(path_to_file), 'edit')  # windows
            except AttributeError:
                subprocess.call(['open', os.fspath(path_to_file)])
Example #28
    def calculate_all_install_items(self):
        # mark ignored iids, so all subsequent operations do not act on these iids
        ignored_iids = list(config_vars.get("MAIN_IGNORED_TARGETS", []))
        self.items_table.set_ignore_iids(ignored_iids)

        # mark main install items
        main_iids = list(config_vars["__MAIN_INSTALL_IIDS__"])
        self.items_table.change_status_of_iids_to_another_status(
            self.items_table.install_status["none"],
            self.items_table.install_status["main"],
            main_iids,
            progress_callback=self.progress)

        # find dependants of main install items
        main_iids_and_dependents = self.items_table.get_recursive_dependencies(
            look_for_status=self.items_table.install_status["main"])
        # mark dependants of main items, but only if they are not already in main items
        self.items_table.change_status_of_iids_to_another_status(
            self.items_table.install_status["none"],
            self.items_table.install_status["depend"],
            main_iids_and_dependents,
            progress_callback=self.progress)

        # mark update install items, but only those not already marked as main or depend
        update_iids = list(config_vars["__MAIN_UPDATE_IIDS__"])
        self.items_table.change_status_of_iids_to_another_status(
            self.items_table.install_status["none"],
            self.items_table.install_status["update"],
            update_iids,
            progress_callback=self.progress)

        # find dependants of update install items
        update_iids_and_dependents = self.items_table.get_recursive_dependencies(
            look_for_status=self.items_table.install_status["update"])
        # mark dependants of update items, but only if they are not already marked
        self.items_table.change_status_of_iids_to_another_status(
            self.items_table.install_status["none"],
            self.items_table.install_status["depend"],
            update_iids_and_dependents,
            progress_callback=self.progress)

        all_items_to_install = self.items_table.get_iids_by_status(
            self.items_table.install_status["main"],
            self.items_table.install_status["depend"])

        config_vars["__FULL_LIST_OF_INSTALL_TARGETS__"] = sorted(
            all_items_to_install)

        self.sort_all_items_by_target_folder(consider_direct_sync=True)
        self.calc_iid_to_name_and_version()
Example #29
    def command_output(self):
        if not bool(config_vars.get('__SILENT__', "false")):

            output_format = str(config_vars.get("OUTPUT_FORMAT", 'text'))

            if output_format == "json":
                output_text = json.dumps(self.output_data,
                                         indent=1,
                                         default=utils.extra_json_serializer)
            elif output_format == "yaml":
                io_str = io.StringIO()
                for yaml_data in self.output_data:
                    aYaml.writeAsYaml(yaml_data, io_str)
                output_text = io_str.getvalue()
            else:  # output_format == "text":  text is the default format
                lines = [", ".join(line_data) for line_data in self.output_data]
                output_text = "\n".join(lines)

            out_file = config_vars.get("__MAIN_OUT_FILE__", None).Path()
            with utils.write_to_file_or_stdout(out_file) as wfd:
                wfd.write(output_text)
                wfd.write("\n")
Example #30
    def who_locks_file_error_dict(self, func, path_to_file, exc_info=None):
        """
            WINDOWS ONLY - will do nothing on other platforms.
            Updates _error_dict with a description of who is locking a file, if anyone.
            The function signature is structured to be suitable for shutil.rmtree's onerror param.
        """
        who_locks_file_dll_path = config_vars.get("__WHO_LOCKS_FILE_DLL_PATH__", None).Path()
        if who_locks_file_dll_path:
            locked_file_info = utils.who_locks_file(path_to_file, who_locks_file_dll_path)
            if locked_file_info:
                self._error_dict["locked_file_info"] = locked_file_info
        if exc_info is not None:
            raise