Example #1
    def do_up2s3(self):
        root_links_folder = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)")
        # call svn info to find out the last repo revision
        base_repo_rev = int(var_list.resolve("$(BASE_REPO_REV)"))
        last_repo_rev = self.get_last_repo_rev()
        revision_list = range(base_repo_rev, last_repo_rev+1)
        dirs_to_upload = list()
        no_need_upload_nums = list()
        yes_need_upload_nums = list()
        for dir_as_int in revision_list:
            dir_name = str(dir_as_int)
            if not os.path.isdir(var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/"+dir_name)):
                print("revision dir", dir_name, "is missing, run create-links to create this folder")
            else:
                create_links_done_stamp_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/"+dir_name+"/$(CREATE_LINKS_STAMP_FILE_NAME)")
                if not os.path.isfile(create_links_done_stamp_file):
                    print("revision dir", dir_name, "does not have create-links stamp file:", create_links_done_stamp_file)
                else:
                    up_2_s3_done_stamp_file = var_list.resolve("$(ROOT_LINKS_FOLDER_REPO)/"+dir_name+"/$(UP_2_S3_STAMP_FILE_NAME)")
                    if os.path.isfile(up_2_s3_done_stamp_file):
                        no_need_upload_nums.append(dir_name)
                    else:
                        yes_need_upload_nums.append(dir_name)
                        dirs_to_upload.append(dir_name)

        if yes_need_upload_nums:
            if no_need_upload_nums:
                no_need_upload__str = ", ".join(no_need_upload_nums)
                msg = " ".join( ("Revisions already uploaded to S3:", no_need_upload__str) )
                print(msg)
            yes_need_upload_str = ", ".join(yes_need_upload_nums)
            msg = " ".join( ("Revisions will be uploaded to S3:", yes_need_upload_str) )
            print(msg)
        else:
            msg = " ".join( ("All revisions already uploaded to S3:", str(base_repo_rev), "...", str(last_repo_rev)) )
            print(msg)

        self.batch_accum.set_current_section('upload')
        for dir_name in dirs_to_upload:
            accum = BatchAccumulator()  # sub-accumulator to collect this revision's upload instructions
            accum.set_current_section('upload')
            save_dir_var = "REV_"+dir_name+"_SAVE_DIR"
            self.batch_accum += self.platform_helper.save_dir(save_dir_var)
            var_list.set_var("__CURR_REPO_REV__").append(dir_name)
            self.do_upload_to_s3_aws_for_revision(accum)
            revision_lines = accum.finalize_list_of_lines()  # will resolve with current __CURR_REPO_REV__
            self.batch_accum += revision_lines
            self.batch_accum += self.platform_helper.restore_dir(save_dir_var)
            self.batch_accum += self.platform_helper.new_line()

        self.create_variables_assignment()
        self.write_batch_file()
        if "__RUN_BATCH_FILE__" in var_list:
            self.run_batch_file()
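
A minimal sketch of the stamp-file partitioning above, easy to exercise in isolation. The helper name partition_revisions and the plain root_folder/stamp_name arguments are hypothetical stand-ins for the resolved $(ROOT_LINKS_FOLDER_REPO) and $(UP_2_S3_STAMP_FILE_NAME) variables:

import os

def partition_revisions(root_folder, base_rev, last_rev, stamp_name="up2s3.stamp"):
    # split revisions into (already uploaded, still to upload), as do_up2s3 does
    done, todo = [], []
    for rev in range(base_rev, last_rev + 1):
        rev_dir = os.path.join(root_folder, str(rev))
        if not os.path.isdir(rev_dir):
            continue  # do_up2s3 prints a "run create-links" reminder for these
        if os.path.isfile(os.path.join(rev_dir, stamp_name)):
            done.append(str(rev))
        else:
            todo.append(str(rev))
    return done, todo
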
Example #2
    def do_create_links(self):
        self.check_prerequisite_var_existence(("REPO_NAME", "SVN_REPO_URL", "ROOT_LINKS_FOLDER_REPO"))

        self.batch_accum.set_current_section('links')

        info_as_io = None
        # call svn info to find out the last repo revision
        last_repo_rev = self.get_last_repo_rev()

        self.batch_accum += self.platform_helper.mkdir("$(ROOT_LINKS_FOLDER_REPO)/Base")

        accum = BatchAccumulator()  # sub-accumulator serves as a template for each version
        accum.set_current_section('links')
        self.create_links_for_revision(accum)

        no_need_link_nums = list()
        yes_need_link_nums = list()
        base_rev = int(var_list.resolve("$(BASE_REPO_REV)"))
        if base_rev > last_repo_rev:
            raise ValueError("base_rev "+str(base_rev)+" > last_repo_rev "+str(last_repo_rev))
        for revision in range(base_rev, last_repo_rev+1):
            if self.needToCreatelinksForRevision(revision):
                yes_need_link_nums.append(str(revision))
                save_dir_var = "REV_"+str(revision)+"_SAVE_DIR"
                self.batch_accum += self.platform_helper.save_dir(save_dir_var)
                var_list.set_var("__CURR_REPO_REV__").append(str(revision))
                revision_lines = accum.finalize_list_of_lines()  # will resolve with current __CURR_REPO_REV__
                self.batch_accum += revision_lines
                self.batch_accum += self.platform_helper.restore_dir(save_dir_var)
                self.batch_accum += self.platform_helper.new_line()
            else:
                no_need_link_nums.append(str(revision))

        if yes_need_link_nums:
            if no_need_link_nums:
                no_need_links_str = ", ".join(no_need_link_nums)
                msg = " ".join( ("Links already created for revisions:", no_need_links_str) )
                print(msg)
            yes_need_links_str = ", ".join(yes_need_link_nums)
            msg = " ".join( ("Need to create links for revisions:", yes_need_links_str) )
            print(msg)
        else:
            msg = " ".join( ("Links already created for all revisions:", str(base_rev), "...", str(last_repo_rev)) )
            print(msg)

        self.create_variables_assignment()
        self.write_batch_file()
        if "__RUN_BATCH_FILE__" in var_list:
            self.run_batch_file()
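
The loop above builds the sub-accumulator once (via create_links_for_revision) and finalizes it once per revision, after re-pointing __CURR_REPO_REV__, so variable resolution happens late. A stripped-down sketch of that template idea, with a toy class standing in for BatchAccumulator (all names here are illustrative only):

class ToyAccumulator(object):
    # toy stand-in for BatchAccumulator: collect line templates, resolve late
    def __init__(self):
        self.lines = []

    def add(self, line):
        self.lines.append(line)

    def finalize(self, variables):
        # resolution happens at finalize time, so one template can serve
        # every revision by changing the variables between calls
        return [line % variables for line in self.lines]

template = ToyAccumulator()
template.add("create links for revision %(rev)s")  # built once
batch = []
for rev in (101, 102, 103):
    batch.extend(template.finalize({"rev": rev}))  # resolved per revision
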
Example #3
    def __init__(self, initial_vars=None):
        # init objects owned by this class
        self.path_searcher = SearchPaths(var_list.get_configVar_obj("__SEARCH_PATHS__"))
        self.init_default_vars(initial_vars)
        # initialize the search paths helper with the current directory and dir where instl is now
        self.path_searcher.add_search_path(os.getcwd())
        self.path_searcher.add_search_path(os.path.dirname(os.path.realpath(sys.argv[0])))
        self.path_searcher.add_search_path(var_list.resolve("$(__INSTL_DATA_FOLDER__)"))

        self.platform_helper = PlatformSpecificHelperFactory(var_list.resolve("$(__CURRENT_OS__)"), self)
        self.platform_helper.init_copy_tool() # init initial copy tool, tool might be later overridden after reading variable COPY_TOOL from yaml.


        self.install_definitions_index = dict()
        self.batch_accum = BatchAccumulator()
        self.do_not_write_vars = ("INFO_MAP_SIG", "INDEX_SIG", "PUBLIC_KEY")
        self.out_file_realpath = None
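
The three add_search_path calls register the current directory, the directory of the running script, and $(__INSTL_DATA_FOLDER__). A minimal sketch of the role SearchPaths plays with them, using a hypothetical class rather than instl's actual implementation:

import os

class MiniSearchPaths(object):
    # try each registered folder, in registration order, when locating a file
    def __init__(self):
        self.paths = []

    def add_search_path(self, path):
        if path and path not in self.paths:
            self.paths.append(path)

    def find_file(self, name, return_original_if_not_found=False):
        for folder in self.paths:
            candidate = os.path.join(folder, name)
            if os.path.isfile(candidate):
                return candidate
        return name if return_original_if_not_found else None
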
Example #4
    def __init__(self, initial_vars=None):
        # init objects owned by this class

        # only when allow_reading_of_internal_vars is true, variables whose name begins and ends with "__"
        # can be read from a file
        self.allow_reading_of_internal_vars = False
        self.path_searcher = SearchPaths(var_stack.get_configVar_obj("__SEARCH_PATHS__"))
        self.init_default_vars(initial_vars)
        # initialize the search paths helper with the current directory and dir where instl is now
        self.path_searcher.add_search_path(os.getcwd())
        self.path_searcher.add_search_path(os.path.dirname(os.path.realpath(sys.argv[0])))
        self.path_searcher.add_search_path(var_stack.resolve("$(__INSTL_DATA_FOLDER__)"))

        self.read_user_config()

        self.platform_helper = PlatformSpecificHelperFactory(var_stack.resolve("$(__CURRENT_OS__)"), self)
        # init initial copy tool, tool might be later overridden after reading variable COPY_TOOL from yaml.
        self.platform_helper.init_copy_tool()

        self.install_definitions_index = dict()
        self.batch_accum = BatchAccumulator()
        self.do_not_write_vars = ("INFO_MAP_SIG", "INDEX_SIG", "PUBLIC_KEY", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "__CREDENTIALS__")
        self.out_file_realpath = None
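
read_user_config (shown in full in Example #6) temporarily flips allow_reading_of_internal_vars around the file read and then restores the previous value. The same save/flip/restore dance could be packaged as a context manager; a sketch, not part of instl itself:

from contextlib import contextmanager

@contextmanager
def allow_internal_vars(instance):
    # save/flip/restore, equivalent to what read_user_config does inline
    previous = instance.allow_reading_of_internal_vars
    instance.allow_reading_of_internal_vars = True
    try:
        yield instance
    finally:
        instance.allow_reading_of_internal_vars = previous
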
Example #5
    def create_download_instructions(self):
        self.instlObj.batch_accum.set_current_section("sync")
        file_list, dir_list = self.work_info_map.sorted_sub_items()

        if len(file_list) + len(dir_list) == 0:
            print("0 files to sync")
            print("0 bytes to sync")
            return
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Starting sync from $(SYNC_BASE_URL)")
        self.instlObj.batch_accum += self.instlObj.platform_helper.mkdir("$(LOCAL_REPO_SYNC_DIR)")
        self.instlObj.batch_accum += self.instlObj.platform_helper.pushd("$(LOCAL_REPO_SYNC_DIR)")
        self.sync_base_url = var_stack.resolve("$(SYNC_BASE_URL)")

        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()

        prefix_accum = BatchAccumulator()  # sub-accumulator for prefix instructions
        prefix_accum.set_current_section("sync")
        for need_item in file_list + dir_list:
            self.create_prefix_instructions_for_item(prefix_accum, need_item)
        if len(prefix_accum) > 0:
            self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Pre download processing")
            self.instlObj.batch_accum.merge_with(prefix_accum)
            self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()

        num_dirs_to_create = self.work_info_map.num_subs_in_tree(what="dir")
        logging.info("Num directories to create: %d", num_dirs_to_create)
        if num_dirs_to_create > 0:
            self.instlObj.batch_accum += self.instlObj.platform_helper.create_folders("$(TO_SYNC_INFO_MAP_PATH)")
            self.instlObj.platform_helper.num_items_for_progress_report += num_dirs_to_create
            self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Create folders")
            self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()

        self.work_info_map.set_user_data_all_recursive(False)  # items that need checksum will be marked True

        for need_item in file_list + dir_list:
            self.create_download_instructions_for_item(need_item)

        var_stack.add_const_config_variable(
            "__NUM_FILES_TO_DOWNLOAD__",
            "create_download_instructions",
            self.instlObj.platform_helper.dl_tool.get_num_urls_to_download(),
        )

        print(self.instlObj.platform_helper.dl_tool.get_num_urls_to_download(), "files to sync")
        bytes_to_sync = self.work_info_map.safe_size  # backward compatibility for info_maps that do not have sizes
        if bytes_to_sync == -1:
            print("Unknown number of bytes to sync")
        else:
            print(bytes_to_sync, "bytes to sync")
            free_bytes = get_disk_free_space(var_stack.resolve("$(LOCAL_REPO_SYNC_DIR)"))
            if bytes_to_sync > free_bytes:
                print("not enough disk space for sync:", bytes_to_sync - free_bytes, "bytes missing")

        logging.info("Num files to sync: %d", self.instlObj.platform_helper.dl_tool.get_num_urls_to_download())

        curl_config_folder = var_stack.resolve("$(LOCAL_REPO_BOOKKEEPING_DIR)/curl", raise_on_fail=True)
        safe_makedirs(curl_config_folder)
        curl_config_file_path = var_stack.resolve(
            os.path.join(curl_config_folder, "$(CURL_CONFIG_FILE_NAME)"), raise_on_fail=True
        )
        num_config_files = int(var_stack.resolve("$(PARALLEL_SYNC)"))
        config_file_list = self.instlObj.platform_helper.dl_tool.create_config_files(
            curl_config_file_path, num_config_files
        )
        logging.info("Num parallel syncs: %d", len(config_file_list))
        if len(config_file_list) > 0:
            self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
            self.instlObj.batch_accum += self.instlObj.platform_helper.progress(
                "Downloading with " + str(len(config_file_list)) + " processes in parallel"
            )
            parallel_run_config_file_path = var_stack.resolve(
                os.path.join(curl_config_folder, "$(CURL_CONFIG_FILE_NAME).parallel-run")
            )
            self.instlObj.batch_accum += self.instlObj.platform_helper.dl_tool.download_from_config_files(
                parallel_run_config_file_path, config_file_list
            )
            self.instlObj.batch_accum += self.instlObj.platform_helper.progress(
                "Downloading " + str(self.files_to_download) + " files done", self.files_to_download
            )
            self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()

        num_files_to_check = self.work_info_map.num_subs_in_tree(what="file")
        logging.info("Num files to checksum check: %d", num_files_to_check)
        if num_files_to_check > 0:
            self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Checking checksum...")
            self.instlObj.batch_accum += self.instlObj.platform_helper.check_checksum_for_folder(
                "$(TO_SYNC_INFO_MAP_PATH)"
            )
            self.instlObj.platform_helper.num_items_for_progress_report += num_files_to_check
            self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Check checksum done")
            self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()

        self.instlObj.batch_accum += self.instlObj.platform_helper.popd()
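
The free-space guard above compares bytes_to_sync against get_disk_free_space. On POSIX systems the same figure can be read via os.statvfs; a sketch under that assumption (the real helper is presumably platform-aware):

import os

def get_disk_free_space_posix(path):
    # free bytes available to a non-root user at path; POSIX-only sketch
    st = os.statvfs(path)
    return st.f_bavail * st.f_frsize
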
Example #6
class InstlInstanceBase(object):
    """ Main object of instl. Keeps the state of variables and install index
        and knows how to create a batch file for installation. InstlInstanceBase
        must be inherited by platform specific implementations, such as InstlInstance_mac
        or InstlInstance_win.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, initial_vars=None):
        # init objects owned by this class

        # only when allow_reading_of_internal_vars is true, variables whose name begins and ends with "__"
        # can be read from a file
        self.allow_reading_of_internal_vars = False
        self.path_searcher = SearchPaths(var_stack.get_configVar_obj("__SEARCH_PATHS__"))
        self.init_default_vars(initial_vars)
        # initialize the search paths helper with the current directory and dir where instl is now
        self.path_searcher.add_search_path(os.getcwd())
        self.path_searcher.add_search_path(os.path.dirname(os.path.realpath(sys.argv[0])))
        self.path_searcher.add_search_path(var_stack.resolve("$(__INSTL_DATA_FOLDER__)"))

        self.read_user_config()

        self.platform_helper = PlatformSpecificHelperFactory(var_stack.resolve("$(__CURRENT_OS__)"), self)
        # init initial copy tool, tool might be later overridden after reading variable COPY_TOOL from yaml.
        self.platform_helper.init_copy_tool()

        self.install_definitions_index = dict()
        self.batch_accum = BatchAccumulator()
        self.do_not_write_vars = ("INFO_MAP_SIG", "INDEX_SIG", "PUBLIC_KEY", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "__CREDENTIALS__")
        self.out_file_realpath = None

    def get_version_str(self):
        retVal = var_stack.resolve(
            "$(INSTL_EXEC_DISPLAY_NAME) version $(__INSTL_VERSION__) $(__COMPILATION_TIME__) $(__PLATFORM_NODE__)",
            list_sep=".", default="")
        return retVal

    def init_default_vars(self, initial_vars):
        if initial_vars:
            var_description = "from initial_vars"
            for var, value in initial_vars.iteritems():
                if isinstance(value, basestring):
                    var_stack.add_const_config_variable(var, var_description, value)
                else:
                    var_stack.add_const_config_variable(var, var_description, *value)

        var_description = "from InstlInstanceBase.init_default_vars"

        # read defaults/main.yaml
        main_defaults_file_path = os.path.join(var_stack.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults", "main.yaml")
        self.read_yaml_file(main_defaults_file_path)

        # read defaults/compile-info.yaml
        compile_info_file_path = os.path.join(var_stack.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults",
                                              "compile-info.yaml")
        if os.path.isfile(compile_info_file_path):
            self.read_yaml_file(compile_info_file_path)
        if "__COMPILATION_TIME__" not in var_stack:
            if var_stack.resolve("$(__INSTL_COMPILED__)") == "True":
                var_stack.add_const_config_variable("__COMPILATION_TIME__", var_description, "unknown compilation time")
            else:
                var_stack.add_const_config_variable("__COMPILATION_TIME__", var_description, "(not compiled)")

        # read class specific defaults/*.yaml
        self.read_name_specific_defaults_file(type(self).__name__)

        log_file = pyinstl.log_utils.get_log_file_path(var_stack.resolve("$(INSTL_EXEC_DISPLAY_NAME)"),
                                                       var_stack.resolve("$(INSTL_EXEC_DISPLAY_NAME)"), debug=False)
        var_stack.set_var("LOG_FILE", var_description).append(log_file)
        debug_log_file = pyinstl.log_utils.get_log_file_path(var_stack.resolve("$(INSTL_EXEC_DISPLAY_NAME)"),
                                                             var_stack.resolve("$(INSTL_EXEC_DISPLAY_NAME)"), debug=True)
        var_stack.set_var("LOG_FILE_DEBUG", var_description).extend((
                        debug_log_file, logging.getLevelName(pyinstl.log_utils.debug_logging_level),
                        pyinstl.log_utils.debug_logging_started))

    def read_name_specific_defaults_file(self, file_name):
        """ read class specific file from defaults/class_name.yaml """
        name_specific_defaults_file_path = os.path.join(var_stack.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults",
                                                         file_name + ".yaml")
        if os.path.isfile(name_specific_defaults_file_path):
            self.read_yaml_file(name_specific_defaults_file_path)

    def read_user_config(self):
        user_config_path = var_stack.resolve("$(__USER_CONFIG_FILE_PATH__)")
        if os.path.isfile(user_config_path):
            previous_allow_reading_of_internal_vars = self.allow_reading_of_internal_vars
            self.allow_reading_of_internal_vars = True
            self.read_yaml_file(user_config_path)
            self.allow_reading_of_internal_vars = previous_allow_reading_of_internal_vars

    def check_prerequisite_var_existence(self, prerequisite_vars):
        missing_vars = [var for var in prerequisite_vars if var not in var_stack]
        if len(missing_vars) > 0:
            msg = "Prerequisite variables were not defined: " + ", ".join(missing_vars)
            logging.info(msg)
            raise ValueError(msg)

    def init_from_cmd_line_options(self, cmd_line_options_obj):
        """ turn command line options into variables """
        const_attrib_to_var = {
            "input_file": ("__MAIN_INPUT_FILE__", None),
            "output_file": ("__MAIN_OUT_FILE__", None),
            "props_file": ("__PROPS_FILE__", None),
            "config_file": ("__CONFIG_FILE__", None),
            "sh1_checksum": ("__SHA1_CHECKSUM__", None),
            "rsa_signature": ("__RSA_SIGNATURE__", None),
            "start_progress": ("__START_DYNAMIC_PROGRESS__", "0"),
            "total_progress": ("__TOTAL_DYNAMIC_PROGRESS__", "0"),
            "just_with_number": ("__JUST_WITH_NUMBER__", "0"),
            "limit_command_to": ("__LIMIT_COMMAND_TO__", None),
            "shortcut_path": ("__SHORTCUT_PATH__", None),
            "target_path": ("__SHORTCUT_TARGET_PATH__", None),
            "credentials": ("__CREDENTIALS__", None),
            "base_url": ("__BASE_URL__", None),
            "file_sizes_file": ("__FILE_SIZES_FILE__", None)
        }

        for attrib, var in const_attrib_to_var.iteritems():
            attrib_value = getattr(cmd_line_options_obj, attrib)
            if attrib_value:
                var_stack.add_const_config_variable(var[0], "from command line options", *attrib_value)
            elif var[1] is not None:  # there's a default
                var_stack.add_const_config_variable(var[0], "from default", var[1])

        non_const_attrib_to_var = {
            "filter_in": "__FILTER_IN_VERSION__",
            "target_repo_rev": "TARGET_REPO_REV",
            "base_repo_rev": "BASE_REPO_REV",
        }

        for attrib, var in non_const_attrib_to_var.iteritems():
            attrib_value = getattr(cmd_line_options_obj, attrib)
            if attrib_value:
                var_stack.set_var(var, "from command line options").append(attrib_value[0])

        if cmd_line_options_obj.command:
            var_stack.set_var("__MAIN_COMMAND__", "from command line options").append(cmd_line_options_obj.command)

        if hasattr(cmd_line_options_obj, "subject") and cmd_line_options_obj.subject is not None:
            var_stack.add_const_config_variable("__HELP_SUBJECT__", "from command line options",
                                               cmd_line_options_obj.subject)
        else:
            var_stack.add_const_config_variable("__HELP_SUBJECT__", "from command line options", "")

        if cmd_line_options_obj.state_file:
            var_stack.add_const_config_variable("__MAIN_STATE_FILE__", "from command line options",
                                               cmd_line_options_obj.state_file)
        if cmd_line_options_obj.filter_out:
            var_stack.add_const_config_variable("__FILTER_OUT_PATHS__", "from command line options",
                                               *cmd_line_options_obj.filter_out)
        if cmd_line_options_obj.run:
            var_stack.add_const_config_variable("__RUN_BATCH__", "from command line options", "yes")

        if cmd_line_options_obj.no_wtar_artifacts:
            var_stack.add_const_config_variable("__NO_WTAR_ARTIFACTS__", "from command line options", "yes")

        # if credentials were given...
        credentials = None
        if "__CREDENTIALS__" in var_stack:
            credentials = var_stack.resolve_var("__CREDENTIALS__", default=None)

        connection_factory(credentials)

    def is_acceptable_yaml_doc(self, doc_node):
        acceptables = var_stack.resolve_to_list("$(ACCEPTABLE_YAML_DOC_TAGS)") + ["define", "define_const", "index", 'require']
        if "__INSTL_COMPILED__" in var_stack:
            if var_stack.resolve("$(__INSTL_COMPILED__)") == "True":
                acceptables.append("define_Compiled")
            else:
                acceptables.append("define_Uncompiled")
        acceptables = ["!" + acceptable for acceptable in acceptables]
        retVal = doc_node.tag in acceptables
        return retVal

    def read_yaml_from_stream(self, the_stream):
        for a_node in yaml.compose_all(the_stream):
            if self.is_acceptable_yaml_doc(a_node):
                if a_node.tag.startswith('!define_const'):
                    self.read_const_defines(a_node)
                elif a_node.tag.startswith('!define'):
                    self.read_defines(a_node)
                elif a_node.tag.startswith('!index'):
                    self.read_index(a_node)
                elif a_node.tag.startswith('!require'):
                    self.read_require(a_node)
                else:
                    logging.error(
                        "Unknown document tag '%s'; tag should be one of: !define, !define_const, !index, !require",
                        a_node.tag)
        if not self.check_version_compatibility():
            raise ValueError(var_stack.resolve("Minimal instl version $(INSTL_MINIMAL_VERSION) > current version $(__INSTL_VERSION__); ")+var_stack.get_configVar_obj("INSTL_MINIMAL_VERSION").description)

    def read_yaml_file(self, file_path):
        logging.info("%s", file_path)
        with open_for_read_file_or_url(file_path, self.path_searcher) as file_fd:
            buffer = StringIO.StringIO(file_fd.read())
            self.read_yaml_from_stream(buffer)
        var_stack.get_configVar_obj("__READ_YAML_FILES__").append(file_path)

    def read_require(self, a_node):
        #dependencies_file_path = var_stack.resolve("$(SITE_REQUIRE_FILE_PATH)")
        if a_node.isMapping():
            for identifier, contents in a_node:
                logging.debug("%s: %s", identifier, str(contents))
                if identifier in self.install_definitions_index:
                    self.install_definitions_index[identifier].required_by.extend([required_iid.value for required_iid in contents])
                else:
                    # require file might contain IIDs from previous installations that are no longer in the index
                    item_not_in_index = InstallItem()
                    item_not_in_index.iid = identifier
                    item_not_in_index.required_by.extend([required_iid.value for required_iid in contents])
                    self.install_definitions_index[identifier] = item_not_in_index


    def write_require_file(self, file_path):
        require_dict = dict()
        for IID in sorted(self.install_definitions_index.iterkeys()):
            if len(self.install_definitions_index[IID].required_by) > 0:
                require_dict[IID] = sorted(self.install_definitions_index[IID].required_by)
        with open(file_path, "w") as wfd:
            make_open_file_read_write_for_all(wfd)
            require_dict = augmentedYaml.YamlDumpDocWrap(require_dict, '!require', "requirements",
                                                        explicit_start=True, sort_mappings=True)
            augmentedYaml.writeAsYaml(require_dict, wfd)

    internal_identifier_re = re.compile(r"""
                                        __                  # dunder here
                                        (?P<internal_identifier>\w*)
                                        __                  # dunder there
                                        """, re.VERBOSE)

    def resolve_defined_paths(self):
        self.path_searcher.add_search_paths(var_stack.resolve_to_list("$(SEARCH_PATHS)"))
        for path_var_to_resolve in var_stack.resolve_to_list("$(PATHS_TO_RESOLVE)"):
            if path_var_to_resolve in var_stack:
                resolved_path = self.path_searcher.find_file(var_stack.resolve_var(path_var_to_resolve),
                                                             return_original_if_not_found=True)
                var_stack.set_var(path_var_to_resolve, "resolve_defined_paths").append(resolved_path)

    def read_defines(self, a_node):
        # if document is empty we get a scalar node
        if a_node.isMapping():
            for identifier, contents in a_node:
                logging.debug("%s: %s", identifier, str(contents))
                if identifier == '__include__':
                    self.read_include_node(contents)
                elif self.allow_reading_of_internal_vars or not self.internal_identifier_re.match(identifier):  # do not read internal state identifiers
                    var_stack.set_var(identifier, str(contents.start_mark)).extend([item.value for item in contents])

    def read_const_defines(self, a_node):
        """ Read a !define_const sub-doc. All variables will be made const.
            Reading of internal state identifiers is allowed.
            __include__ is not allowed.
        """
        if a_node.isMapping():
            for identifier, contents in a_node:
                if identifier == "__include__":
                    raise ValueError("!define_const doc cannot except __include__")
                logging.debug("%s: %s", identifier, str(contents))
                var_stack.add_const_config_variable(identifier, "from !define_const section",
                                                   *[item.value for item in contents])

    def provision_public_key_text(self):
        if "PUBLIC_KEY" not in var_stack:
            if "PUBLIC_KEY_FILE" in var_stack:
                public_key_file = var_stack.resolve("$(PUBLIC_KEY_FILE)")
                with open_for_read_file_or_url(public_key_file, self.path_searcher) as file_fd:
                    public_key_text = file_fd.read()
                    var_stack.set_var("PUBLIC_KEY", "from " + public_key_file).append(public_key_text)
            else:
                raise ValueError("No public key, variables PUBLIC_KEY & PUBLIC_KEY_FILE are not defined")
        resolved_public_key = var_stack.resolve("$(PUBLIC_KEY)")
        return resolved_public_key

    def read_include_node(self, i_node):
        if i_node.isScalar():
            resolved_file_name = var_stack.resolve(i_node.value)
            self.read_yaml_file(resolved_file_name)
        elif i_node.isSequence():
            for sub_i_node in i_node:
                self.read_include_node(sub_i_node)
        elif i_node.isMapping():
            if "url" in i_node:
                resolved_file_url = var_stack.resolve(i_node["url"].value)
                cached_files_dir = self.get_default_sync_dir(continue_dir="cache", make_dir=True)
                cached_file_path = None
                expected_checksum = None
                if "checksum" in i_node:
                    expected_checksum = var_stack.resolve(i_node["checksum"].value)
                    cached_file_path = os.path.join(cached_files_dir, expected_checksum)

                expected_signature = None
                public_key_text = None
                if "sig" in i_node:
                    expected_signature = var_stack.resolve(i_node["sig"].value)
                    public_key_text = self.provision_public_key_text()

                if expected_checksum is None:
                    self.read_yaml_file(resolved_file_url)
                    cached_file_path = resolved_file_url
                else:
                    download_from_file_or_url(resolved_file_url, cached_file_path, cache=True,
                                              public_key=public_key_text,
                                              textual_sig=expected_signature,
                                              expected_checksum=expected_checksum)
                    self.read_yaml_file(cached_file_path)

                if "copy" in i_node:
                    self.batch_accum.set_current_section('post')
                    for copy_destination in i_node["copy"]:
                        destination_folder, destination_file_name = os.path.split(copy_destination.value)
                        self.batch_accum += self.platform_helper.mkdir(destination_folder)
                        self.batch_accum += self.platform_helper.copy_tool.copy_file_to_file(cached_file_path,
                                                                                             var_stack.resolve(
                                                                                                copy_destination.value),
                                                                                             link_dest=True)

    def create_variables_assignment(self):
        self.batch_accum.set_current_section("assign")
        for identifier in var_stack:
            if identifier not in self.do_not_write_vars:
                self.batch_accum += self.platform_helper.var_assign(identifier, var_stack.resolve_var(identifier),
                                                                    None)  # var_stack[identifier].resolved_num

    def calc_user_cache_dir_var(self, make_dir=True):
        if "USER_CACHE_DIR" not in var_stack:
            os_family_name = var_stack.resolve("$(__CURRENT_OS__)")
            if os_family_name == "Mac":
                user_cache_dir_param = "$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)"
                user_cache_dir = appdirs.user_cache_dir(user_cache_dir_param)
            elif os_family_name == "Win":
                user_cache_dir = appdirs.user_cache_dir("$(INSTL_EXEC_DISPLAY_NAME)", "$(COMPANY_NAME)")
            elif os_family_name == "Linux":
                user_cache_dir_param = "$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)"
                user_cache_dir = appdirs.user_cache_dir(user_cache_dir_param)
            var_description = "from InstlInstanceBase.get_user_cache_dir"
            var_stack.set_var("USER_CACHE_DIR", var_description).append(user_cache_dir)
        if make_dir:
            user_cache_dir_resolved = var_stack.resolve("$(USER_CACHE_DIR)", raise_on_fail=True)
            safe_makedirs(user_cache_dir_resolved)

    def get_default_sync_dir(self, continue_dir=None, make_dir=True):
        self.calc_user_cache_dir_var()
        if continue_dir:
            retVal = os.path.join("$(USER_CACHE_DIR)", continue_dir)
        else:
            retVal = "$(USER_CACHE_DIR)"
        # print("1------------------", user_cache_dir, "-", from_url, "-", retVal)
        if make_dir and retVal:
            retVal = var_stack.resolve(retVal, raise_on_fail=True)
            safe_makedirs(retVal)
        return retVal

    def relative_sync_folder_for_source(self, source):
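        # e.g. ("a/b/c.txt", '!file') -> "a/b"; ("a/b", '!dir_cont') -> "a/b"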
        if source[1] in ('!dir', '!file'):
            retVal = "/".join(source[0].split("/")[0:-1])
        elif source[1] in ('!dir_cont', '!files'):
            retVal = source[0]
        else:
            raise ValueError("unknown tag for source " + source[0] + ": " + source[1])
        return retVal

    def write_batch_file(self):
        self.batch_accum.set_current_section('pre')
        self.batch_accum += self.platform_helper.get_install_instructions_prefix()
        self.batch_accum.set_current_section('post')
        var_stack.set_var("TOTAL_ITEMS_FOR_PROGRESS_REPORT").append(
            str(self.platform_helper.num_items_for_progress_report))
        self.batch_accum += self.platform_helper.get_install_instructions_postfix()
        lines = self.batch_accum.finalize_list_of_lines()
        lines_after_var_replacement = '\n'.join(
            [value_ref_re.sub(self.platform_helper.var_replacement_pattern, line) for line in lines])

        from utils import write_to_file_or_stdout

        out_file = var_stack.resolve("$(__MAIN_OUT_FILE__)", raise_on_fail=True)
        with write_to_file_or_stdout(out_file) as fd:
            fd.write(lines_after_var_replacement)
            fd.write('\n')

        if out_file != "stdout":
            self.out_file_realpath = os.path.realpath(out_file)
            # chmod to 0777 so that a file created under sudo can be re-written under a regular user.
            # However a regular user cannot chmod a file created under sudo, hence the try/except
            try:
                os.chmod(self.out_file_realpath, 0777)
            except:
                pass
        else:
            self.out_file_realpath = "stdout"
        msg = " ".join(
            (self.out_file_realpath, str(self.platform_helper.num_items_for_progress_report), "progress items"))
        print(msg)
        logging.info(msg)

    def run_batch_file(self):
        logging.info("running batch file %s", self.out_file_realpath)
        from subprocess import Popen

        p = Popen([self.out_file_realpath], executable=self.out_file_realpath, shell=False)
        unused_stdout, unused_stderr = p.communicate()
        retcode = p.returncode
        if retcode != 0:
            raise SystemExit(self.out_file_realpath + " returned exit code " + str(retcode))

    def write_program_state(self):
        from utils import write_to_file_or_stdout

        state_file = var_stack.resolve("$(__MAIN_STATE_FILE__)", raise_on_fail=True)
        with write_to_file_or_stdout(state_file) as fd:
            augmentedYaml.writeAsYaml(self, fd)

    def read_index(self, a_node):
        self.install_definitions_index.update(read_index_from_yaml(a_node))

    def find_cycles(self):
        if not self.install_definitions_index:
            print("index empty - nothing to check")
        else:
            try:
                from pyinstl import installItemGraph

                depend_graph = installItemGraph.create_dependencies_graph(self.install_definitions_index)
                depend_cycles = installItemGraph.find_cycles(depend_graph)
                if not depend_cycles:
                    print("No depend cycles found")
                else:
                    for cy in depend_cycles:
                        print("depend cycle:", " -> ".join(cy))
                inherit_graph = installItemGraph.create_inheritItem_graph(self.install_definitions_index)
                inherit_cycles = installItemGraph.find_cycles(inherit_graph)
                if not inherit_cycles:
                    print("No inherit cycles found")
                else:
                    for cy in inherit_cycles:
                        print("inherit cycle:", " -> ".join(cy))
            except ImportError:  # no installItemGraph, no worry
                print("Could not load installItemGraph")

    def read_info_map_file(self, in_file_path, a_format="guess"):
        self.svnTree.read_info_map_from_file(in_file_path, a_format)

    def write_info_map_file(self):
        self.svnTree.write_to_file(var_stack.resolve("$(__MAIN_OUT_FILE__)", raise_on_fail=True))

    def check_version_compatibility(self):
        retVal = True
        if "INSTL_MINIMAL_VERSION" in var_stack:
            inst_ver = map(int, var_stack.resolve_to_list("$(__INSTL_VERSION__)"))
            required_ver = map(int, var_stack.resolve_to_list("$(INSTL_MINIMAL_VERSION)"))
            retVal = inst_ver >= required_ver
        return retVal

    wtar_file_re = re.compile(r"""(?P<base_name>.+?)(\.wtar(\.[a-z]{2})?)?$""")

    # Given a name remove the trailing wtar or wtar.?? if any
    # E.g. "a" => "a", "a.wtar" => "a", "a.wtar.aa" => "a"
    def original_name_from_wtar_name(self, wtar_name):
        original_name = self.wtar_file_re.match(wtar_name).group('base_name')
        return original_name

    # Given a list of file/folder names, replace those which are wtared with the original file name.
    # E.g. ['a', 'b.wtar', 'c.wtar.aa', 'c.wtar.ab'] => ['a', 'b', 'c']
    # We must work on the whole list since several wtar file names might merge to a single original file name.
    def replace_wtar_names_with_real_names(self, original_list):
        replaced_list = unique_list()
        replaced_list.extend([self.original_name_from_wtar_name(file_name) for file_name in original_list])
        return replaced_list

    def needs(self, iid, out_list):
        """ return iids of all items that a specific iid depends on"""
        if iid not in self.install_definitions_index:
            raise KeyError(iid + " is not in index")
        InstallItem.begin_get_for_all_oses()
        with self.install_definitions_index[iid]:
            for dep in var_stack.resolve_var_to_list("iid_depend_list"):
                if dep in self.install_definitions_index:
                    out_list.append(dep)
                    self.needs(dep, out_list)
                else:
                    out_list.append(dep + "(missing)")
        InstallItem.reset_get_for_all_oses()

    def needed_by(self, iid):
        try:
            from pyinstl import installItemGraph

            InstallItem.begin_get_for_all_oses()
            graph = installItemGraph.create_dependencies_graph(self.install_definitions_index)
            needed_by_list = installItemGraph.find_needed_by(graph, iid)
            InstallItem.reset_get_for_all_oses()
            return needed_by_list
        except ImportError:  # no installItemGraph, no worry
            print("Could not load installItemGraph")
            return None

    def resolve_index_inheritance(self):
        for install_def in self.install_definitions_index.values():
            install_def.resolve_inheritance(self.install_definitions_index)
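
check_version_compatibility relies on Python's element-wise comparison of integer lists, which is why both versions are mapped through int first: compared as strings, "1.10.0" would sort below "1.9.0". A minimal illustration of the same comparison:

def version_at_least(current, required):
    # compare dotted version strings numerically, as check_version_compatibility does
    def to_ints(version):
        return [int(part) for part in version.split(".")]
    return to_ints(current) >= to_ints(required)

assert version_at_least("1.10.0", "1.9.0")   # numeric: 10 >= 9
assert not ("1.10.0" >= "1.9.0")             # plain string comparison gets this wrong
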
Example #7
    def create_download_instructions(self):
        self.instlObj.batch_accum.set_current_section('sync')
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Starting sync from $(SYNC_BASE_URL)/$(SOURCE_PREFIX)")
        self.instlObj.batch_accum += self.instlObj.platform_helper.mkdir("$(LOCAL_REPO_SYNC_DIR)")
        self.instlObj.batch_accum += self.instlObj.platform_helper.cd("$(LOCAL_REPO_SYNC_DIR)")
        self.sync_base_url = var_list.resolve("$(SYNC_BASE_URL)")

        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()

        file_list, dir_list = self.work_info_map.sorted_sub_items()

        prefix_accum = BatchAccumulator() # sub-accumulator for prefix instructions
        prefix_accum.set_current_section('sync')
        for need_item in file_list + dir_list:
            self.create_prefix_instructions_for_item(prefix_accum, need_item)
        if len(prefix_accum) > 0:
            self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Pre download processing")
            self.instlObj.batch_accum.merge_with(prefix_accum)
            self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()

        num_dirs_to_create = self.work_info_map.num_subs_in_tree(what="dir")
        logging.info("Num directories to create: %d", num_dirs_to_create)
        self.instlObj.batch_accum += self.instlObj.platform_helper.create_folders("$(TO_SYNC_INFO_MAP_PATH)")
        self.instlObj.platform_helper.num_items_for_progress_report += num_dirs_to_create
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Create folders")
        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()

        self.work_info_map.set_user_data_all_recursive(False) # items that need checksum will be marked True
        for need_item in file_list + dir_list:
            self.create_download_instructions_for_item(need_item)

        var_list.add_const_config_variable("__NUM_FILES_TO_DOWNLOAD__", "create_download_instructions", self.instlObj.platform_helper.dl_tool.get_num_urls_to_download())

        print(self.instlObj.platform_helper.dl_tool.get_num_urls_to_download(), "files to sync")
        logging.info("Num files to sync: %d", self.instlObj.platform_helper.dl_tool.get_num_urls_to_download())

        curl_config_folder = var_list.resolve(os.path.join("$(LOCAL_REPO_SYNC_DIR)", "curl"))
        safe_makedirs(curl_config_folder)
        curl_config_file_path = var_list.resolve(os.path.join(curl_config_folder, "$(CURL_CONFIG_FILE_NAME)"))
        num_config_files = int(var_list.resolve("$(PARALLEL_SYNC)"))
        config_file_list = self.instlObj.platform_helper.dl_tool.create_config_files(curl_config_file_path, num_config_files)
        logging.info("Num parallel syncs: %d", len(config_file_list))
        if len(config_file_list) > 0:
            self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
            self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Downloading with "+str(len(config_file_list))+" processes in parallel")
            parallel_run_config_file_path = var_list.resolve(os.path.join(curl_config_folder, "$(CURL_CONFIG_FILE_NAME).parallel-run"))
            self.instlObj.batch_accum += self.instlObj.platform_helper.dl_tool.download_from_config_files(parallel_run_config_file_path, config_file_list)
            self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Downloading "+str(self.files_to_download)+" files done", self.files_to_download)
            self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()

        num_files_to_check = self.work_info_map.num_subs_in_tree(what="file")
        logging.info("Num files to checksum check: %d", num_files_to_check)
        if num_files_to_check > 0:
            self.instlObj.batch_accum += self.instlObj.platform_helper.check_checksum_for_folder("$(TO_SYNC_INFO_MAP_PATH)")
            self.instlObj.platform_helper.num_items_for_progress_report += num_files_to_check
            self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Check checksum done")
            self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()

        num_files_to_unwtar_estimation = self.estimate_num_unwtar_actions()
        logging.info("Num files to unwtar: %d", num_files_to_unwtar_estimation)
        self.instlObj.batch_accum += self.instlObj.platform_helper.unwtar_current_folder()
        self.instlObj.platform_helper.num_items_for_progress_report += num_files_to_unwtar_estimation
        self.instlObj.batch_accum += self.instlObj.platform_helper.progress("Unwtar done")
        self.instlObj.batch_accum += self.instlObj.platform_helper.new_line()
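
create_config_files fans the accumulated download list out into $(PARALLEL_SYNC) curl config files, one per download process. A round-robin sketch of such a split; the function name and the url = "..." line format are assumptions, not dl_tool's actual output:

def split_into_curl_configs(urls, base_path, num_files):
    # round-robin urls into num_files config files; returns the paths written
    if not urls:
        return []
    num_files = max(1, min(num_files, len(urls)))
    paths = ["%s.%02d" % (base_path, i) for i in range(num_files)]
    buckets = [[] for _ in range(num_files)]
    for i, url in enumerate(urls):
        buckets[i % num_files].append(url)
    for path, bucket in zip(paths, buckets):
        with open(path, "w") as fd:
            fd.write("\n".join('url = "%s"' % u for u in bucket) + "\n")
    return paths
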
Example #8
class InstlInstanceBase(object):
    """ Main object of instl. Keeps the state of variables and install index
        and knows how to create a batch file for installation. InstlInstanceBase
        must be inherited by platform specific implementations, such as InstlInstance_mac
        or InstlInstance_win.
    """
    __metaclass__ = abc.ABCMeta
    def __init__(self, initial_vars=None):
        # init objects owned by this class
        self.path_searcher = SearchPaths(var_list.get_configVar_obj("__SEARCH_PATHS__"))
        self.init_default_vars(initial_vars)
        # initialize the search paths helper with the current directory and dir where instl is now
        self.path_searcher.add_search_path(os.getcwd())
        self.path_searcher.add_search_path(os.path.dirname(os.path.realpath(sys.argv[0])))
        self.path_searcher.add_search_path(var_list.resolve("$(__INSTL_DATA_FOLDER__)"))

        self.platform_helper = PlatformSpecificHelperFactory(var_list.resolve("$(__CURRENT_OS__)"), self)
        self.platform_helper.init_copy_tool() # init initial copy tool, tool might be later overridden after reading variable COPY_TOOL from yaml.


        self.install_definitions_index = dict()
        self.batch_accum = BatchAccumulator()
        self.do_not_write_vars = ("INFO_MAP_SIG", "INDEX_SIG", "PUBLIC_KEY")
        self.out_file_realpath = None


    def get_version_str(self):
        retVal = var_list.resolve("$(INSTL_EXEC_DISPLAY_NAME) version $(__INSTL_VERSION__) $(__COMPILATION_TIME__) $(__PLATFORM_NODE__)", list_sep=".", default="")
        return retVal

    def init_default_vars(self, initial_vars):
        if initial_vars:
            var_description = "from initial_vars"
            for var, value in initial_vars.iteritems():
                if isinstance(value, basestring):
                    var_list.add_const_config_variable(var, var_description, value)
                else:
                    var_list.add_const_config_variable(var, var_description, *value)

        var_description = "from InstlInstanceBase.init_default_vars"

        # read defaults/main.yaml
        main_defaults_file_path = os.path.join(var_list.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults", "main.yaml")
        self.read_yaml_file(main_defaults_file_path)

        # read defaults/compile-info.yaml
        compile_info_file_path = os.path.join(var_list.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults", "compile-info.yaml")
        if os.path.isfile(compile_info_file_path):
            self.read_yaml_file(compile_info_file_path)
        if "__COMPILATION_TIME__" not in var_list:
            if var_list.resolve("$(__INSTL_COMPILED__)") == "True":
                var_list.add_const_config_variable("__COMPILATION_TIME__", var_description, "unknown compilation time")
            else:
                var_list.add_const_config_variable("__COMPILATION_TIME__", var_description, "(not compiled)")

        # read class specific defaults/*.yaml
        class_specific_defaults_file_path = os.path.join(var_list.resolve("$(__INSTL_DATA_FOLDER__)"), "defaults", type(self).__name__+".yaml")
        if os.path.isfile(class_specific_defaults_file_path):
            self.read_yaml_file(class_specific_defaults_file_path)

        log_file = pyinstl.log_utils.get_log_file_path(var_list.resolve("$(INSTL_EXEC_DISPLAY_NAME)"), var_list.resolve("$(INSTL_EXEC_DISPLAY_NAME)"), debug=False)
        var_list.set_var("LOG_FILE", var_description).append(log_file)
        debug_log_file = pyinstl.log_utils.get_log_file_path(var_list.resolve("$(INSTL_EXEC_DISPLAY_NAME)"), var_list.resolve("$(INSTL_EXEC_DISPLAY_NAME)"), debug=True)
        var_list.set_var("LOG_FILE_DEBUG", var_description).extend( (debug_log_file, logging.getLevelName(pyinstl.log_utils.debug_logging_level), pyinstl.log_utils.debug_logging_started) )

    def check_prerequisite_var_existence(self, prerequisite_vars):
        missing_vars = [var for var in prerequisite_vars if var not in var_list]
        if len(missing_vars) > 0:
            msg = "Prerequisite variables were not defined: "+", ".join(missing_vars)
            logging.info(msg)
            raise ValueError(msg)

    def init_from_cmd_line_options(self, cmd_line_options_obj):
        """ turn command line options into variables """
        const_attrib_to_var = {
                         "input_file":      ("__MAIN_INPUT_FILE__", None),
                         "output_file":     ("__MAIN_OUT_FILE__", None),
                         "props_file":      ("__PROPS_FILE__", None),
                         "config_file":     ("__CONFIG_FILE__", None),
                         "sh1_checksum":    ("__SHA1_CHECKSUM__", None),
                         "rsa_signature":   ("__RSA_SIGNATURE__", None),
                         "start_progress":  ("__START_DYNAMIC_PROGRESS__", "0"),
                         "total_progress":  ("__TOTAL_DYNAMIC_PROGRESS__", "0"),
                         "just_with_number":  ("__JUST_WITH_NUMBER__", "0"),
                         "limit_command_to":  ("__LIMIT_COMMAND_TO__", None),
                         }

        for attrib, var in const_attrib_to_var.iteritems():
            attrib_value = getattr(cmd_line_options_obj, attrib)
            if attrib_value:
                var_list.add_const_config_variable(var[0], "from command line options", *attrib_value)
            elif var[1]: # there's a default
                var_list.add_const_config_variable(var[0], "from default", var[1])

        non_const_attrib_to_var = {
                        "filter_in":       "__FILTER_IN_VERSION__",
                         "target_repo_rev": "TARGET_REPO_REV",
                         "base_repo_rev":   "BASE_REPO_REV",
                         }

        for attrib, var in non_const_attrib_to_var.iteritems():
            attrib_value = getattr(cmd_line_options_obj, attrib)
            if attrib_value:
                var_list.set_var(var, "from command line options").append(attrib_value[0])

        if cmd_line_options_obj.command:
            var_list.set_var("__MAIN_COMMAND__", "from command line options").append(cmd_line_options_obj.command)

        if hasattr(cmd_line_options_obj, "subject") and cmd_line_options_obj.subject is not None:
            var_list.add_const_config_variable("__HELP_SUBJECT__", "from command line options", cmd_line_options_obj.subject)
        else:
            var_list.add_const_config_variable("__HELP_SUBJECT__", "from command line options", "")


        if cmd_line_options_obj.state_file:
            var_list.add_const_config_variable("__MAIN_STATE_FILE__", "from command line options", cmd_line_options_obj.state_file)
        if cmd_line_options_obj.filter_out:
            var_list.add_const_config_variable("__FILTER_OUT_PATHS__", "from command line options", *cmd_line_options_obj.filter_out)
        if cmd_line_options_obj.run:
            var_list.add_const_config_variable("__RUN_BATCH_FILE__", "from command line options", "yes")

    def is_acceptable_yaml_doc(self, doc_node):
        acceptables = var_list.resolve_to_list("$(ACCEPTABLE_YAML_DOC_TAGS)") + ["define", "define_const", "index"]
        acceptables = ["!" + acceptable for acceptable in acceptables]
        retVal = doc_node.tag in acceptables
        return retVal

    def read_yaml_file(self, file_path):
        logging.info("%s", file_path)
        with open_for_read_file_or_url(file_path, self.path_searcher) as file_fd:
            for a_node in yaml.compose_all(file_fd):
                if self.is_acceptable_yaml_doc(a_node):
                    if a_node.tag.startswith('!define_const'):
                        self.read_const_defines(a_node)
                    elif a_node.tag.startswith('!define'):
                        self.read_defines(a_node)
                    elif a_node.tag.startswith('!index'):
                        self.read_index(a_node)
                    else:
                        logging.error("Unknown document tag '%s' while reading file %s; tag should be one of: !define, !define_const, !index", a_node.tag, file_path)
        var_list.get_configVar_obj("__READ_YAML_FILES__").append(file_path)

    internal_identifier_re = re.compile(r"""
                                        __                  # dunder here
                                        (?P<internal_identifier>\w*)
                                        __                  # dunder there
                                        """, re.VERBOSE)

    def resolve_defined_paths(self):
        self.path_searcher.add_search_paths(var_list.resolve_to_list("$(SEARCH_PATHS)"))
        for path_var_to_resolve in var_list.resolve_to_list("$(PATHS_TO_RESOLVE)"):
            if path_var_to_resolve in var_list:
                resolved_path = self.path_searcher.find_file(var_list.resolve_var(path_var_to_resolve), return_original_if_not_found=True)
                var_list.set_var(path_var_to_resolve, "resolve_defined_paths").append(resolved_path)



    def read_defines(self, a_node):
        # if document is empty we get a scalar node
        if a_node.isMapping():
            for identifier, contents in a_node:
                logging.debug("%s: %s", identifier, str(contents))
                if not self.internal_identifier_re.match(identifier): # do not read internal state identifiers
                    var_list.set_var(identifier, str(contents.start_mark)).extend([item.value for item in contents])
                elif identifier == '__include__':
                    self.read_include_node(contents)

    def read_const_defines(self, a_node):
        """ Read a !define_const sub-doc. All variables will be made const.
            Reading of internal state identifiers is allowed.
            __include__ is not allowed.
        """
        if a_node.isMapping():
            for identifier, contents in a_node:
                if identifier == "__include__":
                    raise ValueError("!define_const doc cannot except __include__")
                logging.debug("%s: %s", identifier, str(contents))
                var_list.add_const_config_variable(identifier, "from !define_const section", *[item.value for item in contents])

    def read_include_node(self, i_node):
        if i_node.isScalar():
            resolved_file_name = var_list.resolve(i_node.value)
            self.read_yaml_file(resolved_file_name)
        elif i_node.isSequence():
            for sub_i_node in i_node:
                self.read_include_node(sub_i_node)
        elif i_node.isMapping():
            if "url" in i_node and "checksum" in i_node and "sig" in i_node:
                resolved_file_url = var_list.resolve(i_node["url"].value)
                resolved_checksum = var_list.resolve(i_node["checksum"].value)
                resolved_signature = var_list.resolve(i_node["sig"].value)
                cached_files_dir = self.get_default_sync_dir(continue_dir="cache", mkdir=True)
                cached_file = os.path.join(cached_files_dir, resolved_checksum)

                if "PUBLIC_KEY" not in var_list:
                    if "PUBLIC_KEY_FILE" in var_list:
                        public_key_file = var_list.resolve("$(PUBLIC_KEY_FILE)")
                        with open_for_read_file_or_url(public_key_file, self.path_searcher) as file_fd:
                            public_key_text = file_fd.read()
                            var_list.set_var("PUBLIC_KEY", "from "+public_key_file).append(public_key_text)


                public_key_text = var_list.resolve("$(PUBLIC_KEY)")
                download_from_file_or_url(resolved_file_url, cached_file, cache=True,
                                          public_key=public_key_text,
                                          textual_sig=resolved_signature,
                                          expected_checksum=resolved_checksum)
                self.read_yaml_file(cached_file)
                if "copy" in i_node:
                    self.batch_accum.set_current_section('post-sync')
                    self.batch_accum += self.platform_helper.copy_tool.copy_file_to_file(cached_file, var_list.resolve(i_node["copy"].value), link_dest=True)

    def create_variables_assignment(self):
        self.batch_accum.set_current_section("assign")
        for identifier in var_list:
            if identifier not in self.do_not_write_vars:
                self.batch_accum += self.platform_helper.var_assign(identifier, var_list.resolve_var(identifier), None)  # var_list[identifier].resolved_num

    def get_default_sync_dir(self, continue_dir=None, mkdir=True):
        retVal = None
        os_family_name = var_list.resolve("$(__CURRENT_OS__)")
        if os_family_name == "Mac":
            user_cache_dir_param = "$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)"
            retVal = appdirs.user_cache_dir(user_cache_dir_param)
        elif os_family_name == "Win":
            retVal = appdirs.user_cache_dir("$(INSTL_EXEC_DISPLAY_NAME)", "$(COMPANY_NAME)")
        elif os_family_name == "Linux":
            user_cache_dir_param = "$(COMPANY_NAME)/$(INSTL_EXEC_DISPLAY_NAME)"
            retVal = appdirs.user_cache_dir(user_cache_dir_param)
        if continue_dir:
            #from_url = from_url.lstrip("/\\")
            #from_url = from_url.rstrip("/\\")
            retVal = os.path.join(retVal, continue_dir)
        #print("1------------------", user_cache_dir, "-", from_url, "-", retVal)
        if mkdir and retVal:
            retVal = var_list.resolve(retVal)
            safe_makedirs(retVal)
        return retVal

    def relative_sync_folder_for_source(self, source):
        retVal = None
        if source[1] in ('!dir', '!file'):
            retVal = "/".join(source[0].split("/")[0:-1])
        elif source[1] in ('!dir_cont', '!files'):
            retVal = source[0]
        else:
            raise ValueError("unknown tag for source "+source[0]+": "+source[1])
        return retVal

    def write_batch_file(self):
        self.batch_accum.set_current_section('pre')
        self.batch_accum += self.platform_helper.get_install_instructions_prefix()
        self.batch_accum.set_current_section('post')
        var_list.set_var("TOTAL_ITEMS_FOR_PROGRESS_REPORT").append(str(self.platform_helper.num_items_for_progress_report))
        self.batch_accum += self.platform_helper.get_install_instructions_postfix()
        lines = self.batch_accum.finalize_list_of_lines()
        lines_after_var_replacement = '\n'.join([value_ref_re.sub(self.platform_helper.var_replacement_pattern, line) for line in lines])

        from utils import write_to_file_or_stdout
        out_file = var_list.resolve("$(__MAIN_OUT_FILE__)")
        with write_to_file_or_stdout(out_file) as fd:
            fd.write(lines_after_var_replacement)
            fd.write('\n')

        if out_file != "stdout":
            self.out_file_realpath = os.path.realpath(out_file)
            # chmod to 0777 so that a file created under sudo can be re-written under a regular user.
            # However a regular user cannot chmod a file created under sudo, hence the try/except
            try:
                os.chmod(self.out_file_realpath, 0777)
            except:
                pass
        else:
            self.out_file_realpath = "stdout"
        msg = " ".join( (self.out_file_realpath, str(self.platform_helper.num_items_for_progress_report), "progress items") )
        print(msg)
        logging.info(msg)

    def run_batch_file(self):
        logging.info("running batch file %s", self.out_file_realpath)
        from subprocess import Popen
        p = Popen([self.out_file_realpath], shell=False)
        unused_stdout, unused_stderr = p.communicate()
        retcode = p.returncode
        if retcode != 0:
            raise SystemExit(self.out_file_realpath + " returned exit code " + str(retcode))

    def write_program_state(self):
        from utils import write_to_file_or_stdout
        state_file = var_list.resolve("$(__MAIN_STATE_FILE__)")
        with write_to_file_or_stdout(state_file) as fd:
            augmentedYaml.writeAsYaml(self, fd)

    def read_index(self, a_node):
        self.install_definitions_index.update(read_index_from_yaml(a_node))

    def find_cycles(self):
        if not self.install_definitions_index:
            print("index empty - nothing to check")
        else:
            try:
                from pyinstl import installItemGraph
                depend_graph = installItemGraph.create_dependencies_graph(self.install_definitions_index)
                depend_cycles = installItemGraph.find_cycles(depend_graph)
                if not depend_cycles:
                    print("No depend cycles found")
                else:
                    for cy in depend_cycles:
                        print("depend cycle:", " -> ".join(cy))
                inherit_graph = installItemGraph.create_inheritItem_graph(self.install_definitions_index)
                inherit_cycles = installItemGraph.find_cycles(inherit_graph)
                if not inherit_cycles:
                    print("No inherit cycles found")
                else:
                    for cy in inherit_cycles:
                        print("inherit cycle:", " -> ".join(cy))
            except ImportError:  # no installItemGraph, no worry
                print("Could not load installItemGraph")

    def read_info_map_file(self, in_file_path):
        self.svnTree.read_info_map_from_file(in_file_path)

    def write_info_map_file(self):
        self.svnTree.write_to_file(var_list.resolve("$(__MAIN_OUT_FILE__)"))
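
internal_identifier_re (used by read_defines above to skip internal state identifiers) matches names that both begin and end with a double underscore. A quick demonstration of what it does and does not match:

import re

internal_identifier_re = re.compile(r"""
                                    __                  # dunder here
                                    (?P<internal_identifier>\w*)
                                    __                  # dunder there
                                    """, re.VERBOSE)

assert internal_identifier_re.match("__MAIN_COMMAND__")
assert not internal_identifier_re.match("SYNC_BASE_URL")
assert not internal_identifier_re.match("_single_")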