Example No. 1
    def do_tag_extract(self):
        tag_path = self.tag_path.get()

        def_id = self.tag_class_ext_to_fcc.get(
            Path(tag_path).suffix[1:].lower())
        if def_id is None or def_id not in self.tag_data_extractors:
            print("Cannot extract data from this kind of tag.")
            return

        if not is_in_dir(tag_path, self.handler.tagsdir):
            print("Tag %s is not located within tags directory: %s" %
                  (tag_path, self.handler.tagsdir))
            return

        print("Extracting %s" % tag_path)
        self.extract(os.path.relpath(tag_path, str(self.handler.tagsdir)),
                     self.tag_data_extractors[def_id],
                     out_dir=str(
                         Path(path_split(self.handler.tagsdir,
                                         "tags")).joinpath("data")),
                     overwrite=self.overwrite.get(),
                     engine="yelo",
                     decode_adpcm=self.decode_adpcm.get())

        print("Extraction completed.\n")
Example No. 2
    def populate_dependency_tree(self):
        filepath = self.tag_filepath.get()
        if not filepath:
            return

        app = self.app_root
        handler = self.handler = app.handler
        handler_name = app.handler_names[app._curr_handler_index]
        if handler_name not in app.tags_dir_relative:
            print("Change the current tag set.")
            return

        filepath = path_normalize(filepath)

        if not is_in_dir(filepath, self.handler.tagsdir):
            print("%s\nis not in tagsdir\n%s" %
                  (filepath, self.handler.tagsdir))
            return

        rel_filepath = Path(filepath).relative_to(self.handler.tagsdir)
        tag = self.get_tag(rel_filepath)
        if tag is None:
            print("Could not load tag:\n    %s" % filepath)
            return

        self.dependency_frame.handler = handler
        self.dependency_frame.tags_dir = self.handler.tagsdir
        self.dependency_frame.root_tag_path = tag.filepath
        self.dependency_frame.root_tag_text = rel_filepath

        self.dependency_frame.reload()
Example No. 3
    def tag_browse(self):
        if self._extracting:
            return
        filetypes = [('All', '*')]

        for def_id in sorted(self.tag_data_extractors.keys()):
            if def_id in self.tag_class_fcc_to_ext:
                filetypes.append(
                    (def_id, "." + self.tag_class_fcc_to_ext[def_id]))

        fp = askopenfilename(initialdir=str(self.app_root.last_load_dir),
                             filetypes=filetypes,
                             parent=self,
                             title="Select a tag to extract from")

        if not fp:
            return

        fp = Path(fp)
        self.app_root.last_load_dir = fp.parent
        if not is_in_dir(fp, self.handler.tagsdir):
            print("Tag %s is not located in tags directory %s" %
                  (fp, self.handler.tagsdir))
            return

        self.tag_path.set(fp)
Example No. 4
    def browse_shader_path(self):
        if self._compiling or self._loading or self._saving:
            return

        tags_dir = self.tags_dir.get()
        if not tags_dir or not os.path.exists(tags_dir):
            return

        shader_dir = os.path.dirname(
            os.path.join(tags_dir, self.shader_path_string_var.get()))

        shader_exts = tuple((typ, "*.shader_%s" % typ) for typ in shader_types)
        fp = asksaveasfilename(
            initialdir=shader_dir,
            parent=self,
            title="Select the shader to use(or where to make one)",
            filetypes=shader_exts + (('All', '*'), ))

        fp, ext = os.path.splitext(fp)
        if fp:
            if not is_in_dir(fp, tags_dir):
                print("Specified shader is not located in the tags directory.")
                return

            ext = ext.strip(".").lower()
            self.shader_path_string_var.set(os.path.relpath(fp, tags_dir))
            mat = self.get_material(self.shader_names_menu.sel_index)
            if mat and ext in shader_type_map:
                self.shader_types_menu.sel_index = shader_type_map[ext]
                mat.shader_type = ext
Example No. 5
    def jms_dir_browse(self):
        if self._compiling or self._loading or self._saving:
            return

        tags_dir = self.tags_dir.get()
        data_dir = path_replace(tags_dir, "tags", "data")
        jms_dir = self.jms_dir.get()
        if tags_dir and not jms_dir:
            jms_dir = data_dir

        dirpath = askdirectory(
            initialdir=jms_dir,
            parent=self,
            title="Select the folder of models to compile...")

        if not dirpath:
            return

        dirpath = str(Path(dirpath))
        if tags_dir and data_dir and os.path.basename(
                dirpath).lower() == "models":
            object_dir = os.path.dirname(dirpath)

            if object_dir and is_in_dir(object_dir, data_dir):
                tag_path = os.path.join(object_dir,
                                        os.path.basename(object_dir))
                tag_path = os.path.join(tags_dir,
                                        os.path.relpath(tag_path, data_dir))
                self.gbxmodel_path.set(tag_path + ".gbxmodel")

        self.app_root.last_load_dir = os.path.dirname(dirpath)
        self.jms_dir.set(dirpath)
        if not self.tags_dir.get():
            self.tags_dir.set(
                Path(path_split(self.app_root.last_load_dir, "data"), "tags"))
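Several of the examples also call the helpers path_split and path_replace (for instance path_split(self.handler.tagsdir, "tags") in Example No. 1 and path_replace(tags_dir, "tags", "data") just above). Their implementations are not included on this page either; judging only from the call sites, path_split appears to truncate a path just before a named component, and path_replace appears to substitute one component for another. The following is a rough, purely illustrative sketch of that inferred behavior; the names match the calls above, but the bodies are assumptions (the comment in the jma_dir_browse example further down suggests the real path_replace also matches the on-disk casing of an existing directory, which this string-level sketch does not attempt).

from pathlib import PureWindowsPath

def path_split(path, component):
    # Assumed behavior: return the portion of `path` preceding the first
    # component equal to `component` (case-insensitive), e.g.
    # path_split(r"C:\halo\tags\weapons", "tags") -> r"C:\halo"
    parts = PureWindowsPath(path).parts
    for i, part in enumerate(parts):
        if part.lower() == component.lower():
            return str(PureWindowsPath(*parts[:i]))
    return str(path)

def path_replace(path, old, new):
    # Assumed behavior: replace the first component equal to `old` with
    # `new`, e.g. path_replace(r"C:\halo\tags\weapons", "tags", "data")
    # -> r"C:\halo\data\weapons"
    parts = list(PureWindowsPath(path).parts)
    for i, part in enumerate(parts):
        if part.lower() == old.lower():
            parts[i] = new
            break
    return str(PureWindowsPath(*parts))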
Example No. 6
    def get_backup_dir(self, filepath=None):
        filepath = Path(os.path.realpath(str(filepath)))
        fallback_dir = filepath.parent.joinpath(self.backup_dir_basename)
        if not self.tagsdir_relative:
            return fallback_dir

        # TODO: Test that is_in_dir works here
        if not is_in_dir(filepath, self.tagsdir):
            return fallback_dir

        return self.tagsdir.joinpath(
            self.backup_dir_basename,
            os.path.dirname(os.path.relpath(str(filepath), str(self.tagsdir))))
Example No. 7
    def dir_browse(self):
        if self._scanning:
            return
        dirpath = askdirectory(initialdir=self.directory_path.get(),
                               parent=self,
                               title="Select directory to scan")

        if not dirpath:
            return

        self.app_root.last_load_dir = dirpath
        if not is_in_dir(dirpath, self.handler.tagsdir):
            print(
                "Specified directory is not located within the tags directory")
            return

        self.directory_path.set(dirpath)
Example No. 8
    def dir_browse(self):
        if self._extracting:
            return
        dirpath = askdirectory(
            initialdir=self.dir_path.get(),
            parent=self,
            title="Select the directory of tags to extract from")

        if not dirpath:
            return

        self.app_root.last_load_dir = dirpath
        if not is_in_dir(dirpath, self.handler.tagsdir):
            print('Directory "%s" is not located inside tags dir: "%s"' %
                  (dirpath, self.handler.tagsdir))
            return

        self.dir_path.set(dirpath)
Example No. 9
    def get_next_backup_filepath(self, filepath, backup_count=1):
        filepath = Path(filepath)
        backup_count = max(backup_count, 1)
        backup_dir = self.get_backup_dir(filepath)

        existing_backup_paths = self.get_backup_paths_by_timestamps(filepath)
        if existing_backup_paths and len(
                existing_backup_paths) >= backup_count:
            return existing_backup_paths[sorted(existing_backup_paths)[0]]

        if self.tagsdir_relative and is_in_dir(filepath, self.tagsdir):
            backup_path = self.tagsdir.joinpath(
                self.backup_dir_basename,
                os.path.relpath(str(filepath), str(self.tagsdir)))
        else:
            backup_path = backup_dir.joinpath(filepath.stem)

        return self.get_unique_filename(backup_path,
                                        set(existing_backup_paths.values()),
                                        ())
Example No. 10
    def jma_dir_browse(self):
        if self._compiling or self._loading or self._saving:
            return

        tags_dir = self.tags_dir.get()
        # Add data to the path and then use path_replace to match the case of any
        # data directory that might already be here.
        data_dir = str(
            path_replace(
                Path(tags_dir).parent.joinpath("data"), "data", "data"))
        jma_dir = self.jma_dir.get()
        if tags_dir and not jma_dir:
            jma_dir = data_dir

        dirpath = path_normalize(
            askdirectory(
                initialdir=jma_dir,
                parent=self,
                title="Select the folder of animations to compile..."))

        if not dirpath:
            return

        dirpath = str(Path(dirpath))
        if tags_dir and data_dir and os.path.basename(
                dirpath).lower() == "animations":
            object_dir = os.path.dirname(dirpath)

            if object_dir and is_in_dir(object_dir, data_dir):
                tag_path = os.path.join(object_dir,
                                        os.path.basename(object_dir))
                tag_path = os.path.join(tags_dir,
                                        os.path.relpath(tag_path, data_dir))
                self.model_animations_path.set(tag_path + ".model_animations")

        self.app_root.last_load_dir = os.path.dirname(dirpath)
        self.jma_dir.set(dirpath)
        if not self.tags_dir.get():
            self.tags_dir.set(
                os.path.join(path_split(self.app_root.last_load_dir, "data"),
                             "tags"))
Example No. 11
    def tags_dir_browse(self):
        if self._compiling or self._loading or self._saving:
            return

        old_tags_dir = self.tags_dir.get()
        tags_dir = askdirectory(initialdir=old_tags_dir,
                                parent=self,
                                title="Select the root of the tags directory")

        if not tags_dir:
            return

        tags_dir = str(Path(tags_dir))

        antr_path = self.model_animations_path.get()
        if old_tags_dir and antr_path and not is_in_dir(antr_path, tags_dir):
            # adjust antr filepath to be relative to the new tags directory
            antr_path = os.path.join(tags_dir,
                                     os.path.relpath(antr_path, old_tags_dir))
            self.model_animations_path.set(antr_path)

        self.app_root.last_load_dir = os.path.dirname(tags_dir)
        self.tags_dir.set(tags_dir)
Example No. 12
    def tags_dir_browse(self):
        if self._compiling or self._loading or self._saving:
            return

        old_tags_dir = self.tags_dir.get()
        tags_dir = askdirectory(initialdir=old_tags_dir,
                                parent=self,
                                title="Select the root of the tags directory")

        if not tags_dir:
            return

        tags_dir = path_normalize(tags_dir)

        mod2_path = self.gbxmodel_path.get()
        if old_tags_dir and mod2_path and not is_in_dir(mod2_path, tags_dir):
            # adjust mod2 filepath to be relative to the new tags directory
            mod2_path = os.path.join(tags_dir,
                                     os.path.relpath(mod2_path, old_tags_dir))
            self.gbxmodel_path.set(mod2_path)

        self.app_root.last_load_dir = os.path.dirname(tags_dir)
        self.tags_dir.set(tags_dir)
Example No. 13
    def do_recursive_zip(self):
        tag_path = self.tag_filepath.get()
        if not tag_path:
            return

        app = self.app_root
        handler = self.handler = app.handler
        handler_name = app.handler_names[app._curr_handler_index]
        if handler_name not in app.tags_dir_relative:
            print("Change the current tag set.")
            return

        tag_path = Path(tag_path)
        if not is_in_dir(tag_path, self.handler.tagsdir):
            print("Specified tag is not located within the tags directory")
            return

        tagzip_path = asksaveasfilename(initialdir=self.app_root.last_load_dir,
                                        parent=self,
                                        title="Save zipfile to...",
                                        filetypes=(("zipfile", "*.zip"), ))

        if not tagzip_path:
            return

        try:
            rel_filepath = tag_path.relative_to(self.handler.tagsdir)
            tag = self.get_tag(rel_filepath)
        except ValueError:
            tag = None

        if tag is None:
            print("Could not load tag:\n    %s" % tag_path)
            return

        # make the zipfile to put everything in
        tagzip_path = os.path.splitext(tagzip_path)[0] + ".zip"

        tags_to_zip = [rel_filepath]
        new_tags_to_zip = []
        seen_tags = set()

        with zipfile.ZipFile(str(tagzip_path), mode='w') as tagzip:
            # loop over all the tags and add them to the zipfile
            while tags_to_zip:
                for rel_filepath in tags_to_zip:
                    tag_path = tagpath_to_fullpath(
                        self.handler.tagsdir, PureWindowsPath(rel_filepath))
                    if self.stop_zipping:
                        print('Recursive zip operation cancelled.\n')
                        return

                    if rel_filepath in seen_tags:
                        continue
                    seen_tags.add(rel_filepath)

                    try:
                        print("Adding '%s' to zipfile" % rel_filepath)
                        app.update_idletasks()
                        tag = self.get_tag(rel_filepath)
                        new_tags_to_zip.extend(self.get_dependencies(tag))

                        # try to conserve memory a bit
                        del tag

                        tagzip.write(str(tag_path), arcname=str(rel_filepath))
                    except Exception:
                        print(format_exc())
                        print("    Could not add '%s' to zipfile." %
                              rel_filepath)

                # replace the tags to zip with the newly collected ones
                tags_to_zip[:] = new_tags_to_zip
                del new_tags_to_zip[:]

        print("\nRecursive zip completed.\n")
Example No. 14
    def do_dir_extract(self):
        tags_path = self.dir_path.get()
        data_path = self.handler.tagsdir.parent.joinpath("data")

        if not is_in_dir(tags_path, self.handler.tagsdir):
            print("Directory %s is not located inside tags dir: %s" %
                  (tags_path, self.handler.tagsdir))
            return

        settings = dict(out_dir=str(data_path),
                        overwrite=self.overwrite.get(),
                        decode_adpcm=self.decode_adpcm.get(),
                        engine="yelo")

        print("Beginning tag data extracton in:\t%s" % self.handler.tagsdir)

        s_time = time()
        c_time = s_time
        p_int = self.print_interval

        all_tag_paths = {
            self.listbox_index_to_def_id[int(i)]: []
            for i in self.def_ids_listbox.curselection()
        }

        print("Locating tags...")

        for root, directories, files in os.walk(tags_path):
            root = Path(root)
            try:
                root = root.relative_to(self.handler.tagsdir)
            except ValueError:
                continue

            for filename in files:
                filepath = root.joinpath(filename)

                if time() - c_time > p_int:
                    c_time = time()
                    print(' ' * 4, filepath, sep="")
                    self.app_root.update_idletasks()

                if self.stop_extracting:
                    print('Tag data extraction cancelled.\n')
                    return

                tag_paths = all_tag_paths.get(
                    self.tag_class_ext_to_fcc.get(filepath.suffix[1:].lower()))

                if tag_paths is not None:
                    tag_paths.append(filepath)

        for def_id in sorted(all_tag_paths):
            extractor = self.tag_data_extractors[def_id]
            if self.stop_extracting:
                print('Tag data extraction cancelled.\n')
                return

            print("Extracting %s" % def_id)
            for filepath in all_tag_paths[def_id]:
                if self.stop_extracting:
                    print('Tag data extraction cancelled.\n')
                    return

                print(' ' * 4, filepath, sep="")
                self.extract(filepath, extractor, **settings)

        print("Extraction completed.\n")
Example No. 15
    def scan(self):
        handler = self.handler
        self.stop_scanning = False

        logpath = path_normalize(self.logfile_path.get())
        dirpath = path_normalize(self.directory_path.get())

        if not is_in_dir(dirpath, self.handler.tagsdir):
            print(
                "Specified directory is not located within the tags directory")
            return

        # this is the string to store the entire debug log
        log_name = "HEK Tag Scanner log"
        debuglog = "\n%s%s%s\n\n" % ("-" * 30, log_name, "-" *
                                     (50 - len(log_name)))
        debuglog += "tags directory = %s\nscan directory = %s\n\n" % (
            self.handler.tagsdir, dirpath)
        debuglog += "Broken dependencies are listed below.\n"
        tag_specific_errors = {}

        get_nodes = handler.get_nodes_by_paths
        get_tagref_invalid = handler.get_tagref_invalid

        s_time = time()
        c_time = s_time
        p_int = self.print_interval

        all_tag_paths = {
            self.listbox_index_to_def_id[int(i)]: []
            for i in self.def_ids_listbox.curselection()
        }
        ext_id_map = handler.ext_id_map
        id_ext_map = handler.id_ext_map

        print("Locating tags...")

        for root, directories, files in os.walk(dirpath):
            root = path_normalize(os.path.join(root, ""))

            rel_root = Path(root).relative_to(self.handler.tagsdir)

            for filename in files:
                filepath = rel_root.joinpath(filename)

                if time() - c_time > p_int:
                    c_time = time()
                    print(' ' * 4, filepath, sep="")
                    self.app_root.update_idletasks()

                if self.stop_scanning:
                    print('Tag scanning operation cancelled.\n')
                    return

                tag_paths = all_tag_paths.get(
                    ext_id_map.get(os.path.splitext(filename)[-1].lower()))

                if tag_paths is not None:
                    tag_paths.append(filepath)

        # make the debug string by scanning the tags directory
        for def_id in sorted(all_tag_paths.keys()):
            tag_ref_paths = handler.tag_ref_cache.get(def_id)

            self.app_root.update_idletasks()
            print("Scanning '%s' tags..." % id_ext_map[def_id][1:])
            tags_coll = all_tag_paths[def_id]

            # always display the first tag's filepath
            c_time = time() - (p_int + 100)

            for filepath in sorted(tags_coll):
                if self.stop_scanning:
                    print('Tag scanning operation cancelled.\n')
                    break

                if time() - c_time > p_int:
                    c_time = time()
                    print(' ' * 4, filepath, sep="")
                    self.app_root.update_idletasks()

                tag = self.get_tag(self.handler.tagsdir.joinpath(filepath))
                if tag is None:
                    print("    Could not load '%s'" % filepath)
                    continue

                # find tag specific errors
                self.tag_specific_scan(tag, tag_specific_errors)

                try:
                    if tag_ref_paths is None:
                        # no dependencies for this tag. continue on
                        continue

                    missed = get_nodes(tag_ref_paths, tag.data,
                                       get_tagref_invalid)

                    if not missed:
                        continue

                    debuglog += "\n\n%s\n" % filepath
                    block_name = None

                    for block in missed:
                        if block.NAME != block_name:
                            debuglog += '%s%s\n' % (' ' * 4, block.NAME)
                            block_name = block.NAME
                        try:
                            ext = '.' + block.tag_class.enum_name
                        except Exception:
                            ext = ''
                        debuglog += '%s%s\n' % (' ' * 8, block.STEPTREE + ext)

                except Exception:
                    print(format_exc())
                    print("    Could not scan '%s'" % tag.filepath)
                    continue

            if self.stop_scanning:
                break

        if tag_specific_errors:
            debuglog += "\nTag specific errors are listed below.\n"

        for def_id in sorted(tag_specific_errors.keys()):
            debuglog += "\n\n%s specific errors:\n%s" % (
                def_id, tag_specific_errors[def_id])

        print("\nScanning took %s seconds." % int(time() - s_time))
        print("Writing logfile to %s..." % logpath)
        self.app_root.update_idletasks()

        # make and write to the logfile
        try:
            handler.make_log_file(debuglog, logpath)
            try:
                print("Scan completed.\n")
                if self.open_logfile.get():
                    open_in_default_program(logpath)

            except Exception:
                print("Could not open written log.")
            return
        except Exception:
            print("Could not create log. Printing log to console instead.\n\n")
            for line in debuglog.split('\n'):
                try:
                    print(line)
                except Exception:
                    print("<COULD NOT PRINT THIS LINE>")

            print("Scan completed.\n")
Example No. 16
    def _load_models(self):
        models_dir = self.jms_dir.get()
        if not models_dir:
            return

        start = time.time()
        print("Locating jms files...")
        fps = []
        for _, __, files in os.walk(models_dir):
            for fname in files:
                ext = os.path.splitext(fname)[-1].lower()
                #if ext in ".jms.obj.dae":
                if ext in ".jms.obj":
                    fps.append(os.path.join(models_dir, fname))

            break

        if not fps:
            print("    No valid jms files found in the folder.")
            return

        self.mod2_tag = self.merged_jms = None
        optimize_level = max(0, self.optimize_menu.sel_index)

        jms_models = self.jms_models = []
        print("Loading jms files...")
        self.app_root.update()
        for fp in fps:
            try:
                print("    %s" % fp.replace('/', '\\').split("\\")[-1])
                self.app_root.update()

                model_name = os.path.basename(fp).split('.')[0]
                ext = os.path.splitext(fp)[-1].lower()

                jms_model = None
                if ext == ".jms":
                    with open(fp, "r") as f:
                        jms_model = read_jms(f.read(), '', model_name)
                elif ext == ".obj":
                    with open(fp, "r") as f:
                        jms_model = jms_model_from_obj(f.read(), model_name)
                elif ext == ".dae":
                    jms_model = jms_model_from_dae(fp, model_name)

                if not jms_model:
                    continue

                jms_models.append(jms_model)

                if optimize_level:
                    old_vert_ct = len(jms_model.verts)
                    print("        Optimizing...", end='')
                    jms_model.optimize_geometry(optimize_level == 1)
                    print(" Removed %s verts" %
                          (old_vert_ct - len(jms_model.verts)))

                print("        Calculating normals...")
                jms_model.calculate_vertex_normals()
            except Exception:
                print(format_exc())
                print("    Could not parse jms file.")
                self.app_root.update()

        if not jms_models:
            print("    No valid jms files found.")
            return

        first_crc = None
        for jms_model in jms_models:
            if first_crc is None:
                first_crc = jms_model.node_list_checksum
            elif first_crc != jms_model.node_list_checksum:
                print("    Warning, not all node list checksums match.")
                break

        # make sure the highest lod for each permutation is set as superhigh
        # this is necessary, as only superhigh jms markers are used
        jms_models_by_name = {}
        for jms_model in jms_models:
            lod_models = jms_models_by_name.setdefault(jms_model.perm_name,
                                                       [None] * 5)
            lod_index = {
                "high": 1,
                "medium": 2,
                "low": 3,
                "superlow": 4
            }.get(jms_model.lod_level, 0)
            lod_models[lod_index] = jms_model

        for lod_models in jms_models_by_name.values():
            for jms_model in lod_models:
                if jms_model is not None:
                    jms_model.lod_level = "superhigh"
                    break

        print("Merging jms data...")
        self.app_root.update()
        self.merged_jms = merged_jms = MergedJmsModel()
        errors_occurred = False
        for jms_model in jms_models:
            errors = merged_jms.merge_jms_model(jms_model)
            errors_occurred |= bool(errors)
            if errors:
                print("    Errors in '%s'" % jms_model.name)
                for error in errors:
                    print("        ", error, sep='')

            self.app_root.update()

        mod2_path = self.gbxmodel_path.get()
        tags_dir = self.tags_dir.get().replace('/', '\\')
        self.shader_names_menu.max_index = len(merged_jms.materials) - 1

        shaders_dir = ""
        if mod2_path:
            shaders_dir = os.path.join(os.path.dirname(mod2_path), "shaders",
                                       '')
        tags_dir = self.tags_dir.get()
        has_local_shaders = os.path.exists(shaders_dir) and os.path.exists(
            tags_dir)
        if errors_occurred:
            print("    Errors occurred while loading jms files.")
        elif os.path.isfile(mod2_path):
            try:
                self.mod2_tag = mod2_def.build(filepath=mod2_path)

                tagdata = self.mod2_tag.data.tagdata
                self.merged_jms.node_list_checksum = tagdata.node_list_checksum
                self.superhigh_lod_cutoff.set(str(
                    tagdata.superhigh_lod_cutoff))
                self.high_lod_cutoff.set(str(tagdata.high_lod_cutoff))
                self.medium_lod_cutoff.set(str(tagdata.medium_lod_cutoff))
                self.low_lod_cutoff.set(str(tagdata.low_lod_cutoff))
                self.superlow_lod_cutoff.set(str(tagdata.superlow_lod_cutoff))

                # get any shaders in the gbxmodel and set the shader_path
                # and shader_type for any matching materials in the jms
                shdr_refs = {}
                for shdr_ref in tagdata.shaders.STEPTREE:
                    shdr_name = shdr_ref.shader.filepath.split(
                        "\\")[-1].lower()
                    shdr_refs.setdefault(shdr_name, []).append(shdr_ref)

                for mat in merged_jms.materials:
                    shdr_ref = shdr_refs.get(mat.name, [""]).pop(0)
                    if shdr_ref:
                        mat.shader_type = shdr_ref.shader.tag_class.enum_name
                        mat.shader_path = shdr_ref.shader.filepath

                local_shaders = {}
                if has_local_shaders and is_in_dir(shaders_dir, tags_dir):
                    # fill in any missing shader paths with ones found nearby
                    for _, __, files in os.walk(shaders_dir):
                        for filename in files:
                            name, ext = os.path.splitext(filename)
                            ext = ext.lower()
                            if ext.startswith(".shader"):
                                local_shaders.setdefault(
                                    name.split("\\")[-1].lower(), []).append(
                                        os.path.join(shaders_dir, filename))
                        break

                    for mat in merged_jms.materials:
                        shader_path = local_shaders.get(mat.name, [""]).pop(0)
                        if "shader_" in mat.shader_type or not shader_path:
                            continue

                        # shader type isn't set. Try to detect its location and
                        # type if possible, or set it to a default value if not
                        shader_path = shader_path.lower().replace("/", "\\")
                        name, ext = os.path.splitext(shader_path)
                        mat.shader_path = os.path.relpath(name,
                                                          tags_dir).strip("\\")
                        mat.shader_type = ext.strip(".")

            except Exception:
                print(format_exc())
        else:
            self.superhigh_lod_cutoff.set("0.0")
            self.high_lod_cutoff.set("0.0")
            self.medium_lod_cutoff.set("0.0")
            self.low_lod_cutoff.set("0.0")
            self.superlow_lod_cutoff.set("0.0")

        for mat in merged_jms.materials:
            shader_path = mat.shader_path
            if mat.shader_type in ("shader", ""):
                assume_shaders_dir = not shaders_dir

                if not assume_shaders_dir:
                    try:
                        shader_path = os.path.relpath(
                            os.path.join(shaders_dir, shader_path), tags_dir)
                        shader_path = shader_path.strip("\\")
                    except ValueError:
                        assume_shaders_dir = True

                mat.shader_type = "shader_model"
            else:
                assume_shaders_dir = False

            if assume_shaders_dir or shader_path.startswith("..\\"):
                shader_path = "shaders\\" + os.path.basename(shader_path)

            mat.shader_path = shader_path.lstrip("..\\")

        if not self.mod2_tag:
            print(
                "    Existing gbxmodel tag not detected or could not be loaded.\n"
                "        A new gbxmodel tag will be created.")

        print("Finished loading models. Took %.6f seconds.\n" %
              (time.time() - start))
        self.select_shader(0)