Example #1
def get_tags(coll_path, model_in_path):
    mod2_path = str(Path(model_in_path).with_suffix('')) + "_COLL.gbxmodel"

    # check whether the collision tag is a Stubbs the Zombie tag (version 11)
    stubbs = tag_header_def.build(filepath=coll_path).version == 11

    if stubbs:
        coll_tag = stubbs_coll_def.build(filepath=coll_path)
    else:
        coll_tag = coll_def.build(filepath=coll_path)

    mod2_tag = mod2_def.build()
    mod2_tag.filepath = mod2_path
    model_in_rawdata = None

    guessed_mode = False
    while model_in_rawdata is None and model_in_path:
        try:
            model_in_rawdata = get_rawdata(filepath=model_in_path)
        except Exception:
            if guessed_mode:
                model_in_rawdata = None
                fp = askopenfilename(
                    initialdir=Path(model_in_path).parent, filetypes=(
                        ('All', '*'), ('Gbxmodel', '*.gbxmodel')),
                    title="Select the gbxmodel to extract nodes from")
                # askopenfilename returns "" on cancel; wrapping "" in Path
                # gives a truthy Path('.'), so only keep a non-empty result.
                model_in_path = Path(fp) if fp else None
            else:
                model_in_path = Path(model_in_path).with_suffix(".model")
                guessed_mode = True

    if model_in_rawdata is not None:
        # we don't actually care about the geometries or shaders of the gbxmodel
        # tag we're loading, so null them out to speed up the loading process.
        geom_off = 64 + 4*9 + 2*5 + 126 + 12*3

        # make a copy so we don't edit the file
        model_in_rawdata = bytearray(model_in_rawdata)
        model_in_rawdata[geom_off:64 + 232] = b'\x00'*(64 + 232 - geom_off)

        if model_in_rawdata[36:40] == b"mod2":
            model_in_tag = mod2_def.build(rawdata=model_in_rawdata)
        elif stubbs:
            model_in_tag = stubbs_mode_def.build(rawdata=model_in_rawdata)
        else:
            model_in_tag = mode_def.build(rawdata=model_in_rawdata)

        mod2_tag.data.tagdata.nodes = model_in_tag.data.tagdata.nodes
    else:
        model_in_tag = None
        mod2_tag.data.tagdata.nodes.STEPTREE.append()
        node = mod2_tag.data.tagdata.nodes.STEPTREE[-1]
        node.name = "COLLISION ROOT"
        print("    %s" % model_in_path)
        print("    Could not load gbxmodel. Gbxmodel wont have nodes and "
              "the geometry will not be positioned or rotated properly.")

    return coll_tag, model_in_tag, mod2_tag
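
A minimal usage sketch (paths are hypothetical; coll_def, mod2_def and the
other tag definitions are this module's imports):

# Hypothetical paths. get_tags() returns the loaded collision tag, the
# source model tag (or None if it couldn't be read), and a fresh gbxmodel
# tag with its nodes filled in.
coll_tag, model_in_tag, mod2_tag = get_tags(
    "tags/scenery/rock/rock.model_collision_geometry",
    Path("tags/scenery/rock/rock.gbxmodel"))
if model_in_tag is None:
    print("No donor nodes; geometry may be mispositioned.")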
Example #2
def read_halo1model(filepath):
    '''Takes a halo1 model file and turns it into a jms object.'''

    # TODO: Use a tag handler to see if these files actually are what they
    # say they are. We can get really nasty parsing problems if they aren't.

    # These two model types can be imported the same way because of their
    # nearly matching structures when built into a python object.
    if (filepath.lower().endswith('.gbxmodel')
            or filepath.lower().endswith('.model')):
        # Load model
        tag = None
        if filepath.lower().endswith('.gbxmodel'):
            tag = mod2_def.build(filepath=filepath)
        else:
            tag = mode_def.build(filepath=filepath)
        # TODO: Get all lod permutations.
        # Only getting the superhigh perms for now.
        jms = extract_model(tag.data.tagdata, write_jms=False)
        jms = list(filter(lambda m: m.lod_level == "superhigh", jms))
        return jms

    if filepath.lower().endswith('.jms'):
        # Read the jms file into a string.
        with open(filepath, 'r') as jms_file:
            jms_string = jms_file.read()
        # Parse the jms data from the string.
        jms = read_jms(jms_string)
        # Make sure it's a Halo 1 jms
        if jms.version != JMS_VERSION_HALO_1:
            raise ValueError('Not a Halo 1 jms!')

        return [jms]
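
A usage sketch (path hypothetical):

# Returns a list of JmsModel objects (only superhigh LODs for model tags).
jms_models = read_halo1model("tags/characters/cyborg/cyborg.gbxmodel")
for jms in jms_models:
    print(jms.name, "-", len(jms.verts), "verts")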
Example #3
def AmfToMod2(amf_model, purge_helpers, do_output):
    gbx_model = mod2_def.build()
    target = gbx_model.data.tagdata
    source = amf_model.data

    target.base_map_u_scale = 1.0
    target.base_map_v_scale = 1.0

    if do_output:
        print("Setting up nodes...", end='')
        sys.stdout.flush()

    t_nodes = target.nodes.STEPTREE
    s_nodes = source.nodes_header.STEPTREE

    node_id_translations, is_node_purged_list = RemoveHelpersAndFixups(s_nodes)

    for s_node in s_nodes:
        node_id = len(t_nodes)  # node index if no translation is needed
        t_nodes.append()
        t_node = t_nodes[-1]

        if len(s_node.name) > 31:
            t_node.name = s_node.name[0:31]
            # No need to warn if this is a purged helper/fixup bone,
            # as it will be removed anyway.
            if not is_node_purged_list[node_id]:
                print(
                    "Warning: The name of node #%d : %s is longer than 31 characters, got: %d"
                    % (len(t_nodes), s_node.name, len(s_node.name)))
                print("Cutting it short to:", t_node.name)
        else:
            t_node.name = s_node.name

        t_node.next_sibling_node = s_node.sibling_index
        t_node.first_child_node = s_node.child_index
        t_node.parent_node = s_node.parent_index
        t_node.translation.x = s_node.position.x / 100
        t_node.translation.y = s_node.position.y / 100
        t_node.translation.z = s_node.position.z / 100
        t_node.rotation[:] = model.InvertQuat(s_node.orientation)
        t_node.distance_from_parent = (t_node.translation[0]**2 +
                                       t_node.translation[1]**2 +
                                       t_node.translation[2]**2)**0.5

    if purge_helpers:
        if do_output:
            print("Calculating new node positions and rotations...", end='')
            sys.stdout.flush()
        # keep the remaining source node ids in their original order
        leftover_nodes = sorted(set(node_id_translations))

        for i in range(len(node_id_translations)):
            for j in range(len(leftover_nodes)):
                if leftover_nodes[j] == node_id_translations[i]:
                    node_id_translations[i] = j
                    break

        old_nodes = t_nodes
        old_node_transforms = model.GetAbsNodetransforms(old_nodes)

        new_nodes = CreateCutDownListUsingLeftoverIds(old_nodes,
                                                      leftover_nodes)[:]
        new_node_transforms = CreateCutDownListUsingLeftoverIds(
            old_node_transforms, leftover_nodes)

        # Find the right node to parent to
        for new_node in new_nodes:
            while (new_node.parent_node != -1 and
                   is_node_purged_list[new_node.parent_node]):
                new_node.parent_node = old_nodes[
                    new_node.parent_node].parent_node

            if new_node.parent_node != -1:
                new_node.parent_node = node_id_translations[
                    new_node.parent_node]

        # Find child and sibling nodes
        for i in range(len(new_nodes)):
            # Reset, then link to the next node if it shares this parent.
            new_nodes[i].next_sibling_node = -1
            if (i + 1 < len(new_nodes) and
                    new_nodes[i].parent_node == new_nodes[i + 1].parent_node):
                new_nodes[i].next_sibling_node = i + 1

            new_nodes[i].first_child_node = -1
            for j in range(i + 1, len(new_nodes)):
                if new_nodes[j].parent_node == i:
                    new_nodes[i].first_child_node = j
                    break

        new_nodes = model.SetRelNodetransforms(new_nodes, new_node_transforms)

        t_nodes[:] = new_nodes[:]

        if do_output:
            print("\nNode purging lowered the node count from %d to %d" %
                  (len(s_nodes), len(t_nodes)))

    if len(t_nodes) > 62:
        print(
            "Warning: node count is over the supported maximum. Supported range: 1-62, got: %d"
            % len(t_nodes))

    if do_output:
        print("done")
        print("Setting up markers...", end='')
        sys.stdout.flush()

    t_markers = target.markers.STEPTREE
    s_markers = source.markers_header.STEPTREE
    for s_marker in s_markers:
        t_markers.append()
        t_marker = t_markers[-1]

        t_instances = t_marker.marker_instances.STEPTREE
        s_instances = s_marker.marker_instances.STEPTREE
        if len(s_marker.name) > 31:
            t_marker.name = s_marker.name[0:31]
            print(
                "Warning: The name of marker #%d : %s is longer than 31 characters, got: %d."
                % (len(t_markers), s_marker.name, len(s_marker.name)))
            print("Cutting it short to:", t_marker.name)
        else:
            t_marker.name = s_marker.name

        for s_instance in s_instances:
            t_instances.append()
            t_instance = t_instances[-1]

            t_instance[0:3] = s_instance[0:3]
            t_instance.translation.x = s_instance.position.x / 100
            t_instance.translation.y = s_instance.position.y / 100
            t_instance.translation.z = s_instance.position.z / 100
            t_instance.rotation[:] = model.InvertQuat(s_instance.orientation)

            if purge_helpers:
                while t_instance.node_index != -1 and is_node_purged_list[
                        t_instance.node_index] != False:
                    t_instance.node_index = old_nodes[
                        t_instance.node_index].parent_node

                if t_instance.node_index != -1:
                    t_instance.node_index = node_id_translations[
                        t_instance.node_index]

    if do_output:
        print("done")
        print("Setting up regions...")
        sys.stdout.flush()

    t_regions = target.regions.STEPTREE
    s_regions = source.regions_header.STEPTREE
    t_geometries = target.geometries.STEPTREE
    if len(s_regions) > 8:
        print("Too many regions, max: 8, got: %s." % len(s_regions))

    for s_region in s_regions:
        t_regions.append()
        t_region = t_regions[-1]

        if len(s_region.name) > 31:
            t_region.name = s_region.name[0:31]
            print(
                "Warning: The name of region #%d : %s is longer than 31 characters, got: %d."
                % (len(t_regions), s_region.name, len(s_region.name)))
            print("Cutting it short to:", t_region.name)
        else:
            t_region.name = s_region.name

        t_permutations = t_region.permutations.STEPTREE
        s_permutations = s_region.permutations_header.STEPTREE
        for s_permutation in s_permutations:
            t_permutations.append()
            t_permutation = t_permutations[-1]

            perm_name = s_permutation.name
            if t_region.name == "Instances":
                perm_name = perm_name.replace("%", "", 1)

            if len(perm_name) > 31:
                t_permutation.name = perm_name[0:31]
                print(
                    "Warning: The name of permutation #%d : %s in region: #%d : %s is longer than 31 characters, got: %d."
                    % (len(t_permutations), perm_name, len(t_regions),
                       s_region.name, len(perm_name)))
                print("Cutting it short to:", t_permutation.name)
            else:
                t_permutation.name = perm_name

            if do_output:
                print("Setting up region: ",
                      t_region.name,
                      ", permutation: ",
                      t_permutation.name,
                      "...",
                      sep='',
                      end='')
                sys.stdout.flush()
            # set superlow-superhigh geometry block indices
            t_permutation[2:7] = [len(t_geometries)] * 5

            t_geometries.append()
            t_geometry = t_geometries[-1]

            t_parts = t_geometry.parts.STEPTREE

            bounds = None
            if s_permutation.format_info.compression_format != 0:
                bounds = s_permutation.vertices_header.bounds
            s_verts = s_permutation.vertices_header.vertices.vertices
            s_tris = s_permutation.faces_header.STEPTREE
            s_sections = s_permutation.sections_header.STEPTREE
            for s_section in s_sections:
                t_parts.append()
                t_part = t_parts[-1]

                t_part.shader_index = s_section.shader_index

                # Get all the triangles that use this shader
                used_vert_list = [False] * len(s_verts)
                triangles = []
                for i in range(s_section.starting_face,
                               s_section.starting_face + s_section.face_count):
                    triangles.append(s_tris[i][:])

                    used_vert_list[triangles[-1][0]] = True
                    used_vert_list[triangles[-1][1]] = True
                    used_vert_list[triangles[-1][2]] = True

                # Get all vertices that are used by these triangles
                vert_translation_list = [0] * len(used_vert_list)
                verts = []
                for i in range(len(used_vert_list)):
                    if used_vert_list[i]:
                        verts.append(s_verts[i])
                    vert_translation_list[i] = len(verts) - 1

                ## Get all relevant info from each vert and add it to the GBX Model Part
                t_verts = t_part.uncompressed_vertices.STEPTREE
                vertex_format = s_permutation.format_info.vertex_format
                compression_format = s_permutation.format_info.compression_format
                for s_vert in verts:
                    t_verts.append()
                    t_vert = t_verts[-1]

                    if compression_format == 0:
                        t_vert[0] = s_vert.data.position[0] / 100
                        t_vert[1] = s_vert.data.position[1] / 100
                        t_vert[2] = s_vert.data.position[2] / 100
                        t_vert[3:6] = s_vert.data.normal[0:3]
                        # binormals and tangents are calculated when all verts are added to the STEPTREE
                        t_vert.u = s_vert.data.u
                        t_vert.v = 1 - s_vert.data.v
                    else:
                        bounds = s_permutation.vertices_header.bounds
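                        # Dequantize: each 16-bit component maps linearly from
                        # [0, 32767] onto the per-axis [lower, upper] bounds;
                        # the /100 rescales AMF units to Halo world units.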
                        t_vert.position_x = (
                            (s_vert.data.position.x / 32767) *
                            (bounds.x.upper - bounds.x.lower) +
                            bounds.x.lower) / 100
                        t_vert.position_y = (
                            (s_vert.data.position.y / 32767) *
                            (bounds.y.upper - bounds.y.lower) +
                            bounds.y.lower) / 100
                        t_vert.position_z = (
                            (s_vert.data.position.z / 32767) *
                            (bounds.z.upper - bounds.z.lower) +
                            bounds.z.lower) / 100

                        t_vert.normal_i = s_vert.data.normal.i / 1023
                        t_vert.normal_j = s_vert.data.normal.j / 1023
                        t_vert.normal_k = s_vert.data.normal.k / 511

                        t_vert.u = (s_vert.data.u / 32767) * (
                            bounds.u.upper - bounds.u.lower) + bounds.u.lower
                        t_vert.v = 1 - (
                            (s_vert.data.v / 32767) *
                            (bounds.v.upper - bounds.v.lower) + bounds.v.lower)

                    if vertex_format == 0:
                        t_vert.node_0_index = node_id_translations[
                            s_permutation.node_index]
                        t_vert.node_0_weight = 1.0

                    elif vertex_format == 1:
                        t_vert.node_0_index = node_id_translations[
                            s_vert.node_indices[0]]
                        if s_vert.node_indices[1] != 255:
                            if node_id_translations[s_vert.node_indices[
                                    1]] == t_vert.node_0_index:
                                t_vert.node_0_weight = 1.0
                            else:
                                t_vert.node_1_index = node_id_translations[
                                    s_vert.node_indices[1]]
                                t_vert.node_0_weight = 0.5
                                t_vert.node_1_weight = 0.5
                        else:
                            t_vert.node_0_weight = 1.0

                    elif vertex_format == 2:
                        index_count = 1
                        if s_vert.node_indices[1] == 255: index_count = 1
                        elif s_vert.node_indices[2] == 255: index_count = 2
                        elif s_vert.node_indices[3] == 255: index_count = 3
                        else: index_count = 4

                        # Take all the node indices and weights and put them in a neat list
                        available_nodes = []
                        for i in range(0, index_count):
                            this_vert = []
                            this_vert.append(
                                node_id_translations[s_vert.node_indices[i]])
                            this_vert.append(s_vert.node_weights[i])
                            found = False
                            for a_vert in available_nodes:
                                if a_vert[0] == this_vert[0]:
                                    a_vert[1] += this_vert[1]
                                    found = True
                                    break
                            if not found:
                                available_nodes.append(this_vert)

                        vert_weights_to_collect = 1
                        if len(available_nodes) > 1:
                            vert_weights_to_collect = 2

                        # Get the two highest weighted node indices and weights and apply them to the target vertex
                        for v in range(vert_weights_to_collect):
                            highest_weight = 0.0
                            highest_weight_index = 0
                            highest_weight_ref = 0

                            for i in range(len(available_nodes)):
                                if available_nodes[i][1] > highest_weight:
                                    highest_weight = available_nodes[i][1]
                                    highest_weight_index = available_nodes[i][0]
                                    highest_weight_ref = i

                            if v == 0:
                                t_vert.node_0_index = highest_weight_index
                                t_vert.node_0_weight = highest_weight
                            else:
                                t_vert.node_1_index = highest_weight_index
                                t_vert.node_1_weight = highest_weight

                            available_nodes.pop(highest_weight_ref)

                        # Normalize vert weights so they total 1.0
                        total_weight = t_vert.node_0_weight + t_vert.node_1_weight
                        t_vert.node_0_weight /= total_weight
                        t_vert.node_1_weight /= total_weight

                ## Convert triangles to strips and add them to the GBX Model Part
                # Translate the triangle vert ids to match the correct verts in the Part
                for triangle in triangles:
                    triangle[0] = vert_translation_list[triangle[0]]
                    triangle[1] = vert_translation_list[triangle[1]]
                    triangle[2] = vert_translation_list[triangle[2]]

                # Calculate the Binormals and Tangents of each vert
                model.CalcVertBiNormsAndTangents(t_verts, triangles)

                triangle_strip = model.TrianglesToStrips(triangles)

                # The triangle strip needs to be divisible by 3
                needed_padding = (3 - len(triangle_strip) % 3) % 3

                # Any unused/padding slots in the triangles array need to have the number -1
                for i in range(needed_padding):
                    triangle_strip.append(-1)

                # Write the strip to the array
                t_tris = t_part.triangles.STEPTREE
                for i in range(0, len(triangle_strip), 3):
                    t_tris.append()
                    t_tris[-1][:] = triangle_strip[i:i + 3]

                # Calculate the centroid translation by averaging all vertices!
                t_part.centroid_translation[:] = [0.0, 0.0, 0.0]
                for v in t_verts:
                    #First 3 indices in a vertex are the translation.
                    for c in range(3):
                        t_part.centroid_translation[c] += v[c]
                for c in range(3):
                    t_part.centroid_translation[c] /= len(t_verts)
            if do_output: print("done")

    if do_output:
        print("Setting up shaders...", end='')
        sys.stdout.flush()

    t_shaders = target.shaders.STEPTREE
    s_shaders = source.shaders_header.STEPTREE
    shader_name_counts = []
    for s_shader in s_shaders:
        t_shaders.append()
        t_shader = t_shaders[-1]
        t_shader.shader.filepath = s_shader.name
        # Track duplicate shader names: each entry is [name, count].
        for entry in shader_name_counts:
            if entry[0] == s_shader.name:
                entry[1] += 1
                break
        else:
            shader_name_counts.append([s_shader.name, 1])
    if do_output:
        print("done")
        sys.stdout.flush()
    return gbx_model
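
A usage sketch (amf_def and all paths are assumptions used for illustration):

# Hypothetical: load an AMF model, convert it, then save the gbxmodel.
amf_model = amf_def.build(filepath="extracted/masterchief.amf")
gbx_model = AmfToMod2(amf_model, purge_helpers=True, do_output=True)
gbx_model.filepath = "tags/characters/masterchief.gbxmodel"
gbx_model.serialize(temp=False, backup=False)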
Example #4
def sbsp_to_mod2(sbsp_path,
                 include_lens_flares=True,
                 include_markers=True,
                 include_weather_polyhedra=True,
                 include_fog_planes=True,
                 include_portals=True,
                 include_collision=True,
                 include_renderable=True,
                 include_mirrors=True,
                 include_lightmaps=True,
                 fan_weather_polyhedra=True,
                 fan_fog_planes=True,
                 fan_portals=True,
                 fan_collision=True,
                 fan_mirrors=True,
                 optimize_fog_planes=False,
                 optimize_portals=False,
                 weather_polyhedra_tolerance=0.0000001):

    print("    Loading sbsp tag...")
    sbsp_tag = sbsp_def.build(filepath=sbsp_path)
    mod2_tag = mod2_def.build()

    sbsp_body = sbsp_tag.data.tagdata
    coll_mats = [
        JmsMaterial(mat.shader.filepath.split("\\")[-1])
        for mat in sbsp_body.collision_materials.STEPTREE
    ]

    base_nodes = [JmsNode("frame")]
    jms_models = []

    if include_markers:
        print("    Converting markers...")
        try:
            jms_models.append(
                make_marker_jms_model(sbsp_body.markers.STEPTREE, base_nodes))
        except Exception:
            print(format_exc())
            print("    Could not convert markers")

    if include_lens_flares:
        print("    Converting lens flares...")
        try:
            jms_models.append(
                make_lens_flare_jms_model(
                    sbsp_body.lens_flare_markers.STEPTREE,
                    sbsp_body.lens_flares.STEPTREE, base_nodes))
        except Exception:
            print(format_exc())
            print("    Could not convert lens flares")

    if include_fog_planes:
        print("    Converting fog planes...")
        try:
            jms_models.extend(
                make_fog_plane_jms_models(sbsp_body.fog_planes.STEPTREE,
                                          base_nodes, fan_fog_planes,
                                          optimize_fog_planes))
        except Exception:
            print(format_exc())
            print("    Could not convert fog planes")

    if include_mirrors:
        print("    Converting mirrors...")
        try:
            jms_models.extend(
                make_mirror_jms_models(sbsp_body.clusters.STEPTREE, base_nodes,
                                       fan_mirrors))
        except Exception:
            print(format_exc())
            print("    Could not convert mirrors")

    if include_portals and sbsp_body.collision_bsp.STEPTREE:
        print("    Converting portals...")
        try:
            jms_models.extend(
                make_cluster_portal_jms_models(
                    sbsp_body.collision_bsp.STEPTREE[0].planes.STEPTREE,
                    sbsp_body.clusters.STEPTREE,
                    sbsp_body.cluster_portals.STEPTREE, base_nodes,
                    fan_portals, optimize_portals))
        except Exception:
            print(format_exc())
            print("    Could not convert portals")

    if include_weather_polyhedra:
        print("    Converting weather polyhedra...")
        try:
            jms_models.extend(
                make_weather_polyhedra_jms_models(
                    sbsp_body.weather_polyhedras.STEPTREE, base_nodes,
                    fan_weather_polyhedra, weather_polyhedra_tolerance))
        except Exception:
            print(format_exc())
            print("    Could not convert weather polyhedra")

    if include_collision:
        print("    Converting collision...")
        try:
            jms_models.extend(
                make_bsp_coll_jms_models(sbsp_body.collision_bsp.STEPTREE,
                                         coll_mats, base_nodes, None, False,
                                         fan_collision))
        except Exception:
            print(format_exc())
            print("    Could not convert collision")

    if include_renderable:
        print("    Converting renderable...")
        try:
            jms_models.extend(
                make_bsp_renderable_jms_models(sbsp_body, base_nodes))
        except Exception:
            print(format_exc())
            print("    Could not convert renderable")

    if include_lightmaps:
        print("    Converting lightmaps...")
        try:
            jms_models.extend(
                make_bsp_lightmap_jms_models(sbsp_body, base_nodes))
        except Exception:
            print(format_exc())
            print("    Could not convert lightmaps")

    print("    Compiling gbxmodel...")
    mod2_tag.filepath = str(Path(sbsp_path).with_suffix('')) + "_SBSP.gbxmodel"
    compile_gbxmodel(mod2_tag, MergedJmsModel(*jms_models), True)
    return mod2_tag
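
A usage sketch (path hypothetical):

# sbsp_to_mod2() sets the output filepath itself, so the returned tag
# only needs to be serialized.
mod2_tag = sbsp_to_mod2("tags/levels/test/bloodgulch.scenario_structure_bsp",
                        include_lightmaps=False)
mod2_tag.serialize(temp=False, backup=False)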
Example #5
def AmfToMod2(amf, strip_helpers, dirty_rig_fix):
    gbx = mod2_def.build()
    t = gbx.data.tagdata  # target
    s = amf.data  # source

    log.info("Building nodes block...")

    # Node lists
    t_nodes = t.nodes.STEPTREE
    s_nodes = s.nodes_header.STEPTREE

    # Vertex weights use ids, so we build lists for translating and ignoring.
    # We need this because the node setup in the gbx model will be different,
    # which would mean that the ids don't match up.
    # Translates node ids from the s_nodes set to the t_nodes set.
    node_s_to_t = []
    # Ids of the nodes from s_nodes that we should keep.
    node_leftover_id = []
    # Determines which node weights the skinning process should ignore.
    node_ignore_id = []

    # Set up our lists appropriately.
    # Get conversion and rig fix lists if applicable. And just create dummy lists if not.
    if strip_helpers:
        node_s_to_t, node_leftover_id = GetTransListForHelperStripping(s_nodes)
    else:
        for i in range(len(s_nodes)):
            node_s_to_t.append(i)
            node_leftover_id.append(i)

    if dirty_rig_fix:
        node_ignore_id = GetDirtyRigFixList(s_nodes)
    else:
        node_ignore_id = [False] * len(s_nodes)

    # Build the node list in the target gbx.
    for node_id in node_leftover_id:
        s_node = s_nodes[node_id]
        t_nodes.append()
        t_node = t_nodes[-1]

        if len(s_node.name) > 31:
            t_node.name = s_node.name[0:31]
            log.warning(
                "Warning: The name of node #%d : %s is longer than 31 characters, got: %d\nCutting it short to: %s"
                % (len(t_nodes), s_node.name, len(s_node.name), t_node.name))
        else:
            t_node.name = s_node.name

        if s_node.parent_node > -1:
            t_node.parent_node = node_s_to_t[s_node.parent_node]
        else:
            t_node.parent_node = -1
        trans = Vec3d(s_node.translation) / 100
        t_node.translation[:] = trans[:]
        t_node.rotation[:] = Quaternion(s_node.rotation).inverse[:]
        t_node.distance_from_parent = trans.magnitude

    # Fix positions and rotations when stripping helpers.
    if strip_helpers:
        # Get absolute transforms.
        abs_transforms = fmodel.GetAbsNodetransforms(s_nodes)
        # Cut down list to only include the abs transforms for the leftover nodes.
        abs_transforms = fstruct.CreateNewListUsingIds(abs_transforms,
                                                       node_leftover_id)
        # Fix size and rotation inconsistencies between AMF and GBX.
        for transform in abs_transforms:
            transform[0] = transform[0] / 100
            transform[1] = transform[1].inverse
        # Apply changes.
        t_nodes[:] = fmodel.SetRelNodetransforms(t_nodes, abs_transforms)[:]

    # Fix node order to abide by Halo's node sorting rules. Per level, alphabetical.
    t_nodes, translation_list = fmodel.SortNodes(t_nodes)
    # Update the translation list to reflect the new order.
    for i in range(len(node_s_to_t)):
        node_s_to_t[i] = translation_list[node_s_to_t[i]]

    # Safety reassignment
    t.nodes.STEPTREE = t_nodes

    log.info("Building markers block...")

    # Marker lists
    t_markers = t.markers.STEPTREE
    s_markers = s.markers_header.STEPTREE

    # Build markers block
    for s_marker in s_markers:
        t_markers.append()
        t_marker = t_markers[-1]

        if len(s_marker.name) > 31:
            t_marker.name = s_marker.name[0:31]
            log.warning(
                "Warning: The name of marker #%d : %s is longer than 31 characters, got: %d\nCutting it short to: %s"
                % (len(t_markers), s_marker.name, len(s_marker.name),
                   t_marker.name))
        else:
            t_marker.name = s_marker.name

        t_instances = t_marker.marker_instances.STEPTREE
        s_instances = s_marker.marker_instances.STEPTREE

        for s_instance in s_instances:
            t_instances.append()
            t_instance = t_instances[-1]

            t_instance[0:3] = s_instance[0:3]
            t_instance.translation[:] = (Vec3d(s_instance.position) / 100)[:]
            t_instance.rotation[:] = Quaternion(
                s_instance.orientation).inverse[:]
            if t_instance.node_index != -1:
                t_instance.node_index = node_s_to_t[t_instance.node_index]

    log.info("Building regions and geometries blocks...")

    t_regions = t.regions.STEPTREE
    s_regions = s.regions_header.STEPTREE

    t_geometries = t.geometries.STEPTREE

    for s_region in s_regions:
        t_regions.append()
        t_region = t_regions[-1]

        if len(s_region.name) > 31:
            t_region.name = s_region.name[0:31]
            log.warning(
                "Warning: The name of region #%d : %s is longer than 31 characters, got: %d\nCutting it short to: %s"
                % (len(t_regions), s_region.name, len(s_region.name),
                   t_region.name))
        else:
            t_region.name = s_region.name

        t_permutations = t_region.permutations.STEPTREE
        s_permutations = s_region.permutations_header.STEPTREE

        for s_permutation in s_permutations:
            t_permutations.append()
            t_permutation = t_permutations[-1]

            perm_name = s_permutation.name
            if t_region.name == "Instances":
                perm_name = perm_name.replace("%", "", 1)

            if len(perm_name) > 31:
                t_permutation.name = perm_name[0:31]
                log.warning(
                    "Warning: The name of permutation #%d : %s is longer than 31 characters, got: %d\nCutting it short to: %s"
                    % (len(t_permutations), perm_name, len(perm_name),
                       t_permutation.name))
            else:
                t_permutation.name = perm_name

            # set superlow-superhigh geometry block indices
            t_permutation[2:7] = [len(t_geometries)] * 5

            t_geometries.append()
            t_geometry = t_geometries[-1]

            t_parts = t_geometry.parts.STEPTREE

            bounds = None
            if s_permutation.format_info.compression_format != 0:
                bounds = s_permutation.vertices_header.bounds
            s_verts = s_permutation.vertices_header.vertices.vertices
            s_tris = s_permutation.faces_header.STEPTREE
            s_sections = s_permutation.sections_header.STEPTREE

            for s_section in s_sections:
                t_parts.append()
                t_part = t_parts[-1]

                t_part.shader_index = s_section.shader_index

                # Get all the triangles that use this shader
                used_vert_list = [False] * len(s_verts)
                triangles = []
                for i in range(s_section.starting_face,
                               s_section.starting_face + s_section.face_count):
                    triangles.append(s_tris[i][:])

                    used_vert_list[triangles[-1][0]] = True
                    used_vert_list[triangles[-1][1]] = True
                    used_vert_list[triangles[-1][2]] = True

                # Get all vertices that are used by these triangles
                vert_translation_list = [0] * len(used_vert_list)
                verts = []
                for i in range(len(used_vert_list)):
                    if used_vert_list[i]:
                        verts.append(s_verts[i])
                    vert_translation_list[i] = len(verts) - 1

                ## Get all relevant info from each vert and add it to the GBX Model Part
                t_verts = t_part.uncompressed_vertices.STEPTREE
                vertex_format = s_permutation.format_info.vertex_format
                compression_format = s_permutation.format_info.compression_format

                for s_vert in verts:
                    t_verts.append()
                    t_vert = t_verts[-1]

                    if compression_format == 0:
                        t_vert[0] = s_vert.data.position[0] / 100
                        t_vert[1] = s_vert.data.position[1] / 100
                        t_vert[2] = s_vert.data.position[2] / 100
                        t_vert[3:6] = s_vert.data.normal[0:3]
                        # binormals and tangents are calculated when all verts are added to the STEPTREE
                        t_vert.u = s_vert.data.u
                        t_vert.v = 1 - s_vert.data.v
                    else:
                        bounds = s_permutation.vertices_header.bounds
                        t_vert.position_x = (
                            (s_vert.data.position.x / 32767) *
                            (bounds.x.upper - bounds.x.lower) +
                            bounds.x.lower) / 100
                        t_vert.position_y = (
                            (s_vert.data.position.y / 32767) *
                            (bounds.y.upper - bounds.y.lower) +
                            bounds.y.lower) / 100
                        t_vert.position_z = (
                            (s_vert.data.position.z / 32767) *
                            (bounds.z.upper - bounds.z.lower) +
                            bounds.z.lower) / 100

                        t_vert.normal_i = s_vert.data.normal.i / 1023
                        t_vert.normal_j = s_vert.data.normal.j / 1023
                        t_vert.normal_k = s_vert.data.normal.k / 511

                        t_vert.u = (s_vert.data.u / 32767) * (
                            bounds.u.upper - bounds.u.lower) + bounds.u.lower
                        t_vert.v = 1 - (
                            (s_vert.data.v / 32767) *
                            (bounds.v.upper - bounds.v.lower) + bounds.v.lower)

                    if vertex_format == 0:
                        t_vert.node_0_index = node_s_to_t[
                            s_permutation.node_index]
                        t_vert.node_0_weight = 1.0

                    elif vertex_format == 1:
                        t_vert.node_0_index = node_s_to_t[
                            s_vert.node_indices[0]]
                        if s_vert.node_indices[1] != 255:
                            if node_s_to_t[s_vert.node_indices[
                                    1]] == t_vert.node_0_index:
                                t_vert.node_0_weight = 1.0
                                t_vert.node_1_index = -1
                            else:
                                t_vert.node_1_index = node_s_to_t[
                                    s_vert.node_indices[1]]
                                t_vert.node_0_weight = 0.5
                                t_vert.node_1_weight = 0.5
                        else:
                            t_vert.node_0_weight = 1.0
                            t_vert.node_1_index = -1

                    elif vertex_format == 2:
                        index_count = 1
                        if s_vert.node_indices[1] == 255: index_count = 1
                        elif s_vert.node_indices[2] == 255: index_count = 2
                        elif s_vert.node_indices[3] == 255: index_count = 3
                        else: index_count = 4

                        # Make a list of all different ids this vert is skinned to, adding up weight of dupes.
                        v_node_ids = []
                        v_weights = []
                        for i in range(index_count):
                            if not node_ignore_id[s_vert.node_indices[i]]:
                                match = None
                                effective_node_id = node_s_to_t[
                                    s_vert.node_indices[i]]
                                # Use a separate index so we don't clobber i.
                                for j in range(len(v_node_ids)):
                                    if v_node_ids[j] == effective_node_id:
                                        match = j
                                        break
                                if match is None:
                                    v_node_ids.append(effective_node_id)
                                    v_weights.append(s_vert.node_weights[i])
                                else:
                                    v_weights[match] += s_vert.node_weights[i]

                        if len(v_node_ids) <= 1:
                            t_vert.node_0_index = v_node_ids[0]
                            t_vert.node_0_weight = 1.0
                            t_vert.node_1_index = -1
                            t_vert.node_1_weight = 0.0
                        else:
                            # Get two highest weight nodes
                            highest_weight_id = v_weights.index(max(v_weights))
                            t_vert.node_0_index = v_node_ids.pop(
                                highest_weight_id)
                            t_vert.node_0_weight = v_weights.pop(
                                highest_weight_id)
                            highest_weight_id = v_weights.index(max(v_weights))
                            t_vert.node_1_index = v_node_ids.pop(
                                highest_weight_id)
                            t_vert.node_1_weight = v_weights.pop(
                                highest_weight_id)
                            # Normalize the weights
                            total_weight = t_vert.node_0_weight + t_vert.node_1_weight
                            t_vert.node_0_weight /= total_weight
                            t_vert.node_1_weight /= total_weight

                ## Convert triangles to strips and add them to the GBX Model Part
                # Translate the triangle vert ids to match the correct verts in the Part
                for triangle in triangles:
                    triangle[0] = vert_translation_list[triangle[0]]
                    triangle[1] = vert_translation_list[triangle[1]]
                    triangle[2] = vert_translation_list[triangle[2]]

                # Calculate the Binormals and Tangents of each vert
                fmodel.CalcVertBiNormsAndTangents(t_verts, triangles)

                triangle_strip = fmodel.TrianglesToStrips(triangles)

                # The triangle strip needs to be divisible by 3
                needed_padding = (3 - len(triangle_strip) % 3) % 3

                # Any unused/padding slots in the triangles array need to have the number -1
                for i in range(needed_padding):
                    triangle_strip.append(-1)

                # Write the strip to the array
                t_tris = t_part.triangles.STEPTREE
                for i in range(0, len(triangle_strip), 3):
                    t_tris.append()
                    t_tris[-1][:] = triangle_strip[i:i + 3]

                xs = []
                ys = []
                zs = []
                for v in t_verts:
                    xs.append(v[0])
                    ys.append(v[1])
                    zs.append(v[2])

                x_hi = max(xs)
                y_hi = max(ys)
                z_hi = max(zs)

                x_lo = min(xs)
                y_lo = min(ys)
                z_lo = min(zs)

                t_part.centroid_translation[0] = (x_hi - x_lo) / 2.0 + x_lo
                t_part.centroid_translation[1] = (y_hi - y_lo) / 2.0 + y_lo
                t_part.centroid_translation[2] = (z_hi - z_lo) / 2.0 + z_lo

    log.info("Building shaders block...")
    # TODO: clean this up or have the main process handle it.
    t_shaders = t.shaders.STEPTREE
    s_shaders = s.shaders_header.STEPTREE
    shader_name_counts = []
    for s_shader in s_shaders:
        t_shaders.append()
        t_shader = t_shaders[-1]
        t_shader.shader.filepath = s_shader.name
        # Track duplicate shader names: each entry is [name, count].
        for entry in shader_name_counts:
            if entry[0] == s_shader.name:
                entry[1] += 1
                break
        else:
            shader_name_counts.append([s_shader.name, 1])

    if len(s_regions) > 8:
        print(
            "Too many regions, max: 8, got: %s.\nYou'll have to fix this manually."
            % len(s_regions))

    return gbx
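
A usage sketch (amf_def and paths are assumptions, as in the example above):

# Hypothetical: convert with helper stripping on and the rig fix off.
amf = amf_def.build(filepath="extracted/warthog.amf")
gbx = AmfToMod2(amf, strip_helpers=True, dirty_rig_fix=False)
gbx.filepath = "tags/vehicles/warthog.gbxmodel"
gbx.serialize(temp=False, backup=False)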
Example #6
    def _load_models(self):
        models_dir = self.jms_dir.get()
        if not models_dir:
            return

        start = time.time()
        print("Locating jms files...")
        fps = []
        for _, __, files in os.walk(models_dir):
            for fname in files:
                ext = os.path.splitext(fname)[-1].lower()
                #if ext in ".jms.obj.dae":
                if ext in ".jms.obj":
                    fps.append(os.path.join(models_dir, fname))

            break

        if not fps:
            print("    No valid jms files found in the folder.")
            return

        self.mod2_tag = self.merged_jms = None
        optimize_level = max(0, self.optimize_menu.sel_index)

        jms_models = self.jms_models = []
        print("Loading jms files...")
        self.app_root.update()
        for fp in fps:
            try:
                print("    %s" % fp.replace('/', '\\').split("\\")[-1])
                self.app_root.update()

                model_name = os.path.basename(fp).split('.')[0]
                ext = os.path.splitext(fp)[-1].lower()

                jms_model = None
                if ext == ".jms":
                    with open(fp, "r") as f:
                        jms_model = read_jms(f.read(), '', model_name)
                elif ext == ".obj":
                    with open(fp, "r") as f:
                        jms_model = jms_model_from_obj(f.read(), model_name)
                elif ext == ".dae":
                    jms_model = jms_model_from_dae(fp, model_name)

                if not jms_model:
                    continue

                jms_models.append(jms_model)

                if optimize_level:
                    old_vert_ct = len(jms_model.verts)
                    print("        Optimizing...", end='')
                    jms_model.optimize_geometry(optimize_level == 1)
                    print(" Removed %s verts" %
                          (old_vert_ct - len(jms_model.verts)))

                print("        Calculating normals...")
                jms_model.calculate_vertex_normals()
            except Exception:
                print(format_exc())
                print("    Could not parse jms file.")
                self.app_root.update()

        if not jms_models:
            print("    No valid jms files found.")
            return

        first_crc = None
        for jms_model in jms_models:
            if first_crc is None:
                first_crc = jms_model.node_list_checksum
            elif first_crc != jms_model.node_list_checksum:
                print("    Warning, not all node list checksums match.")
                break

        # make sure the highest lod for each permutation is set as superhigh
        # this is necessary, as only superhigh jms markers are used
        jms_models_by_name = {}
        for jms_model in jms_models:
            lod_models = jms_models_by_name.setdefault(jms_model.perm_name,
                                                       [None] * 5)
            lod_index = {
                "high": 1,
                "medium": 2,
                "low": 3,
                "superlow": 4
            }.get(jms_model.lod_level, 0)
            lod_models[lod_index] = jms_model

        for lod_models in jms_models_by_name.values():
            for jms_model in lod_models:
                if jms_model is not None:
                    jms_model.lod_level = "superhigh"
                    break

        print("Merging jms data...")
        self.app_root.update()
        self.merged_jms = merged_jms = MergedJmsModel()
        errors_occurred = False
        for jms_model in jms_models:
            errors = merged_jms.merge_jms_model(jms_model)
            errors_occurred |= bool(errors)
            if errors:
                print("    Errors in '%s'" % jms_model.name)
                for error in errors:
                    print("        ", error, sep='')

            self.app_root.update()

        mod2_path = self.gbxmodel_path.get()
        tags_dir = self.tags_dir.get().replace('/', '\\')
        self.shader_names_menu.max_index = len(merged_jms.materials) - 1

        shaders_dir = ""
        if mod2_path:
            shaders_dir = os.path.join(os.path.dirname(mod2_path), "shaders",
                                       '')
        has_local_shaders = os.path.exists(shaders_dir) and os.path.exists(
            tags_dir)
        if errors_occurred:
            print("    Errors occurred while loading jms files.")
        elif os.path.isfile(mod2_path):
            try:
                self.mod2_tag = mod2_def.build(filepath=mod2_path)

                tagdata = self.mod2_tag.data.tagdata
                self.merged_jms.node_list_checksum = tagdata.node_list_checksum
                self.superhigh_lod_cutoff.set(str(
                    tagdata.superhigh_lod_cutoff))
                self.high_lod_cutoff.set(str(tagdata.high_lod_cutoff))
                self.medium_lod_cutoff.set(str(tagdata.medium_lod_cutoff))
                self.low_lod_cutoff.set(str(tagdata.low_lod_cutoff))
                self.superlow_lod_cutoff.set(str(tagdata.superlow_lod_cutoff))

                # get any shaders in the gbxmodel and set the shader_path
                # and shader_type for any matching materials in the jms
                shdr_refs = {}
                for shdr_ref in tagdata.shaders.STEPTREE:
                    shdr_name = shdr_ref.shader.filepath.split(
                        "\\")[-1].lower()
                    shdr_refs.setdefault(shdr_name, []).append(shdr_ref)

                for mat in merged_jms.materials:
                    shdr_ref = shdr_refs.get(mat.name, [""]).pop(0)
                    if shdr_ref:
                        mat.shader_type = shdr_ref.shader.tag_class.enum_name
                        mat.shader_path = shdr_ref.shader.filepath

                local_shaders = {}
                if has_local_shaders and is_in_dir(shaders_dir, tags_dir):
                    # fill in any missing shader paths with ones found nearby
                    for _, __, files in os.walk(shaders_dir):
                        for filename in files:
                            name, ext = os.path.splitext(filename)
                            ext = ext.lower()
                            if ext.startswith(".shader"):
                                local_shaders.setdefault(
                                    name.split("\\")[-1].lower(), []).append(
                                        os.path.join(shaders_dir, filename))
                        break

                    for mat in merged_jms.materials:
                        shader_path = local_shaders.get(mat.name, [""]).pop(0)
                        if "shader_" in mat.shader_type or not shader_path:
                            continue

                        # shader type isn't set. Try to detect its location and
                        # type if possible, or set it to a default value if not
                        shader_path = shader_path.lower().replace("/", "\\")
                        name, ext = os.path.splitext(shader_path)
                        mat.shader_path = os.path.relpath(name,
                                                          tags_dir).strip("\\")
                        mat.shader_type = ext.strip(".")

            except Exception:
                print(format_exc())
        else:
            self.superhigh_lod_cutoff.set("0.0")
            self.high_lod_cutoff.set("0.0")
            self.medium_lod_cutoff.set("0.0")
            self.low_lod_cutoff.set("0.0")
            self.superlow_lod_cutoff.set("0.0")

        for mat in merged_jms.materials:
            shader_path = mat.shader_path
            if mat.shader_type in ("shader", ""):
                assume_shaders_dir = not shaders_dir

                if not assume_shaders_dir:
                    try:
                        shader_path = os.path.relpath(
                            os.path.join(shaders_dir, shader_path), tags_dir)
                        shader_path = shader_path.strip("\\")
                    except ValueError:
                        assume_shaders_dir = True

                mat.shader_type = "shader_model"
            else:
                assume_shaders_dir = False

            if assume_shaders_dir or shader_path.startswith("..\\"):
                shader_path = "shaders\\" + os.path.basename(shader_path)

            mat.shader_path = shader_path.lstrip("..\\")

        if not self.mod2_tag:
            print(
                "    Existing gbxmodel tag not detected or could not be loaded.\n"
                "        A new gbxmodel tag will be created.")

        print("Finished loading models. Took %.6f seconds.\n" %
              (time.time() - start))
        self.select_shader(0)
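
is_in_dir is imported from elsewhere; a plausible equivalent, shown here as
an assumption rather than the project's actual helper:

import os

def is_in_dir(path, dirpath):
    # True if path lies inside dirpath after normalizing both.
    path = os.path.realpath(path)
    dirpath = os.path.realpath(dirpath)
    return os.path.commonpath([path, dirpath]) == dirpath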
Example #7
    def _compile_gbxmodel(self):
        if not self.merged_jms:
            return

        try:
            superhigh_lod_cutoff = self.superhigh_lod_cutoff.get().strip(" ")
            high_lod_cutoff = self.high_lod_cutoff.get().strip(" ")
            medium_lod_cutoff = self.medium_lod_cutoff.get().strip(" ")
            low_lod_cutoff = self.low_lod_cutoff.get().strip(" ")
            superlow_lod_cutoff = self.superlow_lod_cutoff.get().strip(" ")

            if not superhigh_lod_cutoff: superhigh_lod_cutoff = "0"
            if not high_lod_cutoff: high_lod_cutoff = "0"
            if not medium_lod_cutoff: medium_lod_cutoff = "0"
            if not low_lod_cutoff: low_lod_cutoff = "0"
            if not superlow_lod_cutoff: superlow_lod_cutoff = "0"

            superhigh_lod_cutoff = float(superhigh_lod_cutoff)
            high_lod_cutoff = float(high_lod_cutoff)
            medium_lod_cutoff = float(medium_lod_cutoff)
            low_lod_cutoff = float(low_lod_cutoff)
            superlow_lod_cutoff = float(superlow_lod_cutoff)
        except ValueError:
            print("LOD cutoffs are invalid.")
            return

        updating = self.mod2_tag is not None
        if updating:
            print("Updating existing gbxmodel tag.")
            mod2_tag = self.mod2_tag
        else:
            print("Creating new gbxmodel tag.")
            mod2_tag = mod2_def.build()

            while not self.gbxmodel_path.get():
                self.gbxmodel_path_browse(True)
                if not self.gbxmodel_path.get():
                    if messagebox.askyesno(
                            "Unsaved gbxmodel",
                            "Are you sure you wish to cancel saving?",
                            icon='warning',
                            parent=self):
                        print("    Gbxmodel compilation cancelled.")
                        return

            mod2_tag.filepath = self.gbxmodel_path.get()

        self.app_root.update()

        errors = compile_gbxmodel(mod2_tag, self.merged_jms)
        if errors:
            for error in errors:
                print(error)
            print("Gbxmodel compilation failed.")
            return

        tags_dir = self.tags_dir.get()
        if tags_dir:
            data_dir = os.path.join(os.path.dirname(os.path.dirname(tags_dir)),
                                    "data", "")
            for mat in self.merged_jms.materials:
                try:
                    generate_shader(mat, tags_dir, data_dir)
                except Exception:
                    print(format_exc())
                    print("Failed to generate shader tag.")

        tagdata = mod2_tag.data.tagdata
        tagdata.superhigh_lod_cutoff = superhigh_lod_cutoff
        tagdata.high_lod_cutoff = high_lod_cutoff
        tagdata.medium_lod_cutoff = medium_lod_cutoff
        tagdata.low_lod_cutoff = low_lod_cutoff
        tagdata.superlow_lod_cutoff = superlow_lod_cutoff

        try:
            mod2_tag.calc_internal_data()
            mod2_tag.serialize(temp=False,
                               backup=False,
                               calc_pointers=False,
                               int_test=False)
            print("    Finished")
        except Exception:
            print(format_exc())
            print("    Could not save compiled gbxmodel.")
Example #8
    parser.add_argument('-s', '--remove-duplicate-shaders', dest='remove_shader_dupes', action='store_const',
                        const=True, default=False,
                        help='Removes duplicate shaders in the model tag without breaking indices.')
    parser.add_argument('-a', '--remove-local-nodes', dest='remove_local_nodes', action='store_const',
                        const=True, default=False,
                        help='Rereferences all local nodes to use absolute nodes.')
    parser.add_argument('-p', '--condense-geometry-parts', dest='condense_geometry_parts', action='store_const',
                        const=True, default=False,
                        help='For each geometry combines all parts that use the same shader. (Automatically enables --remove-duplicate-shaders and --remove-local-nodes)')
    parser.add_argument('-v', '--remove-duplicate-vertices', dest='remove_duplicate_vertices', action='store_const',
                        const=True, default=False,
                        help='For each geometry part removes all duplicate vertices.')
    parser.add_argument('model_tag', metavar='model_tag', type=str,
                        help='The tag we want to operate on.', nargs='+')
    args = parser.parse_args()

    from shared.SharedFunctions import GetAbsFilepath
    model_tag_path = GetAbsFilepath(args.model_tag, mod2_ext)

    print("\nLoading model " + model_tag_path + "...", end='')
    sys.stdout.flush()
    model_tag = mod2_def.build(filepath=(model_tag_path + mod2_ext))
    print("done\n")

    ModelOptimize(model_tag, True, args.remove_shader_dupes, args.remove_local_nodes, args.condense_geometry_parts, args.remove_duplicate_vertices)

    print("Saving model tag...", end='')
    sys.stdout.flush()
    model_tag.serialize(backup=True, temp=False)
    print("finished\n")