Example no. 1
def serialize_frame_data(jma_anim, endian=">"):
    data = bytearray(jma_anim.frame_data_frame_size *
                     (jma_anim.frame_count - 1))

    rot_flags = jma_anim.rot_flags
    trans_flags = jma_anim.trans_flags
    scale_flags = jma_anim.scale_flags

    pack_1_float_into = PyStruct(endian + "f").pack_into
    pack_3_float_into = PyStruct(endian + "3f").pack_into
    pack_4_int16_into = PyStruct(endian + "4h").pack_into

    is_overlay = jma_anim.anim_type == "overlay"

    sqrt = math.sqrt

    i = 0
    for f in range(jma_anim.frame_count):
        if not is_overlay and f + 1 == jma_anim.frame_count:
            # skip the last frame for non-overlays
            break
        elif f == 0 and is_overlay:
            # skip the first frame for overlays
            continue

        # write to the data
        for n in range(jma_anim.node_count):
            node_state = jma_anim.frames[f][n]

            if rot_flags[n]:
                # components are ones-signed
                qi = node_state.rot_i
                qj = node_state.rot_j
                qk = node_state.rot_k
                qw = node_state.rot_w
                nmag = qi**2 + qj**2 + qk**2 + qw**2
                if nmag:
                    nmag = 32767.5 / sqrt(nmag)
                    qi = int(qi * nmag)
                    qj = int(qj * nmag)
                    qk = int(qk * nmag)
                    qw = int(qw * nmag)
                else:
                    qi = qj = qk = 0
                    qw = 32767

                pack_4_int16_into(data, i, qi, qj, qk, qw)
                i += 8

            if trans_flags[n]:
                pack_3_float_into(data, i, node_state.pos_x / 100,
                                  node_state.pos_y / 100,
                                  node_state.pos_z / 100)
                i += 12

            if scale_flags[n]:
                pack_1_float_into(data, i, node_state.scale)
                i += 4

    return data
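
Note: the snippets in this listing appear to come from a set of Halo 1 modding tools (reclaimer/refinery). They all rely on a PyStruct name that is assumed here to be an alias for struct.Struct; the listing itself never shows the import. A minimal sketch of that assumed setup, illustrating how a pre-bound pack_into writes fixed-size records into a preallocated bytearray the way serialize_frame_data does above:

from struct import Struct as PyStruct  # assumed alias, not shown in the source

pack_4_int16_into = PyStruct(">4h").pack_into

buf = bytearray(8)                          # one rotation record: 4 * int16
pack_4_int16_into(buf, 0, 0, 0, 0, 32767)   # quantized identity quaternion
assert bytes(buf) == b"\x00\x00\x00\x00\x00\x00\x7f\xff"
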
Example no. 2
def rawdata_ref_move_bitm(tag_id, index_array, map_data, magic, engine,
                          diffs_by_offsets):
    packer   = PyStruct("<L").pack
    unpacker = PyStruct("<H8xL").unpack
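    # "<H8xL" reads the 2-byte flags, skips 8 bytes, then reads the
    # 4-byte raw data pointer (14 bytes total, matching the read below)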
    tag_offset = index_array[tag_id].meta_offset
    for moff in iter_reflexive_offs(map_data, tag_offset + 96 - magic, 48):
        ptr_off = moff - magic

        map_data.seek(ptr_off + 14)
        flags, raw_ptr = unpacker(map_data.read(14))
        if flags & (1<<8):
            # data in resource map
            continue

        ptr_diff = 0
        for off, diff in diffs_by_offsets.items():
            if off <= raw_ptr:
                ptr_diff = diff

        if not ptr_diff:
            continue

        # fix bitmap pointers
        map_data.seek(ptr_off + 24)
        map_data.write(packer(raw_ptr + ptr_diff))
Example no. 3
    def compress_part_verts(self, geometry_index, part_index):
        part = self.data.tagdata.geometries.STEPTREE\
               [geometry_index].parts.STEPTREE[part_index]
        uncomp_verts_reflexive = part.uncompressed_vertices
        comp_verts_reflexive = part.compressed_vertices

        comp_norm = compress_normal32_normalize
        unpack = PyStruct(">11f2hf").unpack
        pack_into = PyStruct(">12s3I2h2bh").pack_into
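        # ">11f2hf" reads the 52 bytes after the 12-byte position: normal,
        # binormal, tangent, u, v, two node indices, and the node-0 weight
        # (the trailing node-1 weight of the 68-byte vertex is not read).
        # ">12s3I2h2bh" writes a 32-byte compressed vertex: raw position
        # bytes, three packed normals, compressed u/v, node indices (x3),
        # and the compressed node-0 weight.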

        comp_verts = bytearray(b'\x00' * 32 * uncomp_verts_reflexive.size)
        uncomp_verts = uncomp_verts_reflexive.STEPTREE

        in_off = out_off = 0
        # compress each of the verts and write them to the buffer
        for i in range(uncomp_verts_reflexive.size):
            ni, nj, nk, bi, bj, bk, ti, tj, tk,\
                u, v, ni_0, ni_1, nw = unpack(uncomp_verts[in_off + 12:
                                                           in_off + 64])

            # write the compressed data
            pack_into(comp_verts, out_off, uncomp_verts[in_off:in_off + 12],
                      comp_norm(ni, nj, nk), comp_norm(bi, bj, bk),
                      comp_norm(ti, tj, tk), int(max(0, min(1, u)) * 32767.5),
                      int(max(0, min(1, v)) * 32767.5), ni_0 * 3, ni_1 * 3,
                      int(max(0, min(1, nw)) * 32767.5))
            in_off += 68
            out_off += 32

        comp_verts_reflexive.STEPTREE = comp_verts
Example no. 4
def serialize_frame_info(jma_anim, endian=">"):
    frame_ct = jma_anim.frame_count - 1
    data = bytearray(jma_anim.root_node_info_frame_size * frame_ct)

    pack_2_float_into = PyStruct(endian + "2f").pack_into
    pack_3_float_into = PyStruct(endian + "3f").pack_into
    pack_4_float_into = PyStruct(endian + "4f").pack_into
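    # the per-frame record is 8, 12, or 16 bytes (2f, 3f, or 4f) depending
    # on which dx/dy/dyaw/dz layout the animation uses;
    # root_node_info_frame_size must match it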
    frame_info_node_size = jma_anim.root_node_info_frame_size

    i = 0
    # write to the data
    if jma_anim.has_dz:
        for info in jma_anim.root_node_info[:frame_ct]:
            pack_4_float_into(data, i, info.dx / 100, info.dy / 100,
                              info.dz / 100, info.dyaw)
            i += frame_info_node_size

    elif jma_anim.has_dyaw:
        for info in jma_anim.root_node_info[:frame_ct]:
            pack_3_float_into(data, i, info.dx / 100, info.dy / 100, info.dyaw)
            i += frame_info_node_size

    elif jma_anim.has_dxdy:
        for info in jma_anim.root_node_info[:frame_ct]:
            pack_2_float_into(data, i, info.dx / 100, info.dy / 100)
            i += frame_info_node_size

    return data
Example no. 5
    def decompress_part_verts(self, geometry_index, part_index):
        part = self.data.tagdata.geometries.STEPTREE\
               [geometry_index].parts.STEPTREE[part_index]
        uncomp_verts_reflexive = part.uncompressed_vertices
        comp_verts_reflexive = part.compressed_vertices

        decomp_norm = decompress_normal32
        unpack = PyStruct(">3I2h2bh").unpack
        pack_into = PyStruct(">12s11f2h2f").pack_into
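        # reverse of compress_part_verts: each 32-byte compressed vertex
        # expands back into a 68-byte uncompressed vertex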

        uncomp_verts = bytearray(b'\x00' * 68 * comp_verts_reflexive.size)
        comp_verts = comp_verts_reflexive.STEPTREE

        in_off = out_off = 0
        # uncompress each of the verts and write them to the buffer
        for i in range(comp_verts_reflexive.size):
            n, b, t, u, v, ni_0, ni_1, nw = unpack(comp_verts[in_off +
                                                              12:in_off + 32])
            ni, nj, nk = decomp_norm(n)
            bi, bj, bk = decomp_norm(b)
            ti, tj, tk = decomp_norm(t)

            # write the uncompressed data
            pack_into(uncomp_verts, out_off, comp_verts[in_off:in_off + 12],
                      ni, nj, nk, bi, bj, bk, ti, tj, tk, u / 32767.5,
                      v / 32767.5, ni_0 // 3, ni_1 // 3, nw / 32767.5,
                      1.0 - nw / 32767.5)
            in_off += 32
            out_off += 68

        uncomp_verts_reflexive.STEPTREE = uncomp_verts
Example no. 6
def make_bsp_lightmap_jms_models(sbsp_body, base_nodes):
    jms_models = []

    lightmaps = sbsp_body.lightmaps.STEPTREE
    all_tris = sbsp_body.surfaces.STEPTREE

    shader_index_by_mat_name = {}
    shader_mats = []
    for i in range(len(lightmaps)):
        lm_index = lightmaps[i].bitmap_index
        if lm_index not in shader_index_by_mat_name and lm_index >= 0:
            shader_index_by_mat_name[lm_index] = len(shader_index_by_mat_name)
            shader_mats.append(JmsMaterial("lightmap_%s" % lm_index))

    uncomp_vert_xyz_unpacker = PyStruct("<3f").unpack_from
    uncomp_vert_ijkuv_unpacker = PyStruct("<5f").unpack_from
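    # render vertices are 56 bytes apiece; the 20-byte lightmap vertices
    # (normal + uv) follow them in the same uncompressed vertex buffer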

    for lightmap in lightmaps:
        verts = []
        tris = []
        mat_index = shader_index_by_mat_name.get(lightmap.bitmap_index, -1)
        if mat_index < 0:
            continue

        for material in lightmap.materials.STEPTREE:
            v_base = len(verts)
            tris.extend(
                JmsTriangle(0, mat_index, tri[0] + v_base, tri[2] +
                            v_base, tri[1] + v_base)
                for tri in all_tris[material.surfaces:material.surfaces +
                                    material.surface_count])

            vert_off = 0
            lm_vert_off = 56 * material.vertices_count
            vert_data = material.uncompressed_vertices.data
            for i in range(material.lightmap_vertices_count):
                x, y, z = uncomp_vert_xyz_unpacker(vert_data, vert_off)
                i, j, k, u, v = uncomp_vert_ijkuv_unpacker(
                    vert_data, lm_vert_off)
                vert_off += 56
                lm_vert_off += 20
                verts.append(
                    JmsVertex(0, x * 100, y * 100, z * 100, i, j, k, -1, 0, u,
                              1 - v))

        jms_models.append(
            JmsModel("bsp", 0, base_nodes, shader_mats, [],
                     ("lightmap_%s" % lightmap.bitmap_index, ), verts, tris))

    return jms_models
Example no. 7
def serialize_default_data(jma_anim, endian=">"):
    data = bytearray(jma_anim.default_data_size)

    rot_flags = jma_anim.rot_flags
    trans_flags = jma_anim.trans_flags
    scale_flags = jma_anim.scale_flags

    pack_1_float_into = PyStruct(endian + "f").pack_into
    pack_3_float_into = PyStruct(endian + "3f").pack_into
    pack_4_int16_into = PyStruct(endian + "4h").pack_into

    sqrt = math.sqrt

    i = 0
    # write to the data
    def_frames = jma_anim.frames[0]
    for n in range(jma_anim.node_count):
        node_state = def_frames[n]
        if not rot_flags[n]:
            # components are ones-signed
            qi = node_state.rot_i
            qj = node_state.rot_j
            qk = node_state.rot_k
            qw = node_state.rot_w
            nmag = qi**2 + qj**2 + qk**2 + qw**2
            if nmag:
                nmag = 32767.5 / sqrt(nmag)
                qi = int(qi * nmag)
                qj = int(qj * nmag)
                qk = int(qk * nmag)
                qw = int(qw * nmag)
            else:
                qi = qj = qk = 0
                qw = 32767

            pack_4_int16_into(data, i, qi, qj, qk, qw)
            i += 8

        if not trans_flags[n]:
            pack_3_float_into(data, i, node_state.pos_x / 100,
                              node_state.pos_y / 100, node_state.pos_z / 100)
            i += 12

        if not scale_flags[n]:
            pack_1_float_into(data, i, node_state.scale)
            i += 4

    return data
Example no. 8
def _fast_decode_mono_adpcm_samples(samples, endian="<"):
    adpcm2lin = audioop.adpcm2lin

    pcm_size   = PCM_BLOCKSIZE
    adpcm_size = ADPCM_BLOCKSIZE
    state_size = 4

    block_ct = len(samples) // adpcm_size
    out_data = bytearray(block_ct * pcm_size)

    pcm_i = 0
    unpacker = PyStruct(endian + "hh").unpack_from
    for i in range(0, len(samples), adpcm_size):
        # why couldn't it be nice and just follow the same
        # step packing pattern where the first step is the
        # first 4 bits and the second is the last 4 bits.
        steps = bytes(((b<<4) + (b>>4))&0xFF for b in
                      samples[i + state_size: i + adpcm_size])
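        # (the expression above swaps each byte's high and low nibbles so
        # audioop.adpcm2lin reads the adpcm steps in the order it expects)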
        predictor = samples[i: i+2]
        if endian == ">":
            predictor = predictor[::-1]

        out_data[pcm_i: pcm_i + pcm_size] = (
            predictor + adpcm2lin(steps, 2, unpacker(samples, i))[0]
            )

        pcm_i += pcm_size

    return array("h", out_data)
Example no. 9
def make_bsp_renderable_jms_models(sbsp_body, base_nodes):
    jms_models = []

    lightmaps = sbsp_body.lightmaps.STEPTREE
    all_tris = sbsp_body.surfaces.STEPTREE

    shader_index_by_mat_name = {}
    mat_indices_by_mat_name = {}
    shader_mats = []
    for i in range(len(lightmaps)):
        materials = lightmaps[i].materials.STEPTREE
        for j in range(len(materials)):
            material = materials[j]
            mat_name = PureWindowsPath(material.shader.filepath).name.lower()
            mat_name += "!$" if material.flags.fog_plane else "!"

            if mat_name not in mat_indices_by_mat_name:
                shader_index_by_mat_name[mat_name] = len(shader_mats)
                shader_mats.append(JmsMaterial(mat_name))
                mat_indices_by_mat_name[mat_name] = []
                shader_mats[-1].shader_path = (shader_mats[-1].shader_path +
                                               shader_mats[-1].properties)
                shader_mats[-1].properties = ""

            mat_indices_by_mat_name[mat_name].append((i, j))

    uncomp_vert_unpacker = PyStruct("<14f").unpack_from
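    # "<14f" covers the whole 56-byte uncompressed bsp vertex:
    # position, normal, binormal, tangent, u, v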
    for mat_name in sorted(mat_indices_by_mat_name):
        verts = []
        tris = []
        for i, j in mat_indices_by_mat_name[mat_name]:
            material = lightmaps[i].materials.STEPTREE[j]

            mat_index = shader_index_by_mat_name.get(mat_name)
            if mat_index is None:
                continue

            vert_data = material.uncompressed_vertices.data
            v_base = len(verts)

            tris.extend(
                JmsTriangle(0, mat_index, tri[0] + v_base, tri[2] +
                            v_base, tri[1] + v_base)
                for tri in all_tris[material.surfaces:material.surfaces +
                                    material.surface_count])

            for i in range(0, material.vertices_count * 56, 56):
                x, y, z, ni, nj, nk, bi, bj, bk, ti, tj, tk, u, v =\
                   uncomp_vert_unpacker(vert_data, i)
                verts.append(
                    JmsVertex(0, x * 100, y * 100, z * 100, ni, nj, nk, -1, 0,
                              u, 1 - v, 0, bi, bj, bk, ti, tj, tk))

        jms_models.append(
            JmsModel("bsp", 0, base_nodes, shader_mats, [], ("renderable", ),
                     verts, tris))

    return jms_models
Example no. 10
def extract_model(tagdata, tag_path="", **kw):
    do_write_jms = kw.get('write_jms', True)
    if do_write_jms:
        jms_models = None
        filepath_base = os.path.join(kw['out_dir'], os.path.dirname(tag_path),
                                     "models")
    else:
        jms_models = []
        filepath_base = ""

    global_markers = {}
    materials = []
    regions = []
    nodes = []

    for b in tagdata.markers.STEPTREE:
        marker_name = b.name

        for inst in b.marker_instances.STEPTREE:
            try:
                region = tagdata.regions.STEPTREE[inst.region_index]
            except Exception:
                print("Invalid region index in marker '%s'" % marker_name)
                continue

            try:
                perm = region.permutations.STEPTREE[inst.permutation_index]
                perm_name = perm.name
                if (perm.flags.cannot_be_chosen_randomly
                        and not perm_name.startswith("~")):
                    perm_name += "~"
            except Exception:
                print("Invalid permutation index in marker '%s'" % marker_name)
                continue

            perm_markers = global_markers.setdefault(perm_name, [])

            trans = inst.translation
            rot = inst.rotation
            perm_markers.append(
                JmsMarker(marker_name, perm_name, inst.region_index,
                          inst.node_index, rot.i, rot.j, rot.k, rot.w,
                          trans.x * 100, trans.y * 100, trans.z * 100, 1.0))

    for b in tagdata.nodes.STEPTREE:
        trans = b.translation
        rot = b.rotation
        nodes.append(
            JmsNode(b.name, b.first_child_node, b.next_sibling_node, rot.i,
                    rot.j, rot.k, rot.w, trans.x * 100, trans.y * 100,
                    trans.z * 100, b.parent_node))

    for b in tagdata.shaders.STEPTREE:
        materials.append(
            JmsMaterial(b.shader.filepath.split("/")[-1].split("\\")[-1]))

    markers_by_perm = {}
    geoms_by_perm_lod_region = {}

    u_scale = tagdata.base_map_u_scale
    v_scale = tagdata.base_map_v_scale

    for region in tagdata.regions.STEPTREE:
        region_index = len(regions)
        regions.append(region.name)
        for perm in region.permutations.STEPTREE:
            perm_name = perm.name
            if (perm.flags.cannot_be_chosen_randomly
                    and not perm_name.startswith("~")):
                perm_name += "~"

            geoms_by_lod_region = geoms_by_perm_lod_region.setdefault(
                perm_name, {})

            perm_markers = markers_by_perm.setdefault(perm_name, [])
            if hasattr(perm, "local_markers"):
                for m in perm.local_markers.STEPTREE:
                    trans = m.translation
                    rot = m.rotation
                    perm_markers.append(
                        JmsMarker(m.name, perm_name, region_index,
                                  m.node_index, rot.i, rot.j, rot.k, rot.w,
                                  trans.x * 100, trans.y * 100, trans.z * 100,
                                  1.0))

            last_geom_index = -1
            for lod in range(5):
                geoms_by_region = geoms_by_lod_region.get(lod, {})
                region_geoms = geoms_by_region.get(region.name, [])

                geom_index = perm[perm.NAME_MAP["superlow_geometry_block"] +
                                  (4 - lod)]

                if (geom_index in region_geoms
                        or geom_index == last_geom_index):
                    continue

                geoms_by_lod_region[lod] = geoms_by_region
                geoms_by_region[region.name] = region_geoms
                region_geoms.append(geom_index)
                last_geom_index = geom_index

    try:
        use_local_nodes = tagdata.flags.parts_have_local_nodes
    except Exception:
        use_local_nodes = False
    def_node_map = list(range(128))
    def_node_map.append(-1)

    # use big endian since it will have been byteswapped
    comp_vert_unpacker = PyStruct(">3f3I2h2bh").unpack_from
    uncomp_vert_unpacker = PyStruct(">14f2h2f").unpack_from
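    # ">3f3I2h2bh" is the 32-byte compressed vertex (position, three packed
    # normals, u/v, node indices, weight); ">14f2h2f" is the 68-byte
    # uncompressed vertex (position, normal, binormal, tangent, u/v, node
    # indices, weights)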

    for perm_name in sorted(geoms_by_perm_lod_region):
        geoms_by_lod_region = geoms_by_perm_lod_region[perm_name]
        perm_markers = markers_by_perm.get(perm_name)

        for lod in sorted(geoms_by_lod_region):
            if lod == -1:
                continue

            jms_name = perm_name + {
                4: " superlow",
                3: " low",
                2: " medium",
                1: " high",
                0: " superhigh"
            }.get(lod, "")

            filepath = os.path.join(filepath_base, jms_name + ".jms")

            markers = list(perm_markers)
            markers.extend(global_markers.get(perm_name, ()))
            verts = []
            tris = []

            geoms_by_region = geoms_by_lod_region[lod]
            for region_name in sorted(geoms_by_region):
                region_index = regions.index(region_name)
                geoms = geoms_by_region[region_name]

                for geom_index in geoms:
                    try:
                        geom_block = tagdata.geometries.STEPTREE[geom_index]
                    except Exception:
                        print("Invalid geometry index '%s'" % geom_index)
                        continue

                    for part in geom_block.parts.STEPTREE:
                        v_origin = len(verts)
                        shader_index = part.shader_index

                        try:
                            node_map = list(part.local_nodes)
                            node_map.append(-1)
                            compressed = False
                        except (AttributeError, KeyError):
                            compressed = True

                        if not use_local_nodes:
                            node_map = def_node_map

                        try:
                            unparsed = isinstance(part.triangles.STEPTREE.data,
                                                  bytearray)
                        except Exception:
                            unparsed = False

                        # TODO: Make this work in meta(parse verts and tris)
                        try:
                            if compressed and unparsed:
                                vert_data = part.compressed_vertices.STEPTREE.data
                                for off in range(0, len(vert_data), 32):
                                    v = comp_vert_unpacker(vert_data, off)
                                    n = v[3]
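                                    # v[3] is the 32-bit packed normal:
                                    # i in bits 0-10, j in bits 11-21,
                                    # k in bits 22-31 (sign bits 10/21/31)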
                                    ni = (n & 1023) / 1023
                                    nj = ((n >> 11) & 1023) / 1023
                                    nk = ((n >> 22) & 511) / 511
                                    if (n >> 10) & 1: ni = ni - 1.0
                                    if (n >> 21) & 1: nj = nj - 1.0
                                    if (n >> 31) & 1: nk = nk - 1.0

                                    verts.append(
                                        JmsVertex(v[8] // 3, v[0] * 100,
                                                  v[1] * 100, v[2] * 100, ni,
                                                  nj, nk, v[9] // 3,
                                                  1.0 - (v[10] / 32767),
                                                  u_scale * v[6] / 32767, 1.0 -
                                                  v_scale * v[7] / 32767))
                            elif compressed:
                                for v in part.compressed_vertices.STEPTREE:
                                    n = v[3]
                                    ni = (n & 1023) / 1023
                                    nj = ((n >> 11) & 1023) / 1023
                                    nk = ((n >> 22) & 511) / 511
                                    if (n >> 10) & 1: ni = ni - 1.0
                                    if (n >> 21) & 1: nj = nj - 1.0
                                    if (n >> 31) & 1: nk = nk - 1.0

                                    verts.append(
                                        JmsVertex(v[8] // 3, v[0] * 100,
                                                  v[1] * 100, v[2] * 100, ni,
                                                  nj, nk, v[9] // 3,
                                                  1.0 - (v[10] / 32767),
                                                  u_scale * v[6] / 32767, 1.0 -
                                                  v_scale * v[7] / 32767))
                            elif not compressed and unparsed:
                                vert_data = part.uncompressed_vertices.STEPTREE.data
                                for off in range(0, len(vert_data), 68):
                                    v = uncomp_vert_unpacker(vert_data, off)
                                    verts.append(
                                        JmsVertex(node_map[v[14]], v[0] * 100,
                                                  v[1] * 100, v[2] * 100, v[3],
                                                  v[4], v[5], node_map[v[15]],
                                                  max(0, min(1, v[17])),
                                                  u_scale * v[12],
                                                  1.0 - v_scale * v[13]))
                            else:
                                for v in part.uncompressed_vertices.STEPTREE:
                                    verts.append(
                                        JmsVertex(node_map[v[14]], v[0] * 100,
                                                  v[1] * 100, v[2] * 100, v[3],
                                                  v[4], v[5], node_map[v[15]],
                                                  max(0, min(1, v[17])),
                                                  u_scale * v[12],
                                                  1.0 - v_scale * v[13]))
                        except Exception:
                            print(format_exc())
                            print("If you see this, tell Moses to stop "
                                  "f*****g with the vertex definition.")

                        try:
                            if unparsed:
                                tri_block = part.triangles.STEPTREE.data
                                tri_list = [-1] * (len(tri_block) // 2)
                                for i in range(len(tri_list)):
                                    # assuming big endian
                                    tri_list[i] = (tri_block[i * 2 + 1] +
                                                   (tri_block[i * 2] << 8))
                                    if tri_list[i] > 32767:
                                        tri_list[i] = -1
                            else:
                                tri_block = part.triangles.STEPTREE
                                tri_list = []
                                for triangle in tri_block:
                                    tri_list.extend(triangle)

                            swap = True
                            for i in range(len(tri_list) - 2):
                                v0 = tri_list[i]
                                v1 = tri_list[i + 1 + swap]
                                v2 = tri_list[i + 2 - swap]
                                if v0 != -1 and v1 != -1 and v2 != -1:
                                    # remove degens
                                    if v0 != v1 and v0 != v2 and v1 != v2:
                                        tris.append(
                                            JmsTriangle(
                                                region_index, shader_index,
                                                v0 + v_origin, v1 + v_origin,
                                                v2 + v_origin))
                                swap = not swap
                        except Exception:
                            print(format_exc())
                            print("Could not parse triangle blocks.")

            jms_model = JmsModel(jms_name, tagdata.node_list_checksum, nodes,
                                 materials, markers, regions, verts, tris)
            if do_write_jms:
                write_jms(filepath, jms_model)
            else:
                jms_models.append(jms_model)

    return jms_models
Example no. 11
def _deserialize_frame_data(anim, get_default_data, def_node_states, endian):
    unpack_trans = PyStruct(endian + "3f").unpack_from
    unpack_ijkw = PyStruct(endian + "4h").unpack_from
    unpack_float = PyStruct(endian + "f").unpack_from
    sqrt = math.sqrt

    rot_flags, trans_flags, scale_flags = get_anim_flags(anim)

    if get_default_data:
        store = False
        stored_frame_count = 1
        data = anim.default_data.data
    else:
        store = True
        stored_frame_count = anim.frame_count
        data = anim.frame_data.data

    all_node_states = [[JmaNodeState() for n in range(anim.node_count)]
                       for f in range(stored_frame_count)]

    if get_default_data:
        def_node_states = all_node_states[0]

    assert len(def_node_states) == anim.node_count

    i = 0
    for f in range(stored_frame_count):
        node_states = all_node_states[f]

        for n in range(anim.node_count):
            def_node_state = def_node_states[n]
            state = node_states[n]

            qi = qj = qk = x = y = z = 0.0
            qw = scale = 1.0
            if rot_flags[n] == store:
                qi, qj, qk, qw = unpack_ijkw(data, i)
                i += 8

                rot_len = qi**2 + qj**2 + qk**2 + qw**2
                if rot_len:
                    rot_len = 1 / sqrt(rot_len)
                    qi *= rot_len
                    qj *= rot_len
                    qk *= rot_len
                    qw *= rot_len
                else:
                    qi = qj = qk = 0.0
                    qw = 1.0
            else:
                qi = def_node_state.rot_i
                qj = def_node_state.rot_j
                qk = def_node_state.rot_k
                qw = def_node_state.rot_w

            if trans_flags[n] == store:
                x, y, z = unpack_trans(data, i)
                i += 12

                x *= 100
                y *= 100
                z *= 100
            else:
                x = def_node_state.pos_x
                y = def_node_state.pos_y
                z = def_node_state.pos_z

            if scale_flags[n] == store:
                scale = unpack_float(data, i)[0]
                i += 4
            else:
                scale = def_node_state.scale

            state.pos_x = x
            state.pos_y = y
            state.pos_z = z
            state.rot_i = qi
            state.rot_j = qj
            state.rot_k = qk
            state.rot_w = qw
            state.scale = scale

    return all_node_states
Example no. 12
def end_swap_float(v,
                   packer=PyStruct(">f").pack,
                   unpacker=PyStruct("<f").unpack):
    return unpacker(packer(v))[0]
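# e.g. end_swap_float(1.0) returns the float whose four bytes are 1.0's bytes
# in reverse order; calling it again on the result gives 1.0 back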
Example no. 13
    def byteswap_anniversary_fields(self, meta, tag_cls):

        if tag_cls == "antr":
            unpack_header = PyStruct("<11i").unpack
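            # the compressed animation data begins with a 44-byte header of
            # 11 little-endian int32 offsets into the compressed stream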
            for b in meta.animations.STEPTREE:
                b.unknown_sint16 = end_swap_int16(b.unknown_sint16)
                b.unknown_float = end_swap_float(b.unknown_float)
                if not b.flags.compressed_data:
                    continue

                comp_data = b.frame_data.data[b.offset_to_compressed_data:]
                # byteswap compressed frame data header
                for i in range(0, 44, 4):
                    data = comp_data[i:i + 4]
                    for j in range(4):
                        comp_data[i + 3 - j] = data[j]

                header = list(unpack_header(comp_data[:44]))
                header.insert(0, 44)
                header.append(len(comp_data))

                item_sizes = (4, 2, 2, 2, 4, 2, 4, 4, 4, 2, 4, 4)
                comp_data_off_len_size = []
                for i in range(len(header) - 1):
                    comp_data_off_len_size.append(
                        [header[i], header[i + 1] - header[i], item_sizes[i]])

                for off, length, size in comp_data_off_len_size:
                    for i in range(off, off + length, size):
                        data = comp_data[i:i + size]
                        for j in range(size):
                            comp_data[i + size - 1 - j] = data[j]

                # replace the frame_data with the compressed data and some
                # blank default / frame data so tool doesn't shit the bed.
                default_data_size = b.node_count * (12 + 8 + 4) - b.frame_size
                b.default_data.data += bytearray(
                    max(0, default_data_size - len(b.default_data.data)))

                b.offset_to_compressed_data = b.frame_count * b.frame_size
                b.frame_data.data = bytearray(
                    b.frame_count * b.frame_size) + comp_data

        elif tag_cls == "bitm":
            for b in meta.bitmaps.STEPTREE:
                b.pixels = end_swap_uint16(b.pixels)

        elif tag_cls == "coll":
            for b in meta.nodes.STEPTREE:
                b.unknown0 = end_swap_int16(b.unknown0)
                b.unknown1 = end_swap_int16(b.unknown1)

        elif tag_cls == "effe":
            for event in meta.events.STEPTREE:
                for b in event.particles.STEPTREE:
                    b.unknown0 = end_swap_int16(b.unknown0)
                    b.unknown1 = end_swap_int16(b.unknown1)

        elif tag_cls == "hmt ":
            block_bytes = bytearray(meta.string.serialize())
            for i in range(20, len(block_bytes), 2):
                byte = block_bytes[i + 1]
                block_bytes[i + 1] = block_bytes[i]
                block_bytes[i] = byte

            meta.string.parse(rawdata=block_bytes)

        elif tag_cls == "lens":
            meta.unknown0 = end_swap_float(meta.unknown0)
            meta.unknown1 = end_swap_float(meta.unknown1)

        elif tag_cls == "lsnd":
            meta.unknown0 = end_swap_float(meta.unknown0)
            meta.unknown1 = end_swap_float(meta.unknown1)
            meta.unknown2 = end_swap_float(meta.unknown2)
            meta.unknown3 = end_swap_float(meta.unknown3)
            meta.unknown4 = end_swap_int16(meta.unknown4)
            meta.unknown5 = end_swap_int16(meta.unknown5)
            meta.unknown6 = end_swap_float(meta.unknown6)

        elif tag_cls == "metr":
            meta.screen_x_pos = end_swap_uint16(meta.screen_x_pos)
            meta.screen_y_pos = end_swap_uint16(meta.screen_y_pos)
            meta.width = end_swap_uint16(meta.width)
            meta.height = end_swap_uint16(meta.height)

        elif tag_cls in ("mod2", "mode"):
            for node in meta.nodes.STEPTREE:
                node.unknown = end_swap_float(node.unknown)
                for b in (node.rot_jj_kk, node.rot_kk_ii, node.rot_ii_jj,
                          node.translation_to_root):
                    for i in range(len(b)):
                        b[i] = end_swap_float(b[i])

        elif tag_cls == "part":
            meta.rendering.unknown0 = end_swap_int32(meta.rendering.unknown0)
            meta.rendering.unknown1 = end_swap_float(meta.rendering.unknown1)
            meta.rendering.unknown2 = end_swap_uint32(meta.rendering.unknown2)

        elif tag_cls == "pphy":
            meta.scaled_density = end_swap_float(meta.scaled_density)
            meta.water_gravity_scale = end_swap_float(meta.water_gravity_scale)
            meta.air_gravity_scale = end_swap_float(meta.air_gravity_scale)

        elif tag_cls == "sbsp":
            # TODO: Might need to byteswap cluster data and sound_pas data

            for coll_mat in meta.collision_materials.STEPTREE:
                coll_mat.unknown = end_swap_uint32(coll_mat.unknown)

            node_data = meta.nodes.STEPTREE
            for i in range(0, len(node_data), 2):
                b0 = node_data[i]
                node_data[i] = node_data[i + 1]
                node_data[i + 1] = b0

            leaf_data = meta.leaves.STEPTREE
            for i in range(0, len(leaf_data), 16):
                b0 = leaf_data[i]
                leaf_data[i] = leaf_data[i + 1]
                leaf_data[i + 1] = b0

                b0 = leaf_data[i + 2]
                leaf_data[i + 2] = leaf_data[i + 3]
                leaf_data[i + 3] = b0

                b0 = leaf_data[i + 4]
                leaf_data[i + 4] = leaf_data[i + 5]
                leaf_data[i + 5] = b0

                b0 = leaf_data[i + 6]
                leaf_data[i + 6] = leaf_data[i + 7]
                leaf_data[i + 7] = b0

            for lightmap in meta.lightmaps.STEPTREE:
                for b in lightmap.materials.STEPTREE:
                    vt_ct = b.vertices_count
                    l_vt_ct = b.lightmap_vertices_count

                    u_verts = b.uncompressed_vertices.STEPTREE
                    c_verts = b.compressed_vertices.STEPTREE

                    b.unknown_meta_offset0 = end_swap_uint32(
                        b.unknown_meta_offset0)
                    b.vertices_meta_offset = end_swap_uint32(
                        b.vertices_meta_offset)

                    b.vertex_type.data = end_swap_uint16(b.vertex_type.data)

                    b.unknown_meta_offset1 = end_swap_uint32(
                        b.unknown_meta_offset1)
                    b.lightmap_vertices_meta_offset = end_swap_uint32(
                        b.lightmap_vertices_meta_offset)

                    # byteswap (un)compressed verts and lightmap verts
                    for data in (u_verts, c_verts):
                        for i in range(0, len(data), 4):
                            b0 = data[i]
                            b1 = data[i + 1]
                            data[i] = data[i + 3]
                            data[i + 1] = data[i + 2]
                            data[i + 2] = b1
                            data[i + 3] = b0

                    # since the compressed lightmap u and v coordinates are
                    # 2 byte fields rather than 4, the above byteswapping
                    # will have swapped u and v. we need to swap them back.
                    # multiply vt_ct by 32 to skip non-lightmap verts, and
                    # add 4 to skip the 4 byte compressed lightmap normal.
                    for i in range(vt_ct * 32 + 4, len(c_verts), 8):
                        c_verts[i:i + 4] = (c_verts[i + 2:i + 4] +
                                            c_verts[i:i + 2])

            for fog_plane in meta.fog_planes.STEPTREE:
                fog_plane.material_type.data = end_swap_int16(
                    fog_plane.material_type.data)

        elif tag_cls == "scnr":
            for b in meta.object_names.STEPTREE:
                b.object_type.data = end_swap_uint16(b.object_type.data)
                b.reflexive_index = end_swap_int16(b.reflexive_index)

            for b in meta.trigger_volumes.STEPTREE:
                b.unknown = end_swap_uint16(b.unknown)

            for b in meta.encounters.STEPTREE:
                b.unknown = end_swap_int16(b.unknown)

            # PROLLY GONNA HAVE TO BYTESWAP RECORDED ANIMS AND MORE SHIT
            syntax_data = meta.script_syntax_data.data
            with FieldType.force_big:
                syntax_header = h1_script_syntax_data_def.build(
                    rawdata=syntax_data)

            i = 56
            for node_i in range(syntax_header.last_node):
                n_typ = syntax_data[i + 5] + (syntax_data[i + 4] << 8)
                flags = syntax_data[i + 7] + (syntax_data[i + 6] << 8)
                if flags & 7 == 1:
                    # node is a primitive
                    if n_typ == 5:
                        # node is a boolean
                        syntax_data[i + 19] = syntax_data[i + 16]
                        syntax_data[i + 16:i + 19] = (0, 0, 0)  # null these 3
                    elif n_typ == 7:
                        # node is a sint16
                        syntax_data[i + 18] = syntax_data[i + 16]
                        syntax_data[i + 19] = syntax_data[i + 17]
                        syntax_data[i + 16:i + 18] = (0, 0)  # null these 2

                i += 20

        elif tag_cls == "senv":
            meta.senv_attrs.bump_properties.map_scale_x = end_swap_float(
                meta.senv_attrs.bump_properties.map_scale_x)
            meta.senv_attrs.bump_properties.map_scale_y = end_swap_float(
                meta.senv_attrs.bump_properties.map_scale_y)

        elif tag_cls == "snd!":
            for pr in meta.pitch_ranges.STEPTREE:
                for b in pr.permutations.STEPTREE:
                    b.ogg_sample_count = end_swap_uint32(b.ogg_sample_count)

        elif tag_cls == "spla":
            meta.spla_attrs.primary_noise_map.unknown0 = end_swap_uint16(
                meta.spla_attrs.primary_noise_map.unknown0)
            meta.spla_attrs.primary_noise_map.unknown1 = end_swap_uint16(
                meta.spla_attrs.primary_noise_map.unknown1)

            meta.spla_attrs.secondary_noise_map.unknown0 = end_swap_uint16(
                meta.spla_attrs.secondary_noise_map.unknown0)
            meta.spla_attrs.secondary_noise_map.unknown1 = end_swap_uint16(
                meta.spla_attrs.secondary_noise_map.unknown1)

        elif tag_cls == "ustr":
            for b in meta.strings.STEPTREE:
                block_bytes = bytearray(b.serialize())
                for i in range(12, len(block_bytes), 2):
                    byte = block_bytes[i + 1]
                    block_bytes[i + 1] = block_bytes[i]
                    block_bytes[i] = byte

                b.parse(rawdata=block_bytes)

        if tag_cls in ("bipd", "vehi", "weap", "eqip", "garb", "proj", "scen",
                       "mach", "ctrl", "lifi", "plac", "ssce", "obje"):
            meta.obje_attrs.object_type.data = end_swap_int16(
                meta.obje_attrs.object_type.data)
        elif tag_cls in ("senv", "soso", "sotr", "schi", "scex", "swat",
                         "sgla", "smet", "spla", "shdr"):
            meta.shdr_attrs.shader_type.data = end_swap_int16(
                meta.shdr_attrs.shader_type.data)
Example no. 14
def compile_gbxmodel(mod2_tag, merged_jms, ignore_errors=False):
    tagdata = mod2_tag.data.tagdata

    tagdata.flags.parts_have_local_nodes = False

    u_scale, v_scale = merged_jms.calc_uv_scales()
    if u_scale < 1:
        u_scale = 1
    if v_scale < 1:
        v_scale = 1
    tagdata.base_map_u_scale = merged_jms.u_scale = u_scale
    tagdata.base_map_v_scale = merged_jms.v_scale = v_scale

    tagdata.node_list_checksum = merged_jms.node_list_checksum

    errors = []
    if len(merged_jms.materials) > 256:
        errors.append("Too many materials. Max count is 256.")

    if len(merged_jms.regions) > 32:
        errors.append("Too many regions. Max count is 32.")

    if errors and not ignore_errors:
        return errors

    # make nodes
    mod2_nodes = tagdata.nodes.STEPTREE
    del mod2_nodes[:]
    for node in merged_jms.nodes:
        mod2_nodes.append()
        mod2_node = mod2_nodes[-1]

        mod2_node.name = node.name[:31]
        mod2_node.next_sibling_node = node.sibling_index
        mod2_node.first_child_node = node.first_child
        mod2_node.parent_node = node.parent_index
        mod2_node.translation[:] = node.pos_x / 100,\
                                   node.pos_y / 100,\
                                   node.pos_z / 100
        mod2_node.rotation[:] = node.rot_i, node.rot_j,\
                                node.rot_k, node.rot_w

        if node.parent_index >= 0:
            mod2_node.distance_from_parent = sqrt(node.pos_x**2 +
                                                  node.pos_y**2 +
                                                  node.pos_z**2) / 100

    # record shader ordering and permutation indices
    mod2_shaders = tagdata.shaders.STEPTREE
    shdr_perm_indices_by_name = {}
    for mod2_shader in mod2_shaders:
        shdr_name = mod2_shader.shader.filepath.split("\\")[-1]
        shdr_perm_indices = shdr_perm_indices_by_name.setdefault(shdr_name, [])
        shdr_perm_indices.append(mod2_shader.permutation_index)

    del mod2_shaders[:]
    # make shader references
    for mat in merged_jms.materials:
        mod2_shaders.append()
        mod2_shader = mod2_shaders[-1]
        mod2_shader.shader.filepath = mat.shader_path
        if mat.shader_type:
            mod2_shader.shader.tag_class.set_to(mat.shader_type)
        else:
            mod2_shader.shader.tag_class.set_to("shader")

        shdr_name = mod2_shader.shader.filepath.split("\\")[-1].lower()
        shdr_perm_indices = shdr_perm_indices_by_name.get(shdr_name)
        if shdr_perm_indices:
            mod2_shader.permutation_index = shdr_perm_indices.pop(0)

    # make regions
    mod2_regions = tagdata.regions.STEPTREE
    del mod2_regions[:]

    global_markers = {}
    geom_meshes = []
    all_lod_nodes = {lod: set([0]) for lod in util.LOD_NAMES}
    for region_name in sorted(merged_jms.regions):
        region = merged_jms.regions[region_name]

        mod2_regions.append()
        mod2_region = mod2_regions[-1]
        mod2_region.name = region_name[:31]

        mod2_perms = mod2_region.permutations.STEPTREE
        for perm_name in sorted(region.perm_meshes):
            perm = region.perm_meshes[perm_name]

            mod2_perms.append()
            mod2_perm = mod2_perms[-1]
            mod2_perm.name = perm_name[:31]

            mod2_perm.flags.cannot_be_chosen_randomly = not perm.is_random_perm

            perm_added = False
            skipped_lods = []
            for i in range(len(util.LOD_NAMES)):
                lod_name = util.LOD_NAMES[i]
                if not perm.lod_meshes.get(lod_name):
                    if skipped_lods is not None:
                        skipped_lods.append(i)
                    continue

                geom_index = len(geom_meshes)
                lod_mesh = perm.lod_meshes[lod_name]
                geom_meshes.append(lod_mesh)

                # figure out which nodes this mesh utilizes
                this_meshes_nodes = set()
                for mesh in lod_mesh.values():
                    for vert in mesh.verts:
                        if vert.node_1_weight < 1:
                            this_meshes_nodes.add(vert.node_0)
                        if vert.node_1_weight > 0:
                            this_meshes_nodes.add(vert.node_1)

                all_lod_nodes[lod_name].update(this_meshes_nodes)

                lods_to_set = list(range(i, 5))
                if skipped_lods:
                    lods_to_set.extend(skipped_lods)
                    skipped_lods = None

                for i in lods_to_set:
                    setattr(mod2_perm, "%s_geometry_block" % util.LOD_NAMES[i],
                            geom_index)

                perm_added = True

            if len(perm.markers) > 32:
                for marker in perm.markers:
                    global_markers.setdefault(marker.name[:31],
                                              []).append(marker)
            else:
                perm_added |= bool(perm.markers)
                mod2_markers = mod2_perm.local_markers.STEPTREE
                for marker in perm.markers:
                    mod2_markers.append()
                    mod2_marker = mod2_markers[-1]

                    mod2_marker.name = marker.name[:31]
                    mod2_marker.node_index = marker.parent
                    mod2_marker.translation[:] = marker.pos_x / 100,\
                                                 marker.pos_y / 100,\
                                                 marker.pos_z / 100
                    mod2_marker.rotation[:] = marker.rot_i, marker.rot_j,\
                                              marker.rot_k, marker.rot_w

            if not (perm_added or ignore_errors):
                del mod2_perms[-1]
                continue

    if len(geom_meshes) > 256 and not ignore_errors:
        return ("Cannot add more than 256 geometries to a model. "
                "Each material in each region in each permutation "
                "in each LOD is counted as a single geometry.\n"
                "This model would contain %s geometries." % len(geom_meshes), )

    # make the markers
    mod2_marker_headers = tagdata.markers.STEPTREE
    del mod2_marker_headers[:]
    for marker_name in sorted(global_markers):
        marker_list = global_markers[marker_name]
        mod2_marker_headers.append()
        mod2_marker_header = mod2_marker_headers[-1]

        mod2_marker_header.name = marker_name[:31]
        mod2_marker_list = mod2_marker_header.marker_instances.STEPTREE

        for marker in marker_list:
            mod2_marker_list.append()
            mod2_marker = mod2_marker_list[-1]

            # figure out which permutation index this marker
            # matches for all the permutations in its region
            i = perm_index = 0
            for perm in mod2_regions[marker.region].permutations.STEPTREE:
                if perm.name == marker.permutation:
                    perm_index = i
                    break
                i += 1

            mod2_marker.region_index = marker.region
            mod2_marker.permutation_index = perm_index
            mod2_marker.node_index = marker.parent
            mod2_marker.translation[:] = marker.pos_x / 100,\
                                         marker.pos_y / 100,\
                                         marker.pos_z / 100
            mod2_marker.rotation[:] = marker.rot_i, marker.rot_j,\
                                      marker.rot_k, marker.rot_w

    # set the node counts per lod
    for lod in util.LOD_NAMES:
        lod_nodes = all_lod_nodes[lod]
        adding = True
        node_ct = len(mod2_nodes)

        for i in range(node_ct - 1, -1, -1):
            if i in lod_nodes:
                break
            node_ct -= 1

        setattr(tagdata, "%s_lod_nodes" % lod, max(0, node_ct - 1))

    # calculate triangle strips
    stripped_geom_meshes = []
    for geom_idx in range(len(geom_meshes)):
        material_meshes = {}
        stripped_geom_meshes.append(material_meshes)
        for mat_idx in sorted(geom_meshes[geom_idx]):
            material_meshes[mat_idx] = mesh_list = []
            geom_mesh = geom_meshes[geom_idx][mat_idx]
            all_verts = geom_mesh.verts

            stripifier = Stripifier()
            stripifier.load_mesh(geom_mesh.tris, True)
            stripifier.make_strips()
            stripifier.link_strips()

            if stripifier.get_strip_count() == 1:
                tri_strip = stripifier.translate_strip(stripifier.get_strip())
            else:
                all_verts = util.EMPTY_GEOM_VERTS
                tri_strip = (0, 1, 2)

            if len(tri_strip) > util.MAX_STRIP_LEN:
                return (("Too many triangles ya f**k. Max triangles per "
                         "geometry is %s.\nThis geometry is %s after linking "
                         "all strips.") %
                        (util.MAX_STRIP_LEN, len(tri_strip)), )

            mesh_list.append(GeometryMesh(all_verts, tri_strip))

    # make the geometries
    mod2_geoms = tagdata.geometries.STEPTREE
    del mod2_geoms[:]
    vert_packer = PyStruct(">14f2h2f").pack_into
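    # ">14f2h2f" = one 68-byte uncompressed vertex: position, normal,
    # binormal, tangent, u/v, two node indices, and two node weights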
    for geom_idx in range(len(stripped_geom_meshes)):
        mod2_geoms.append()
        mod2_parts = mod2_geoms[-1].parts.STEPTREE

        for mat_idx in sorted(stripped_geom_meshes[geom_idx]):
            geom_mesh_list = stripped_geom_meshes[geom_idx][mat_idx]
            for geom_mesh in geom_mesh_list:
                mod2_parts.append()
                mod2_part = mod2_parts[-1]
                mod2_verts = mod2_part.uncompressed_vertices.STEPTREE

                tris = geom_mesh.tris
                verts = geom_mesh.verts
                vert_ct = len(verts)
                mod2_verts.extend(len(verts))
                mod2_part.shader_index = mat_idx

                cent_x = cent_y = cent_z = 0

                # TODO: Modify this to take into account local nodes
                # honestly though, who the f**k is going to care? f**k it.

                # make a raw vert reflexive and replace the one in the part
                mod2_part.uncompressed_vertices = util.mod2_verts_def.build()
                mod2_verts = mod2_part.uncompressed_vertices.STEPTREE = \
                             bytearray(68 * len(verts))
                i = 0
                for vert in verts:
                    vert_packer(mod2_verts, i, vert.pos_x / 100,
                                vert.pos_y / 100, vert.pos_z / 100,
                                vert.norm_i, vert.norm_j, vert.norm_k,
                                vert.binorm_i, vert.binorm_j, vert.binorm_k,
                                vert.tangent_i, vert.tangent_j, vert.tangent_k,
                                vert.tex_u / u_scale,
                                (1 - vert.tex_v) / v_scale, vert.node_0,
                                vert.node_1, 1 - vert.node_1_weight,
                                vert.node_1_weight)
                    i += 68
                    cent_x += vert.pos_x / (vert_ct * 100)
                    cent_y += vert.pos_y / (vert_ct * 100)
                    cent_z += vert.pos_z / (vert_ct * 100)

                mod2_part.centroid_translation[:] = [cent_x, cent_y, cent_z]

                # make a raw tri reflexive and replace the one in the part
                mod2_part.triangles = util.mod2_tri_strip_def.build()
                mod2_tris = mod2_part.triangles.STEPTREE = bytearray(
                    [255, 255]) * (3 * ((len(tris) + 2) // 3))
                i = 0
                for tri in tris:
                    mod2_tris[i] = tri >> 8
                    mod2_tris[i + 1] = tri & 0xFF
                    i += 2
Example no. 15
def deserialize_frame_info(anim, include_extra_base_frame=False, endian=">"):
    i = 0
    dx = dy = dz = dyaw = x = y = z = yaw = 0.0

    root_node_info = [JmaRootNodeState() for i in range(anim.frame_count)]
    frame_info = anim.frame_info.data

    # write to the data
    if "dz" in anim.frame_info_type.enum_name:
        unpack = PyStruct(endian + "4f").unpack_from
        for f in range(anim.frame_count):
            dx, dy, dz, dyaw = unpack(frame_info, i)
            dx *= 100
            dy *= 100
            dz *= 100

            info = root_node_info[f]
            info.dx = dx
            info.dy = dy
            info.dz = dz
            info.dyaw = dyaw
            info.x = x
            info.y = y
            info.z = z
            info.yaw = yaw

            x += dx
            y += dy
            z += dz
            yaw += dyaw
            i += 16

    elif "dyaw" in anim.frame_info_type.enum_name:
        unpack = PyStruct(endian + "3f").unpack_from
        for f in range(anim.frame_count):
            dx, dy, dyaw = unpack(frame_info, i)
            dx *= 100
            dy *= 100

            info = root_node_info[f]
            info.dx = dx
            info.dy = dy
            info.dyaw = dyaw
            info.x = x
            info.y = y
            info.yaw = yaw

            x += dx
            y += dy
            yaw += dyaw
            i += 12

    elif "dx" in anim.frame_info_type.enum_name:
        unpack = PyStruct(endian + "2f").unpack_from
        for f in range(anim.frame_count):
            dx, dy = unpack(frame_info, i)
            dx *= 100
            dy *= 100

            info = root_node_info[f]
            info.dx = dx
            info.dy = dy
            info.x = x
            info.y = y

            x += dx
            y += dy
            i += 8

    if include_extra_base_frame and root_node_info:
        # duplicate the last frame and add that frame's
        # deltas to its accumulated totals.
        last_root_node_info = deepcopy(root_node_info[-1])
        last_root_node_info.x += last_root_node_info.dx
        last_root_node_info.y += last_root_node_info.dy
        last_root_node_info.z += last_root_node_info.dz
        last_root_node_info.yaw += last_root_node_info.dyaw

        # no delta on last frame. zero it out
        last_root_node_info.dx = 0.0
        last_root_node_info.dy = 0.0
        last_root_node_info.dz = 0.0
        last_root_node_info.dyaw = 0.0

        root_node_info.append(last_root_node_info)

    return root_node_info
Example no. 16
def cast_uint32_to_float(uint32,
                         packer=PyStruct("<I"),
                         unpacker=PyStruct("<f")):
    return unpacker.unpack(packer.pack(uint32))[0]
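# e.g. cast_uint32_to_float(0x3F800000) == 1.0, since 0x3F800000 is the
# IEEE-754 bit pattern of 1.0; the bytes are reinterpreted, not converted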
for key in tag_class_be_int_to_fcc_stubbs:
    tag_cls_int_to_ext[key] = tag_class_fcc_to_ext_stubbs[
        tag_class_be_int_to_fcc_stubbs[key]]

NULL_CLASS = b'\xFF\xFF\xFF\xFF'

shader_class_bytes = (NULL_CLASS, NULL_CLASS, NULL_CLASS, b'vnes', b'osos',
                      b'rtos', b'ihcs', b'xecs', b'taws', b'algs', b'tems',
                      b'alps', b'rdhs')

object_class_bytes = (b'dpib', b'ihev', b'paew', b'piqe', b'brag', b'jorp',
                      b'necs', b'hcam', b'lrtc', b'ifil', b'calp', b'ecss',
                      b'ejbo')

_3_uint32_struct = PyStruct("<LLL")
_4_uint32_struct = PyStruct("<LLLL")
_5_uint32_struct = PyStruct("<LLLLL")


def read_reflexive(map_data,
                   refl_offset,
                   max_count=0xFFffFFff,
                   struct_size=1,
                   tag_magic=None,
                   unpacker=_3_uint32_struct.unpack):
    '''
    Reads a reflexive from the given map_data at the given offset.
    Returns the reflexive's offset and pointer.
    '''
    map_data.seek(refl_offset)