Example 1
def stream_adapter_serializer(self,
                              node,
                              parent=None,
                              attr_index=None,
                              writebuffer=None,
                              root_offset=0,
                              offset=0,
                              **kwargs):

    try:
        # make a new buffer to write the data to
        temp_buffer = BytearrayBuffer()
        orig_offset = offset
        desc = node.desc
        align = desc.get('ALIGN')

        try:
            sub_desc = node.data.desc
        except AttributeError:
            sub_desc = desc['SUB_STRUCT']

        # If there is a specific pointer to read the node from then go to it.
        # Only do this, however, if the POINTER can be expected to be accurate.
        # If the pointer is a path to a previously parsed field, but this node
        # is being built without a parent (such as from an exported block)
        # then the path won't be valid. The current offset will be used instead.
        if attr_index is not None and desc.get('POINTER') is not None:
            offset = node.get_meta('POINTER', **kwargs)
        elif align:
            offset += (align - (offset % align)) % align

        # write the sub_struct to the temp buffer
        sub_desc['TYPE'].serializer(node.data, node, 'SUB_STRUCT', temp_buffer,
                                    0, 0, **kwargs)

        # use the encoder method to adapt the contents of the temp buffer
        # into the stream that will actually be written to the writebuffer
        adapted_stream = desc['ENCODER'](node, temp_buffer, **kwargs)

        # write the adapted stream to the writebuffer
        writebuffer.seek(root_offset + offset)
        writebuffer.write(adapted_stream)

        # pass the incremented offset to the caller
        return offset + len(adapted_stream)
    except (Exception, KeyboardInterrupt) as e:
        desc = locals().get('desc', None)
        error = format_serialize_error(e,
                                       field_type=self,
                                       desc=desc,
                                       parent=parent,
                                       buffer=temp_buffer,
                                       attr_index=attr_index,
                                       root_offset=root_offset,
                                       offset=offset,
                                       **kwargs)
        # raise a new error if it was replaced, otherwise reraise
        if error is e:
            raise
        raise error from e
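The serializer above only assumes that desc['ENCODER'] is a callable taking (node, temp_buffer, **kwargs) and returning the adapted bytes to write. A minimal sketch of such a callable, using zlib purely as an illustrative transform (the function name, the compression choice, and the length prefix are assumptions, not supyr_struct's actual stream adapter):

import struct
import zlib

def example_zlib_encoder(node, temp_buffer, **kwargs):
    # temp_buffer holds the serialized SUB_STRUCT; BytearrayBuffer behaves
    # like a bytearray here, so bytes() gives its raw contents
    raw = bytes(temp_buffer)
    # prepend the uncompressed length so a matching DECODER
    # could size its output when parsing the stream back
    return struct.pack('<I', len(raw)) + zlib.compress(raw)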
Example 2
    def inject_rawdata(self, meta, tag_cls, tag_index_ref):
        # get some rawdata that would be pretty annoying to do in the parser
        if tag_cls == "bitm":
            # grab bitmap data correctly from map
            new_pixels = BytearrayBuffer()
            pix_off = 0

            for bitmap in meta.bitmaps.STEPTREE:
                # grab the bitmap data from the correct map
                bitmap.pixels_offset = pix_off

                ptr, map_name = split_raw_pointer(bitmap.lod1_offset)
                halo_map = self
                if map_name != "local":
                    halo_map = self.maps.get(map_name)

                if halo_map is None:
                    bitmap.lod1_size = 0
                    continue

                halo_map.map_data.seek(ptr)
                mip_pixels = halo_map.map_data.read(bitmap.lod1_size)
                mip_pixels = zlib.decompress(mip_pixels)

                new_pixels += mip_pixels
                bitmap.lod1_size = len(mip_pixels)
                pix_off += bitmap.lod1_size

            meta.processed_pixel_data.STEPTREE = new_pixels
Example 3
    def meta_to_tag_data(self, meta, tag_cls, tag_index_ref, **kwargs):
        magic = self.map_magic
        engine = self.engine
        map_data = self.map_data
        tag_index = self.tag_index
        is_xbox = get_is_xbox_map(engine)

        if tag_cls == "bitm":
            # set the size of the compressed plate data to nothing
            meta.compressed_color_plate_data.STEPTREE = BytearrayBuffer()

            new_pixels_offset = 0

            # set the prefer_low_detail flag based on the platform
            # and set up the pixels_offset correctly.
            for bitmap in meta.bitmaps.STEPTREE:
                bitmap.flags.prefer_low_detail = is_xbox
                bitmap.pixels_offset = new_pixels_offset
                new_pixels_offset += bitmap.pixels_meta_size

                # clear some meta-only fields
                bitmap.pixels_meta_size = 0
                bitmap.bitmap_id_unknown1 = bitmap.bitmap_id_unknown2 = 0
                bitmap.bitmap_data_pointer = bitmap.base_address = 0

        elif tag_cls == "snd!":
            meta.maximum_bend_per_second = meta.maximum_bend_per_second**30
            for pitch_range in meta.pitch_ranges.STEPTREE:
                for permutation in pitch_range.permutations.STEPTREE:
                    if permutation.compression.enum_name == "none":
                        # byteswap pcm audio
                        byteswap_pcm16_samples(permutation.samples)

        return meta
Example 4
    def xbox_sign(self, rawdata=None, authkey=None):
        if rawdata is None:
            rawdata = self.data.serialize(
                buffer=BytearrayBuffer())[:self.data_end]

        hmac_sig = self.calc_hmac_sig(rawdata, authkey)
        self.data.gametype_footer.hmac_sig = hmac_sig
Example 5
    def calc_crc32(self, buffer=None, offset=None):
        '''Returns the crc32 checksum of the data in 'buffer' up
        to the 'offset' specified. If offset is not specified, the
        entire buffer is used. If buffer is not specified, the tag
        will be written and the returned buffer will be used.'''
        if buffer is None:
            buffer = self.data.serialize(buffer=BytearrayBuffer())
        return 0xFFFFFFFF - (binascii.crc32(buffer[:offset]) & 0xFFFFFFFF)
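For reference, the value this returns is the bitwise complement of the standard CRC-32, kept within 32 bits. A quick standalone check of that identity with plain binascii (the sample bytes are arbitrary):

import binascii

data = b"example gametype bytes"
crc = binascii.crc32(data) & 0xFFFFFFFF
# subtracting from 0xFFFFFFFF never borrows, so it equals flipping all 32 bits
assert 0xFFFFFFFF - crc == (~crc) & 0xFFFFFFFF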
Example 6
    def xbox_sign(self, rawdata=None, authkey=None):
        if rawdata is None:
            rawdata = self.data.serialize(buffer=BytearrayBuffer())
            if self.data_end != 0:
                rawdata = rawdata[self.data_start:self.data_end]
            else:
                rawdata = rawdata[self.data_start:]
        hmac_sig = self.calc_hmac_sig(rawdata, authkey)
        self.data.hmac_sig = hmac_sig
Example 7
def parse_lzw_stream(parent, rawdata, root_offset=0, offset=0, **kwargs):
    '''
    Reads a stream of lzw compressed data from rawdata.
    Returns the compressed stream and its length.
    '''
    start = root_offset + offset
    size = get_lzw_data_length(rawdata, start)
    rawdata.seek(start)

    return BytearrayBuffer(rawdata.read(size)), size
Example 8
    def setup_tag_headers(self):
        if type(self).tag_headers is not None:
            return

        tag_headers = type(self).tag_headers = {}
        for def_id in sorted(self.defs):
            if def_id in tag_headers or len(def_id) != 4:
                continue
            h_desc, h_block = self.defs[def_id].descriptor[0], [None]
            h_desc['TYPE'].parser(h_desc, parent=h_block, attr_index=0)
            tag_headers[def_id] = bytes(h_block[0].serialize(
                buffer=BytearrayBuffer(), calc_pointers=False))
Example 9
    def parse(self, **kwargs):
        ''''''
        if kwargs.get('filepath') is None and kwargs.get('rawdata') is None:
            kwargs['filepath'] = self.filepath

        rawdata = get_rawdata(**kwargs)
        if rawdata:
            rawdata = kwargs['rawdata'] = BytearrayBuffer(rawdata)
        if 'filepath' in kwargs:
            del kwargs['filepath']

        if rawdata is not None:
            is_ce = self.validate_checksum(rawdata, CE_CRC32_OFF)
            is_pc = self.validate_checksum(rawdata, PC_CRC32_OFF)
            #if the checksum doesn't check out for either PC or CE,
            #see if the big endian version of the checksum checks out
            if not (is_ce or is_pc):
                if self.validate_checksum(rawdata, PC_CRC32_OFF, '>'):
                    #turns out the gametype is big endian, who woulda thought?
                    self.is_powerpc = is_pc = True

            self.is_xbox = not (is_ce or is_pc)

            #if the gametype isn't a valid PC gametype, make it a hybrid of both
            if is_ce and not is_pc:
                #copy the checksum to the PC Halo specific location
                rawdata[0x94:0x9C] = rawdata[0xD4:0xDC]
                #copy the gametype settings to the PC Halo specific location
                rawdata[0x7C:0x94] = rawdata[0x9C:0xB4]
        else:
            self.is_xbox = False
            self.is_powerpc = False

        #make sure to force all the fields to read normal endianness
        #after trying to read the tag, even if there is an exception
        try:
            if self.is_powerpc:
                FieldType.force_big()
            return XboxSaveTag.parse(self, **kwargs)
        finally:
            if self.is_powerpc:
                FieldType.force_normal()
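The two hybrid copies above move equal-sized regions. A quick sanity check of the offsets taken from the code (8 bytes of checksum, 0x18 bytes of gametype settings):

# checksum region is 8 bytes at both the PC (0x94) and CE (0xD4) locations
assert 0x9C - 0x94 == 0xDC - 0xD4 == 8
# gametype settings region is 0x18 bytes at both locations
assert 0x94 - 0x7C == 0xB4 - 0x9C == 0x18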
Example 10
    def meta_to_tag_data(self, meta, tag_cls, tag_index_ref, **kwargs):
        engine = self.engine
        tag_index = self.tag_index

        if tag_cls == "bitm":
            # set the size of the compressed plate data to nothing
            meta.compressed_color_plate_data.STEPTREE = BytearrayBuffer()
            new_pixels_offset = 0

            # set up the lod1_offset correctly and
            # clear the meta-only lod fields.
            for bitmap in meta.bitmaps.STEPTREE:
                bitmap.lod1_offset = new_pixels_offset
                new_pixels_offset += bitmap.lod1_size

                bitmap.lod2_offset = bitmap.lod3_offset = bitmap.lod4_offset =\
                                     bitmap.lod5_offset = bitmap.lod6_offset = 0
                bitmap.lod2_size = bitmap.lod3_size = bitmap.lod4_size =\
                                   bitmap.lod5_size = bitmap.lod6_size = 0

        return meta
Example 11
def parse_rle_stream(parent, rawdata, root_offset=0, offset=0, **kwargs):
    '''
    Returns a buffer of pixel data from the supplied rawdata as
    well as the number of bytes long the compressed data was.
    If the tag says the pixel data is rle compressed, this
    function will decompress the buffer before returning it.
    '''
    assert parent is not None, "Cannot parse tga pixels without a parent"

    header = parent.parent.header
    pixels_count = header.width * header.height
    image_type = header.image_type

    bpp = (header.bpp + 7) // 8  # +7 to round up to nearest byte

    start = root_offset + offset
    bytes_count = pixels_count * bpp

    if image_type.rle_compressed:
        pixels = BytearrayBuffer([0] * bytes_count)

        comp_bytes_count = curr_pixel = 0
        rawdata.seek(start)

        while curr_pixel < pixels_count:
            packet_header = rawdata.read(1)[0]
            if packet_header & 128:
                # this packet is compressed with RLE
                pixels.write(rawdata.read(bpp) * (packet_header - 127))
                comp_bytes_count += 1 + bpp
                curr_pixel += packet_header - 127
            else:
                # it's a raw packet
                pixels.write(rawdata.read((packet_header + 1) * bpp))
                comp_bytes_count += 1 + (packet_header + 1) * bpp
                curr_pixel += packet_header + 1

        return pixels, comp_bytes_count
    else:
        return BytearrayBuffer(rawdata[start:start + bytes_count]), bytes_count
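The same packet rules can be shown on plain bytes, independent of the tga descriptors. This is only a sketch (the function name and sample data are made up): a header byte with bit 7 set starts a run-length packet of (header - 127) copies of one pixel, anything else starts a raw packet of (header + 1) literal pixels.

def decode_tga_rle(data, pixel_count, bpp):
    out = bytearray()
    pos = pixels = 0
    while pixels < pixel_count:
        header = data[pos]
        pos += 1
        if header & 128:
            # run-length packet: one pixel value repeated (header - 127) times
            count = header - 127
            out += data[pos:pos + bpp] * count
            pos += bpp
        else:
            # raw packet: (header + 1) literal pixels
            count = header + 1
            out += data[pos:pos + count * bpp]
            pos += count * bpp
        pixels += count
    # return the decoded pixels and how many compressed bytes were consumed
    return bytes(out), pos

# one RLE packet (3 copies of 0xAA) followed by a raw packet (0xBB, 0xCC)
assert decode_tga_rle(bytes([0x82, 0xAA, 0x01, 0xBB, 0xCC]), 5, 1) == \
       (b'\xaa\xaa\xaa\xbb\xcc', 5)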
Example 12
    def serialize(self, **kwargs):
        '''Writes this tag to the set path like normal, but makes
        sure to calculate and set the checksums before doing so.'''
        try:
            if self.is_powerpc:
                FieldType.force_big()
            if self.is_xbox:
                #calculate the xbox checksum
                self.xbox_sign()
            else:
                #calculate the pc/ce checksum
                footer = self.data.gametype_footer
                footer.crc_32 = self.calc_crc32(None, CE_CRC32_OFF)

                footer.hybrid_settings = BytearrayBuffer()
                self.data.gametype_settings.serialize(buffer=footer.\
                                                      hybrid_settings)

                footer.crc_32_ce = self.calc_crc32(None, PC_CRC32_OFF)
            return Tag.serialize(self, **kwargs)
        finally:
            if self.is_powerpc:
                FieldType.force_normal()
Example 13
    def _get_tag_hash(self, data, def_id, data_key, hashes, is_meta,
                      partial_hashes, node_depths, curr_depth):
        # NOTE! a clone of node_depths must be provided when calling
        # this function as its contents MUST depend on the hierarchy

        if data_key in partial_hashes:
            ###########################################
            #              INCOMPLETE
            ###########################################
            return partial_hashes[data_key]
        elif data_key in hashes:
            #########################################################
            #  If a tag is detected as being circularly referenced,
            #  a set of data_keys will be returned containing any
            #  references which were re-encountered. If a higher up
            #  _get_tag_hash recursion sees that the returned set
            #  isn't empty, it will update its own set with it.
            #  When choosing whether to save the hash as a partial or
            #  full one, this recursion level's data_key will be removed
            #  from the set. If the set is not empty then it will be
            #  considered a partial hash.
            #
            #  When a circular tag is encountered, all tags in its
            #  chain need to be recognized as circular. This makes it
            #  so that any time a tag in that chain is about to be hashed,
            #  it will know that unless a hash has been calculated
            #  for the tag the chain was entered on, a complete hash
            #  will need to be calculated starting at that point.
            #########################################################
            partial_hashes[data_key] = None
            return

        # keep track of the depth of any circular reference
        node_depths[data_key] = curr_depth
        curr_depth += 1

        empty = ((), ())

        reflexive_paths = self.reflexive_cache.get(def_id, empty)[1]
        raw_data_paths = self.raw_data_cache.get(def_id, empty)[1]
        tag_ref_paths = self.tag_ref_cache.get(def_id, empty)[1]

        # temporarily put a None in for this hash so we know we're
        # trying to compute it, but that it's not been determined yet
        hashes[data_key] = None

        # local variables for faster access
        __lsi__ = list.__setitem__
        __osa__ = object.__setattr__
        ext_id_map = self.ext_id_map

        # null out the parts of a tag that can screw
        # with the hash when compared to a tag meta
        if tag_ref_paths is not empty:
            for b in self.get_nodes_by_paths(tag_ref_paths, data):
                tag_id = b.id
                ref_key = tag_id[0] + (tag_id[1] << 16)
                filepath = b.filepath
                __lsi__(b, 1, 0)  # set path_pointer to 0
                __lsi__(b, 2, 0)  # set path_length to 0
                # set id to 0
                __lsi__(tag_id, 0, 0)
                __lsi__(tag_id, 1, 0)

                if ((is_meta and ref_key == 0xFFFFFFFF)
                        or not (is_meta or filepath)):
                    # dependency is empty
                    __osa__(b, 'STEPTREE', '')
                    continue

                try:
                    ext = "." + b[0].enum_name
                except Exception:
                    ext = ".NONE"
                if filepath and not is_meta:
                    ref_key = filepath + ext

                if ext == ".NONE":
                    # the tag class is invalid
                    hashes[ref_key] = BAD_DEPENDENCY_HASH
                    __osa__(b, 'STEPTREE', BAD_DEPENDENCY_HASH[1])
                    continue
                elif ext == '.model' and self.treat_mode_as_mod2:
                    ext = '.gbxmodel'

                if ref_key in node_depths:
                    # this dependency points to something already being
                    # parsed in the chain above. use the depth as the hash
                    __osa__(b, 'STEPTREE',
                            str(curr_depth - node_depths[ref_key]))
                    continue

                if is_meta:
                    refd_tagdata = self.get_meta(ref_key)
                else:
                    refd_tag = self.get_tag(ref_key, ext_id_map[ext], True)
                    refd_tagdata = refd_tag.data.tagdata
                    # conserve ram
                    self.delete_tag(tag=refd_tag)

                # get the hash of this dependency
                self._get_tag_hash(refd_tagdata, ext_id_map[ext], ref_key,
                                   hashes, is_meta, partial_hashes,
                                   dict(node_depths), curr_depth)

                if ref_key in partial_hashes:
                    ###########################################
                    #              INCOMPLETE
                    ###########################################
                    __osa__(b, 'STEPTREE', partial_hashes[ref_key])
                elif hashes.get(ref_key):
                    # a hash exists for this reference, so set the path to it
                    __osa__(b, 'STEPTREE', hashes[ref_key][1])

        if reflexive_paths is not empty:
            for b in self.get_nodes_by_paths(reflexive_paths, data):
                __lsi__(b, 1, 0)  # set pointer to 0
                __lsi__(b, 2, 0)  # set id to 0

        if raw_data_paths is not empty:
            for b in self.get_nodes_by_paths(raw_data_paths, data):
                b[0].data = 0  # set flags to 0
                __lsi__(b, 2, 0)  # set raw_pointer to 0
                __lsi__(b, 3, 0)  # set pointer to 0
                __lsi__(b, 4, 0)  # set id to 0

        # serialize the tag data to a hashbuffer
        hashbuffer = BytearrayBuffer()

        if is_meta:
            data.TYPE.serializer(data, writebuffer=hashbuffer)
        else:
            # force the library to serialize tags in little endian
            try:
                FieldType.force_little()
                data.TYPE.serializer(data, writebuffer=hashbuffer)
            finally:
                FieldType.force_normal()

        # we'll include the def_id on the end of the data
        # to make sure tags of different types, but identical
        # contents, aren't detected as the same tag.
        hsh = md5(hashbuffer + def_id.encode("latin-1"))

        partial = False

        if partial:
            ###########################################
            #              INCOMPLETE
            ###########################################
            partial_hashes[data_key] = hsh.digest()
        else:
            hashes[data_key] = (hsh.digest(), hsh.hexdigest())
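The reason for appending def_id before hashing can be shown in isolation: identical serialized bytes hashed under two different definition ids give different digests (hashlib only; the payload and the ids here are made up):

from hashlib import md5

payload = b'\x00' * 64   # stands in for the serialized hashbuffer contents
digest_a = md5(payload + "bitm".encode("latin-1")).hexdigest()
digest_b = md5(payload + "snd!".encode("latin-1")).hexdigest()
assert digest_a != digest_b   # same contents, different def_id, different hash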
Example 14
def serialize_rle_stream(parent, buffer, **kwargs):
    '''
    Returns a buffer of pixel data from the supplied buffer.
    If the tag says the pixel data is rle compressed, this
    function will compress the buffer before returning it.
    '''
    assert parent is not None, "Cannot write tga pixels without a parent"

    header = parent.parent.header

    if header.image_type.rle_compressed:
        bpp = (header.bpp + 7) // 8  # +7 to round up to nearest byte

        buffer.seek(0)

        # start the compressed pixels buffer out as the same size as the
        # uncompressed pixels to minimize the number of times python has to
        # reallocate space every time the comp_pixels buffer is written to
        comp_pixels = BytearrayBuffer([0] * len(buffer))

        # get the first pixel to compress
        curr_pixel = buffer.read(bpp)
        next_pixel = buffer.peek(bpp)

        # keep running as long as there are pixels
        while curr_pixel:
            if curr_pixel == next_pixel:
                # this packet can be compressed with RLE
                rle_len = 1
                packet = curr_pixel

                # DO NOT REVERSE THESE CONDITIONS. If you do, read
                # won't be called and the read position will be wrong
                while curr_pixel == buffer.read(bpp) and rle_len < 128:
                    # see how many repeated pixels we can find (128 at most)
                    rle_len += 1

                # seek backward and read the last pixel
                buffer.seek(-bpp, 1)
                curr_pixel = buffer.read(bpp)
                next_pixel = buffer.peek(bpp)

                # write the header and the packet to comp_pixels
                comp_pixels.write(bytes([127 + rle_len]) + packet)

                # if the next read returns nothing, there are no more pixels
                if len(next_pixel) != bpp:
                    break
            else:
                # this should be a raw packet
                packet = b''

                while curr_pixel != next_pixel and len(packet) // bpp < 128:
                    # see how many non-repeated pixels we can find (128 at most)
                    packet += curr_pixel
                    curr_pixel = next_pixel
                    next_pixel = buffer.read(bpp)

                    # if next_pixel is the start of a repeated
                    # sequence of pixels then just break here
                    if curr_pixel == next_pixel or len(next_pixel) != bpp:
                        break

                # write the header and the packet to comp_pixels
                comp_pixels.write(bytes([len(packet) // bpp - 1]) + packet)

                # if the next read returns nothing, there are no more pixels
                if len(curr_pixel) != bpp:
                    break

        # slice the compressed pixels off at where the last write ended
        comp_pixels = comp_pixels[:comp_pixels.tell()]
    else:
        comp_pixels = buffer

    return comp_pixels
Example 15
    def serialize(self, **kwargs):
        '''
        This function will serialize this Block to the provided
        filepath/buffer. The name of the Block will be used as the
        extension. This function is used ONLY for writing a piece
        of a tag to a file/buffer, not the entire tag. DO NOT CALL
        this function when writing a whole tag at once.
        '''

        buffer = kwargs.pop('buffer', kwargs.pop('writebuffer', None))
        filepath = kwargs.pop('filepath', None)
        temp = kwargs.pop('temp', False)
        clone = kwargs.pop('clone', True)
        zero_fill = kwargs.pop('zero_fill', True)

        attr_index = kwargs.pop('attr_index', None)
        root_offset = kwargs.pop('root_offset', 0)
        offset = kwargs.pop('offset', 0)

        kwargs.pop('parent', None)

        mode = 'buffer'
        parent = None
        block = self
        desc = self.desc
        if buffer is None:
            mode = 'file'

        if 'tag' in kwargs:
            parent_tag = kwargs.pop("tag")
        else:
            parent_tag = self.get_root()

        if "calc_pointers" in kwargs:
            calc_pointers = kwargs["calc_pointers"]
        if isinstance(parent_tag, supyr_struct.tag.Tag):
            calc_pointers = parent_tag.calc_pointers
        else:
            calc_pointers = True
            parent_tag = None

        # convert string attr_indexes to ints
        if isinstance(attr_index, str) and attr_index not in desc:
            attr_index = desc['NAME_MAP'][attr_index]

        # if we are serializing an attribute, change some stuff
        if attr_index is not None:
            parent = self
            block = self[attr_index]
            desc = desc[attr_index]

        calc_pointers = bool(kwargs.pop("calc_pointers", calc_pointers))

        if filepath is None and buffer is None:
            # neither a filepath nor a buffer was
            # given, so make a BytearrayBuffer to write to.
            buffer = BytearrayBuffer()
            mode = 'buffer'
        elif filepath is not None and buffer is not None:
            raise IOError("Provide either a buffer or a filepath, not both.")

        if mode == 'file':
            filepath = str(Path(filepath))
            folderpath = os.path.dirname(filepath)

            if os.path.exists(filepath) and not os.path.isfile(filepath):
                raise IOError('filepath must be a valid path ' +
                              'to a file, not a folder.')

            # if the folder doesn't exist, create it
            if not os.path.exists(folderpath):
                os.makedirs(folderpath)

            if temp:
                filepath += ".temp"
            try:
                # to avoid 'open' failing if windows files are hidden, we
                # open in 'r+b' mode and truncate if the file exists.
                if os.path.isfile(filepath):
                    buffer = open(filepath, 'r+b')
                    buffer.truncate(0)
                else:
                    buffer = open(filepath, 'w+b')
            except Exception:
                raise IOError('Output filepath for serializing Block ' +
                              'was invalid or the file could not ' +
                              'be created.\n    %s' % filepath)

        # make sure the buffer has a valid write and seek routine
        if not (hasattr(buffer, 'write') and hasattr(buffer, 'seek')):
            raise TypeError('Cannot serialize a Block without either' +
                            ' an output path or a writable buffer')

        cloned = False
        # try to write the Block to the buffer
        try:
            # if we need to calculate the pointers, do so
            if calc_pointers:
                # Make a copy of this Block so any changes
                # to pointers don't affect the entire Tag
                try:
                    if clone:
                        block = block.__deepcopy__({})
                        cloned = True
                        # remove the parent so any pointers
                        # higher in the tree are unaffected
                        block.parent = None
                    block.set_pointers(offset)
                except (NotImplementedError, AttributeError):
                    pass

            # make the buffer as large as the Block is calculated to fill
            if zero_fill:
                try:
                    blocksize = block.binsize
                    buffer.seek(0, 2)
                    if buffer.tell() < blocksize:
                        buffer.seek(blocksize - 1)
                        buffer.write(b'\x00')
                except AttributeError:
                    pass

            # commence the writing process
            desc[TYPE].serializer(block,
                                  parent=parent,
                                  attr_index=attr_index,
                                  writebuffer=buffer,
                                  root_offset=root_offset,
                                  offset=offset,
                                  **kwargs)

            # if a copy of the Block was made, delete the copy
            if cloned:
                del block
                cloned = False

            # return the filepath or the buffer in case
            # the caller wants to do anything with it
            if mode == 'file':
                try:
                    buffer.close()
                except Exception:
                    pass
                return str(filepath)
            return buffer
        except Exception as e:
            if mode == 'file':
                try:
                    buffer.close()
                except Exception:
                    pass
            try:
                os.remove(str(filepath))
            except Exception:
                pass
            # if a copy of the Block was made, delete the copy
            if cloned:
                del block
            e.args += ("Error occurred while attempting to serialize the "
                       "%s to:\"%s\"" % (type(self), str(filepath)), )
            raise
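The zero_fill branch above relies on the write target padding the gap with zeroes when you seek past its end and write a single byte. A standalone sketch of that trick, with io.BytesIO standing in for the write buffer and an arbitrary example block size:

from io import BytesIO

blocksize = 0x40          # stands in for block.binsize
buffer = BytesIO()
buffer.seek(0, 2)         # seek to the end to measure the current length
if buffer.tell() < blocksize:
    buffer.seek(blocksize - 1)
    buffer.write(b'\x00')  # writing the final byte pads the gap with zeroes
assert len(buffer.getvalue()) == blocksize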
Example 16
    def inject_rawdata(self, meta, tag_cls, tag_index_ref):
        bitmaps = self.maps.get("bitmaps")
        sounds = self.maps.get("sounds")
        loc = self.maps.get("loc")

        magic = self.map_magic
        engine = self.engine

        map_data = self.map_data

        try:
            bitmap_data = bitmaps.map_data
        except Exception:
            bitmap_data = None
        try:
            sound_data = sounds.map_data
        except Exception:
            sound_data = None
        try:
            loc_data = loc.map_data
        except Exception:
            loc_data = None

        is_not_indexed = not self.is_indexed(tag_index_ref.id & 0xFFFF)
        might_be_in_rsrc = engine in ("halo1pc", "halo1pcdemo", "halo1ce",
                                      "halo1yelo", "halo1vap")
        might_be_in_rsrc &= not self.is_resource

        # get some rawdata that would be pretty annoying to do in the parser
        if tag_cls == "bitm":
            # grab bitmap data from map
            new_pixels = BytearrayBuffer()

            # to enable compatibility with my bitmap converter we'll set the
            # base address to a certain constant based on the console platform
            is_xbox = get_is_xbox_map(engine)
            for bitmap in meta.bitmaps.STEPTREE:
                pixel_data = map_data
                if might_be_in_rsrc and bitmap.flags.data_in_resource_map:
                    pixel_data = bitmap_data

                if pixel_data is None: return

                # grab the bitmap data from this map (no magic used)
                pixel_data.seek(bitmap.pixels_offset)
                new_pixels += pixel_data.read(bitmap.pixels_meta_size)

                bitmap.base_address = 1073751810 * is_xbox

            meta.processed_pixel_data.STEPTREE = new_pixels
        elif tag_cls == "font":
            # might need to grab pixel data from resource map
            meta_offset = tag_index_ref.meta_offset

            if is_not_indexed:
                return meta
            elif not self.is_resource:
                if loc is None or loc.map_header is None: return
                meta_offset = loc.rsrc_map.data.tags[meta_offset].tag.offset

            if loc_data is None: return

            loc_data.seek(meta.pixels.pointer + meta_offset)
            meta.pixels.data = loc_data.read(meta.pixels.size)
        elif tag_cls == "hmt ":
            # might need to grab string data from resource map
            meta_offset = tag_index_ref.meta_offset

            if is_not_indexed:
                return meta
            elif not self.is_resource:
                if loc is None or loc.map_header is None: return
                meta_offset = loc.rsrc_map.data.tags[meta_offset].tag.offset

            b = meta.string
            loc_data.seek(b.pointer + meta_offset)
            meta.string.data = loc_data.read(b.size).decode('utf-16-le')
        elif tag_cls == "snd!":
            # might need to get samples and permutations from the resource map
            is_pc = engine in ("halo1pc", "halo1pcdemo")
            is_ce = engine in ("halo1ce", "halo1yelo", "halo1vap")
            if not (is_pc or is_ce):
                return meta
            elif sound_data is None:
                return

            # ce tagpaths are in the format:  path__permutations
            #     ex: sound\sfx\impulse\coolant\enter_water__permutations
            #
            # pc tagpaths are in the format:  path__pitchrange__permutation
            #     ex: sound\sfx\impulse\coolant\enter_water__0__0
            other_data = map_data
            sound_magic = 0 - magic
            # DO NOT optimize this section. The logic is like this on purpose
            if is_pc:
                pass
            elif self.is_resource:
                other_data = sound_data
                sound_magic = tag_index_ref.meta_offset + meta.get_size()
            elif sounds is None:
                return

            for pitches in meta.pitch_ranges.STEPTREE:
                for perm in pitches.permutations.STEPTREE:
                    for b in (perm.samples, perm.mouth_data,
                              perm.subtitle_data):
                        inject_sound_data(other_data, sound_data, b,
                                          sound_magic)

        elif tag_cls == "ustr":
            # might need to grab string data from resource map
            meta_offset = tag_index_ref.meta_offset

            if is_not_indexed:
                return meta
            elif not self.is_resource:
                if loc is None or loc.map_header is None: return
                meta_offset = loc.rsrc_map.data.tags[meta_offset].tag.offset

            string_blocks = meta.strings.STEPTREE

            if len(string_blocks):
                desc = string_blocks[0].get_desc('STEPTREE')
                parser = desc['TYPE'].parser

            try:
                FieldType.force_little()
                for b in string_blocks:
                    parser(desc, None, b, 'STEPTREE', loc_data, meta_offset,
                           b.pointer)
            except Exception:
                print(format_exc())
                raise
            finally:
                FieldType.force_normal()