Example #1
def collect_mono_scripts(serializer, stream: FileStream):
    MONO_SCRIPT_TYPE_ID = -1
    for n, t in enumerate(serializer.type_trees):
        if t.persistent_type_id == serialize.MONO_SCRIPT_PERSISTENT_ID:
            MONO_SCRIPT_TYPE_ID = n
            break
    if MONO_SCRIPT_TYPE_ID == -1: return
    type_tree = serializer.type_trees[MONO_SCRIPT_TYPE_ID]
    for o in serializer.objects:
        if o.type_id == MONO_SCRIPT_TYPE_ID:
            stream.seek(serializer.node.offset +
                        serializer.header.data_offset + o.byte_start)
            script = serializer.deserialize(
                fs=stream, meta_type=type_tree.type_dict.get(0))
            type_name = script.get('m_ClassName')
            namespace = script.get('m_Namespace')
            assembly = script.get('m_AssemblyName')
            # encode mono scripts to cache storage
            if o.local_identifier_in_file not in mono_scripts:
                mono_scripts_stream.write(
                    struct.pack('q', o.local_identifier_in_file))
                mono_scripts_stream.write(struct.pack('i', len(type_name)))
                mono_scripts_stream.write(type_name)
                mono_scripts_stream.write(struct.pack('i', len(namespace)))
                mono_scripts_stream.write(namespace)
                mono_scripts_stream.write(struct.pack('i', len(assembly)))
                mono_scripts_stream.write(assembly)
                mono_scripts[o.local_identifier_in_file] = (
                    type_name, namespace, assembly)
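The function above relies on module-level state that the snippet does not show. A minimal sketch of the assumed setup, with hypothetical names matching the usage above:

import io
import struct

# local_identifier_in_file -> (class name, namespace, assembly)
mono_scripts = {}
# packed cache written by collect_mono_scripts (hypothetical in-memory sink)
mono_scripts_stream = io.BytesIO()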
Example #2
 def decode_type_tree(self, fs: FileStream):
     type_index = -1
     node_count = fs.read_uint32()
     char_count = fs.read_uint32()
     for _ in range(node_count):
         node = TypeField()
         node.decode(fs)
         if type_index >= 0: assert node.index == type_index + 1
         self.nodes.append(node)
         type_index += 1
     if char_count > 0:
         # decode the string buffer that follows the nodes: offset -> string
         string_offset = fs.position
         string_size = 0
         while string_size + 1 < char_count:
             offset = fs.position - string_offset
             position = fs.position
             self.strings[offset] = fs.read_string()
             string_size += fs.position - position
         assert fs.position - string_offset == char_count
     for node in self.nodes:  # type: TypeField
         node.name = get_caculate_string(offset=node.name_str_offset,
                                         strings=self.strings)
         node.type = get_caculate_string(offset=node.type_str_offset,
                                         strings=self.strings)
     self.name = self.nodes[0].type
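get_caculate_string (the name is spelled this way in the source) is not shown in the snippet. A minimal sketch of what it presumably does, assuming plain offsets index the local string buffer decoded above while offsets with the high bit set reference a shared built-in table (a common Unity convention; the table here is a hypothetical excerpt):

COMMON_STRINGS = {0: 'AABB', 5: 'AnimationClip'}  # hypothetical excerpt

def get_caculate_string(offset, strings):
    if offset & 0x80000000:
        # high bit set: index into the shared built-in table
        return COMMON_STRINGS.get(offset & 0x7FFFFFFF, '')
    return strings.get(offset, '')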
Example #3
def main():
    arguments = argparse.ArgumentParser()
    arguments.add_argument('--file', '-f', nargs='+', required=True)
    arguments.add_argument('--command',
                           '-c',
                           choices=Commands.get_option_choices(),
                           default=Commands.dump)
    arguments.add_argument('--debug', '-d', action='store_true')
    arguments.add_argument('--types', '-t', nargs='+', type=int)
    arguments.add_argument('--dump-mono-scripts', '-dms', action='store_true')
    options = arguments.parse_args(sys.argv[1:])
    if options.dump_mono_scripts:
        mono_script_keys = list(mono_scripts.keys())
        mono_script_keys.sort()
        for identifier in mono_script_keys:
            class_name, namespace, assembly = [
                b2s(x) for x in mono_scripts.get(identifier)
            ]
            print('\033[36m{} \033[33m{}::\033[4m{}\033[0m \033[2m{}\033[0m'.
                  format(identifier, namespace if namespace else 'global',
                         class_name, assembly))

    for file_path in options.file:
        print('>>>', file_path)
        archive = UnityArchiveFile(debug=options.debug)
        # Try to decode as a Unity archive; fall back to a bare serialized file
        try:
            stream = archive.decode(file_path=file_path)
            node = archive.direcory_info.nodes[0]
        except Exception:
            stream = FileStream(file_path=file_path)
            node = FileNode()
            node.size = stream.length
        if archive.direcory_info.nodes:
            for node in archive.direcory_info.nodes:
                if node.flags == NodeFlags.SerializedFile:
                    print('[+] {} {:,}'.format(node.path, node.size))
                    stream.endian = '>'
                    serializer = serialize.SerializedFile(debug=options.debug,
                                                          node=node)
                    serializer.decode(stream)
                    collect_mono_scripts(serializer, stream)
                    processs(parameters=locals())
        else:
            serializer = serialize.SerializedFile(debug=options.debug,
                                                  node=node)
            serializer.decode(stream)
            collect_mono_scripts(serializer, stream)
            processs(parameters=locals())
Example #4
def main():
    import argparse, os, sys, uuid
    from os import path as p
    arguments = argparse.ArgumentParser()
    arguments.add_argument('--file', '-f', required=True)
    arguments.add_argument('--output', '-o', default='__types')
    options = arguments.parse_args(sys.argv[1:])
    output = p.abspath(options.output)
    if not p.exists(output): os.makedirs(output)
    MONO_BEHAVIOUR_PERSISTENT_ID = 114

    stream = FileStream(file_path=options.file)
    stream.endian = '<'
    while stream.bytes_available:
        persistent_id = stream.read_uint32()
        script_hash = b'\x00' * 16  # null placeholder when no script hash follows
        if persistent_id == MONO_BEHAVIOUR_PERSISTENT_ID:
            script_hash = stream.read(16)
        type_hash = stream.read(16)
        size = stream.read_uint32()
        offset = stream.position
        print(persistent_id, uuid.UUID(bytes=script_hash),
              uuid.UUID(bytes=type_hash), size)
        type_tree = MetadataTypeTree(True)
        type_tree.persistent_type_id = persistent_id
        type_tree.type_hash = type_hash
        type_tree.mono_hash = script_hash
        type_tree.decode_type_tree(fs=stream)
        print(type_tree)
        assert stream.position == offset + size
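For reference, a record that this loop accepts could be produced as follows; a sketch assuming little-endian fields, with write_record as a hypothetical helper:

import struct

def write_record(out, persistent_id, type_hash, tree_blob, script_hash=None):
    # field order mirrors the reader above
    out.write(struct.pack('<I', persistent_id))
    if persistent_id == 114:  # MonoBehaviour records carry an extra script hash
        out.write(script_hash or b'\x00' * 16)
    out.write(type_hash)                          # 16-byte type hash
    out.write(struct.pack('<I', len(tree_blob)))  # size of the encoded type tree
    out.write(tree_blob)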
Example #5
 def decode(self, fs: FileStream):
     self.version = fs.read_sint16()
     self.level = fs.read_uint8()
     self.is_array = fs.read_boolean()
     self.type_str_offset = fs.read_uint32()
     self.name_str_offset = fs.read_uint32()
     self.byte_size = fs.read_sint32()
     self.index = fs.read_sint32()
     self.meta_flags = fs.read_uint32()
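Each field header is a fixed 24-byte record, so the same decode can be expressed with struct; a sketch assuming little-endian data and that read_boolean consumes a single byte:

import struct

def decode_type_field(data: bytes, offset: int = 0):
    # h=sint16, B=uint8 (level, is_array), I=uint32, i=sint32 -- 24 bytes total
    return struct.unpack_from('<hBBIIiiI', data, offset)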
Example #6
 def dump(self, fs: FileStream):
     for o in self.objects:
         fs.seek(self.node.offset + self.header.data_offset + o.byte_start)
         type_tree = self.type_trees[o.type_id]
         if not type_tree.type_dict: continue
         try:
             self.print(vars(type_tree.type_dict.get(0)))
         except Exception:
             continue
         offset = fs.position
         try:
             data = self.deserialize(fs=fs,
                                     meta_type=type_tree.type_dict.get(0))
             assert fs.position - offset == o.byte_size
             self.print(data)
             self.print()
         except Exception:
             traceback.print_exc()
     self.print('position={} remain={}'.format(fs.position,
                                               fs.bytes_available))
Example #7
 def decode(self, fs: FileStream):
     offset = fs.position
     self.signature = fs.read_string()
     assert self.signature == UnitySignature.UnityFS
     self.version = fs.read_sint32()
     assert self.version != 5
     self.unity_web_bundle_version = fs.read_string()
     self.unity_web_minimum_revision = fs.read_string()
     self.size = fs.read_uint64()
     self.compressed_blocks_info_size = fs.read_uint32()
     self.uncompressed_blocks_info_size = fs.read_uint32()
     assert self.compressed_blocks_info_size < self.uncompressed_blocks_info_size, vars(self)
     self.flags = fs.read_uint32()
     self.header_size = fs.position - offset
Example #8
def main(filename=None, offset_frames=0, refrac_init=None):

    last_frame = None
    snow_confidence = 0

    # Set up view and mask output windows
    cv.namedWindow('view', cv.WINDOW_NORMAL)
    cv.resizeWindow('view', c.FrameSize.WIDTH.value, c.FrameSize.HEIGHT.value)
    font = cv.FONT_HERSHEY_SIMPLEX

    if constant.DEBUG:
        cv.namedWindow('mask', cv.WINDOW_NORMAL)
        cv.resizeWindow('mask', c.FrameSize.WIDTH.value,
                        c.FrameSize.HEIGHT.value)

    # Initialize detector
    detector = SnowDetector()

    # Initialize Snowtification
    snowtify = Snowtification(refrac_init)

    # Read frames from file if provided, otherwise read from live stream
    if filename is not None:
        stream = FileStream(filename, offset=offset_frames)
    else:
        stream = ArmoryCamStream()
        if constant.STREAMING:
            twitch = OutputStream()

    try:
        with stream:
            stream = resize(stream, scale=.25)
            stream = blur(stream, kernel_size=3)

            # Use custom built iterator in ArmoryCamStream object to keep grabbing
            # frames from the video
            frame_hop = 0
            for frame in stream:

                # Run detection only every few frames to reduce CPU load
                if frame_hop > 3:
                    frame_hop = 0
                    snow_confidence = detector.detect(frame)

                    # If we exceed impulse decay we've detected snow. Log it.
                    if snow_confidence > constant.IMPULSE_DECAY + 1:
                        snowtify.log_snow_event()

                displayed = cv.drawKeypoints(
                    frame, detector._debug_keypoints, np.array([]),
                    (0, 0, 255), cv.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)

                cv.putText(displayed, "Detects: " + str(snow_confidence),
                           (40, 30), font, 1, (0, 0, 255), 2, cv.LINE_AA)

                cv.imshow('view', displayed)
                if constant.STREAMING:
                    twitch.send_video_frame(displayed)
                if constant.DEBUG:
                    cv.imshow('mask', detector._debug_mask)
                if cv.waitKey(30) & 0xff == 27:
                    break

                frame_hop += 1

    except exceptions.StreamError as e:
        print(e)
Example #9
    def decode(self, fs: FileStream):
        offset = fs.position
        self.persistent_type_id = fs.read_sint32()
        self.is_stripped = fs.read_boolean()
        self.script_index = fs.read_sint16()
        if self.persistent_type_id == MONO_BEHAVIOUR_PERSISTENT_ID:
            self.mono_hash = fs.read(16)
        self.type_hash = fs.read(16)

        self.nodes = []
        self.strings = {}
        if self.type_tree_enabled:
            self.decode_type_tree(fs)
        else:
            cache_path = self.get_cache_path()
            if p.exists(cache_path):
                tmp = FileStream(file_path=cache_path)
                tmp.endian = '<'
                persistent_type_id = tmp.read_sint32()
                assert persistent_type_id == self.persistent_type_id, '{} != {}'.format(
                    persistent_type_id, self.persistent_type_id)
                # skip the fields already read from fs; the cache has the same layout
                tmp.seek(fs.position - offset)
                self.decode_type_tree(fs=tmp)
Example #10
 def decode(self, fs: FileStream):
     self.uncompressed_data_hash = fs.read(16)
     for _ in range(fs.read_uint32()):
         block = StorageBlock()
         block.decode(fs)
         self.blocks.append(block)
Example #11
    def decode(self, fs: FileStream):
        fs.seek(self.node.offset)
        header = self.header
        header.metadata_size = fs.read_sint32()
        header.file_size = fs.read_sint32()
        assert self.node.size == header.file_size, '{} != {}'.format(
            self.node.size, header.file_size)
        header.version = fs.read_sint32()
        header.data_offset = fs.read_sint32()
        header.endianess = fs.read_boolean()
        fs.read(3)  # reserved bytes
        fs.endian = '>' if header.endianess else '<'
        self.print(vars(header))
        self.version = fs.read_string()
        self.platform = fs.read_uint32()
        self.type_tree_enabled = fs.read_boolean()
        self.print('version={} platform={} type_tree_enabled={}'.format(
            self.version, self.platform, self.type_tree_enabled))
        self.type_trees = []
        type_count = fs.read_uint32()
        self.print('type', type_count)
        for _ in range(type_count):
            offset = fs.position
            type_tree = MetadataTypeTree(
                type_tree_enabled=self.type_tree_enabled)
            type_tree.decode(fs)
            if self.type_tree_enabled:
                position = fs.position
                fs.seek(offset)
                type_data = fs.read(position - offset)
                with open(type_tree.get_cache_path(auto_create=True),
                          'wb') as fp:
                    fp.write(type_data)
            self.type_trees.append(type_tree)
            self.register_type_tree(type_tree=type_tree)
            self.print(type_tree)

        object_count = fs.read_sint32()
        self.print('object', object_count)
        for _ in range(object_count):
            fs.align(4)
            obj = ObjectInfo()
            obj.decode(fs)
            type_tree = self.type_trees[obj.type_id]
            obj.name = type_tree.name
            self.objects.append(obj)
            self.print(vars(obj))

        script_type_count = fs.read_sint32()
        self.print('typeinfo', script_type_count)
        for _ in range(script_type_count):
            st = ScriptTypeInfo()
            st.decode(fs)
            self.typeinfos.append(st)
            self.print(vars(st))

        external_count = fs.read_sint32()
        self.print('external', external_count)
        for _ in range(external_count):
            ext = ExternalInfo()
            ext.decode(fs)
            self.externals.append(ext)
            self.print(ext)
        fs.read_string()  # trailing string, value unused here
Example #12
 def deserialize(self, fs: FileStream, meta_type: MetadataType):
     result = {}
     if not meta_type: return result
     type_map = meta_type.type_tree.type_dict
     for node in meta_type.fields:
         if node.is_array:
             element_type = meta_type.type_tree.nodes[node.index + 2]
             element_count = fs.read_sint32()
             array = result[node.name] = {'size': element_count}
             if element_count == 0: continue
             if element_type.byte_size == 1:
                 # element_count > 0 is guaranteed by the continue above
                 array['data'] = fs.read(element_count)
                 fs.align()
             else:
                 items = []
                 if element_type.type in self.__premitive_decoders:
                     decode = self.__premitive_decoders.get(
                         element_type.type)
                     for _ in range(element_count):
                         items.append(decode(fs))
                 elif element_type.type == 'string':
                     for _ in range(element_count):
                         size = fs.read_sint32()
                         items.append(fs.read(size) if size > 0 else b'')
                         fs.align()
                 else:
                     for _ in range(element_count):
                         it = self.deserialize(fs,
                                               meta_type=type_map.get(
                                                   element_type.index))
                         items.append(it)
                     fs.align()
                 array['data'] = items
         elif node.type == 'string':
             size = fs.read_sint32()
             result[node.name] = fs.read(size) if size > 0 else b''
             fs.align()
         elif node.type in self.__premitive_decoders:
             result[node.name] = self.__premitive_decoders.get(
                 node.type)(fs)
             if node.meta_flags & 0x4000 != 0: fs.align()
         elif node.byte_size == 0: continue
         else:
             result[node.name] = self.deserialize(fs,
                                                  meta_type=type_map.get(
                                                      node.index))
     return result
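The deserializer leans on fs.align() for the 4-byte padding rules after byte arrays, strings, and flagged primitives. The method is not shown in these snippets; a minimal sketch of the assumed semantics:

def align(self, size: int = 4):
    # advance the read position to the next multiple of size (assumed behavior)
    remainder = self.position % size
    if remainder:
        self.seek(self.position + size - remainder)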
Example #13
 def decode(self, fs: FileStream):
     fs.read_string()  # leading string, value unused here
     self.guid = fs.read(16)
     self.type = fs.read_sint32()
     self.path = fs.read_string()
Example #14
 def decode(self, fs: FileStream):
     self.local_serialized_file_index = fs.read_sint32()
     fs.align(4)
     self.local_identifier_in_file = fs.read_sint64()
Example #15
 def decode(self, fs: FileStream):
     self.local_identifier_in_file = fs.read_sint64()
     self.byte_start = fs.read_uint32()
     self.byte_size = fs.read_uint32()
     self.type_id = fs.read_uint32()
Example #16
 def decode(self, fs: FileStream):
     self.uncompressed_size = fs.read_uint32()
     self.compressed_size = fs.read_uint32()
     self.flags = fs.read_uint16()
Example #17
def loadFile(filename, typeResolver=None):
    return loadStream(FileStream(filename), typeResolver)
Example #18
 def decode(self, fs: FileStream):
     for n in range(fs.read_uint32()):
         node = FileNode()
         node.decode(fs)
         node.index = n
         self.nodes.append(node)
Example #19
 def decode(self, fs: FileStream):
     self.offset = fs.read_uint64()
     self.size = fs.read_uint64()
     self.flags = fs.read_uint32()
     self.path = fs.read_string()
Example #20
 def decode(self, file_path: str):
     fs = FileStream()
     fs.open(file_path)
     self.header.decode(fs)
     blocks_info_offset = self.header.get_blocks_info_offset()
     self.print(vars(self.header), blocks_info_offset, fs.position,
                self.header.compression_type)
     fs.seek(blocks_info_offset)
     compression_type = self.header.compression_type
     if compression_type != CompressionType.NONE:
         compressed_data = fs.read(self.header.compressed_blocks_info_size)
         assert len(
             compressed_data) == self.header.compressed_blocks_info_size
         uncompressed_data = lz4.block.decompress(
             compressed_data, self.header.uncompressed_blocks_info_size)
         temp = FileStream(data=uncompressed_data)
         self.read_blocks_and_directory(temp)
     else:
         assert self.header.compressed_blocks_info_size == self.header.uncompressed_blocks_info_size
         self.read_blocks_and_directory(fs)
     import io
     buffer = io.BytesIO()
     for block in self.blocks_info.blocks:
         if block.compression_type != CompressionType.NONE:
             compressed_data = fs.read(block.compressed_size)
             uncompressed_data = lz4.block.decompress(
                 compressed_data, block.uncompressed_size)
             assert len(uncompressed_data) == block.uncompressed_size, uncompressed_data
             buffer.write(uncompressed_data)
         else:
             uncompressed_data = fs.read(block.uncompressed_size)
             buffer.write(uncompressed_data)
     assert fs.position == fs.length
     buffer.seek(0)
     # debug: dump the reassembled payload to disk
     with open('data.bin', 'wb') as fp:
         fp.write(buffer.read())
         buffer.seek(0)
     return FileStream(data=buffer.read())
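Putting it together with the usage in Example #3, a hypothetical end-to-end call of the decoder above:

archive = UnityArchiveFile(debug=False)
stream = archive.decode(file_path='assets.unity3d')  # hypothetical bundle path
for node in archive.direcory_info.nodes:  # attribute name as spelled in the source
    print(node.path, node.size)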