def value(self):
    """Lazily parse the font table this directory entry points at.

    Seeks the root stream to `self.offset`, reads `self.length` bytes and,
    for known table tags, parses them into the matching typed structure;
    unknown tags are kept as raw bytes. The result is cached in `_m_value`.
    """
    if hasattr(self, '_m_value'):
        return self._m_value

    # Table tag -> name of the parser class on the root type.
    parsers = {
        u"head": "Head",
        u"cvt ": "Cvt",
        u"prep": "Prep",
        u"kern": "Kern",
        u"hhea": "Hhea",
        u"post": "Post",
        u"OS/2": "Os2",
        u"name": "Name",
        u"maxp": "Maxp",
        u"glyf": "Glyf",
        u"fpgm": "Fpgm",
        u"cmap": "Cmap",
    }

    root_io = self._root._io
    saved_pos = root_io.pos()
    root_io.seek(self.offset)
    parser_name = parsers.get(self.tag)
    if parser_name is None:
        # Unknown table: keep the raw bytes.
        self._m_value = root_io.read_bytes(self.length)
    else:
        self._raw__m_value = root_io.read_bytes(self.length)
        # Parse from a private substream; do NOT re-bind the root stream
        # variable (the original re-bound `io`, so the final seek restored
        # the position of the substream instead of the root stream).
        sub_io = KaitaiStream(BytesIO(self._raw__m_value))
        self._m_value = getattr(self._root, parser_name)(sub_io, self, self._root)
    root_io.seek(saved_pos)
    return self._m_value
def get_tstamp_at(data, offset):
    """Return the UTC timestamp of the log entry at `offset` in `data`.

    The first four bytes of the entry body hold a big-endian uint32 UNIX
    timestamp. Returns a *naive* `datetime` expressed in UTC, matching the
    original `datetime.utcfromtimestamp` behaviour.
    """
    from datetime import timezone  # local import: module header not in view

    stream = KaitaiStream(BytesIO(data))
    stream.seek(offset)
    entry = ModelSxLog(stream).log_entry
    ts = struct.unpack('>L', entry.body.value[:4])[0]
    # datetime.utcfromtimestamp() is deprecated since Python 3.12; build an
    # aware UTC datetime, then drop tzinfo to keep the naive return value.
    return datetime.fromtimestamp(ts, tz=timezone.utc).replace(tzinfo=None)
        def as_dir(self):
            """Lazily parse the data at `self.offset` as a DirInode.

            Reads `self.size` bytes from the root stream and caches the
            parsed structure in `_m_as_dir`.
            """
            if hasattr(self, '_m_as_dir'):
                return self._m_as_dir

            root_io = self._root._io
            saved_pos = root_io.pos()
            root_io.seek(self.offset)
            self._raw__m_as_dir = root_io.read_bytes(self.size)
            # Parse from a private substream; the original re-bound `io` to
            # this substream, so the final seek restored the WRONG stream and
            # left the root stream past the inode data.
            sub_io = KaitaiStream(BytesIO(self._raw__m_as_dir))
            self._m_as_dir = self._root.DirInode(sub_io, self, self._root)
            root_io.seek(saved_pos)
            return self._m_as_dir
        def body(self):
            """Lazily parse the entry body at `self.body_ofs`.

            Reads `self.in_archive_length` bytes from the root stream and
            caches the parsed Body structure in `_m_body`.
            """
            if hasattr(self, '_m_body'):
                return self._m_body

            root_io = self._root._io
            saved_pos = root_io.pos()
            root_io.seek(self.body_ofs)
            self._raw__m_body = root_io.read_bytes(self.in_archive_length)
            # Private substream for parsing; the original re-bound `io` here,
            # so the final seek restored the substream instead of the root
            # stream position.
            sub_io = KaitaiStream(BytesIO(self._raw__m_body))
            self._m_body = self._root.Body(sub_io, self, self._root)
            root_io.seek(saved_pos)
            return self._m_body
# Example #5
        def data(self):
            """Lazily parse the sub-section at `self.ofs_data`.

            Reads `self.len_data` bytes from the root stream and caches the
            parsed SubSection (constructed with leading argument 16) in
            `_m_data`.
            """
            if hasattr(self, '_m_data'):
                return self._m_data

            root_io = self._root._io
            saved_pos = root_io.pos()
            root_io.seek(self.ofs_data)
            self._raw__m_data = root_io.read_bytes(self.len_data)
            # Private substream; the original re-bound `io`, so the final
            # seek restored the substream rather than the root stream.
            sub_io = KaitaiStream(BytesIO(self._raw__m_data))
            self._m_data = self._root.SubSection(16, sub_io, self, self._root)
            root_io.seek(saved_pos)
            return self._m_data
# Example #6
        def target(self):
            """Lazily parse the block this value points at as an Obj.

            Seeks to `self.val * block_size`, reads one block from the root
            stream and caches the parsed object in `_m_target`.
            """
            if hasattr(self, '_m_target'):
                return self._m_target

            root_io = self._root._io
            saved_pos = root_io.pos()
            root_io.seek((self.val * self._root.block_size))
            self._raw__m_target = root_io.read_bytes(self._root.block_size)
            # Private substream; the original re-bound `io`, so the final
            # seek restored the substream rather than the root stream.
            sub_io = KaitaiStream(BytesIO(self._raw__m_target))
            self._m_target = self._root.Obj(sub_io, self, self._root)
            root_io.seek(saved_pos)
            return self._m_target
# Example #7
        def extent_as_dir(self):
            """Lazily parse this record's extent as directory entries.

            Only attempted when bit 1 of `file_flags` is set (directory
            flag); otherwise returns None. The parsed result is cached in
            `_m_extent_as_dir`.
            """
            if hasattr(self, '_m_extent_as_dir'):
                return self._m_extent_as_dir

            if (self.file_flags & 2) != 0:
                root_io = self._root._io
                saved_pos = root_io.pos()
                root_io.seek((self.lba_extent.le * self._root.sector_size))
                self._raw__m_extent_as_dir = root_io.read_bytes(self.size_extent.le)
                # Private substream; the original re-bound `io`, so the final
                # seek restored the substream rather than the root stream.
                sub_io = KaitaiStream(BytesIO(self._raw__m_extent_as_dir))
                self._m_extent_as_dir = self._root.DirEntries(sub_io, self, self._root)
                root_io.seek(saved_pos)

            # Attribute is unset for non-directory records -> None.
            return getattr(self, '_m_extent_as_dir', None)
# Example #8
        def contents(self):
            """Lazily parse this WAD lump's contents.

            Seeks to `self.offset`, reads `self.size` bytes and parses known
            lump names into their typed structures; unknown lumps are kept
            as raw bytes. The result is cached in `_m_contents`.
            """
            if hasattr(self, '_m_contents'):
                return self._m_contents

            # Lump name -> name of the parser class on the root type.
            # (TEXTURE1 and TEXTURE2 share one structure.)
            parsers = {
                u"SECTORS": "Sectors",
                u"TEXTURE1": "Texture12",
                u"TEXTURE2": "Texture12",
                u"VERTEXES": "Vertexes",
                u"BLOCKMAP": "Blockmap",
                u"PNAMES": "Pnames",
                u"THINGS": "Things",
                u"LINEDEFS": "Linedefs",
                u"SIDEDEFS": "Sidedefs",
            }

            root_io = self._root._io
            saved_pos = root_io.pos()
            root_io.seek(self.offset)
            parser_name = parsers.get(self.name)
            if parser_name is None:
                # Unknown lump: keep the raw bytes.
                self._m_contents = root_io.read_bytes(self.size)
            else:
                self._raw__m_contents = root_io.read_bytes(self.size)
                # Private substream; the original re-bound `io` to it, so the
                # final seek restored the substream instead of the root
                # stream for every known lump type.
                sub_io = KaitaiStream(BytesIO(self._raw__m_contents))
                self._m_contents = getattr(self._root, parser_name)(sub_io, self, self._root)
            root_io.seek(saved_pos)
            return self._m_contents
# Example #9
        def entries(self):
            """Lazily parse the partition-entry array.

            Seeks to `entries_start * sector_size` and reads
            `entries_count` records of `entries_size` bytes each, parsing
            every record as a PartitionEntry. Cached in `_m_entries`.
            """
            if hasattr(self, '_m_entries'):
                return self._m_entries

            root_io = self._root._io
            saved_pos = root_io.pos()
            root_io.seek((self.entries_start * self._root.sector_size))
            count = self.entries_count
            self._raw__m_entries = [None] * count
            self._m_entries = [None] * count
            for i in range(count):
                # BUG FIX: the original re-bound `io` to the per-entry
                # substream, so every iteration after the first read from the
                # PREVIOUS entry's substream instead of the root stream.
                self._raw__m_entries[i] = root_io.read_bytes(self.entries_size)
                sub_io = KaitaiStream(BytesIO(self._raw__m_entries[i]))
                self._m_entries[i] = self._root.PartitionEntry(sub_io, self, self._root)

            root_io.seek(saved_pos)
            return self._m_entries
# Example #10
    def partition_lookup(self):
        """Every partition entry contains the number of partition entries.

        We parse the first entry, to know how many to parse, including the
        first one. No logic is given what to do if other entries have a
        different number. Cached in `_m_partition_lookup`.
        """
        if hasattr(self, '_m_partition_lookup'):
            return self._m_partition_lookup

        root_io = self._root._io
        saved_pos = root_io.pos()
        # First entry starts one sector into the image.
        root_io.seek(self._root.sector_size)
        self._raw__m_partition_lookup = root_io.read_bytes(self.sector_size)
        # Private substream; the original re-bound `io`, so the final seek
        # restored the substream rather than the root stream.
        sub_io = KaitaiStream(BytesIO(self._raw__m_partition_lookup))
        self._m_partition_lookup = self._root.PartitionEntry(
            sub_io, self, self._root)
        root_io.seek(saved_pos)
        return self._m_partition_lookup
def read_entries(data, start_offset=0):
    """Yield log entries parsed from `data`, starting at `start_offset`.

    Entries failing the mod-256 checksum, or raising a validation error,
    are skipped by scanning forward to the next 0xAA marker byte. Progress
    is reported via tqdm roughly every 1000 entries.
    """
    # Inspired by: https://stackoverflow.com/questions/49699820/parsing-binary-messages-with-kaitai-struct-python
    stream = KaitaiStream(BytesIO(data))
    stream.seek(start_offset)
    last = stream.pos()
    start = ModelSxLog(stream)
    yield start.log_entry
    n_entries = 1
    with tqdm(total=stream.size() - start_offset,
              unit='B',
              unit_scale=True,
              desc='Processing log') as pbar:
        while not stream.is_eof():
            # Update the progress bar in batches to keep overhead low.
            if n_entries % 1000 == 0:
                pbar.update(stream.pos() - last)
                last = stream.pos()
            try:
                log_entry = ModelSxLog.Entry(stream, _root=start._root)
                if sum(log_entry.raw_bytes) % 256 != 0:
                    print(
                        f'Checksum error at {stream.pos()}, seeking to the next entry...'
                    )
                    # Resynchronise on the next 0xAA marker byte.
                    stream.read_bytes_term(0xaa,
                                           include_term=False,
                                           consume_term=False,
                                           eos_error=True)
                else:
                    yield log_entry
            except ValidationNotEqualError:
                print(
                    f'Encountered an error at {stream.pos()}, probably a corrupt entry, seeking to next one...'
                )
                stream.read_bytes_term(0xaa,
                                       include_term=False,
                                       consume_term=False,
                                       eos_error=True)
            n_entries += 1
        pbar.update(stream.pos() - last)
        stream.close()
# Example #12
    def partition_entries(self):
        """Lazily parse every partition entry.

        Reads `partition_lookup.number_of_partitions` records of
        `sector_size` bytes each, starting one sector into the image, and
        parses each as a PartitionEntry. Cached in `_m_partition_entries`.
        """
        if hasattr(self, '_m_partition_entries'):
            return self._m_partition_entries

        root_io = self._root._io
        saved_pos = root_io.pos()
        root_io.seek(self._root.sector_size)
        count = self._root.partition_lookup.number_of_partitions
        self._raw__m_partition_entries = [None] * count
        self._m_partition_entries = [None] * count
        for i in range(count):
            # BUG FIX: the original re-bound `io` to the per-entry substream,
            # so every iteration after the first read from the PREVIOUS
            # entry's substream instead of the root stream.
            self._raw__m_partition_entries[i] = root_io.read_bytes(self.sector_size)
            sub_io = KaitaiStream(BytesIO(self._raw__m_partition_entries[i]))
            self._m_partition_entries[i] = self._root.PartitionEntry(
                sub_io, self, self._root)

        root_io.seek(saved_pos)
        return self._m_partition_entries
# Example #13
def gse_parse(file,
              outfile,
              matype_crib=int(0x4200),
              stream=False,
              tcp_hijack=False,
              tcp_hijack_ips=None,
              reliable=True):
    """Extract GSE-encapsulated IP packets from a BBframe capture into a pcap.

    Iterates BBframes found in `file`, extracts GSE packets from valid
    frames, reassembles IP payloads and writes them to `outfile`. With
    `stream=True` the input is treated as a growing file: EOF triggers
    progressively longer waits, and after ~1 hour without fresh bytes the
    function gives up, flushes buffers and exits.
    """
    with open(outfile, 'wb') as pcap_file:
        io = KaitaiStream(open(file, 'rb'))
        pcap_writer = Writer()
        pcap_writer.create_header(pcap_file)
        bbframe_count = 1
        pkt_count = 0
        eof_count = 0
        while True:
            try:
                # we record the last io position for kaitai so we can recover from EOF errors in streaming mode
                last_pos = io.pos()
                # prints the first BBframe we find at the current IO position
                # this throws EOF if there's no bytes left in the file
                current_bbframe = PureBb(io, matype_crib=matype_crib).bbframe
                if eof_count > 0:
                    print("new frames found, continuing...")
                    eof_count = 0
            except EOFError:
                if not stream:
                    # if we're reading from a static file EOFError is sufficient reason to stop
                    break
                elif eof_count > 1000600:
                    # after an hour of no fresh bytes (plus a little bit more), gsextract exits and cleans up buffers
                    # this normally means something has broken in the satellite hardware side
                    # BUG FIX: this branch used to sit below the `> 1000000`
                    # check and was unreachable; it also never left the loop.
                    print(
                        "No new data received for at least 1 hour. Exiting gsextract."
                    )
                    break
                elif eof_count == 0:
                    pass
                # otherwise we will wait progressively longer whenever there isn't data in the streamed file
                elif eof_count % 10000 == 0:
                    time.sleep(1)
                elif eof_count > 1000000:
                    time.sleep(10)
                eof_count += 1
                io.seek(last_pos)
                continue
            except Exception:
                # we want to maximize recovery in the case of stream parsing errors so we will just keep trying
                # (Exception, not bare except: let KeyboardInterrupt/SystemExit through)
                continue

            bbframe_count += 1
            # record stats on corrupt BBframes and then move to the next frame
            if hasattr(current_bbframe, 'corrupt_data'):
                counters['broken_bbframes'] += 1
                print("BBFrame", bbframe_count,
                      " contains corrupt data, (MA2:",
                      current_bbframe.bbheader.matype_2,
                      ") attempting to recover")
            else:
                # for valid BBFrames
                # next extract gse packets from the bbframe and try to make them into IP payloads
                gse_packets = get_gse_from_bbdata(current_bbframe.data_field)
                raw_packets = parse_gse_packet_array(gse_packets,
                                                     bbframe_count,
                                                     reliable=reliable)

                # if we get any IP packets, write them to a pcap file
                if len(raw_packets) > 0:
                    pcap_writer.write(raw_packets, pcap_file)
                    pkt_count += len(raw_packets)

                    # print some  progress stats
                    if pkt_count % 10000 == 0:
                        print(pkt_count, "packets parsed")
                        print(counters)

        # Clean up any lingering fragments when GSExtract closes
        # these would be partially filled buffers from end of recording
        raw_packets = parse_gse_packet_array([],
                                             0,
                                             cleanup=True,
                                             reliable=reliable)
        if len(raw_packets) > 0:
            pcap_writer.write(raw_packets, pcap_file)

        # Print some basic stats before finishing
        print(counters)