Example #1
0
 def parse(self, stream: KaitaiStream, context: AST):
     """Read one integer via ``self.reader`` and wrap it in an ``Integer``.

     The byte width recorded on the result is ``self.bitwidth // 8``.
     On a truncated stream, the partially built AST is dumped for
     post-mortem debugging before the EOFError is propagated.
     """
     offset = stream.pos()
     try:
         b = self.reader(stream)
     except EOFError:
         print(context.root.to_dict())
         # Bare ``raise`` re-raises the active exception with its original
         # traceback intact (``raise e`` would restart the traceback here).
         raise
     return Integer(b, offset, self.bitwidth // 8)
Example #2
0
 def parse(self, stream: KaitaiStream, context: AST):
     """Consume and validate the fixed byte sequence ``self.contents``.

     Returns the matched bytes as ``RawBytes``; on a mismatch the AST
     built so far is dumped and a RuntimeError is raised.
     """
     offset = stream.pos()
     c = stream.read_bytes(len(self.contents))
     if c == self.contents:
         return RawBytes(c, offset)
     # Mismatch: dump the parse state for debugging, then fail loudly.
     print(context.root.to_dict())
     raise RuntimeError(
         f"File offset {offset}: Expected {self.contents!r} but instead got {c!r}"
     )
def read_entries(data, start_offset=0):
    # Inspired by: https://stackoverflow.com/questions/49699820/parsing-binary-messages-with-kaitai-struct-python
    """Yield log entries parsed from *data*, starting at *start_offset*.

    Corrupt entries (checksum failure or a Kaitai validation error) are
    skipped by seeking forward to the next 0xAA sync byte.  Progress is
    shown on a tqdm byte counter, flushed every 1000 entries.
    """
    stream = KaitaiStream(BytesIO(data))
    stream.seek(start_offset)

    def _resync():
        # Seek forward to (but do not consume) the next 0xAA sync marker.
        stream.read_bytes_term(0xaa,
                               include_term=False,
                               consume_term=False,
                               eos_error=True)

    last = stream.pos()
    start = ModelSxLog(stream)
    log_entry = start.log_entry
    yield log_entry
    n_entries = 1
    with tqdm(total=stream.size() - start_offset,
              unit='B',
              unit_scale=True,
              desc='Processing log') as pbar:
        while not stream.is_eof():
            if n_entries % 1000 == 0:
                # Flush accumulated byte progress periodically rather than
                # on every entry to keep the bar cheap.
                consumed = stream.pos() - last
                pbar.update(consumed)
                last = stream.pos()
            try:
                log_entry = ModelSxLog.Entry(stream, _root=start._root)
                # An entry is valid iff its byte sum is 0 modulo 256.
                if sum(log_entry.raw_bytes) % 256 != 0:
                    print(
                        f'Checksum error at {stream.pos()}, seeking to the next entry...'
                    )
                    _resync()
                else:
                    yield log_entry
            except ValidationNotEqualError:
                print(
                    f'Encountered an error at {stream.pos()}, probably a corrupt entry, seeking to next one...'
                )
                _resync()
            n_entries += 1
        pbar.update(stream.pos() - last)
        stream.close()
Example #4
0
 def parse(self, stream: KaitaiStream, context: AST) -> RawBytes:
     """Accumulate bytes one at a time until the configured size is met,
     a terminator byte is read, or (when ``size_eos`` is set) the stream
     runs out.  Returns the collected bytes as ``RawBytes``.
     """
     offset = stream.pos()
     size = None if self.size is None else int(self.size.interpret(context))
     buf = bytearray()
     while self.size is None or not self._size_met(buf, size):
         try:
             chunk = stream.read_bytes(1)
         except EOFError:
             if not self.size_eos:
                 raise RuntimeError("Unexpected end of stream")
             # size_eos: end of stream is an acceptable stopping point.
             break
         buf.extend(chunk)
         if self.terminator is not None and chunk == self.terminator:
             break
     return RawBytes(bytes(buf), offset)
Example #5
0
def gse_parse(file,
              outfile,
              matype_crib=0x4200,
              stream=False,
              tcp_hijack=False,
              tcp_hijack_ips=None,
              reliable=True):
    """Parse BBframes from *file* and write recovered IP packets to *outfile* as pcap.

    Parameters
    ----------
    file : path of the raw BBframe capture to read.
    outfile : path of the pcap file to create.
    matype_crib : 16-bit MATYPE value used by ``PureBb`` to locate frames.
    stream : when True, keep polling *file* for freshly appended data
        instead of stopping at the first EOF.
    tcp_hijack, tcp_hijack_ips : accepted for interface compatibility;
        not used inside this function body (TODO confirm against callers).
    reliable : forwarded to ``parse_gse_packet_array``.
    """
    with open(outfile, 'wb') as pcap_file:
        io = KaitaiStream(open(file, 'rb'))
        try:
            pcap_writer = Writer()
            pcap_writer.create_header(pcap_file)
            bbframe_count = 1
            pkt_count = 0
            eof_count = 0
            while True:
                try:
                    # Remember the position so we can rewind after a partial
                    # read at EOF in streaming mode.
                    last_pos = io.pos()
                    # Parse the next BBframe at the current IO position;
                    # raises EOFError when no bytes are left in the file.
                    current_bbframe = PureBb(io, matype_crib=matype_crib).bbframe
                    if eof_count > 0:
                        print("new frames found, continuing...")
                        eof_count = 0
                except EOFError:
                    if not stream:
                        # Static file: EOF is sufficient reason to stop.
                        break
                    if eof_count > 1000600:
                        # Roughly an hour with no fresh bytes (plus a little
                        # more): give up and fall through to the buffer
                        # cleanup below.  (This branch was previously
                        # unreachable — the > 1000000 test ran first — and
                        # never actually exited.)
                        print(
                            "No new data received for at least 1 hour. Exiting gsextract."
                        )
                        break
                    if eof_count > 1000000:
                        # Long drought: back off harder.
                        time.sleep(10)
                    elif eof_count and eof_count % 10000 == 0:
                        # Periodic short back-off while waiting for data.
                        time.sleep(1)
                    eof_count += 1
                    io.seek(last_pos)
                    continue
                except Exception:
                    # Maximize recovery on stream parse errors: skip this
                    # frame and keep trying.  (Narrowed from a bare
                    # ``except:`` which also swallowed KeyboardInterrupt
                    # and SystemExit.)
                    continue

                bbframe_count += 1
                if hasattr(current_bbframe, 'corrupt_data'):
                    # Record stats on corrupt BBframes, then move on.
                    counters['broken_bbframes'] += 1
                    print("BBFrame", bbframe_count,
                          " contains corrupt data, (MA2:",
                          current_bbframe.bbheader.matype_2,
                          ") attempting to recover")
                else:
                    # Valid BBframe: extract GSE packets and try to
                    # reassemble them into IP payloads.
                    gse_packets = get_gse_from_bbdata(current_bbframe.data_field)
                    raw_packets = parse_gse_packet_array(gse_packets,
                                                         bbframe_count,
                                                         reliable=reliable)

                    # Any recovered IP packets go straight to the pcap file.
                    if len(raw_packets) > 0:
                        pcap_writer.write(raw_packets, pcap_file)
                        pkt_count += len(raw_packets)

                        # Periodic progress stats.
                        if pkt_count % 10000 == 0:
                            print(pkt_count, "packets parsed")
                            print(counters)

            # Flush any partially filled reassembly buffers left over at
            # the end of the recording.
            raw_packets = parse_gse_packet_array([],
                                                 0,
                                                 cleanup=True,
                                                 reliable=reliable)
            if len(raw_packets) > 0:
                pcap_writer.write(raw_packets, pcap_file)

            # Print some basic stats before finishing.
            print(counters)
        finally:
            # The input stream was previously leaked; close it explicitly.
            io.close()