def _packets_from_fd(cls, fd, previous_data=b'', packet_count=None, wait_for_more_data=True, batch_size=4096):
    """Read packets from a file-like object containing TShark XML output.

    Returns a generator of parsed packets.

    :param fd: A file-like object containing a TShark XML
    :param previous_data: Any data to put before the file.
    :param packet_count: A maximum amount of packets to stop after.
    :param wait_for_more_data: Whether to wait for more data or stop when none
        is available (i.e. when the fd is a standard file)
    :param batch_size: Number of bytes to read from ``fd`` per iteration.
    """
    data = previous_data
    packets_captured = 0
    while True:
        # Read data until we get a packet, and yield it.
        new_data = fd.read(batch_size)
        data += new_data
        packet, data = cls._extract_packet_from_data(data)
        if packet:
            packets_captured += 1
            yield packet_from_xml_packet(packet)
        # Only stop on a short read once no complete packet remains buffered;
        # otherwise trailing packets already sitting in `data` would be lost.
        if packet is None and not wait_for_more_data and len(new_data) < batch_size:
            break
        if packet_count and packets_captured >= packet_count:
            break
def _get_packet_from_stream(self, stream, existing_data, psml_structure=None): """ A coroutine which returns a single packet if it can be read from the given StreamReader. :return a tuple of (packet, remaining_data). The packet will be None if there was not enough XML data to create a packet. remaining_data is the leftover data which was not enough to create a packet from. :raises EOFError if EOF was reached. """ #yield each packet in existing_data #Maybe there is already a packet in our buffer packet, existing_data = self._extract_tag_from_data(existing_data) if packet: packet = packet_from_xml_packet(packet, psml_structure=psml_structure) raise Return(packet, existing_data) new_data = yield From(stream.read(self.DEFAULT_BATCH_SIZE)) existing_data += new_data if not new_data: # Reached EOF raise EOFError() raise Return(None, existing_data)
async def _get_packet_from_stream(self, stream, existing_data, got_first_packet=True, psml_structure=None):
    """Produce one parsed packet from the buffer, reading more data if needed.

    :return: a ``(packet, remaining_data)`` tuple. ``packet`` is ``None`` when
        the buffered data does not yet contain a full packet; ``remaining_data``
        is the leftover buffer to pass back in on the next call.
    :raises EOFError: when the stream is exhausted.
    """
    # Pick the extractor/parser pair matching the capture's output format.
    if self.use_json:
        raw, existing_data = self._extract_packet_json_from_data(
            existing_data, got_first_packet=got_first_packet)
        parse = lambda p: packet_from_json_packet(
            p, deduplicate_fields=self._json_has_duplicate_keys)
    else:
        raw, existing_data = self._extract_tag_from_data(existing_data)
        parse = lambda p: packet_from_xml_packet(p, psml_structure=psml_structure)

    # A complete packet was already sitting in the buffer.
    if raw:
        return parse(raw), existing_data

    # Otherwise pull one more batch from tshark's output.
    new_data = await stream.read(self.DEFAULT_BATCH_SIZE)
    existing_data += new_data
    if not new_data:
        # Remember that EOF was hit before signalling it to the caller.
        self._eof_reached = True
        raise EOFError()
    return None, existing_data
def _packets_from_fd(cls, fd, previous_data='', packet_count=None, wait_for_more_data=True, batch_size=1000):
    """Read packets from a file-like object containing TShark XML output.

    Returns a generator of parsed packets.

    :param fd: A file-like object containing a TShark XML
    :param previous_data: Any data to put before the file.
    :param packet_count: A maximum amount of packets to stop after.
    :param wait_for_more_data: Whether to wait for more data or stop when none
        is available (i.e. when the fd is a standard file)
    :param batch_size: Number of characters to read from ``fd`` per iteration.
    """
    data = previous_data
    packets_captured = 0
    while True:
        # Read data until we get a packet, and yield it.
        new_data = fd.read(batch_size)
        data += new_data
        packet, data = cls._extract_packet_from_data(data)
        if packet:
            packets_captured += 1
            yield packet_from_xml_packet(packet)
        # Only stop on a short read once no complete packet remains buffered;
        # otherwise trailing packets already sitting in `data` would be lost.
        if packet is None and not wait_for_more_data and len(new_data) < batch_size:
            break
        if packet_count and packets_captured >= packet_count:
            break
async def _get_packet_from_stream(self, stream, existing_data, got_first_packet=True, psml_structure=None):
    """Return a single parsed packet if one can be assembled from the buffer.

    :return: ``(packet, remaining_data)`` — ``packet`` is ``None`` when not
        enough data has accumulated to form a full packet yet; the leftover
        buffer is returned either way for the next call.
    :raises EOFError: when the underlying stream has been exhausted.
    """
    # First see whether the existing buffer already contains a whole packet.
    if self.use_json:
        candidate, existing_data = self._extract_packet_json_from_data(
            existing_data, got_first_packet=got_first_packet)
    else:
        candidate, existing_data = self._extract_tag_from_data(existing_data)

    if candidate:
        if self.use_json:
            return packet_from_json_packet(candidate), existing_data
        return packet_from_xml_packet(candidate, psml_structure=psml_structure), existing_data

    # Nothing complete buffered yet — fetch another chunk from the stream.
    chunk = await stream.read(self.DEFAULT_BATCH_SIZE)
    existing_data += chunk
    if not chunk:
        # Reached EOF
        raise EOFError()
    return None, existing_data
def _get_packet_from_stream(self, stream, existing_data, psml_structure=None): """ A coroutine which returns a single packet if it can be read from the given StreamReader. :return a tuple of (packet, remaining_data). The packet will be None if there was not enough XML data to create a packet. remaining_data is the leftover data which was not enough to create a packet from. :raises EOFError if EOF was reached. """ # Read data until we get a packet, and yield it. new_data = yield From(stream.read(self.DEFAULT_BATCH_SIZE)) existing_data += new_data packet, existing_data = self._extract_tag_from_data(existing_data) if packet: packet = packet_from_xml_packet(packet, psml_structure=psml_structure) raise Return(packet, existing_data) if not new_data: # Reached EOF raise EOFError() raise Return(None, existing_data)
def _packets_from_fd(self, fd, previous_data=b'', packet_count=None, wait_for_more_data=True, batch_size=4096):
    """Generator yielding packets parsed from a TShark XML file-like object.

    :param fd: A file-like object containing a TShark XML
    :param previous_data: Any data to put before the file.
    :param packet_count: A maximum amount of packets to stop after.
    :param wait_for_more_data: Whether to wait for more data or stop when none
        is available (i.e. when the fd is a standard file)
    :param batch_size: Number of bytes to read from ``fd`` per iteration.
    """
    buffered = previous_data
    captured = 0
    psml_struct = None

    if self.only_summaries:
        # Summary mode: the psml <structure> element appears at the top of
        # the file and must be parsed before any summary packets.
        while not psml_struct:
            buffered += fd.read(batch_size)
            psml_struct, buffered = self._extract_tag_from_data(buffered, 'structure')
        psml_struct = psml_structure_from_xml(psml_struct)

    while True:
        # Keep reading chunks, yielding each packet as soon as it completes.
        chunk = fd.read(batch_size)
        buffered += chunk
        packet, buffered = self._extract_tag_from_data(buffered)
        if packet:
            captured += 1
            yield packet_from_xml_packet(packet, psml_structure=psml_struct)
        # A short read with nothing left to parse means the file is done.
        if packet is None and not wait_for_more_data and len(chunk) < batch_size:
            break
        if packet_count and captured >= packet_count:
            break
def parsed_packet(data_directory):
    """Parse and return the sample packet from ``packet.xml`` in *data_directory*."""
    xml_bytes = (data_directory / "packet.xml").read_bytes()
    return tshark_xml.packet_from_xml_packet(xml_bytes)