Example No. 1
    def test_000_data_framer_construct_destruct(self):
        """Test constructing and destructing of DataFramer classes."""
        uut2 = data_framer.InterwovenLogFramer("")
        del uut2

        uut3 = data_framer.NewlineFramer()
        del uut3
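
For context, a hedged sketch of how these two framer types might be constructed outside of a test. The assumption that InterwovenLogFramer takes a pattern identifying complete log lines is based only on the string argument passed above, and the regex below is purely illustrative.

    # Illustrative only; assumes data_framer is imported as in the tests above.
    plain_framer = data_framer.NewlineFramer()

    # Assumed: the constructor argument is a pattern describing complete log
    # lines so they can be separated from interwoven response data. This
    # pattern is a placeholder, not any real device's log format.
    interwoven_framer = data_framer.InterwovenLogFramer(r"\[\d+\] .*\n")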
Example No. 2
    def __init__(self,
                 device_name,
                 exception_queue,
                 command_queue,
                 log_queue,
                 transport,
                 call_result_queue,
                 raw_data_queue=None,
                 raw_data_id=0,
                 framer=None,
                 partial_line_timeout=PARTIAL_LINE_TIMEOUT,
                 read_timeout=_READ_TIMEOUT,
                 max_read_bytes=_MAX_READ_BYTES,
                 max_write_bytes=_MAX_WRITE_BYTES):
        """Initialize TransportProcess with the arguments provided.

        Args:
          device_name (str): name of device using this transport
          exception_queue (Queue): to use for reporting exception traceback
            message from subprocess
          command_queue (Queue): to receive commands into
          log_queue (Queue): to write each log line with host stamp added
          transport (Transport): to use to receive and send raw data
          call_result_queue (Queue): to write transport call responses to.
          raw_data_queue (Queue): to put raw (if applicable, detokenized) data
            into when enabled.
          raw_data_id (int): unique identifier for data published by this
            transport process to the raw_data_queue.
          framer (DataFramer): to use to frame raw data into partial and
            complete lines.
          partial_line_timeout (float): time in seconds to wait before adding
            partial lines to raw_data_queue and log_queue.
          read_timeout (float): time to wait in seconds for transport reads.
          max_read_bytes (int): to attempt to read on each transport read
            call.
          max_write_bytes (int): to attempt to write on each transport write
            call.
        """
        process_name = "{}-Transport{}".format(device_name, raw_data_id)
        super(TransportProcess,
              self).__init__(device_name,
                             process_name,
                             exception_queue,
                             command_queue,
                             valid_commands=_ALL_VALID_COMMANDS)
        self._buffered_unicode = u""
        self._framer = framer or data_framer.NewlineFramer()
        self._log_queue = log_queue
        self._max_read_bytes = max_read_bytes
        self._max_write_bytes = max_write_bytes
        self._partial_line_timeout = partial_line_timeout
        self._partial_log_time = time.time()
        self._pending_writes = None
        self._raw_data_enabled = multiprocessing_utils.get_context().Event()
        self._call_result_queue = call_result_queue
        self._raw_data_id = raw_data_id
        self._raw_data_queue = raw_data_queue
        self._read_timeout = read_timeout
        self._transport_open = multiprocessing_utils.get_context().Event()
        self.transport = transport
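
The attributes initialized above hint at how framed data is published when raw data forwarding is enabled. Below is a hypothetical helper, sketched only to illustrate the intent of _raw_data_enabled, _raw_data_id, and _raw_data_queue; the method name and the queued tuple format are assumptions, not the actual TransportProcess implementation.

    def _publish_raw_data(self, line):
        """Hypothetical sketch: forward a framed line when raw data is enabled."""
        if self._raw_data_queue is not None and self._raw_data_enabled.is_set():
            # Tag the line with this transport's identifier so consumers can
            # tell which transport produced it (assumed format).
            self._raw_data_queue.put((self._raw_data_id, line))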
Example No. 3
    def test_040_newline_framer_returns_generator(self):
        """Test LogResponseIdentifier class accepts all lines."""
        uut = data_framer.NewlineFramer()

        lines = uut.get_lines("", begin=0)
        self.assertIsInstance(
            lines, types.GeneratorType,
            "Expected GeneratorType found {}".format(type(lines)))
Example No. 4
    def get_data_framers(self, num_transports):
        """Set up framers used to atomicize the raw output of the device.

        Deals with interwoven lines as well as tokenized output. The default
        is to use the newline character to separate lines.

        Args:
          num_transports (int): number of declared transports.

        Returns:
          list: list of data framers, one per transport.
        """
        return [data_framer.NewlineFramer()] * num_transports
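
A device class whose primary transport interleaves log lines with command responses could override this hook. Below is a hypothetical override; which transport gets the interwoven framer and the regex passed to it are illustrative, not taken from any real device.

    def get_data_framers(self, num_transports):
        """Hypothetical override: interwoven framer on the primary transport."""
        # A list comprehension gives each transport its own framer instance.
        framers = [data_framer.NewlineFramer() for _ in range(num_transports)]
        framers[0] = data_framer.InterwovenLogFramer(r"\[\d+\] .*\n")  # placeholder
        return framers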
Example No. 5
  def __init__(self,
               device_name,
               mp_manager,
               exception_queue,
               command_queue,
               parser,
               log_path,
               max_read_bytes=_MAX_READ_BYTES,
               framer=None):
    """Initialize LogFilterProcess with the arguments provided.

    Args:
        device_name (str): name of device used for process and error
          messages
        mp_manager (multiprocessing.Manager): object to use for creating
          Events
        exception_queue (Queue): to use for reporting exception traceback
          message from subprocess
        command_queue (Queue): to receive commands into
        parser (Parser): object to use for filtering log lines
        log_path (str): path and filename to write log messages to
        max_read_bytes (int): to attempt to read from log file each time.
        framer (DataFramer): to use to frame log data into partial and
          complete lines.
    """

    super(LogFilterProcess, self).__init__(
        device_name,
        device_name + "-LogFilter",
        mp_manager,
        exception_queue,
        command_queue,
        valid_commands=_VALID_FILTER_COMMANDS)

    self._buffered_unicode = u""
    self._framer = framer or data_framer.NewlineFramer()
    self._header_length = HOST_TIMESTAMP_LENGTH + LOG_LINE_HEADER_LENGTH
    self._max_read_bytes = max_read_bytes
    self._next_log_path = None
    self._parser = parser
    self._log_file = None
    self._log_filename = os.path.basename(log_path)
    self._log_directory = os.path.dirname(log_path)
    self._event_file = None
    self._event_path = get_event_filename(log_path)
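
The attributes set up above (_buffered_unicode, _framer, _header_length, and _parser) mirror the loop in Example No. 6. Below is a hypothetical sketch of a single read-and-filter pass; the method name is invented, self._event_file is assumed to have been opened elsewhere, and process_line with its keyword arguments follows the usage shown in Example No. 6.

  def _filter_chunk(self, raw_text):
    """Hypothetical sketch of one read-and-filter pass over new log data."""
    text = self._buffered_unicode + raw_text
    buffered_len = len(self._buffered_unicode)
    self._buffered_unicode = u""
    for line in self._framer.get_lines(text, begin=buffered_len):
      if line.endswith("\n"):
        # Complete line: hand it to the parser, skipping the host timestamp
        # and log line header prefix. Assumes self._event_file is open.
        self._parser.process_line(
            self._event_file, line,
            header_length=self._header_length,
            log_filename=self._log_filename)
      else:
        # Incomplete line: keep it until the rest arrives.
        self._buffered_unicode += line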
Example No. 6
  def _parse_events(self, log_path, display_refresh):
    """Parses log file searching for events depositing results into an event file.

    Args:
        log_path (str): Path to log filename containing raw, log event data
        display_refresh (float): Number of seconds to wait prior to refresh
          of display

    Raises:
        ParserError: if log parser fails.

    Note:
        With large log files, this process may take a large amount of time.
    """
    logger.info("Parsing log file {} into event file {}, please wait", log_path,
                self.event_filename)
    start_time = time.time()
    with codecs.open(self.event_filename, "a", encoding="utf-8") as event_file:
      log_filename = os.path.basename(log_path)
      with codecs.open(
          log_path, "r", encoding="utf-8", errors="replace") as log_file:
        log_file.seek(0, os.SEEK_END)
        total_bytes = log_file.tell()
        log_file.seek(0, os.SEEK_SET)
        process_time = start_time
        buffered_unicode = u""
        framer = data_framer.NewlineFramer()
        new_header_length = (
            log_process.HOST_TIMESTAMP_LENGTH +
            log_process.LOG_LINE_HEADER_LENGTH)
        old_header_length = 29
        try:
          while True:
            log_data = log_file.read(size=4096)
            if time.time() - process_time > display_refresh:
              process_time = time.time()
              bytes_processed = log_file.tell()
              logger.info("{:.2%} complete - bytes processed: {} of {}",
                          bytes_processed / total_bytes, bytes_processed,
                          total_bytes)
            if not log_data:
              break

            log_lines = buffered_unicode + log_data
            buffered_len = len(buffered_unicode)
            buffered_unicode = u""
            for log_line in framer.get_lines(log_lines, begin=buffered_len):
              if log_line[-1] == "\n":
                if "> GDM-" in log_line:
                  header_length = new_header_length
                else:
                  header_length = old_header_length
                self._parser_obj.process_line(
                    event_file,
                    log_line,
                    header_length=header_length,
                    log_filename=log_filename)
              else:
                buffered_unicode += log_line
        except IOError as err:
          logger.debug("log_parser encountered error: {!r}".format(err))
          raise errors.ParserError("Log file processing failed. "
                                   "IOError: {!r}".format(err))
    logger.info("Parsing log file {} into event file {} finished in {}s",
                log_path, self.event_filename,
                time.time() - start_time)
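
The buffered_unicode handling above exists because a line can straddle two fixed-size reads. A standalone illustration with synthetic chunks (not part of the parser):

    framer = data_framer.NewlineFramer()
    chunks = ["first line\nsecond li", "ne\nthird line\n"]  # two simulated reads
    buffered = u""
    for chunk in chunks:
        text = buffered + chunk
        begin = len(buffered)
        buffered = u""
        for line in framer.get_lines(text, begin=begin):
            if line.endswith("\n"):
                print("complete:", repr(line))
            else:
                buffered += line  # "second li" is carried into the next read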