Example #1
    def getStderr(self, block="AUTO"):
        """Return the entire stderr for the process as a list of strings.

           Parameters
           ----------

           block : bool or str
               Whether to block until the process has finished running.
               If "AUTO" (the default), block only when the process was
               started in blocking mode.

           Returns
           -------

           error : [str]
               The list of stderr strings.
        """

        # Wait for the process to finish.
        if block is True:
            self.wait()
        elif block == "AUTO" and self._is_blocked:
            self.wait()

        # Append any new lines to the stderr list.
        for line in _pygtail.Pygtail(self._stderr_file):
            self._stderr.append(line.rstrip())

        return self._stderr.copy()
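
For illustration, a minimal sketch of how this method might be called. The `process` name is hypothetical and stands for an instance of the enclosing class:

    # Block until the process finishes, then print every stderr line.
    for line in process.getStderr(block=True):
        print(line)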
Example #2
    def watch(self):
        for line in pygtail.Pygtail(self.log_file,
                                    offset_file=self.offset_file,
                                    read_from_end=True):
            if line:
                self.logger.debug(line)

            match = self.regex.search(line)

            if match:
                self.logger.debug('Match found {}'.format(match.group(0)))

            if match and match.group(1) == 'start':
                self.current_event = Event(match.group(2))
            elif match and match.group(1) == 'stop':
                self.current_event = None

        if self.current_event:
            self.logger.debug('current event {}'.format(self.current_event))
            time_diff = datetime.datetime.now() - self.current_event.start_time
            self.logger.debug('checking event {} again {}'.format(
                self.current_event, time_diff.total_seconds()))

            if time_diff.total_seconds() > 10:
                self.mqtt_client.publish('{}/{}'.format(
                    constants.ALERT_TOPIC, self.camera_name),
                                         payload='on')

                self.logger.info('Alert threshold hit for event {}'.format(
                    self.current_event.id))
                self.current_event = None
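
The regex itself is not shown in this example. From the way the groups are used (group(1) is 'start' or 'stop', group(2) identifies the event), it must have roughly the following shape; the pattern below is illustrative only:

    import re

    # Hypothetical: group 1 captures the action, group 2 the event id.
    regex = re.compile(r'event (start|stop) (\S+)')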
Example #3
    def stderr(self, n=10):
        """Print the last n lines of the stderr buffer.

           Parameters
           ----------

           n : int
               The number of lines to print.
        """

        # Ensure that the number of lines is non-negative.
        if n < 0:
            raise ValueError("The number of lines cannot be negative!")

        # Append any new lines to the stderr list.
        for line in _pygtail.Pygtail(self._stderr_file):
            self._stderr.append(line.rstrip())

        # Get the current number of lines.
        num_lines = len(self._stderr)

        # Set the line from which to start printing.
        start = max(0, num_lines - n)

        # Print the lines.
        for x in range(start, num_lines):
            print(self._stderr[x])
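
A usage sketch, again with a hypothetical `process` instance:

    # Print the last 20 lines of the stderr buffer.
    process.stderr(n=20)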
Example #4
def tail_file(filename):
    ''' Tail a file using pygtail. Note: this could probably be improved '''
    with make_temp_file() as offset_file:
        while True:
            for line in pygtail.Pygtail(filename, offset_file=offset_file):
                yield line
            time.sleep(1.0)
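
A minimal way to consume this generator (the log path is illustrative):

    # Blocks forever, yielding new lines as they are appended to the file.
    for line in tail_file("/var/log/app.log"):
        print(line, end="")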
Example #5
def main():
    parser = argparse.ArgumentParser(description=DESCRIPTION)
    parser.add_argument("--transform-func",
                        default="jsonlog2elasticsearch.no_transform",
                        help="python function to transform each json line")
    parser.add_argument("--debug",
                        action="store_true",
                        help="force debug mode")
    parser.add_argument("--index-override-func",
                        default="no",
                        help="python function to override the ES_INDEX value")
    parser.add_argument("ES_HOST", help="ES hostname/ip")
    parser.add_argument("ES_PORT", help="ES port", type=int)
    parser.add_argument("ES_INDEX", help="ES index name", type=str)
    parser.add_argument("LOG_PATH", help="json log file fullpath")
    args = parser.parse_args()
    if args.debug:
        set_config(minimal_level="DEBUG")
    if not os.path.isfile(args.LOG_PATH):
        touch(args.LOG_PATH)
    transform_func = get_transform_func(args.transform_func)
    if args.index_override_func == "no":
        index_func = functools.partial(default_index_func, args.ES_INDEX)
    else:
        index_func = get_index_func(args.index_override_func)
    # Patch Pygtail internals with this module's replacement implementations
    # before instantiating the tailer.
    pygtail.Pygtail._is_new_file = patched_is_new_file
    pygtail.Pygtail._update_offset_file = patched_update_offset_file
    pt = pygtail.Pygtail(filename=args.LOG_PATH, read_from_end=True)
    es = elasticsearch.Elasticsearch(hosts=[{
        "host": args.ES_HOST,
        "port": args.ES_PORT,
        "use_ssl": False
    }])
    signal.signal(signal.SIGTERM, signal_handler)
    LOG.info("started")
    while RUNNING:
        # Drain every line currently available in the log file.
        while True:
            try:
                line = pt.next()
            except StopIteration:
                break
            except FileNotFoundError:
                touch(args.LOG_PATH)
                break
            else:
                if process(line, transform_func, index_func):
                    commit(es)
        commit(es, True)
        LOG.debug("sleeping %i seconds..." % SLEEP_AFTER_EACH_ITERATION)
        time.sleep(SLEEP_AFTER_EACH_ITERATION)
    LOG.info("exited")
Example #6
def kafkaProducer(filename, kafka_broker, topic):
    pygTail = pygtail.Pygtail(filename)
    lines = pygTail.readlines()

    producer = KafkaProducer(bootstrap_servers=kafka_broker,
                             partitioner=partitioner)
    # send() is asynchronous by default; messages are batched in the
    # background.
    if lines:
        for line in lines:
            producer.send(topic,
                          value=bytes(line.strip(), encoding="utf-8"))
        # Block until every queued message has been delivered. Waiting on
        # only the last send's future would not confirm the earlier ones.
        producer.flush()
        return 1, "All sent"
    return 0, "Nothing to send to Kafka"
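
A usage sketch; the file name, broker address, and topic are illustrative:

    status, message = kafkaProducer("app.log", "localhost:9092", "logs")
    print(status, message)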
Example #7
    async def corelogwatcher(self):
        '''
        Watches for activity on the hid-io-core log file
        '''
        while self.retry_task:
            # Make sure there is a log file to watch
            if self.core_current_log_file:
                try:
                    for line in pygtail.Pygtail(
                            self.core_current_log_file,
                            offset_file=self.core_current_log_file_offset):
                        self.on_core_log_entry(line)
                except Exception as err:
                    logger.error(err)
                await asyncio.sleep(0.5)
            else:
                await asyncio.sleep(1)
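
A sketch of how such a coroutine might be started; the `client` instance is hypothetical:

    # Run the watcher until it exits (i.e. until retry_task goes False).
    asyncio.run(client.corelogwatcher())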
Example #8
def main():
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-l',
                        '--log_level',
                        action='count',
                        default=0,
                        help='Set logging level, multiples for more detailed.')
    parser.add_argument(
        '-m',
        '--metrics_log',
        default="/var/log/dataone/daemon/cn-process-metric.log",
        help="Metrics log file to process.")
    parser.add_argument(
        '-o',
        '--offset_file',
        default="/var/log/dataone/daemon/cn-process-metric.log.offset",
        help="Offset file to record log reader position.")
    parser.add_argument('-j',
                        '--json_file',
                        default="/var/www/processing_metrics.json",
                        help="Metrics state file in JSON.")
    parser.add_argument('-t',
                        '--text_output',
                        default=None,
                        help="Text output file (stdout)")
    args = parser.parse_args()
    # Setup logging verbosity
    levels = [logging.WARNING, logging.INFO, logging.DEBUG]
    level = levels[min(len(levels) - 1, args.log_level)]
    logging.basicConfig(level=level,
                        format="%(asctime)s %(levelname)s %(message)s")

    collation = Collator(args.json_file)
    pyg = pygtail.Pygtail(args.metrics_log, offset_file=args.offset_file)
    for entry in pyg:
        collation.addEntry(entry)
    collation.save()
    fdest = sys.stdout
    if args.text_output is not None:
        fdest = open(args.text_output, "wt")
    fdest.write(collation.asText())
    if fdest is not sys.stdout:
        fdest.close()
    collation.emitToStatsd("measure-unm-1.dataone.org")
Example #9
    def _update_hills_dict(self):
        """Read the HILLS file and update any records."""

        # Exit if the HILLS file hasn't been created.
        if not _os.path.isfile(self._hills_file):
            return

        # Loop over all new lines in the file.
        for line in _pygtail.Pygtail(self._hills_file):

            # Is this a header line? If so, store the keys.
            if line[3:9] == "FIELDS":
                self._hills_keys = line[10:].split()

            # This is an actual data record. Update the multi-dictionary.
            elif line[0] != "#":
                data = [float(x) for x in line.split()]
                for key, value in zip(self._hills_keys, data):
                    self._hills_dict[key] = value
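
For context, a PLUMED HILLS file begins with a header line followed by data records, e.g. (values illustrative):

    #! FIELDS time phi sigma_phi height biasf
    1.000 -1.234 0.350 1.200 10.000

This is why line[3:9] == "FIELDS" detects the header (characters 3-8 spell "FIELDS") and why any other line starting with "#" is skipped.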
Example #10
    def getGradient(self, time_series=False, block="AUTO"):
        """Get the free energy gradient.

           Parameters
           ----------

           time_series : bool
               Whether to return a list of time series records.

           block : bool or str
               Whether to block until the process has finished running.
               If "AUTO" (the default), block only when the process was
               started in blocking mode.

           Returns
           -------

           gradient : float, [float]
               The latest free energy gradient, or the full list of
               gradients if time_series is True.
        """

        # Wait for the process to finish.
        if block is True:
            self.wait()
        elif block == "AUTO" and self._is_blocked:
            self.wait()

        # No gradient file.
        if not _os.path.isfile(self._gradient_file):
            return None

        # Append any new lines to the gradients list.
        for line in _pygtail.Pygtail(self._gradient_file):
            # Ignore comments.
            if line[0] != "#":
                self._gradients.append(float(line.rstrip().split()[-1]))

        if len(self._gradients) == 0:
            return None

        if time_series:
            return self._gradients
        else:
            return self._gradients[-1]
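
As with getStderr above, a hypothetical usage sketch:

    # Latest gradient value (blocks if the process runs in blocking mode).
    g = process.getGradient()

    # Full time series of gradients.
    series = process.getGradient(time_series=True)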
Example #11
def watch_csv_log(path):
    for line in pygtail.Pygtail(path):
        try:
            yield parse_alert_line(line)
        except Exception:
            eprint(f"Error processing CSV line: '{line}' - skipping")
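
A consumption sketch; parse_alert_line and eprint are defined elsewhere in the source, and the path is illustrative:

    # Each successfully parsed line is yielded as an alert record.
    for alert in watch_csv_log("alerts.csv"):
        print(alert)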
Example #12
    def ireadlines(self):
        tailer = pygtail.Pygtail(self.logfile, offset_file=self.statefile)
        for line in tailer:
            yield line
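
The offset_file argument is what makes repeated calls incremental: Pygtail records the read position there, so a loop like the following (paths hypothetical) sees each line only once, even across restarts:

    # First pass reads the whole file; later passes read only new lines.
    for line in pygtail.Pygtail("app.log", offset_file="app.log.offset"):
        print(line, end="")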