Code Example #1
File: logs.py  Project: iomedhealth/paasta
def scribe_tail(scribe_env, stream_name, service, levels, components, clusters, queue, filter_fn, parse_fn=None):
    """Creates a scribetailer for a particular environment.

    When it encounters a line that it should report, it sticks it into the
    provided queue.

    This code is designed to run in a thread as spawned by tail_paasta_logs().
    """
    try:
        log.debug("Going to tail %s scribe stream in %s" % (stream_name, scribe_env))
        host_and_port = scribereader.get_env_scribe_host(scribe_env, True)
        host = host_and_port['host']
        port = host_and_port['port']
        tailer = scribereader.get_stream_tailer(stream_name, host, port)
        for line in tailer:
            if parse_fn:
                line = parse_fn(line, clusters)
            if filter_fn(line, levels, service, components, clusters):
                queue.put(line)
    except KeyboardInterrupt:
        # Die peacefully rather than printing N threads worth of stack
        # traces.
        pass
    except StreamTailerSetupError:
        log.error("Failed to setup stream tailing for %s in %s" % (stream_name, scribe_env))
        log.error("Don't Panic! This can happen the first time a service is deployed because the log")
        log.error("doesn't exist yet. Please wait for the service to be deployed in %s and try again." % scribe_env)
        raise
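
The docstring above describes the intended calling pattern: tail_paasta_logs() spawns scribe_tail in a thread per scribe environment, and all threads feed one shared queue that the caller drains. A minimal sketch of that pattern follows; the environment, stream, service, level, component and cluster values are placeholders, and my_filter_fn is an illustrative predicate, not paasta's real filter or configuration.

import threading
from queue import Queue

def my_filter_fn(line, levels, service, components, clusters):
    # Illustrative predicate only; the real filter inspects the actual
    # log record format, which is not shown in this snippet.
    return True

queue = Queue()
for scribe_env in ["example-env"]:  # placeholder environment names
    threading.Thread(
        target=scribe_tail,
        args=(scribe_env, "example_stream", "example-service",
              ["debug", "info"], ["stdout"], ["example-cluster"],
              queue, my_filter_fn),
        daemon=True,
    ).start()

# The spawning caller then drains the shared queue.
while True:
    print(queue.get())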
Code Example #2
def read_oom_events_from_scribe(cluster, superregion, num_lines=1000):
    """Read the latest 'num_lines' lines from OOM_EVENTS_STREAM and iterate over them."""
    # paasta configs include a map for cluster -> env that is expected by scribe
    log_reader_config = load_system_paasta_config().get_log_reader()
    cluster_map = log_reader_config["options"]["cluster_map"]
    scribe_env = cluster_map[cluster]

    # `scribe_env_to_locations` slightly mutates the scribe env based on whether
    # or not it is in dev or prod
    host, port = scribereader.get_tail_host_and_port(
        **scribe_env_to_locations(scribe_env),
    )
    stream = scribereader.get_stream_tailer(
        stream_name=OOM_EVENTS_STREAM,
        tailing_host=host,
        tailing_port=port,
        lines=num_lines,
        superregion=superregion,
    )
    try:
        for line in stream:
            try:
                j = json.loads(line)
                if j.get("cluster", "") == cluster:
                    yield j
            except json.decoder.JSONDecodeError:
                pass
    except StreamTailerSetupError as e:
        if "No data in stream" in str(e):
            pass
        else:
            raise e
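
A short usage sketch for the generator above; the cluster and superregion names are placeholders that would have to exist in the scribe configuration, and the printed keys are simply whatever fields each OOM-event record happens to carry.

# Hypothetical invocation with placeholder names.
for event in read_oom_events_from_scribe("example-cluster", "example-superregion", num_lines=100):
    # Each yielded item is the decoded JSON dict of one OOM event whose
    # "cluster" field matches the requested cluster.
    print(sorted(event.keys()))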
Code Example #3
File: logs.py  Project: EvanKrall/paasta
    def scribe_tail(self, scribe_env, stream_name, service, levels, components, clusters, queue, filter_fn,
                    parse_fn=None):
        """Creates a scribetailer for a particular environment.

        When it encounters a line that it should report, it sticks it into the
        provided queue.

        This code is designed to run in a thread as spawned by tail_paasta_logs().
        """
        try:
            log.debug("Going to tail %s scribe stream in %s" % (stream_name, scribe_env))
            host_and_port = scribereader.get_env_scribe_host(scribe_env, True)
            host = host_and_port['host']
            port = host_and_port['port']
            tailer = scribereader.get_stream_tailer(stream_name, host, port)
            for line in tailer:
                if parse_fn:
                    line = parse_fn(line, clusters, service)
                if filter_fn(line, levels, service, components, clusters):
                    queue.put(line)
        except KeyboardInterrupt:
            # Die peacefully rather than printing N threads worth of stack
            # traces.
            pass
        except StreamTailerSetupError as e:
            if 'No data in stream' in e.message:
                log.warning("Scribe stream %s is empty on %s" % (stream_name, scribe_env))
                log.warning("Don't Panic! This may or may not be a problem depending on if you expect there to be")
                log.warning("output within this stream.")
                # Enter a wait so the process isn't considered dead.
                # This is just a large number, since apparently some python interpreters
                # don't like being passed sys.maxsize.
                sleep(2**16)
            else:
                raise
Code Example #4
File: logs.py  Project: edric-shen/paasta
    def scribe_tail(self, scribe_env, stream_name, service, levels, components, clusters, instances, queue, filter_fn,
                    parse_fn=None):
        """Creates a scribetailer for a particular environment.

        When it encounters a line that it should report, it sticks it into the
        provided queue.

        This code is designed to run in a thread as spawned by tail_paasta_logs().
        """
        try:
            log.debug("Going to tail %s scribe stream in %s" % (stream_name, scribe_env))
            host_and_port = scribereader.get_env_scribe_host(scribe_env, True)
            host = host_and_port['host']
            port = host_and_port['port']
            tailer = scribereader.get_stream_tailer(stream_name, host, port)
            for line in tailer:
                if parse_fn:
                    line = parse_fn(line, clusters, service)
                if filter_fn(line, levels, service, components, clusters, instances):
                    queue.put(line)
        except KeyboardInterrupt:
            # Die peacefully rather than printing N threads worth of stack
            # traces.
            pass
        except StreamTailerSetupError as e:
            if 'No data in stream' in e.message:
                log.warning("Scribe stream %s is empty on %s" % (stream_name, scribe_env))
                log.warning("Don't Panic! This may or may not be a problem depending on if you expect there to be")
                log.warning("output within this stream.")
                # Enter a wait so the process isn't considered dead.
                # This is just a large number, since apparently some python interpreters
                # don't like being passed sys.maxsize.
                sleep(2**16)
            else:
                raise
Code Example #5
def scribe_tail(queue):
    # Tail the Slack webhook stream and put every line on the queue, unfiltered.
    host_and_port = scribereader.get_env_scribe_host(SCRIBE_ENV, True)
    host = host_and_port['host']
    port = host_and_port['port']
    tailer = scribereader.get_stream_tailer(SLACK_WEBHOOK_STREAM, host, port)
    for line in tailer:
        queue.put(line)
Code Example #6
def read_oom_events_from_scribe(cluster, superregion, num_lines=1000):
    """Read the latest 'num_lines' lines from OOM_EVENTS_STREAM and iterate over them."""
    host_port = choice(scribereader.get_default_scribe_hosts(tail=True))
    stream = scribereader.get_stream_tailer(
        stream_name=OOM_EVENTS_STREAM,
        tailing_host=host_port['host'],
        tailing_port=host_port['port'],
        use_kafka=True,
        lines=num_lines,
        superregion=superregion,
    )
    for line in stream:
        try:
            j = json.loads(line)
            if j.get('cluster', '') == cluster:
                yield j
        except json.decoder.JSONDecodeError:
            pass
Code Example #7
    def scribe_tail(self,
                    scribe_env,
                    stream_name,
                    service,
                    levels,
                    components,
                    clusters,
                    queue,
                    filter_fn,
                    parse_fn=None):
        """Creates a scribetailer for a particular environment.

        When it encounters a line that it should report, it sticks it into the
        provided queue.

        This code is designed to run in a thread as spawned by tail_paasta_logs().
        """
        try:
            log.debug("Going to tail %s scribe stream in %s" %
                      (stream_name, scribe_env))
            host_and_port = scribereader.get_env_scribe_host(scribe_env, True)
            host = host_and_port['host']
            port = host_and_port['port']
            tailer = scribereader.get_stream_tailer(stream_name, host, port)
            for line in tailer:
                if parse_fn:
                    line = parse_fn(line, clusters, service)
                if filter_fn(line, levels, service, components, clusters):
                    queue.put(line)
        except KeyboardInterrupt:
            # Die peacefully rather than printing N threads worth of stack
            # traces.
            pass
        except StreamTailerSetupError:
            log.error("Failed to setup stream tailing for %s in %s" %
                      (stream_name, scribe_env))
            log.error(
                "Don't Panic! This can happen the first time a service is deployed because the log"
            )
            log.error(
                "doesn't exist yet. Please wait for the service to be deployed in %s and try again."
                % scribe_env)
            raise
Code Example #8
File: logs.py  Project: white105/paasta
def fake_context():
    yield scribereader.get_stream_tailer(stream_name, host, port, True, line_count)
Code Example #9
File: logs.py  Project: white105/paasta
def fake_context():
    log.info("Running the equivalent of 'scribereader -f -e %s %s'" % (scribe_env, stream_name))
    yield scribereader.get_stream_tailer(stream_name, host, port, True, -1)
Code Example #10
File: logs.py  Project: vasanth3045/paasta
def fake_context():
    log.debug(
        f"Running the equivalent of 'scribereader -e {scribe_env} {stream_name}'"
    )
    yield scribereader.get_stream_tailer(stream_name, host, port, True, line_count)
Code Example #11
File: logs.py  Project: somic/paasta
def fake_context():
    yield scribereader.get_stream_tailer(stream_name, host, port, True, line_count)
Code Example #12
File: logs.py  Project: somic/paasta
def fake_context():
    log.info("Running the equivalent of 'scribereader -f -e %s %s'" % (scribe_env, stream_name))
    yield scribereader.get_stream_tailer(stream_name, host, port, True, -1)
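
The fake_context helpers in Code Examples #8 through #12 are generator bodies that yield a tailer, so in context they read like functions meant to be wrapped as context managers. A sketch of that usage follows; the @contextmanager wrapper, the parameters, and the stream name, host, port and line count are assumptions added for illustration, not values taken from the paasta code.

from contextlib import contextmanager

import scribereader

@contextmanager
def fake_context(stream_name, host, port, line_count):
    # Same positional get_stream_tailer call as in the examples above; only
    # the wrapper and its parameters are assumptions for this sketch.
    yield scribereader.get_stream_tailer(stream_name, host, port, True, line_count)

# Placeholder values; substitute a real stream name and scribe host/port.
with fake_context("example_stream", "scribe.example.com", 1463, 100) as tailer:
    for line in tailer:
        print(line)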