def log_line(log_name, data):
    """Fork-aware ``log_line``.

    This behaves like normal ``clog.log_line``, but checks the process pid
    between calls. If the pid changes, log handlers are reset for you.

    :param log_name: the scribe log stream
    :type log_name: string
    :param data: the data to log
    :type data: basestring/unicode
    :raises TypeError: if ``data`` is neither text nor bytes
    """
    global _current_pid
    # Detect a fork: a child process must not reuse the parent's scribe
    # connections, so reset clog's default loggers when the pid changes.
    if os.getpid() != _current_pid:
        _current_pid = os.getpid()
        clog.reset_default_loggers()
    # Normalize text to UTF-8 bytes; anything non-bytes after that is an error.
    # (`isinstance(data, str)` replaces the Py2-era `type(u'')` spelling —
    # identical behavior on Python 3.)
    if isinstance(data, str):
        data = data.encode('utf8')
    if not isinstance(data, bytes):
        raise TypeError('data must be a basestring')
    clog.log_line(log_name, data)
def _log(service, line, component, level=DEFAULT_LOGLEVEL, cluster=ANY_CLUSTER, instance=ANY_INSTANCE):
    """This expects someone (currently the paasta cli main()) to have already
    configured the log object. We'll just write things to it.

    The line is mirrored to a local stream first ('event' -> stdout,
    'debug' -> stderr; any other level raises NoSuchLogLevel), then the
    formatted line is written to the service's scribe log.
    """
    if level == 'event':
        local_stream = sys.stdout
    elif level == 'debug':
        local_stream = sys.stderr
    else:
        raise NoSuchLogLevel
    print(line, file=local_stream)
    log_name = get_log_name_for_service(service)
    clog.log_line(log_name, format_log_line(level, cluster, instance, component, line))
def log_to_clog(log_line):
    """Send the event to 'tmp_paasta_oom_events'.

    Serializes with ``json.dumps`` instead of hand-built ``%`` interpolation:
    the original produced invalid JSON whenever a field (e.g. a process name)
    contained a double quote or backslash. Key order, the integer timestamp
    (``%d`` truncation via ``int()``), and the string quoting of every other
    field are preserved.

    :param log_line: an object exposing timestamp/hostname/container_id/
        cluster/service/instance/process_name/mesos_container_id/mem_limit
        attributes
    """
    import json  # local import: keeps this fix self-contained

    line = json.dumps({
        "timestamp": int(log_line.timestamp),
        "hostname": str(log_line.hostname),
        "container_id": str(log_line.container_id),
        "cluster": str(log_line.cluster),
        "service": str(log_line.service),
        "instance": str(log_line.instance),
        "process_name": str(log_line.process_name),
        "mesos_container_id": str(log_line.mesos_container_id),
        "mem_limit": str(log_line.mem_limit),
    })
    clog.log_line("tmp_paasta_oom_events", line)
def _log(
    self,
    timestamp=None,
    level="INFO",
    additional_fields=None,
):
    """Write one structured JSON record to this logger's scribe stream.

    No-op when clog is unavailable. A missing/falsy ``timestamp`` defaults
    to the current UTC time.
    """
    if clog is None:
        return
    # `settings` values are set by paasta_tools.api.api:setup_paasta_api
    ts = timestamp if timestamp else datetime.now(pytz.utc)
    record = {
        "human_timestamp": ts.strftime("%Y-%m-%dT%H:%M:%S%Z"),
        "unix_timestamp": ts.timestamp(),
        "hostname": settings.hostname,
        "level": level,
        "cluster": settings.cluster,
    }
    record.update(additional_fields or {})
    clog.log_line(self.log_name, json.dumps(record, sort_keys=True))
def publish(self, message):
    """Best-effort scribe of ``message`` to its topic.

    Delivery failures are logged (with traceback) rather than raised.

    Fixes: the bare ``except:`` also swallowed ``KeyboardInterrupt`` and
    ``SystemExit``; narrowed to ``Exception``. ``logger.exception`` replaces
    ``logger.error`` so the traceback of the failure is recorded.
    """
    try:
        clog.log_line(self.envelope.pack(message, ascii_encoded=True))
    except Exception:
        logger.exception("Failed to scribe message - {}".format(str(message)))
def log_to_scribe(scribe_stream, message):
    """Best-effort write of ``message`` to ``scribe_stream``.

    ``clog`` is imported lazily because it only exists on Yelp hosts; when it
    is absent a warning is logged and the message is dropped.

    Fixes: ``logger.warn`` is a deprecated alias — use ``logger.warning``.
    The ``try`` now covers only the import, so a ``ModuleNotFoundError``
    raised from inside ``clog.log_line`` itself would no longer be
    misreported as "clog not found".
    """
    try:
        import clog
    except ModuleNotFoundError:
        logger.warning('clog not found, are you running on a Yelp host?')
        return
    clog.log_line(scribe_stream, message)