def _emit_loop():
    """Consume metric points from ``_metrics_queue`` forever and emit them.

    Runs as the body of the background thread or worker process started by
    ``init``.  The loop never returns normally; it ends only when an
    interrupt (SystemExit / KeyboardInterrupt) escapes it, at which point
    the emitter is closed.
    """
    sys.stderr.write(
        "metrics starting... batch=%d pid=%d\n" % (_emitter.batch_size, os.getpid())
    )
    sys.stderr.flush()
    # NOTE(review): imported but unused here — kept for its module import
    # side effects and the pending TODO below; confirm before removing.
    from futile.signals import handle_exit

    # TODO drain the queue when SystemExit is received
    try:
        while True:
            try:
                # Block until the next point arrives, then emit it.
                point = _metrics_queue.get()
                _emitter.emit(point)
            except Exception as e:
                # Never let one bad point kill the emit loop.
                get_logger("metrics emitter").exception(e)
    finally:
        # BUGFIX: the original placed close() after the infinite loop,
        # making it unreachable.  Run it in ``finally`` so the emitter is
        # closed when the loop is torn down by SystemExit/KeyboardInterrupt.
        _emitter.close()
def __init__(self, name, client):
    """Bind this pipeline to *name* and the given messaging *client*.

    A channel is opened on the client immediately; worker bookkeeping
    starts out empty.
    """
    self._name = name
    self._client = client
    self._channel = client.channel()
    self.logger = get_logger(f"pipeline-{self._name}")
    # Workers and their threads are registered later.
    self._workers = []
    self._worker_threads = []
def work(self, thread_queue, handle):
    """Drain *thread_queue* and process each message with *handle*.

    Polls the queue (sleeping briefly when it is empty) until
    ``self._should_stop`` becomes true.  Errors raised by *handle* are
    logged — with the message parsed via ``self._message_type`` when one
    is configured — and the loop keeps running.
    """
    logger = get_logger("Worker")
    while not self._should_stop:
        try:
            ch, method, props, message = thread_queue.get_nowait()
        except QueueEmpty:
            logger.debug("worker got no work to do")
            time.sleep(0.5)
            continue
        try:
            handle(message)
        except Exception as e:
            # A message whose handling failed will not be ACKed.
            if self._message_type is None:
                logger.exception("handle message %s error %s", message, e)
            else:
                parsed = self._message_type()
                parsed.ParseFromString(message)
                logger.exception(
                    "handle message %s error %s",
                    MessageToString(parsed, as_one_line=True),
                    e,
                )
    logger.info("receive stop signal, stopping worker...")
def __init__(self, host, port, user, passwd, db):
    """Record MySQL connection parameters without opening a socket."""
    self._logger = get_logger("mysql_connection")
    self._host, self._port = host, port
    self._user, self._passwd, self._db = user, passwd, db
    # Remember the creating process so a fork can be detected later
    # (NOTE(review): presumably checked before reuse — confirm at call site).
    self.pid = os.getpid()
    # The actual connection is established lazily.
    self._connection = None
def __init__(self, host, port, user, passwd, db):
    """Create a MySQL client backed by a pool of ``MysqlConnection``s."""
    self._logger = get_logger("mysql_client")
    self._host, self._port = host, port
    self._user, self._passwd, self._db = user, passwd, db
    # Connections are created on demand by the pool, one per checkout.
    self._connection_pool = ConnectionPool(
        connection_class=MysqlConnection,
        host=host,
        port=port,
        user=user,
        passwd=passwd,
        db=db,
    )
def init(
    *,
    influxdb_host=None,
    influxdb_port=8086,
    influxdb_udp_port=8089,
    influxdb_database=None,
    prefix=None,
    batch_size=1024,
    debug=False,
    directly=False,
    use_thread=False,
    use_udp=False,
    timeout=10,
    **kwargs,
):
    """Initialise the process-wide metrics emitter.

    Environment variables (``INFLUXDB_HOST``, ``INFLUXDB_PORT``,
    ``INFLUXDB_UDP_PORT``, ``INFLUXDB_DATABASE``) override the
    corresponding keyword arguments.  Unless ``directly`` is set, a
    background thread (``use_thread=True``) or a worker started via
    ``run_process`` runs ``_emit_loop`` to drain the metrics queue.
    Extra ``**kwargs`` are accepted and ignored.

    Raises:
        ValueError: if ``prefix`` is not provided.
    """
    if prefix is None:
        raise ValueError("Metric prefix not set")
    # Guard against double initialisation within the same process;
    # a forked child (different pid) is allowed to re-init.
    global _inited_pid
    if _inited_pid == os.getpid():
        get_logger("metrics").error("metrics already started")
        return
    _inited_pid = os.getpid()
    global _debug
    _debug = debug
    global _directly
    _directly = directly
    global _emitter
    # Environment variables take precedence over the passed-in settings.
    db = InfluxDBClient(
        host=os.environ.get("INFLUXDB_HOST", influxdb_host),
        port=int(os.environ.get("INFLUXDB_PORT", influxdb_port)),
        udp_port=int(os.environ.get("INFLUXDB_UDP_PORT", influxdb_udp_port)),
        database=os.environ.get("INFLUXDB_DATABASE", influxdb_database),
        use_udp=use_udp,
        timeout=timeout,
    )
    _emitter = MetricsEmitter(db, prefix, batch_size=batch_size)
    if not _directly:
        if use_thread:
            # Daemon thread so the emitter never keeps the process alive.
            thread = threading.Thread(target=_emit_loop)
            thread.daemon = True
            thread.start()
        else:
            # NOTE(review): run_process presumably forks a worker; that is
            # only done from the main thread here — confirm whether calling
            # from a non-main thread is genuinely unsafe for run_process.
            if threading.current_thread() != threading.main_thread():
                get_logger("metrics").error(
                    "metrics called NOT from main thread")
                return
            run_process(_emit_loop, auto_quit=True)
    get_logger("metrics").info("metrics init successfully")