class ErrorRecord(logging.Handler):
    """Logging handler that forwards formatted records to the remote side.

    Every emitted record is formatted and enqueued on a Communication
    instance, which is responsible for shipping the log to the remote end.
    """

    def __init__(self):
        # Initialize the base Handler FIRST so handler state (level, lock,
        # formatter) exists before any record could be emitted. The original
        # code set attributes before calling the base initializer.
        logging.Handler.__init__(self)
        self.communication = Communication(get_conf())

    def emit(self, record):
        """Format *record* and enqueue it for remote transmission."""
        msg = self.format(record)
        self.communication.enqueue_log(msg)
def start_chitu():
    """Bootstrap the chitu service: transport workers, communication, watchdog.

    Loads the TOML configuration, configures logging, starts one daemon
    Transport thread per configured redis address, a Communication thread,
    and finally a WatchDog thread that supervises the workers.
    """
    # load all configs
    all_conf = get_conf('conf/conf.toml')

    # init log config
    setup_logging(all_conf['log_configuration'])

    thread_set = dict()
    queue = None
    workers = list()

    # one transport worker per configured redis database
    for redis_address in all_conf['redis']['address']:
        work = Transport(all_conf, redis_address)
        work.name = 'redis_' + str(redis_address['db'])
        thread = Thread(target=work.work, args=(), name=work.name)
        # daemon=True: worker threads must not block interpreter exit
        # (Thread.setDaemon is deprecated since Python 3.10)
        thread.daemon = True
        thread.start()
        workers.append(work)
        thread_set[work.name] = thread

    # start communication instance
    communication = Communication(all_conf)
    thread = Thread(target=communication.work, args=(),
                    name='%s' % communication.name)
    thread.daemon = True
    thread.start()

    # start watch instance; keep the WatchDog object and its thread in
    # separate variables instead of rebinding the same name
    watch = WatchDog(all_conf)
    watch_thread = Thread(target=watch.work, name='watchdog',
                          args=(thread_set, queue, workers))
    watch_thread.daemon = True
    watch_thread.start()
def __init__(self, conf, redis_address):
    """Set up the transport: pick the destination backend and connect to redis.

    :param conf: full application config dict; reads 'send_to_where' and,
        when the sink is influxdb, the 'influxdb' section.
    :param redis_address: redis connection settings passed to RedisWrapper.
    """
    # destination selector, e.g. 'influxdb' (other values handled elsewhere — TODO confirm)
    self.to_where = conf['send_to_where']
    self.redis = RedisWrapper(redis_address)
    self.data_original = None  # presumably holds the last raw payload; set later by the work loop — TODO confirm
    self.name = None  # assigned by the launcher (e.g. 'redis_<db>')
    self.communication = Communication(conf)
    if self.to_where == 'influxdb':
        # only create the influxdb client when it is the configured sink
        self.db = InfluxdbWrapper(conf['influxdb'])
def start_ziyan():
    """Bootstrap the ziyan service: checker/handler workers, senders, watchdog.

    Loads the TOML configuration, configures logging, starts daemon threads
    for the plugin checker/handler pair, the Sender and Communication
    instances, and a WatchDog thread supervising the workers.
    """
    from plugins.your_plugin import MyCheck, MyHandler

    # init queues shared between the producer (checker/handler) and sender side
    queue = {'data_queue': Queue(), 'sender': Queue()}

    # load all configs
    all_conf = get_conf('conf/conf.toml')

    # init log config
    setup_logging(all_conf['log_configuration'])

    # init instances
    checker = MyCheck(all_conf)
    handler = MyHandler(all_conf)
    sender = Sender(all_conf)
    communication = Communication(all_conf)

    # name instances
    checker.name = 'checker'
    handler.name = 'handler'
    sender.name = 'sender'

    # init work threads set
    workers = [checker, handler]
    thread_set = dict()

    # start worker threads; daemon=True so they don't block interpreter exit
    # (Thread.setDaemon is deprecated since Python 3.10)
    for worker in workers:
        thread = Thread(target=worker.work, args=(queue,), name=worker.name)
        thread.daemon = True
        thread.start()
        thread_set[worker.name] = thread

    # start the sender-side threads (communication + sender)
    send_set = [communication, sender]
    for send in send_set:
        thread = Thread(target=send.work, args=(queue,), name=send.name)
        thread.daemon = True
        thread.start()

    # start watch instance; keep the WatchDog object and its thread in
    # separate variables instead of rebinding the same name
    watch = WatchDog(all_conf)
    watch_thread = Thread(target=watch.work, name='%s' % watch.name,
                          args=(thread_set, queue, workers))
    watch_thread.daemon = True
    watch_thread.start()
def __init__(self, configuration):
    """Initialize the sender: redis connection, lua script path, log settings.

    :param configuration: full application config dict; reads the 'redis'
        and 'sender' sections.
    """
    self.redis_conf = configuration['redis']
    self.conf = configuration['sender']
    # path to the lua script used against redis — TODO confirm where it is loaded
    self.lua_path = self.conf['lua_path']
    # opens the redis connection as a side effect of construction
    self.connect_redis()
    # log format
    self.enque_log_flag = self.conf['enque_log']  # whether each enqueued item is logged — TODO confirm
    self.log_format = '\ntable_name: {}\nfields: {}\ntimestamp: {}\n'
    # init communication class (original note: "singleinstance" — presumably a singleton; verify)
    self.communication = Communication(configuration)
    self.name = None  # assigned by the launcher (e.g. 'sender')
def __init__(self, conf, redis_address=None):
    """Set up the stream transport: destination backend, redis group, sink client.

    :param conf: full application config dict; reads 'send_to_where',
        'data_stream', and the section matching the chosen sink
        ('influxdb' / 'kafka' / 'mqtt').
    :param redis_address: redis connection settings forwarded to RedisWrapper;
        NOTE(review): the default None is passed through as-is — confirm
        RedisWrapper accepts None.
    """
    self.to_where = conf['send_to_where']
    self.data_original = None  # presumably the last raw payload; set later by the work loop — TODO confirm
    self.name = None  # assigned by the launcher
    self.communication = Communication(conf)
    # Redis conf
    self.redis = RedisWrapper(redis_address)
    self.group = conf['data_stream']['group']
    self.consumer = conf['data_stream']['consumer']
    # create group for data_stream
    self.redis.addGroup(self.group)
    # select the output sink based on configuration
    if self.to_where == 'influxdb':
        self.db = InfluxdbWrapper(conf['influxdb'])
    elif self.to_where == 'kafka':
        self.db = self.initKafka(conf['kafka'])
    elif self.to_where == 'mqtt':
        # 'mqtt' section is optional; falls back to an empty config dict
        self.mqtt_conf = conf.get('mqtt', dict())
        self.mqtt_put_queue = Queue()  # buffer for outgoing mqtt messages — TODO confirm consumer
        self.mqtt = MqttWrapper(self.mqtt_conf)
def __init__(self):
    """Create the handler's Communication channel, then init the base Handler.

    NOTE(review): the base logging.Handler.__init__ runs *after* the
    communication attribute is assigned; consider calling it first so
    handler state (level, lock, formatter) exists before anything else.
    """
    self.communication = Communication(get_conf())
    logging.Handler.__init__(self)