Example #1
    def setUp(self):
        conf: dict = {
            "host": "127.0.0.1",
            "port": 6379,
            "db": 4,
        }
        self.client = RedisWrapper(conf)
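
The RedisWrapper implementation itself does not appear in any of these examples. For orientation only, here is a minimal sketch of a wrapper that accepts this kind of conf dict, assuming it simply forwards the keys to redis-py; the class name and method below are illustrative, not the project's actual code.

import redis


class RedisWrapperSketch:
    """Hypothetical wrapper holding a redis-py client built from a conf dict."""

    def __init__(self, conf):
        # Assumed mapping: the conf keys line up with StrictRedis keyword arguments.
        self.client = redis.StrictRedis(host=conf["host"],
                                        port=conf["port"],
                                        db=conf["db"])

    def rpush(self, name, *values):
        # Delegate list pushes straight to redis-py.
        return self.client.rpush(name, *values)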
Example #2
    def __init__(self, conf, redis_address):
        self.to_where = conf['send_to_where']
        self.redis = RedisWrapper(redis_address)
        self.data_original = None
        self.name = None
        self.communication = Communication(conf)

        if self.to_where == 'influxdb':
            self.db = InfluxdbWrapper(conf['influxdb'])
Example #3
class Upload:
    def __init__(self, conf):
        self.__redis = RedisWrapper(conf["redis"])

    def on_get(self, req, resp):
        self.__redis.rpush("order_name", "upload")
        resp.body = json.dumps("Upload configuration now, wait please")
        resp.content_type = "application/json"
        resp.status = falcon.HTTP_200
Example #4
class Restart:
    def __init__(self, conf):
        self.__redis = RedisWrapper(conf["redis"])

    def on_get(self, req, resp):
        self.__redis.rpush("order_name", "restart")
        resp.body = json.dumps("Restart now, wait please")
        resp.content_type = "application/json"
        resp.status = falcon.HTTP_200

    def on_post(self, req, resp):
        pass
Example #5
    def __init__(self, conf):
        self.redis = RedisWrapper(conf['redis'])
        self.watchdog = WatchDog(conf)
        self.app = conf['application']
        self.data = dict()
        self._name = 'communication'
        self.log = list()
        self.hash = None

        # 20-12-14 zhy: startup error on Windows; communication was missing the self.paths attribute
        self.__init_paths()
        # Flush the cache on restart
        self.flush_data()
Example #6
    def __init__(self, conf):
        self.redis = RedisWrapper(conf['redis'])
        self.etcd = EtcdWrapper(conf['etcd'])
        self.etcd_interval_time = conf['etcd']['interval']
        self.watchdog = WatchDog(conf)
        self.node = conf['node']
        self.ip = conf['local_ip']
        self.app = conf['application']
        self.paths = conf['paths']
        self.data = dict()
        self._name = 'communication'
        self.log = list()
        self.hash = None

        # Flush the cache on restart
        self.flush_data()
Example #7
    def __init__(self, configuration):

        self.redis_conf = configuration['redis']
        self.conf = configuration['sender']
        self.lua_path = self.conf['lua_path']

        self.db = RedisWrapper(self.redis_conf)
        self.db.script_load(self.lua_path)

        # log format
        self.enque_log_flag = self.conf['enque_log']
        self.log_format = '\ntable_name: {}\nfields: {}\ntimestamp: {}\n'

        # init communication class (singleton instance)
        self.communication = Communication(configuration)

        self.name = None
Example #8
class SeverStatus:
    def __init__(self, conf):
        self.__redis = RedisWrapper(conf["redis"])

    def on_get(self, req, resp):
        data = self.__redis.hgetall("node_data")
        resp.body = json.dumps(data if data else {"data": "No data"})
        resp.content_type = "application/json"
        resp.status = falcon.HTTP_200
Example #9
    def __init__(self, conf, redis_address=None):
        self.to_where = conf['send_to_where']

        self.data_original = None
        self.name = None
        self.communication = Communication(conf)

        # Redis conf
        self.redis = RedisWrapper(redis_address)
        self.group = conf['data_stream']['group']
        self.consumer = conf['data_stream']['consumer']
        # create group for data_stream
        self.redis.addGroup(self.group)

        if self.to_where == 'influxdb':
            self.db = InfluxdbWrapper(conf['influxdb'])
        elif self.to_where == 'kafka':
            self.db = self.initKafka(conf['kafka'])
        elif self.to_where == 'mqtt':
            self.mqtt_conf = conf.get('mqtt', dict())
            self.mqtt_put_queue = Queue()
            self.mqtt = MqttWrapper(self.mqtt_conf)
Example #10
class NodeStatus:
    def __init__(self, conf):
        self.__redis = RedisWrapper(conf["redis"])

    def on_get(self, req, resp, node):
        resp.body = self.get_data(node)
        resp.content_type = "application/json"
        resp.status = falcon.HTTP_200

    def get_data(self, node):
        """
        Gets the status information of the specified device
        :param node: Device code
        :return: json data
        """
        data = self.__redis.hgetall("node_data")
        if data.get(node):
            url = "http://{}/status".format(data.get(node)['ip'])
            return requests.get(url).text
        else:
            return json.dumps({'data': "No data"})
Example #11
class TestRedis(unittest.TestCase):
    def setUp(self):
        conf: dict = {
            "host": "127.0.0.1",
            "port": 6379,
            "db": 4,
        }
        self.client = RedisWrapper(conf)

    def testAddGroup(self):
        group_name = "test_group"
        self.client.addGroup(group_name)

    def testReaddGroup(self):
        group_name = "test_group"
        consumer = "chitu"
        result = self.client.readGroup(group_name, consumer)
        print("==========readgroup===========")
        print(result)
        print("===============================")

    def testPending(self):
        group_name = "test_group"
        result = self.client.xPending(group_name)
        print(result)

    def testReadPending(self):
        group_name = "test_group"
        consumer = "chitu"
        id = "1571038514316-0"
        result = self.client.readPending(group_name, consumer, id)
        print(result)

    def testACK(self):
        group_name = "test_group"
        id = "1571041740221-0"
        self.client.ack(group_name, id)
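
The stream helpers exercised by this test (addGroup, readGroup, xPending, readPending, ack) are wrapper methods whose bodies are not shown. A rough sketch of how they might map onto redis-py's stream commands, purely as an assumption; the stream key data_stream is borrowed from the Transport example below, and the entry id is the one used in testACK above.

import redis
from redis import exceptions

STREAM = "data_stream"  # assumed stream key
r = redis.StrictRedis(host="127.0.0.1", port=6379, db=4)

# addGroup: create the consumer group, tolerating "already exists".
try:
    r.xgroup_create(STREAM, "test_group", id="$", mkstream=True)
except exceptions.ResponseError as err:
    if "BUSYGROUP" not in str(err):
        raise

# readGroup: fetch new entries (">") for a named consumer.
new_entries = r.xreadgroup("test_group", "chitu", {STREAM: ">"}, count=1)

# xPending: summary of delivered-but-unacknowledged entries.
pending_summary = r.xpending(STREAM, "test_group")

# readPending: re-read this consumer's pending entries starting from an id.
pending_entries = r.xreadgroup("test_group", "chitu", {STREAM: "0"})

# ack: acknowledge a processed entry id.
r.xack(STREAM, "test_group", "1571041740221-0")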
Example #12
class Transport:
    def __init__(self, conf, redis_address=None):
        self.to_where = conf['send_to_where']

        self.data_original = None
        self.name = None
        self.communication = Communication(conf)

        # Redis conf
        self.redis = RedisWrapper(redis_address)
        self.group = conf['data_stream']['group']
        self.consumer = conf['data_stream']['consumer']
        # create group for data_stream
        self.redis.addGroup(self.group)

        if self.to_where == 'influxdb':
            self.db = InfluxdbWrapper(conf['influxdb'])
        elif self.to_where == 'kafka':
            self.db = self.initKafka(conf['kafka'])
        elif self.to_where == 'mqtt':
            self.mqtt_conf = conf.get('mqtt', dict())
            self.mqtt_put_queue = Queue()
            self.mqtt = MqttWrapper(self.mqtt_conf)

    def initKafka(self, conf):
        while True:
            try:
                client = KafkaWrapper(conf)
                return client

            except Exception as err:
                log.exception(err)
                log.error("Can't init Kafka client, try again...")
                time.sleep(1)

    def work(self, *args):
        while True:
            # get and decompress data
            try:
                bin_data = self.getData()
                raw_data = self.unpack(bin_data)
                log.debug('Data from redis: {}'.format(raw_data))
            except exceptions.ResponseError as e:
                # NOGROUP for data_stream, recreate it.
                if "NOGROUP" in str(e):
                    log.error('{}, recreate group: {}'.format(
                        str(e), self.group))
                    self.redis.addGroup(self.group)
                raw_data = None
            except Exception as err:
                log.exception(err)
                raw_data = None

            # compress and send data
            if raw_data:
                data = self.pack(raw_data["data"])
                try:
                    # send data and ack data id
                    log.debug("Send data to {}.".format(self.to_where))
                    self.send(data)
                    log.debug("Redis ack data.")
                    self.redis.ack(self.group, raw_data["id"])
                except Exception as err:
                    log.exception(err)
                    time.sleep(3)

    def pending(self, *args):
        while True:
            try:
                pending_data = self.getPendingData()

            except exceptions.ResponseError as err:
                # NOGROUP for data_stream, recreate it.
                if "NOGROUP" in str(err):
                    log.exception(err)
                    self.redis.addGroup(self.group)
                pending_data = []
            except Exception as err:
                log.exception(err)
                pending_data = []

            if pending_data:
                for d in pending_data:
                    raw_data = self.unpack(d)
                    if raw_data:
                        data = self.pack(raw_data["data"])
                        try:
                            # send data and ack data id
                            log.debug("Send pending data to {}".format(
                                self.to_where))
                            self.send(data)
                            log.debug("Redis ack pending data.")
                            self.redis.ack(self.group, raw_data["id"])
                        except Exception as err:
                            log.exception(err)
                            log.debug(f'Err data is: {data}')
                            time.sleep(3)
            else:
                time.sleep(5)
                log.debug("No pending data.")

    def unpack(self, data):
        """
        Get data from redis and unpack it
        :return: dict
        """
        # data_len = self.redis.get_len("data_queue")
        if data:
            try:
                data["data"] = msgpack.unpackb(data["data"])
                raw_data = data["data"]
                for k in list(raw_data.keys()):
                    v = raw_data[k]
                    raw_data.pop(k)
                    raw_data[k.decode('utf-8')] = msgpack.unpackb(
                        v, encoding='utf-8')
                data["data"] = raw_data
            except Exception as err:
                traceback.print_exc()
                log.exception(err)
        else:
            log.info('Redis has no new data.')
            time.sleep(5)
        return data

    def pack(self, data):
        """
        Converts the data to the format required for the corresponding database
        :param data: dict
        :return: the format data, usually dict
        """
        if data:
            if self.to_where == 'influxdb':
                try:
                    measurement = data['table_name']
                    tags = data['fields'].pop('tags')

                    unit = data['fields'].pop('unit')

                    fields = data['fields']
                    timestamp = data['time']

                    json_data = [{
                        'measurement': measurement,
                        'tags': tags,
                        'time': timestamp,
                        'fields': fields,
                        'unit': unit
                    }]
                    return json_data
                except Exception as err:
                    log.exception(err)
            elif self.to_where == 'kafka':
                try:
                    json_data = self.db.pack(data)
                    return json_data
                except Exception as err:
                    traceback.print_exc()
                    log.exception(err)
            elif self.to_where == 'mqtt':
                try:
                    schema, table = data.get('table_name').split('.')
                    data['fields'].pop('tags', None)
                    data['fields'].pop('unit', None)
                    data.pop('table_name', None)

                    fields = data['fields']
                    ts = data['time']
                    timestamp = datetime.fromtimestamp(ts).strftime(
                        "%Y-%m-%d %H:%M:%S")

                    json_data = {
                        "timestamp": timestamp,
                        "schema": schema,
                        "table": table,
                        "deviceid": table,
                        "fields": fields
                    }
                    return json_data
                except Exception as err:
                    raise err
            else:
                data['fields'].pop('tags')
                data['fields'].pop('unit')

    def send(self, data):
        """
        Send to the corresponding database
        :param data: dict
        :return: None
        """
        if self.to_where == 'influxdb':
            time_precision = data[0].pop('unit')
            try:
                info = self.db.send(data, time_precision)
            except InfluxDBClientError as e:
                timestamp = data[0]['time'] / 1000000
                t_string = datetime.utcfromtimestamp(timestamp).strftime(
                    '%Y-%m-%d %H:%M:%S')
                # 2021-07-19 zhy: prevent data beyond the retention policy from blocking chitu's data transfer.
                if 'points beyond retention policy dropped' in str(e):
                    log.warning(
                        'Data beyond influx retention policy, timestamp is: {}, means {}'
                        .format(timestamp, t_string))
                    log.warning('Data is: {}'.format(data))
                    log.warning('Drop it.')
                    info = 'Drop data because it is beyond the influx retention policy.'
                # https://10.7.0.117:9091/mabo_group/base_application/doctopus/issues/3
                # 2021-07-19 zhy: drop data whose parse fails for unknown reasons, to prevent blocking.
                elif 'invalid field format' in str(e):
                    log.warning(
                        "Data parse error, influx can't receive it, timestamp is: {}, means {}"
                        .format(timestamp, t_string))
                    log.warning('Data is: {}'.format(data))
                    log.warning('Drop it.')
                    info = "Drop data because of a parse error, influx can't receive it."
                else:
                    raise e

            self.communication.data[data[0]["measurement"]] = data
            if info:
                log.info('Send data to influxdb.{}, {}'.format(
                    data[0]['measurement'], info))
            else:
                raise Exception("\nCan't connect influxdb")

        elif self.to_where == 'kafka':
            try:
                log.debug("Stuck here in send kafka data.")
                self.db.sendMessage(data)
                log.info('Send data to kafka: {}'.format(
                    data["dims"]["data_name"]))
            except Exception as e:
                raise e
        elif self.to_where == 'mqtt':
            self.mqtt_put_queue.put(data)
            try:
                self.mqtt.pubMessage(self.mqtt_put_queue)
                log.info('Publish data to MQTT topics({}): {}'.format(
                    self.mqtt_conf.get('topics', list()), data))
            except Exception as err:
                log.exception(err)

    def getData(self):
        """Get data from data_stream
        return:
            data: dict; {id:string, data:bytes}
        """
        data = self.redis.readGroup(self.group, self.consumer)
        if not data:
            return None
        else:
            id, raw = data[0][1][0]
            return {
                "id": id.decode(),
                "data": raw[b'data'],
            }

    def getPendingData(self):
        """Get pending data from data_stream

        return:
            data: list of dict {"id": string, "data": bytes}
        """
        data = self.redis.readPending(self.group, self.consumer, 0)
        res = []
        if not data:
            return res
        else:
            for v in data[0][1]:
                id, raw = v
                res.append({"id": id.decode(), "data": raw[b'data']})
            return res

    def reque_data(self):
        """
        return data to redis
        :return:
        """
        self.redis.queue_back('data_queue', self.data_original)

    def re_load(self):
        conf = get_conf()
        self.to_where = conf['send_to_where']
        self.data_original = None
        self.name = None

        if self.to_where == 'influxdb':
            self.db = InfluxdbWrapper(conf['influxdb'])
        elif self.to_where == 'kafka':
            self.db = KafkaWrapper(conf['kafka'])
        elif self.to_where == 'mqtt':
            self.mqtt_conf = conf.get('mqtt', dict())
            self.mqtt = MqttWrapper(self.mqtt_conf)
            self.mqtt_put_queue = Queue()
        return self
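
For orientation, getData() and getPendingData() above index into the reply that redis-py returns for XREADGROUP. A sketch of that shape, assuming RedisWrapper.readGroup passes the reply through unchanged; the payload bytes are a placeholder and the entry id is the one from the test example above.

# Assumed shape of the value getData() receives from readGroup():
reply = [
    (b"data_stream",                        # reply[0][0]: stream key
     [(b"1571038514316-0",                  # reply[0][1][0][0]: entry id
       {b"data": b"<msgpack bytes>"})]),    # reply[0][1][0][1]: field dict
]

entry_id, raw = reply[0][1][0]
record = {"id": entry_id.decode(), "data": raw[b"data"]}
print(record["id"])  # 1571038514316-0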
Example #13
class Sender(object):
    """
    send data to redis and watchdog
    """

    def __init__(self, configuration):

        self.redis_conf = configuration['redis']
        self.conf = configuration['sender']
        self.lua_path = self.conf['lua_path']

        self.db = RedisWrapper(self.redis_conf)
        self.db.script_load(self.lua_path)

        # log format
        self.enque_log_flag = self.conf['enque_log']
        self.log_format = '\ntable_name: {}\nfields: {}\ntimestamp: {}\n'

        # init communication class (singleton instance)
        self.communication = Communication(configuration)

        self.name = None

    def work(self, queue, **kwargs):
        """
        send data to redis and watchdog
        :param queue: 
        :param kwargs: 
        :return: 
        """
        sender_pipe = queue['sender']
        while True:
            data = sender_pipe.get()
            # pack and send data to redis and watchdog
            self.pack(data)
            self.send_to_communication(data)

    def pack(self, data):
        """
        pack data and send data to redis
        :param data: 
        :return: 
        """
        table_name = data['table_name']
        fields = data['fields']
        timestamp = data['timestamp']

        if 'unit' in fields.keys():
            if fields['unit'] == 's':
                date_time = pendulum.from_timestamp(timestamp, tz='Asia/Shanghai').to_datetime_string()
            else:
                date_time = pendulum.from_timestamp(timestamp / 1000000, tz='Asia/Shanghai').to_datetime_string()
        else:
            date_time = pendulum.from_timestamp(timestamp, tz='Asia/Shanghai').to_datetime_string()

        log_str = self.log_format.format(table_name, fields, date_time)
        # show log or not
        if self.enque_log_flag:
            log.info(log_str)
        # pack data by msgpack ready to send to redis
        table_name = msgpack.packb(table_name)
        fields = msgpack.packb(fields)
        timestamp = msgpack.packb(timestamp)
        # send data to redis
        try:
            lua_info = self.db.enqueue(table_name=table_name, fields=fields, timestamp=timestamp)
            log.info('\n' + lua_info.decode())
        except Exception as e:
            log.error("\n%s", e)

    def send_to_communication(self, data):
        """
        send data to communication instance(singleinstance)
        :param data: 
        :return: 
        """
        self.communication.data[data["table_name"]] = data
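
Sender.pack() above serializes each part of a record separately with msgpack before handing it to the Lua enqueue script, which is why Transport.unpack() has to decode the payload value by value on the consuming side. A minimal round trip under that assumption; the sample values are made up, and raw=False is the current msgpack spelling of the deprecated encoding='utf-8' argument used elsewhere in these examples.

import msgpack

# Pack the parts separately, as Sender.pack() does.
table_name = msgpack.packb("sensor.temperature")
fields = msgpack.packb({"value": 21.5, "unit": "s"})
timestamp = msgpack.packb(1571038514)

# Unpack them again on the consuming side, as Transport.unpack() does.
print(msgpack.unpackb(table_name, raw=False))  # 'sensor.temperature'
print(msgpack.unpackb(fields, raw=False))      # {'value': 21.5, 'unit': 's'}
print(msgpack.unpackb(timestamp))              # 1571038514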
Example #14
class Communication(object):
    INSTANCE = None
    Lock = threading.RLock()

    def __new__(cls, *args, **kwargs):
        """
        Double-checked locking keeps the singleton thread-safe; the intermediate
        variable ensures the first initialization completes before the instance is shared.
        :param args:
        :param kwargs:
        :return:
        """
        _instance = None
        if not cls.INSTANCE:
            try:
                cls.Lock.acquire()
                if not cls.INSTANCE:
                    _instance = object.__new__(cls)
                    cls.INSTANCE = _instance
            finally:
                cls.Lock.release()
        return cls.INSTANCE

    def __init__(self, conf):
        self.redis = RedisWrapper(conf['redis'])
        self.watchdog = WatchDog(conf)
        self.app = conf['application']
        self.data = dict()
        self._name = 'communication'
        self.log = list()
        self.hash = None

        # 20-12-14 zhy: startup error on Windows; communication was missing the self.paths attribute
        self.__init_paths()
        # Flush the cache on restart
        self.flush_data()

    def __init_paths(self):
        if self.app == "ziyan":
            self.paths = [
                './conf/conf.toml', './lua/enque_script.lua',
                './plugins/your_plugin.py'
            ]
        elif self.app == "chitu":
            self.paths = ['./conf/conf.toml']
        else:
            self.paths = []

    @property
    def name(self):
        return self._name

    def work(self, *args):
        """
        Get the event loop and start the asynchronous loop
        :param args:
        :return:
        """
        if platform.system() == "Windows":
            gevent.joinall(
                [gevent.spawn(self.handle),
                 gevent.spawn(self.monitor)])
        else:
            gevent.joinall([
                gevent.spawn(self.handle),
            ])

    def handle(self):
        """
        Asynchronous method that listens for commands issued via redis
        :return:
        """
        while True:
            # Fetch external commands and handle them
            command = self.check_order()

            if command == b'get_status':
                self.write_into_local()

            elif command == b'restart':
                self.watchdog.restart = True
                self.flush_data()

            elif command == b'reload':
                self.watchdog.reload = True
                self.flush_data()
                self.re_load()

            gevent.sleep(0.5)

    def check_order(self):
        """
        Get outside command
        :return: None or order
        """
        try:
            if self.redis.get_len("order_name") > 0:
                return self.redis.dequeue("order_name")
        except Exception as err:
            log.exception(err)

    def write_into_local(self):
        """
        Write the data to local redis
        :return: None
        """
        if self.app == 'ziyan':
            status = {
                'data': self.data,
                'log': self.log,
                'check_restart_time': self.watchdog.check_restart_num,
                'handle_restart_time': self.watchdog.handle_restart_num,
                'real_time_thread_name': self.watchdog.thread_real_time_names
            }
        else:
            status = {
                'data': self.data,
                'log': self.log,
                'transport_restart_time': self.watchdog.transport_restart_num,
                'real_time_thread_name': self.watchdog.thread_real_time_names
            }
        try:
            self.redis.sadd('status', status)
            self.redis.expire('status', 60 * 5)
        except Exception as err:
            log.exception(err)

    def flush_data(self):
        """
        Delete the existing key "status"
        :return:
        """
        try:
            if self.redis.exists("status"):
                self.redis.delete("status")
        except Exception as err:
            log.exception(err)

    def enqueue_log(self, msg):
        """
        Keep a fixed-length history of log messages
        :param msg: str, log
        :return:
        """
        if len(self.log) < 10:
            self.log.append(msg)
        else:
            self.log.pop(0)
            self.log.append(msg)

    def re_load(self):
        conf = get_conf()
        self.app = conf['application']

    def monitor(self):
        while True:
            data = ""
            for file in self.paths:
                data += str(os.stat(file).st_mtime)
            sha = hashlib.sha1(data.encode()).hexdigest()
            if not self.hash:
                self.hash = sha
            elif self.hash != sha:
                self.hash = sha
                self.redis.rpush("order_name", "reload")
            gevent.sleep(30)
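
Communication.__new__ above uses double-checked locking so that concurrent constructions still yield one shared instance. The same pattern in isolation, as a minimal sketch with an illustrative class name:

import threading


class SingletonSketch:
    """Minimal double-checked locking singleton, mirroring Communication.__new__."""
    INSTANCE = None
    Lock = threading.RLock()

    def __new__(cls, *args, **kwargs):
        if not cls.INSTANCE:          # fast path: skip the lock once the instance exists
            with cls.Lock:
                if not cls.INSTANCE:  # re-check while holding the lock
                    cls.INSTANCE = object.__new__(cls)
        return cls.INSTANCE


assert SingletonSketch() is SingletonSketch()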
Example #15
class Transport:
    def __init__(self, conf, redis_address):
        self.to_where = conf['send_to_where']
        self.redis = RedisWrapper(redis_address)
        self.data_original = None
        self.name = None
        self.communication = Communication(conf)

        if self.to_where == 'influxdb':
            self.db = InfluxdbWrapper(conf['influxdb'])

    def work(self, *args):
        while True:
            raw_data = self.unpack()

            data = self.pack(raw_data)
            if data:
                try:
                    self.send(data)
                except Exception as e:
                    log.error("\n%s", e)
                    self.reque_data()
                    time.sleep(3)

    def unpack(self):
        """
        Get data from redis and unpack it
        :return: dict
        """
        data_len = self.redis.get_len("data_queue")
        data = dict()

        if data_len > 0:
            self.data_original = self.redis.dequeue("data_queue")
            try:
                for k, v in msgpack.unpackb(self.data_original).items():
                    if v == True:
                        continue
                    else:
                        data[k.decode('utf-8')] = msgpack.unpackb(
                            v, encoding='utf-8')
            except Exception as e:
                log.error("\n%s", e)
        else:
            log.info('redis has no data')
            time.sleep(5)
        return data

    def pack(self, data):
        """
        Converts the data to the format required for the corresponding database
        :param data: dict
        :return: the format data, usually dict
        """
        if data:
            if self.to_where == 'influxdb':
                try:
                    measurement = data['table_name']
                    tags = data['fields'].pop('tags')

                    unit = data['fields'].pop('unit')

                    fields = data['fields']
                    timestamp = data['time']

                    json_data = [{
                        'measurement': measurement,
                        'tags': tags,
                        'time': timestamp,
                        'fields': fields,
                        'unit': unit
                    }]
                    return json_data
                except Exception as e:
                    log.error("\n%s", e)
            else:
                data['fields'].pop('tags')
                data['fields'].pop('unit')

    def send(self, data):
        """
        Send to the corresponding database
        :param data: dict
        :return: None
        """
        if self.to_where == 'influxdb':
            time_precision = data[0].pop('unit')
            info = self.db.send(data, time_precision)
            self.communication.data[data[0]["measurement"]] = data
            if info:
            log.info('send data to influxdb.{}, {}'.format(
                    data[0]['measurement'], info))
            else:
                raise Exception("\nCan't connect influxdb")

    def reque_data(self):
        """
        return data to redis
        :return:
        """
        self.redis.queue_back('data_queue', self.data_original)

    def re_load(self):
        conf = get_conf()
        self.to_where = conf['send_to_where']
        self.data_original = None
        self.name = None

        if self.to_where == 'influxdb':
            self.db = InfluxdbWrapper(conf['influxdb'])
        return self
Example #16
    def connect_redis(self):
        self.db = RedisWrapper(self.redis_conf)
        self.db.script_load(self.lua_path)
Example #17
class Status:
    def __init__(self, conf):
        redis_conf = conf['redis']
        web_conf = conf['web']
        self.__redis = RedisWrapper(redis_conf)
        self.set_name = web_conf.get('set_name', 'status')
        self.order_name = web_conf.get('order_status', 'get_status')

    def on_get(self, req, resp):
        """
        1. Check redis for the set we need.
        2. If it is missing, put a get_status order in redis.
        3. Listen to redis and take the set once it appears.
        4. If the flush parameter is given, refresh the history cache first.
        :param req:
        :param resp:
        :return:
        """
        if req.params.get("flush"):
            self.__redis.delete(self.set_name)

        check_data = self.__redis.smembers(self.set_name)
        if check_data:
            resp.body = check_data.pop()

        else:
            self.__put_order(self.order_name)
            data = self.__listen()

            if data:
                data = data.pop().decode("utf-8")
                resp.body = data
            else:
                resp.body = json.dumps({'data': "No data"})

        resp.content_type = 'application/json'
        resp.status = falcon.HTTP_200

    def on_post(self, req, resp):
        pass

    def __put_order(self, order_name):
        """
        put get_status order in redis
        :param order_name: status order_name
        :return:
        """
        self.__redis.rpush("order_name", order_name)

    def __listen(self, timeout=3):
        """
        Check the configured set_name in redis and return its value if it is not None
        :param timeout:
        :return:
        """
        start_time = time.time()
        while True:
            data = self.__redis.smembers(self.set_name)
            if data or time.time() - start_time > timeout:
                break
            time.sleep(0.25)

        return data
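
The resource classes in these examples (Status, Upload, Restart, SeverStatus, NodeStatus) follow falcon's on_get/on_post responder convention, but none of the snippets shows how they are mounted. A minimal wiring sketch, assuming a conf dict of the shape used above and a pre-3.0 falcon (where resp.body is still valid); the route paths and port are illustrative.

import falcon
from wsgiref.simple_server import make_server

# Hypothetical conf of the shape the resources above expect.
conf = {
    "redis": {"host": "127.0.0.1", "port": 6379, "db": 4},
    "web": {"set_name": "status", "order_status": "get_status"},
}

# Status, Upload, and Restart are the resource classes defined in the examples above.
api = falcon.API()  # falcon 2.x style, matching the resp.body usage above
api.add_route("/status", Status(conf))
api.add_route("/upload", Upload(conf))
api.add_route("/restart", Restart(conf))

if __name__ == "__main__":
    # Development server only; a production setup would use a real WSGI server.
    make_server("0.0.0.0", 8000, api).serve_forever()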
Example #18
    def __init__(self, conf):
        self.__redis = RedisWrapper(conf["redis"])
Example #19
class Communication(object):
    INSTANCE = None
    Lock = threading.RLock()

    def __new__(cls, *args, **kwargs):
        """
        Double-checked locking keeps the singleton thread-safe; the intermediate
        variable ensures the first initialization completes before the instance is shared.
        :param args:
        :param kwargs:
        :return:
        """
        _instance = None
        if not cls.INSTANCE:
            try:
                cls.Lock.acquire()
                if not cls.INSTANCE:
                    _instance = object.__new__(cls)
                    cls.INSTANCE = _instance
            finally:
                cls.Lock.release()
        return cls.INSTANCE

    def __init__(self, conf):
        self.redis = RedisWrapper(conf['redis'])
        self.etcd = EtcdWrapper(conf['etcd'])
        self.etcd_interval_time = conf['etcd']['interval']
        self.watchdog = WatchDog(conf)
        self.node = conf['node']
        self.ip = conf['local_ip']
        self.app = conf['application']
        self.paths = conf['paths']
        self.data = dict()
        self._name = 'communication'
        self.log = list()
        self.hash = None

        # Flush the cache on restart
        self.flush_data()

    @property
    def name(self):
        return self._name

    def work(self, *args):
        """
        Get the event loop and start the asynchronous loop
        :param args:
        :return:
        """
        loop = asyncio.SelectorEventLoop()
        asyncio.set_event_loop(loop)
        loop.call_soon_threadsafe(loop.create_task, self.handle())
        loop.call_soon_threadsafe(loop.create_task, self.write_into_remote())
        if platform.system() == "Windows":
            loop.call_soon_threadsafe(loop.create_task, self.monitor())
        loop.run_forever()

    async def handle(self):
        """
        Asynchronous method that listens for commands issued via redis
        :return:
        """
        while True:
            # Fetch external commands and handle them
            command = self.check_order()

            if command == b'get_status':
                self.write_into_local()

            elif command == b'restart':
                self.watchdog.restart = True
                self.flush_data()

            elif command == b'reload':
                self.watchdog.reload = True
                self.flush_data()
                self.re_load()

            elif command == b'upload':

                for path in self.paths:
                    self.upload(path)
            await asyncio.sleep(0.5)

    def check_order(self):
        """
        Get outside command
        :return: None or order
        """
        try:
            if self.redis.get_len("order_name") > 0:
                return self.redis.dequeue("order_name")
        except Exception as e:
            log.error("\n%s", e)

    def write_into_local(self):
        """
        Write the data to local redis
        :return: None
        """
        if self.app == 'ziyan':
            status = {
                'node': self.node,
                'data': self.data,
                'log': self.log,
                'check_restart_time': self.watchdog.check_restart_num,
                'handle_restart_time': self.watchdog.handle_restart_num,
                'real_time_thread_name': self.watchdog.thread_real_time_names
            }
        else:
            status = {
                'node': self.node,
                'data': self.data,
                'log': self.log,
                'transport_restart_time': self.watchdog.transport_restart_num,
                'real_time_thread_name': self.watchdog.thread_real_time_names
            }
        try:
            self.redis.sadd('status', status)
            self.redis.expire('status', 60 * 5)
        except Exception as e:
            log.error("\n%s", e)

    def upload(self, path):
        """
        Upload the file at the given path
        :param path: 
        :return: 
        """
        key = "/nodes/" + self.node + "/" + self.app
        if 'toml' in path:
            key += '/conf'
        elif 'py' in path:
            key += '/code'
        elif 'lua' in path:
            key += '/lua'

        try:
            with open(path, 'rb') as f:
                self.etcd.write(key, f.read())

        except Exception as e:
            log.error("\n%s", e)

    def flush_data(self):
        """
        Delete the existing key "status"
        :return:
        """
        try:
            if self.redis.exists("status"):
                self.redis.delete("status")
        except Exception as e:
            log.error("\n%s", e)

    async def write_into_remote(self):
        """
        Asynchronous method that registers the current status with the remote etcd every 10 minutes
        :return:
        """
        while True:
            if self.app == 'ziyan':
                status = {
                    'node':
                    self.node,
                    'data':
                    self.data,
                    'log':
                    self.log,
                    'check_restart_time':
                    self.watchdog.check_restart_num,
                    'handle_restart_time':
                    self.watchdog.handle_restart_num,
                    'real_time_thread_name':
                    list(self.watchdog.thread_real_time_names)
                }
            else:
                status = {
                    'node':
                    self.node,
                    'data':
                    self.data,
                    'log':
                    self.log,
                    'transport_restart_time':
                    self.watchdog.transport_restart_num,
                    'real_time_thread_name':
                    list(self.watchdog.thread_real_time_names)
                }

            key = "/nodes/" + self.node + "/" + self.app + "/status"
            try:
                self.etcd.write(key, json.dumps(status))
                log.debug("\nkey: %s\ndata: %s\n", key, status)
            except Exception as e:
                log.error("\n%s", e)

            await asyncio.sleep(self.etcd_interval_time)

    def enqueue_log(self, msg):
        """
        Keep a fixed-length history of log messages
        :param msg: str, log
        :return:
        """
        if len(self.log) < 10:
            self.log.append(msg)
        else:
            self.log.pop(0)
            self.log.append(msg)

    def re_load(self):
        conf = get_conf()
        self.node = conf['node']
        self.ip = conf['local_ip']
        self.app = conf['application']
        self.paths = conf['paths']
        self.etcd_interval_time = conf['etcd']['interval']

    async def monitor(self):
        while True:
            data = ""
            for file in self.paths:
                data += str(os.stat(file).st_mtime)
            sha = hashlib.sha1(data.encode()).hexdigest()
            if not self.hash:
                self.hash = sha
            elif self.hash != sha:
                self.hash = sha
                self.redis.rpush("order_name", "reload")
            await asyncio.sleep(30)
Example #20
    def __init__(self, conf):
        redis_conf = conf['redis']
        web_conf = conf['web']
        self.__redis = RedisWrapper(redis_conf)
        self.set_name = web_conf.get('set_name', 'status')
        self.order_name = web_conf.get('order_status', 'get_status')