Example #1
0
 def insert_log(self, ch, method, properties, body):
     """Buffer an incoming RabbitMQ message and bulk-insert into Mongo.

     Messages accumulate in ``self.insert_datas`` and are flushed when the
     buffer reaches ``init.MAX_INSERT_COUNT`` entries or when
     ``init.INSERT_INETRVAL_TIME`` has elapsed since the last flush.
     Buffered delivery tags are acked only after a successful insert, so a
     failed insert leaves the messages un-acked for broker redelivery.

     :param ch: pika channel the message arrived on
     :param method: pika method frame (carries ``delivery_tag``)
     :param properties: pika message properties (unused)
     :param body: raw message bytes containing a UTF-8 JSON document
     """
     self.insert_datas.append(json.loads(str(body, encoding="utf-8")))
     self.delivery_tags.append(method.delivery_tag)
     is_ack = False
     global insert_interval_time_stamp
     now_time_stamp = Time.getNowTimeStamp()
     interval_time = now_time_stamp - insert_interval_time_stamp
     if len(
             self.insert_datas
     ) >= init.MAX_INSERT_COUNT or interval_time > init.INSERT_INETRVAL_TIME:
         insert_interval_time_stamp = Time.getNowTimeStamp()
         try:
             # %d, not %f: len() is an int
             L.info("will insert data count: is %d", len(self.insert_datas))
             mongo_instance = Mongo.getInstance(table="YXYBB_interface",
                                                ds="YXYBB")
             collection = mongo_instance.getCollection()
             collection.insert_many(self.insert_datas)
             mongo_instance.close()
             self.insert_datas = []
             # Only ack after the batch is persisted.  The original set
             # is_ack before the try block, acking (and losing) messages
             # even when the insert raised.
             is_ack = True
         except Exception as e:
             L.warning("insert_log Exception %s", e)
     if is_ack:
         for delivery_tag in self.delivery_tags:
             ch.basic_ack(delivery_tag=delivery_tag)
         self.delivery_tags = []
    def line_test(self, data):
        """Build line-chart data for one or more legends.

        For each legend entry a per-project Mongo collection is opened and
        passed, together with the legend's filter conditions, to the Line
        chart builder.  All connections opened here are closed after the
        chart data has been produced.

        :param data: request dict with keys ``title_text``, ``step_count``,
            ``step`` (minutes), ``type`` (chart type) and ``legend_infos``
            (per-legend query info)
        :return: serialized PR bytes wrapping the chart data
        """
        _PR = PR.getInstance()

        _title_text = data['title_text']  # chart title
        _step_count = data["step_count"]  # number of x-axis points
        _step = data['step']  # step length (unit: minutes)
        _type = data['type']  # chart type
        _legend_infos = data['legend_infos']  # per-legend query info
        print(_legend_infos)
        _legend_datas = _legend_infos.keys()  # legend names

        _search_filter_infos = {}
        _mongo_instances = []  # every instance opened in the loop
        for _legend_data in _legend_datas:
            _legend_info = _legend_infos[_legend_data]
            _project_name = _legend_info['project_name']  # project name
            _statistic_type = _legend_info['statistic_type']  # statistic type
            _statistic_name = _legend_info['statistic_name']  # statistic name
            _filter_infos = _legend_info['filter_infos']  # filter conditions
            print(_filter_infos)
            ds = logic.project_ds_info[_project_name]  # data-source lookup
            table = _project_name + "_" + _statistic_type  # e.g. YXYBB_interface
            self_mongo_instance = Mongo.getInstance(table=table, ds=ds)
            _mongo_instances.append(self_mongo_instance)
            self_collection = self_mongo_instance.getCollection()

            _search_filter_infos[_legend_data] = {
                "project_name": _project_name,
                "self_collection": self_collection,  # extra data-source conn
                "filter_infos": _filter_infos,  # filter conditions
                "statistic_type": _statistic_type,  # statistic type
                "statistic_name": _statistic_name  # statistic name
            }
        _result = Line.getInsatnce(search_filter_infos=_search_filter_infos,
                                   _step=_step,
                                   _step_count=_step_count,
                                   _title_text=_title_text,
                                   _type=_type).getLineChartData()
        # Close every connection opened above.  The original closed only the
        # last instance (leaking the rest) and raised UnboundLocalError when
        # legend_infos was empty.
        for _instance in _mongo_instances:
            _instance.close()
        _PR.setResult(_result)
        return _PR.getPRBytes()
Example #3
0
 def init_DB_ds(self):
     """(Re)build the ``project_ds`` mapping table from conf/DB.json.

     Reads the DB.json configuration, wipes the ``project_ds`` collection,
     and inserts one ``{ds_code, project}`` document per configured data
     source.
     """
     path = P.confDirPath + os.sep + "DB.json"
     DB_infos = J.readFile(path)
     mongo_instance = Mongo.getInstance(table="project_ds", ds='base')
     collection = mongo_instance.getCollection()
     collection.remove({})  # wipe the table before re-seeding
     datas = []
     for key, info in DB_infos.items():
         value = info['dbname']
         datas.append({"ds_code": key, "project": value})
         # lazy %-formatting instead of eager string concatenation
         L.info("init DB_ds , insert data:%s:%s", key, value)
     if datas:  # insert_many raises InvalidOperation on an empty list
         collection.insert_many(datas)
     mongo_instance.close()
    def getTasks(self):
        """Build the list of statistic-compute tasks, grouped by step.

        Reads every document from the ``statistical_item`` table sorted by
        ``statistical_step`` and groups consecutive items sharing the same
        step into one ``{"method": ..., "pars": [...]}`` task.

        Item document fields:
            id: primary key
            project_name: project name
            createtime / updatetime: record create / update time
            statistical_type: statistic type
            statistical_lastTime: time of the last computation
            statistical_startTime: computation start time
            statistical_step: computation frequency
            statistical_name: statistic name

        :return: list of task dicts, or ``None`` when no queue is configured
        """
        if self.queue is None:
            L.error("task publish not the right queue")
            return None
        tasks = []
        task_mongo_instance = Mongo.getInstance(table="statistical_item",
                                                ds='base')
        task_collection = task_mongo_instance.getCollection()
        statistical_datas = task_collection.find({}).sort(
            "statistical_step", 1)
        statistical_step_min = 0
        statistical_step_datas = []
        # The cursor is already sorted above; the original re-applied
        # .sort("statistical_step") redundantly here.
        for statistical_data in statistical_datas:
            statistical_step = statistical_data['statistical_step']
            # "is 0" relied on CPython small-int interning; use == for value
            # comparison.
            if statistical_step_min == statistical_step or statistical_step_min == 0:
                statistical_step_min = statistical_step
                statistical_step_datas.append(statistical_data)
            else:
                # step changed: flush the current group as one task
                tasks.append({
                    "method": self.statistical_compute,
                    "pars": statistical_step_datas.copy()
                })
                statistical_step_min = statistical_step
                del statistical_step_datas[:]
                statistical_step_datas.append(statistical_data)
        if statistical_step_datas:  # flush the last group; skip when empty
            tasks.append({
                "method": self.statistical_compute,
                "pars": statistical_step_datas.copy()
            })
        task_mongo_instance.close()  # original leaked this connection
        return tasks
    def line_test(self, data):
        """Build line-chart data for one or more legends.

        For each legend entry a per-project Mongo collection is opened and
        passed, together with the legend's filter conditions, to the Line
        chart builder.  All connections opened here are closed after the
        chart data has been produced.

        :param data: request dict with keys ``title_text``, ``step_count``,
            ``step`` (minutes), ``type`` (chart type) and ``legend_infos``
            (per-legend query info)
        :return: serialized PR bytes wrapping the chart data
        """
        _PR = PR.getInstance()

        _title_text = data['title_text']  # chart title
        _step_count = data["step_count"]  # number of x-axis points
        _step = data['step']  # step length (unit: minutes)
        _type = data['type']  # chart type
        _legend_infos = data['legend_infos']  # per-legend query info
        print(_legend_infos)
        _legend_datas = _legend_infos.keys()  # legend names

        _search_filter_infos = {}
        _mongo_instances = []  # every instance opened in the loop
        for _legend_data in _legend_datas:
            _legend_info = _legend_infos[_legend_data]
            _project_name = _legend_info['project_name']  # project name
            _statistic_type = _legend_info['statistic_type']  # statistic type
            _statistic_name = _legend_info['statistic_name']  # statistic name
            _filter_infos = _legend_info['filter_infos']  # filter conditions
            print(_filter_infos)
            ds = logic.project_ds_info[_project_name]  # data-source lookup
            table = _project_name + "_" + _statistic_type  # e.g. YXYBB_interface
            self_mongo_instance = Mongo.getInstance(table=table, ds=ds)
            _mongo_instances.append(self_mongo_instance)
            self_collection = self_mongo_instance.getCollection()

            _search_filter_infos[_legend_data] = {
                "project_name": _project_name,
                "self_collection": self_collection,  # extra data-source conn
                "filter_infos": _filter_infos,  # filter conditions
                "statistic_type": _statistic_type,  # statistic type
                "statistic_name": _statistic_name  # statistic name
            }
        _result = Line.getInsatnce(search_filter_infos=_search_filter_infos,
                                   _step=_step,
                                   _step_count=_step_count,
                                   _title_text=_title_text,
                                   _type=_type).getLineChartData()
        # Close every connection opened above.  The original closed only the
        # last instance (leaking the rest) and raised UnboundLocalError when
        # legend_infos was empty.
        for _instance in _mongo_instances:
            _instance.close()
        _PR.setResult(_result)
        return _PR.getPRBytes()
 def callback(self, ch, method, properties, body):
     """RabbitMQ consume callback: insert one message document into Mongo.

     :param ch: pika channel (unused)
     :param method: pika method frame (unused)
     :param properties: pika message properties (unused)
     :param body: raw message bytes containing a UTF-8 JSON document
     """
     decoded = str(body, encoding="utf-8")  # decode once, reuse below
     print(" [x] Received %r" % body)
     print(decoded)
     # NOTE(review): every other call site in this file uses ds="YXYBB" and
     # getCollection(); confirm dbname=/.collection is still a supported
     # form of Mongo.getInstance.
     collection = Mongo.getInstance(table="YXYBB_interface",
                                    dbname="YXYBB").collection
     # NOTE(review): collection.insert is removed in pymongo 3+; confirm
     # the pinned driver version or migrate to insert_one.
     collection.insert(json.loads(decoded))
    def statistical_deal(self, pars):
        """Worker loop: periodically compute counts for statistic items.

        For every item id in *pars* this loads the item definition from the
        ``statistical_item`` table, counts matching source documents per
        time bucket between the item's start time and now, writes the
        per-bucket counts to the result table in batches, then advances the
        item's start time.  The loop sleeps for the last item's step
        (minutes) and repeats forever — it never returns.

        :param pars: iterable of dicts, each carrying the ``_id`` of a
            ``statistical_item`` document
        """
        while True:
            # default sleep of one hour, overridden per item below
            sleep_time = 60 * 60
            for par in pars:
                _id = par['_id']
                _item_mongo_instnce = Mongo.getInstance(
                    table="statistical_item")
                _item_conllection = _item_mongo_instnce.getCollection()
                # NOTE(review): _item_bo is built but never used afterwards —
                # confirm it can be removed.
                _item_bo = statistical_item_BO.getInstance()
                _item_bo.setId(_id)
                _item_filter = Filter.getInstance().filter(
                    "_id", _id, DBCODE.EQ)
                _item_info = _item_conllection.find_one(
                    _item_filter.filter_json())

                statistical_step = _item_info['statistical_step']
                # step is stored in minutes; sleep for one step per pass
                sleep_time = statistical_step * 60
                project_name = _item_info['project_name']
                statistical_type = _item_info['statistical_type']
                if "statistical_name" in _item_info.keys():
                    statistical_name = _item_info['statistical_name']
                else:
                    statistical_name = None
                statistical_start_time = _item_info['statistical_start_time']
                # time bucket boundaries from the item's start time onwards
                times = Time.getComputeTimes(start_time=statistical_start_time,
                                             step=statistical_step)

                ds = logic.project_ds_info[project_name]
                table = project_name + "_" + statistical_type
                # Source-data collection the statistic is computed from
                statistical_mongo_instance = Mongo.getInstance(table=table,
                                                               ds=ds)
                statistical_mongo_collection = statistical_mongo_instance.getCollection(
                )
                documents = []
                lastTime = None
                L.debug("compute step is  %d s", statistical_step * 60)
                # one count per (times[i-1], times[i]] bucket
                for i in range(1, len(times)):
                    lastTime = times[i]
                    _f = Filter.getInstance()
                    _f.filter("type", statistical_type, DBCODE.EQ)
                    _f.filter("project", project_name, DBCODE.EQ)
                    if statistical_name is not None:
                        _f.filter("name", statistical_name, DBCODE.EQ)
                    _f.filter("createtime", times[i - 1], DBCODE.GT)
                    _f.filter("createtime", times[i], DBCODE.LTE)
                    _filter = _f.filter_json()
                    # NOTE(review): cursor.count() is removed in pymongo 4 —
                    # this code appears to target an older driver.
                    count = statistical_mongo_collection.find(_filter).count()
                    document_bo = statistic_res_BO.getInstance()
                    document_bo.setStatistical_project(project_name)
                    document_bo.setStatistical_time(times[i])
                    document_bo.setStatistical_count(count)
                    document_bo.setStatistical_step(statistical_step)
                    document_bo.setStatistical_type(statistical_type)
                    if statistical_name is not None:
                        document_bo.setStatistical_name(statistical_name)
                    documents.append(document_bo.json())
                    if len(documents) > init.MAX_INSERT_COUNT:
                        res_mongo_instance = Mongo.getInstance(
                            table=BO.BASE_statistic_res)
                        res_collection = res_mongo_instance.getCollection()
                        res_collection.insert_many(
                            documents=documents)  # batch-insert into the result table to avoid oversized inserts
                        documents = []
                        res_mongo_instance.close()

                if len(documents) > 0:
                    res_mongo_instance = Mongo.getInstance(
                        table=BO.BASE_statistic_res)
                    res_collection = res_mongo_instance.getCollection()
                    res_collection.insert_many(
                        documents=documents)  # flush remaining results into the result table
                    documents = []
                    res_mongo_instance.close()
                else:
                    L.debug("statistical_deal ,not get the insert data")

                # advance the statistical_item record's start time
                if lastTime is not None:
                    _item_bo_1 = statistical_item_BO.getInstance()
                    _item_bo_1.setStatistical_start_time(lastTime)
                    # NOTE(review): update_json is referenced as an attribute,
                    # not called — confirm it is a property, not a method.
                    _item_conllection.update_one(_item_filter.filter_json(),
                                                 _item_bo_1.update_json)
                else:
                    L.debug("statistical_deal ,not get last time")

                # close the item-table connection
                # NOTE(review): statistical_mongo_instance is never closed —
                # looks like a connection leak; verify.
                _item_mongo_instnce.close()
            L.debug("thread will sleep %s s", sleep_time)
            time.sleep(sleep_time)
            self.queue = queue.Queue(self.step_type_count)
        self.start()  # 启动队列
        for task in tasks:
            self.queue.put(task, block=True, timeout=None)  # 向队列中添加内容
        # self.queue.join()  # 等待所有任务完成
        L.info("item compute task add finished ")

    # Kick off the scheduled-computation pipeline on a worker thread.
    def start_init(self):
        """Launch ``product_consume`` on a background thread and return."""
        worker = threading.Thread(target=self.product_consume)
        worker.start()


if __name__ == "__main__":
    item_bo = statistical_item_BO.getInstance()
    collection = Mongo.getInstance(
        table=BO.BASE_statistical_item).getCollection()
    datas = []
    data_1 = item_bo.setProject_name("YXYBB").setStatistical_type(
        "interface").setStatistical_step(1).setStatistical_start_time(
            "2017-05-18 00:00:00.000").json
    data_2 = item_bo.setProject_name("YXYBB").setStatistical_type(
        "interface").setStatistical_step(5).setStatistical_start_time(
            "2017-05-18 00:00:00.000").json
    data_3 = item_bo.setProject_name("YXYBB").setStatistical_type(
        "interface").setStatistical_step(30).setStatistical_start_time(
            "2017-05-18 00:00:00.000").json
    data_4 = item_bo.setProject_name("YXYBB").setStatistical_type(
        "interface").setStatistical_step(60).setStatistical_start_time(
            "2017-05-18 00:00:00.000").json
    data_5 = item_bo.setProject_name("YXYBB").setStatistical_type(
        "interface").setStatistical_step(
    def stop(self):
        """Signal the worker loop to stop by raising the stop flag."""
        self.thread_stop = True


if __name__ == "__main__":
    # Ad-hoc smoke test: count one week of 1-minute "interface" statistics
    # for project YXYBB in the result table and print the total.
    res_collection = Mongo.getInstance(table=BO.BASE_statistic_res).getCollection()
    query = {
        'statistical_type': 'interface',
        'statistical_project': 'YXYBB',
        'statistical_step': 1,
        'statistical_name': None,
        'statistical_time': {
            '$gt': '2017-05-16 13:02:00',
            '$lte': '2017-05-23 11:42:00'
        }
    }
    print(res_collection.find(query).count())
#!/usr/bin/env python
# !-*- coding:utf-8 -*-

from bin.until import Mongo

# Map each project name to its data-source code, loaded once at import time.
project_ds_info = {
    ds_info["project"]: ds_info["ds_code"]
    for ds_info in Mongo.getInstance(table="project_ds").getCollection().find()
}
class Service_logic(object):
    """Service layer that builds ECharts option payloads for the UI."""

    def logic(self, data):
        """Return a hard-coded bar-chart demo payload (smoke test).

        :param data: request payload (unused)
        :return: serialized PR bytes wrapping the demo chart option
        """
        _PR = PR.getInstance()
        _data = {
            "title": {
                "text": 'ECharts 入门示例'
            },
            "tooltip": {},
            "legend": {
                "data": ['销量']
            },
            "xAxis": {
                "data": ["衬衫", "羊毛衫", "雪纺衫", "裤子", "高跟鞋", "袜子"]
            },
            "yAxis": {},
            "series": [{
                "name": '销量',
                "type": 'bar',
                "data": [5, 20, 36, 10, 10, 200]
            }]
        }
        _PR.setResult(_data)
        print(_PR.getResult())
        return _PR.getPRBytes()

    def xx(self, data):
        """Return a demo chart built with the Echart helper library.

        :param data: request payload (unused)
        :return: serialized PR bytes wrapping the chart option
        """
        _PR = PR.getInstance()

        chart = Echart('GDP', 'This is a fake chart')
        chart.use(Bar('China', [2, 3, 4, 5]))
        chart.use(Legend(['GDP']))
        chart.use(Axis('category', 'bottom', data=['Nov', 'Dec', 'Jan',
                                                   'Feb']))
        _chart = chart.json
        print(_chart)
        _chart["tooltip"] = {}
        _PR.setResult(_chart)

        return _PR.getPRBytes()

    # NOTE(review): this opens a Mongo connection at class-definition
    # (import) time and never closes it — confirm the side effect is
    # intended.
    collection = Mongo.getInstance("statistics").collection

    def line_test(self, data):
        """Build line-chart data for one or more legends.

        For each legend entry a per-project Mongo collection is opened and
        passed, together with the legend's filter conditions, to the Line
        chart builder.  All connections opened here are closed after the
        chart data has been produced.

        :param data: request dict with keys ``title_text``, ``step_count``,
            ``step`` (minutes), ``type`` (chart type) and ``legend_infos``
            (per-legend query info)
        :return: serialized PR bytes wrapping the chart data
        """
        _PR = PR.getInstance()

        _title_text = data['title_text']  # chart title
        _step_count = data["step_count"]  # number of x-axis points
        _step = data['step']  # step length (unit: minutes)
        _type = data['type']  # chart type
        _legend_infos = data['legend_infos']  # per-legend query info
        print(_legend_infos)
        _legend_datas = _legend_infos.keys()  # legend names

        _search_filter_infos = {}
        _mongo_instances = []  # every instance opened in the loop
        for _legend_data in _legend_datas:
            _legend_info = _legend_infos[_legend_data]
            _project_name = _legend_info['project_name']  # project name
            _statistic_type = _legend_info['statistic_type']  # statistic type
            _statistic_name = _legend_info['statistic_name']  # statistic name
            _filter_infos = _legend_info['filter_infos']  # filter conditions
            print(_filter_infos)
            ds = logic.project_ds_info[_project_name]  # data-source lookup
            table = _project_name + "_" + _statistic_type  # e.g. YXYBB_interface
            self_mongo_instance = Mongo.getInstance(table=table, ds=ds)
            _mongo_instances.append(self_mongo_instance)
            self_collection = self_mongo_instance.getCollection()

            _search_filter_infos[_legend_data] = {
                "project_name": _project_name,
                "self_collection": self_collection,  # extra data-source conn
                "filter_infos": _filter_infos,  # filter conditions
                "statistic_type": _statistic_type,  # statistic type
                "statistic_name": _statistic_name  # statistic name
            }
        _result = Line.getInsatnce(search_filter_infos=_search_filter_infos,
                                   _step=_step,
                                   _step_count=_step_count,
                                   _title_text=_title_text,
                                   _type=_type).getLineChartData()
        # Close every connection opened above.  The original closed only the
        # last instance (leaking the rest) and raised UnboundLocalError when
        # legend_infos was empty.
        for _instance in _mongo_instances:
            _instance.close()
        _PR.setResult(_result)
        return _PR.getPRBytes()
    def getLineChartData(self):
        """Assemble an ECharts line-chart option from the configured legends.

        For each legend in ``self._search_filter_infos`` this computes one
        series of per-time-bucket counts, either by querying the source
        collection directly (when extra filter conditions exist) or by
        reading precomputed rows from the result table and backfilling the
        newest bucket when it is missing.

        :return: dict with ``title``, ``legend``, ``xAxis`` and ``series``
            keys in ECharts option format
        """
        series = []
        _legend_datas = []
        for key in self._search_filter_infos:
            _legend_data = key
            _legend_datas.append(_legend_data)
            _search_filter_info = self._search_filter_infos[key]
            _project = _search_filter_info['project_name']
            self_collection = _search_filter_info['self_collection']
            _filter_infos = _search_filter_info['filter_infos']
            _statistic_type = _search_filter_info['statistic_type']
            _statistic_name = _search_filter_info['statistic_name']

            self.start_time = Time.getStartTime(
                step=self._step, step_count=self._step_count)  # default chart start time
            is_search_db = False
            # Time bounds in the filters override the defaults; any other
            # condition forces a direct source-collection query.
            # NOTE(review): this inner loop rebinds `key` (the outer loop
            # variable) — harmless here, but confusing; verify before reuse.
            for _filter_info in _filter_infos:
                key = _filter_info['key']
                relation = _filter_info['relation']
                value = _filter_info['value']
                if key == 'time' and (relation == DBCODE.GT
                                      or relation == DBCODE.GTE):
                    self.start_time = value  # start time from filter condition
                elif key == 'time' and (relation == DBCODE.LTE
                                        or relation == DBCODE.LT):
                    self.end_time = value  # end time from filter condition
                else:
                    is_search_db = True

            times = Time.getComputeTimes(start_time=self.start_time,
                                         end_time=self.end_time,
                                         step=self._step)
            series_data = []  # y-axis values
            if is_search_db is True:  # multi-condition direct query path
                _self_filter = Filter.getInstance()
                _self_filter.filter("project", _project, DBCODE.EQ)
                _self_filter.filter("type", _statistic_type, DBCODE.EQ)
                for _filter_info in _filter_infos:
                    if _filter_info['key'] != 'time':
                        _self_filter.filter(_filter_info['key'],
                                            _filter_info['value'],
                                            _filter_info['relation'])
                # NOTE(review): the same Filter instance gets new createtime
                # bounds every iteration — assumes Filter.filter replaces an
                # existing key rather than appending; confirm.
                for i in range(len(times) - 1):
                    _self_filter.filter("createtime", times[i], DBCODE.GT)
                    _self_filter.filter("createtime", times[i + 1], DBCODE.LTE)
                    _filter = _self_filter.filter_json()
                    count = self_collection.find(_filter).count()
                    series_data.append(count)
            else:
                # precomputed path: read from the result table (batched plan)
                res_collection = Mongo.getInstance(
                    table=BO.BASE_statistic_res).getCollection()
                res_filter = Filter.getInstance()
                res_filter.filter("statistical_time", times[0], DBCODE.GT)
                res_filter.filter("statistical_time", times[-1], DBCODE.LTE)
                res_filter.filter("statistical_step", self._step, DBCODE.EQ)
                res_filter.filter("statistical_type", _statistic_type,
                                  DBCODE.EQ)
                res_filter.filter("statistical_project", _project, DBCODE.EQ)
                if Data.isNone(_statistic_name):
                    _statistic_name = None
                res_filter.filter("statistical_name", _statistic_name,
                                  DBCODE.EQ)
                print(res_filter.filter_json())
                ress = res_collection.find(res_filter.filter_json()).sort(
                    "statistical_time", -1)  # newest-first precomputed rows
                self._step_count = len(times) - 1
                series_data = Data.getD4tArr(len=self._step_count,
                                             default_value=0)  # axis values, zero-filled
                # Try to assemble the data from precomputed rows first; when
                # the newest bucket is missing, backfill it with a live count.
                i = 0
                for res in ress:
                    if i == 0 and ress.count() != (
                            len(times) -
                            1) and res['statistical_time'] != times[-1]:
                        # backfill the missing newest bucket
                        _self_filter = Filter.getInstance()
                        if not Data.isNone(_statistic_name):
                            _self_filter.filter("name", _statistic_name,
                                                DBCODE.EQ)
                        _self_filter.filter("project", _project, DBCODE.EQ)
                        _self_filter.filter("type", _statistic_type, DBCODE.EQ)
                        _self_filter.filter("createtime", times[-2], DBCODE.GT)
                        _self_filter.filter("createtime", times[-1],
                                            DBCODE.LTE)
                        _filter = _self_filter.filter_json()
                        count = self_collection.find(_filter).count()
                        series_data[i] = count
                        series_data[i + 1] = res['statistical_count']
                        i = i + 2
                    else:
                        series_data[i] = res['statistical_count']
                        i = i + 1
                # rows were newest-first; flip to chronological order
                series_data.reverse()
            xAxis_data = times[1:]  # x-axis labels: bucket end times, first point dropped

            serie = {
                "name": _legend_data,
                "type": self._type,
                "showSymbol": False,
                "smooth": True,
                # "stack": '总量',
                "data": series_data.copy()  # axis values
            }
            series.append(serie)

        # NOTE(review): xAxis_data is only bound inside the loop — an empty
        # _search_filter_infos would raise NameError here; verify callers
        # never pass an empty dict.
        _result = {
            "title": {
                "text": self._title_text
            },
            "legend": {
                "data": _legend_datas.copy()
            },
            "xAxis": {
                "data": xAxis_data.copy()
            },
            "series": series
        }
        return _result