Example #1
    def typical_stats(self, gen_params, group_field, start_time, end_time):
        must_params = self.generate_must(gen_params)
        must_params.append({
            "range": {
                "@timestamp": {
                    "format": "epoch_millis",
                    "gte": int(start_time) * 1000,
                    "lte": int(end_time) * 1000,
                }
            }
        })

        t_start_time = datetime.datetime.utcfromtimestamp(int(start_time))
        t_end_time = datetime.datetime.utcfromtimestamp(int(end_time))
        index_names = self.get_index_names(self.flog_index_prefix,
                                           t_start_time, t_end_time)
        if index_names is None:
            return {"code": 0, "msg": "no data, no index"}

        interval, interval_cn, interval_en = \
            self.get_interval(int(start_time), int(end_time))
        url = self.elasticsearch_url + '/' + index_names + '/_search'
        data = es_template.search_typical_logs(must_params, group_field,
                                               start_time, end_time, interval)

        data_stats = []
        status, text = utils.request_es(url, "POST", data)
        if status != 200:
            return {"code": -1, "msg": "internal error, bad request"}
        res = json.loads(text)

        aggr = res.get("aggregations", None)
        if aggr is None:
            return {"code": 0, "msg": "no data, no aggregations"}
        data_group = aggr.get("data_group", None)
        if data_group is None:
            return {"code": 0, "msg": "no data, no data group"}
        buckets = data_group.get("buckets", None)
        if buckets is None:
            return {"code": 0, "msg": "no data, no buckets"}
        for bucket in buckets:
            d = {}
            d["key"] = bucket.get("key", "")
            d["total"] = bucket.get("doc_count", 0)
            data_count = bucket.get("data_count", None)
            if data_count is None:
                continue
            sub_buckets = data_count.get("buckets", None)
            if sub_buckets is None:
                continue
            d["count"] = sub_buckets
            data_stats.append(d)

        ds = {}
        ds["stats"] = data_stats
        ds["interval_cn"] = interval_cn
        ds["interval_en"] = interval_en
        return {"code": 1, "msg": "OK", "data": ds}
Example #2
    def get_all_index(self, index_prefix):
        url = self.elasticsearch_url + '/_cat/indices/' + index_prefix + '-*'
        index_names = []
        status, indexes = utils.request_es(url, 'GET')
        if status != 200:
            LOG.error("failed to get all es indexes")
            return []
        for index in indexes.split('\n'):
            cols = index.split()
            # Skip blank lines; the index name is the third _cat column.
            if len(cols) >= 3:
                index_names.append(cols[2])

        return index_names
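
For reference, the _cat/indices plain-text response is whitespace-separated with the index name in the third column (health, status, index, ...). A standalone sketch of the same parsing with sample input:

# Standalone sketch of the parsing above, assuming the default
# _cat/indices column order: health status index uuid pri rep ...
def parse_cat_indices(text):
    names = []
    for line in text.split('\n'):
        cols = line.split()        # split() collapses runs of spaces
        if len(cols) >= 3:         # skips blank lines
            names.append(cols[2])  # third column is the index name
    return names

sample = "green open flog-2023.05.01 uuid1 1 1 100 0 1mb 1mb\n"
print(parse_cat_indices(sample))  # ['flog-2023.05.01']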
Example #3
    def stat_instance_created_other(self, index_names, params):
        data = es_template.search_all_logs(params)
        url = self.elasticsearch_url + '/' + index_names + '/_search'
        status, text = utils.request_es(url, "POST", data)
        if status != 200:
            return [], "internal error, bad request"
        json_text = json.loads(text)
        hits1 = json_text.get("hits", None)
        if hits1 is None:
            return [], "no data, no hit"
        hits = hits1.get("hits", None)
        if hits is None:
            return [], "no data, no hit"

        hostinfos = {}
        for hit in hits:
            info = {}
            _source = hit.get("_source", None)
            if _source is not None:
                hostname = _source.get("Hostname", "")
                if hostinfos.get(hostname, None) is None:
                    hostinfos[hostname] = []
                info["level"] = _source.get("log_level", "")
                info["time"] = _source.get("@timestamp", "")
                hostinfos[hostname].append(info)

        res = []
        for (k, v) in hostinfos.items():
            r = {}
            r["hostname"] = k
            error_num = 0
            start_time = ""
            end_time = ""
            for i in v:
                level = i.get("level")
                if level in ("ERROR", "error"):
                    error_num += 1

            if len(v) > 0:
                start_time = v[0].get("time", "")
                end_time = v[-1].get("time", "")

            r["log_num"] = len(v)
            r["error_log_num"] = error_num
            r["start_time"] = start_time
            r["end_time"] = end_time
            res.append(r)

        def sort_time(e):
            return e.get('start_time')
        res.sort(key=sort_time)

        return res, None
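
A standalone rerun of the per-host grouping above with sample (hypothetical) hits, to make the produced structure concrete:

# Two hits from the same host are grouped under one hostname key.
hits = [
    {"_source": {"Hostname": "node-1", "log_level": "ERROR",
                 "@timestamp": "2023-05-01T00:00:00Z"}},
    {"_source": {"Hostname": "node-1", "log_level": "INFO",
                 "@timestamp": "2023-05-01T00:01:00Z"}},
]
hostinfos = {}
for hit in hits:
    src = hit.get("_source") or {}
    hostinfos.setdefault(src.get("Hostname", ""), []).append(
        {"level": src.get("log_level", ""), "time": src.get("@timestamp", "")})
print({k: len(v) for k, v in hostinfos.items()})  # {'node-1': 2}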
Example #4
    def delete_es_history_index(self):
        len_d = self.custom_sql.get_config("es_index_length")
        if len_d is None:
            LOG.error(_LE("es_index_length no exist"))
            return

        today = time.strftime('%Y-%m-%d')
        url = self.elasticsearch_url + '/_cat/indices/*log-*'
        status, indexes = utils.request_es(url, "GET")
        if status != 200:
            LOG.error(_LE("failed to get es indexes"))
            return
        for index in indexes.split('\n'):
            cols = index.split()
            # Skip blank lines; the index name is the third _cat column.
            if len(cols) < 3:
                continue
            index_name = cols[2]
            index_day = index_name.split('-')[1]
            diff_day = datetime.datetime.strptime(today, "%Y-%m-%d") - \
                datetime.datetime.strptime(index_day, '%Y.%m.%d')
            if diff_day.days >= int(len_d):
                self.delete_index(index_name)
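
A worked example of the retention check above, using a hypothetical index name and date:

import datetime

# Index "flog-2023.05.01" checked on 2023-05-08 with es_index_length = 7:
# the difference is 7 days, 7 >= 7, so the index would be deleted.
today = datetime.datetime.strptime("2023-05-08", "%Y-%m-%d")
index_day = datetime.datetime.strptime("flog-2023.05.01".split('-')[1],
                                       "%Y.%m.%d")
print((today - index_day).days)  # 7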
Example #5
    def delete_index(self, name):
        url = self.elasticsearch_url + '/' + name
        status, text = utils.request_es(url, "DELETE")
        if status != 200:
            LOG.error(_LE("failed to delete es index"))
            return
Example #6
    def stat_instance_created_compute(self, request_id, uuid, index_names,
                                      start_time, end_time):
        gen_params = {}
        gen_not_params = {}
        gen_params["request_id.keyword"] = request_id
        gen_params["programname.keyword"] = "nova-compute"
        must_params = self.generate_must(gen_params)
        must_not_params = self.generate_must_not(gen_not_params)

        match = {
            "query_string": {
                "all_fields": True,
                "analyze_wildcard": True,
                "query": '"' + uuid + '"',
            }
        }
        must_params.append(match)
        url = self.elasticsearch_url + '/' + index_names + '/_search'
        data = es_template.search_logs(must_params, must_not_params,
                                       start_time, end_time, "24h",
                                       0, 10000)
        status, text = utils.request_es(url, "POST", data)
        if status != 200:
            return [], "internal error, bad request"
        res = json.loads(text)
        hits1 = res.get("hits", None)
        if hits1 is None:
            return [], "no data, no hit"
        hits = hits1.get("hits", None)
        if hits is None:
            return [], "no data, no hit"
        hostinfos = {}
        for hit in hits:
            info = {}
            _source = hit.get("_source", None)
            if _source is not None:
                hostname = _source.get("Hostname", "")
                if hostinfos.get(hostname, None) is None:
                    hostinfos[hostname] = []
                info["payload"] = _source.get("Payload", "")
                info["time"] = _source.get("@timestamp", "")
                hostinfos[hostname].append(info)

        res = []
        for (k, v) in hostinfos.items():
            r = {}
            r["hostname"] = k
            start_time = ""
            end_time = ""
            is_success = 0
            for i in v:
                payload = i.get("payload", "")
                if "Took" in payload and "seconds to build" in payload:
                    end_time = i.get("time", "")
                    is_success = 1
                if ("Enter inspur build_and_run_instance" in payload and
                        start_time == ""):
                    start_time = i.get("time", "")

            if is_success == 0 and len(v) > 0:
                end_time = v[0].get("time", "")
                start_time = v[-1].get("time", "")

            r["is_success"] = is_success
            r["start_time"] = start_time
            r["end_time"] = end_time
            res.append(r)

        def sort_time(e):
            return e.get('start_time')
        res.sort(key=sort_time)

        return res, None
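
The success detection above keys off two payload markers; a standalone check with a hypothetical nova-compute payload line:

# The method above sets is_success when both markers appear in a payload.
payload = "Took 12.34 seconds to build the instance."
is_success = 1 if ("Took" in payload and "seconds to build" in payload) else 0
print(is_success)  # 1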
Example #7
    def analyse_logs(self, group_name, host_name, module_name,
                     program_name, level, start_time, end_time):
        gen_params = {}
        gen_not_params = {}
        title_cn_params = []
        title_en_params = []

        if group_name == "host_name":
            g_name = "Hostname.keyword"
            title_cn = "Host Log Analysis Histogram TOP5"
            title_en = "Host Log Analysis Histogram TOP5"
        elif group_name == "program_name":
            g_name = "programname.keyword"
            title_cn = "Program Log Analysis Histogram TOP5"
            title_en = "Program Log Analysis Histogram TOP5"
        else:
            return {"code": -1, "msg": "invalid param"}

        if host_name:
            gen_params["Hostname.keyword"] = host_name
            title_cn_params.append("host=" + host_name)
            title_en_params.append("host=" + host_name)

        if module_name:
            gen_params["Logger.keyword"] = module_name
            title_cn_params.append("module=" + module_name)
            title_en_params.append("module=" + module_name)

        if program_name:
            gen_params["programname.keyword"] = program_name
            title_cn_params.append("program=" + program_name)
            title_en_params.append("program=" + program_name)

        if level:
            if level == "NO EXIST":
                gen_not_params["log_level.keyword"] = "log_level"
            else:
                gen_params["log_level.keyword"] = level
            title_cn_params.append("level=" + level)
            title_en_params.append("level=" + level)

        if len(title_cn_params) > 0:
            title_cn = title_cn + " (" + " ".join(title_cn_params) + ")"
        if len(title_en_params) > 0:
            title_en = title_en + " (" + " ".join(title_en_params) + ")"

        must_params = self.generate_must(gen_params)
        must_not_params = self.generate_must_not(gen_not_params)

        must_params.append({
            "range": {
                "@timestamp": {
                    "format": "epoch_millis",
                    "gte": int(start_time) * 1000,
                    "lte": int(end_time) * 1000,
                }
            }
        })

        t_start_time = datetime.datetime.utcfromtimestamp(int(start_time))
        t_end_time = datetime.datetime.utcfromtimestamp(int(end_time))
        index_names = self.get_index_names(self.flog_index_prefix,
                                           t_start_time, t_end_time)
        if index_names is None:
            return {"code": 0, "msg": "no data, no index"}
        url = self.elasticsearch_url + '/' + index_names + '/_search'
        data = es_template.search_analyse_logs(must_params,
                                               must_not_params,
                                               g_name)

        status, text = utils.request_es(url, "POST", data)
        if status != 200:
            return {"code": -1, "msg": "internal error, bad request"}
        res = json.loads(text)
        aggr = res.get("aggregations", None)
        if aggr is None:
            return {"code": 0, "msg": "no data, no aggregations"}
        search_values = aggr.get("data_count", None)
        if search_values is None:
            return {"code": 0, "msg": "no data, no count data"}
        buckets = search_values.get("buckets", None)
        if buckets is None:
            return {"code": 0, "msg": "no data, no buckets"}
        data_count = buckets

        d = {}
        d["count"] = data_count
        d["title_cn"] = title_cn
        d["title_en"] = title_en

        return {"code": 1, "msg": "OK", "data": d}
Example #8
    def logs(self, host_name, module_name, program_name,
             level, user_id, project_id, query, index_type,
             start_time, end_time, page_num, page_size):
        if (start_time is None or end_time is None or
                page_num is None or page_size is None):
            return {"code": -1, "msg": "invalid param"}

        if index_type is None:
            index_type = self.flog_index_prefix
        if (index_type != self.flog_index_prefix and
                index_type != self.slog_index_prefix):
            return {"code": -1, "msg": "invalid param"}

        size = int(page_size)
        from_i = (int(page_num) - 1) * size
        gen_params = {}
        gen_not_params = {}
        if host_name:
            gen_params["Hostname.keyword"] = host_name

        if module_name:
            gen_params["Logger.keyword"] = module_name

        if program_name:
            gen_params["programname.keyword"] = program_name

        if level:
            if level == "NO EXIST":
                gen_not_params["log_level.keyword"] = "log_level"
            else:
                gen_params["log_level.keyword"] = level

        if user_id:
            gen_params["user_id.keyword"] = user_id

        if project_id:
            gen_params["tenant_id.keyword"] = project_id

        must_params = self.generate_must(gen_params)
        must_not_params = self.generate_must_not(gen_not_params)

        if query is not None and query != "":
            query = query.replace('"', '\\"')
            match = {
                "query_string": {
                    "all_fields": True,
                    "analyze_wildcard": True,
                    "query": '"' + query + '"',
                }
            }
            must_params.append(match)

        must_params.append({
            "range": {
                "@timestamp": {
                    "format": "epoch_millis",
                    "gte": int(start_time) * 1000,
                    "lte": int(end_time) * 1000,
                }
            }
        })

        t_start_time = datetime.datetime.utcfromtimestamp(int(start_time))
        t_end_time = datetime.datetime.utcfromtimestamp(int(end_time))

        index_prefix = index_type
        index_names = self.get_index_names(index_prefix,
                                           t_start_time, t_end_time)
        if index_names is None:
            return {"code": 0, "msg": "no data, no index"}
        interval, interval_cn, interval_en = \
            self.get_interval(int(start_time), int(end_time))
        url = self.elasticsearch_url + '/' + index_names + '/_search'
        data = es_template.search_logs(must_params, must_not_params,
                                       start_time, end_time, interval,
                                       from_i, size)

        data_count = []
        res_values = []
        status, text = utils.request_es(url, "POST", data)
        if status != 200:
            return {"code": -1, "msg": "internal error, bad request"}
        res = json.loads(text)

        aggr = res.get("aggregations", None)
        if aggr is None:
            return {"code": 0, "msg": "no data, no aggregations"}
        search_values = aggr.get("data_count", None)
        if search_values is None:
            return {"code": 0, "msg": "no data, no count data"}
        buckets = search_values.get("buckets", None)
        if buckets is None:
            return {"code": 0, "msg": "no data, no buckets"}
        data_count.extend(buckets)
        hits1 = res.get("hits", None)
        if hits1 is None:
            return {"code": 0, "msg": "no data, no hit"}
        hits = hits1.get("hits", None)
        total = hits1.get("total", 0)
        if hits is None:
            return {"code": 0, "msg": "no data, no hit"}
        for hit in hits:
            d = {}
            _source = hit.get("_source", None)
            if _source is not None:
                d["host_name"] = _source.get("Hostname", "")
                d["time"] = _source.get("@timestamp", "")
                d["level"] = _source.get("log_level", "")
                d["desc"] = _source.get("Payload", "")
                if d["desc"] == "":
                    d["desc"] = _source.get("message", "")
                d["program_name"] = _source.get("programname", "")
                d["user_id"] = _source.get("user_id", "")
                d["project_id"] = _source.get("tenant_id", "")
                d["module_name"] = _source.get("Logger", "")
            res_values.append(d)

        ds = {}
        ds["count"] = data_count
        ds["interval_cn"] = interval_cn
        ds["interval_en"] = interval_en
        d = {}
        d["total"] = total
        d["values"] = res_values
        return {"code": 1, "msg": "OK", "data_stats": ds, "data": d}
Example #9
    def params(self, type, module_name, index_type):
        field = ""
        if type == "host_name":
            field = "Hostname.keyword"
        elif type == "level":
            field = "log_level.keyword"
        elif type == "program_name":
            field = "programname.keyword"
        elif type == "module_name":
            field = "Logger.keyword"
        else:
            return {"code": -1, "msg": "invalid param"}

        gen_params = {}
        if module_name:
            gen_params["Logger.keyword"] = module_name
        must_params = self.generate_must(gen_params)

        if index_type is None:
            index_type = self.flog_index_prefix
        if (index_type != self.flog_index_prefix and
                index_type != self.slog_index_prefix):
            return {"code": -1, "msg": "invalid param"}

        end_time = timeutils.utcnow()
        start_time = end_time - datetime.timedelta(days=7)
        index_prefix = index_type
        index_names = self.get_index_names(index_prefix,
                                           start_time, end_time)
        if index_names is None:
            return {"code": 0, "msg": "no data, no index"}
        url = self.elasticsearch_url + '/' + index_names + '/_search'
        data = es_template.search_params(field, must_params)

        values = []
        status, text = utils.request_es(url, "POST", data)
        if status != 200:
            return {"code": -1, "msg": "internal error, bad request"}
        res = json.loads(text)

        aggr = res.get("aggregations", None)
        if aggr is None:
            return {"code": 0, "msg": "no data, no aggregations"}
        search_values = aggr.get("search_values", None)
        if search_values is None:
            return {"code": 0, "msg": "no data, no values"}
        buckets = search_values.get("buckets", None)
        if buckets is None:
            return {"code": 0, "msg": "no data, no buckets"}
        for bucket in buckets:
            if type == "level":
                v = bucket["key"]
                vu = v.upper()
                if vu not in values:
                    values.append(vu)
            else:
                values.append(bucket["key"])

        values.sort()
        if type == "level":
            values.append("NO EXIST")

        return {"code": 1, "msg": "OK", "values": values}