Example #1
def dashboard(request):
    count_ips, count_domains = total_data()
    count_bugs = total_bug()
    total = {
        "ips": count_ips,
        "domains": count_domains,
        "bugs": count_bugs
    }
    # Asset records
    data = properly.objects.order_by("-id").all()

    # Chart statistics: date_histogram aggregation on published_from
    payload = {
        "size": 0,
        "aggs": {
            "sales": {
                "date_histogram": {
                    "field": "published_from",
                    "interval": STATIC_TASKS,
                    "format": "yyyy-MM-dd"
                }
            }
        }
    }
    s = Search(using=es, index='w12scan').from_dict(payload)
    res = s.execute().to_dict()
    try:
        charts = res["aggregations"]["sales"]["buckets"]
    except KeyError:
        charts = []
    data_chart = {
        "labels": [],
        "data": []
    }
    for item in charts:
        data_chart["labels"].append(item["key_as_string"])
        data_chart["data"].append(item["doc_count"])

    # Bar chart
    names = count_name()
    data_bar = {
        "labels": [],
        "data": []
    }
    for item in names:
        data_bar["labels"].append(item["key"])
        data_bar["data"].append(item["doc_count"])
    return render(request, "frontend/dashboard.html",
                  {"total": total, "zc_data": data, "data_chart": data_chart, "data_bar": data_bar})
Example #2
def index(request):
    page = request.GET.get("p", "1")
    q = request.GET.get("q", None)
    try:
        page = int(page)
    except ValueError:
        page = 1
    if page <= 0:
        page = 1

    es = Elasticsearch(ELASTICSEARCH_HOSTS)
    start_time = datetime.now()
    keywords = None
    if q is None:
        _search = {
            "from": (page - 1) * 20,
            "size": 20,
            "sort": {"published_from": {"order": "desc"}}
        }
    else:
        _search, keywords = k2e_search(q, page)
    s = Search(using=es, index='w12scan').from_dict(_search)
    count = s.execute().hits.total

    # Pagination logic
    max_page = math.ceil(count / 20)
    if page <= 5:
        paginations = range(1, 10)
    elif page + 5 > max_page:
        paginations = range(max_page - 5, max_page + 5)
    else:
        paginations = range(page - 5, page + 5)
    temp_pagin = []
    for i in paginations:
        if i <= max_page:
            temp_pagin.append(i)
    paginations = temp_pagin

    pagination = {
        "max_page": str(max_page),
        "current": page,
        "pre": str(page - 1) if page - 1 > 0 else "1",
        "next": str(page + 1) if page + 1 <= max_page else str(max_page),
        "paginations": paginations,
        "keyword": ""
    }
    if q is not None:
        pagination["keyword"] = "&q=" + q
    # End of pagination

    datas = []
    for hit in s:
        doc_type = hit.meta.doc_type
        id = hit.meta.id
        d = {}
        if doc_type == "ips":
            d.update(hit.to_dict())
            if d.get("infos"):
                d["info_tags"] = []
                for info in d["infos"]:
                    d["info_tags"].append("{}/{}".format(info["port"], info.get("name", "unknown")))
                d["infos"] = json.dumps(d["infos"], indent=2)
            # Asset association
            d["proper"] = is_proper(d["target"], "ip")
        elif doc_type == "domains":
            d.update(hit.to_dict())
            d["target"] = d.get("title") or d.get("url")
            if d.get("ip"):
                ip = d.get("ip")
                ip_info = es_search_ip(ip, True)
                if ip_info:
                    d["location"] = ip_info.location
            d["proper"] = is_proper(d["url"], "domain")
        d["doc_type"] = doc_type
        d["id"] = id
        d["published_from"] = datetime_string_format(d["published_from"])
        datas.append(d)

    # Left sidebar statistics
    statistics = {}
    # 1. Component statistics
    apps = count_app()
    countrys = count_country()
    names = count_name()
    ports = count_port()
    statistics["apps"] = apps
    statistics["countrys"] = countrys
    statistics["names"] = names
    statistics["ports"] = ports

    # Total elapsed time
    end_time = (datetime.now() - start_time).total_seconds()

    return render(request, "frontend/recent.html",
                  {"datas": datas, "count": count, "second": end_time, "pagination": pagination,
                   "statistics": statistics, "keyword": keywords})
Example #3
def dashboard(request):
    count_ips, count_domains = total_data()
    count_bugs = total_bug()
    total = {
        "ips": count_ips,
        "domains": count_domains,
        "bugs": count_bugs
    }
    # Asset records
    data = properly.objects.order_by("-id").all()

    # Chart statistics: date_histogram aggregation on published_from
    payload = {
        "size": 0,
        "aggs": {
            "sales": {
                "date_histogram": {
                    "field": "published_from",
                    "interval": STATIC_TASKS,
                    "format": "yyyy-MM-dd"
                }
            }
        }
    }
    s = Search(using=es, index='w12scan').from_dict(payload)
    res = s.execute().to_dict()
    try:
        charts = res["aggregations"]["sales"]["buckets"]
    except KeyError:
        charts = []
    data_chart = {
        "labels": [],
        "data": []
    }
    for item in charts:
        count = item["doc_count"]
        if count == 0:
            continue
        data_chart["labels"].append(item["key_as_string"])
        data_chart["data"].append(item["doc_count"])

    # Bar chart
    names = count_name(6)
    data_bar = {
        "labels": [],
        "data": []
    }
    for item in names:
        data_bar["labels"].append(item["key"])
        data_bar["data"].append(item["doc_count"])

    # node monitor
    nodenames = redis_con.keys("w12_node_*")
    nodes = []
    for nodename in nodenames:
        dd = redis_con.hgetall(nodename)
        tem_dict = {}
        tem_dict["nodename"] = lstrsub(nodename, "w12_node_")
        tem_dict["last_time"] = dd.get("last_time", 0)
        tem_dict["tasks"] = dd.get("tasks", "error")
        tem_dict["running"] = dd.get("running", "error")
        tem_dict["finished"] = dd.get("finished", "error")
        tem_dict["status"] = "Running"
        if time.time() - float(tem_dict["last_time"]) > 60 * 5:
            tem_dict["status"] = "Pending"
        tem_dict["time"] = smartDate(float(tem_dict["last_time"]))
        nodes.append(tem_dict)

    # bug[domain]: vulnerability chart for the domains index
    dd = es.indices.get_mapping(index='w12scan', doc_type='domains')
    dd = dd["w12scan"]["mappings"]["domains"]["properties"]
    data_bugs = []
    if "bugs" in dd:
        bug_type = dd["bugs"]["properties"].keys()
        index = 0
        for bug_name in bug_type:
            index += 1
            count = get_bug_count('domains', bug_name)
            dd = {}
            _cls = ["primary", "info", "danger", "success", "warning"]
            dd["label"] = bug_name
            dd["count"] = count
            dd["cls"] = _cls[index % 5]
            data_bugs.append(dd)

    return render(request, "frontend/dashboard.html",
                  {"total": total, "zc_data": data, "data_chart": data_chart, "data_bar": data_bar, "nodes": nodes,
                   "data_bugs": data_bugs})
Example #4
def dashboard(request):
    count_ips, count_domains = total_data()
    count_bugs = total_bug()
    total = {
        "ips": count_ips,
        "domains": count_domains,
        "bugs": count_bugs
    }
    # Asset records
    data = properly.objects.order_by("-id").all()

    # Chart statistics: date_histogram aggregation on published_from
    payload = {
        "size": 0,
        "aggs": {
            "sales": {
                "date_histogram": {
                    "field": "published_from",
                    "interval": STATIC_TASKS,
                    "format": "yyyy-MM-dd"
                }
            }
        }
    }
    s = Search(using=es, index='w12scan').from_dict(payload)
    res = s.execute().to_dict()
    try:
        charts = res["aggregations"]["sales"]["buckets"]
    except KeyError:
        charts = []
    data_chart = {
        "labels": [],
        "data": []
    }
    for item in charts:
        data_chart["labels"].append(item["key_as_string"])
        data_chart["data"].append(item["doc_count"])

    # Bar chart
    names = count_name()
    data_bar = {
        "labels": [],
        "data": []
    }
    for item in names:
        data_bar["labels"].append(item["key"])
        data_bar["data"].append(item["doc_count"])

    # node monitor
    nodenames = redis_con.keys("w12_node_*")
    nodes = []
    for nodename in nodenames:
        dd = redis_con.hgetall(nodename)
        tem_dict = {}
        tem_dict["nodename"] = lstrsub(nodename, "w12_node_")
        tem_dict["last_time"] = dd.get("last_time", 0)
        tem_dict["running"] = dd.get("running", "error")
        tem_dict["finished"] = dd.get("finished", "error")
        tem_dict["status"] = "Running"
        if time.time() - float(tem_dict["last_time"]) > 60 * 5:
            tem_dict["status"] = "Pending"
        tem_dict["time"] = smartDate(float(tem_dict["last_time"]))
        nodes.append(tem_dict)

    return render(request, "frontend/dashboard.html",
                  {"total": total, "zc_data": data, "data_chart": data_chart, "data_bar": data_bar, "nodes": nodes})