Example #1
def anomaliesSummary(request, deviceID, anomalyID=None):
    data = defaultdict(list)

    data_pre = request.data
    data_pre['deviceID'] = deviceID

    serializer = EASSerializer(data=data_pre)
    serializer.is_valid(raise_exception=True)
    kwargs = serializer.data

    from_size = kwargs.pop('from_size')
    size = kwargs.pop('size')
    must_list = kwargs.pop('must_list')
    must_not_list = [{"terms": {"scores": [0]}}]
    esorm = AnomaliesScoresORM()
    datetime_list = get_datetime_list(esorm, deviceID, must_not_list, must_list, **kwargs)
    if anomalyID:
        # restrict the query to a single anomaly
        must_list += [{"term": {"anomalyID.keyword": anomalyID}}]
    else:
        anomalyID = "_all"

    anomalies = get_anomaly(anomalyID)

    # when querying all anomalies, only surface hits whose anomalyID is known
    must_display = [{"terms": {"anomalyID.keyword": list(anomalies.keys())}}] if anomalyID == "_all" else []

    query = {
        "size": size,
        "_source": ["userId", "username", "timestamp", "scores", "summary", "anomalyID"],
        "from": from_size,
        "sort": [{"timestamp": {"order": "desc"}}, {"scores": {"order": "desc"}}],
        "query": {
            "bool": {
                "must": must_list + must_display,
                "filter": [{"terms": {"timestamp": datetime_list}}, {"term": {"deviceID.keyword": deviceID}}],
                "must_not": must_not_list
            }
        }
    }
    res = esorm.search(False, query=query)
    docs = [i['_source'] for i in res['hits']['hits']]

    if anomalyID == "_all":
        for i in anomalies:
            anomaly = anomalies[i]
            anomaly['summary'] = anomaly['forensics']['summary']
            del anomaly['forensics']
    else:
        anomaly = anomalies
        anomaly['summary'] = anomaly['forensics']['summary']
        del anomaly['forensics']

    # merge the static anomaly metadata into every matching document
    for doc in docs:
        anomaly = anomalies[doc["anomalyID"]] if anomalyID == "_all" else anomalies
        data[anomalyID].append(dict_merge(anomaly, doc, True))

    data["hits"] = res["hits"]["total"]
    return Response(data)
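
Example #1 leans on a dict_merge helper that none of these extracts define. Below is a minimal sketch of what it presumably does, reading the third argument as a deep-copy flag so the shared anomaly metadata is not mutated; the signature and flag semantics are assumptions, not the project's actual API:

import copy

def dict_merge(base, override, deep=False):
    # hypothetical reconstruction: recursively overlay `override` onto a copy of `base`
    merged = copy.deepcopy(base) if deep else dict(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = dict_merge(merged[key], value, deep)
        else:
            merged[key] = value
    return merged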
Example #2
def get_anomalyGroup_forensics(deviceID, ID, **kwargs):
    data = {ID: {}}
    # group metadata, minus the member list which is expanded below
    anomalyGroup = get_threat(deviceID)['anomalyGroups'][ID]
    anomalies_list = anomalyGroup.pop('anomalies')
    data[ID].update(anomalyGroup)

    for anomalyID in anomalies_list:
        temp = get_anomaly(anomalyID)
        data[ID].update(
            get_anomaly_forensics(deviceID=deviceID,
                                  ID=anomalyID,
                                  temp=temp,
                                  **kwargs))

    return data
Example #3
 def get(self, request, deviceID, anomalyID):
     return Response({anomalyID: get_anomaly(deviceID, anomalyID)})
Example #4
def eas_anomaly(request, deviceID, anomalyID='_all'):
    data = {anomalyID: get_anomaly(anomalyID)}
    return Response(data)
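
Views like eas_anomaly and anomaliesSummary are plain functions that return a DRF Response and read request.data, which only works when they are wrapped in Django REST Framework's @api_view decorator; the extracts omit it, so presumably the originals are declared along these lines (the view below is a stand-in, not project code):

from rest_framework.decorators import api_view
from rest_framework.response import Response

@api_view(['POST'])  # views that read request.data accept POST
def ping(request, deviceID):
    # trivial stand-in demonstrating the decorator contract
    return Response({"deviceID": deviceID})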
Example #5
 def post(self, request, deviceID, paramID=None):
     data = defaultdict(dict)
     data_pre = request.data
     data_pre['deviceID'] = deviceID
     serializer = EASSerializer(data=data_pre)
     serializer.is_valid(raise_exception=True)
     kwargs = serializer.data
     startTimestamp = kwargs['startTimestamp']
     endTimestamp = kwargs['endTimestamp']
     size = kwargs.pop('size')
     from_size = kwargs.pop('from_size')
     must_list = kwargs.pop('must_list')
     must_not_list = [{"term": {"scores": 0}}]
     if paramID:
         must_list += [{"term": {"anomalyID.keyword": paramID}}]
         anomaly = get_anomaly(deviceID, paramID)
         data[paramID]['name'] = anomaly['name']
     else:
         paramID = "_all"
         anomalies = get_anomaly(deviceID, paramID)
         data[paramID]['name'] = {"en": "All anomaly", "zh": "全部异常"}
     esorm = AnomaliesScoresORM()
     datetime_list = get_datetime_list(esorm, deviceID, must_not_list, must_list, **kwargs)
     min_bound_date = simple_datetime(startTimestamp, str, True)[:10]
     max_bound_date = simple_datetime(endTimestamp - 3600 * 24, str, True)[:10]
     demo = {
         "hits": 0,
         "top": {
             "abnormalHits": [],
             "abnormalScores": []
         },
         "histogram": {
             "abnormalHits": []
         },
     }
     data[paramID].update(demo)
     query = {
         "size": size,
         "from": from_size,
         "_source": ["username", "scores"],
         "sort": [
             {"scores": {"order": "desc"}}
         ],
         "query": {
             "bool": {
                 "must": must_list,
                 "filter": [
                     {"terms": {"timestamp": datetime_list}},
                     {"term": {"deviceID.keyword": deviceID}}
                 ],
                 "must_not": must_not_list
             }
         },
         "aggs": {
             "count_anomaly": {
                 "terms": {
                     "field": "anomalyID.keyword",
                     "size": config.MAX_AGGS
                 },
                 "aggs": {
                     "histinfo": {
                         "date_histogram": {
                             "field": "timestamp",
                             "interval": "day",
                             "extended_bounds": {
                                 "min": min_bound_date,
                                 "max": max_bound_date
                             },
                             "min_doc_count": 0,
                             "format": "yyyy-MM-dd"
                         },
                         "aggs": {
                             "clone_count": {
                                 "cumulative_sum": {
                                     "buckets_path": "_count"
                                 }
                             }
                         }
                     }
                 }
             },
             "top_hits": {
                 "terms": {
                     "field": "username.keyword",
                     "size": config.TOP_SIZA,
                     "show_term_doc_count_error": True
                 }
             },
             "top_scores": {
                 "terms": {
                     "field": "username.keyword",
                     "size": config.TOP_SIZA,
                     "show_term_doc_count_error": True,
                     "order": [
                         {"max_scores": "desc"}
                     ]
                 },
                 "aggs": {
                     "max_scores": {
                         "max": {
                             "field": "scores"
                         }
                     }
                 }
             }
         }
     }
     res = esorm.aggregate(query=query)
     if res["hits"]["total"] != 0:
         data[paramID]["hits"] = res["hits"]["total"]
         acab = res["aggregations"]["count_anomaly"]["buckets"]
         athb = res["aggregations"]["top_hits"]["buckets"]
         atsb = res["aggregations"]["top_scores"]["buckets"]
         data[paramID]["histogram"]["abnormalHits"] = sum([[j["doc_count"] for j in i["histinfo"]["buckets"]] for i in acab], axis=0)  # .tolist()
         data[paramID]["top"]["abnormalHits"] = [{"username": i["key"], "hits": i["doc_count"]} for i in athb[0:size]]
         data[paramID]["top"]["abnormalScores"] = [{"username": i["key"], "scores": i["max_scores"]["value"]} for i in atsb[0:size]]
         if paramID == "_all":
             data[paramID]["anomalies_hits"] = [{"anomalyID": i["key"], "hits": i["doc_count"], "name": anomalies[i["key"]]["name"]} for i in acab]
     return Response(data)
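
The date_histogram in Example #5 yields one bucket list per anomaly, and extended_bounds with min_doc_count=0 pads every list to the same date range, which is what makes the element-wise summation above safe. A toy run with made-up doc_count values:

acab = [
    {"histinfo": {"buckets": [{"doc_count": 1}, {"doc_count": 0}, {"doc_count": 4}]}},
    {"histinfo": {"buckets": [{"doc_count": 2}, {"doc_count": 3}, {"doc_count": 0}]}},
]
per_anomaly = [[j["doc_count"] for j in i["histinfo"]["buckets"]] for i in acab]
print([sum(day) for day in zip(*per_anomaly)])  # -> [3, 3, 4]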
Example #6
def get_anomaly_forensics(deviceID=None,
                          ID=None,
                          temp=None,
                          xrs=None,
                          user=None,
                          pageSize=None,
                          timestamp=None):
    try:
        data = {}
        es_orm = AnomaliesScoresORM()
        start = simple_datetime(timestamp, str, True)
        doc_id = get_doc_id(start, deviceID, ID, user)
        res = es_orm.get_obj_or_404(doc_id=doc_id)

        if res.get("scores", 1) != 0:
            if temp is None:
                if xrs == 'eas':
                    temp = get_anomaly(ID)
                elif xrs == 'ers':
                    temp = get_ers_models(deviceID)['params'][ID]
                else:
                    raise Exception("unsupported xrs type: {}".format(xrs))

            data[ID] = temp
            data[ID]['scores'] = res.get('scores', -1)
            from_ = (pageSize - 1) * 5

            # index/query stay falsy when the log ids are stored directly on
            # the document, so the query-based branch below is skipped
            index, query = None, {}
            if res['details'].get('logs'):
                size = pageSize * 5
                log_size = len(res["details"]["logs"])
                ids = res['details']['logs'][from_:size]
                res['details']['logs'] = get_logs_with_ids(ids)
                res['details']['size'] = log_size
            else:
                index = res['details'].pop('index', None)
                index_list = res['details'].pop('index_list', None)
                query = res['details'].pop('query', {})
                index = index_list or index

            if index and query != {}:
                size = 5
                res['details']['logs'], res['details']['size'] = \
                    get_logs_with_query(index, query, from_, size)

                # relies on short-circuit evaluation: if the anomaly defines no
                # graphs, indexing template[0] would raise here
                if ('display' in temp and xrs == 'eas' and
                        'agg_query' in temp['forensics']['graphs']['template'][0]):
                    aggs_querys = {}
                    for graph in temp['forensics']['graphs']['template']:
                        aggs_querys.update(graph['agg_query'])

                    _query = json.loads(query)
                    _query['aggs'] = aggs_querys

                    graphs_values = ElasticsearchORM(index).search(
                        query=_query)['aggregations']
                    remove_buckets(graphs_values)
                    res['graphs'] = graphs_values

                if ID in [
                        "23787c99-4b94-4514-a38e-f753b8f47e57",
                        "c91dd8fa-af7f-11e9-a5a5-144f8a006a90"
                ]:
                    for i in res['details']['logs']:
                        if "geoip" in i and i['geoip']['country_name'] in [
                                "Taiwan", "Hong Kong", "Macao"]:
                            i['geoip']['country_name'] = \
                                "China " + i['geoip']['country_name']

            dct = data[ID]['forensics']
            data[ID]['forensics'] = dict_merge(dct, res)

        # added by wendong, compatible with version 3.3; the graph rewriting
        # runs per entry, which also avoids a NameError when data is empty
        if config.UCSS_VERSION == 3.3:
            for k, v in data.items():
                graphs = v["forensics"]["graphs"]
                _graphs = copy.deepcopy(graphs)
                for i in _graphs["template"]:
                    if i["type"] == 1:
                        graphs["template"].remove(i)
                        continue
                    elif i["type"] == 2:
                        graphs["histCnt"] = get_histCnt(graphs["histCnt"],
                                                        timestamp)
                    elif i["type"] == 3:
                        graphs["timeseries"] = [
                            item["key_as_string"]
                            for item in graphs["timeseries"]
                        ]

        return data

    except APIDataNotFound:
        logger.debug(
            "{}ScoresORM 404 doc_id:{} timestamp:{} anomalyID:{} user:{}".format(
                xrs.upper(), doc_id, timestamp, ID, user))
        return {}

    except Exception:
        logger.exception("{} {} {} {} {}\n".format(timestamp, deviceID, ID,
                                                   user, pageSize))
        return {}
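
remove_buckets is another project helper that only appears in Example #6; judging from how res['graphs'] is consumed, it presumably unwraps Elasticsearch's {"buckets": [...]} aggregation envelopes in place. A guess at its shape, not the real implementation:

def remove_buckets(aggs):
    # hypothetical sketch: collapse {"buckets": [...]} wrappers in place
    if isinstance(aggs, dict):
        for key, value in list(aggs.items()):
            if isinstance(value, dict) and "buckets" in value:
                aggs[key] = value["buckets"]
            remove_buckets(aggs[key])
    elif isinstance(aggs, list):
        for item in aggs:
            remove_buckets(item)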