Example No. 1
def _load_report(task_id: int, return_one: bool = False):
    """Load an analysis report for a task from MongoDB or Elasticsearch.

    Behavior call logs are stored as chunk documents in a separate "calls"
    collection, so they are re-assembled here before the report is returned.
    """
    if repconf.mongodb.enabled:
        if return_one:
            analysis = mongo_find_one("analysis", {"info.id": int(task_id)}, sort=[("_id", -1)])
            if not analysis:
                return False
            for process in analysis.get("behavior", {}).get("processes", []):
                # Each entry in process["calls"] is an ObjectId pointing at a
                # chunk document; swap the id list for the actual call records.
                chunk_ids = [ObjectId(call) for call in process.get("calls", [])]
                process["calls"] = []
                for chunk in mongo_find("calls", {"_id": {"$in": chunk_ids}}, sort=[("_id", 1)]) or []:
                    process["calls"] += chunk["calls"]
            return analysis

        return mongo_find("analysis", {"info.id": int(task_id)})

    if repconf.elasticsearchdb.enabled and not repconf.elasticsearchdb.searchonly:
        try:
            analyses = (
                es.search(
                    index=get_analysis_index(),
                    query=get_query_by_info_id(task_id),
                    sort={"info.id": {"order": "desc"}},
                )
                .get("hits", {})
                .get("hits", [])
            )
            if analyses:
                return analyses[0] if return_one else analyses
        except ESRequestError as e:
            print(e)

    return False
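The pattern worth noting above is the chunked call log: each process's API calls live as chunk documents in a separate "calls" collection, with only their ObjectIds stored on the analysis document. A minimal standalone sketch of the same re-assembly with plain pymongo, where the connection string, database name, and task id are assumptions rather than CAPE's actual configuration:

from bson.objectid import ObjectId
from pymongo import MongoClient

client = MongoClient("mongodb://127.0.0.1:27017")  # assumed local instance
db = client["cuckoo"]  # assumed database name

analysis = db.analysis.find_one({"info.id": 1}, sort=[("_id", -1)])  # illustrative task id
if analysis:
    for process in analysis.get("behavior", {}).get("processes", []):
        # The stored "calls" list holds chunk ObjectIds, not the calls themselves.
        chunk_ids = [ObjectId(c) for c in process.get("calls", [])]
        process["calls"] = []
        for chunk in db.calls.find({"_id": {"$in": chunk_ids}}, sort=[("_id", 1)]):
            process["calls"].extend(chunk["calls"])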
Example No. 2
def cuckoo_clean_failed_url_tasks():
    """Clean up failed URL tasks.

    It deletes all stored data from the file system and the configured
    databases (SQL and MongoDB) for failed URL tasks.
    """
    # Init logging.
    # This needs to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()
    if not is_reporting_db_connected():
        return

    if repconf.mongodb.enabled:
        # A URL task with no first HTTP entry never produced traffic, i.e. it failed.
        # Materialize the cursor so the truthiness check below works on a list.
        rtmp = list(
            mongo_find(
                "analysis",
                {"info.category": "url", "network.http.0": {"$exists": False}},
                {"info.id": 1},
                sort=[("_id", -1)],
            ).limit(100)
        )
    elif repconf.elasticsearchdb.enabled:
        rtmp = [
            d["_source"]
            for d in all_docs(
                index=get_analysis_index(),
                query={
                    "query": {
                        "bool": {
                            # Mirror the Mongo filter: URL tasks *without* HTTP traffic.
                            "must": [{"match": {"info.category": "url"}}],
                            "must_not": [{"exists": {"field": "network.http"}}],
                        }
                    }
                },
                _source=["info.id"],
            )
        ]
    else:
        rtmp = []

    if rtmp:
        resolver_pool.map(lambda tid: delete_data(tid), rtmp)
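The two backend filters above are meant to be equivalent: a URL task whose report has no network.http entries never produced traffic, so it is treated as failed. A small sketch of both query shapes in isolation, assuming analysis documents carry info.category and a network.http array:

# MongoDB: "network.http.0 does not exist" means the http array is absent or empty.
mongo_filter = {
    "info.category": "url",
    "network.http.0": {"$exists": False},
}

# Elasticsearch: the same condition as a bool query.
es_query = {
    "query": {
        "bool": {
            "must": [{"match": {"info.category": "url"}}],
            "must_not": [{"exists": {"field": "network.http"}}],
        }
    }
}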
Example No. 3
def cuckoo_clean_lower_score(args):
    """Clean up tasks with a malscore <= X.

    It deletes all stored data from the file system and the configured
    databases (SQL and MongoDB) for the matching tasks.
    """
    # Init logging.
    # This needs to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    if not args.malscore:
        log.info("No malscore argument provided; bailing")
        return

    create_structure()
    init_console_logging()
    id_arr = []
    if not is_reporting_db_connected():
        return

    if repconf.mongodb.enabled:
        result = list(mongo_find("analysis", {"malscore": {"$lte": args.malscore}}))
        id_arr = [entry["info"]["id"] for entry in result]
    elif repconf.elasticsearchdb.enabled:
        id_arr = [
            d["_source"]["info"]["id"]
            for d in all_docs(
                index=get_analysis_index(),
                query={"query": {"range": {"malscore": {"lte": args.malscore}}}},
                _source=["info.id"],
            )
        ]
    log.info("Number of matching records: %d", len(id_arr))
    resolver_pool.map(lambda tid: delete_data(tid), id_arr)
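The helper only needs an args object with a malscore attribute. A hedged argparse sketch of how such an object might be built; the flag name is illustrative, not necessarily the tool's real CLI:

import argparse

parser = argparse.ArgumentParser(description="Clean tasks scored at or below a threshold")
parser.add_argument("--malscore", type=float, default=None)
args = parser.parse_args(["--malscore", "2.0"])
# cuckoo_clean_lower_score(args) would then match {"malscore": {"$lte": 2.0}},
# deleting every task that scored 2.0 or less.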
Example No. 4
def left(request, left_id):
    """Render the left-hand side of the sample comparison view."""
    # Initialize so the check below cannot hit an unbound name when
    # MongoDB is disabled.
    left = None
    if enabledconf["mongodb"]:
        left = mongo_find_one("analysis", {"info.id": int(left_id)}, {"target": 1, "info": 1})
    if es_as_db:
        hits = es.search(index=get_analysis_index(), query=get_query_by_info_id(left_id))["hits"]["hits"]
        left = hits[-1]["_source"] if hits else None
    if not left:
        return render(request, "error.html", {"error": "No analysis found with specified ID"})

    # Select all analyses with the same file hash, excluding this task.
    records = []
    if enabledconf["mongodb"]:
        records = mongo_find(
            "analysis",
            {
                "target.file.md5": left["target"]["file"]["md5"],
                "info.id": {"$ne": int(left_id)},
            },
            {"target": 1, "info": 1},
        )
    if es_as_db:
        q = {
            "query": {
                "bool": {
                    "must": [{"match": {"target.file.md5": left["target"]["file"]["md5"]}}],
                    "must_not": [{"match": {"info.id": left_id}}],
                }
            }
        }
        results = es.search(index=get_analysis_index(), body=q)["hits"]["hits"]
        records = [item["_source"] for item in results]

    return render(request, "compare/left.html", {
        "left": left,
        "records": records
    })
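The heart of the view is the "same sample, different task" lookup. A minimal sketch of that query alone with plain pymongo, where the connection, task id, and md5 are all illustrative assumptions:

from pymongo import MongoClient

db = MongoClient("mongodb://127.0.0.1:27017")["cuckoo"]  # assumed names
left_id = 100  # illustrative task id
md5 = "d41d8cd98f00b204e9800998ecf8427e"  # illustrative hash

# Same file hash, any task except the one being compared.
for record in db.analysis.find(
    {"target.file.md5": md5, "info.id": {"$ne": left_id}},
    {"target": 1, "info": 1},
):
    print(record["info"]["id"])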
Example No. 5
def cuckoo_clean_sorted_pcap_dump():
    """Clean up sorted PCAP dumps.

    It removes the sorted PCAP reference from the configured databases
    (MongoDB or Elasticsearch) and deletes dump_sorted.pcap from disk.
    """
    # Init logging.
    # This needs to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    if not is_reporting_db_connected():
        return

    if repconf.elasticsearchdb.enabled:
        es = connect_to_es()

    done = False

    while not done:
        if repconf.mongodb.enabled:
            # Materialize the cursor so the truthiness check below works on a list.
            rtmp = list(
                mongo_find(
                    "analysis",
                    {"network.sorted_pcap_id": {"$exists": True}},
                    {"info.id": 1},
                    sort=[("_id", -1)],
                ).limit(100)
            )
        elif repconf.elasticsearchdb.enabled:
            rtmp = [
                d["_source"]
                for d in all_docs(
                    index=get_analysis_index(),
                    query={"query": {"exists": {"field": "network.sorted_pcap_id"}}},
                    _source=["info.id"],
                )
            ]
        else:
            rtmp = []

        if rtmp:
            for entry in rtmp:
                if entry["info"]["id"]:
                    log.info("Removing sorted PCAP reference for task %s", entry["info"]["id"])
                    try:
                        if repconf.mongodb.enabled:
                            mongo_update_one(
                                "analysis",
                                {"info.id": int(entry["info"]["id"])},
                                {"$unset": {"network.sorted_pcap_id": ""}},
                            )
                        elif repconf.elasticsearchdb.enabled:
                            # Remove the field (the ES analogue of Mongo's $unset),
                            # so the exists query above stops matching this document.
                            es.update(
                                index=get_analysis_index(),
                                id=entry["info"]["id"],
                                body={"script": {"source": "ctx._source.network.remove('sorted_pcap_id')"}},
                            )
                    except Exception:
                        log.info("Failed to remove sorted PCAP from db for id %s", entry["info"]["id"])
                    try:
                        path = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % entry["info"]["id"], "dump_sorted.pcap")
                        os.remove(path)
                    except Exception as exc:
                        # A distinct name here; reusing the loop variable would
                        # clobber the task dict with the exception object.
                        log.info("Failed to remove sorted PCAP from disk: %s", exc)
                else:
                    done = True
        else:
            done = True
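The database half of the cleanup reduces to a single $unset per task on the Mongo side. A minimal sketch under the same assumed local connection, with an illustrative task id:

from pymongo import MongoClient

db = MongoClient("mongodb://127.0.0.1:27017")["cuckoo"]  # assumed names

# $unset drops the field entirely, so the $exists query above stops matching
# and the while-loop can eventually drain.
db.analysis.update_one(
    {"info.id": 42},  # illustrative task id
    {"$unset": {"network.sorted_pcap_id": ""}},  # the value is ignored by $unset
)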
Example No. 6
def cuckoo_clean_before_day(args):
    """Clean up tasks completed before now - N days.

    It deletes all stored data from the file system and the configured
    databases (SQL and MongoDB) for those tasks.
    """
    # Init logging.
    # This needs to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    if not args.delete_older_than_days:
        log.info("No days argument provided; bailing")
        return

    days = args.delete_older_than_days
    create_structure()
    init_console_logging()
    id_arr = []

    if not is_reporting_db_connected():
        return

    added_before = datetime.now() - timedelta(days=int(days))
    if args.files_only_filter:
        log.info("file filter applied")
        old_tasks = db.list_tasks(added_before=added_before, category="file")
    elif args.urls_only_filter:
        log.info("url filter applied")
        old_tasks = db.list_tasks(added_before=added_before, category="url")
    else:
        old_tasks = db.list_tasks(added_before=added_before)

    for e in old_tasks:
        id_arr.append({"info.id": int(e.to_dict()["id"])})

    log.info("Number of matching records before suricata/custom filter: %d", len(id_arr))
    if id_arr and args.suricata_zero_alert_filter:
        # Keep only tasks that produced no Suricata alerts.
        result = list(
            mongo_find(
                "analysis",
                {"suricata.alerts.alert": {"$exists": False}, "$or": id_arr},
                {"info.id": 1, "_id": 0},
            )
        )
        id_arr = [entry["info"]["id"] for entry in result]
    if id_arr and args.custom_include_filter:
        # Keep only tasks whose custom field matches the given regex.
        result = list(
            mongo_find(
                "analysis",
                {"info.custom": {"$regex": args.custom_include_filter}, "$or": id_arr},
                {"info.id": 1, "_id": 0},
            )
        )
        id_arr = [entry["info"]["id"] for entry in result]
    log.info("number of matching records %s" % len(id_arr))
    delete_bulk_tasks_n_folders(id_arr, args.delete_mongo)
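Two moving parts drive this helper: the date cutoff and the $or id filter that each optional pass intersects against. A short sketch of both with illustrative values:

from datetime import datetime, timedelta

days = 30  # illustrative value for args.delete_older_than_days
added_before = datetime.now() - timedelta(days=days)

# Each optional filter narrows the candidate ids via a Mongo $or query.
id_arr = [{"info.id": 101}, {"info.id": 102}]  # illustrative task ids
suri_filter = {"suricata.alerts.alert": {"$exists": False}, "$or": id_arr}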
Example No. 7
def perform_search(term, value, search_limit=False, user_id=False, privs=False, web=True):
    if repconf.mongodb.enabled and repconf.elasticsearchdb.enabled and essearch and not term:
        multi_match_search = {"query": {"multi_match": {"query": value, "fields": ["*"]}}}
        # Count first, then fetch every hit in a single page.
        # Note: Elasticsearch 7+ returns "total" as an object; use ["total"]["value"] there.
        numhits = es.search(index=get_analysis_index(), body=multi_match_search, size=0)["hits"]["total"]
        return [
            d["_source"]
            for d in es.search(
                index=get_analysis_index(),
                body=multi_match_search,
                sort="task_id:desc",
                size=numhits,
            )["hits"]["hits"]
        ]

    query_val = False
    search_limit = web_cfg.general.get("search_limit", 50) if web else 0
    if term in normalized_lower_terms:
        query_val = value.lower()
    elif term in normalized_int_terms:
        query_val = int(value)
    elif term in ("surisid", "id"):
        try:
            query_val = int(value)
        except Exception:
            pass
    elif term in ("ids", "options", "tags_tasks", "user_tasks"):
        try:
            ids = []
            if term == "ids":
                ids = value
            elif term == "tags_tasks":
                ids = [
                    int(v.id) for v in db.list_tasks(tags_tasks_like=value,
                                                     limit=search_limit)
                ]
            elif term == "user_tasks":
                if not user_id:
                    ids = 0
                else:
                    # ToDo allow to admin search by user tasks
                    ids = [
                        int(v.id) for v in db.list_tasks(user_id=user_id,
                                                         limit=search_limit)
                    ]
            else:
                ids = [
                    int(v.id) for v in db.list_tasks(options_like=value,
                                                     limit=search_limit)
                ]
            if ids:
                if len(ids) > 1:
                    term = "ids"
                    query_val = {"$in": ids}
                else:
                    term = "id"
                    if isinstance(value, list):
                        value = value[0]
                    query_val = int(value)
        except Exception as e:
            print(term, value, e)
    elif term == "configs":
        # check if family name is string only maybe?
        query_val = {
            f"{search_term_map[term]}.{value}": {
                "$exist": True
            },
            "$options": "-i"
        }
    elif term == "ttp":
        if validate_ttp(value):
            query_val = value.upper()
        else:
            raise ValueError("Invalid TTP enterred")
    elif term == "malscore":
        query_val = {"$gte": float(value)}
    else:
        query_val = {"$regex": value, "$options": "-i"}

    if term not in search_term_map:
        return None

    if not search_limit:
        search_limit = web_cfg.general.get("search_limit", 50)

    if term == "payloads" and len(value) in (32, 40, 64, 128):
        search_term_map[term] = f"CAPE.payloads.{hash_len.get(len(value))}"

    elif term == "configs":
        # check if family name is string only maybe?
        search_term_map[term] = f"CAPE.configs.{value}"
        query_val = {"$exists": True}

    if repconf.mongodb.enabled and query_val:
        if isinstance(search_term_map[term], str):
            mongo_search_query = {search_term_map[term]: query_val}
        else:
            # A list of candidate fields fans out into an $or across all of them.
            mongo_search_query = {"$or": [{search_term: query_val} for search_term in search_term_map[term]]}
        return mongo_find("analysis", mongo_search_query, perform_search_filters).sort([["_id", -1]]).limit(search_limit)
    if es_as_db:
        _source_fields = list(perform_search_filters.keys())[:-1]
        if isinstance(search_term_map[term], str):
            q = {"query": {"match": {search_term_map[term]: value}}}
        else:
            # Several candidate fields become a bool/should with one required match.
            queries = [{"match": {search_term: value}} for search_term in search_term_map[term]]
            q = {"query": {"bool": {"should": queries, "minimum_should_match": 1}}}
        return [
            d["_source"]
            for d in es.search(index=get_analysis_index(), body=q, _source=_source_fields)["hits"]["hits"]
        ]
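The term-to-field mapping decides the final query shape: a plain string maps to one field, while a list fans out into a Mongo $or (or an Elasticsearch should clause). A self-contained sketch with an illustrative map, not the real search_term_map:

search_term_map = {
    "name": "target.file.name",                 # one field: plain match
    "url": ["target.url", "network.http.uri"],  # several fields: $or fan-out
}

def build_mongo_query(term, query_val):
    mapped = search_term_map[term]
    if isinstance(mapped, str):
        return {mapped: query_val}
    return {"$or": [{field: query_val} for field in mapped]}

print(build_mongo_query("url", {"$regex": "example\\.com", "$options": "i"}))
# -> {'$or': [{'target.url': {...}}, {'network.http.uri': {...}}]}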