Example #1
    def export(request, task_id):
        if request.method == "POST":
            taken_dirs = request.POST.getlist("dirs")
            taken_files = request.POST.getlist("files")

            try:
                zip = ExportController.create(task_id=task_id,
                                              taken_dirs=taken_dirs,
                                              taken_files=taken_files)

                response = HttpResponse(zip.getvalue(),
                                        content_type="application/zip")
                response["Content-Disposition"] = (
                    "attachment; filename=%s.zip" % task_id
                )
                return response

            except Exception as e:
                return view_error(request, str(e))

        report = AnalysisController.get_report(task_id)

        if "analysis_path" not in report.get("analysis", {}).get("info", {}):
            return view_error(
                request, "The analysis was created before the export "
                "functionality was integrated with Cuckoo and is "
                "therefore not available for this task (in order to "
                "export this analysis, please reprocess its report).")

        analysis_path = report["analysis"]["info"]["analysis_path"]
        dirs, files = ExportController.get_files(analysis_path)
        return render_template(request,
                               "analysis/export.html",
                               report=report,
                               dirs=dirs,
                               files=files)
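
A note on the pattern: the view above only needs ExportController.create to hand back an in-memory ZIP object exposing getvalue(). A minimal, hypothetical sketch of that general pattern (the helper name and file layout are illustrative, not Cuckoo's actual controller):

# Illustrative only: build an in-memory ZIP and serve it the same way the
# export view consumes the return value of ExportController.create().
import io
import zipfile

from django.http import HttpResponse

def make_zip_response(task_id, files):
    # "files" is assumed to be a mapping of archive name -> bytes content.
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf:
        for name, content in files.items():
            zf.writestr(name, content)

    response = HttpResponse(buf.getvalue(), content_type="application/zip")
    response["Content-Disposition"] = "attachment; filename=%s.zip" % task_id
    return response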
Example #2
def hash(request, task_id, compare_with_hash):
    """Select all analyses with the specified file hash."""
    try:
        data = AnalysisCompareController.hash(task_id, compare_with_hash)
        return render_template(request, "analysis/pages/compare/hash.html", **data)
    except Exception as e:
        return view_error(request, str(e))
Example #3
    def detail(request, task_id, page):
        report = AnalysisController.get_report(task_id)

        pages = {
            "summary": "summary/index",
            "static": "static/index",
            "behavior": "behavior/index",
            "network": "network/index",
            "misp": "misp/index",
            "dropped_files": "dropped/dropped_files",
            "dropped_buffers": "dropped/dropped_buffers",
            "procmemory": "procmemory/index",
            "options": "options/index",
            "feedback": "feedback/index"
        }

        if page in pages:
            return render_template(request,
                                   "analysis/pages/%s.html" % pages[page],
                                   report=report,
                                   page=page)
        else:
            return view_error(request,
                              msg="Analysis subpage not found",
                              status=404)
Example #4
def moloch(request, **kwargs):
    if not config("reporting:moloch:enabled"):
        return view_error(request, "Moloch is not enabled!")

    query = []
    for key, value in kwargs.items():
        if value and value != "None":
            query.append(moloch_mapper[key] % value)

    if ":" in request.get_host():
        hostname = request.get_host().split(":")[0]
    else:
        hostname = request.get_host()

    if config("reporting:moloch:insecure"):
        url = "http://"
    else:
        url = "https://"

    url += "%s:8005/?%s" % (
        config("reporting:moloch:host") or hostname,
        urllib.urlencode({
            "date": "-1",
            "expression": " && ".join(query),
        }),
    )
    return redirect(url)
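
urllib.urlencode is the Python 2 spelling; on Python 3 the same helper lives in urllib.parse. A small equivalent of the query-string construction above, assuming Python 3 (the host and expressions are made up for illustration):

# Python 3 equivalent of the query-string construction used above.
from urllib.parse import urlencode

query = ["ip == 1.2.3.4", "port == 80"]  # example Moloch expressions
url = "https://molochhost:8005/?%s" % urlencode({
    "date": "-1",
    "expression": " && ".join(query),
})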
Example #5
    def postsubmit(request, submit_id):
        submit = Database().view_submit(submit_id, tasks=True)
        if not submit:
            return view_error(request, "Invalid Submit ID specified")

        task_ids = []
        for task in submit.tasks:
            task_ids.append(task.id)

        if not task_ids:
            return view_error(
                request, "This Submit ID is not associated with any tasks. "
                "Please submit some files before loading this page."
            )

        return render_template(
            request, "submission/postsubmit.html", task_ids=sorted(task_ids)
        )
Example #6
def latest_report(request):
    report = results_db.analysis.find_one({},
                                          sort=[("_id", pymongo.DESCENDING)])
    if not report:
        return view_error(request, "No analysis has been found")

    return redirect(reverse("analysis",
                            args=(report["info"]["id"], "summary")),
                    permanent=False)
Example #7
    def reboot(request, task_id):
        # TODO Dummy usage, should probably be improved.
        submit_id = Database().add_submit(None, None, None)

        task_id = Database().add_reboot(task_id=task_id, submit_id=submit_id)
        if not task_id:
            return view_error(request, "Error adding reboot analysis!")

        return redirect("submission/post", submit_id=submit_id)
Example #8
def pcapstream(request, task_id, conntuple):
    """Get packets from the task PCAP related to a certain connection.
    This is possible because we sort the PCAP during processing and remember offsets for each stream.
    """
    src, sport, dst, dport, proto = conntuple.split(",")
    sport, dport = int(sport), int(dport)

    conndata = results_db.analysis.find_one(
        {
            "info.id": int(task_id),
        },
        {
            "network.tcp": 1,
            "network.udp": 1,
            "network.sorted_pcap_id": 1,
        },
        sort=[("_id", pymongo.DESCENDING)],
    )

    if not conndata:
        return view_error(request, "The specified analysis does not exist")

    try:
        if proto == "udp":
            connlist = conndata["network"]["udp"]
        else:
            connlist = conndata["network"]["tcp"]

        conns = filter(
            lambda i: (i["sport"], i["dport"], i["src"], i["dst"]) ==
                      (sport, dport, src, dst),
            connlist
        )
        stream = conns[0]
        offset = stream["offset"]
    except:
        return view_error(request, "Could not find the requested stream")

    try:
        fobj = fs.get(conndata["network"]["sorted_pcap_id"])
        setattr(fobj, "fileno", lambda: -1)
    except:
        return view_error("The required sorted PCAP does not exist")

    packets = list(network.packets_for_stream(fobj, offset))
    # TODO: starting from django 1.7 we should use JsonResponse.
    return HttpResponse(json.dumps(packets), content_type="application/json")
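
The TODO refers to django.http.JsonResponse, available since Django 1.7; because packets is a list rather than a dict, safe=False would be required. A minimal sketch of that variant:

# Hypothetical Django >= 1.7 variant of the last line of the view above.
from django.http import JsonResponse

def packets_as_json(packets):
    # safe=False is required because "packets" is a list, not a dict.
    return JsonResponse(packets, safe=False)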
Example #9
def full_memory_dump_file(request, analysis_number):
    file_path = cwd("storage", "analyses", "%s" % analysis_number,
                    "memory.dmp")
    if os.path.exists(file_path):
        content_type = "application/octet-stream"
        response = HttpResponse(open(file_path, "rb").read(),
                                content_type=content_type)
        response["Content-Disposition"] = "attachment; filename=memory.dmp"
        return response
    else:
        return view_error(request, "File not found")
Example #10
    def dropped(request, task_id, sha1):
        filepath = dropped_filepath(task_id, sha1)
        if not filepath:
            return view_error(request, "No such dropped file was found!")

        # TODO Obtain the original name for this file.
        submit_id = submit_manager.pre("files", [{
            "name": os.path.basename(filepath),
            "data": open(filepath, "rb"),
        }])

        return redirect("submission/pre", submit_id=submit_id)
Example #11
def search(request):
    """New Search API using ElasticSearch as backend."""
    if not elastic.enabled:
        return view_error(
            request, "ElasticSearch is not enabled and therefore it "
            "is not possible to do a global search.")

    if request.method == "GET":
        return render_template(request, "analysis/search.html")

    value = request.POST["search"]

    match_value = ".*".join(re.split("[^a-zA-Z0-9]+", value.lower()))

    r = elastic.client.search(index=elastic.index + "-*",
                              body={
                                  "query": {
                                      "query_string": {
                                          "query": '"%s"*' % value,
                                      },
                                  },
                              })

    analyses = []
    for hit in r["hits"]["hits"]:
        # Find the actual matches in this hit; at most 16 are kept for display.
        matches = _search_helper(hit, "none", match_value)
        if not matches:
            continue

        analyses.append({
            "task_id": hit["_source"]["report_id"],
            "matches": matches[:16],
            "total": max(len(matches) - 16, 0),
        })

    if request.POST.get("raw"):
        return render_template(
            request, "analysis/search_results.html", **{
                "analyses": analyses,
                "term": request.POST["search"],
            })

    return render_template(
        request, "analysis/search.html", **{
            "analyses": analyses,
            "term": request.POST["search"],
            "error": None,
        })
Example #12
    def resubmit(request, task_id):
        task = Database().view_task(task_id)
        if not task:
            return view_error(request, "No Task was found with this ID")

        if task.category == "url":
            # TODO This most certainly needs to be improved.
            submit_id = submit_manager.pre("strings", [
                task.target,
            ], submit_manager.translate_options_to(task.options))
        else:
            # TODO There's a very good chance this won't work properly for
            # analyses of type "archive".
            submit_id = submit_manager.pre("files", [{
                "name": os.path.basename(task.target),
                "data": open(task.target, "rb"),
            }], submit_manager.translate_options_to(task.options))

        return redirect("submission/pre", submit_id=submit_id)
Example #13
def file(request, category, object_id, fetch="fetch"):
    file_item = fs.get(ObjectId(object_id))

    if file_item:
        # Composing file name in format sha256_originalfilename.
        file_name = file_item.sha256 + "_" + file_item.filename

        # Managing gridfs error if field contentType is missing.
        try:
            content_type = file_item.contentType
        except AttributeError:
            content_type = "application/octet-stream"

        response = HttpResponse(file_item.read(), content_type=content_type)

        if fetch != "nofetch":
            response["Content-Disposition"] = (
                "attachment; filename=%s" % file_name
            )

        return response
    else:
        return view_error(request, "File not found")
Example #14
    def import_(request):
        if request.method == "GET":
            return render_template(request, "analysis/import.html")

        if request.method != "POST":
            return view_error(request, "Import analysis request must be POST!")

        submit_id = Database().add_submit(None, None, None)
        task_ids = []

        for analysis in request.FILES.values():
            if not analysis.size:
                continue

            try:
                task_ids.append(submit_manager.import_(analysis, submit_id))
            except CuckooOperationalError as e:
                log.warning(
                    "Error importing analysis (%s): %s", analysis.name, e
                )
                continue

        return redirect("submission/post", submit_id=submit_id)
Example #15
def both(request, task_id, compare_with_task_id):
    try:
        data = AnalysisCompareController.both(task_id, compare_with_task_id)
        return render_template(request, "analysis/pages/compare/both.html", **data)
    except Exception as e:
        return view_error(request, str(e))
Example #16
def left(request, task_id):
    try:
        data = AnalysisCompareController.left(task_id)
        return render_template(request, "analysis/pages/compare/left.html", **data)
    except Exception as e:
        return view_error(request, str(e))
Example #17
def remove(request, task_id):
    """Remove an analysis.
    @todo: remove folder from storage.
    """
    analyses = results_db.analysis.find({"info.id": int(task_id)})

    if not analyses.count():
        return view_error(request, "The specified analysis does not exist")

    # Check whether multiple analyses were found with the same ID, e.g. when
    # process.py was run manually.
    if analyses.count() > 1:
        message = (
            "Multiple tasks with this ID deleted, thanks for all the fish "
            "(the specified analysis was present multiple times in mongo).")
    else:
        message = "Task deleted, thanks for all the fish."

    for analysis in analyses:
        # Delete sample if not used.
        if "file_id" in analysis["target"]:
            if results_db.analysis.find({
                    "target.file_id": ObjectId(analysis["target"]["file_id"]),
            }).count() == 1:
                fs.delete(ObjectId(analysis["target"]["file_id"]))

        # Delete screenshots.
        for shot in analysis["shots"]:
            if isinstance(shot, dict):
                if "small" in shot:
                    if results_db.analysis.find({
                            "shots":
                            ObjectId(shot["small"]),
                    }).count() == 1:
                        fs.delete(ObjectId(shot["small"]))

                if "original" in shot:
                    if results_db.analysis.find({
                            "shots":
                            ObjectId(shot["original"]),
                    }).count() == 1:
                        fs.delete(ObjectId(shot["original"]))

                continue

            if results_db.analysis.find({
                    "shots": ObjectId(shot)
            }).count() == 1:
                fs.delete(ObjectId(shot))

        # Delete the network PCAP.
        if "pcap_id" in analysis["network"] and results_db.analysis.find({
                "network.pcap_id": ObjectId(analysis["network"]["pcap_id"]),
        }).count() == 1:
            fs.delete(ObjectId(analysis["network"]["pcap_id"]))

        # Delete the sorted PCAP.
        if "sorted_pcap_id" in analysis["network"] and results_db.analysis.find({
                "network.sorted_pcap_id": ObjectId(analysis["network"]["sorted_pcap_id"]),
        }).count() == 1:
            fs.delete(ObjectId(analysis["network"]["sorted_pcap_id"]))

        # Delete the mitmproxy dump.
        if "mitmproxy_id" in analysis["network"] and results_db.analysis.find({
                "network.mitmproxy_id": ObjectId(analysis["network"]["mitmproxy_id"]),
        }).count() == 1:
            fs.delete(ObjectId(analysis["network"]["mitmproxy_id"]))

        # Delete dropped files.
        for drop in analysis.get("dropped", []):
            if "object_id" in drop and results_db.analysis.find({
                    "dropped.object_id": ObjectId(drop["object_id"]),
            }).count() == 1:
                fs.delete(ObjectId(drop["object_id"]))

        # Delete calls.
        for process in analysis.get("behavior", {}).get("processes", []):
            for call in process["calls"]:
                results_db.calls.remove({"_id": ObjectId(call)})

        # Delete analysis data.
        results_db.analysis.remove({"_id": ObjectId(analysis["_id"])})

    # Delete from SQL db.
    db = Database()
    db.delete_task(task_id)

    return render_template(request, "success.html", **{
        "message": message,
    })
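
A compatibility note: Cursor.count() and Collection.remove(), used throughout this view, belong to the legacy PyMongo API and were removed in PyMongo 4; the same operations are now spelled count_documents and delete_one/delete_many. A hedged sketch of the modern equivalents (illustrative, not what Cuckoo ships):

# Modern PyMongo spellings of the legacy calls used above (illustrative).
from bson.objectid import ObjectId

def delete_grid_file_if_unreferenced(results_db, fs, query, file_id):
    # Delete a GridFS file only when a single analysis still references it.
    if results_db.analysis.count_documents(query) == 1:
        fs.delete(ObjectId(file_id))

def delete_analysis_document(results_db, analysis_id):
    # Collection.remove() is gone; delete_one() removes a single document.
    results_db.analysis.delete_one({"_id": ObjectId(analysis_id)})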