def search(request):
    """New Search API using ElasticSearch as backend.

    GET renders the empty search form; POST runs the query and renders
    the results (or just the raw results partial when "raw" is set).
    """
    if not elastic.enabled:
        return view_error(
            request, "ElasticSearch is not enabled and therefore it "
            "is not possible to do a global search.")

    if request.method == "GET":
        return render_template(request, "analysis/search.html")

    value = request.POST["search"]

    # Normalized pattern handed to _search_helper() to locate the
    # individual matches inside each ElasticSearch hit.
    match_value = ".*".join(re.split("[^a-zA-Z0-9]+", value.lower()))

    r = elastic.client.search(index=elastic.index + "-*", body={
        "query": {
            "query_string": {
                "query": '"%s"*' % value,
            },
        },
    })

    analyses = []
    for hit in r["hits"]["hits"]:
        # Find the actual matches in this hit and limit to 16 matches.
        matches = _search_helper(hit, "none", match_value)
        if not matches:
            continue

        analyses.append({
            "task_id": hit["_source"]["report_id"],
            "matches": matches[:16],
            "total": max(len(matches) - 16, 0),
        })

    if request.POST.get("raw"):
        return render_template(
            request, "analysis/search_results.html", **{
                "analyses": analyses,
                "term": request.POST["search"],
            })

    return render_template(
        request, "analysis/search.html", **{
            "analyses": analyses,
            "term": request.POST["search"],
            "error": None,
        })
def detail(request, task_id, page):
    """Render one subpage of an analysis report.

    @param task_id: cuckoo task id
    @param page: subpage identifier (e.g. "summary", "network")
    """
    report = AnalysisController.get_report(task_id)

    # Maps URL subpage names onto their template paths.
    pages = {
        "summary": "summary/index",
        "static": "static/index",
        "behavior": "behavior/index",
        "network": "network/index",
        "misp": "misp/index",
        "dropped_files": "dropped/dropped_files",
        "dropped_buffers": "dropped/dropped_buffers",
        "procmemory": "procmemory/index",
        "options": "options/index",
        "feedback": "feedback/index"
    }

    # Membership test on the dict itself instead of .keys().
    if page not in pages:
        return view_error(request, msg="Analysis subpage not found",
                          status=404)

    return render_template(request, "analysis/pages/%s.html" % pages[page],
                           report=report, page=page)
def export(request, task_id):
    """Export (parts of) an analysis as a downloadable zip archive.

    GET renders the file/directory selection page; POST builds the zip
    from the selected entries and streams it back.
    """
    if request.method == "POST":
        taken_dirs = request.POST.getlist("dirs")
        taken_files = request.POST.getlist("files")
        try:
            # Renamed from "zip" so the builtin isn't shadowed.
            zip_fp = ExportController.create(task_id=task_id,
                                             taken_dirs=taken_dirs,
                                             taken_files=taken_files)

            response = HttpResponse(zip_fp.getvalue(),
                                    content_type="application/zip")
            response[
                "Content-Disposition"] = "attachment; filename=%s.zip" % task_id
            return response
        except Exception as e:
            return view_error(request, str(e))

    report = AnalysisController.get_report(task_id)

    if "analysis_path" not in report.get("analysis", {}).get("info", {}):
        return view_error(
            request, "The analysis was created before the export "
            "functionality was integrated with Cuckoo and is "
            "therefore not available for this task (in order to "
            "export this analysis, please reprocess its report).")

    analysis_path = report["analysis"]["info"]["analysis_path"]

    dirs, files = ExportController.get_files(analysis_path)
    return render_template(request, "analysis/export.html",
                           report=report, dirs=dirs, files=files)
def reboot(request, task_id):
    """Queue a reboot analysis for an existing task and show its page."""
    reboot_task = Database().add_reboot(task_id=task_id)

    # Absolute site root without the trailing slash.
    base_url = request.build_absolute_uri("/")[:-1]

    return render_template(
        request, "submission/reboot.html",
        task_id=task_id, task_obj=reboot_task, baseurl=base_url
    )
def hash(request, task_id, compare_with_hash):
    """Select all analyses with specified file hash."""
    try:
        context = AnalysisCompareController.hash(task_id, compare_with_hash)
        return render_template(
            request, "analysis/pages/compare/hash.html", **context
        )
    except Exception as e:
        return view_error(request, str(e))
def search_behavior(request, task_id):
    """Search a task's behavioral log with a case-insensitive regex.

    Matches against API names, argument keys, plain-string argument
    values, and string elements of list/tuple argument values. Returns
    the rendered search-results partial, grouped per process.
    """
    if request.method != "POST":
        raise PermissionDenied

    query = request.POST.get("search")
    query = re.compile(query, re.I)
    results = []

    # Fetch analysis report.
    record = results_db.analysis.find_one({
        "info.id": int(task_id),
    })

    # Loop through every process
    for process in record["behavior"]["processes"]:
        process_results = []

        chunks = results_db.calls.find({"_id": {"$in": process["calls"]}})
        # "index" is the call's global position across all chunks of this
        # process; stored as call["id"] so the template can locate it.
        index = -1
        for chunk in chunks:
            for call in chunk["calls"]:
                index += 1
                # API-name match: record the call and skip the arguments.
                if query.search(call["api"]):
                    call["id"] = index
                    process_results.append(call)
                    continue

                for key, value in call["arguments"].items():
                    if query.search(key):
                        call["id"] = index
                        process_results.append(call)
                        break

                    # NOTE: basestring means this is Python 2 code.
                    if isinstance(value, basestring) and query.search(value):
                        call["id"] = index
                        process_results.append(call)
                        break

                    if isinstance(value, (tuple, list)):
                        for arg in value:
                            if not isinstance(arg, basestring):
                                continue

                            if query.search(arg):
                                call["id"] = index
                                process_results.append(call)
                                break
                        else:
                            # No element of this list matched; move on to
                            # the next argument (for/else: no break hit).
                            continue
                        # An element matched: stop scanning this call's
                        # arguments so it is only appended once.
                        break

        if process_results:
            results.append({"process": process, "signs": process_results})

    return render_template(request,
                           "analysis/pages/behavior/_search_results.html", **{
                               "results": results,
                           })
def pending(request):
    """List up to 500 tasks that are still waiting to be analyzed."""
    tasks = [
        normalize_task(task.to_dict())
        for task in Database().list_tasks(status=TASK_PENDING, limit=500)
    ]
    return render_template(request, "analysis/pending.html", **{
        "tasks": tasks,
    })
def presubmit(request, submit_id):
    """Show the pre-submission options page for a stored Submit entry."""
    if not Database().view_submit(submit_id):
        # TODO Include an error message regarding the invalid Submit entry.
        return redirect("submission/index")

    return render_template(
        request, "submission/presubmit.html", submit_id=submit_id
    )
def filtered_chunk(request, task_id, pid, category):
    """Filters calls for call category.
    @param task_id: cuckoo task id
    @param pid: pid you want calls
    @param category: call category type
    """
    if not request.is_ajax():
        raise PermissionDenied

    # Search calls related to your PID.
    record = results_db.analysis.find_one(
        {
            "info.id": int(task_id),
            "behavior.processes.pid": int(pid),
        }, {
            "behavior.processes.pid": 1,
            "behavior.processes.calls": 1,
        })

    if not record:
        raise ObjectDoesNotExist

    # Extract embedded document related to your process from response
    # collection.
    process = None
    for pdict in record["behavior"]["processes"]:
        if pdict["pid"] == int(pid):
            process = pdict

    if not process:
        raise ObjectDoesNotExist

    # Create empty process dict for AJAX view.
    filtered_process = {
        "pid": pid,
        "calls": [],
    }

    # Populate dict, fetching data from all calls and selecting only the
    # appropriate category. The chunk id gets its own name so it is not
    # shadowed by the per-call loop variable (the original reused "call"
    # for both).
    for chunk_id in process["calls"]:
        chunk = results_db.calls.find_one({"_id": chunk_id})
        for call in chunk["calls"]:
            if call["category"] == category:
                filtered_process["calls"].append(call)

    return render_template(request, "analysis/pages/behavior/_chunk.html", **{
        "chunk": filtered_process,
    })
def postsubmit(request, submit_id):
    """Show the post-submission page listing the tasks of a Submit entry."""
    submit = Database().view_submit(submit_id, tasks=True)
    if not submit:
        return view_error(request, "Invalid Submit ID specified")

    task_ids = [task.id for task in submit.tasks]

    if not task_ids:
        return view_error(
            request,
            "This Submit ID is not associated with any tasks. "
            "Please submit some files before loading this page."
        )

    return render_template(
        request, "submission/postsubmit.html", task_ids=sorted(task_ids)
    )
def chunk(request, task_id, pid, pagenum):
    """Return one 100-call page ("chunk") of a process's behavioral log.

    @param task_id: cuckoo task id
    @param pid: process id whose calls are requested
    @param pagenum: 1-based page number as provided by the URL
    """
    try:
        pid, pagenum = int(pid), int(pagenum) - 1
    except (ValueError, TypeError):
        # Narrowed from a bare except: only conversion failures should be
        # treated as an invalid (forbidden) request.
        raise PermissionDenied

    if not request.is_ajax():
        raise PermissionDenied

    record = results_db.analysis.find_one(
        {
            "info.id": int(task_id),
            "behavior.processes.pid": pid
        }, {
            "behavior.processes.pid": 1,
            "behavior.processes.calls": 1
        })

    if not record:
        raise ObjectDoesNotExist

    process = None
    for pdict in record["behavior"]["processes"]:
        if pdict["pid"] == pid:
            process = pdict

    if not process:
        raise ObjectDoesNotExist

    if 0 <= pagenum < len(process["calls"]):
        objectid = process["calls"][pagenum]
        chunk = results_db.calls.find_one({"_id": ObjectId(objectid)})
        # Calls are stored 100 per chunk; assign each its global index.
        for idx, call in enumerate(chunk["calls"]):
            call["id"] = pagenum * 100 + idx
    else:
        chunk = dict(calls=[])

    return render_template(request, "analysis/pages/behavior/_chunk.html", **{
        "chunk": chunk,
    })
def import_(request):
    """Import previously exported analyses into a freshly created submit."""
    if request.method == "GET":
        return render_template(request, "analysis/import.html")

    if request.method != "POST":
        return view_error(request, "Import analysis request must be POST!")

    submit_id = Database().add_submit(None, None, None)

    task_ids = []
    for uploaded in request.FILES.values():
        # Empty uploads are silently skipped.
        if not uploaded.size:
            continue

        try:
            task_ids.append(submit_manager.import_(uploaded, submit_id))
        except CuckooOperationalError as e:
            # Best effort: log the failure and carry on with the rest.
            log.warning(
                "Error importing analysis (%s): %s", uploaded.name, e
            )

    return redirect("submission/post", submit_id=submit_id)
def both(request, task_id, compare_with_task_id):
    """Compare two analyses side by side."""
    try:
        context = AnalysisCompareController.both(task_id, compare_with_task_id)
        return render_template(
            request, "analysis/pages/compare/both.html", **context
        )
    except Exception as e:
        return view_error(request, str(e))
def remove(request, task_id):
    """Remove an analysis.
    @todo: remove folder from storage.

    Deletes the Mongo analysis document(s) for this task id, every
    GridFS file that no other analysis still references (sample,
    screenshots, pcaps, mitmproxy dump, dropped files), the behavioral
    call chunks, and finally the task row in the SQL database.
    """
    analyses = results_db.analysis.find({"info.id": int(task_id)})

    # Checks if more analysis found with the same ID, like if process.py
    # was run manually.
    if analyses.count() > 1:
        message = (
            "Multiple tasks with this ID deleted, thanks for all the fish "
            "(the specified analysis was present multiple times in mongo).")
    elif analyses.count() == 1:
        message = "Task deleted, thanks for all the fish."

    if not analyses.count():
        return view_error(request, "The specified analysis does not exist")

    for analysis in analyses:
        # Delete sample if not used.
        # (count() == 1 means only this analysis references the file, so
        # it is safe to remove from GridFS.)
        if "file_id" in analysis["target"]:
            if results_db.analysis.find({
                "target.file_id": ObjectId(analysis["target"]["file_id"])
            }).count() == 1:
                fs.delete(ObjectId(analysis["target"]["file_id"]))

        # Delete screenshots.
        # Entries are either dicts with "small"/"original" GridFS ids or
        # a bare ObjectId (presumably an older schema — verify).
        for shot in analysis["shots"]:
            if isinstance(shot, dict):
                if "small" in shot:
                    if results_db.analysis.find({
                        "shots": ObjectId(shot["small"]),
                    }).count() == 1:
                        fs.delete(ObjectId(shot["small"]))

                if "original" in shot:
                    if results_db.analysis.find({
                        "shots": ObjectId(shot["original"]),
                    }).count() == 1:
                        fs.delete(ObjectId(shot["original"]))

                continue

            if results_db.analysis.find({
                "shots": ObjectId(shot)
            }).count() == 1:
                fs.delete(ObjectId(shot))

        # Delete network pcap.
        if "pcap_id" in analysis["network"] and results_db.analysis.find({
            "network.pcap_id": ObjectId(analysis["network"]["pcap_id"])
        }).count() == 1:
            fs.delete(ObjectId(analysis["network"]["pcap_id"]))

        # Delete sorted pcap
        if "sorted_pcap_id" in analysis[
            "network"] and results_db.analysis.find({
                "network.sorted_pcap_id":
                    ObjectId(analysis["network"]["sorted_pcap_id"])
        }).count() == 1:
            fs.delete(ObjectId(analysis["network"]["sorted_pcap_id"]))

        # Delete mitmproxy dump.
        if "mitmproxy_id" in analysis["network"] and results_db.analysis.find({
            "network.mitmproxy_id":
                ObjectId(analysis["network"]["mitmproxy_id"])
        }).count() == 1:
            fs.delete(ObjectId(analysis["network"]["mitmproxy_id"]))

        # Delete dropped.
        for drop in analysis.get("dropped", []):
            if "object_id" in drop and results_db.analysis.find({
                "dropped.object_id": ObjectId(drop["object_id"])
            }).count() == 1:
                fs.delete(ObjectId(drop["object_id"]))

        # Delete calls.
        for process in analysis.get("behavior", {}).get("processes", []):
            for call in process["calls"]:
                results_db.calls.remove({"_id": ObjectId(call)})

        # Delete analysis data.
        results_db.analysis.remove({"_id": ObjectId(analysis["_id"])})

    # Delete from SQL db.
    db = Database()
    db.delete_task(task_id)

    return render_template(request, "success.html", **{
        "message": message,
    })
def left(request, task_id):
    """Render the left-hand side of the analysis comparison view."""
    try:
        context = AnalysisCompareController.left(task_id)
        return render_template(
            request, "analysis/pages/compare/left.html", **context
        )
    except Exception as e:
        return view_error(request, str(e))
def index(request):
    """Render the dashboard with the configured machinery and version."""
    dashboard = {
        "machinery": config("cuckoo:cuckoo:machinery"),
        "version": version,
    }
    return render_template(request, "dashboard/index.html", report=dashboard)
def recent(request):
    """Render the recent-analyses overview page."""
    return render_template(request, "analysis/index.html")
def submit(request):
    """Render the submission page."""
    return render_template(request, "submission/submit.html")