def index(request, resubmit_hash=False):
    if request.method == "POST":
        (
            static,
            package,
            timeout,
            priority,
            options,
            machine,
            platform,
            tags,
            custom,
            memory,
            clock,
            enforce_timeout,
            shrike_url,
            shrike_msg,
            shrike_sid,
            shrike_refer,
            unique,
            referrer,
            tlp,
            tags_tasks,
            route,
            cape,
        ) = parse_request_arguments(request)

        # This is done to remove spaces in options but not break custom paths.
        options = ",".join(
            "=".join(value.strip() for value in option.split("=", 1))
            for option in options.split(",")
            if option and "=" in option
        )

        opt_filename = get_user_filename(options, custom)

        if priority and web_conf.public.enabled and web_conf.public.priority and not request.user.is_staff:
            priority = web_conf.public.priority
        if timeout and web_conf.public.enabled and web_conf.public.timeout and not request.user.is_staff:
            timeout = web_conf.public.timeout
        if options:
            options += ","

        if referrer:
            options += "referrer=%s," % referrer
        if request.POST.get("free"):
            options += "free=yes,"
        if request.POST.get("nohuman"):
            options += "nohuman=yes,"
        if request.POST.get("tor"):
            options += "tor=yes,"
        if request.POST.get("process_dump"):
            options += "procdump=0,"
        if request.POST.get("process_memory"):
            options += "procmemdump=1,"
        if request.POST.get("import_reconstruction"):
            options += "import_reconstruction=1,"
        if request.POST.get("disable_cape"):
            options += "disable_cape=1,"
        if request.POST.get("kernel_analysis"):
            options += "kernel_analysis=yes,"
        if request.POST.get("norefer"):
            options += "norefer=1,"
        if request.POST.get("oldloader"):
            options += "no-iat=1,"
        if request.POST.get("unpack"):
            options += "unpack=yes,"

        # amsidump is enabled by default in the monitor for Win10+.
        if web_conf.amsidump.enabled and not request.POST.get("amsidump"):
            options += "amsidump=0,"

        # Drop the trailing comma left by the option concatenation above.
        options = options[:-1]

        opt_apikey = False
        opts = get_options(options)
        if opts:
            opt_apikey = opts.get("apikey", False)

        status = "ok"
        task_ids_tmp = []
        existent_tasks = {}
        details = {
            "errors": [],
            "content": False,
            "request": request,
            "task_ids": [],
            "url": False,
            "params": {},
            "headers": {},
            "service": "Local",
            "path": "",
            "fhash": False,
            "options": options,
            "only_extraction": False,
            "user_id": request.user.id or 0,
        }

        if "hash" in request.POST and request.POST.get("hash", False) and request.POST.get("hash")[0] != "":
            resubmission_hash = request.POST.get("hash").strip()
            paths = db.sample_path_by_hash(resubmission_hash)
            if paths:
                content = get_file_content(paths)
                if not content:
                    return render(
                        request,
                        "error.html",
                        {"error": "Can't find {} on disk, {}".format(resubmission_hash, str(paths))},
                    )
                folder = os.path.join(settings.TEMP_PATH, "cape-resubmit")
                if not os.path.exists(folder):
                    os.makedirs(folder)
                base_dir = tempfile.mkdtemp(prefix="resubmit_", dir=folder)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + sanitize_filename(resubmission_hash)
                path = store_temp_file(content, filename)
                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(filename): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256", resubmission_hash, search_limit=5)
                        for record in records:
                            existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)
            else:
                return render(request, "error.html", {"error": "File not found on hdd for resubmission"})
        elif "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
details["service"] = "WebGUI" for sample in samples: # Error if there was only one submitted sample and it's empty. # But if there are multiple and one was empty, just ignore it. if not sample.size: details["errors"].append( {sample.name: "You uploaded an empty file."}) continue elif sample.size > settings.MAX_UPLOAD_SIZE: details["errors"].append({ sample.name: "You uploaded a file that exceeds the maximum allowed upload size specified in conf/web.conf." }) continue if opt_filename: filename = opt_filename else: filename = sanitize_filename(sample.name) # Moving sample from django temporary file to CAPE temporary storage to let it persist between reboot (if user like to configure it in that way). path = store_temp_file(sample.read(), filename) sha256 = File(path).get_sha256() if (not request.user.is_staff and (web_conf.uniq_submission.enabled or unique) and db.check_file_uniq( sha256, hours=web_conf.uniq_submission.hours)): details["errors"].append({ filename: "Duplicated file, disable unique option on submit or in conf/web.conf to force submission" }) continue if timeout and web_conf.public.enabled and web_conf.public.timeout and timeout > web_conf.public.timeout: timeout = web_conf.public.timeout details["path"] = path details["content"] = get_file_content(path) status, task_ids_tmp = download_file(**details) if status == "error": details["errors"].append( {os.path.basename(path): task_ids_tmp}) else: if web_conf.general.get("existent_tasks", False): records = perform_search("target_sha256", sha256, search_limit=5) for record in records: if record.get("target").get("file", {}).get("sha256"): existent_tasks.setdefault( record["target"]["file"]["sha256"], []).append(record) details["task_ids"] = task_ids_tmp elif "quarantine" in request.FILES: samples = request.FILES.getlist("quarantine") for sample in samples: # Error if there was only one submitted sample and it's empty. # But if there are multiple and one was empty, just ignore it. if not sample.size: if len(samples) != 1: continue return render( request, "error.html", {"error": "You uploaded an empty quarantine file."}) elif sample.size > settings.MAX_UPLOAD_SIZE: return render( request, "error.html", { "error": "You uploaded a quarantine file that exceeds the maximum allowed upload size specified in conf/web.conf." }, ) # Moving sample from django temporary file to Cuckoo temporary storage to # let it persist between reboot (if user like to configure it in that way). tmp_path = store_temp_file(sample.read(), sample.name) path = unquarantine(tmp_path) try: os.remove(tmp_path) except Exception as e: print(e) if not path: return render(request, "error.html", { "error": "You uploaded an unsupported quarantine file." }) details["path"] = path details["content"] = get_file_content(path) status, task_ids_tmp = download_file(**details) if status == "error": details["errors"].append({sample.name: task_ids_tmp}) else: details["task_ids"] = task_ids_tmp elif "static" in request.FILES: samples = request.FILES.getlist("static") for sample in samples: if not sample.size: if len(samples) != 1: continue return render(request, "error.html", {"error": "You uploaded an empty file."}) elif sample.size > settings.MAX_UPLOAD_SIZE: return render( request, "error.html", { "error": "You uploaded a file that exceeds the maximum allowed upload size specified in conf/web.conf." }, ) # Moving sample from django temporary file to Cuckoo temporary storage to # let it persist between reboot (if user like to configure it in that way). 
                path = store_temp_file(sample.read(), sample.name)
                task_id = db.add_static(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if not task_id:
                    return render(request, "error.html", {"error": "We don't have a static extractor for this"})
                details["task_ids"] += task_id
        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request,
                        "error.html",
                        {"error": "You uploaded a PCAP file that exceeds the maximum allowed upload size specified in conf/web.conf."},
                    )

                # Move the sample from the Django temporary file to CAPE's temporary
                # storage so it can persist between reboots (if configured that way).
                path = store_temp_file(sample.read(), sample.name)
                if sample.name.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except Exception:
                            pass
                        path = saz
                    else:
                        return render(request, "error.html", {"error": "Conversion from SAZ to PCAP failed."})
                task_id = db.add_pcap(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if task_id:
                    details["task_ids"].append(task_id)
        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            # Restore a defanged URL (hxxp, [.]) to its usable form.
            url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
            if machine.lower() == "all":
                machines = [vm.name for vm in db.list_machines(platform=platform)]
            elif machine:
                machine_details = db.view_machine(machine)
                if platform and hasattr(machine_details, "platform") and not machine_details.platform == platform:
                    return render(
                        request,
                        "error.html",
                        {"error": "Wrong platform, {} VM selected for {} sample".format(machine_details.platform, platform)},
                    )
                else:
                    machines = [machine]
            else:
                machines = [None]
            for entry in machines:
                task_id = db.add_url(
                    url=url,
                    package=package,
                    timeout=timeout,
                    priority=priority,
                    options=options,
                    machine=entry,
                    platform=platform,
                    tags=tags,
                    custom=custom,
                    memory=memory,
                    enforce_timeout=enforce_timeout,
                    clock=clock,
                    shrike_url=shrike_url,
                    shrike_msg=shrike_msg,
                    shrike_sid=shrike_sid,
                    shrike_refer=shrike_refer,
                    route=route,
                    cape=cape,
                    tags_tasks=tags_tasks,
                    user_id=request.user.id or 0,
                )
                details["task_ids"].append(task_id)
        elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip():
            url = request.POST.get("dlnexec").strip()
            if not url:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            # Restore a defanged URL (hxxp, [.]) to its usable form.
            url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
            response = _download_file(request.POST.get("route"), url, options)
            if not response:
                return render(request, "error.html", {"error": "It was impossible to retrieve the URL"})

            name = os.path.basename(url)
            if "." not in name:
                name = get_user_filename(options, custom) or generate_fake_name()
            path = store_temp_file(response, name)
            details["path"] = path
            details["content"] = get_file_content(path)
            details["service"] = "DLnExec"
            details["source_url"] = url
            status, task_ids_tmp = download_file(**details)
            if status == "error":
                details["errors"].append({name: task_ids_tmp})
            else:
                details["task_ids"] = task_ids_tmp
        elif settings.VTDL_ENABLED and "vtdl" in request.POST and request.POST.get("vtdl", False) and request.POST.get("vtdl")[0] != "":
            if not settings.VTDL_KEY or not settings.VTDL_PATH:
                return render(
                    request,
                    "error.html",
                    {"error": "You specified VirusTotal but must edit the file and specify your VTDL_KEY variable and VTDL_PATH base directory"},
                )
            else:
                if opt_apikey:
                    details["apikey"] = opt_apikey
                details = download_from_vt(request.POST.get("vtdl").strip(), details, opt_filename, settings)

        if details.get("task_ids"):
            tasks_count = len(details["task_ids"])
        else:
            tasks_count = 0
        if tasks_count > 0:
            data = {
                "tasks": details["task_ids"],
                "tasks_count": tasks_count,
                "errors": details["errors"],
                "existent_tasks": existent_tasks,
            }
            return render(request, "submission/complete.html", data)
        else:
            return render(
                request,
                "error.html",
                {"error": "Error adding task(s) to CAPE's database.", "errors": details["errors"]},
            )
    else:
        enabledconf = {}
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = processing.memory.get("enabled")
        enabledconf["procmemory"] = processing.procmemory.get("enabled")
        enabledconf["dlnexec"] = settings.DLNEXEC
        enabledconf["url_analysis"] = settings.URL_ANALYSIS
        enabledconf["tags"] = False
        enabledconf["dist_master_storage_only"] = repconf.distributed.master_storage_only
        enabledconf["linux_on_gui"] = web_conf.linux.enabled
        enabledconf["tlp"] = web_conf.tlp.enabled
        enabledconf["timeout"] = cfg.timeouts.default
        enabledconf["amsidump"] = web_conf.amsidump.enabled
        if all_vms_tags:
            enabledconf["tags"] = True

        if not enabledconf["tags"]:
            # Load tags from the multi-machinery configuration:
            # get the enabled machinery first.
            machinery = cfg.cuckoo.get("machinery")
            if machinery == "multi":
                for mmachinery in Config(machinery).multi.get("machinery").split(","):
                    vms = [x.strip() for x in getattr(Config(mmachinery), mmachinery).get("machines").split(",")]
                    if any("tags" in list(getattr(Config(mmachinery), vmtag).keys()) for vmtag in vms):
                        enabledconf["tags"] = True
                        break
            else:
                # Get VM names for the machinery config elements.
                vms = [x.strip() for x in getattr(Config(machinery), machinery).get("machines").split(",")]
                # Check each VM config element for tags.
                if any("tags" in list(getattr(Config(machinery), vmtag).keys()) for vmtag in vms):
                    enabledconf["tags"] = True

        packages, machines = get_form_data("windows")
        socks5s = _load_socks5_operational()
        socks5s_random = ""
        vpn_random = ""
        if routing.socks5.random_socks5 and socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get("name", False)
        if routing.vpn.random_vpn:
            vpn_random = random.choice(list(vpns.values())).get("name", False)
        if socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get("name", False)

        random_route = False
        if vpn_random and socks5s_random:
            random_route = random.choice((vpn_random, socks5s_random))
        elif vpn_random:
            random_route = vpn_random
        elif socks5s_random:
            random_route = socks5s_random

        existent_tasks = {}
        if resubmit_hash:
            if web_conf.general.get("existent_tasks", False):
                records = perform_search("target_sha256", resubmit_hash, search_limit=5)
                for record in records:
                    existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)

        return render(
            request,
            "submission/index.html",
            {
                "packages": sorted(packages),
                "machines": machines,
                "vpns": list(vpns.values()),
                "random_route": random_route,
                "socks5s": list(socks5s.values()),
                "route": routing.routing.route,
                "internet": routing.routing.internet,
                "inetsim": routing.inetsim.enabled,
                "tor": routing.tor.enabled,
                "config": enabledconf,
                "resubmit": resubmit_hash,
                "tags": sorted(set(all_vms_tags)),
                "existent_tasks": existent_tasks,
                "all_exitnodes": all_nodes_exits_list,
            },
        )
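# A minimal sketch (not part of the original module) of how the option-cleaning
# one-liner in index() behaves; the literal option string below is a
# hypothetical example. Spaces around "=" and around commas are stripped,
# entries without "=" are dropped, and spaces inside a value (e.g. a custom
# path) survive because each option is split on the first "=" only.
def _normalize_options_demo():
    options = "free = yes , path=C:\\Program Files\\app, bogus"
    cleaned = ",".join(
        "=".join(value.strip() for value in option.split("=", 1))
        for option in options.split(",")
        if option and "=" in option
    )
    assert cleaned == "free=yes,path=C:\\Program Files\\app"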
def index(request, resubmit_hash=False):
    remote_console = False
    if request.method == "POST":
        (
            static,
            package,
            timeout,
            priority,
            options,
            machine,
            platform,
            tags,
            custom,
            memory,
            clock,
            enforce_timeout,
            shrike_url,
            shrike_msg,
            shrike_sid,
            shrike_refer,
            unique,
            referrer,
            tlp,
            tags_tasks,
            route,
            cape,
        ) = parse_request_arguments(request)

        # This is done to remove spaces in options but not break custom paths.
        options = ",".join(
            "=".join(value.strip() for value in option.split("=", 1))
            for option in options.split(",")
            if option and "=" in option
        )

        opt_filename = get_user_filename(options, custom)

        if priority and web_conf.public.enabled and web_conf.public.priority and not request.user.is_staff:
            priority = web_conf.public.priority
        if timeout and web_conf.public.enabled and web_conf.public.timeout and not request.user.is_staff:
            timeout = web_conf.public.timeout
        if options:
            options += ","

        if referrer:
            options += "referrer=%s," % referrer
        if request.POST.get("free"):
            options += "free=yes,"
        if request.POST.get("nohuman"):
            options += "nohuman=yes,"
        if web_conf.guacamole.enabled and request.POST.get("interactive_desktop"):
            remote_console = True
            if "nohuman=yes," not in options:
                options += "nohuman=yes,"
        if request.POST.get("tor"):
            options += "tor=yes,"
        if request.POST.get("process_dump"):
            options += "procdump=0,"
        if request.POST.get("process_memory"):
            options += "procmemdump=1,"
        if request.POST.get("import_reconstruction"):
            options += "import_reconstruction=1,"
        if request.POST.get("disable_cape"):
            options += "disable_cape=1,"
        if request.POST.get("kernel_analysis"):
            options += "kernel_analysis=yes,"
        if request.POST.get("norefer"):
            options += "norefer=1,"
        if request.POST.get("oldloader"):
            options += "no-iat=1,"
        if request.POST.get("unpack"):
            options += "unpack=yes,"

        job_category = False
        if request.POST.get("job_category"):
            job_category = request.POST.get("job_category")

        # amsidump is enabled by default in the monitor for Win10+.
        if web_conf.amsidump.enabled and not request.POST.get("amsidump"):
            options += "amsidump=0,"

        # Drop the trailing comma left by the option concatenation above.
        options = options[:-1]

        opt_apikey = False
        opts = get_options(options)
        if opts:
            opt_apikey = opts.get("apikey", False)

        status = "ok"
        task_ids_tmp = []
        existent_tasks = {}
        details = {
            "errors": [],
            "content": False,
            "request": request,
            "task_ids": [],
            "url": False,
            "params": {},
            "headers": {},
            "service": "Local",
            "path": "",
            "fhash": False,
            "options": options,
            "only_extraction": False,
            "user_id": request.user.id or 0,
        }

        task_category = False
        samples = []
        if "hash" in request.POST and request.POST.get("hash", False) and request.POST.get("hash")[0] != "":
            task_category = "resubmit"
            samples = request.POST.get("hash").strip().split(",")
        elif "sample" in request.FILES:
            task_category = "sample"
            samples = request.FILES.getlist("sample")
        elif "quarantine" in request.FILES:
            task_category = "quarantine"
            samples = request.FILES.getlist("quarantine")
        elif "static" in request.FILES:
            task_category = "static"
            samples = request.FILES.getlist("static")
        elif "pcap" in request.FILES:
            task_category = "pcap"
            samples = request.FILES.getlist("pcap")
        elif "url" in request.POST and request.POST.get("url").strip():
            task_category = "url"
            samples = request.POST.get("url").strip()
        elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip():
            task_category = "dlnexec"
            samples = request.POST.get("dlnexec").strip()
        elif settings.VTDL_ENABLED and "vtdl" in request.POST and request.POST.get("vtdl", False) and request.POST.get("vtdl")[0] != "":
            task_category = "vtdl"
            samples = request.POST.get("vtdl").strip()
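        # At this point task_category names the submission type ("resubmit",
        # "sample", "quarantine", "static", "pcap", "url", "dlnexec" or "vtdl"),
        # and samples holds either a list of uploaded files/hashes or a
        # comma-separated string, depending on the branch taken above.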
        list_of_files = []
        if task_category in ("url", "dlnexec"):
            if not samples:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            for url in samples.split(","):
                # Restore a defanged URL (hxxp, [.]) to its usable form.
                url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
                if task_category == "dlnexec":
                    path, content, sha256 = process_new_dlnexec_task(url, route, options, custom)
                    if path:
                        list_of_files.append((content, path, sha256))
                elif task_category == "url":
                    list_of_files.append(("", url, ""))
        elif task_category in ("sample", "quarantine", "static", "pcap"):
            list_of_files, details = process_new_task_files(request, samples, details, opt_filename, unique)
        elif task_category == "resubmit":
            for hash in samples:
                paths = []
                if len(hash) in (32, 40, 64):
                    # MD5/SHA-1/SHA-256: look the sample up by hash.
                    paths = db.sample_path_by_hash(hash)
                else:
                    # Otherwise treat the value as a task ID and reuse its binary.
                    task_binary = os.path.join(settings.CUCKOO_PATH, "storage", "analyses", str(hash), "binary")
                    if os.path.exists(task_binary):
                        paths.append(task_binary)
                    else:
                        tmp_paths = db.find_sample(task_id=int(hash))
                        if not tmp_paths:
                            details["errors"].append({hash: "Task not found for resubmission"})
                            continue
                        for tmp_sample in tmp_paths:
                            path = False
                            tmp_dict = tmp_sample.to_dict()
                            if os.path.exists(tmp_dict.get("target", "")):
                                path = tmp_dict["target"]
                            else:
                                tmp_tasks = db.find_sample(sample_id=tmp_dict["sample_id"])
                                for tmp_task in tmp_tasks:
                                    tmp_path = os.path.join(
                                        settings.CUCKOO_PATH, "storage", "binaries", tmp_task.to_dict()["sha256"]
                                    )
                                    if os.path.exists(tmp_path):
                                        path = tmp_path
                                        break
                            if path:
                                paths.append(path)

                if not paths:
                    details["errors"].append({hash: "File not found on hdd for resubmission"})
                    continue

                content = get_file_content(paths)
                if not content:
                    details["errors"].append({hash: f"Can't find {hash} on disk"})
                    continue

                folder = os.path.join(settings.TEMP_PATH, "cape-resubmit")
                if not os.path.exists(folder):
                    os.makedirs(folder)
                base_dir = tempfile.mkdtemp(prefix="resubmit_", dir=folder)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + sanitize_filename(hash)
                path = store_temp_file(content, filename)
                list_of_files.append((content, path, hash))

        # Hack for resubmit: first collect all files, then file the task under
        # the proper category.
        if job_category and job_category in ("resubmit", "sample", "quarantine", "static", "pcap", "dlnexec", "vtdl"):
            task_category = job_category

        if task_category == "resubmit":
            for content, path, sha256 in list_of_files:
                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256", sha256, search_limit=5)
                        for record in records or []:
                            existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)
        elif task_category == "sample":
            details["service"] = "WebGUI"
            for content, path, sha256 in list_of_files:
                if web_conf.pre_script.enabled and "pre_script" in request.FILES:
                    pre_script = request.FILES["pre_script"]
                    details["pre_script_name"] = pre_script.name
                    details["pre_script_content"] = pre_script.read()

                if web_conf.during_script.enabled and "during_script" in request.FILES:
                    during_script = request.FILES["during_script"]
                    details["during_script_name"] = during_script.name
                    details["during_script_content"] = during_script.read()

                if timeout and web_conf.public.enabled and web_conf.public.timeout and timeout > web_conf.public.timeout:
                    timeout = web_conf.public.timeout

                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256", sha256, search_limit=5)
                        for record in records:
                            if record.get("target").get("file", {}).get("sha256"):
                                existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)
                    details["task_ids"] = task_ids_tmp
        elif task_category == "quarantine":
            for content, tmp_path, sha256 in list_of_files:
                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except Exception as e:
                    print(e)

                if not path:
                    details["errors"].append({os.path.basename(tmp_path): "You uploaded an unsupported quarantine file."})
                    continue

                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
        elif task_category == "static":
            for content, path, sha256 in list_of_files:
                task_id = db.add_static(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if not task_id:
                    return render(request, "error.html", {"error": "We don't have a static extractor for this"})
                details["task_ids"] += task_id
        elif task_category == "pcap":
            for content, path, sha256 in list_of_files:
                if path.lower().endswith(b".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except Exception:
                            pass
                        path = saz
                    else:
                        details["errors"].append({os.path.basename(path): "Conversion from SAZ to PCAP failed."})
                        continue
                task_id = db.add_pcap(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if task_id:
                    details["task_ids"].append(task_id)
        elif task_category == "url":
            for _, url, _ in list_of_files:
                if machine.lower() == "all":
                    machines = [vm.name for vm in db.list_machines(platform=platform)]
                elif machine:
                    machine_details = db.view_machine(machine)
                    if platform and hasattr(machine_details, "platform") and not machine_details.platform == platform:
                        details["errors"].append(
                            {os.path.basename(url): f"Wrong platform, {machine_details.platform} VM selected for {platform} sample"}
                        )
                        continue
                    else:
                        machines = [machine]
                else:
                    machines = [None]

                for entry in machines:
                    task_id = db.add_url(
                        url=url,
                        package=package,
                        timeout=timeout,
                        priority=priority,
                        options=options,
                        machine=entry,
                        platform=platform,
                        tags=tags,
                        custom=custom,
                        memory=memory,
                        enforce_timeout=enforce_timeout,
                        clock=clock,
                        shrike_url=shrike_url,
                        shrike_msg=shrike_msg,
                        shrike_sid=shrike_sid,
                        shrike_refer=shrike_refer,
                        route=route,
                        cape=cape,
                        tags_tasks=tags_tasks,
                        user_id=request.user.id or 0,
                    )
                    details["task_ids"].append(task_id)
        elif task_category == "dlnexec":
            for content, path, sha256 in list_of_files:
                details["path"] = path
                details["content"] = content
                details["service"] = "DLnExec"
                details["source_url"] = samples
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
        elif task_category == "vtdl":
            if not settings.VTDL_KEY or not settings.VTDL_PATH:
                return render(
                    request,
                    "error.html",
                    {"error": "You specified VirusTotal but must edit the file and specify your VTDL_KEY variable and VTDL_PATH base directory"},
                )
            else:
                if opt_apikey:
                    details["apikey"] = opt_apikey
                details = download_from_vt(samples, details, opt_filename, settings)
details.get("task_ids"): tasks_count = len(details["task_ids"]) else: tasks_count = 0 if tasks_count > 0: data = { "tasks": details["task_ids"], "tasks_count": tasks_count, "errors": details["errors"], "existent_tasks": existent_tasks, "remote_console": remote_console, } return render(request, "submission/complete.html", data) else: return render( request, "error.html", { "error": "Error adding task(s) to CAPE's database.", "errors": details["errors"] }) else: enabledconf = {} enabledconf["vt"] = settings.VTDL_ENABLED enabledconf["kernel"] = settings.OPT_ZER0M0N enabledconf["memory"] = processing.memory.get("enabled") enabledconf["procmemory"] = processing.procmemory.get("enabled") enabledconf["dlnexec"] = settings.DLNEXEC enabledconf["url_analysis"] = settings.URL_ANALYSIS enabledconf["tags"] = False enabledconf[ "dist_master_storage_only"] = repconf.distributed.master_storage_only enabledconf["linux_on_gui"] = web_conf.linux.enabled enabledconf["tlp"] = web_conf.tlp.enabled enabledconf["timeout"] = cfg.timeouts.default enabledconf["amsidump"] = web_conf.amsidump.enabled enabledconf["pre_script"] = web_conf.pre_script.enabled enabledconf["during_script"] = web_conf.during_script.enabled if all_vms_tags: enabledconf["tags"] = True if not enabledconf["tags"]: # load multi machinery tags: # Get enabled machinery machinery = cfg.cuckoo.get("machinery") if machinery == "multi": for mmachinery in Config(machinery).multi.get( "machinery").split(","): vms = [ x.strip() for x in getattr(Config( mmachinery), mmachinery).get("machines").split(",") if x.strip() ] if any([ "tags" in list(getattr(Config(mmachinery), vmtag).keys()) for vmtag in vms ]): enabledconf["tags"] = True break else: # Get VM names for machinery config elements vms = [ x.strip() for x in str( getattr(Config(machinery), machinery).get( "machines")).split(",") if x.strip() ] # Check each VM config element for tags if any([ "tags" in list(getattr(Config(machinery), vmtag).keys()) for vmtag in vms ]): enabledconf["tags"] = True packages, machines = get_form_data("windows") socks5s = _load_socks5_operational() socks5s_random = "" vpn_random = "" if routing.socks5.random_socks5 and socks5s: socks5s_random = random.choice(socks5s.values()).get("name", False) if routing.vpn.random_vpn: vpn_random = random.choice(list(vpns.values())).get("name", False) if socks5s: socks5s_random = random.choice(list(socks5s.values())).get( "name", False) random_route = False if vpn_random and socks5s_random: random_route = random.choice((vpn_random, socks5s_random)) elif vpn_random: random_route = vpn_random elif socks5s_random: random_route = socks5s_random existent_tasks = {} if resubmit_hash: if web_conf.general.get("existent_tasks", False): records = perform_search("target_sha256", resubmit_hash, search_limit=5) for record in records: existent_tasks.setdefault( record["target"]["file"]["sha256"], list()) existent_tasks[record["target"]["file"]["sha256"]].append( record) return render( request, "submission/index.html", { "packages": sorted(packages), "machines": machines, "vpns": list(vpns.values()), "random_route": random_route, "socks5s": list(socks5s.values()), "route": routing.routing.route, "internet": routing.routing.internet, "inetsim": routing.inetsim.enabled, "tor": routing.tor.enabled, "config": enabledconf, "resubmit": resubmit_hash, "tags": sorted(list(set(all_vms_tags))), "existent_tasks": existent_tasks, "all_exitnodes": all_nodes_exits_list, }, )