def run(self):
    """Run information gathering.

    Collects timing, guest-machine and submission metadata for the report.
    @return: information dict.
    """
    self.key = "info"
    self.order = 1
    try:
        started = time.strptime(self.task["started_on"], "%Y-%m-%d %H:%M:%S")
        started = datetime.fromtimestamp(time.mktime(started))
        ended = time.strptime(self.task["completed_on"], "%Y-%m-%d %H:%M:%S")
        ended = datetime.fromtimestamp(time.mktime(ended))
    except Exception:
        log.critical("Failed to get start/end time from Task")
        # Sentinel meaning "duration unknown".
        duration = -1
    else:
        duration = (ended - started).seconds
    # Fetch sqlalchemy object.
    task = db.view_task(self.task["id"], details=True)
    if task and task.guest:
        # Get machine description as json.
        machine = task.guest.to_dict()
        # Remove useless task_id.
        del machine["task_id"]
        # Save.
        self.task["machine"] = machine
    parsed_options = get_options(self.task["options"])
    parent_sample_details = False
    # NOTE(review): maintenance tasks appear to skip the parent-sample
    # lookup — confirm against db.list_sample_parent callers.
    if "maint_task_id" not in parsed_options:
        parent_sample_details = db.list_sample_parent(task_id=self.task["id"])
    source_url = db.get_source_url(sample_id=self.task["sample_id"])
    return {
        "version": CUCKOO_VERSION,
        "started": self.task["started_on"],
        "ended": self.task.get("completed_on", "none"),
        "duration": duration,
        "id": int(self.task["id"]),
        "category": self.task["category"],
        "custom": self.task["custom"],
        "machine": self.task["machine"],
        "package": self.get_package(),
        "timeout": self.had_timeout(),
        "shrike_url": self.task["shrike_url"],
        "shrike_refer": self.task["shrike_refer"],
        "shrike_msg": self.task["shrike_msg"],
        "shrike_sid": self.task["shrike_sid"],
        "parent_id": self.task["parent_id"],
        "tlp": self.task["tlp"],
        "parent_sample": parent_sample_details,
        "options": parsed_options,
        "source_url": source_url,
        "route": self.task.get("route"),
        "user_id": self.task.get("user_id"),
    }
def run(self):
    """Run information gathering.

    @return: information dict with task metadata (timing, machine,
        submission options, parent sample and source URL).
    """
    self.key = "info"
    try:
        started = time.strptime(self.task["started_on"], "%Y-%m-%d %H:%M:%S")
        started = datetime.fromtimestamp(time.mktime(started))
        ended = time.strptime(self.task["completed_on"], "%Y-%m-%d %H:%M:%S")
        ended = datetime.fromtimestamp(time.mktime(ended))
    # Narrowed from a bare "except:"; a bare clause would also swallow
    # SystemExit/KeyboardInterrupt.
    except Exception:
        log.critical("Failed to get start/end time from Task.")
        # Sentinel meaning "duration unknown".
        duration = -1
    else:
        duration = (ended - started).seconds
    # Fetch sqlalchemy object.
    task = db.view_task(self.task["id"], details=True)
    if task and task.guest:
        # Get machine description as json.
        machine = task.guest.to_dict()
        # Remove useless task_id.
        del machine["task_id"]
        # Save.
        self.task["machine"] = machine
    distributed = dict()
    parsed_options = get_options(self.task["options"])
    parent_sample_details = False
    if "maint_task_id" not in parsed_options:
        parent_sample_details = db.list_sample_parent(task_id=self.task["id"])
    source_url = db.get_source_url(sample_id=self.task["sample_id"])
    return dict(
        version=CUCKOO_VERSION,
        started=self.task["started_on"],
        ended=self.task.get("completed_on", "none"),
        duration=duration,
        id=int(self.task["id"]),
        category=self.task["category"],
        custom=self.task["custom"],
        machine=self.task["machine"],
        package=self.get_package(),
        timeout=self.had_timeout(),
        shrike_url=self.task["shrike_url"],
        shrike_refer=self.task["shrike_refer"],
        shrike_msg=self.task["shrike_msg"],
        shrike_sid=self.task["shrike_sid"],
        parent_id=self.task["parent_id"],
        tlp=self.task["tlp"],
        parent_sample=parent_sample_details,
        distributed=distributed,
        options=parsed_options,
        source_url=source_url,
    )
def options2passwd(options):
    """Extract the "password" value from a task options string.

    @param options: comma-separated "key=value" task options string.
    @return: password as str, or False if no password option was given.
    """
    # NOTE(review): the condition line was redacted in this copy
    # (`"password="******`); reconstructed to the evident substring check +
    # get_options lookup — confirm against upstream.
    password = False
    if "password=" in options:
        password = get_options(options).get("password")
    # Option values may come back as bytes; normalize to str for callers.
    if password and isinstance(password, bytes):
        password = password.decode("utf8")
    return password
def options2passwd(options: str) -> str:
    """Extract the "password" value from a task options string.

    @param options: comma-separated "key=value" task options string.
    @return: password as str, or "" if no password option was given.
    """
    # NOTE(review): the condition line was redacted in this copy
    # (`"password="******`); reconstructed to the evident substring check +
    # get_options lookup — confirm against upstream.
    password = ""
    if "password=" in options:
        password = get_options(options).get("password")
    # Option values may come back as bytes; normalize to str for callers.
    if password and isinstance(password, bytes):
        password = password.decode()
    return password
def __init__(self, logs_path, task):
    """Initialize the processing module.

    @param logs_path: path to the analysis logs directory.
    @param task: task dictionary for the analysis being processed.
    """
    self._logs_path = logs_path
    self.task = task
    # Pre-parse the task's "key=value,..." option string once.
    self.options = get_options(task["options"])
def run(self):
    """Run information gathering.

    @return: information dict, including distributed-worker details when
        the distributed reporting backend is enabled.
    """
    self.key = "info"
    try:
        started = time.strptime(self.task["started_on"], "%Y-%m-%d %H:%M:%S")
        started = datetime.fromtimestamp(time.mktime(started))
        ended = time.strptime(self.task["completed_on"], "%Y-%m-%d %H:%M:%S")
        ended = datetime.fromtimestamp(time.mktime(ended))
    # Narrowed from a bare "except:"; a bare clause would also swallow
    # SystemExit/KeyboardInterrupt.
    except Exception:
        log.critical("Failed to get start/end time from Task.")
        # Sentinel meaning "duration unknown".
        duration = -1
    else:
        duration = (ended - started).seconds
    # Fetch sqlalchemy object.
    task = db.view_task(self.task["id"], details=True)
    if task and task.guest:
        # Get machine description as json.
        machine = task.guest.to_dict()
        # Remove useless task_id.
        del machine["task_id"]
        # Save.
        self.task["machine"] = machine
    distributed = dict()
    if HAVE_REQUEST and report_cfg.distributed.enabled:
        try:
            # Ask the local distributed API which worker handled this task.
            res = requests.get("http://127.0.0.1:9003/task/{}".format(self.task["id"]), timeout=3, verify=False)
            if res and res.ok:
                if "name" in res.json():
                    distributed["name"] = res.json()["name"]
                    distributed["task_id"] = res.json()["task_id"]
        except Exception as e:
            # Best-effort lookup: log instead of printing to stdout.
            log.error(e)
    parent_sample_details = db.list_sample_parent(task_id=self.task["id"])
    source_url = db.get_source_url(sample_id=self.task["sample_id"])
    return dict(
        version=CUCKOO_VERSION,
        started=self.task["started_on"],
        ended=self.task.get("completed_on", "none"),
        duration=duration,
        id=int(self.task["id"]),
        category=self.task["category"],
        custom=self.task["custom"],
        machine=self.task["machine"],
        package=self.get_package(),
        timeout=self.had_timeout(),
        shrike_url=self.task["shrike_url"],
        shrike_refer=self.task["shrike_refer"],
        shrike_msg=self.task["shrike_msg"],
        shrike_sid=self.task["shrike_sid"],
        parent_id=self.task["parent_id"],
        tlp=self.task["tlp"],
        parent_sample=parent_sample_details,
        distributed=distributed,
        options=get_options(self.task["options"]),
        source_url=source_url,
    )
def index(request, resubmit_hash=False):
    """Submission view.

    POST: create analysis task(s) from a resubmitted hash, uploaded
    sample/quarantine/static/pcap file, URL, download-and-execute URL, or
    VirusTotal hash, then render the completion page.
    GET: render the submission form.

    @param request: Django HttpRequest.
    @param resubmit_hash: optional sha256 pre-filled in the form.
    @return: rendered HttpResponse.
    """
    if request.method == "POST":
        (
            static,
            package,
            timeout,
            priority,
            options,
            machine,
            platform,
            tags,
            custom,
            memory,
            clock,
            enforce_timeout,
            shrike_url,
            shrike_msg,
            shrike_sid,
            shrike_refer,
            unique,
            referrer,
            tlp,
            tags_tasks,
            route,
            cape,
        ) = parse_request_arguments(request)

        # This is done to remove spaces in options but not breaks custom paths
        options = ",".join(
            "=".join(value.strip() for value in option.split("=", 1))
            for option in options.split(",")
            if option and "=" in option
        )
        opt_filename = get_user_filename(options, custom)

        # Clamp priority/timeout for non-staff users when public limits are enabled.
        if priority and web_conf.public.enabled and web_conf.public.priority and not request.user.is_staff:
            priority = web_conf.public.priority
        if timeout and web_conf.public.enabled and web_conf.public.timeout and not request.user.is_staff:
            timeout = web_conf.public.timeout

        if options:
            options += ","

        # Translate the submission-form checkboxes into task options.
        if referrer:
            options += "referrer=%s," % (referrer)
        if request.POST.get("free"):
            options += "free=yes,"
        if request.POST.get("nohuman"):
            options += "nohuman=yes,"
        if request.POST.get("tor"):
            options += "tor=yes,"
        if request.POST.get("process_dump"):
            options += "procdump=0,"
        if request.POST.get("process_memory"):
            options += "procmemdump=1,"
        if request.POST.get("import_reconstruction"):
            options += "import_reconstruction=1,"
        if request.POST.get("disable_cape"):
            options += "disable_cape=1,"
        if request.POST.get("kernel_analysis"):
            options += "kernel_analysis=yes,"
        if request.POST.get("norefer"):
            options += "norefer=1,"
        if request.POST.get("oldloader"):
            options += "no-iat=1,"
        if request.POST.get("unpack"):
            options += "unpack=yes,"
        # amsidump is enabled by default in the monitor for Win10+
        if web_conf.amsidump.enabled and not request.POST.get("amsidump"):
            options += "amsidump=0,"
        # Drop the trailing comma.
        options = options[:-1]

        opt_apikey = False
        opts = get_options(options)
        if opts:
            opt_apikey = opts.get("apikey", False)

        status = "ok"
        task_ids_tmp = []
        existent_tasks = {}
        details = {
            "errors": [],
            "content": False,
            "request": request,
            "task_ids": [],
            "url": False,
            "params": {},
            "headers": {},
            "service": "Local",
            "path": "",
            "fhash": False,
            "options": options,
            "only_extraction": False,
            "user_id": request.user.id or 0,
        }

        if "hash" in request.POST and request.POST.get("hash", False) and request.POST.get("hash")[0] != "":
            # Resubmission of a sample already stored on disk, looked up by hash.
            resubmission_hash = request.POST.get("hash").strip()
            paths = db.sample_path_by_hash(resubmission_hash)
            if paths:
                content = get_file_content(paths)
                if not content:
                    return render(
                        request,
                        "error.html",
                        {"error": "Can't find {} on disk, {}".format(resubmission_hash, str(paths))},
                    )
                folder = os.path.join(settings.TEMP_PATH, "cape-resubmit")
                if not os.path.exists(folder):
                    os.makedirs(folder)
                base_dir = tempfile.mkdtemp(prefix="resubmit_", dir=folder)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + sanitize_filename(resubmission_hash)
                path = store_temp_file(content, filename)
                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(filename): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256", resubmission_hash, search_limit=5)
                        for record in records:
                            existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)
            else:
                return render(request, "error.html", {"error": "File not found on hdd for resubmission"})
        elif "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
            details["service"] = "WebGUI"
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    details["errors"].append({sample.name: "You uploaded an empty file."})
                    continue
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    details["errors"].append(
                        {sample.name: "You uploaded a file that exceeds the maximum allowed upload size specified in conf/web.conf."}
                    )
                    continue
                if opt_filename:
                    filename = opt_filename
                else:
                    filename = sanitize_filename(sample.name)
                # Moving sample from django temporary file to CAPE temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), filename)
                sha256 = File(path).get_sha256()
                if (
                    not request.user.is_staff
                    and (web_conf.uniq_submission.enabled or unique)
                    and db.check_file_uniq(sha256, hours=web_conf.uniq_submission.hours)
                ):
                    details["errors"].append(
                        {filename: "Duplicated file, disable unique option on submit or in conf/web.conf to force submission"}
                    )
                    continue
                if timeout and web_conf.public.enabled and web_conf.public.timeout and timeout > web_conf.public.timeout:
                    timeout = web_conf.public.timeout
                details["path"] = path
                details["content"] = get_file_content(path)
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256", sha256, search_limit=5)
                        for record in records:
                            if record.get("target").get("file", {}).get("sha256"):
                                existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)
                    details["task_ids"] = task_ids_tmp
        elif "quarantine" in request.FILES:
            samples = request.FILES.getlist("quarantine")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty quarantine file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request,
                        "error.html",
                        {"error": "You uploaded a quarantine file that exceeds the maximum allowed upload size specified in conf/web.conf."},
                    )
                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                tmp_path = store_temp_file(sample.read(), sample.name)
                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except Exception as e:
                    print(e)
                if not path:
                    return render(request, "error.html", {"error": "You uploaded an unsupported quarantine file."})
                details["path"] = path
                details["content"] = get_file_content(path)
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({sample.name: task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
        elif "static" in request.FILES:
            samples = request.FILES.getlist("static")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request,
                        "error.html",
                        {"error": "You uploaded a file that exceeds the maximum allowed upload size specified in conf/web.conf."},
                    )
                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)
                task_id = db.add_static(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if not task_id:
                    return render(request, "error.html", {"error": "We don't have static extractor for this"})
                details["task_ids"] += task_id
        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request,
                        "error.html",
                        {"error": "You uploaded a PCAP file that exceeds the maximum allowed upload size specified in conf/web.conf."},
                    )
                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)
                if sample.name.lower().endswith(".saz"):
                    # Fiddler SAZ archives are converted to PCAP before queuing.
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except Exception as e:
                            pass
                        path = saz
                    else:
                        return render(request, "error.html", {"error": "Conversion from SAZ to PCAP failed."})
                task_id = db.add_pcap(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if task_id:
                    details["task_ids"].append(task_id)
        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            # De-fang common defanged-URL notations.
            url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
            if machine.lower() == "all":
                machines = [vm.name for vm in db.list_machines(platform=platform)]
            elif machine:
                machine_details = db.view_machine(machine)
                if platform and hasattr(machine_details, "platform") and not machine_details.platform == platform:
                    return render(
                        request,
                        "error.html",
                        {"error": "Wrong platform, {} VM selected for {} sample".format(machine_details.platform, platform)},
                    )
                else:
                    machines = [machine]
            else:
                machines = [None]
            for entry in machines:
                task_id = db.add_url(
                    url=url,
                    package=package,
                    timeout=timeout,
                    priority=priority,
                    options=options,
                    machine=entry,
                    platform=platform,
                    tags=tags,
                    custom=custom,
                    memory=memory,
                    enforce_timeout=enforce_timeout,
                    clock=clock,
                    shrike_url=shrike_url,
                    shrike_msg=shrike_msg,
                    shrike_sid=shrike_sid,
                    shrike_refer=shrike_refer,
                    route=route,
                    cape=cape,
                    tags_tasks=tags_tasks,
                    user_id=request.user.id or 0,
                )
                details["task_ids"].append(task_id)
        elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip():
            url = request.POST.get("dlnexec").strip()
            if not url:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
            response = _download_file(request.POST.get("route"), url, options)
            if not response:
                return render(request, "error.html", {"error": "Was impossible to retrieve url"})

            name = os.path.basename(url)
            # "not ... in" rewritten to the idiomatic "not in" form.
            if "." not in name:
                name = get_user_filename(options, custom) or generate_fake_name()
            path = store_temp_file(response, name)
            details["path"] = path
            details["content"] = get_file_content(path)
            details["service"] = "DLnExec"
            details["source_url"] = url
            status, task_ids_tmp = download_file(**details)
            if status == "error":
                details["errors"].append({name: task_ids_tmp})
            else:
                details["task_ids"] = task_ids_tmp
        elif (
            settings.VTDL_ENABLED
            and "vtdl" in request.POST
            and request.POST.get("vtdl", False)
            and request.POST.get("vtdl")[0] != ""
        ):
            if not settings.VTDL_KEY or not settings.VTDL_PATH:
                return render(
                    request,
                    "error.html",
                    {"error": "You specified VirusTotal but must edit the file and specify your VTDL_KEY variable and VTDL_PATH base directory"},
                )
            else:
                if opt_apikey:
                    details["apikey"] = opt_apikey
                details = download_from_vt(request.POST.get("vtdl").strip(), details, opt_filename, settings)

        if details.get("task_ids"):
            tasks_count = len(details["task_ids"])
        else:
            tasks_count = 0
        if tasks_count > 0:
            data = {
                "tasks": details["task_ids"],
                "tasks_count": tasks_count,
                "errors": details["errors"],
                "existent_tasks": existent_tasks,
            }
            return render(request, "submission/complete.html", data)
        else:
            return render(
                request,
                "error.html",
                {"error": "Error adding task(s) to CAPE's database.", "errors": details["errors"]},
            )
    else:
        # GET: build the feature-flag map driving the submission form.
        enabledconf = {}
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = processing.memory.get("enabled")
        enabledconf["procmemory"] = processing.procmemory.get("enabled")
        enabledconf["dlnexec"] = settings.DLNEXEC
        enabledconf["url_analysis"] = settings.URL_ANALYSIS
        enabledconf["tags"] = False
        enabledconf["dist_master_storage_only"] = repconf.distributed.master_storage_only
        enabledconf["linux_on_gui"] = web_conf.linux.enabled
        enabledconf["tlp"] = web_conf.tlp.enabled
        enabledconf["timeout"] = cfg.timeouts.default
        enabledconf["amsidump"] = web_conf.amsidump.enabled
        if all_vms_tags:
            enabledconf["tags"] = True
        if not enabledconf["tags"]:
            # load multi machinery tags:
            # Get enabled machinery
            machinery = cfg.cuckoo.get("machinery")
            if machinery == "multi":
                for mmachinery in Config(machinery).multi.get("machinery").split(","):
                    vms = [x.strip() for x in getattr(Config(mmachinery), mmachinery).get("machines").split(",")]
                    if any(["tags" in list(getattr(Config(mmachinery), vmtag).keys()) for vmtag in vms]):
                        enabledconf["tags"] = True
                        break
            else:
                # Get VM names for machinery config elements
                vms = [x.strip() for x in getattr(Config(machinery), machinery).get("machines").split(",")]
                # Check each VM config element for tags
                if any(["tags" in list(getattr(Config(machinery), vmtag).keys()) for vmtag in vms]):
                    enabledconf["tags"] = True

        packages, machines = get_form_data("windows")
        socks5s = _load_socks5_operational()
        socks5s_random = ""
        vpn_random = ""
        # BUGFIX: random.choice() requires a sequence; dict_values raises
        # TypeError on Python 3, so it is materialized with list() first.
        if routing.socks5.random_socks5 and socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get("name", False)
        if routing.vpn.random_vpn:
            vpn_random = random.choice(list(vpns.values())).get("name", False)
        if socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get("name", False)
        random_route = False
        if vpn_random and socks5s_random:
            random_route = random.choice((vpn_random, socks5s_random))
        elif vpn_random:
            random_route = vpn_random
        elif socks5s_random:
            random_route = socks5s_random

        existent_tasks = {}
        if resubmit_hash:
            if web_conf.general.get("existent_tasks", False):
                records = perform_search("target_sha256", resubmit_hash, search_limit=5)
                for record in records:
                    existent_tasks.setdefault(record["target"]["file"]["sha256"], list())
                    existent_tasks[record["target"]["file"]["sha256"]].append(record)

        return render(
            request,
            "submission/index.html",
            {
                "packages": sorted(packages),
                "machines": machines,
                "vpns": list(vpns.values()),
                "random_route": random_route,
                "socks5s": list(socks5s.values()),
                "route": routing.routing.route,
                "internet": routing.routing.internet,
                "inetsim": routing.inetsim.enabled,
                "tor": routing.tor.enabled,
                "config": enabledconf,
                "resubmit": resubmit_hash,
                "tags": sorted(list(set(all_vms_tags))),
                "existent_tasks": existent_tasks,
                "all_exitnodes": all_nodes_exits_list,
            },
        )
def download_file(**kwargs):
    """Create analysis task(s) for a sample, fetching it first if needed.

    Example of kwargs
    {
        "errors": [],
        "content": content,
        "request": request,
        "task_id": [],
        "url": False,
        "params": {},
        "headers": {},
        "service": "tasks_create_file_API",
        "path": tmp_path,
        "fhash": False,
        "options": options,
        "only_extraction": False,
    }

    @return: ("ok", list of new task ids) on success, or ("error", dict) on failure.
    """
    (
        static,
        package,
        timeout,
        priority,
        _,
        machine,
        platform,
        tags,
        custom,
        memory,
        clock,
        enforce_timeout,
        shrike_url,
        shrike_msg,
        shrike_sid,
        shrike_refer,
        unique,
        referrer,
        tlp,
        tags_tasks,
        route,
        cape,
    ) = parse_request_arguments(kwargs["request"])
    onesuccess = False
    username = False
    """
    put here your custom username assignation from your custom auth, Ex:
    request_url = kwargs["request"].build_absolute_uri()
    if "yourdomain.com/submit/" in request_url:
        username = kwargs["request"].COOKIES.get("X-user")
    """

    # in case if user didn't specify routing, and we have enabled random route
    if not route:
        socks5s = _load_socks5_operational()
        socks5s_random = ""
        vpn_random = ""
        # BUGFIX: choice() requires a sequence; dict_values raises TypeError
        # on Python 3, so it is materialized with list() first.
        if routing_conf.socks5.random_socks5 and socks5s:
            socks5s_random = choice(list(socks5s.values())).get("name", False)
        if routing_conf.vpn.random_vpn:
            vpn_random = choice(list(vpns.values())).get("name", False)
        if vpn_random and socks5s_random:
            route = choice((vpn_random, socks5s_random))
        elif vpn_random:
            route = vpn_random
        elif socks5s_random:
            route = socks5s_random

    if package:
        if package == "Emotet":
            return "error", {"error": "Hey guy update your script, this package doesn't exist anymore"}
        # x64 packages implicitly require an x64-tagged VM.
        if package.endswith("_x64"):
            if tags:
                if "x64" not in tags:
                    tags += ",x64"
            else:
                tags = "x64"

    if tags:
        if not all([tag.strip() in all_vms_tags for tag in tags.split(",")]):
            return "error", {
                "error": f"Check Tags help, you have introduced incorrect tag(s). Your tags: {tags} - Supported tags: {all_vms_tags_str}"
            }
        elif all([tag in tags for tag in ("x64", "x86")]):
            return "error", {"error": "Check Tags help, you have introduced x86 and x64 tags for the same task, choose only 1"}

    # Fetch the sample body when only a lookup-service URL was provided.
    if not kwargs.get("content", False) and kwargs.get("url", False):
        try:
            r = requests.get(kwargs["url"], params=kwargs.get("params", {}), headers=kwargs.get("headers", {}), verify=False)
        except requests.exceptions.RequestException as e:
            logging.error(e)
            return "error", {"error": "Provided hash not found on {}".format(kwargs["service"])}

        if (
            r.status_code == 200
            and r.content != b"Hash Not Present"
            and b"The request requires higher privileges than provided by the access token" not in r.content
        ):
            kwargs["content"] = r.content
        elif r.status_code == 403:
            return "error", {
                "error": "API key provided is not a valid {0} key or is not authorized for {0} downloads".format(kwargs["service"])
            }
        elif r.status_code == 404:
            return "error", {"error": "Server returns 404 from {}".format(kwargs["service"])}
        else:
            return "error", {"error": "Was impossible to download from {0}".format(kwargs["service"])}

    if not kwargs["content"]:
        return "error", {"error": "Error downloading file from {}".format(kwargs["service"])}
    try:
        if kwargs.get("fhash", False):
            # Hashing algorithm is selected by the length of the supplied hash.
            retrieved_hash = hashes[len(kwargs["fhash"])](kwargs["content"]).hexdigest()
            if retrieved_hash != kwargs["fhash"].lower():
                return "error", {
                    "error": "Hashes mismatch, original hash: {} - retrieved hash: {}".format(kwargs["fhash"], retrieved_hash)
                }
        if not os.path.exists(kwargs.get("path")):
            # BUGFIX: context manager guarantees the handle is closed even
            # when the write raises.
            with open(kwargs["path"], "wb") as f:
                f.write(kwargs["content"])
    except Exception as e:
        # Log instead of printing to stdout, consistent with the handler above.
        logging.error(e)
        return "error", {"error": "Error writing {} storing/download file to temporary path".format(kwargs["service"])}

    # Distribute task based on route support by worker
    if route and route not in ("none", "None") and all_nodes_exits_list:
        parsed_options = get_options(kwargs["options"])
        node = parsed_options.get("node")
        # BUGFIX: default to an empty list so an unknown worker name produces
        # the error message instead of "in None" raising TypeError.
        if node and route not in all_nodes_exits.get(node, []):
            return "error", {"error": f"Specified worker {node} doesn't support this route: {route}"}
        elif route not in all_nodes_exits_list:
            return "error", {"error": "Specified route doesn't exist on any worker"}
        if not node:
            # get nodes that supports this exit
            tmp_workers = list()
            for node, exitnodes in all_nodes_exits.items():
                if route in exitnodes:
                    tmp_workers.append(node)
            if tmp_workers:
                if kwargs["options"]:
                    kwargs["options"] += ",node=" + choice(tmp_workers)
                else:
                    kwargs["options"] = "node=" + choice(tmp_workers)
        # Remove workers prefixes
        if route.startswith(("socks5:", "vpn:")):
            route = route.replace("socks5:", "", 1).replace("vpn:", "", 1)

    onesuccess = True
    magic_type = get_magic_type(kwargs["path"])
    if disable_x64 is True and kwargs["path"] and magic_type and ("x86-64" in magic_type or "PE32+" in magic_type):
        if len(kwargs["request"].FILES) == 1:
            return "error", {"error": "Sorry no x64 support yet"}

    kwargs["options"], timeout, enforce_timeout = recon(kwargs["path"], kwargs["options"], timeout, enforce_timeout)
    if not kwargs.get("task_machines", []):
        kwargs["task_machines"] = [None]

    platform = get_platform(magic_type)
    if platform == "linux" and not linux_enabled:
        return "error", {"error": "Linux binaries analysis isn't enabled"}

    if machine.lower() == "all":
        kwargs["task_machines"] = [vm.name for vm in db.list_machines(platform=platform)]
    elif machine:
        machine_details = db.view_machine(machine)
        if hasattr(machine_details, "platform") and not machine_details.platform == platform:
            return "error", {"error": "Wrong platform, {} VM selected for {} sample".format(machine_details.platform, platform)}
        else:
            kwargs["task_machines"] = [machine]
    else:
        kwargs["task_machines"] = ["first"]

    # Try to extract before submit to VM
    if not static and "dist_extract" in kwargs["options"]:
        static = True

    for machine in kwargs.get("task_machines", []):
        if machine == "first":
            machine = None

        # Keep this as demux_sample_and_add_to_db in DB
        task_ids_new, extra_details = db.demux_sample_and_add_to_db(
            file_path=kwargs["path"],
            package=package,
            timeout=timeout,
            options=kwargs["options"],
            priority=priority,
            machine=machine,
            custom=custom,
            platform=platform,
            tags=tags,
            memory=memory,
            enforce_timeout=enforce_timeout,
            clock=clock,
            static=static,
            shrike_url=shrike_url,
            shrike_msg=shrike_msg,
            shrike_sid=shrike_sid,
            shrike_refer=shrike_refer,
            tlp=tlp,
            tags_tasks=tags_tasks,
            route=route,
            cape=cape,
            user_id=kwargs.get("user_id"),
            username=username,
            source_url=kwargs.get("source_url", False)
            # parent_id=kwargs.get("parent_id", None),
            # sample_parent_id=kwargs.get("sample_parent_id", None)
        )
        if isinstance(kwargs.get("task_ids", False), list):
            kwargs["task_ids"].extend(task_ids_new)
        else:
            kwargs["task_ids"] = list()
            kwargs["task_ids"].extend(task_ids_new)

    if not onesuccess:
        return "error", {"error": "Provided hash not found on {}".format(kwargs["service"])}

    return "ok", kwargs["task_ids"]
def index(request, resubmit_hash=False):
    """Render the submission form (GET) or create new analysis tasks (POST).

    POST handles every submission category (resubmit/sample/quarantine/
    static/pcap/url/dlnexec/vtdl) and redirects to the completion page;
    GET renders the form with enabled features and machinery details.
    @param request: Django HttpRequest.
    @param resubmit_hash: optional hash used to pre-fill the form on GET.
    @return: rendered Django template response.
    """
    remote_console = False
    if request.method == "POST":
        (
            static,
            package,
            timeout,
            priority,
            options,
            machine,
            platform,
            tags,
            custom,
            memory,
            clock,
            enforce_timeout,
            shrike_url,
            shrike_msg,
            shrike_sid,
            shrike_refer,
            unique,
            referrer,
            tlp,
            tags_tasks,
            route,
            cape,
        ) = parse_request_arguments(request)

        # This is done to remove spaces in options but not breaks custom paths
        options = ",".join("=".join(value.strip() for value in option.split("=", 1)) for option in options.split(",") if option and "=" in option)
        opt_filename = get_user_filename(options, custom)

        # Non-staff users are capped at the publicly configured priority/timeout.
        if priority and web_conf.public.enabled and web_conf.public.priority and not request.user.is_staff:
            priority = web_conf.public.priority

        if timeout and web_conf.public.enabled and web_conf.public.timeout and not request.user.is_staff:
            timeout = web_conf.public.timeout

        if options:
            options += ","

        if referrer:
            options += "referrer=%s," % (referrer)

        if request.POST.get("free"):
            options += "free=yes,"

        if request.POST.get("nohuman"):
            options += "nohuman=yes,"

        # Interactive desktop implies no simulated human interaction.
        if web_conf.guacamole.enabled and request.POST.get("interactive_desktop"):
            remote_console = True
            if "nohuman=yes," not in options:
                options += "nohuman=yes,"

        if request.POST.get("tor"):
            options += "tor=yes,"

        if request.POST.get("process_dump"):
            options += "procdump=0,"

        if request.POST.get("process_memory"):
            options += "procmemdump=1,"

        if request.POST.get("import_reconstruction"):
            options += "import_reconstruction=1,"

        if request.POST.get("disable_cape"):
            options += "disable_cape=1,"

        if request.POST.get("kernel_analysis"):
            options += "kernel_analysis=yes,"

        if request.POST.get("norefer"):
            options += "norefer=1,"

        if request.POST.get("oldloader"):
            options += "no-iat=1,"

        if request.POST.get("unpack"):
            options += "unpack=yes,"

        job_category = False
        if request.POST.get("job_category"):
            job_category = request.POST.get("job_category")

        # amsidump is enabled by default in the monitor for Win10+
        if web_conf.amsidump.enabled and not request.POST.get("amsidump"):
            options += "amsidump=0,"

        # Drop the trailing comma left by the option builders above.
        options = options[:-1]

        opt_apikey = False
        opts = get_options(options)
        if opts:
            opt_apikey = opts.get("apikey", False)

        status = "ok"
        task_ids_tmp = []
        existent_tasks = {}
        # Shared keyword arguments for download_file()-based submissions.
        details = {
            "errors": [],
            "content": False,
            "request": request,
            "task_ids": [],
            "url": False,
            "params": {},
            "headers": {},
            "service": "Local",
            "path": "",
            "fhash": False,
            "options": options,
            "only_extraction": False,
            "user_id": request.user.id or 0,
        }

        # Work out which kind of submission this is and collect its inputs.
        task_category = False
        samples = []
        if "hash" in request.POST and request.POST.get("hash", False) and request.POST.get("hash")[0] != "":
            task_category = "resubmit"
            samples = request.POST.get("hash").strip().split(",")
        elif "sample" in request.FILES:
            task_category = "sample"
            samples = request.FILES.getlist("sample")
        elif "quarantine" in request.FILES:
            task_category = "quarantine"
            samples = request.FILES.getlist("quarantine")
        elif "static" in request.FILES:
            task_category = "static"
            samples = request.FILES.getlist("static")
        elif "pcap" in request.FILES:
            task_category = "pcap"
            samples = request.FILES.getlist("pcap")
        elif "url" in request.POST and request.POST.get("url").strip():
            task_category = "url"
            samples = request.POST.get("url").strip()
        elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip():
            task_category = "dlnexec"
            samples = request.POST.get("dlnexec").strip()
        elif settings.VTDL_ENABLED and "vtdl" in request.POST and request.POST.get("vtdl", False) and request.POST.get("vtdl")[0] != "":
            task_category = "vtdl"
            samples = request.POST.get("vtdl").strip()

        # Normalize every input into (content, path, hash) tuples.
        list_of_files = []
        if task_category in ("url", "dlnexec"):
            if not samples:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            for url in samples.split(","):
                # Undo common defanging before processing the URL.
                url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
                if task_category == "dlnexec":
                    path, content, sha256 = process_new_dlnexec_task(url, route, options, custom)
                    if path:
                        list_of_files.append((content, path, sha256))
                elif task_category == "url":
                    list_of_files.append(("", url, ""))

        elif task_category in ("sample", "quarantine", "static", "pcap"):
            list_of_files, details = process_new_task_files(request, samples, details, opt_filename, unique)

        elif task_category == "resubmit":
            # NOTE: loop variable renamed from "hash" to avoid shadowing the builtin.
            for sample_hash in samples:
                paths = []
                if len(sample_hash) in (32, 40, 64):
                    # md5/sha1/sha256 - look the sample up directly.
                    paths = db.sample_path_by_hash(sample_hash)
                else:
                    # Otherwise treat the value as a task id.
                    task_binary = os.path.join(settings.CUCKOO_PATH, "storage", "analyses", str(sample_hash), "binary")
                    if os.path.exists(task_binary):
                        paths.append(task_binary)
                    else:
                        tmp_paths = db.find_sample(task_id=int(sample_hash))
                        if not tmp_paths:
                            details["errors"].append({sample_hash: "Task not found for resubmission"})
                            continue
                        for tmp_sample in tmp_paths:
                            path = False
                            tmp_dict = tmp_sample.to_dict()
                            if os.path.exists(tmp_dict.get("target", "")):
                                path = tmp_dict["target"]
                            else:
                                # Fall back to the stored binary of any sibling task.
                                tmp_tasks = db.find_sample(sample_id=tmp_dict["sample_id"])
                                for tmp_task in tmp_tasks:
                                    tmp_path = os.path.join(settings.CUCKOO_PATH, "storage", "binaries", tmp_task.to_dict()["sha256"])
                                    if os.path.exists(tmp_path):
                                        path = tmp_path
                                        break
                            if path:
                                paths.append(path)
                if not paths:
                    details["errors"].append({sample_hash: "File not found on hdd for resubmission"})
                    continue
                content = get_file_content(paths)
                if not content:
                    details["errors"].append({sample_hash: f"Can't find {sample_hash} on disk"})
                    continue
                folder = os.path.join(settings.TEMP_PATH, "cape-resubmit")
                if not os.path.exists(folder):
                    os.makedirs(folder)
                base_dir = tempfile.mkdtemp(prefix="resubmit_", dir=folder)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + sanitize_filename(sample_hash)
                path = store_temp_file(content, filename)
                list_of_files.append((content, path, sample_hash))

        # Hack for resubmit first find all files and then put task as proper category
        if job_category and job_category in ("resubmit", "sample", "quarantine", "static", "pcap", "dlnexec", "vtdl"):
            task_category = job_category

        if task_category == "resubmit":
            for content, path, sha256 in list_of_files:
                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    # FIX: report the current file's path; "filename" was a stale
                    # (possibly unbound) variable from the collection loop above.
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
                    if web_conf.general.get("existent_tasks", False):
                        # FIX: search on this tuple's hash, not a leaked loop variable.
                        records = perform_search("target_sha256", sha256, search_limit=5)
                        for record in records or []:
                            existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)

        elif task_category == "sample":
            details["service"] = "WebGUI"
            for content, path, sha256 in list_of_files:
                if web_conf.pre_script.enabled and "pre_script" in request.FILES:
                    pre_script = request.FILES["pre_script"]
                    details["pre_script_name"] = request.FILES["pre_script"].name
                    details["pre_script_content"] = pre_script.read()

                if web_conf.during_script.enabled and "during_script" in request.FILES:
                    during_script = request.FILES["during_script"]
                    details["during_script_name"] = request.FILES["during_script"].name
                    details["during_script_content"] = during_script.read()

                if timeout and web_conf.public.enabled and web_conf.public.timeout and timeout > web_conf.public.timeout:
                    timeout = web_conf.public.timeout

                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256", sha256, search_limit=5)
                        for record in records:
                            if record.get("target").get("file", {}).get("sha256"):
                                existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)
                    details["task_ids"] = task_ids_tmp

        elif task_category == "quarantine":
            for content, tmp_path, sha256 in list_of_files:
                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except Exception as e:
                    print(e)

                if not path:
                    # FIX: "path" is falsy here; name the uploaded file instead.
                    details["errors"].append({os.path.basename(tmp_path): "You uploaded an unsupported quarantine file."})
                    continue

                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp

        elif task_category == "static":
            for content, path, sha256 in list_of_files:
                task_id = db.add_static(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if not task_id:
                    return render(request, "error.html", {"error": "We don't have static extractor for this"})
                details["task_ids"] += task_id

        elif task_category == "pcap":
            for content, path, sha256 in list_of_files:
                # FIX: "path" is str; comparing against b".saz" raised TypeError.
                if path.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except Exception:
                            pass
                        path = saz
                    else:
                        details["errors"].append({os.path.basename(path): "Conversion from SAZ to PCAP failed."})
                        continue

                task_id = db.add_pcap(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if task_id:
                    details["task_ids"].append(task_id)

        elif task_category == "url":
            for _, url, _ in list_of_files:
                if machine.lower() == "all":
                    machines = [vm.name for vm in db.list_machines(platform=platform)]
                elif machine:
                    machine_details = db.view_machine(machine)
                    if platform and hasattr(machine_details, "platform") and machine_details.platform != platform:
                        details["errors"].append(
                            {os.path.basename(url): f"Wrong platform, {machine_details.platform} VM selected for {platform} sample"}
                        )
                        continue
                    else:
                        machines = [machine]
                else:
                    # Let the scheduler pick any available machine.
                    machines = [None]

                for entry in machines:
                    task_id = db.add_url(
                        url=url,
                        package=package,
                        timeout=timeout,
                        priority=priority,
                        options=options,
                        machine=entry,
                        platform=platform,
                        tags=tags,
                        custom=custom,
                        memory=memory,
                        enforce_timeout=enforce_timeout,
                        clock=clock,
                        shrike_url=shrike_url,
                        shrike_msg=shrike_msg,
                        shrike_sid=shrike_sid,
                        shrike_refer=shrike_refer,
                        route=route,
                        cape=cape,
                        tags_tasks=tags_tasks,
                        user_id=request.user.id or 0,
                    )
                    details["task_ids"].append(task_id)

        elif task_category == "dlnexec":
            for content, path, sha256 in list_of_files:
                details["path"] = path
                details["content"] = content
                details["service"] = "DLnExec"
                details["source_url"] = samples
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp

        elif task_category == "vtdl":
            if not settings.VTDL_KEY or not settings.VTDL_PATH:
                return render(
                    request,
                    "error.html",
                    {
                        "error": "You specified VirusTotal but must edit the file and specify your VTDL_KEY variable and VTDL_PATH base directory"
                    },
                )
            else:
                if opt_apikey:
                    details["apikey"] = opt_apikey
                details = download_from_vt(samples, details, opt_filename, settings)

        if details.get("task_ids"):
            tasks_count = len(details["task_ids"])
        else:
            tasks_count = 0
        if tasks_count > 0:
            data = {
                "tasks": details["task_ids"],
                "tasks_count": tasks_count,
                "errors": details["errors"],
                "existent_tasks": existent_tasks,
                "remote_console": remote_console,
            }
            return render(request, "submission/complete.html", data)
        else:
            return render(
                request,
                "error.html",
                {"error": "Error adding task(s) to CAPE's database.", "errors": details["errors"]},
            )
    else:
        # GET: build the feature/config summary consumed by the template.
        enabledconf = {}
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = processing.memory.get("enabled")
        enabledconf["procmemory"] = processing.procmemory.get("enabled")
        enabledconf["dlnexec"] = settings.DLNEXEC
        enabledconf["url_analysis"] = settings.URL_ANALYSIS
        enabledconf["tags"] = False
        enabledconf["dist_master_storage_only"] = repconf.distributed.master_storage_only
        enabledconf["linux_on_gui"] = web_conf.linux.enabled
        enabledconf["tlp"] = web_conf.tlp.enabled
        enabledconf["timeout"] = cfg.timeouts.default
        enabledconf["amsidump"] = web_conf.amsidump.enabled
        enabledconf["pre_script"] = web_conf.pre_script.enabled
        enabledconf["during_script"] = web_conf.during_script.enabled

        if all_vms_tags:
            enabledconf["tags"] = True

        if not enabledconf["tags"]:
            # load multi machinery tags:
            # Get enabled machinery
            machinery = cfg.cuckoo.get("machinery")
            if machinery == "multi":
                for mmachinery in Config(machinery).multi.get("machinery").split(","):
                    vms = [x.strip() for x in getattr(Config(mmachinery), mmachinery).get("machines").split(",") if x.strip()]
                    if any(["tags" in list(getattr(Config(mmachinery), vmtag).keys()) for vmtag in vms]):
                        enabledconf["tags"] = True
                        break
            else:
                # Get VM names for machinery config elements
                vms = [x.strip() for x in str(getattr(Config(machinery), machinery).get("machines")).split(",") if x.strip()]
                # Check each VM config element for tags
                if any(["tags" in list(getattr(Config(machinery), vmtag).keys()) for vmtag in vms]):
                    enabledconf["tags"] = True

        packages, machines = get_form_data("windows")

        socks5s = _load_socks5_operational()
        socks5s_random = ""
        vpn_random = ""

        # FIX: random.choice() needs a sequence; dict.values() is a view.
        if routing.socks5.random_socks5 and socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get("name", False)

        if routing.vpn.random_vpn:
            vpn_random = random.choice(list(vpns.values())).get("name", False)

        if socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get("name", False)

        random_route = False
        if vpn_random and socks5s_random:
            random_route = random.choice((vpn_random, socks5s_random))
        elif vpn_random:
            random_route = vpn_random
        elif socks5s_random:
            random_route = socks5s_random

        existent_tasks = {}
        if resubmit_hash:
            if web_conf.general.get("existent_tasks", False):
                records = perform_search("target_sha256", resubmit_hash, search_limit=5)
                for record in records:
                    existent_tasks.setdefault(record["target"]["file"]["sha256"], list())
                    existent_tasks[record["target"]["file"]["sha256"]].append(record)

        return render(
            request,
            "submission/index.html",
            {
                "packages": sorted(packages),
                "machines": machines,
                "vpns": list(vpns.values()),
                "random_route": random_route,
                "socks5s": list(socks5s.values()),
                "route": routing.routing.route,
                "internet": routing.routing.internet,
                "inetsim": routing.inetsim.enabled,
                "tor": routing.tor.enabled,
                "config": enabledconf,
                "resubmit": resubmit_hash,
                "tags": sorted(list(set(all_vms_tags))),
                "existent_tasks": existent_tasks,
                "all_exitnodes": all_nodes_exits_list,
            },
        )
def static_file_info(
    data_dictionary: dict, file_path: str, task_id: str, package: str, options: str, destination_folder: str, results: dict
):
    """Run all applicable static analyzers on a file and store their output.

    Dispatches on the libmagic type string in data_dictionary["type"] and the
    requested analysis package, filling data_dictionary in place with per-format
    sections (pe, office, pdf, wsf, lnk, java, ...) plus generic ones (floss,
    strings, trid, die, virustotal). Returns None; results are side effects on
    data_dictionary and extracted files under destination_folder.
    """
    # Skip oversized files entirely (size compared in whole megabytes).
    if int(os.path.getsize(file_path) / (1024 * 1024)) > int(processing_conf.static.max_file_size):
        return

    # OOXML documents are zip containers; warn if oletools is missing.
    if (
        not HAVE_OLETOOLS
        and "Zip archive data, at least v2.0" in data_dictionary["type"]
        and package in {"doc", "ppt", "xls", "pub"}
    ):
        log.info("Missed dependencies: pip3 install oletools")

    options_dict = get_options(options)

    # Format dispatch: order matters — first matching branch wins.
    if HAVE_PEFILE and ("PE32" in data_dictionary["type"] or "MS-DOS executable" in data_dictionary["type"]):
        data_dictionary["pe"] = PortableExecutable(file_path).run(task_id)

        if HAVE_FLARE_CAPA:
            capa_details = flare_capa_details(file_path, "static")
            if capa_details:
                data_dictionary["flare_capa"] = capa_details

        if HAVE_FLOSS:
            floss_strings = Floss(file_path, "static", "pe").run()
            if floss_strings:
                data_dictionary["floss"] = floss_strings

        # .NET assemblies show up as "Mono" in the magic string.
        if "Mono" in data_dictionary["type"]:
            data_dictionary["dotnet"] = DotNETExecutable(file_path).run()
    elif HAVE_OLETOOLS and package in {"doc", "ppt", "xls", "pub"}:
        # options is dict where we need to get pass get_options
        data_dictionary["office"] = Office(file_path, task_id, data_dictionary["sha256"], options_dict).run()
    elif "PDF" in data_dictionary["type"] or file_path.endswith(".pdf"):
        data_dictionary["pdf"] = PDF(file_path).run()
    elif package in {"wsf", "hta"} or data_dictionary["type"] == "XML document text" or file_path.endswith(".wsf"):
        data_dictionary["wsf"] = WindowsScriptFile(file_path).run()
    # elif package in {"js", "vbs"}:
    #     data_dictionary["js"] = EncodedScriptFile(file_path).run()
    elif package == "lnk" or "MS Windows shortcut" in data_dictionary["type"]:
        data_dictionary["lnk"] = LnkShortcut(file_path).run()
    elif "Java Jar" in data_dictionary["type"] or file_path.endswith(".jar"):
        if selfextract_conf.procyon.binary and not os.path.exists(selfextract_conf.procyon.binary):
            log.error("procyon_path specified in processing.conf but the file does not exist")
        else:
            data_dictionary["java"] = Java(file_path, selfextract_conf.procyon.binary).run()

    # It's possible to fool libmagic into thinking our 2007+ file is a zip.
    # So until we have static analysis for zip files, we can use oleid to fail us out silently,
    # yielding no static analysis results for actual zip files.
    # elif "ELF" in data_dictionary["type"] or file_path.endswith(".elf"):
    #     data_dictionary["elf"] = ELF(file_path).run()
    #     data_dictionary["keys"] = f.get_keys()
    # elif HAVE_OLETOOLS and package == "hwp":
    #     data_dictionary["hwp"] = HwpDocument(file_path).run()

    # Generic (format-independent) analyzers follow.
    with open(file_path, "rb") as f:
        is_text_file(data_dictionary, file_path, 8192, f.read())

    if processing_conf.trid.enabled:
        trid_info(file_path, data_dictionary)

    if processing_conf.die.enabled:
        detect_it_easy_info(file_path, data_dictionary)

    # May overwrite the PE-specific floss result set above with a generic run.
    if HAVE_FLOSS and processing_conf.floss.enabled:
        floss_strings = Floss(file_path, package).run()
        if floss_strings:
            data_dictionary["floss"] = floss_strings

    if HAVE_STRINGS:
        strings = extract_strings(file_path)
        if strings:
            data_dictionary["strings"] = strings

    # ToDo we need url support
    if HAVE_VIRUSTOTAL and processing_conf.virustotal.enabled:
        vt_details = vt_lookup("file", file_path, results)
        if vt_details:
            data_dictionary["virustotal"] = vt_details

    generic_file_extractors(file_path, destination_folder, data_dictionary["type"], data_dictionary, options_dict, results)
def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_push=False, db=None):
    """Move pending tasks from the main DB into the distributed DB and push
    them to a worker node.

    @param node_id: name of the target node (looked up in the dist DB).
    @param pend_tasks_num: maximum number of tasks to submit in this run.
    @param options_like: optional LIKE filter applied to task options.
    @param force_push_push: push tasks to the node immediately after insert.
    @param db: dist-DB session to reuse (see HACK note below).
    @return: False on DB error, True otherwise.
    """
    # HACK do not create a new session if the current one (passed as parameter) is still valid.
    try:
        node = db.query(Node).filter_by(name=node_id).first()
    except (OperationalError, SQLAlchemyError) as e:
        log.warning(
            "Got an operational Exception when trying to submit tasks: {}".format(e))
        return False

    limit = 0
    if node.name != "master":
        # don't do nothing if nothing in pending
        # Get tasks from main_db submitted through web interface
        main_db_tasks = main_db.list_tasks(status=TASK_PENDING, order_by=desc("priority"), options_like=options_like, limit=pend_tasks_num)
        if not main_db_tasks:
            return True
        if main_db_tasks:
            for t in main_db_tasks:
                force_push = False
                try:
                    # convert the options string to a dict, e.g. {'opt1': 'val1', 'opt2': 'val2', 'opt3': 'val3'}
                    options = get_options(t.options)
                    # check if node exist and its correct
                    if "node=" in t.options:
                        requested_node = options.get("node")
                        if requested_node not in STATUSES:
                            # if the requested node is not available
                            force_push = True
                        elif requested_node != node.name:
                            # otherwise keep looping
                            continue
                    if "timeout=" in t.options:
                        t.timeout = options["timeout"]
                except Exception as e:
                    log.error(e)
                # wtf are you doing in pendings?
                # A row for this main task already exists in the dist DB:
                # mark it running/distributed in the main DB and skip it.
                tasks = db.query(Task).filter_by(main_task_id=t.id).all()
                if tasks:
                    for task in tasks:
                        print(task.id, task.task_id, task.main_task_id, task.node_id)
                        # log.info("Deleting incorrectly uploaded file from dist db, main_task_id: {}".format(t.id))
                        # db.delete(task)
                        # db.commit()
                    if node.name == "master":
                        main_db.set_status(t.id, TASK_RUNNING)
                    else:
                        main_db.set_status(t.id, TASK_DISTRIBUTED)
                    continue
                # Check if file exist, if no wipe from db and continue, rare cases
                if t.category in ("file", "pcap", "static") and not os.path.exists(t.target):
                    log.info("Task id: {} - File doesn't exist: {}".format(t.id, t.target))
                    main_db.delete_task(t.id)
                    continue
                # Convert array of tags into comma separated list
                tags = ','.join([tag.name for tag in t.tags])
                # Append a comma, to make LIKE searches more precise
                if tags:
                    tags += ','
                # NOTE(review): the next two lines were secret-scrubbed in the
                # original source ("password="****** ...); reconstructed from
                # context — verify against upstream before relying on them.
                if "msoffice-crypt-tmp" in t.target and "password=" in t.options:
                    t.options = t.options.replace("password=", "pwd=")
                args = dict(package=t.package, category=t.category, timeout=t.timeout, priority=t.priority,
                            options=t.options+",main_task_id={}".format(t.id), machine=t.machine, platform=t.platform,
                            tags=tags, custom=t.custom, memory=t.memory, clock=t.clock,
                            enforce_timeout=t.enforce_timeout, main_task_id=t.id)
                task = Task(path=t.target, **args)
                db.add(task)
                try:
                    db.commit()
                except Exception as e:
                    log.exception(e)
                    log.info("TASK_FAILED_REPORTING")
                    db.rollback()
                    log.info(e)
                    continue

                if force_push or force_push_push:
                    # Submit appropriate tasks to node
                    submitted = node_submit_task(task.id, node.id)
                    if submitted:
                        if node.name == "master":
                            main_db.set_status(t.id, TASK_RUNNING)
                        else:
                            main_db.set_status(t.id, TASK_DISTRIBUTED)
                    limit += 1
                    if limit == pend_tasks_num or limit == len(main_db_tasks):
                        return True

        # Only get tasks that have not been pushed yet.
        q = db.query(Task).filter(
            or_(Task.node_id == None, Task.task_id == None), Task.finished == False)
        if q is None:
            return True
        # Order by task priority and task id.
        q = q.order_by(-Task.priority, Task.main_task_id)
        # if we have node set in options push
        if reporting_conf.distributed.enable_tags:
            # Create filter query from tasks in ta
            tags = [getattr(Task, "tags") == ""]
            for tg in SERVER_TAGS[node.name]:
                if len(tg.split(',')) == 1:
                    tags.append(getattr(Task, "tags") == (tg + ','))
                else:
                    tg = tg.split(',')
                    # ie. LIKE '%,%,%,'
                    t_combined = [getattr(Task, "tags").like(
                        "%s" % ('%,' * len(tg)))]
                    for tag in tg:
                        t_combined.append(
                            getattr(Task, "tags").like("%%%s%%" % (tag + ',')))
                    tags.append(and_(*t_combined))
            # Filter by available tags
            q = q.filter(or_(*tags))
        to_upload = q.limit(pend_tasks_num).all()
        if not to_upload:
            return True
        # Submit appropriate tasks to node
        log.debug("going to upload {} tasks to node {}".format(
            pend_tasks_num, node.name))
        for task in to_upload:
            submitted = node_submit_task(task.id, node.id)
            if submitted:
                if node.name == "master":
                    main_db.set_status(task.main_task_id, TASK_RUNNING)
                else:
                    main_db.set_status(
                        task.main_task_id, TASK_DISTRIBUTED)
            else:
                # Upload failed: drop the dist-DB row so it can be retried.
                print(
                    "something is wrong with submission of task: {}".format(task.id))
                db.delete(task)
                db.commit()
            limit += 1
            if limit == pend_tasks_num:
                return True
    return True
def route_network(self):
    """Enable network routing if desired."""
    # Start from the globally configured strategy (none, internet, VPN...).
    self.route = routing.routing.route

    # A per-task "route=..." option overrides the global default.
    if self.task.options:
        requested = get_options(self.task.options).get("route")
        if requested:
            self.route = requested

    route = self.route
    if route in ("none", "None", "drop"):
        self.interface = None
        self.rt_table = None
    elif route == "inetsim":
        self.interface = routing.inetsim.interface
    elif route == "tor":
        self.interface = routing.tor.interface
    elif route == "internet" and routing.routing.internet != "none":
        self.interface = routing.routing.internet
        self.rt_table = routing.routing.rt_table
    elif route in vpns:
        vpn = vpns[route]
        self.interface = vpn.interface
        self.rt_table = vpn.rt_table
    elif route in self.socks5s:
        self.interface = ""
    else:
        log.warning(
            "Unknown network routing destination specified, "
            "ignoring routing for this analysis: %r", route)
        self.interface = None
        self.rt_table = None

    # Check if the network interface is still available. If a VPN dies for
    # some reason, its tunX interface will no longer be available.
    if self.interface and not rooter("nic_available", self.interface):
        log.error(
            "The network interface '%s' configured for this analysis is "
            "not available at the moment, switching to route=none mode.",
            self.interface,
        )
        self.route = "none"
        self.interface = None
        self.rt_table = None

    # Ask the rooter to set up the chosen route for this machine.
    route = self.route
    if route == "inetsim":
        self.rooter_response = rooter(
            "inetsim_enable",
            self.machine.ip,
            str(routing.inetsim.server),
            str(routing.inetsim.dnsport),
            str(self.cfg.resultserver.port),
            str(routing.inetsim.ports),
        )
    elif route == "tor":
        self.rooter_response = rooter(
            "socks5_enable",
            self.machine.ip,
            str(self.cfg.resultserver.port),
            str(routing.tor.dnsport),
            str(routing.tor.proxyport),
        )
    elif route in self.socks5s:
        proxy = self.socks5s[route]
        self.rooter_response = rooter(
            "socks5_enable",
            self.machine.ip,
            str(self.cfg.resultserver.port),
            str(proxy["dnsport"]),
            str(proxy["port"]),
        )
    elif route in ("none", "None", "drop"):
        self.rooter_response = rooter("drop_enable", self.machine.ip, str(self.cfg.resultserver.port))
    self._rooter_response_check()

    if self.interface:
        self.rooter_response = rooter("forward_enable", self.machine.interface, self.interface, self.machine.ip)
        self._rooter_response_check()

    log.info("Enabled route '%s'", self.route)

    if self.rt_table:
        self.rooter_response = rooter("srcroute_enable", self.rt_table, self.machine.ip)
        self._rooter_response_check()